Actual source code: pvec2.c
petsc-3.7.4 2016-10-02
/*
   Code for some of the parallel vector primitives.
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>
#include <petscblaslapack.h>
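/*
   VecMDot_MPI - computes the nv inner products of xin with the vectors y[] for an
   MPI vector.  Each process forms its local contributions with VecMDot_Seq and the
   partial results are summed with a single MPIU_Allreduce, so the communication
   cost is one reduction regardless of nv.  A 128-entry stack buffer avoids a heap
   allocation in the common case of few vectors.
*/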
PetscErrorCode VecMDot_MPI(Vec xin,PetscInt nv,const Vec y[],PetscScalar *z)
{
  PetscScalar    awork[128],*work = awork;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (nv > 128) {
    ierr = PetscMalloc1(nv,&work);CHKERRQ(ierr);
  }
  ierr = VecMDot_Seq(xin,nv,y,work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(work,z,nv,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  if (nv > 128) {
    ierr = PetscFree(work);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
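/*
   VecMTDot_MPI - the "transpose" variant of VecMDot_MPI: the local products are
   computed with VecMTDot_Seq (no complex conjugation) and combined with the same
   single MPIU_Allreduce.
*/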
PetscErrorCode VecMTDot_MPI(Vec xin,PetscInt nv,const Vec y[],PetscScalar *z)
{
  PetscScalar    awork[128],*work = awork;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (nv > 128) {
    ierr = PetscMalloc1(nv,&work);CHKERRQ(ierr);
  }
  ierr = VecMTDot_Seq(xin,nv,y,work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(work,z,nv,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  if (nv > 128) {
    ierr = PetscFree(work);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#include <../src/vec/vec/impls/seq/ftn-kernels/fnorm.h>
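/*
   VecNorm_MPI - computes a vector norm for an MPI vector.  The local part is
   evaluated on each process (a BLAS dot product for the 2-norm, VecNorm_Seq
   otherwise) and combined across processes with MPIU_SUM (1-norm, squared 2-norm)
   or MPIU_MAX (infinity norm).  NORM_1_AND_2 packs both local values into one
   reduction to save a message.
*/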
PetscErrorCode VecNorm_MPI(Vec xin,NormType type,PetscReal *z)
{
  PetscReal         sum,work = 0.0;
  const PetscScalar *xx;
  PetscErrorCode    ierr;
  PetscInt          n   = xin->map->n;
  PetscBLASInt      one = 1,bn;

  PetscFunctionBegin;
  ierr = PetscBLASIntCast(n,&bn);CHKERRQ(ierr);
  if (type == NORM_2 || type == NORM_FROBENIUS) {
    ierr = VecGetArrayRead(xin,&xx);CHKERRQ(ierr);
    work = PetscRealPart(BLASdot_(&bn,xx,&one,xx,&one));
    ierr = VecRestoreArrayRead(xin,&xx);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(&work,&sum,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    *z   = PetscSqrtReal(sum);
    ierr = PetscLogFlops(2.0*xin->map->n);CHKERRQ(ierr);
  } else if (type == NORM_1) {
    /* Find the local part */
    ierr = VecNorm_Seq(xin,NORM_1,&work);CHKERRQ(ierr);
    /* Find the global sum */
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else if (type == NORM_INFINITY) {
    /* Find the local max */
    ierr = VecNorm_Seq(xin,NORM_INFINITY,&work);CHKERRQ(ierr);
    /* Find the global max */
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else if (type == NORM_1_AND_2) {
    PetscReal temp[2];
    ierr = VecNorm_Seq(xin,NORM_1,temp);CHKERRQ(ierr);
    ierr = VecNorm_Seq(xin,NORM_2,temp+1);CHKERRQ(ierr);
    temp[1] = temp[1]*temp[1];
    ierr = MPIU_Allreduce(temp,z,2,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    z[1] = PetscSqrtReal(z[1]);
  }
  PetscFunctionReturn(0);
}
/*
   These two functions are the MPI reduction operations used for max and min with index.
   A call to MPI_Op_create() converts the function Vec[Max,Min]_Local() to the MPI operator Vec[Max,Min]_Local_Op.
   These are marked PETSC_EXTERN since the function pointers are passed to MPI.
*/
MPI_Op VecMax_Local_Op = 0;
MPI_Op VecMin_Local_Op = 0;
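/*
   The operators themselves are created elsewhere in the library; a minimal sketch of
   such a registration (illustrative only, the actual call site and arguments are not
   part of this file):

       MPI_Op_create((MPI_User_function*)VecMax_Local,1,&VecMax_Local_Op);
       MPI_Op_create((MPI_User_function*)VecMin_Local,1,&VecMin_Local_Op);

   Each reduction operates on {value,index} pairs stored as two consecutive PetscReal
   entries, so the index travels through MPI encoded as a floating-point number.
*/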
PETSC_EXTERN void MPIAPI VecMax_Local(void *in,void *out,PetscMPIInt *cnt,MPI_Datatype *datatype)
{
  PetscReal *xin = (PetscReal*)in,*xout = (PetscReal*)out;

  PetscFunctionBegin;
  if (*datatype != MPIU_REAL) {
    (*PetscErrorPrintf)("Can only handle MPIU_REAL data types");
    MPI_Abort(MPI_COMM_SELF,1);
  }
  if (xin[0] > xout[0]) {          /* strictly larger value wins */
    xout[0] = xin[0];
    xout[1] = xin[1];
  } else if (xin[0] == xout[0]) {  /* on a tie, keep the smaller index */
    xout[1] = PetscMin(xin[1],xout[1]);
  }
  PetscFunctionReturnVoid(); /* cannot return a value */
}
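/*
   VecMin_Local mirrors VecMax_Local with the comparison reversed; ties are again
   resolved in favor of the smaller index.
*/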
PETSC_EXTERN void MPIAPI VecMin_Local(void *in,void *out,PetscMPIInt *cnt,MPI_Datatype *datatype)
{
  PetscReal *xin = (PetscReal*)in,*xout = (PetscReal*)out;

  PetscFunctionBegin;
  if (*datatype != MPIU_REAL) {
    (*PetscErrorPrintf)("Can only handle MPIU_REAL data types");
    MPI_Abort(MPI_COMM_SELF,1);
  }
  if (xin[0] < xout[0]) {
    xout[0] = xin[0];
    xout[1] = xin[1];
  } else if (xin[0] == xout[0]) {
    xout[1] = PetscMin(xin[1],xout[1]);
  }
  PetscFunctionReturnVoid(); /* cannot return a value */
}
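/*
   VecMax_MPI - finds the global maximum entry of an MPI vector and, if requested,
   its global index.  When no index is wanted a plain MPIU_MAX reduction suffices;
   otherwise the local {max, global index} pair is reduced with the custom
   VecMax_Local_Op defined above.
*/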
PetscErrorCode VecMax_MPI(Vec xin,PetscInt *idx,PetscReal *z)
{
  PetscErrorCode ierr;
  PetscReal      work;

  PetscFunctionBegin;
  /* Find the local max */
  ierr = VecMax_Seq(xin,idx,&work);CHKERRQ(ierr);

  /* Find the global max */
  if (!idx) {
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else {
    PetscReal work2[2],z2[2];
    PetscInt  rstart;

    rstart   = xin->map->rstart;
    work2[0] = work;
    work2[1] = *idx + rstart;   /* convert the local index to a global index */
    ierr     = MPIU_Allreduce(work2,z2,2,MPIU_REAL,VecMax_Local_Op,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    *z       = z2[0];
    *idx     = (PetscInt)z2[1];
  }
  PetscFunctionReturn(0);
}
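/*
   VecMin_MPI - the minimum counterpart of VecMax_MPI, using MPIU_MIN when no index
   is requested and VecMin_Local_Op for the combined {min, global index} reduction.
*/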
PetscErrorCode VecMin_MPI(Vec xin,PetscInt *idx,PetscReal *z)
{
  PetscErrorCode ierr;
  PetscReal      work;

  PetscFunctionBegin;
  /* Find the local min */
  ierr = VecMin_Seq(xin,idx,&work);CHKERRQ(ierr);

  /* Find the global min */
  if (!idx) {
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else {
    PetscReal work2[2],z2[2];
    PetscInt  rstart;

    ierr     = VecGetOwnershipRange(xin,&rstart,NULL);CHKERRQ(ierr);
    work2[0] = work;
    work2[1] = *idx + rstart;   /* convert the local index to a global index */
    ierr     = MPIU_Allreduce(work2,z2,2,MPIU_REAL,VecMin_Local_Op,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    *z       = z2[0];
    *idx     = (PetscInt)z2[1];
  }
  PetscFunctionReturn(0);
}
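/*
   Example of how these implementations are reached through the public API (a sketch,
   not part of this file; assumes a standard PETSc program with PetscInitialize already
   called and an MPI vector living on PETSC_COMM_WORLD):

       Vec       x;
       PetscReal val;
       PetscInt  loc;

       VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
       VecSetRandom(x,NULL);
       VecMax(x,&loc,&val);      dispatches to VecMax_MPI for an MPI vector
       VecMin(x,&loc,&val);      dispatches to VecMin_MPI
       VecNorm(x,NORM_2,&val);   dispatches to VecNorm_MPI
       VecDestroy(&x);
*/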