Actual source code: xyt.c
/*
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0
typedef struct xyt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xyt_info;
typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;
struct xyt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xyt_info *info;
  mv_info  *mvi;
};
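
/*
   Note (added): storage convention shared by xyt_generate() and
   do_xyt_solve().  Each column of the sparse factors X and Y is one
   contiguous run of values plus an (offset, length) pair: xcol_indices packs
   [off_0, len_0, off_1, len_1, ..., -1] with a -1 terminator, xcol_sz[i]
   duplicates len_i, and xcol_vals[i] points at column i's run inside the
   single array x (likewise for the y* fields).  A minimal sketch of walking
   this layout, with hypothetical data:

     PetscInt     idx[] = {2, 3, 0, 2, -1};     // col 0: rows 2..4, col 1: rows 0..1
     PetscScalar  val[] = {.1, .2, .3, .4, .5}; // 3 + 2 packed values
     PetscScalar *ptr   = val;
     for (PetscInt *ip = idx; *ip != -1; ptr += ip[1], ip += 2) {
       PetscInt off = ip[0], len = ip[1]; // ptr[0..len-1] holds rows off..off+len-1
     }
*/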
static PetscInt n_xyt = 0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);
xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... pot. problem here */
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return xyt_handle;
}
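
/*
   Note (added): a minimal sketch of the intended call sequence, assuming a
   caller-supplied local matvec my_matvec(grid_data, in, out) and
   local-to-global map l2g (both hypothetical names):

     xyt_ADT handle = XYT_new();
     XYT_factor(handle, l2g, n, m, my_matvec, grid_data); // build sparse X, Y once
     XYT_solve(handle, x, b);                             // x = X.Y^T.b, repeatable
     XYT_free(handle);                                    // release factors and handle
*/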
PetscErrorCode XYT_factor(xyt_ADT xyt_handle,     /* prev. allocated xyt handle */
                          PetscInt *local2global, /* global column mapping      */
                          PetscInt n,             /* local num rows             */
                          PetscInt m,             /* local num cols             */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc */
                          void *grid_data)        /* grid data for matvec       */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xyt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %" PetscInt_FMT " != %" PetscInt_FMT, (PetscInt)(1 << PCTFS_i_log2_num_nodes), PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info *)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns = 0;

  /* determine separators and generate firing order - NB xyt info set here */
  PetscCall(det_separators(xyt_handle));

  PetscCall(do_xyt_factor(xyt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* need to copy b into x? */
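  /* Note (added): a NULL b means the right-hand side is already in x, and
     the solve then overwrites x in place */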
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xyt_handle->mvi->n));
  PetscCall(do_xyt_solve(xyt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode XYT_free(xyt_ADT xyt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle));
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* This function is currently not used */
PetscErrorCode XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* if factorization not done there are no stats */
  if (!xyt_handle->info || !xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XYT_stats() :: no stats available!\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  vals[0] = vals[1] = vals[2] = xyt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xyt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xyt_handle->info->msg_buf_sz;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));

  fvals[0] = fvals[1] = fvals[2] = xyt_handle->info->tot_solve_time / xyt_handle->info->nsolves++;
  PetscCall(PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop));

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg xyt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: tot xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: xyt C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: xyt C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg xyt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: tot xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg xyt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.

   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}
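
/*
   Note (added): what the factorization produces, in the notation of this
   file's comments: xyt_generate() builds sparse factors X and Y with
   Y = A.X and Y^T.Y = I, hence Y^T.A.X = I, so do_xyt_solve() applies the
   inverse as

      x = A^{-1}.b = X.(Y^T.b).

   Each pass of the generation loop fires one separator dof: v_l starts as a
   unit vector, the span of the previous columns is projected out
   (v_l = v_l - X.Y^T.A.v_l), and the pair (v_l, A.v_l) is scaled by
   1/sqrt(alpha) with alpha = (A.v_l)^T.(A.v_l) before being appended to X
   and Y.
*/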
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt      i, j, k, idx;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr, *y_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xyt_handle->mvi->n;
  PetscInt      a_m            = xyt_handle->mvi->m;
  PetscInt     *a_local2global = xyt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      n, m;
  PetscInt     *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      n_global;
  PetscInt      xt_nnz = 0, xt_max_nnz = 0;
  PetscInt      yt_nnz = 0, yt_max_nnz = 0;
  PetscBLASInt  i1 = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xyt_handle->mvi->n;
  nsep            = xyt_handle->info->nsep;
  lnsep           = xyt_handle->info->lnsep;
  fo              = xyt_handle->info->fo;
  end             = lnsep[0];
  level           = xyt_handle->level;
  PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xyt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xyt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xyt_handle->ns));

  PetscCall(PetscInfo(0, "xyt_generate() :: X(%" PetscInt_FMT ",%" PetscInt_FMT ")\n", n, m));

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  xcol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  xcol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    xcol_indices[j] = xcol_indices[j + 1] = xcol_sz[i] = -1;
    xcol_vals[i]    = NULL;
  }
  xcol_indices[j] = -1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  ycol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  ycol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    ycol_indices[j] = ycol_indices[j + 1] = ycol_sz[i] = -1;
    ycol_vals[i]    = NULL;
  }
  ycol_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments                                       */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x          = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
  y          = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
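  /* Note (added): the initial allocation is a heuristic guess, not a bound:
     roughly 2.5 * n_global^(5/3) for the factors (the same growth rate as the
     C(3d) statistic reported by XYT_stats()) plus j*n/2 for vertices
     replicated across separators, divided evenly over the processors.
     Columns that overflow it trigger the doubling reallocations below
     ("increasing space for X by 2x!"). */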

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing?                                             */
    /* i.e. set v_l                                                            */
    /* use new seps and do global min across hc to determine which one to fire */
    (start < end) ? (col = fo[start]) : (col = INT_MAX);
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      PetscCheck(idx != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
      v[idx] = 1.0;
      j++;
    } else {
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx != -1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* uu = Y^T.u_l (local portion)                      */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ?          */
    PetscCall(PCTFS_rvec_zero(uu, m));
    y_ptr = y;
    iptr  = ycol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, y_ptr, &i1));
      y_ptr += len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr  = xcol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }
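    /* Note (added): a side effect relied on later in this iteration - the two
       projection loops above leave y_ptr and x_ptr exactly at the append
       points y + yt_nnz and x + xt_nnz, where the newly generated columns are
       copied when no reallocation occurs */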

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, u, &i1));
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));
    PetscCall(PCTFS_rvec_scale(u, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);
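    /* Note (added): the scan above brackets the nonzero run of v_l: off is
       the index of the first nonzero, len is left at the index of the last
       nonzero, and len -= (off - 1) converts it to the run length
       (last - first + 1) */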

    if (len > 0) {
      if ((xt_nnz + len) > xt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xt_nnz;
      }
      xt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      xcol_indices[2 * i] = off;
      xcol_sz[i] = xcol_indices[2 * i + 1] = len;
      xcol_vals[i]                         = x_ptr;
    } else {
      xcol_indices[2 * i] = 0;
      xcol_sz[i] = xcol_indices[2 * i + 1] = 0;
      xcol_vals[i]                         = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (u[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((yt_nnz + len) > yt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for Y by 2x!\n"));
        yt_max_nnz *= 2;
        y_ptr = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(y_ptr, y, yt_nnz));
        free(y);
        y = y_ptr;
        y_ptr += yt_nnz;
      }
      yt_nnz += len;
      PetscCall(PCTFS_rvec_copy(y_ptr, u + off, len));

      ycol_indices[2 * i] = off;
      ycol_sz[i] = ycol_indices[2 * i + 1] = len;
      ycol_vals[i]                         = y_ptr;
    } else {
      ycol_indices[2 * i] = 0;
      ycol_sz[i] = ycol_indices[2 * i + 1] = 0;
      ycol_vals[i]                         = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xyt_handle->info->n            = xyt_handle->mvi->n;
  xyt_handle->info->m            = m;
  xyt_handle->info->nnz          = xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz      = xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz   = stages[level] - stages[0];
  xyt_handle->info->solve_uu     = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->solve_w      = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->x            = x;
  xyt_handle->info->xcol_vals    = xcol_vals;
  xyt_handle->info->xcol_sz      = xcol_sz;
  xyt_handle->info->xcol_indices = xcol_indices;
  xyt_handle->info->stages       = stages;
  xyt_handle->info->y            = y;
  xyt_handle->info->ycol_vals    = ycol_vals;
  xyt_handle->info->ycol_sz      = ycol_sz;
  xyt_handle->info->ycol_indices = ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return PETSC_SUCCESS;
}
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level        = xyt_handle->level;
  PetscInt     n            = xyt_handle->info->n;
  PetscInt     m            = xyt_handle->info->m;
  PetscInt    *stages       = xyt_handle->info->stages;
  PetscInt    *xcol_indices = xyt_handle->info->xcol_indices;
  PetscInt    *ycol_indices = xyt_handle->info->ycol_indices;
  PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu = xyt_handle->info->solve_uu;
  PetscScalar *solve_w  = xyt_handle->info->solve_w;
  PetscScalar *x        = xyt_handle->info->x;
  PetscScalar *y        = xyt_handle->info->y;
  PetscBLASInt i1       = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x = X.Y^T.b */
  /* uu = Y^T.b  */
  for (y_ptr = y, iptr = ycol_indices; *iptr != -1; y_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, y_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));
  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = xcol_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
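
/*
   Note (added): reading the two loops above in matrix form, assuming the
   factors satisfy Y^T.A.X = I: the first loop forms uu[k] = <y_k, uc> over
   the stored span of each column y_k of Y (uu = Y^T.b); PCTFS_ssgl_radd()
   then sums the separator segments of uu across the hypercube stages; the
   second loop accumulates uc += uu[k] * x_k over the columns x_k of X
   (x = X.uu).
*/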
static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xyt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xyt_handle);

  vals[0] = vals[1] = xyt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(vals[0] == vals[1] && xyt_handle->id > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xyt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xyt_handle->mvi->local2global;
  PetscInt     n               = xyt_handle->mvi->n;
  PetscInt     m               = xyt_handle->mvi->m;
  PetscInt     level           = xyt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PetscInfo(0, "done first PCTFS_gs_gop_hc\n"));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
    if (lhs[i] != 1.0) shared = 1;
  }

  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  xyt_handle->info->n_global = xyt_handle->info->m_global = (PetscInt)rsum[0];
  xyt_handle->mvi->n_global = xyt_handle->mvi->m_global = (PetscInt)rsum[0];
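  /* Note (added): after the gather/scatter sum above, lhs[i] holds the number
     of processors sharing dof i, so summing 1.0/lhs[i] over every local copy
     counts each global dof exactly once; the +0.1 guards the truncation to
     PetscInt against floating-point round-off.  Any lhs[i] != 1.0 flags a
     shared (interface) dof, which this XYT port does not yet handle (see the
     PetscCheck below). */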

  /* determine separator sets top down */
  PetscCheck(!shared, PETSC_COMM_SELF, PETSC_ERR_PLIB, "shared dof separator determination not ready ... see hmt!!!");
  /* solution is to do as in the symmetric shared case but then */
  /* pick the sub-hc with the most free dofs and do a mat-vec   */
  /* and pick up the responses on the other sub-hc from the     */
  /* initial separator set obtained from the symm. shared case  */
  /* [dead code deleted since it is unlikely to be completed]   */
  for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
    /* set rsh of hc, fire, and collect lhs responses */
    PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
    PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

    /* set lsh of hc, fire, and collect rhs responses */
    PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
    PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

    /* count number of dofs I own that have signal and not in sep set */
    PetscCall(PCTFS_ivec_zero(sum, 4));
    for (ct = i = 0; i < n; i++) {
      if (!used[i]) {
        /* number of unmarked dofs on node */
        ct++;
        /* number of dofs to be marked on lhs hc */
        if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
        /* number of dofs to be marked on rhs hc */
        if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
      }
    }

    /* for the non-symmetric case we need separators of width 2 */
    /* so take both sides                                       */
    (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
    PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

    ct = 0;
    if (id < mask) {
      /* mark dofs I own that have signal and not in sep set */
      for (i = 0; i < n; i++) {
        if ((!used[i]) && (lhs[i] != 0.0)) {
          ct++;
          nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
      }
      /* LSH hc summation of ct should be sum[0] */
    } else {
      /* mark dofs I own that have signal and not in sep set */
      for (i = 0; i < n; i++) {
        if ((!used[i]) && (rhs[i] != 0.0)) {
          ct++;
          nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
      }
      /* RSH hc summation of ct should be sum[1] */
    }

    if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
    lnsep[edge] = ct;
    nsep[edge]  = sum[0] + sum[1];
    dir[edge]   = BOTH;

    /* LATER or we can recur on these to order seps at this level */
    /* do we need full set of separators for this?                */

    /* fold rhs hc into lower */
    if (id >= mask) id -= mask;
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}
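
/*
   Note (added): a concrete trace of the recursion above, assuming
   PCTFS_num_nodes = 4 (so level = 2).  Pass edge = 2 splits ranks {0,1} vs
   {2,3} with mask = 2: each half fires a vector of ones through the
   gather/scatter, and any unmarked dof that hears the other half's signal
   joins the level-2 separator.  Ranks >= mask then fold onto 0..1 and
   edge = 1 repeats the split with mask = 1.  Whatever is still unmarked
   after edge = 1 is purely on-processor and is swept into the edge = 0
   remainder set above.  Since fo[] is filled from the back (*--iptr), the
   on-processor dofs fire first in xyt_generate() and the top-level
   separators fire last.
*/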
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return mvi;
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
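  /* Note (added): the callback registered via XYT_factor() receives the
     user's grid_data pointer as its first argument; the mv_info * in the
     stored signature exists only for cast compatibility, hence the cast
     below */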
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}