Actual source code: ex2f.F90
!
! Description: Solves a linear system in parallel with KSP (Fortran code).
! Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscksp.h>

module ex2fmodule
use petscksp
implicit none
contains
! --------------------------------------------------------------
!
! MyKSPMonitor - This is a user-defined routine for monitoring
! the KSP iterative solvers.
!
! Input Parameters:
! ksp   - iterative context
! n     - iteration number
! rnorm - 2-norm (preconditioned) residual value (may be estimated)
! vf    - viewer and format context (the user-defined monitor context)
!
subroutine MyKSPMonitor(ksp, n, rnorm, vf, ierr)
KSP ksp
Vec x
PetscErrorCode ierr
PetscInt n
PetscViewerAndFormat vf
PetscMPIInt rank
PetscReal rnorm

! Build the solution vector
PetscCallA(KSPBuildSolution(ksp, PETSC_NULL_VEC, x, ierr))
PetscCallA(KSPMonitorTrueResidual(ksp, n, rnorm, vf, ierr))

! Write the iteration number and residual norm to stdout.
! Since Fortran I/O may be flushed differently than C's, the
! solution vector and the residual norm cannot reliably be
! printed together in CI, so the VecView() call below is commented out.
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
if (rank == 0) write (6, 100) n
! PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
if (rank == 0) write (6, 200) n, rnorm

100 format('iteration ', i5, ' solution vector:')
200 format('iteration ', i5, ' residual norm ', e11.4)
ierr = 0
end
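
! (This monitor is registered in the main program below via
! KSPMonitorSet() whenever the -my_ksp_monitor option is given.)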

! --------------------------------------------------------------
!
! MyKSPConverged - This is a user-defined routine for testing
! convergence of the KSP iterative solvers.
!
! Input Parameters:
! ksp    - iterative context
! n      - iteration number
! rnorm  - 2-norm (preconditioned) residual value (may be estimated)
! unused - optional user-defined convergence context (unused here)
!
subroutine MyKSPConverged(ksp, n, rnorm, flag, unused, ierr)
KSP ksp
PetscErrorCode, intent(out) :: ierr
PetscInt n, unused
KSPConvergedReason, intent(out) :: flag
PetscReal, intent(in) :: rnorm
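
! A deliberately simple test: declare convergence as soon as the
! residual norm drops below the fixed threshold 0.05, otherwise keep
! iterating. A production test would normally use a tolerance relative
! to the initial residual rather than an absolute cutoff.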
if (rnorm <= .05) then
   flag = KSP_CONVERGED_RTOL_NORMAL_EQUATIONS
else
   flag = KSP_CONVERGED_ITERATING
end if
ierr = 0

end
end module ex2fmodule

program main
use petscksp
use ex2fmodule
implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
! Variables:
! ksp     - Krylov subspace method (linear solver) context
! pc      - preconditioner context
! x, b, u - approx solution, right-hand side, exact solution vectors
! A       - matrix that defines linear system
! its     - iterations for convergence
! norm    - norm of error in solution
! rctx    - random number generator context
!
! Note that vectors are declared as PETSc "Vec" objects. These vectors
! are mathematical objects that contain more than just an array of
! double precision numbers; i.e., vectors in PETSc are not just
! double precision x(*). However, local vector data can easily be
! accessed via VecGetArray(). See the Fortran section of the PETSc
! users manual for details.
!
PetscReal norm
PetscInt i, j, II, JJ, m, n, its
PetscInt Istart, Iend, col(3)
PetscErrorCode ierr
PetscMPIInt rank, size
PetscBool flg
PetscScalar, parameter :: one = 1.0, neg_one = -1.0
PetscScalar v, val(3)
Vec x, b, u, xx, bb, uu
Mat A, AA
KSP ksp, kksp
PetscRandom rctx
PetscViewerAndFormat vf, vf2
PetscClassId classid
PetscViewer viewer
PetscLogEvent petscEventNo

! These variables are not currently used.
! PC pc
! PCType ptype
! PetscReal tol

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER, ierr))

PetscCallA(PetscLogNestedBegin(ierr))
PetscCallA(PetscLogEventRegister("myFirstEvent", classid, petscEventNo, ierr))

m = 3
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
n = 3
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Compute the matrix and right-hand-side vector that define
! the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Create parallel matrix, specifying only its global dimensions.
! When using MatCreate(), the matrix format can be specified at
! runtime. Also, the parallel partitioning of the matrix is
! determined by PETSc at runtime.

PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
PetscCallA(MatSetFromOptions(A, ierr))
PetscCallA(MatSetUp(A, ierr))

! Currently, all PETSc parallel matrix formats are partitioned by
! contiguous chunks of rows across the processors. Determine which
! rows of the matrix are locally owned.

PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
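
! For example, with the default m = n = 3 and 2 MPI ranks, the 9 rows
! are split as evenly as PETSC_DECIDE allows: rank 0 typically owns
! rows 0..4 and rank 1 rows 5..8.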

! Set matrix elements for the 2-D, five-point stencil in parallel.
! - Each processor needs to insert only elements that it owns
!   locally (but any non-local elements will be sent to the
!   appropriate processor during matrix assembly).
! - Always specify global rows and columns of matrix entries.
! - Note that MatSetValues() uses 0-based row and column numbers
!   in Fortran as well as in C.

! Note: this uses the less common natural ordering that orders first
! all the unknowns for x = h, then for x = 2h, etc.; hence you see
! JJ = II +- n instead of JJ = II +- m as you might expect. The more
! standard ordering would first do all variables for y = h, then y = 2h, etc.
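!
! For illustration, with m = n = 3 the nine unknowns are laid out as
!      y = 3h:  2  5  8
!      y = 2h:  1  4  7
!      y = h:   0  3  6
! so neighbors in the x direction are II +- n = II +- 3 and neighbors
! in the y direction are II +- 1, matching the index arithmetic below.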

do II = Istart, Iend - 1
   v = -1.0
   i = II/n
   j = II - i*n
   if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], INSERT_VALUES, ierr))
   end if
   if (i < m - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], INSERT_VALUES, ierr))
   end if
   if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], INSERT_VALUES, ierr))
   end if
   if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], INSERT_VALUES, ierr))
   end if
   v = 4.0
   PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [II], [v], INSERT_VALUES, ierr))
end do

! Assemble matrix, using the 2-step process:
!   MatAssemblyBegin(), MatAssemblyEnd()
! Computations can be done while messages are in transit
! by placing code between these two statements.

PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

! Create parallel vectors.
! - Here, the parallel partitioning of the vector is determined by
!   PETSc at runtime. We could also specify the local dimensions
!   if desired -- or use the more general routine VecCreate().
! - When solving a linear system, the vectors and matrices MUST
!   be partitioned accordingly. PETSc automatically generates
!   appropriately partitioned matrices and vectors when MatCreate()
!   and VecCreate() are used with the same communicator.
! - Note: We form 1 vector from scratch and then duplicate as needed.

PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD, PETSC_NULL_CHARACTER, 1_PETSC_INT_KIND, PETSC_DECIDE, m*n, u, ierr))
PetscCallA(VecSetFromOptions(u, ierr))
PetscCallA(VecDuplicate(u, b, ierr))
PetscCallA(VecDuplicate(b, x, ierr))

! Set exact solution; then compute right-hand-side vector.
! By default we use an exact solution with all elements equal to 1.0.
! Alternatively, the runtime option -random_exact_sol forms a solution
! vector with random components.

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-random_exact_sol', flg, ierr))
if (flg) then
   PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
   PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
   PetscCallA(VecSetRandom(u, rctx, ierr))
   PetscCallA(PetscRandomDestroy(rctx, ierr))
else
   PetscCallA(VecSet(u, one, ierr))
end if
PetscCallA(MatMult(A, u, b, ierr))

! View the exact solution vector if desired

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-view_exact_sol', flg, ierr))
if (flg) then
   PetscCallA(VecView(u, PETSC_VIEWER_STDOUT_WORLD, ierr))
end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Create linear solver context

PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

! Set operators. Here the matrix that defines the linear system
! also serves as the matrix from which the preconditioner is constructed.

PetscCallA(KSPSetOperators(ksp, A, A, ierr))

! Set linear solver defaults for this problem (optional).
! - By extracting the KSP and PC contexts from the KSP context,
!   we can then directly call any KSP and PC routines
!   to set various options.
! - The following statements are optional; all of these
!   parameters could alternatively be specified at runtime via
!   KSPSetFromOptions(). All of these defaults can be
!   overridden at runtime, as indicated below.

! We comment out this section of code since the Jacobi
! preconditioner is not a good general default.

! PetscCallA(KSPGetPC(ksp,pc,ierr))
! ptype = PCJACOBI
! PetscCallA(PCSetType(pc,ptype,ierr))
! tol = 1.e-7
! PetscCallA(KSPSetTolerances(ksp,tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))

! Set user-defined monitoring routine if desired

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_monitor', flg, ierr))
if (flg) then
   PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, vf, ierr))
   PetscCallA(KSPMonitorSet(ksp, MyKSPMonitor, vf, PetscViewerAndFormatDestroy, ierr))
   !
   PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, vf2, ierr))
   PetscCallA(KSPMonitorSet(ksp, KSPMonitorResidual, vf2, PetscViewerAndFormatDestroy, ierr))
end if
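
! Note: KSPMonitorSet() may be called multiple times; every monitor
! registered above (MyKSPMonitor and KSPMonitorResidual) is invoked
! at each iteration of the solve.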

! Set runtime options, e.g.,
!   -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
! These options will override those specified above as long as
! KSPSetFromOptions() is called _after_ any other customization
! routines.
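! For example, a typical (hypothetical) invocation might be
!   mpiexec -n 2 ./ex2f -ksp_type gmres -pc_type jacobi -ksp_monitor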

PetscCallA(KSPSetFromOptions(ksp, ierr))

! Set convergence test routine if desired

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_convergence', flg, ierr))
if (flg) then
   PetscCallA(KSPSetConvergenceTest(ksp, MyKSPConverged, 0, PETSC_NULL_FUNCTION, ierr))
end if
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

PetscCallA(PetscLogEventBegin(petscEventNo, ierr))
PetscCallA(KSPSolve(ksp, b, x, ierr))
PetscCallA(PetscLogEventEnd(petscEventNo, ierr))

! Solve a small system on the first rank only

if (rank == 0) then

   PetscCallA(MatCreate(PETSC_COMM_SELF, AA, ierr))
   PetscCallA(MatSetSizes(AA, PETSC_DECIDE, PETSC_DECIDE, m, m, ierr))
   PetscCallA(MatSetFromOptions(AA, ierr))
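
   ! Fill the m x m tridiagonal 1-D Laplacian with stencil [-1 2 -1];
   ! e.g. for m = 3:
   !      ( 2 -1  0 )
   !      (-1  2 -1 )
   !      ( 0 -1  2 )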
   val = [-1.0, 2.0, -1.0]
   PetscCallA(MatSetValues(AA, 1_PETSC_INT_KIND, [0_PETSC_INT_KIND], 2_PETSC_INT_KIND, [0_PETSC_INT_KIND, 1_PETSC_INT_KIND], val(2:3), INSERT_VALUES, ierr))
   do i = 1, m - 2
      col = [i - 1, i, i + 1]
      PetscCallA(MatSetValues(AA, 1_PETSC_INT_KIND, [i], 3_PETSC_INT_KIND, col, val, INSERT_VALUES, ierr))
   end do
   PetscCallA(MatSetValues(AA, 1_PETSC_INT_KIND, [m - 1], 2_PETSC_INT_KIND, [m - 2, m - 1], val(1:2), INSERT_VALUES, ierr))
   PetscCallA(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY, ierr))
   PetscCallA(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY, ierr))

   PetscCallA(VecCreate(PETSC_COMM_SELF, xx, ierr))
   PetscCallA(VecSetSizes(xx, PETSC_DECIDE, m, ierr))
   PetscCallA(VecSetFromOptions(xx, ierr))
   PetscCallA(VecDuplicate(xx, bb, ierr))
   PetscCallA(VecDuplicate(xx, uu, ierr))
   PetscCallA(VecSet(uu, one, ierr))
   PetscCallA(MatMult(AA, uu, bb, ierr))
   PetscCallA(KSPCreate(PETSC_COMM_SELF, kksp, ierr))
   PetscCallA(KSPSetOperators(kksp, AA, AA, ierr))
   PetscCallA(KSPSetFromOptions(kksp, ierr))
   PetscCallA(KSPSolve(kksp, bb, xx, ierr))

end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Check the error
PetscCallA(VecAXPY(x, neg_one, u, ierr))
PetscCallA(VecNorm(x, NORM_2, norm, ierr))
PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
if (rank == 0) then
   if (norm > 1.e-12) then
      write (6, 100) norm, its
   else
      write (6, 110) its
   end if
end if
100 format('Norm of error ', e11.4, ' iterations ', i5)
110 format('Norm of error < 1.e-12 iterations ', i5)

! nested log view
PetscCallA(PetscViewerASCIIOpen(PETSC_COMM_WORLD, 'report_performance.xml', viewer, ierr))
PetscCallA(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_XML, ierr))
PetscCallA(PetscLogView(viewer, ierr))
PetscCallA(PetscViewerDestroy(viewer, ierr))
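
! The resulting report_performance.xml holds the nested timing tree
! collected since PetscLogNestedBegin(), including the user-registered
! myFirstEvent surrounding KSPSolve() above.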

! Free work space. All PETSc objects should be destroyed when they
! are no longer needed.

PetscCallA(KSPDestroy(ksp, ierr))
PetscCallA(VecDestroy(u, ierr))
PetscCallA(VecDestroy(x, ierr))
PetscCallA(VecDestroy(b, ierr))
PetscCallA(MatDestroy(A, ierr))

if (rank == 0) then
   PetscCallA(KSPDestroy(kksp, ierr))
   PetscCallA(VecDestroy(uu, ierr))
   PetscCallA(VecDestroy(xx, ierr))
   PetscCallA(VecDestroy(bb, ierr))
   PetscCallA(MatDestroy(AA, ierr))
end if

! Always call PetscFinalize() before exiting a program. This routine
! - finalizes the PETSc libraries as well as MPI
! - provides summary and diagnostic information if certain runtime
!   options are chosen (e.g., -log_view). See the PetscFinalize()
!   manpage for more information.

PetscCallA(PetscFinalize(ierr))
end

!/*TEST
!
!   test:
!      nsize: 2
!      filter: sort -b
!      args: -pc_type jacobi -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      filter: sort -b
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 3
!      nsize: 2
!
!TEST*/