SCALE-RM
scale_prc.F90
Go to the documentation of this file.
1 !-------------------------------------------------------------------------------
10 #include "scalelib.h"
11 module scale_prc
12  !-----------------------------------------------------------------------------
13  !
14  !++ used modules
15  !
  use mpi
  use scale_precision
  use scale_io
  use scale_fpm, only: &
     fpm_alive, &
     fpm_polling
  use scale_sigvars
23  !-----------------------------------------------------------------------------
24  implicit none
25  private
26  !-----------------------------------------------------------------------------
27  !
28  !++ Public procedure
29  !
30  public :: prc_mpistart
31  public :: prc_universal_setup
32  public :: prc_global_setup
33  public :: prc_local_setup
34  public :: prc_singlecom_setup
35  public :: prc_abort
36  public :: prc_mpifinish
37  public :: prc_mpisplit_bulk
38  public :: prc_mpisplit_nest
39 
40  public :: prc_mpibarrier
41  public :: prc_mpitime
42  public :: prc_mpitimestat
43 
44  public :: prc_set_file_closer
45 
46  public :: prc_errhandler_setup
47 
48  abstract interface
49  subroutine closer( skip_abort )
50  logical, intent(in), optional :: skip_abort
51  end subroutine closer
52  end interface
53 
54  !-----------------------------------------------------------------------------
55  !
56  !++ Public parameters & variables
57  !
58  !-----------------------------------------------------------------------------
59  ! [ communicator system ]
60  ! MPI_COMM_WORLD
61  ! |
62  ! PRC_UNIVERSAL_COMM_WORLD --split--> BULK_COMM_WORLD
63  ! |
64  ! PRC_GLOBAL_COMM_WORLD --split--> PRC_LOCAL_COMM_WORLD
65  !-----------------------------------------------------------------------------
66  integer, public, parameter :: prc_masterrank = 0
67  integer, public, parameter :: prc_domain_nlim = 10000
68  integer, public, parameter :: prc_comm_null = mpi_comm_null
69 
70  ! universal world
71  integer, public :: prc_universal_comm_world = -1
72  integer, public :: prc_universal_myrank = -1
73  integer, public :: prc_universal_nprocs = -1
74  logical, public :: prc_universal_ismaster = .false.
75 
76  integer, public :: prc_universal_jobid = 0
77 
78  ! global world
79  integer, public :: prc_global_comm_world = -1
80  integer, public :: prc_global_myrank = -1
81  integer, public :: prc_global_nprocs = -1
82  logical, public :: prc_global_ismaster = .false.
83 
84  integer, public :: prc_global_domainid = 0
85  integer, public :: prc_global_root(prc_domain_nlim)
86 
87  ! local world
88  integer, public :: prc_local_comm_world = -1
89  integer, public :: prc_nprocs = 1
90  integer, public :: prc_myrank = 0
91  logical, public :: prc_ismaster = .false.
92  !$acc declare create(PRC_myrank)
93 
94  ! inter-domain world
95  integer, public :: prc_intercomm_parent = mpi_comm_null
96  integer, public :: prc_intercomm_child = mpi_comm_null
97 
98  ! error handling
99  logical, public :: prc_mpi_alive = .false.
100  integer, public :: prc_universal_handler
101  integer, public :: prc_abort_comm_world
102  integer, public :: prc_abort_handler
103 
104  !-----------------------------------------------------------------------------
105  !
106  !++ Private procedure
107  !
108  private :: prc_mpicoloring
109  private :: prc_sort_ascd
110 
111  !-----------------------------------------------------------------------------
112  !
113  !++ Private parameters & variables
114  !
115  integer, private, parameter :: prc_abort_code = -1
116 ! integer, private, parameter :: PRC_ABORT_code_p = 2 !< mpi abort code in error handler from parent
117 ! integer, private, parameter :: PRC_ABORT_code_d = 3 !< mpi abort code in error handler from daughter
118 
119  procedure(closer), pointer :: prc_file_closer => null()
120 
121  !-----------------------------------------------------------------------------
122 contains
123  !-----------------------------------------------------------------------------
125  subroutine prc_mpistart( &
126  comm )
127  implicit none
128 
129  integer, intent(out) :: comm ! communicator
130 
131  integer :: ierr
132  !---------------------------------------------------------------------------
133 
134  call mpi_init(ierr)
135 
136  prc_mpi_alive = .true.
137 ! PRC_UNIVERSAL_handler = MPI_ERRHANDLER_NULL
138 ! call MPI_COMM_CREATE_ERRHANDLER( PRC_MPI_errorhandler, PRC_UNIVERSAL_handler, ierr )
139 
140  comm = mpi_comm_world
141  prc_abort_comm_world = comm
142 
143  return
144  end subroutine prc_mpistart
145 
146  !-----------------------------------------------------------------------------
148  subroutine prc_universal_setup( &
149  comm, &
150  nprocs, &
151  myrank, &
152  ismaster )
153  implicit none
154 
155  integer, intent(in) :: comm ! communicator
156  integer, intent(out) :: nprocs ! number of procs in this communicator
157  integer, intent(out) :: myrank ! myrank in this communicator
158  logical, intent(out) :: ismaster ! master process in this communicator?
159 
160  integer :: ierr
161  !---------------------------------------------------------------------------
162 
164 
165  call mpi_comm_size(prc_universal_comm_world,prc_universal_nprocs,ierr)
166  call mpi_comm_rank(prc_universal_comm_world,prc_universal_myrank,ierr)
167 
168  if ( prc_universal_myrank == prc_masterrank ) then
169  prc_universal_ismaster = .true.
170  else
171  prc_universal_ismaster = .false.
172  endif
173 
174  nprocs = prc_universal_nprocs
175  myrank = prc_universal_myrank
176  ismaster = prc_universal_ismaster
177 
178 
179 
180 ! PRC_ABORT_COMM_WORLD = PRC_UNIVERSAL_COMM_WORLD
181 !
182 ! call MPI_Comm_set_errhandler(PRC_ABORT_COMM_WORLD,PRC_UNIVERSAL_handler,ierr)
183 ! call MPI_Comm_get_errhandler(PRC_ABORT_COMM_WORLD,PRC_ABORT_handler ,ierr)
184 
185  return
186  end subroutine prc_universal_setup
187 
188  !-----------------------------------------------------------------------------
190  subroutine prc_global_setup( &
191  abortall, &
192  comm )
193  implicit none
194 
195  logical, intent(in) :: abortall ! abort all jobs?
196  integer, intent(in) :: comm ! communicator
197 
198  integer :: ierr
199  !---------------------------------------------------------------------------
200 
201  prc_global_comm_world = comm
202 
203  call mpi_comm_size(prc_global_comm_world,prc_global_nprocs,ierr)
204  call mpi_comm_rank(prc_global_comm_world,prc_global_myrank,ierr)
205 
206  if ( prc_global_myrank == prc_masterrank ) then
207  prc_global_ismaster = .true.
208  else
209  prc_global_ismaster = .false.
210  endif
211 
212 ! if ( .NOT. abortall ) then
213 ! PRC_ABORT_COMM_WORLD = PRC_GLOBAL_COMM_WORLD
214 !
215 ! call MPI_COMM_SET_ERRHANDLER(PRC_ABORT_COMM_WORLD,PRC_UNIVERSAL_handler,ierr)
216 ! call MPI_COMM_GET_ERRHANDLER(PRC_ABORT_COMM_WORLD,PRC_ABORT_handler ,ierr)
217 ! endif
218 
219  return
220  end subroutine prc_global_setup
221 
222  !-----------------------------------------------------------------------------
224  subroutine prc_local_setup( &
225  comm, &
226  myrank, &
227  ismaster )
228  implicit none
229 
230  integer, intent(in) :: comm ! communicator
231  integer, intent(out) :: myrank ! myrank in this communicator
232  logical, intent(out) :: ismaster ! master process in this communicator?
233 
234  integer :: ierr
235  !---------------------------------------------------------------------------
236 
237  prc_local_comm_world = comm
238 
239  call mpi_comm_rank(prc_local_comm_world,prc_myrank,ierr)
240  call mpi_comm_size(prc_local_comm_world,prc_nprocs,ierr)
241  !$acc update device(PRC_myrank)
242 
243  if ( prc_myrank == prc_masterrank ) then
244  prc_ismaster = .true.
245  else
246  prc_ismaster = .false.
247  endif
248 
249  myrank = prc_myrank
250  ismaster = prc_ismaster
251 
252  return
253  end subroutine prc_local_setup
254 
255  !-----------------------------------------------------------------------------
257  subroutine prc_singlecom_setup( &
258  comm, &
259  nprocs, &
260  myrank, &
261  ismaster )
262  implicit none
263 
264  integer, intent(in) :: comm ! communicator
265  integer, intent(out) :: nprocs ! number of procs
266  integer, intent(out) :: myrank ! myrank
267  logical, intent(out) :: ismaster ! master process?
268 
269  integer :: ierr
270  !---------------------------------------------------------------------------
271 
272  call mpi_comm_size(comm,nprocs,ierr)
273  call mpi_comm_rank(comm,myrank,ierr)
274 
275  if ( myrank == prc_masterrank ) then
276  ismaster = .true.
277  else
278  ismaster = .false.
279  endif
280 
282  prc_universal_nprocs = nprocs
283  prc_universal_myrank = myrank
284  prc_universal_ismaster = ismaster
285 
286  prc_global_comm_world = comm
287  prc_global_nprocs = nprocs
288  prc_global_myrank = myrank
289  prc_global_ismaster = ismaster
290 
291  prc_local_comm_world = comm
292  prc_nprocs = nprocs
293  prc_myrank = myrank
294  prc_ismaster = ismaster
295  !$acc update device(PRC_myrank)
296 
297 
298 
299  prc_abort_comm_world = comm
300 
301 ! call MPI_Comm_set_errhandler(PRC_ABORT_COMM_WORLD,PRC_UNIVERSAL_handler,ierr)
302 ! call MPI_Comm_get_errhandler(PRC_ABORT_COMM_WORLD,PRC_ABORT_handler ,ierr)
303 
304  return
305  end subroutine prc_singlecom_setup
306 
307  !-----------------------------------------------------------------------------
309  subroutine prc_errhandler_setup( &
310  use_fpm, &
311  master )
312  implicit none
313 
314  logical, intent(in) :: use_fpm ! fpm switch
315  logical, intent(in) :: master ! master flag
316 
317  integer :: ierr
318  !---------------------------------------------------------------------------
319 
320  call mpi_comm_create_errhandler(prc_mpi_errorhandler,prc_universal_handler,ierr)
321 
322  call mpi_comm_set_errhandler(prc_abort_comm_world,prc_universal_handler,ierr)
323  call mpi_comm_get_errhandler(prc_abort_comm_world,prc_abort_handler ,ierr)
324 
326  if( prc_universal_ismaster ) write(*,*) ""
327  if( prc_universal_ismaster ) write(*,*) "ERROR: MPI HANDLER is INCONSISTENT"
328  if( prc_universal_ismaster ) write(*,*) " PRC_UNIVERSAL_handler = ", prc_universal_handler
329  if( prc_universal_ismaster ) write(*,*) " PRC_ABORT_handler = ", prc_abort_handler
330  call prc_abort
331  endif
332 
333  if ( use_fpm ) then
334  call sigvars_get_all( master )
335  call signal( sigint, prc_abort )
336 ! call signal( SIGQUIT, PRC_abort )
337 ! call signal( SIGABRT, PRC_abort )
338 ! call signal( SIGFPE, PRC_abort )
339 ! call signal( SIGSEGV, PRC_abort )
340 ! call signal( SIGTERM, PRC_abort )
341  endif
342 
343  return
344  end subroutine prc_errhandler_setup
345 
346  !-----------------------------------------------------------------------------
348  subroutine prc_abort
349  implicit none
350 
351  integer :: ierr
352  !---------------------------------------------------------------------------
353 
354  if ( prc_mpi_alive ) then
355  ! tentative approach; input "PRC_UNIVERSAL_COMM_WORLD".
356  call mpi_comm_call_errhandler(prc_universal_comm_world,prc_abort_code,ierr)
357  endif
358 
359  stop
360  end subroutine prc_abort
361 
362  !-----------------------------------------------------------------------------
364  subroutine prc_mpifinish
365  implicit none
366 
367  integer :: ierr
368  logical :: sign_status
369  logical :: sign_exit
370  !---------------------------------------------------------------------------
371 
372  call mpi_barrier(prc_global_comm_world, ierr)
373  if ( prc_intercomm_child /= mpi_comm_null ) &
374  call mpi_comm_free(prc_intercomm_child, ierr)
375  if ( prc_intercomm_parent /= mpi_comm_null ) &
376  call mpi_comm_free(prc_intercomm_parent, ierr)
377 
378  ! FPM polling
379  if ( fpm_alive ) then
380  sign_status = .false.
381  sign_exit = .false.
382  do while ( .NOT. sign_exit )
383  call fpm_polling( sign_status, sign_exit )
384  enddo
385  endif
386 
387  if (prc_universal_handler .NE. mpi_errhandler_null) then
388  call mpi_errhandler_free(prc_universal_handler, ierr)
389  endif
390  if (prc_abort_handler .NE. mpi_errhandler_null) then
391  call mpi_errhandler_free(prc_abort_handler, ierr)
392  endif
393 
394  ! Stop MPI
395  if ( prc_mpi_alive ) then
396  ! free splitted communicator
398  call mpi_comm_free(prc_local_comm_world,ierr)
399  endif
400 
401  call mpi_barrier(prc_universal_comm_world,ierr)
402 
403  call mpi_finalize(ierr)
404  endif
405 
406  return
407  end subroutine prc_mpifinish
408 
409  !-----------------------------------------------------------------------------
  !> MPI Communicator Split (bulk job).
  !> Splits ORG_COMM_WORLD into NUM_BULKJOB sub-communicators whose sizes are
  !> given by PRC_BULKJOB(:); the color assigned to a process is its bulk
  !> job ID (counted from 0).
  subroutine prc_mpisplit_bulk( &
       ORG_COMM_WORLD, &
       NUM_BULKJOB, &
       PRC_BULKJOB, &
       debug, &
       SUB_COMM_WORLD, &
       ID_BULKJOB )
    implicit none

    integer, intent(in)  :: org_comm_world ! communicator (original group)
    integer, intent(in)  :: num_bulkjob    ! number of bulk jobs
    integer, intent(in)  :: prc_bulkjob(:) ! number of ranks in subgroup communicator
    logical, intent(in)  :: debug          ! log-output for mpi splitting?
    integer, intent(out) :: sub_comm_world ! communicator (new subgroup)
    integer, intent(out) :: id_bulkjob     ! bulk job id (= color, counted from 0)

    integer :: org_myrank ! my rank in the original communicator
    integer :: org_nrank  ! number of ranks in the original communicator
    integer :: sum_nrank  ! total ranks requested over all bulk jobs

    integer, allocatable :: prc2color (:) ! color id for each process
    integer, allocatable :: prc2key (:)   ! local rank id for each process
    integer :: col_domain(0:prc_domain_nlim) ! domain id of this color
    integer :: col_master(0:prc_domain_nlim) ! master rank of this color
    integer :: col_parent(0:prc_domain_nlim) ! parent color of this color
    integer :: col_child (0:prc_domain_nlim) ! child color of this color

    logical :: color_reorder = .false. ! dummy; bulk jobs are never reordered

    integer :: i, color
    integer :: itag, ierr ! NOTE(review): itag is unused in the bulk variant
    !---------------------------------------------------------------------------

    if ( num_bulkjob == 1 ) then ! single domain run

       ! no split needed: the subgroup IS the original communicator
       sub_comm_world = org_comm_world
       id_bulkjob = 0
       ! NOTE(review): a source line appears to be dropped here in this copy
       ! (possibly "PRC_UNIVERSAL_jobID = 0") -- confirm against upstream

    elseif( num_bulkjob > 1 ) then ! multi domain run

       call mpi_comm_rank(org_comm_world,org_myrank,ierr)
       call mpi_comm_size(org_comm_world,org_nrank ,ierr)

       ! the requested layout must account for every launched process
       sum_nrank = sum(prc_bulkjob(1:num_bulkjob))

       if ( sum_nrank /= org_nrank ) then
          if ( prc_universal_ismaster ) then
             log_error("PRC_MPIsplit",*) "MPI PROCESS NUMBER is INCONSISTENT"
             log_error_cont(*) " REQUESTED NPROCS = ", sum_nrank, " LAUNCHED NPROCS = ", org_nrank
          endif
          call prc_abort
       endif

       allocate( prc2color(0:org_nrank-1) )
       allocate( prc2key(0:org_nrank-1) )

       ! assign each process a color (= job) and key (= rank within the job);
       ! bulkjob=.true. disables parent/child relations between colors
       call prc_mpicoloring( org_comm_world, & ! [IN]
                             org_nrank, & ! [IN]
                             num_bulkjob, & ! [IN]
                             prc_bulkjob(:), & ! [IN]
                             color_reorder, & ! [IN]
                             .true., & ! [IN]
                             debug, & ! [IN]
                             prc2color(:), & ! [OUT]
                             prc2key(:), & ! [OUT]
                             col_domain(:), & ! [OUT]
                             col_master(:), & ! [OUT]
                             col_parent(:), & ! [OUT]
                             col_child(:) ) ! [OUT]

       ! split comm_world
       call mpi_comm_split( org_comm_world, & ! [IN]
                            prc2color(org_myrank), & ! [IN]
                            prc2key(org_myrank), & ! [IN]
                            sub_comm_world, & ! [OUT]
                            ierr ) ! [OUT]

       ! record the master rank of every bulk job in the original numbering
       do i = 1, num_bulkjob
          color = i-1
          prc_global_root(i) = col_master(color)
       enddo

       id_bulkjob = prc2color(org_myrank) ! color = bulk id
       prc_universal_jobid = prc2color(org_myrank) ! color = bulk id

       deallocate( prc2color )
       deallocate( prc2key )

    else
       ! zero or negative bulk-job count is rejected
       if ( prc_universal_ismaster ) then
          log_error("PRC_MPIsplit",*) "REQUESTED DOMAIN NUMBER IS NOT ACCEPTABLE"
       endif
       call prc_abort
    endif

    return
  end subroutine prc_mpisplit_bulk
510 
511  !-----------------------------------------------------------------------------
  !> MPI Communicator Split (nesting).
  !> Splits ORG_COMM_WORLD into NUM_DOMAIN sub-communicators and creates
  !> parent/child inter-communicators between neighbouring domains.
  !> Note: domain IDs are counted from 1, colors from 0, and with
  !> color_reorder the two numberings differ.
  subroutine prc_mpisplit_nest( &
       ORG_COMM_WORLD, &
       NUM_DOMAIN, &
       PRC_DOMAIN, &
       debug, &
       color_reorder, &
       SUB_COMM_WORLD, &
       ID_DOMAIN )
    implicit none

    integer, intent(in)  :: org_comm_world ! communicator (original group)
    integer, intent(in)  :: num_domain     ! number of nested domains
    integer, intent(in)  :: prc_domain(:)  ! number of ranks in subgroup communicator
    logical, intent(in)  :: debug          ! log-output for mpi splitting?
    logical, intent(in)  :: color_reorder  ! reorder colors by subgroup size?
    integer, intent(out) :: sub_comm_world ! communicator (new subgroup)
    integer, intent(out) :: id_domain      ! domain id (counted from 1)

    integer :: org_myrank ! my rank in the original communicator
    integer :: org_nrank  ! number of ranks in the original communicator
    integer :: sum_nrank  ! total ranks requested over all domains

    integer, allocatable :: prc2color (:) ! color id for each process
    integer, allocatable :: prc2key (:)   ! local rank id for each process
    integer :: col_domain(0:prc_domain_nlim) ! domain id of this color
    integer :: col_master(0:prc_domain_nlim) ! master rank of this color
    integer :: col_parent(0:prc_domain_nlim) ! parent color of this color
    integer :: col_child (0:prc_domain_nlim) ! child color of this color

    integer :: i, color
    integer :: itag, ierr
    !---------------------------------------------------------------------------

    if ( num_domain == 1 ) then ! single domain run

       ! no split needed: the subgroup IS the original communicator
       sub_comm_world = org_comm_world
       id_domain = 1
       ! NOTE(review): a source line appears to be dropped here in this copy
       ! (possibly "PRC_GLOBAL_domainID = 1") -- confirm against upstream

    elseif( num_domain > 1 ) then ! multi domain run

       call mpi_comm_rank(org_comm_world,org_myrank,ierr)
       call mpi_comm_size(org_comm_world,org_nrank ,ierr)

       ! the requested layout must account for every launched process
       sum_nrank = sum(prc_domain(1:num_domain))

       if ( sum_nrank /= org_nrank ) then
          if ( prc_universal_ismaster ) then
             log_error("PRC_MPIsplit",*) "MPI PROCESS NUMBER is INCONSISTENT"
             log_error_cont(*) " REQUESTED NPROCS = ", sum_nrank, " LAUNCHED NPROCS = ", org_nrank
          endif
          call prc_abort
       endif

       allocate( prc2color(0:org_nrank-1) )
       allocate( prc2key(0:org_nrank-1) )

       ! assign each process a color and key; bulkjob=.false. enables the
       ! parent/child relations between consecutive domains
       call prc_mpicoloring( org_comm_world, & ! [IN]
                             org_nrank, & ! [IN]
                             num_domain, & ! [IN]
                             prc_domain(:), & ! [IN]
                             color_reorder, & ! [IN]
                             .false., & ! [IN]
                             debug, & ! [IN]
                             prc2color(:), & ! [OUT]
                             prc2key(:), & ! [OUT]
                             col_domain(:), & ! [OUT]
                             col_master(:), & ! [OUT]
                             col_parent(:), & ! [OUT]
                             col_child(:) ) ! [OUT]

       ! split comm_world
       call mpi_comm_split( org_comm_world, & ! [IN]
                            prc2color(org_myrank), & ! [IN]
                            prc2key(org_myrank), & ! [IN]
                            sub_comm_world, & ! [OUT]
                            ierr ) ! [OUT]

       ! with reordering, color and domain id differ: map color back to domain
       id_domain = col_domain(prc2color(org_myrank)) ! color /= domain id
       prc_global_domainid = col_domain(prc2color(org_myrank)) ! color /= domain id

       ! report the parent/child pairing of colors (master only)
       if ( prc_universal_ismaster ) then
          write(*,*)
          write(*,*) "INFO [PRC_MPIsplit] Inter-domain relationship information"
          do i = 1, num_domain
             color = i-1
             if ( col_parent(color) >= 0 ) then
                write(*,'(5x,A,I2.2)') "Relationship No.", i
                write(*,'(5x,A,I2.2,A,A,I2.2)') "Parent color: ", col_parent(color), " <=> ", &
                                                "Child color: ", color
             endif
          enddo
       endif

       ! create inter-communicator between each parent/child color pair
       do i = 1, num_domain
          color = i-1
          if ( col_parent(color) >= 0 ) then
             itag = i*100 ! unique tag per relationship

             if ( prc2color(org_myrank) == col_parent(color) ) then ! as a parent

                ! remote leader is the master rank of the child color
                call mpi_intercomm_create( sub_comm_world, prc_masterrank, &
                                           org_comm_world, col_master(color), &
                                           itag, prc_intercomm_child, ierr )

             elseif( prc2color(org_myrank) == color ) then ! as a child

                ! remote leader is the master rank of the parent color
                call mpi_intercomm_create( sub_comm_world, prc_masterrank, &
                                           org_comm_world, col_master(col_parent(color)), &
                                           itag, prc_intercomm_parent, ierr )

             endif

             ! keep all processes in step between successive creations
             call mpi_barrier(org_comm_world,ierr)
          endif
       enddo

       deallocate( prc2color )
       deallocate( prc2key )

    else
       ! zero or negative domain count is rejected
       if ( prc_universal_ismaster ) then
          write(*,*)"ERROR [RPC_MPIsplit] REQUESTED DOMAIN NUMBER IS NOT ACCEPTABLE"
       endif
       call prc_abort
    endif

    return
  end subroutine prc_mpisplit_nest
643 
644  !-----------------------------------------------------------------------------
646  subroutine prc_mpicoloring( &
647  ORG_COMM_WORLD, &
648  ORG_nrank, &
649  NUM_SUBGROUP, &
650  PRC_SUBGROUP, &
651  color_reorder, &
652  bulkjob, &
653  debug, &
654  prc2color, &
655  prc2key, &
656  COL_domain, &
657  COL_master, &
658  COL_parent, &
659  COL_child )
660  implicit none
661 
662  integer, intent(in) :: org_comm_world
663  integer, intent(in) :: org_nrank
664  integer, intent(in) :: num_subgroup
665  integer, intent(in) :: prc_subgroup(:)
666  logical, intent(in) :: color_reorder
667  logical, intent(in) :: bulkjob
668  logical, intent(in) :: debug
669  integer, intent(out) :: prc2color (0:org_nrank-1) ! color id for each process
670  integer, intent(out) :: prc2key (0:org_nrank-1) ! local rank id for each process
671  integer, intent(out) :: col_domain(0:prc_domain_nlim) ! domain id of the color
672  integer, intent(out) :: col_master(0:prc_domain_nlim) ! master rank of the color
673  integer, intent(out) :: col_parent(0:prc_domain_nlim) ! parent color of the color
674  integer, intent(out) :: col_child (0:prc_domain_nlim) ! child color of the color
675 
676  integer :: prc_reorder( num_subgroup) ! reordered number of process
677  integer :: dom2col ( num_subgroup) ! get color number by domain number
678  integer :: col2dom (0:num_subgroup-1) ! get domain number by color number
679  logical :: touch (0:num_subgroup-1)
680 
681  integer :: id_parent ! parent domain number
682  integer :: id_child ! child domain number
683  integer :: nprc, key
684  integer :: i, domain, color, p
685  integer :: ierr
686  !---------------------------------------------------------------------------
687 
688  if ( color_reorder .AND. .NOT. bulkjob ) then ! with reordering of colors
689 
690  prc_reorder(1:num_subgroup) = prc_subgroup(1:num_subgroup)
691  call prc_sort_ascd( prc_reorder(1:num_subgroup), 1, num_subgroup )
692 
693  touch(:) = .false.
694  do domain = 1, num_subgroup
695  do i = num_subgroup, 1, -1
696  color = i-1 ! counted from 0
697  if ( prc_subgroup(domain) == prc_reorder(i) .AND. ( .NOT. touch(color) ) ) then
698  dom2col(domain) = color ! domain_num -> color_num
699  col2dom(color) = domain ! color_num -> domain_num
700  touch(color) = .true.
701  exit
702  endif
703  enddo ! order(=color+1) loop
704  enddo ! domain loop
705 
706  else ! without reordering of colors
707 
708  do domain = 1, num_subgroup
709  color = domain-1 ! counted from 0
710  dom2col(domain) = color ! domain_num -> color_num
711  col2dom(color) = domain ! color_num -> domain_num
712  enddo ! domain loop
713 
714  endif
715 
716  ! make relationship
717  col_parent(:) = -1
718  col_child(:) = -1
719  if ( .NOT. bulkjob ) then
720  do i = 1, num_subgroup
721  id_parent = i - 1
722  id_child = i + 1
723 
724  if ( id_parent >= 1 .AND. id_parent <= num_subgroup ) then
725  col_parent(dom2col(i)) = dom2col(id_parent)
726  endif
727  if ( id_child >= 1 .AND. id_child <= num_subgroup ) then
728  col_child(dom2col(i)) = dom2col(id_child)
729  endif
730 
731  if ( debug .AND. prc_universal_ismaster ) then
732  write(*,'(4(A,I2))') &
733  "DOMAIN: ", i, ", MY COL: ", dom2col(i), ", PARENT COL:", col_parent(i), ", CHILD COL:", col_child(i)
734  endif
735  enddo ! domain loop
736  endif
737 
738  if ( prc_universal_ismaster ) then
739  write(*,*)
740  write(*,*) 'INFO [PRC_MPIcoloring] Domain information'
741  write(*,'(5x,A,L2)') 'Reordering? : ', color_reorder
742  do i = 1, num_subgroup
743  color = i-1
744  domain = col2dom(color)
745  write(*,*)
746  write(*,'(5x,2(A,I2.2))') "Order No. ", i, " -> Domain No. ", domain
747  write(*,'(5x,A,I5)') "Number of process = ", prc_subgroup(domain)
748  write(*,'(5x,A,I5)') "Color of this domain = ", color
749  if ( col_parent(color) >= 0 ) then
750  write(*,'(5x,A,I5)') "Color of parent domain = ", col_parent(color)
751  else
752  write(*,'(5x,A)' ) "Color of parent domain = no parent"
753  endif
754  if ( col_child(color) >= 0 ) then
755  write(*,'(5x,A,I5)') "Color of child domain = ", col_child(color)
756  else
757  write(*,'(5x,A)' ) "Color of child domain = no child"
758  endif
759  enddo ! order(=color+1) loop
760  endif
761 
762  ! make a process table
763  color = 0
764  key = 0
765  domain = col2dom(color)
766  nprc = prc_subgroup(domain)
767  col_master(:) = -999
768 
769  do p = 0, org_nrank-1
770  prc2color(p) = color
771  prc2key(p) = key
772 
773  if ( key == 0 ) then ! master rank
774  col_domain(color) = domain
775  col_master(color) = p
776  endif
777 
778  if ( debug .AND. prc_universal_ismaster ) then
779  write(*,'(5x,4(A,I5))') &
780  "PE:", p, " COLOR:", prc2color(p), " KEY:", prc2key(p), " COL_master:", col_master(color)
781  endif
782 
783  key = key + 1
784  if ( key >= nprc ) then
785  color = color + 1
786  key = 0
787  if ( color < num_subgroup ) then ! ignore last
788  domain = col2dom(color)
789  nprc = prc_subgroup(domain)
790  endif
791  endif
792  enddo
793 
794  return
795  end subroutine prc_mpicoloring
796 
797  !-----------------------------------------------------------------------------
799  recursive subroutine prc_sort_ascd(a, top, bottom)
800  implicit none
801  integer, intent(inout) :: a(:)
802  integer, intent(in) :: top, bottom
803  integer :: i, j, cnt, trg
804  !---------------------------------------------------------------------------
805  cnt = a( (top+bottom) / 2 )
806  i = top; j = bottom
807  do
808  do while ( a(i) > cnt ) !ascending evaluation
809  i = i + 1
810  enddo
811  do while ( cnt > a(j) ) !ascending evaluation
812  j = j - 1
813  enddo
814  if ( i >= j ) exit
815  trg = a(i); a(i) = a(j); a(j) = trg
816  i = i + 1
817  j = j - 1
818  enddo
819  if ( top < i-1 ) call prc_sort_ascd( a, top, i-1 )
820  if ( j+1 < bottom ) call prc_sort_ascd( a, j+1, bottom )
821  return
822  end subroutine prc_sort_ascd
823 
824  !-----------------------------------------------------------------------------
826  subroutine prc_mpibarrier
827  implicit none
828 
829  integer :: ierr
830  !---------------------------------------------------------------------------
831 
832  if ( prc_mpi_alive ) then
833  call mpi_barrier(prc_local_comm_world,ierr)
834  endif
835 
836  end subroutine prc_mpibarrier
837 
838  !-----------------------------------------------------------------------------
841  function prc_mpitime() result(time)
842  implicit none
843 
844  real(dp) :: time
845  !---------------------------------------------------------------------------
846 
847  if ( prc_mpi_alive ) then
848  time = real(mpi_wtime(), kind=dp)
849  else
850  call cpu_time(time)
851  endif
852 
853  end function prc_mpitime
854 
855  !-----------------------------------------------------------------------------
857  subroutine prc_mpitimestat( &
858  avgvar, &
859  maxvar, &
860  minvar, &
861  maxidx, &
862  minidx, &
863  var )
864  implicit none
865 
866  real(dp), intent(out) :: avgvar(:)
867  real(dp), intent(out) :: maxvar(:)
868  real(dp), intent(out) :: minvar(:)
869  integer, intent(out) :: maxidx(:)
870  integer, intent(out) :: minidx(:)
871  real(dp), intent(in) :: var(:)
872 
873  real(dp), allocatable :: statval(:,:)
874  integer :: vsize
875 
876  real(dp) :: totalvar
877  integer :: ierr
878  integer :: v, p
879  !---------------------------------------------------------------------------
880 
881  vsize = size(var(:))
882 
883  allocate( statval(vsize,0:prc_nprocs-1) )
884  statval(:,:) = 0.0_dp
885 
886  do v = 1, vsize
887  statval(v,prc_myrank) = var(v)
888  enddo
889 
890  ! MPI broadcast
891  do p = 0, prc_nprocs-1
892  call mpi_bcast( statval(1,p), &
893  vsize, &
894  mpi_double_precision, &
895  p, &
897  ierr )
898  enddo
899 
900  do v = 1, vsize
901  totalvar = 0.0_dp
902  do p = 0, prc_nprocs-1
903  totalvar = totalvar + statval(v,p)
904  enddo
905  avgvar(v) = totalvar / prc_nprocs
906 
907  maxvar(v) = maxval(statval(v,:))
908  minvar(v) = minval(statval(v,:))
909  maxidx(v:v) = maxloc(statval(v,:))
910  minidx(v:v) = minloc(statval(v,:))
911  enddo
912 
913  deallocate( statval )
914 
915  return
916  end subroutine prc_mpitimestat
917 
918  !-----------------------------------------------------------------------------
  !> MPI error handler attached to the abort communicator.
  !> Also entered deliberately from PRC_abort with errcode = PRC_ABORT_code.
  !> Reports the error, closes files, and calls MPI_Abort; never returns.
  subroutine prc_mpi_errorhandler( &
       comm, &
       errcode )
    implicit none

    ! attributes are needed to be the same with COMM_ERRHANDLER function
    integer :: comm    ! communicator on which the error was raised
    integer :: errcode ! MPI error code, or PRC_ABORT_code from PRC_abort

    character(len=MPI_MAX_ERROR_STRING) :: msg
    integer :: len
    integer :: ierr
    logical :: sign_status
    logical :: sign_exit
    !---------------------------------------------------------------------------

!print *, "into errhandler:", PRC_UNIVERSAL_myrank

    ! FPM polling: block until the failure process manager signals exit
    if ( fpm_alive ) then
       sign_status = .false.
       sign_exit = .false.
       do while ( .NOT. sign_exit )
          call fpm_polling( sign_status, sign_exit )
       enddo
    endif

    ! Print Error Messages
    if ( prc_mpi_alive ) then
       ! flush 1kbyte
       if ( io_l ) then
          log_progress(*) 'abort MPI'
          flush(io_fid_log)
       endif

       ! report where this process sits at every communicator level
       if ( prc_ismaster ) then
          write(*,*) '+++++ BULK ID : ', prc_universal_jobid
          write(*,*) '+++++ DOMAIN ID : ', prc_global_domainid
          write(*,*) '+++++ MASTER LOCATION : ', prc_universal_myrank,'/',prc_universal_nprocs
          write(*,*) '+++++ GLOBAL LOCATION : ', prc_global_myrank,'/',prc_global_nprocs
          write(*,*) '+++++ LOCAL LOCATION : ', prc_myrank,'/',prc_nprocs
          write(*,*) ''
       endif

       ! translate genuine MPI error codes; PRC_ABORT_code needs no message
       if ( errcode == prc_abort_code ) then ! called from PRC_abort
          ! do nothing
       elseif( errcode <= mpi_err_lastcode ) then
          call mpi_error_string(errcode, msg, len, ierr)
          if( io_l ) write(io_fid_log,*) '+++++ ', errcode, trim(msg)
          write(*,*) '+++++ ', errcode, trim(msg)
       else
          if( io_l ) write(io_fid_log,*) '+++++ Unexpected error code', errcode
          write(*,*) '+++++ Unexpected error code', errcode
       endif

       ! the handler is only installed on PRC_ABORT_COMM_WORLD; warn otherwise
       if ( comm /= prc_abort_comm_world ) then
          if( io_l ) write(io_fid_log,*) '+++++ Unexpected communicator'
          write(*,*) '+++++ Unexpected communicator'
       endif
       if( io_l ) write(io_fid_log,*) ''
       write(*,*) ''
    endif

    ! let the application close its own files first (registered closer)
    if ( associated( prc_file_closer ) ) call prc_file_closer( .true. )

    ! Close logfile, configfile
    if ( io_l ) then
       if( io_fid_log /= io_fid_stdout ) close(io_fid_log)
    endif
    close(io_fid_conf)

    ! Abort MPI
    if ( prc_mpi_alive ) then
       ! wait before aborting (presumably to let other ranks flush their
       ! output -- confirm intent against upstream)
       call sleep(5)
       call mpi_abort(prc_abort_comm_world, prc_abort_code, ierr)
    endif

    ! reached only when MPI is not alive
    stop
  end subroutine prc_mpi_errorhandler
999 
  !> Register the routine used to close application-side files on abort.
  !> The registered routine is invoked from PRC_MPI_errorhandler before
  !> MPI_Abort; it must conform to the CLOSER abstract interface.
  subroutine prc_set_file_closer( routine )
    procedure(closer) :: routine ! file-closing callback (CLOSER interface)

    prc_file_closer => routine

    return
  end subroutine prc_set_file_closer
1007 
1008 end module scale_prc
scale_prc::prc_universal_ismaster
logical, public prc_universal_ismaster
master process in universal communicator?
Definition: scale_prc.F90:74
scale_prc::prc_abort
subroutine, public prc_abort
Abort Process.
Definition: scale_prc.F90:349
scale_prc::prc_universal_setup
subroutine, public prc_universal_setup(comm, nprocs, myrank, ismaster)
setup MPI in universal communicator
Definition: scale_prc.F90:153
scale_prc::prc_abort_handler
integer, public prc_abort_handler
error handler communicator for aborting
Definition: scale_prc.F90:102
scale_prc::prc_mpisplit_bulk
subroutine, public prc_mpisplit_bulk(ORG_COMM_WORLD, NUM_BULKJOB, PRC_BULKJOB, debug, SUB_COMM_WORLD, ID_BULKJOB)
MPI Communicator Split (bulk job)
Definition: scale_prc.F90:418
scale_prc::prc_global_setup
subroutine, public prc_global_setup(abortall, comm)
setup MPI in global communicator
Definition: scale_prc.F90:193
scale_prc::prc_set_file_closer
subroutine, public prc_set_file_closer(routine)
Definition: scale_prc.F90:1001
scale_precision
module PRECISION
Definition: scale_precision.F90:14
scale_prc::prc_local_comm_world
integer, public prc_local_comm_world
local communicator
Definition: scale_prc.F90:88
scale_prc::prc_mpisplit_nest
subroutine, public prc_mpisplit_nest(ORG_COMM_WORLD, NUM_DOMAIN, PRC_DOMAIN, debug, color_reorder, SUB_COMM_WORLD, ID_DOMAIN)
MPI Communicator Split (nesting)
Definition: scale_prc.F90:521
scale_prc::prc_mpistart
subroutine, public prc_mpistart(comm)
Start MPI.
Definition: scale_prc.F90:127
scale_prc::prc_myrank
integer, public prc_myrank
myrank in local communicator
Definition: scale_prc.F90:90
scale_fpm::fpm_polling
subroutine, public fpm_polling(run_stat, stop_signal)
Main system of FPM.
Definition: scale_fpm.F90:176
scale_prc::prc_mpi_alive
logical, public prc_mpi_alive
MPI is alive?
Definition: scale_prc.F90:99
scale_prc::prc_universal_myrank
integer, public prc_universal_myrank
myrank in universal communicator
Definition: scale_prc.F90:72
scale_prc
module PROCESS
Definition: scale_prc.F90:11
scale_prc::prc_local_setup
subroutine, public prc_local_setup(comm, myrank, ismaster)
Setup MPI in local communicator.
Definition: scale_prc.F90:228
scale_prc::prc_universal_jobid
integer, public prc_universal_jobid
my job ID in universal communicator
Definition: scale_prc.F90:76
scale_prc::prc_mpibarrier
subroutine, public prc_mpibarrier
Barrier MPI.
Definition: scale_prc.F90:827
scale_io
module STDIO
Definition: scale_io.F90:10
scale_prc::prc_comm_null
integer, parameter, public prc_comm_null
Definition: scale_prc.F90:68
scale_prc::prc_masterrank
integer, parameter, public prc_masterrank
master process in each communicator
Definition: scale_prc.F90:66
scale_prc::prc_global_ismaster
logical, public prc_global_ismaster
master process in global communicator?
Definition: scale_prc.F90:82
scale_prc::prc_errhandler_setup
subroutine, public prc_errhandler_setup(use_fpm, master)
Setup MPI error handler.
Definition: scale_prc.F90:312
scale_io::io_fid_log
integer, public io_fid_log
Log file ID.
Definition: scale_io.F90:58
scale_prc::prc_global_root
integer, dimension(prc_domain_nlim), public prc_global_root
root processes in global members
Definition: scale_prc.F90:85
scale_prc::prc_global_myrank
integer, public prc_global_myrank
myrank in global communicator
Definition: scale_prc.F90:80
scale_prc::prc_domain_nlim
integer, parameter, public prc_domain_nlim
max depth of domains
Definition: scale_prc.F90:67
scale_prc::prc_universal_comm_world
integer, public prc_universal_comm_world
original communicator
Definition: scale_prc.F90:71
scale_precision::dp
integer, parameter, public dp
Definition: scale_precision.F90:32
scale_prc::prc_global_domainid
integer, public prc_global_domainid
my domain ID in global communicator
Definition: scale_prc.F90:84
scale_prc::prc_abort_comm_world
integer, public prc_abort_comm_world
communicator for aborting
Definition: scale_prc.F90:101
scale_fpm::fpm_alive
logical, public fpm_alive
Definition: scale_fpm.F90:32
scale_prc::prc_intercomm_child
integer, public prc_intercomm_child
communicator between this rank and child domain
Definition: scale_prc.F90:96
scale_prc::prc_nprocs
integer, public prc_nprocs
process num in local communicator
Definition: scale_prc.F90:89
scale_sigvars
module sigvars
Definition: scale_sigvars.F90:13
scale_prc::prc_intercomm_parent
integer, public prc_intercomm_parent
communicator between this rank and parent domain
Definition: scale_prc.F90:95
scale_prc::prc_universal_handler
integer, public prc_universal_handler
error handler in universal communicator
Definition: scale_prc.F90:100
scale_fpm
module FPM
Definition: scale_fpm.F90:10
scale_io::io_l
logical, public io_l
output log or not? (this process)
Definition: scale_io.F90:63
scale_prc::prc_mpitimestat
subroutine, public prc_mpitimestat(avgvar, maxvar, minvar, maxidx, minidx, var)
Calc global statistics for timer.
Definition: scale_prc.F90:864
scale_prc::prc_singlecom_setup
subroutine, public prc_singlecom_setup(comm, nprocs, myrank, ismaster)
Setup MPI single communicator (not use universal-global-local setting)
Definition: scale_prc.F90:262
scale_io::io_fid_stdout
integer, parameter, public io_fid_stdout
Definition: scale_io.F90:56
scale_prc::prc_global_comm_world
integer, public prc_global_comm_world
global communicator
Definition: scale_prc.F90:79
scale_sigvars::sigvars_get_all
subroutine, public sigvars_get_all(master)
Get signal values.
Definition: scale_sigvars.F90:104
scale_prc::prc_mpitime
real(dp) function, public prc_mpitime()
Get MPI time.
Definition: scale_prc.F90:842
scale_prc::prc_mpifinish
subroutine, public prc_mpifinish
Stop MPI peacefully.
Definition: scale_prc.F90:365
scale_prc::prc_universal_nprocs
integer, public prc_universal_nprocs
process num in universal communicator
Definition: scale_prc.F90:73
scale_prc::prc_global_nprocs
integer, public prc_global_nprocs
process num in global communicator
Definition: scale_prc.F90:81
scale_sigvars::sigint
integer(c_int), public sigint
Definition: scale_sigvars.F90:33
scale_io::io_fid_conf
integer, public io_fid_conf
Config file ID.
Definition: scale_io.F90:57
scale_prc::prc_ismaster
logical, public prc_ismaster
master process in local communicator?
Definition: scale_prc.F90:91