SCALE-RM
scale_prc.F90
Go to the documentation of this file.
1 !-------------------------------------------------------------------------------
10 #include "scalelib.h"
11 module scale_prc
12  !-----------------------------------------------------------------------------
13  !
14  !++ used modules
15  !
 use mpi
 use scale_precision
 use scale_io
 use scale_fpm, only: &
    fpm_alive, &
    fpm_polling
 use scale_sigvars
23  !-----------------------------------------------------------------------------
24  implicit none
25  private
26  !-----------------------------------------------------------------------------
27  !
28  !++ Public procedure
29  !
30  public :: prc_mpistart
31  public :: prc_universal_setup
32  public :: prc_global_setup
33  public :: prc_local_setup
34  public :: prc_singlecom_setup
35  public :: prc_abort
36  public :: prc_mpifinish
37  public :: prc_mpisplit_bulk
38  public :: prc_mpisplit_nest
39 
40  public :: prc_mpibarrier
41  public :: prc_mpitime
42  public :: prc_mpitimestat
43 
44  public :: prc_set_file_closer
45 
46  public :: prc_errhandler_setup
47 
48  abstract interface
49  subroutine closer( skip_abort )
50  logical, intent(in), optional :: skip_abort
51  end subroutine closer
52  end interface
53 
54  !-----------------------------------------------------------------------------
55  !
56  !++ Public parameters & variables
57  !
58  !-----------------------------------------------------------------------------
59  ! [ communicator system ]
60  ! MPI_COMM_WORLD
61  ! |
62  ! PRC_UNIVERSAL_COMM_WORLD --split--> BULK_COMM_WORLD
63  ! |
64  ! PRC_GLOBAL_COMM_WORLD --split--> PRC_LOCAL_COMM_WORLD
65  !-----------------------------------------------------------------------------
66  integer, public, parameter :: prc_masterrank = 0
67  integer, public, parameter :: prc_domain_nlim = 10000
68  integer, public, parameter :: prc_comm_null = mpi_comm_null
69 
70  ! universal world
71  integer, public :: prc_universal_comm_world = -1
72  integer, public :: prc_universal_myrank = -1
73  integer, public :: prc_universal_nprocs = -1
74  logical, public :: prc_universal_ismaster = .false.
75 
76  integer, public :: prc_universal_jobid = 0
77 
78  ! global world
79  integer, public :: prc_global_comm_world = -1
80  integer, public :: prc_global_myrank = -1
81  integer, public :: prc_global_nprocs = -1
82  logical, public :: prc_global_ismaster = .false.
83 
84  integer, public :: prc_global_domainid = 0
85  integer, public :: prc_global_root(prc_domain_nlim)
86 
87  ! local world
88  integer, public :: prc_local_comm_world = -1
89  integer, public :: prc_nprocs = 1
90  integer, public :: prc_myrank = 0
91  logical, public :: prc_ismaster = .false.
92 
93  ! error handling
94  logical, public :: prc_mpi_alive = .false.
95  integer, public :: prc_universal_handler
96  integer, public :: prc_abort_comm_world
97  integer, public :: prc_abort_handler
98 
99  !-----------------------------------------------------------------------------
100  !
101  !++ Private procedure
102  !
103  private :: prc_mpicoloring
104  private :: prc_sort_ascd
105 
106  !-----------------------------------------------------------------------------
107  !
108  !++ Private parameters & variables
109  !
110  integer, private, parameter :: prc_abort_code = -1
111 ! integer, private, parameter :: PRC_ABORT_code_p = 2 !< mpi abort code in error handler from parent
112 ! integer, private, parameter :: PRC_ABORT_code_d = 3 !< mpi abort code in error handler from daughter
113 
114  procedure(closer), pointer :: prc_file_closer => null()
115 
116  !-----------------------------------------------------------------------------
117 contains
118  !-----------------------------------------------------------------------------
120  subroutine prc_mpistart( &
121  comm )
122  implicit none
123 
124  integer, intent(out) :: comm ! communicator
125 
126  integer :: ierr
127  !---------------------------------------------------------------------------
128 
129  call mpi_init(ierr)
130 
131  prc_mpi_alive = .true.
132 ! PRC_UNIVERSAL_handler = MPI_ERRHANDLER_NULL
133 ! call MPI_COMM_CREATE_ERRHANDLER( PRC_MPI_errorhandler, PRC_UNIVERSAL_handler, ierr )
134 
135  comm = mpi_comm_world
136  prc_abort_comm_world = comm
137 
138  return
139  end subroutine prc_mpistart
140 
141  !-----------------------------------------------------------------------------
143  subroutine prc_universal_setup( &
144  comm, &
145  nprocs, &
146  myrank, &
147  ismaster )
148  implicit none
149 
150  integer, intent(in) :: comm ! communicator
151  integer, intent(out) :: nprocs ! number of procs in this communicator
152  integer, intent(out) :: myrank ! myrank in this communicator
153  logical, intent(out) :: ismaster ! master process in this communicator?
154 
155  integer :: ierr
156  !---------------------------------------------------------------------------
157 
159 
160  call mpi_comm_size(prc_universal_comm_world,prc_universal_nprocs,ierr)
161  call mpi_comm_rank(prc_universal_comm_world,prc_universal_myrank,ierr)
162 
163  if ( prc_universal_myrank == prc_masterrank ) then
164  prc_universal_ismaster = .true.
165  else
166  prc_universal_ismaster = .false.
167  endif
168 
169  nprocs = prc_universal_nprocs
170  myrank = prc_universal_myrank
171  ismaster = prc_universal_ismaster
172 
173 
174 
175 ! PRC_ABORT_COMM_WORLD = PRC_UNIVERSAL_COMM_WORLD
176 !
177 ! call MPI_Comm_set_errhandler(PRC_ABORT_COMM_WORLD,PRC_UNIVERSAL_handler,ierr)
178 ! call MPI_Comm_get_errhandler(PRC_ABORT_COMM_WORLD,PRC_ABORT_handler ,ierr)
179 
180  return
181  end subroutine prc_universal_setup
182 
183  !-----------------------------------------------------------------------------
185  subroutine prc_global_setup( &
186  abortall, &
187  comm )
188  implicit none
189 
190  logical, intent(in) :: abortall ! abort all jobs?
191  integer, intent(in) :: comm ! communicator
192 
193  integer :: ierr
194  !---------------------------------------------------------------------------
195 
196  prc_global_comm_world = comm
197 
198  call mpi_comm_size(prc_global_comm_world,prc_global_nprocs,ierr)
199  call mpi_comm_rank(prc_global_comm_world,prc_global_myrank,ierr)
200 
201  if ( prc_global_myrank == prc_masterrank ) then
202  prc_global_ismaster = .true.
203  else
204  prc_global_ismaster = .false.
205  endif
206 
207 ! if ( .NOT. abortall ) then
208 ! PRC_ABORT_COMM_WORLD = PRC_GLOBAL_COMM_WORLD
209 !
210 ! call MPI_COMM_SET_ERRHANDLER(PRC_ABORT_COMM_WORLD,PRC_UNIVERSAL_handler,ierr)
211 ! call MPI_COMM_GET_ERRHANDLER(PRC_ABORT_COMM_WORLD,PRC_ABORT_handler ,ierr)
212 ! endif
213 
214  return
215  end subroutine prc_global_setup
216 
217  !-----------------------------------------------------------------------------
219  subroutine prc_local_setup( &
220  comm, &
221  myrank, &
222  ismaster )
223  implicit none
224 
225  integer, intent(in) :: comm ! communicator
226  integer, intent(out) :: myrank ! myrank in this communicator
227  logical, intent(out) :: ismaster ! master process in this communicator?
228 
229  integer :: ierr
230  !---------------------------------------------------------------------------
231 
232  prc_local_comm_world = comm
233 
234  call mpi_comm_rank(prc_local_comm_world,prc_myrank,ierr)
235  call mpi_comm_size(prc_local_comm_world,prc_nprocs,ierr)
236 
237  if ( prc_myrank == prc_masterrank ) then
238  prc_ismaster = .true.
239  else
240  prc_ismaster = .false.
241  endif
242 
243  myrank = prc_myrank
244  ismaster = prc_ismaster
245 
246  return
247  end subroutine prc_local_setup
248 
249  !-----------------------------------------------------------------------------
251  subroutine prc_singlecom_setup( &
252  comm, &
253  nprocs, &
254  myrank, &
255  ismaster )
256  implicit none
257 
258  integer, intent(in) :: comm ! communicator
259  integer, intent(out) :: nprocs ! number of procs
260  integer, intent(out) :: myrank ! myrank
261  logical, intent(out) :: ismaster ! master process?
262 
263  integer :: ierr
264  !---------------------------------------------------------------------------
265 
266  call mpi_comm_size(comm,nprocs,ierr)
267  call mpi_comm_rank(comm,myrank,ierr)
268 
269  if ( myrank == prc_masterrank ) then
270  ismaster = .true.
271  else
272  ismaster = .false.
273  endif
274 
276  prc_universal_nprocs = nprocs
277  prc_universal_myrank = myrank
278  prc_universal_ismaster = ismaster
279 
280  prc_global_comm_world = comm
281  prc_global_nprocs = nprocs
282  prc_global_myrank = myrank
283  prc_global_ismaster = ismaster
284 
285  prc_local_comm_world = comm
286  prc_nprocs = nprocs
287  prc_myrank = myrank
288  prc_ismaster = ismaster
289 
290 
291 
292  prc_abort_comm_world = comm
293 
294 ! call MPI_Comm_set_errhandler(PRC_ABORT_COMM_WORLD,PRC_UNIVERSAL_handler,ierr)
295 ! call MPI_Comm_get_errhandler(PRC_ABORT_COMM_WORLD,PRC_ABORT_handler ,ierr)
296 
297  return
298  end subroutine prc_singlecom_setup
299 
300  !-----------------------------------------------------------------------------
302  subroutine prc_errhandler_setup( &
303  use_fpm, &
304  master )
305  implicit none
306 
307  logical, intent(in) :: use_fpm ! fpm switch
308  logical, intent(in) :: master ! master flag
309 
310  integer :: ierr
311  !---------------------------------------------------------------------------
312 
313  call mpi_comm_create_errhandler(prc_mpi_errorhandler,prc_universal_handler,ierr)
314 
315  call mpi_comm_set_errhandler(prc_abort_comm_world,prc_universal_handler,ierr)
316  call mpi_comm_get_errhandler(prc_abort_comm_world,prc_abort_handler ,ierr)
317 
319  if( prc_universal_ismaster ) write(*,*) ""
320  if( prc_universal_ismaster ) write(*,*) "ERROR: MPI HANDLER is INCONSISTENT"
321  if( prc_universal_ismaster ) write(*,*) " PRC_UNIVERSAL_handler = ", prc_universal_handler
322  if( prc_universal_ismaster ) write(*,*) " PRC_ABORT_handler = ", prc_abort_handler
323  call prc_abort
324  endif
325 
326  if ( use_fpm ) then
327  call sigvars_get_all( master )
328  call signal( sigint, prc_abort )
329  call signal( sigquit, prc_abort )
330  call signal( sigabrt, prc_abort )
331  call signal( sigfpe, prc_abort )
332  call signal( sigsegv, prc_abort )
333  call signal( sigterm, prc_abort )
334  endif
335 
336  return
337  end subroutine prc_errhandler_setup
338 
339  !-----------------------------------------------------------------------------
341  subroutine prc_abort
342  implicit none
343 
344  integer :: ierr
345  !---------------------------------------------------------------------------
346 
347  if ( prc_mpi_alive ) then
348  ! tentative approach; input "PRC_UNIVERSAL_COMM_WORLD".
349  call mpi_comm_call_errhandler(prc_universal_comm_world,prc_abort_code,ierr)
350  endif
351 
352  stop
353  end subroutine prc_abort
354 
355  !-----------------------------------------------------------------------------
 !> Stop MPI peacefully and close the log/config files.
 !> NOTE(review): this copy appears to have lost an "if" line guarding the
 !> mpi_comm_free block (the first "endif" below has no visible opener) --
 !> confirm against the repository version before editing this routine.
 subroutine prc_mpifinish
   implicit none

   integer :: ierr
   logical :: sign_status
   logical :: sign_exit
   !---------------------------------------------------------------------------

   ! FPM polling: block until the file-based process monitor signals exit
   if ( fpm_alive ) then
      sign_status = .false.
      sign_exit   = .false.
      do while ( .NOT. sign_exit )
         call fpm_polling( sign_status, sign_exit )
      enddo
   endif

   ! release the error handlers created in prc_errhandler_setup
   if (prc_universal_handler .NE. mpi_errhandler_null) then
      call mpi_errhandler_free(prc_universal_handler, ierr)
   endif
   if (prc_abort_handler .NE. mpi_errhandler_null) then
      call mpi_errhandler_free(prc_abort_handler, ierr)
   endif

   ! Stop MPI
   if ( prc_mpi_alive ) then
      log_newline
      log_progress(*) 'finalize MPI...'

      ! free splitted communicator
      call mpi_comm_free(prc_local_comm_world,ierr)
   endif

   call mpi_barrier(prc_universal_comm_world,ierr)

   call mpi_finalize(ierr)
   log_progress(*) 'MPI is peacefully finalized'
   endif

   ! Close logfile, configfile
   if ( io_l ) then
      if( io_fid_log /= io_fid_stdout ) close(io_fid_log)
   endif
   close(io_fid_conf)

   return
 end subroutine prc_mpifinish
405 
406  !-----------------------------------------------------------------------------
 !> MPI Communicator Split for bulk jobs.
 !>
 !> Splits ORG_COMM_WORLD into NUM_BULKJOB sub-communicators, with
 !> PRC_BULKJOB(i) ranks in job i. The color assigned to a rank is its
 !> bulk job id; the master rank of each job is recorded in PRC_GLOBAL_root.
 !> NOTE(review): the line following "id_bulkjob = 0" in the single-job
 !> branch appears to be missing in this copy (presumably
 !> "prc_universal_jobid = 0") -- confirm against the repository version.
 subroutine prc_mpisplit_bulk( &
      ORG_COMM_WORLD, &
      NUM_BULKJOB, &
      PRC_BULKJOB, &
      debug, &
      SUB_COMM_WORLD, &
      ID_BULKJOB )
   implicit none

   integer, intent(in)  :: org_comm_world ! communicator (original group)
   integer, intent(in)  :: num_bulkjob    ! number of bulk jobs
   integer, intent(in)  :: prc_bulkjob(:) ! number of ranks in subgroup communicator
   logical, intent(in)  :: debug          ! log-output for mpi splitting?
   integer, intent(out) :: sub_comm_world ! communicator (new subgroup)
   integer, intent(out) :: id_bulkjob     ! bulk job id

   integer :: org_myrank ! my rank in the original communicator
   integer :: org_nrank  ! number of ranks in the original communicator
   integer :: sum_nrank  ! total ranks requested over all bulk jobs

   integer, allocatable :: prc2color (:) ! color id for each process
   integer, allocatable :: prc2key   (:) ! local rank id for each process
   integer :: col_domain(0:prc_domain_nlim) ! domain id of this color
   integer :: col_master(0:prc_domain_nlim) ! master rank of this color
   integer :: col_parent(0:prc_domain_nlim) ! parent color of this color
   integer :: col_child (0:prc_domain_nlim) ! child color of this color

   logical :: color_reorder = .false. ! dummy

   integer :: i, color
   integer :: itag, ierr
   !---------------------------------------------------------------------------


   if ( num_bulkjob == 1 ) then ! single domain run

      sub_comm_world = org_comm_world
      id_bulkjob     = 0

   elseif( num_bulkjob > 1 ) then ! multi domain run

      call mpi_comm_rank(org_comm_world,org_myrank,ierr)
      call mpi_comm_size(org_comm_world,org_nrank ,ierr)

      ! the requested layout must use every launched rank exactly once
      sum_nrank = sum(prc_bulkjob(1:num_bulkjob))

      if ( sum_nrank /= org_nrank ) then
         if ( prc_universal_ismaster ) then
            log_error("PRC_MPIsplit",*) "MPI PROCESS NUMBER is INCONSISTENT"
            log_error_cont(*) " REQUESTED NPROCS = ", sum_nrank, " LAUNCHED NPROCS = ", org_nrank
         endif
         call prc_abort
      endif

      allocate( prc2color(0:org_nrank-1) )
      allocate( prc2key(0:org_nrank-1) )

      ! assign each rank a color (= bulk job id) and key (= rank within
      ! the job); bulkjob=.true. disables the parent/child relationship
      call prc_mpicoloring( org_comm_world,  & ! [IN]
                            org_nrank,       & ! [IN]
                            num_bulkjob,     & ! [IN]
                            prc_bulkjob(:),  & ! [IN]
                            color_reorder,   & ! [IN]
                            .true.,          & ! [IN]
                            debug,           & ! [IN]
                            prc2color(:),    & ! [OUT]
                            prc2key(:),      & ! [OUT]
                            col_domain(:),   & ! [OUT]
                            col_master(:),   & ! [OUT]
                            col_parent(:),   & ! [OUT]
                            col_child(:)     ) ! [OUT]

      ! split comm_world
      call mpi_comm_split( org_comm_world,        & ! [IN]
                           prc2color(org_myrank), & ! [IN]
                           prc2key(org_myrank),   & ! [IN]
                           sub_comm_world,        & ! [OUT]
                           ierr                   ) ! [OUT]

      ! remember the master rank of every bulk job
      do i = 1, num_bulkjob
         color = i-1
         prc_global_root(i) = col_master(color)
      enddo

      id_bulkjob          = prc2color(org_myrank) ! color = bulk id
      prc_universal_jobid = prc2color(org_myrank) ! color = bulk id

      deallocate( prc2color )
      deallocate( prc2key )

   else
      if ( prc_universal_ismaster ) then
         log_error("PRC_MPIsplit",*) "REQUESTED DOMAIN NUMBER IS NOT ACCEPTABLE"
      endif
      call prc_abort
   endif

   return
 end subroutine prc_mpisplit_bulk
507 
508  !-----------------------------------------------------------------------------
 !> MPI Communicator Split for nested domains.
 !>
 !> Splits ORG_COMM_WORLD into NUM_DOMAIN sub-communicators (one per
 !> nesting domain) and creates the parent/child inter-communicators
 !> between neighbouring domains.
 !> NOTE(review): the line following "id_domain = 1" in the single-domain
 !> branch appears to be missing in this copy (presumably
 !> "prc_global_domainid = 1") -- confirm against the repository version.
 subroutine prc_mpisplit_nest( &
      ORG_COMM_WORLD, &
      NUM_DOMAIN, &
      PRC_DOMAIN, &
      debug, &
      color_reorder, &
      SUB_COMM_WORLD, &
      ID_DOMAIN, &
      INTERCOMM_parent, &
      INTERCOMM_child )
   implicit none

   integer, intent(in)  :: org_comm_world   ! communicator (original group)
   integer, intent(in)  :: num_domain       ! number of nesting domains
   integer, intent(in)  :: prc_domain(:)    ! number of ranks in subgroup communicator
   logical, intent(in)  :: debug            ! log-output for mpi splitting?
   logical, intent(in)  :: color_reorder    ! reorder colors by process count?
   integer, intent(out) :: sub_comm_world   ! communicator (new subgroup)
   integer, intent(out) :: id_domain        ! domain id
   integer, intent(out) :: intercomm_parent ! communicator between this rank and parent domain
   integer, intent(out) :: intercomm_child  ! communicator between this rank and child domain

   integer :: org_myrank ! my rank in the original communicator
   integer :: org_nrank  ! number of ranks in the original communicator
   integer :: sum_nrank  ! total ranks requested over all domains

   integer, allocatable :: prc2color (:) ! color id for each process
   integer, allocatable :: prc2key   (:) ! local rank id for each process
   integer :: col_domain(0:prc_domain_nlim) ! domain id of this color
   integer :: col_master(0:prc_domain_nlim) ! master rank of this color
   integer :: col_parent(0:prc_domain_nlim) ! parent color of this color
   integer :: col_child (0:prc_domain_nlim) ! child color of this color

   integer :: i, color
   integer :: itag, ierr
   !---------------------------------------------------------------------------

   intercomm_parent = mpi_comm_null
   intercomm_child  = mpi_comm_null

   if ( num_domain == 1 ) then ! single domain run

      sub_comm_world = org_comm_world
      id_domain      = 1

   elseif( num_domain > 1 ) then ! multi domain run

      call mpi_comm_rank(org_comm_world,org_myrank,ierr)
      call mpi_comm_size(org_comm_world,org_nrank ,ierr)

      ! the requested layout must use every launched rank exactly once
      sum_nrank = sum(prc_domain(1:num_domain))

      if ( sum_nrank /= org_nrank ) then
         if ( prc_universal_ismaster ) then
            log_error("PRC_MPIsplit",*) "MPI PROCESS NUMBER is INCONSISTENT"
            log_error_cont(*) " REQUESTED NPROCS = ", sum_nrank, " LAUNCHED NPROCS = ", org_nrank
         endif
         call prc_abort
      endif

      allocate( prc2color(0:org_nrank-1) )
      allocate( prc2key(0:org_nrank-1) )

      ! assign each rank a color and key; bulkjob=.false. also builds the
      ! parent/child relationship between domains
      call prc_mpicoloring( org_comm_world,  & ! [IN]
                            org_nrank,       & ! [IN]
                            num_domain,      & ! [IN]
                            prc_domain(:),   & ! [IN]
                            color_reorder,   & ! [IN]
                            .false.,         & ! [IN]
                            debug,           & ! [IN]
                            prc2color(:),    & ! [OUT]
                            prc2key(:),      & ! [OUT]
                            col_domain(:),   & ! [OUT]
                            col_master(:),   & ! [OUT]
                            col_parent(:),   & ! [OUT]
                            col_child(:)     ) ! [OUT]

      ! split comm_world
      call mpi_comm_split( org_comm_world,        & ! [IN]
                           prc2color(org_myrank), & ! [IN]
                           prc2key(org_myrank),   & ! [IN]
                           sub_comm_world,        & ! [OUT]
                           ierr                   ) ! [OUT]

      id_domain           = col_domain(prc2color(org_myrank)) ! color /= domain id
      prc_global_domainid = col_domain(prc2color(org_myrank)) ! color /= domain id

      ! report which colors are connected as parent/child
      if ( prc_universal_ismaster ) then
         write(*,*)
         write(*,*) "INFO [PRC_MPIsplit] Inter-domain relationship information"
         do i = 1, num_domain
            color = i-1
            if ( col_parent(color) >= 0 ) then
               write(*,'(5x,A,I2.2)') "Relationship No.", i
               write(*,'(5x,A,I2.2,A,A,I2.2)') "Parent color: ", col_parent(color), " <=> ", &
                                               "Child color: ", color
            endif
         enddo
      endif

      ! create inter-communicator: for each parent/child pair, the parent
      ! side stores it as intercomm_child and the child side as
      ! intercomm_parent; a unique tag per pair avoids cross-talk
      do i = 1, num_domain
         color = i-1
         if ( col_parent(color) >= 0 ) then
            itag = i*100

            if ( prc2color(org_myrank) == col_parent(color) ) then ! as a parent

               call mpi_intercomm_create( sub_comm_world, prc_masterrank,     &
                                          org_comm_world, col_master(color),  &
                                          itag, intercomm_child, ierr )

            elseif( prc2color(org_myrank) == color ) then ! as a child

               call mpi_intercomm_create( sub_comm_world, prc_masterrank,                 &
                                          org_comm_world, col_master(col_parent(color)),  &
                                          itag, intercomm_parent, ierr )

            endif

            call mpi_barrier(org_comm_world,ierr)
         endif
      enddo

      deallocate( prc2color )
      deallocate( prc2key )

   else
      if ( prc_universal_ismaster ) then
         ! NOTE(review): "RPC" in the message below looks like a typo for
         ! "PRC"; left unchanged since it is a runtime string
         write(*,*)"ERROR [RPC_MPIsplit] REQUESTED DOMAIN NUMBER IS NOT ACCEPTABLE"
      endif
      call prc_abort
   endif

   return
 end subroutine prc_mpisplit_nest
647 
648  !-----------------------------------------------------------------------------
 !> Assign a color and key to every rank for MPI_COMM_SPLIT.
 !>
 !> Colors number the subgroups (0-based); keys number the ranks inside a
 !> subgroup. When color_reorder is enabled (and this is not a bulk job),
 !> domains are reordered so the domain with the most processes gets the
 !> lowest color. Also fills per-color master-rank, domain-id and
 !> parent/child tables.
 subroutine prc_mpicoloring( &
      ORG_COMM_WORLD, &
      ORG_nrank, &
      NUM_SUBGROUP, &
      PRC_SUBGROUP, &
      color_reorder, &
      bulkjob, &
      debug, &
      prc2color, &
      prc2key, &
      COL_domain, &
      COL_master, &
      COL_parent, &
      COL_child )
   implicit none

   integer, intent(in)  :: org_comm_world  ! communicator (original group)
   integer, intent(in)  :: org_nrank       ! number of ranks in the original communicator
   integer, intent(in)  :: num_subgroup    ! number of subgroups (domains/jobs)
   integer, intent(in)  :: prc_subgroup(:) ! number of ranks in each subgroup
   logical, intent(in)  :: color_reorder   ! reorder colors by process count?
   logical, intent(in)  :: bulkjob         ! bulk job (no parent/child relation)?
   logical, intent(in)  :: debug           ! verbose output?
   integer, intent(out) :: prc2color (0:org_nrank-1) ! color id for each process
   integer, intent(out) :: prc2key   (0:org_nrank-1) ! local rank id for each process
   integer, intent(out) :: col_domain(0:prc_domain_nlim) ! domain id of the color
   integer, intent(out) :: col_master(0:prc_domain_nlim) ! master rank of the color
   integer, intent(out) :: col_parent(0:prc_domain_nlim) ! parent color of the color
   integer, intent(out) :: col_child (0:prc_domain_nlim) ! child color of the color

   integer :: prc_reorder( num_subgroup)    ! reordered number of process
   integer :: dom2col    ( num_subgroup)    ! get color number by domain number
   integer :: col2dom    (0:num_subgroup-1) ! get domain number by color number
   logical :: touch      (0:num_subgroup-1) ! color already claimed by a domain?

   integer :: id_parent ! parent domain number
   integer :: id_child  ! child domain number
   integer :: nprc, key
   integer :: i, domain, color, p
   integer :: ierr
   !---------------------------------------------------------------------------

   if ( color_reorder .AND. .NOT. bulkjob ) then ! with reordering of colors

      ! sort the process counts, then match each domain against the sorted
      ! list from the end so that larger subgroups get lower color numbers;
      ! "touch" prevents two equal-sized domains claiming the same color
      prc_reorder(1:num_subgroup) = prc_subgroup(1:num_subgroup)
      call prc_sort_ascd( prc_reorder(1:num_subgroup), 1, num_subgroup )

      touch(:) = .false.
      do domain = 1, num_subgroup
         do i = num_subgroup, 1, -1
            color = i-1 ! counted from 0
            if ( prc_subgroup(domain) == prc_reorder(i) .AND. ( .NOT. touch(color) ) ) then
               dom2col(domain) = color  ! domain_num -> color_num
               col2dom(color)  = domain ! color_num -> domain_num
               touch(color)    = .true.
               exit
            endif
         enddo ! order(=color+1) loop
      enddo ! domain loop

   else ! without reordering of colors

      do domain = 1, num_subgroup
         color = domain-1 ! counted from 0
         dom2col(domain) = color  ! domain_num -> color_num
         col2dom(color)  = domain ! color_num -> domain_num
      enddo ! domain loop

   endif

   ! make relationship: domain i's parent is domain i-1, child is i+1
   col_parent(:) = -1
   col_child(:)  = -1
   if ( .NOT. bulkjob ) then
      do i = 1, num_subgroup
         id_parent = i - 1
         id_child  = i + 1

         if ( id_parent >= 1 .AND. id_parent <= num_subgroup ) then
            col_parent(dom2col(i)) = dom2col(id_parent)
         endif
         if ( id_child >= 1 .AND. id_child <= num_subgroup ) then
            col_child(dom2col(i)) = dom2col(id_child)
         endif

         if ( debug .AND. prc_universal_ismaster ) then
            ! NOTE(review): col_parent/col_child are filled by color above
            ! but indexed by the domain number i here -- confirm whether
            ! dom2col(i) was intended before relying on this debug output
            write(*,'(4(A,I2))') &
                 "DOMAIN: ", i, ", MY COL: ", dom2col(i), ", PARENT COL:", col_parent(i), ", CHILD COL:", col_child(i)
         endif
      enddo ! domain loop
   endif

   ! summary of the color <-> domain mapping and relationships
   if ( prc_universal_ismaster ) then
      write(*,*)
      write(*,*) 'INFO [PRC_MPIcoloring] Domain information'
      write(*,'(5x,A,L2)') 'Reordering? : ', color_reorder
      do i = 1, num_subgroup
         color  = i-1
         domain = col2dom(color)
         write(*,*)
         write(*,'(5x,2(A,I2.2))') "Order No. ", i, " -> Domain No. ", domain
         write(*,'(5x,A,I5)') "Number of process = ", prc_subgroup(domain)
         write(*,'(5x,A,I5)') "Color of this domain = ", color
         if ( col_parent(color) >= 0 ) then
            write(*,'(5x,A,I5)') "Color of parent domain = ", col_parent(color)
         else
            write(*,'(5x,A)' ) "Color of parent domain = no parent"
         endif
         if ( col_child(color) >= 0 ) then
            write(*,'(5x,A,I5)') "Color of child domain = ", col_child(color)
         else
            write(*,'(5x,A)' ) "Color of child domain = no child"
         endif
      enddo ! order(=color+1) loop
   endif

   ! make a process table: walk the ranks in order, giving each color a
   ! block of "nprc" consecutive ranks; the key restarts at 0 per block
   color = 0
   key   = 0
   domain = col2dom(color)
   nprc  = prc_subgroup(domain)
   col_master(:) = -999

   do p = 0, org_nrank-1
      prc2color(p) = color
      prc2key(p)   = key

      if ( key == 0 ) then ! master rank
         col_domain(color) = domain
         col_master(color) = p
      endif

      if ( debug .AND. prc_universal_ismaster ) then
         write(*,'(5x,4(A,I5))') &
              "PE:", p, " COLOR:", prc2color(p), " KEY:", prc2key(p), " COL_master:", col_master(color)
      endif

      key = key + 1
      if ( key >= nprc ) then ! this color is full; advance to the next
         color = color + 1
         key   = 0
         if ( color < num_subgroup ) then ! ignore last
            domain = col2dom(color)
            nprc   = prc_subgroup(domain)
         endif
      endif
   enddo

   return
 end subroutine prc_mpicoloring
800 
801  !-----------------------------------------------------------------------------
803  recursive subroutine prc_sort_ascd(a, top, bottom)
804  implicit none
805  integer, intent(inout) :: a(:)
806  integer, intent(in) :: top, bottom
807  integer :: i, j, cnt, trg
808  !---------------------------------------------------------------------------
809  cnt = a( (top+bottom) / 2 )
810  i = top; j = bottom
811  do
812  do while ( a(i) > cnt ) !ascending evaluation
813  i = i + 1
814  enddo
815  do while ( cnt > a(j) ) !ascending evaluation
816  j = j - 1
817  enddo
818  if ( i >= j ) exit
819  trg = a(i); a(i) = a(j); a(j) = trg
820  i = i + 1
821  j = j - 1
822  enddo
823  if ( top < i-1 ) call prc_sort_ascd( a, top, i-1 )
824  if ( j+1 < bottom ) call prc_sort_ascd( a, j+1, bottom )
825  return
826  end subroutine prc_sort_ascd
827 
828  !-----------------------------------------------------------------------------
830  subroutine prc_mpibarrier
831  implicit none
832 
833  integer :: ierr
834  !---------------------------------------------------------------------------
835 
836  if ( prc_mpi_alive ) then
837  call mpi_barrier(prc_local_comm_world,ierr)
838  endif
839 
840  end subroutine prc_mpibarrier
841 
842  !-----------------------------------------------------------------------------
845  function prc_mpitime() result(time)
846  implicit none
847 
848  real(dp) :: time
849  !---------------------------------------------------------------------------
850 
851  if ( prc_mpi_alive ) then
852  time = real(mpi_wtime(), kind=dp)
853  else
854  call cpu_time(time)
855  endif
856 
857  end function prc_mpitime
858 
859  !-----------------------------------------------------------------------------
861  subroutine prc_mpitimestat( &
862  avgvar, &
863  maxvar, &
864  minvar, &
865  maxidx, &
866  minidx, &
867  var )
868  implicit none
869 
870  real(dp), intent(out) :: avgvar(:)
871  real(dp), intent(out) :: maxvar(:)
872  real(dp), intent(out) :: minvar(:)
873  integer, intent(out) :: maxidx(:)
874  integer, intent(out) :: minidx(:)
875  real(dp), intent(in) :: var(:)
876 
877  real(dp), allocatable :: statval(:,:)
878  integer :: vsize
879 
880  real(dp) :: totalvar
881  integer :: ierr
882  integer :: v, p
883  !---------------------------------------------------------------------------
884 
885  vsize = size(var(:))
886 
887  allocate( statval(vsize,0:prc_nprocs-1) )
888  statval(:,:) = 0.0_dp
889 
890  do v = 1, vsize
891  statval(v,prc_myrank) = var(v)
892  enddo
893 
894  ! MPI broadcast
895  do p = 0, prc_nprocs-1
896  call mpi_bcast( statval(1,p), &
897  vsize, &
898  mpi_double_precision, &
899  p, &
901  ierr )
902  enddo
903 
904  do v = 1, vsize
905  totalvar = 0.0_dp
906  do p = 0, prc_nprocs-1
907  totalvar = totalvar + statval(v,p)
908  enddo
909  avgvar(v) = totalvar / prc_nprocs
910 
911  maxvar(v) = maxval(statval(v,:))
912  minvar(v) = minval(statval(v,:))
913  maxidx(v:v) = maxloc(statval(v,:))
914  minidx(v:v) = minloc(statval(v,:))
915  enddo
916 
917  deallocate( statval )
918 
919  return
920  end subroutine prc_mpitimestat
921 
922  !-----------------------------------------------------------------------------
 !> MPI error handler: print diagnostics, close files, then abort MPI.
 !>
 !> Installed by prc_errhandler_setup; also reached via prc_abort, which
 !> raises it with errcode == prc_abort_code (then no MPI message is shown).
 subroutine prc_mpi_errorhandler( &
      comm,    &
      errcode )
   implicit none

   ! attributes are needed to be the same with COMM_ERRHANDLER function
   integer :: comm    ! communicator on which the error was raised
   integer :: errcode ! MPI error code, or prc_abort_code when called via prc_abort

   character(len=MPI_MAX_ERROR_STRING) :: msg
   integer :: len
   integer :: ierr
   logical :: sign_status
   logical :: sign_exit
   !---------------------------------------------------------------------------

!print *, "into errhandler:", PRC_UNIVERSAL_myrank

   ! FPM polling: wait for the file-based process monitor before aborting
   if ( fpm_alive ) then
      sign_status = .false.
      sign_exit   = .false.
      do while ( .NOT. sign_exit )
         call fpm_polling( sign_status, sign_exit )
      enddo
   endif

   ! Print Error Messages
   if ( prc_mpi_alive ) then
      ! flush 1kbyte
      if ( io_l ) then
         log_progress(*) 'abort MPI'
         flush(io_fid_log)
      endif

      ! report this process's location in the communicator hierarchy
      if ( prc_ismaster ) then
         write(*,*) '+++++ BULK ID : ', prc_universal_jobid
         write(*,*) '+++++ DOMAIN ID : ', prc_global_domainid
         write(*,*) '+++++ MASTER LOCATION : ', prc_universal_myrank,'/',prc_universal_nprocs
         write(*,*) '+++++ GLOBAL LOCATION : ', prc_global_myrank,'/',prc_global_nprocs
         write(*,*) '+++++ LOCAL LOCATION : ', prc_myrank,'/',prc_nprocs
         write(*,*) ''
      endif

      if ( errcode == prc_abort_code ) then ! called from PRC_abort
         ! do nothing
      elseif( errcode <= mpi_err_lastcode ) then
         ! genuine MPI error: translate the code to a readable message
         call mpi_error_string(errcode, msg, len, ierr)
         if( io_l ) write(io_fid_log,*) '+++++ ', errcode, trim(msg)
         write(*,*) '+++++ ', errcode, trim(msg)
      else
         if( io_l ) write(io_fid_log,*) '+++++ Unexpected error code', errcode
         write(*,*) '+++++ Unexpected error code', errcode
      endif

      if ( comm /= prc_abort_comm_world ) then
         if( io_l ) write(io_fid_log,*) '+++++ Unexpected communicator'
         write(*,*) '+++++ Unexpected communicator'
      endif
      if( io_l ) write(io_fid_log,*) ''
      write(*,*) ''
   endif

   ! let the registered callback close remaining files; skip_abort=.true.
   ! so the closer does not re-enter abort handling
   if ( associated( prc_file_closer ) ) call prc_file_closer( .true. )

   ! Close logfile, configfile
   if ( io_l ) then
      if( io_fid_log /= io_fid_stdout ) close(io_fid_log)
   endif
   close(io_fid_conf)

   ! Abort MPI
   if ( prc_mpi_alive ) then
      ! grace period so other ranks can flush their logs
      ! (sleep is a compiler extension, not standard Fortran)
      call sleep(5)
      call mpi_abort(prc_abort_comm_world, prc_abort_code, ierr)
   endif

   stop
 end subroutine prc_mpi_errorhandler
1003 
1004  subroutine prc_set_file_closer( routine )
1005  procedure(closer) :: routine
1006 
1007  prc_file_closer => routine
1008 
1009  return
1010  end subroutine prc_set_file_closer
1011 
1012 end module scale_prc
scale_prc::prc_universal_ismaster
logical, public prc_universal_ismaster
master process in universal communicator?
Definition: scale_prc.F90:74
scale_prc::prc_abort
subroutine, public prc_abort
Abort Process.
Definition: scale_prc.F90:342
scale_sigvars::sigabrt
integer(c_int), public sigabrt
Definition: scale_sigvars.F90:35
scale_prc::prc_universal_setup
subroutine, public prc_universal_setup(comm, nprocs, myrank, ismaster)
setup MPI in universal communicator
Definition: scale_prc.F90:148
scale_prc::prc_abort_handler
integer, public prc_abort_handler
error handler communicator for aborting
Definition: scale_prc.F90:97
scale_prc::prc_mpisplit_bulk
subroutine, public prc_mpisplit_bulk(ORG_COMM_WORLD, NUM_BULKJOB, PRC_BULKJOB, debug, SUB_COMM_WORLD, ID_BULKJOB)
MPI Communicator Split (bulk job)
Definition: scale_prc.F90:415
scale_prc::prc_global_setup
subroutine, public prc_global_setup(abortall, comm)
setup MPI in global communicator
Definition: scale_prc.F90:188
scale_prc::prc_set_file_closer
subroutine, public prc_set_file_closer(routine)
Definition: scale_prc.F90:1005
scale_precision
module PRECISION
Definition: scale_precision.F90:14
scale_prc::prc_local_comm_world
integer, public prc_local_comm_world
local communicator
Definition: scale_prc.F90:88
scale_sigvars::sigsegv
integer(c_int), public sigsegv
Definition: scale_sigvars.F90:37
scale_prc::prc_mpistart
subroutine, public prc_mpistart(comm)
Start MPI.
Definition: scale_prc.F90:122
scale_prc::prc_myrank
integer, public prc_myrank
rank of this process in the local communicator
Definition: scale_prc.F90:90
scale_fpm::fpm_polling
subroutine, public fpm_polling(run_stat, stop_signal)
Main system of FPM.
Definition: scale_fpm.F90:176
scale_prc::prc_mpi_alive
logical, public prc_mpi_alive
MPI is alive?
Definition: scale_prc.F90:94
scale_prc::prc_universal_myrank
integer, public prc_universal_myrank
myrank in universal communicator
Definition: scale_prc.F90:72
scale_prc
module PROCESS
Definition: scale_prc.F90:11
scale_prc::prc_local_setup
subroutine, public prc_local_setup(comm, myrank, ismaster)
Setup MPI in local communicator.
Definition: scale_prc.F90:223
scale_prc::prc_universal_jobid
integer, public prc_universal_jobid
my job ID in universal communicator
Definition: scale_prc.F90:76
scale_prc::prc_mpibarrier
subroutine, public prc_mpibarrier
Barrier MPI.
Definition: scale_prc.F90:831
scale_io
module STDIO
Definition: scale_io.F90:10
scale_prc::prc_comm_null
integer, parameter, public prc_comm_null
Definition: scale_prc.F90:68
scale_prc::prc_masterrank
integer, parameter, public prc_masterrank
master process in each communicator
Definition: scale_prc.F90:66
scale_prc::prc_global_ismaster
logical, public prc_global_ismaster
master process in global communicator?
Definition: scale_prc.F90:82
scale_prc::prc_errhandler_setup
subroutine, public prc_errhandler_setup(use_fpm, master)
Setup MPI error handler.
Definition: scale_prc.F90:305
scale_io::io_fid_log
integer, public io_fid_log
Log file ID.
Definition: scale_io.F90:57
scale_sigvars::sigquit
integer(c_int), public sigquit
Definition: scale_sigvars.F90:34
scale_prc::prc_global_root
integer, dimension(prc_domain_nlim), public prc_global_root
root processes in global members
Definition: scale_prc.F90:85
scale_prc::prc_global_myrank
integer, public prc_global_myrank
myrank in global communicator
Definition: scale_prc.F90:80
scale_prc::prc_domain_nlim
integer, parameter, public prc_domain_nlim
max depth of domains
Definition: scale_prc.F90:67
scale_prc::prc_universal_comm_world
integer, public prc_universal_comm_world
original communicator
Definition: scale_prc.F90:71
scale_precision::dp
integer, parameter, public dp
Definition: scale_precision.F90:32
scale_prc::prc_global_domainid
integer, public prc_global_domainid
my domain ID in global communicator
Definition: scale_prc.F90:84
scale_sigvars::sigfpe
integer(c_int), public sigfpe
Definition: scale_sigvars.F90:36
scale_prc::prc_abort_comm_world
integer, public prc_abort_comm_world
communicator for aborting
Definition: scale_prc.F90:96
scale_prc::prc_mpisplit_nest
subroutine, public prc_mpisplit_nest(ORG_COMM_WORLD, NUM_DOMAIN, PRC_DOMAIN, debug, color_reorder, SUB_COMM_WORLD, ID_DOMAIN, INTERCOMM_parent, INTERCOMM_child)
MPI Communicator Split (nesting)
Definition: scale_prc.F90:520
scale_fpm::fpm_alive
logical, public fpm_alive
Definition: scale_fpm.F90:32
scale_prc::prc_nprocs
integer, public prc_nprocs
number of processes in the local communicator
Definition: scale_prc.F90:89
scale_sigvars
module sigvars
Definition: scale_sigvars.F90:13
scale_prc::prc_universal_handler
integer, public prc_universal_handler
error handler in universal communicator
Definition: scale_prc.F90:95
scale_fpm
module FPM
Definition: scale_fpm.F90:10
scale_io::io_l
logical, public io_l
output log or not? (this process)
Definition: scale_io.F90:62
scale_prc::prc_mpitimestat
subroutine, public prc_mpitimestat(avgvar, maxvar, minvar, maxidx, minidx, var)
Calc global statistics for timer.
Definition: scale_prc.F90:868
scale_prc::prc_singlecom_setup
subroutine, public prc_singlecom_setup(comm, nprocs, myrank, ismaster)
Setup MPI single communicator (not use universal-global-local setting)
Definition: scale_prc.F90:256
scale_io::io_fid_stdout
integer, parameter, public io_fid_stdout
Definition: scale_io.F90:55
scale_prc::prc_global_comm_world
integer, public prc_global_comm_world
global communicator
Definition: scale_prc.F90:79
scale_sigvars::sigvars_get_all
subroutine, public sigvars_get_all(master)
Get signal values.
Definition: scale_sigvars.F90:104
scale_prc::prc_mpitime
real(dp) function, public prc_mpitime()
Get MPI time.
Definition: scale_prc.F90:846
scale_prc::prc_mpifinish
subroutine, public prc_mpifinish
Stop MPI peacefully.
Definition: scale_prc.F90:358
scale_prc::prc_universal_nprocs
integer, public prc_universal_nprocs
process num in universal communicator
Definition: scale_prc.F90:73
scale_prc::prc_global_nprocs
integer, public prc_global_nprocs
process num in global communicator
Definition: scale_prc.F90:81
scale_sigvars::sigint
integer(c_int), public sigint
Definition: scale_sigvars.F90:33
scale_sigvars::sigterm
integer(c_int), public sigterm
Definition: scale_sigvars.F90:38
scale_io::io_fid_conf
integer, public io_fid_conf
Config file ID.
Definition: scale_io.F90:56
scale_prc::prc_ismaster
logical, public prc_ismaster
master process in local communicator?
Definition: scale_prc.F90:91