Go to the documentation of this file.
49 subroutine closer( skip_abort )
50 logical,
intent(in),
optional :: skip_abort
108 private :: prc_mpicoloring
109 private :: prc_sort_ascd
115 integer,
private,
parameter :: prc_abort_code = -1
119 procedure(closer),
pointer :: prc_file_closer => null()
129 integer,
intent(out) :: comm
140 comm = mpi_comm_world
155 integer,
intent(in) :: comm
156 integer,
intent(out) :: nprocs
157 integer,
intent(out) :: myrank
158 logical,
intent(out) :: ismaster
195 logical,
intent(in) :: abortall
196 integer,
intent(in) :: comm
230 integer,
intent(in) :: comm
231 integer,
intent(out) :: myrank
232 logical,
intent(out) :: ismaster
264 integer,
intent(in) :: comm
265 integer,
intent(out) :: nprocs
266 integer,
intent(out) :: myrank
267 logical,
intent(out) :: ismaster
272 call mpi_comm_size(comm,nprocs,ierr)
273 call mpi_comm_rank(comm,myrank,ierr)
314 logical,
intent(in) :: use_fpm
315 logical,
intent(in) :: master
368 logical :: sign_status
380 sign_status = .false.
382 do while ( .NOT. sign_exit )
403 call mpi_finalize(ierr)
420 integer,
intent(in) :: org_comm_world
421 integer,
intent(in) :: num_bulkjob
422 integer,
intent(in) :: prc_bulkjob(:)
423 logical,
intent(in) :: debug
424 integer,
intent(out) :: sub_comm_world
425 integer,
intent(out) :: id_bulkjob
427 integer :: org_myrank
431 integer,
allocatable :: prc2color (:)
432 integer,
allocatable :: prc2key (:)
438 logical :: color_reorder = .false.
441 integer :: itag, ierr
445 if ( num_bulkjob == 1 )
then
447 sub_comm_world = org_comm_world
451 elseif( num_bulkjob > 1 )
then
453 call mpi_comm_rank(org_comm_world,org_myrank,ierr)
454 call mpi_comm_size(org_comm_world,org_nrank ,ierr)
456 sum_nrank = sum(prc_bulkjob(1:num_bulkjob))
458 if ( sum_nrank /= org_nrank )
then
460 log_error(
"PRC_MPIsplit",*)
"MPI PROCESS NUMBER is INCONSISTENT"
461 log_error_cont(*)
" REQUESTED NPROCS = ", sum_nrank,
" LAUNCHED NPROCS = ", org_nrank
466 allocate( prc2color(0:org_nrank-1) )
467 allocate( prc2key(0:org_nrank-1) )
469 call prc_mpicoloring( org_comm_world, &
484 call mpi_comm_split( org_comm_world, &
485 prc2color(org_myrank), &
486 prc2key(org_myrank), &
490 do i = 1, num_bulkjob
495 id_bulkjob = prc2color(org_myrank)
498 deallocate( prc2color )
499 deallocate( prc2key )
503 log_error(
"PRC_MPIsplit",*)
"REQUESTED DOMAIN NUMBER IS NOT ACCEPTABLE"
523 integer,
intent(in) :: org_comm_world
524 integer,
intent(in) :: num_domain
525 integer,
intent(in) :: prc_domain(:)
526 logical,
intent(in) :: debug
527 logical,
intent(in) :: color_reorder
528 integer,
intent(out) :: sub_comm_world
529 integer,
intent(out) :: id_domain
531 integer :: org_myrank
535 integer,
allocatable :: prc2color (:)
536 integer,
allocatable :: prc2key (:)
543 integer :: itag, ierr
546 if ( num_domain == 1 )
then
548 sub_comm_world = org_comm_world
552 elseif( num_domain > 1 )
then
554 call mpi_comm_rank(org_comm_world,org_myrank,ierr)
555 call mpi_comm_size(org_comm_world,org_nrank ,ierr)
557 sum_nrank = sum(prc_domain(1:num_domain))
559 if ( sum_nrank /= org_nrank )
then
561 log_error(
"PRC_MPIsplit",*)
"MPI PROCESS NUMBER is INCONSISTENT"
562 log_error_cont(*)
" REQUESTED NPROCS = ", sum_nrank,
" LAUNCHED NPROCS = ", org_nrank
567 allocate( prc2color(0:org_nrank-1) )
568 allocate( prc2key(0:org_nrank-1) )
570 call prc_mpicoloring( org_comm_world, &
585 call mpi_comm_split( org_comm_world, &
586 prc2color(org_myrank), &
587 prc2key(org_myrank), &
591 id_domain = col_domain(prc2color(org_myrank))
596 write(*,*)
"INFO [PRC_MPIsplit] Inter-domain relationship information"
599 if ( col_parent(color) >= 0 )
then
600 write(*,
'(5x,A,I2.2)')
"Relationship No.", i
601 write(*,
'(5x,A,I2.2,A,A,I2.2)')
"Parent color: ", col_parent(color),
" <=> ", &
602 "Child color: ", color
610 if ( col_parent(color) >= 0 )
then
613 if ( prc2color(org_myrank) == col_parent(color) )
then
616 org_comm_world, col_master(color), &
619 elseif( prc2color(org_myrank) == color )
then
622 org_comm_world, col_master(col_parent(color)), &
627 call mpi_barrier(org_comm_world,ierr)
631 deallocate( prc2color )
632 deallocate( prc2key )
636 write(*,*)
"ERROR [PRC_MPIsplit] REQUESTED DOMAIN NUMBER IS NOT ACCEPTABLE"
646 subroutine prc_mpicoloring( &
662 integer,
intent(in) :: org_comm_world
663 integer,
intent(in) :: org_nrank
664 integer,
intent(in) :: num_subgroup
665 integer,
intent(in) :: prc_subgroup(:)
666 logical,
intent(in) :: color_reorder
667 logical,
intent(in) :: bulkjob
668 logical,
intent(in) :: debug
669 integer,
intent(out) :: prc2color (0:org_nrank-1)
670 integer,
intent(out) :: prc2key (0:org_nrank-1)
676 integer :: prc_reorder( num_subgroup)
677 integer :: dom2col ( num_subgroup)
678 integer :: col2dom (0:num_subgroup-1)
679 logical :: touch (0:num_subgroup-1)
684 integer :: i, domain, color, p
688 if ( color_reorder .AND. .NOT. bulkjob )
then
690 prc_reorder(1:num_subgroup) = prc_subgroup(1:num_subgroup)
691 call prc_sort_ascd( prc_reorder(1:num_subgroup), 1, num_subgroup )
694 do domain = 1, num_subgroup
695 do i = num_subgroup, 1, -1
697 if ( prc_subgroup(domain) == prc_reorder(i) .AND. ( .NOT. touch(color) ) )
then
698 dom2col(domain) = color
699 col2dom(color) = domain
700 touch(color) = .true.
708 do domain = 1, num_subgroup
710 dom2col(domain) = color
711 col2dom(color) = domain
719 if ( .NOT. bulkjob )
then
720 do i = 1, num_subgroup
724 if ( id_parent >= 1 .AND. id_parent <= num_subgroup )
then
725 col_parent(dom2col(i)) = dom2col(id_parent)
727 if ( id_child >= 1 .AND. id_child <= num_subgroup )
then
728 col_child(dom2col(i)) = dom2col(id_child)
732 write(*,
'(4(A,I2))') &
733 "DOMAIN: ", i,
", MY COL: ", dom2col(i),
", PARENT COL:", col_parent(i),
", CHILD COL:", col_child(i)
740 write(*,*)
'INFO [PRC_MPIcoloring] Domain information'
741 write(*,
'(5x,A,L2)')
'Reordering? : ', color_reorder
742 do i = 1, num_subgroup
744 domain = col2dom(color)
746 write(*,
'(5x,2(A,I2.2))')
"Order No. ", i,
" -> Domain No. ", domain
747 write(*,
'(5x,A,I5)')
"Number of process = ", prc_subgroup(domain)
748 write(*,
'(5x,A,I5)')
"Color of this domain = ", color
749 if ( col_parent(color) >= 0 )
then
750 write(*,
'(5x,A,I5)')
"Color of parent domain = ", col_parent(color)
752 write(*,
'(5x,A)' )
"Color of parent domain = no parent"
754 if ( col_child(color) >= 0 )
then
755 write(*,
'(5x,A,I5)')
"Color of child domain = ", col_child(color)
757 write(*,
'(5x,A)' )
"Color of child domain = no child"
765 domain = col2dom(color)
766 nprc = prc_subgroup(domain)
769 do p = 0, org_nrank-1
774 col_domain(color) = domain
775 col_master(color) = p
779 write(*,
'(5x,4(A,I5))') &
780 "PE:", p,
" COLOR:", prc2color(p),
" KEY:", prc2key(p),
" COL_master:", col_master(color)
784 if ( key >= nprc )
then
787 if ( color < num_subgroup )
then
788 domain = col2dom(color)
789 nprc = prc_subgroup(domain)
795 end subroutine prc_mpicoloring
799 recursive subroutine prc_sort_ascd(a, top, bottom)
801 integer,
intent(inout) :: a(:)
802 integer,
intent(in) :: top, bottom
803 integer :: i, j, cnt, trg
805 cnt = a( (top+bottom) / 2 )
808 do while ( a(i) > cnt )
811 do while ( cnt > a(j) )
815 trg = a(i); a(i) = a(j); a(j) = trg
819 if ( top < i-1 )
call prc_sort_ascd( a, top, i-1 )
820 if ( j+1 < bottom )
call prc_sort_ascd( a, j+1, bottom )
822 end subroutine prc_sort_ascd
848 time = real(mpi_wtime(), kind=
dp)
866 real(
dp),
intent(out) :: avgvar(:)
867 real(
dp),
intent(out) :: maxvar(:)
868 real(
dp),
intent(out) :: minvar(:)
869 integer,
intent(out) :: maxidx(:)
870 integer,
intent(out) :: minidx(:)
871 real(
dp),
intent(in) :: var(:)
873 real(
dp),
allocatable :: statval(:,:)
884 statval(:,:) = 0.0_dp
892 call mpi_bcast( statval(1,p), &
894 mpi_double_precision, &
903 totalvar = totalvar + statval(v,p)
907 maxvar(v) = maxval(statval(v,:))
908 minvar(v) = minval(statval(v,:))
909 maxidx(v:v) = maxloc(statval(v,:))
910 minidx(v:v) = minloc(statval(v,:))
913 deallocate( statval )
920 subroutine prc_mpi_errorhandler( &
929 character(len=MPI_MAX_ERROR_STRING) :: msg
932 logical :: sign_status
940 sign_status = .false.
942 do while ( .NOT. sign_exit )
951 log_progress(*)
'abort MPI'
964 if ( errcode == prc_abort_code )
then
966 elseif( errcode <= mpi_err_lastcode )
then
967 call mpi_error_string(errcode, msg, len, ierr)
969 write(*,*)
'+++++ ', errcode, trim(msg)
971 if(
io_l )
write(
io_fid_log,*)
'+++++ Unexpected error code', errcode
972 write(*,*)
'+++++ Unexpected error code', errcode
977 write(*,*)
'+++++ Unexpected communicator'
983 if (
associated( prc_file_closer ) )
call prc_file_closer( .true. )
998 end subroutine prc_mpi_errorhandler
1001 procedure(closer) :: routine
1003 prc_file_closer => routine
logical, public prc_universal_ismaster
master process in universal communicator?
subroutine, public prc_abort
Abort Process.
subroutine, public prc_universal_setup(comm, nprocs, myrank, ismaster)
setup MPI in universal communicator
integer, public prc_abort_handler
error handler communicator for aborting
subroutine, public prc_mpisplit_bulk(ORG_COMM_WORLD, NUM_BULKJOB, PRC_BULKJOB, debug, SUB_COMM_WORLD, ID_BULKJOB)
MPI Communicator Split (bulk job)
subroutine, public prc_global_setup(abortall, comm)
setup MPI in global communicator
subroutine, public prc_set_file_closer(routine)
integer, public prc_local_comm_world
local communicator
subroutine, public prc_mpisplit_nest(ORG_COMM_WORLD, NUM_DOMAIN, PRC_DOMAIN, debug, color_reorder, SUB_COMM_WORLD, ID_DOMAIN)
MPI Communicator Split (nesting)
subroutine, public prc_mpistart(comm)
Start MPI.
integer, public prc_myrank
myrank in local communicator
subroutine, public fpm_polling(run_stat, stop_signal)
Main system of FPM.
logical, public prc_mpi_alive
MPI is alive?
integer, public prc_universal_myrank
myrank in universal communicator
subroutine, public prc_local_setup(comm, myrank, ismaster)
Setup MPI in local communicator.
integer, public prc_universal_jobid
my job ID in universal communicator
subroutine, public prc_mpibarrier
Barrier MPI.
integer, parameter, public prc_comm_null
integer, parameter, public prc_masterrank
master process in each communicator
logical, public prc_global_ismaster
master process in global communicator?
subroutine, public prc_errhandler_setup(use_fpm, master)
Setup MPI error handler.
integer, public io_fid_log
Log file ID.
integer, dimension(prc_domain_nlim), public prc_global_root
root processes in global members
integer, public prc_global_myrank
myrank in global communicator
integer, parameter, public prc_domain_nlim
max depth of domains
integer, public prc_universal_comm_world
original communicator
integer, parameter, public dp
integer, public prc_global_domainid
my domain ID in global communicator
integer, public prc_abort_comm_world
communicator for aborting
logical, public fpm_alive
integer, public prc_intercomm_child
communicator between this rank and child domain
integer, public prc_nprocs
process num in local communicator
integer, public prc_intercomm_parent
communicator between this rank and parent domain
integer, public prc_universal_handler
error handler in universal communicator
logical, public io_l
output log or not? (this process)
subroutine, public prc_mpitimestat(avgvar, maxvar, minvar, maxidx, minidx, var)
Calc global statistics for timer.
subroutine, public prc_singlecom_setup(comm, nprocs, myrank, ismaster)
Setup MPI single communicator (not use universal-global-local setting)
integer, parameter, public io_fid_stdout
integer, public prc_global_comm_world
global communicator
subroutine, public sigvars_get_all(master)
Get signal values.
real(dp) function, public prc_mpitime()
Get MPI time.
subroutine, public prc_mpifinish
Stop MPI peacefully.
integer, public prc_universal_nprocs
process num in universal communicator
integer, public prc_global_nprocs
process num in global communicator
integer(c_int), public sigint
integer, public io_fid_conf
Config file ID.
logical, public prc_ismaster
master process in local communicator?