!>
!! @file m_mpi_proxy.fpp
!! @brief Contains module m_mpi_proxy

!> @brief Broadcasts user inputs and decomposes the domain across MPI ranks for pre-processing
module m_mpi_proxy

#ifdef MFC_MPI
    use mpi                   !< Message passing interface (MPI) module
#endif

    use m_helper

    use m_derived_types       !< Definitions of the derived types

    use m_global_parameters   !< Global parameters for the code

    use m_mpi_common

    implicit none

contains
    !> Since only the processor with rank 0 is in charge of reading
    !! and checking the consistency of the user-provided inputs,
    !! these are not available to the remaining processors. This
    !! subroutine is therefore in charge of broadcasting the
    !! required information.
    impure subroutine s_mpi_bcast_user_inputs()

#ifdef MFC_MPI

        ! Generic loop iterators
        integer :: i, j

        ! Generic flag used to identify and report MPI errors
        integer :: ierr
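
        ! Each fypp `#:for` block below is expanded at preprocessing time
        ! into one MPI_BCAST call per listed name; the first entry of the
        ! integer list below, for instance, becomes
        !     call MPI_BCAST(t_step_old, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)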

        ! Logistics
        call MPI_BCAST(case_dir, len(case_dir), MPI_CHARACTER, 0, MPI_COMM_WORLD, ierr)
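
        ! Scalar integer inputs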
        #:for VAR in ['t_step_old', 't_step_start', 'm', 'n', 'p', 'm_glb', 'n_glb', 'p_glb', &
            & 'loops_x', 'loops_y', 'loops_z', 'model_eqns', 'num_fluids', &
            & 'weno_order', 'precision', 'perturb_flow_fluid', &
            & 'perturb_sph_fluid', 'num_patches', 'thermal', 'nb', 'dist_type', &
            & 'relax_model', 'num_ibs', 'n_start', 'elliptic_smoothing_iters', &
            & 'num_bc_patches', 'mixlayer_perturb_nk', 'recon_type', &
            & 'muscl_order', 'igr_order']
            call MPI_BCAST(${VAR}$, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
        #:endfor
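
        ! Scalar logical flags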
        #:for VAR in ['old_grid', 'old_ic', 'stretch_x', 'stretch_y', 'stretch_z', &
            & 'cyl_coord', 'mpp_lim', 'hypoelasticity', 'relax', 'parallel_io', &
            & 'perturb_flow', 'perturb_sph', 'mixlayer_vel_profile', &
            & 'mixlayer_perturb', 'bubbles_euler', 'polytropic', 'polydisperse', &
            & 'qbmm', 'file_per_process', 'adv_n', 'ib', 'cfl_adap_dt', &
            & 'cfl_const_dt', 'cfl_dt', 'surface_tension', &
            & 'hyperelasticity', 'pre_stress', 'elliptic_smoothing', 'viscous', &
            & 'bubbles_lagrange', 'bc_io', 'mhd', 'relativity', 'cont_damage', &
            & 'igr', 'down_sample', 'simplex_perturb', 'fft_wrt', 'hyper_cleaning']
            call MPI_BCAST(${VAR}$, 1, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
        #:endfor
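
        ! Real-valued inputs; mpi_p is the MPI datatype corresponding to
        ! the working real precision of the code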
        call MPI_BCAST(fluid_rho(1), num_fluids_max, mpi_p, 0, MPI_COMM_WORLD, ierr)

        #:for VAR in ['x_domain%beg', 'x_domain%end', 'y_domain%beg', &
            & 'y_domain%end', 'z_domain%beg', 'z_domain%end', 'a_x', 'a_y', &
            & 'a_z', 'x_a', 'x_b', 'y_a', 'y_b', 'z_a', 'z_b', 'bc_x%beg', &
            & 'bc_x%end', 'bc_y%beg', 'bc_y%end', 'bc_z%beg', 'bc_z%end', &
            & 'perturb_flow_mag', 'pref', 'rhoref', 'poly_sigma', 'R0ref', &
            & 'Web', 'Ca', 'Re_inv', 'sigR', 'sigV', 'rhoRV', 'palpha_eps', &
            & 'ptgalpha_eps', 'sigma', 'pi_fac', 'mixlayer_vel_coef', 'Bx0', &
            & 'mixlayer_perturb_k0']
            call MPI_BCAST(${VAR}$, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
        #:endfor
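
        ! Boundary-condition patch parameters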
        do i = 1, num_bc_patches_max
            #:for VAR in ['geometry', 'type', 'dir', 'loc']
                call MPI_BCAST(patch_bc(i)%${VAR}$, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            #:for VAR in ['vel', 'angular_vel', 'angles']
                call MPI_BCAST(patch_ib(i)%${VAR}$, 3, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            call MPI_BCAST(patch_bc(i)%radius, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:for VAR in ['centroid', 'length']
                call MPI_BCAST(patch_bc(i)%${VAR}$, size(patch_bc(i)%${VAR}$), mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
        end do
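
        ! Initial-condition and immersed-boundary patch parameters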
        do i = 1, num_patches_max
            #:for VAR in ['geometry', 'smooth_patch_id']
                call MPI_BCAST(patch_icpp(i)%${VAR}$, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            call MPI_BCAST(patch_icpp(i)%smoothen, 1, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(patch_icpp(i)%non_axis_sym, 1, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(patch_icpp(i)%alter_patch(0), num_patches_max, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
            #:for VAR in ['x_centroid', 'y_centroid', 'z_centroid', &
                & 'length_x', 'length_y', 'length_z', 'radius', 'epsilon', &
                & 'beta', 'smooth_coeff', 'rho', 'p0', 'm0', 'r0', 'v0', &
                & 'pres', 'gamma', 'pi_inf', 'hcid', 'cv', 'qv', 'qvp', &
                & 'model_threshold', 'cf_val', 'Bx', 'By', 'Bz']
                call MPI_BCAST(patch_icpp(i)%${VAR}$, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            #:for VAR in ['2', '3', '4', '5', '6', '7', '8', '9']
                call MPI_BCAST(patch_icpp(i)%a(${VAR}$), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            call MPI_BCAST(patch_icpp(i)%model_filepath, len(patch_icpp(i)%model_filepath), MPI_CHARACTER, 0, MPI_COMM_WORLD, ierr)
            #:for VAR in ['model_translate', 'model_scale', 'model_rotate', &
                & 'normal', 'radii', 'vel', 'tau_e', 'alpha_rho', 'alpha']
                call MPI_BCAST(patch_icpp(i)%${VAR}$, size(patch_icpp(i)%${VAR}$), mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            call MPI_BCAST(patch_icpp(i)%model_spc, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)

            if (chemistry) then
                call MPI_BCAST(patch_icpp(i)%Y, size(patch_icpp(i)%Y), mpi_p, 0, MPI_COMM_WORLD, ierr)
            end if

            ! Broadcast IB variables
            call MPI_BCAST(patch_ib(i)%geometry, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(patch_ib(i)%model_filepath, len(patch_ib(i)%model_filepath), MPI_CHARACTER, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(patch_ib(i)%model_threshold, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(patch_ib(i)%model_spc, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
            #:for VAR in ['x_centroid', 'y_centroid', 'z_centroid', &
                & 'length_x', 'length_y', 'length_z', 'radius', 'c', 'p', 't', 'm', 'theta']
                call MPI_BCAST(patch_ib(i)%${VAR}$, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            call MPI_BCAST(patch_ib(i)%slip, 1, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
            #:for VAR in ['model_translate', 'model_scale', 'model_rotate']
                call MPI_BCAST(patch_ib(i)%${VAR}$, size(patch_ib(i)%${VAR}$), mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
        end do

        ! Simplex noise and fluid physical parameters
        do i = 1, num_fluids_max
            #:for VAR in ['gamma', 'pi_inf', 'G', 'cv', 'qv', 'qvp']
                call MPI_BCAST(fluid_pp(i)%${VAR}$, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
            call MPI_BCAST(simplex_params%perturb_dens(i), 1, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(simplex_params%perturb_dens_freq(i), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(simplex_params%perturb_dens_scale(i), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            do j = 1, 3
                call MPI_BCAST(simplex_params%perturb_dens_offset(i, j), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            end do
        end do

        ! Subgrid bubble parameters
        if (bubbles_euler .or. bubbles_lagrange) then
            #:for VAR in ['R0ref', 'p0ref', 'rho0ref', 'T0ref', &
                & 'ss', 'pv', 'vd', 'mu_l', 'mu_v', 'mu_g', 'gam_v', 'gam_g', &
                & 'M_v', 'M_g', 'k_v', 'k_g', 'cp_v', 'cp_g', 'R_v', 'R_g']
                call MPI_BCAST(bub_pp%${VAR}$, 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            #:endfor
        end if
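
        ! Simplex velocity perturbation parameters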
        do i = 1, 3
            call MPI_BCAST(simplex_params%perturb_vel(i), 1, MPI_LOGICAL, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(simplex_params%perturb_vel_freq(i), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            call MPI_BCAST(simplex_params%perturb_vel_scale(i), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            do j = 1, 3
                call MPI_BCAST(simplex_params%perturb_vel_offset(i, j), 1, mpi_p, 0, MPI_COMM_WORLD, ierr)
            end do
        end do

#endif

    end subroutine s_mpi_bcast_user_inputs

end module m_mpi_proxy