-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathmake coefficients native MPI.py
More file actions
160 lines (130 loc) · 5.06 KB
/
make coefficients native MPI.py
File metadata and controls
160 lines (130 loc) · 5.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
import os
import time # Used for timing the coefficient construction
import pyEXP
from mpi4py import MPI
from os.path import exists
#
# This script makes an HDF5 coefficient set from some EXP files in
# parallel using MPI. This can easily be adapted for whatever
# snapshots you have and could be run on a cluster
#
# Usage:
#
# Run command is: "mpirun -np N python3 make_coefficients_MPI.py"
# where N is the number of processes. For Slurm allocations, you can
# leave off "-np N" as usual.
#
if __name__ == "__main__":
    # Runtime parameters
    #
    h5file = 'Run1b_test_coefs'  # output name; pyEXP appends the '.h5' suffix
    beg_seq = 0                  # first snapshot index in the sequence
    end_seq = 200                # one past the last snapshot index
    nskip = 1                    # stride through the snapshot sequence

    # Get basic information about the MPI communicator
    #
    # NB: we are not using standard MPI commands on the Python side,
    # but invoking mpi4py initializes MPI so it can be used correctly
    # by the C++ routines
    #
    world_comm = MPI.COMM_WORLD
    world_size = world_comm.Get_size()
    my_rank = world_comm.Get_rank()

    # Just for info and to convince yourself that MPI is working
    #
    print("World size is {} and my rank is {}".format(world_size, my_rank))

    # Now switch to the working directory where my Gadget simulation
    # lives.  Change this to your working directory.
    #
    os.chdir('/nas/astro-th/weinberg/Nbody/SimpleHalo')

    # Make the spherical basis config.  NOTE(review): the mapping under
    # 'parameters' must be indented to be valid YAML; the original text
    # had lost its indentation.
    #
    halo_config = """
id : sphereSL
parameters :
  numr : 4000
  rmin : 0.0001
  rmax : 1.95
  Lmax : 6
  nmax : 20
  scale : 0.0667
  modelname : SLGridSph.model
"""

    # Construct the basis instance
    #
    halo_basis = pyEXP.basis.Basis.factory(halo_config)

    # Make the file list for the snapshot sequence, assumed to have the
    # format OUT.run1b.XXXXX.  Change as necessary.  'nskip' strides
    # through the sequence (the original defined it but never used it).
    #
    file_list = ['OUT.run1b.{:05d}'.format(i)
                 for i in range(beg_seq, end_seq, nskip)]

    # Construct batches of files for the particle reader.  One could use
    # parseStringList to create batches from a vector/list of files.  NB:
    # a std::vector in C++ becomes a Python list and vice versa.
    #
    batches = pyEXP.read.ParticleReader.parseStringList(file_list, '')

    # The coefficient container.  We start with a null instance and
    # construct it on the first successful file group below.  One could
    # instead read an existent HDF5 coefficient file and update stanzas.
    #
    coefs = None

    for group in batches:
        # Skip a file group with missing members without going belly
        # up.  (Gadget-2 seems to not be exactly sequential by 1
        # through a restart.)
        #
        if not all(exists(f) for f in group):
            continue

        if my_rank == 0:
            print("file group is", group)

        # Make the reader for the desired type.  One could probably try
        # to do this by inspection but that's another project.
        #
        reader = pyEXP.read.ParticleReader.createReader('', group, 0, False)

        # Print the type list
        #
        if my_rank == 0:
            print('The component names are:', reader.GetTypes())

        compname = 'dark'
        reader.SelectType(compname)
        if my_rank == 0:
            print('We selected:', compname)

        # Now compute the coefficients with the default center.
        # (The original set startTime twice; once is enough.)
        #
        startTime = time.time()
        coef = halo_basis.createFromReader(reader)

        if my_rank == 0:
            print('Created coefficients at Time {:5.3f} for {} particles '
                  'in {:4.2f} seconds'.
                  format(reader.CurrentTime(), reader.CurrentNumber(),
                         time.time() - startTime))

        # Construct the container on first use, then record the units.
        # BUG FIX: the original called coefs.setUnits() while coefs was
        # still None (AttributeError); makecoefs must come first.
        #
        if coefs is None:
            coefs = pyEXP.coefs.Coefs.makecoefs(coef, compname)
            coefs.setUnits([("length", "none", 1.0), ("mass", "none", 1.0),
                            ("time", "none", 1.0), ("G", "none", 1.0)])

        # Add the coefficients to the container
        #
        coefs.add(coef)

        if my_rank == 0:
            print('Added coef to container')
            print('-'*60)

    if my_rank == 0:
        print('\nCompleted the file group list\n')

        if coefs is None:
            # No snapshot file existed, so there is nothing to save
            print('No file groups were processed; nothing to write')
        else:
            print('The coefficient time list is', coefs.Times())

            # You can call the file something convenient.  The suffix
            # 'h5' will be appended by the library, so test existence
            # with the suffix attached (the original tested the bare
            # name, so the extend branch could never trigger).  Only
            # the root process writes the file.
            # NOTE(review): assumes ExtendH5Coefs/WriteH5Coefs are not
            # collective calls in pyEXP — confirm against the library.
            #
            if exists(h5file + '.h5'):
                coefs.ExtendH5Coefs(h5file)  # Update an existing HDF5
                print('Saved the coefficients to an existing HDF5 file')
            else:
                coefs.WriteH5Coefs(h5file)   # Create a new HDF5
                print('Saved the coefficients to a new HDF5 file')