generate-consolidation-scripts-comet.py - sphere - GPU-based 3D discrete element method algorithm with optional fluid coupling
git clone git://src.adamsgaard.dk/sphere
---
generate-consolidation-scripts-comet.py (3349B)
---
#!/usr/bin/env python

# Account and cluster information
# https://portal.xsede.org/sdsc-comet
# https://www.sdsc.edu/support/user_guides/comet.html
account = 'csd492'  # from `show_accounts`
jobname_prefix = 'cons-1e4-'
walltime = '2-0'  # hours:minutes:seconds or days-hours
partition = 'gpu-shared'
no_gpus = 1
no_nodes = 1
ntasks_per_node = 1
folder = '~/code/sphere/python'


# Simulation parameter values
effective_stresses = [10e3, 20e3, 100e3, 200e3, 1000e3, 2000e3]
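# Note: '{}'.format() renders these values as full floats, so e.g. 10e3
# produces the jobname 'cons-1e4-10000.0Pa' (see the loop at the bottom).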


# Script generating functions

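# Write a SLURM batch script that loads the required modules and runs the
# matching simulation script on a single GPU in the shared partition.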
def generate_slurm_script(jobname):

    script = '''#!/bin/bash
#SBATCH -A {account}
#SBATCH --job-name="{jobname}"
#SBATCH --output="{jobname}.%j.%N.out"
#SBATCH --time={walltime}
#SBATCH --partition={partition}
#SBATCH --gres=gpu:{no_gpus}
#SBATCH --nodes={no_nodes}
#SBATCH --ntasks-per-node={ntasks_per_node}
#SBATCH --export=ALL

echo Job start `whoami`@`hostname`, `date`
module load cmake
module load cuda/7.0
module load python
module load scipy

cd {folder}
python ./{jobname}.py

echo Job end `whoami`@`hostname`, `date`
'''.format(account=account,
           jobname=jobname,
           walltime=walltime,
           partition=partition,
           no_gpus=no_gpus,
           no_nodes=no_nodes,
           ntasks_per_node=ntasks_per_node,
           folder=folder)
    with open(jobname + '.sh', 'w') as f:
        f.write(script)


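# Same batch script, but resume a previously started job through
# continue_sim.py rather than launching the simulation from scratch.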
def generate_slurm_continue_script(jobname):

    script = '''#!/bin/bash
#SBATCH -A {account}
#SBATCH --job-name="{jobname}"
#SBATCH --output="{jobname}.%j.%N.out"
#SBATCH --time={walltime}
#SBATCH --partition={partition}
#SBATCH --gres=gpu:{no_gpus}
#SBATCH --nodes={no_nodes}
#SBATCH --ntasks-per-node={ntasks_per_node}
#SBATCH --export=ALL

echo Job start `whoami`@`hostname`, `date`
module load cmake
module load cuda/7.0
module load python
module load scipy

cd {folder}
python ./continue_sim.py {jobname} 0

echo Job end `whoami`@`hostname`, `date`
'''.format(account=account,
           jobname=jobname,
           walltime=walltime,
           partition=partition,
           no_gpus=no_gpus,
           no_nodes=no_nodes,
           ntasks_per_node=ntasks_per_node,
           folder=folder)
    with open(jobname + '-cont.sh', 'w') as f:
        f.write(script)


# Generate scripts for sphere
def generate_simulation_script(jobname, effective_stress):

    script = '''#!/usr/bin/env python
import sphere

cons = sphere.sim('init-1e4')
cons.readlast()
cons.id('{jobname}')

cons.periodicBoundariesXY()

cons.setStiffnessNormal(1.16e7)
cons.setStiffnessTangential(1.16e7)
cons.setStaticFriction(0.5)
cons.setDynamicFriction(0.5)
cons.setDampingNormal(0.0)
cons.setDampingTangential(0.0)

cons.consolidate(normal_stress={effective_stress})
cons.initTemporal(total=3.0, epsilon=0.07)

cons.run(dry=True)
cons.run(device=0)

cons.visualize('energy')
cons.visualize('walls')
'''.format(jobname=jobname,
           effective_stress=effective_stress)

    with open(jobname + '.py', 'w') as f:
        f.write(script)


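# Each generated simulation script reloads the last state of 'init-1e4',
# applies the chosen consolidation stress under periodic x-y boundaries, and
# runs on GPU device 0; run(dry=True) reports the setup without computing.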
# Generate scripts
for effective_stress in effective_stresses:

    jobname = jobname_prefix + '{}Pa'.format(effective_stress)

    print(jobname)

    # Generate scripts for slurm, submit with `sbatch <script>`
    generate_slurm_script(jobname)

    generate_slurm_continue_script(jobname)

    generate_simulation_script(jobname, effective_stress)
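# After running this generator, submit each consolidation job with e.g.
#   for sh in cons-1e4-*Pa.sh; do sbatch "$sh"; done
# and resume an interrupted job with `sbatch <jobname>-cont.sh`.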