-
Notifications
You must be signed in to change notification settings - Fork 3
/
fix_ocean_restarts.py
executable file
·77 lines (60 loc) · 2.66 KB
/
fix_ocean_restarts.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
#!/usr/bin/env python
import sys
import os
import argparse
import shutil
import numpy as np
import netCDF4 as nc
import multiprocessing as mp
from glob import glob
"""
If the topog.nc file is changed and there are new wet or dry points then
restarts will need to be fixed up. This script helps to do this. The steps to
creating/updating restarts for the new bathymetry are as follows:
1. Do a short run from rest using the new bathymetry file.
2. Collate the above.
3. Run this script using the collated outputs from 2. as the template
Presently it only works on the 0.1 deg with 75 levels.
"""
def copy_data(args):
    """Copy ocean restart fields from an old-bathymetry restart file into a
    new-bathymetry template file, level by level.

    Only variables with the full 0.1 deg, 75-level shape (1, 75, 2700, 3600)
    are processed. For each level, data is copied only at points that are
    wet (unmasked) in both the old and the new grids; points that are newly
    wet keep the template's from-rest values.

    args: 2-tuple (in_f, out_f) of file paths — a tuple because this is
          invoked via multiprocessing.Pool.map over zipped path lists.
    """
    in_f, out_f = args
    print('Processing file {}'.format(out_f))

    with nc.Dataset(in_f) as in_fp, nc.Dataset(out_f, 'r+') as out_fp:
        for var in in_fp.variables:
            # Skip anything that is not a full 3D ocean field on this grid.
            if in_fp.variables[var].shape != (1, 75, 2700, 3600):
                continue

            print('Processing var {}'.format(var))
            # Copy over level by level to save memory.
            for level in range(75):
                in_data = in_fp.variables[var][0, level, :, :]
                out_data = out_fp.variables[var][0, level, :, :]
                # Wet in both old and new grids.
                mask = np.logical_and(np.logical_not(in_data.mask),
                                      np.logical_not(out_data.mask))
                out_data[np.where(mask)] = in_data[np.where(mask)]
                # BUG FIX: slicing a netCDF4 variable returns an in-memory
                # copy, so mutating out_data alone never reaches the file.
                # The modified slice must be assigned back explicitly.
                out_fp.variables[var][0, level, :, :] = out_data
def main():
    """Build new restarts by copying template files to the output directory
    and overwriting their wet points with data from the old restarts.

    Workflow: find collated '*.res.nc' files in template_dir, copy each to
    output_dir, then fix each copy up in parallel from the matching file in
    old_dir (matched by basename, so old_dir must contain the same files).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("template_dir",
                        help="Name of directory containing collated restart files from a run using the new bathymetry (typically a short run from rest).")
    parser.add_argument("old_dir",
                        help="Name of directory containing collated restart files from a run using the old bathymetry.")
    parser.add_argument("output_dir",
                        help="Name of the output directory which will contain new restarts.")
    parser.add_argument("--nprocs", default=1, type=int,
                        help="Number of processes to use.")
    args = parser.parse_args()

    template_files = sorted(glob(os.path.join(args.template_dir, '*.res.nc')))
    # Old and output paths are matched to templates by basename.
    basenames = [os.path.basename(f) for f in template_files]
    old_files = [os.path.join(args.old_dir, b) for b in basenames]
    output_files = [os.path.join(args.output_dir, b) for b in basenames]

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(args.output_dir, exist_ok=True)

    # Seed the output with the new-bathymetry templates; copy_data then
    # overwrites their common wet points in place.
    for tf, of in zip(template_files, output_files):
        print('Copying', tf, 'to', of)
        shutil.copy(tf, of)

    # Close and join the pool so worker processes are not leaked
    # (the original created the pool and never shut it down).
    pool = mp.Pool(args.nprocs)
    try:
        pool.map(copy_data, zip(old_files, output_files))
    finally:
        pool.close()
        pool.join()
# Standard script entry point; main() returns None, so the exit status is 0.
if __name__ == '__main__':
    sys.exit(main())