diff --git a/Machines/cheyenne_modules b/Machines/cheyenne_modules
index bc61be8a..9d95165e 100755
--- a/Machines/cheyenne_modules
+++ b/Machines/cheyenne_modules
@@ -6,7 +6,7 @@ module load python/2.7.14
module load intel/17.0.1
module load ncarenv
module load ncarcompilers
-module load mpt/2.15f
+module load mpt/2.19
module load netcdf/4.6.1
module load nco/4.7.4
module load ncl/6.4.0
diff --git a/Machines/machine_postprocess.xml b/Machines/machine_postprocess.xml
index 795eb273..3e64b060 100644
--- a/Machines/machine_postprocess.xml
+++ b/Machines/machine_postprocess.xml
@@ -18,7 +18,6 @@
module purge
- module load python/2.7.14
module load intel/17.0.1
module load ncarenv
module load ncarcompilers
@@ -140,11 +139,10 @@
module purge
- module load python/2.7.14
module load intel/17.0.1
module load ncarenv
module load ncarcompilers
- module load mpt/2.15f
+ module load mpt/2.19
module load netcdf/4.6.1
module load nco/4.7.4
module load ncl/6.4.0
diff --git a/Tools/ration.log b/Tools/ration.log
deleted file mode 100644
index 06a39061..00000000
--- a/Tools/ration.log
+++ /dev/null
@@ -1,31 +0,0 @@
-2/4: Recvd 0
-2/4: Recvd 3
-2/4: Recvd 6
-2/4: Recvd 9
-2/4: Recvd None
-2/4: Out of loop
-0/4: Sent 0
-0/4: Sent 1
-0/4: Sent 2
-0/4: Sent 3
-0/4: Sent 4
-0/4: Sent 5
-0/4: Sent 6
-0/4: Sent 7
-0/4: Sent 8
-0/4: Sent 9
-0/4: Sent None
-0/4: Sent None
-0/4: Sent None
-0/4: Out of loop
-Done.
-1/4: Recvd 1
-1/4: Recvd 4
-1/4: Recvd 7
-1/4: Recvd None
-1/4: Out of loop
-3/4: Recvd 2
-3/4: Recvd 5
-3/4: Recvd 8
-3/4: Recvd None
-3/4: Out of loop
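The deleted log above records a four-rank manager/worker "ration" run: rank 0 deals ten work items to the three workers and then sends each one a None sentinel. Below is a minimal mpi4py sketch of that pattern, a hedged stand-in for ration_test.py (whose source is not shown in this diff); the exact rank-to-item ordering is illustrative.

```python
#!/usr/bin/env python2
# Sketch of the manager/worker "ration" pattern implied by ration.log.
# Run with e.g.: mpiexec -n 4 python2 ration_sketch.py
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank, size = comm.Get_rank(), comm.Get_size()

if rank == 0:
    # Manager: deal items round-robin, then one None sentinel per worker.
    for i, item in enumerate(list(range(10)) + [None] * (size - 1)):
        comm.send(item, dest=(i % (size - 1)) + 1)
        print('%d/%d: Sent %s' % (rank, size, item))
    print('%d/%d: Out of loop' % (rank, size))
    print('Done.')
else:
    # Worker: receive until the None sentinel arrives.
    while True:
        item = comm.recv(source=0)
        print('%d/%d: Recvd %s' % (rank, size, item))
        if item is None:
            break
    print('%d/%d: Out of loop' % (rank, size))
```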
diff --git a/Tools/ration_script b/Tools/ration_script
index 1e38b2b2..684e2f9d 100755
--- a/Tools/ration_script
+++ b/Tools/ration_script
@@ -1,28 +1,35 @@
-#! /usr/bin/env bash
+#!/usr/bin/env bash
#
# template to activate the virtualenv, call post process program, deactivate virtualenv
#
-#BSUB -n 4
-#BSUB -R "span[ptile=2]"
-#BSUB -q geyser
-#BSUB -N
-#BSUB -a poe
-#BSUB -J ration_script
-#BSUB -W 00:02
-#BSUB -P P93300606
+#PBS -N ration
+#PBS -q regular
+#PBS -l select=1:ncpus=8:mpiprocs=8
+#PBS -l walltime=00:02:00
+#PBS -A P93300606
-. /glade/apps/opt/lmod/lmod/init/bash
+source /etc/profile.d/modules.sh
-export MP_LABELIO=yes
+export MPI_UNBUFFERED_STDIO=true
+export TMPDIR=$TMPDIR  # no-op as written; point TMPDIR at a job scratch directory if needed
-module load python/2.7.7
+module purge
-. /glade/p/work/aliceb/sandboxes/dev/postprocessing/cesm-env2/bin/activate
+## activate the virtualenv that contains all the non-bootstrapped dependencies
+cd /glade/work/aliceb/sandboxes/dev/postprocessing_new/cesm-env2/bin
+echo "Running from virtualenv directory:"
+pwd
+. activate
-module load mpi4py/2.0.0
+## load the bootstrap modules
+##module load python/2.7.14
+module load intel/17.0.1
+module load ncarenv
+module load ncarcompilers
+module load mpt/2.19
-mpirun.lsf ./ration_example.py >> ./ration.log
+mpiexec_mpt dplace -s 1 /glade/work/aliceb/sandboxes/dev/postprocessing_new/Tools/ration_test.py >> /glade/work/aliceb/sandboxes/dev/postprocessing_new/Tools/ration.log
status=$?
echo $status
diff --git a/Tools/ration_script_geyser b/Tools/ration_script_dav
similarity index 57%
rename from Tools/ration_script_geyser
rename to Tools/ration_script_dav
index 95da5860..50f5c470 100755
--- a/Tools/ration_script_geyser
+++ b/Tools/ration_script_dav
@@ -1,24 +1,27 @@
#!/bin/bash -l
-## test the mpi4py and ASAPPyTools utility on geyser with ncar_pylib virtualenv
+## test the mpi4py and ASAPPyTools utilities on DAV with the ncar_pylib virtualenv
-#SBATCH -t 00:05:00
#SBATCH -n 4
#SBATCH -N 2
#SBATCH --ntasks-per-node=2
+#SBATCH -t 00:05:00
#SBATCH -p dav
#SBATCH -J ration_test
#SBATCH -A P93300606
-#SBATCH -C geyser
#SBATCH --mem 1G
#SBATCH -e ration_test.err.%J
#SBATCH -o ration_test.out.%J
-export MP_LABELIO=yes
-
+module purge
module load python/2.7.14
+module load intel/17.0.1
+module load ncarenv
+module load ncarcompilers
+module load impi
+
-. /glade2/work/aliceb/sandboxes/dev/postprocessing_geyser/cesm-env2/bin/activate
+. /gpfs/fs1/work/aliceb/sandboxes/dev/postprocessing_dav/cesm-env2/bin/activate
srun ./ration_test.py >> ./ration.log
diff --git a/Tools/ration_test.py b/Tools/ration_test.py
index 649c599d..7772467d 100755
--- a/Tools/ration_test.py
+++ b/Tools/ration_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2
import sys
try:
diff --git a/diagnostics/diagnostics/ocn/ocn_avg_generator.py b/diagnostics/diagnostics/ocn/ocn_avg_generator.py
index 33023324..ccafcf84 100644
--- a/diagnostics/diagnostics/ocn/ocn_avg_generator.py
+++ b/diagnostics/diagnostics/ocn/ocn_avg_generator.py
@@ -358,7 +358,7 @@ def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries
if len(averageListMoc) > 0:
# call the pyAverager with the inVarList
if 'MOC' in inVarList:
- tmpInVarList = ['MOC']
+ tmpInVarList = ['MOC', 'SALT', 'TEMP']
else:
tmpInVarList = ['SALT', 'TEMP']
if main_comm.is_manager():
diff --git a/timeseries/timeseries/cesm_tseries_generator.py b/timeseries/timeseries/cesm_tseries_generator.py
index 963337be..015b5405 100755
--- a/timeseries/timeseries/cesm_tseries_generator.py
+++ b/timeseries/timeseries/cesm_tseries_generator.py
@@ -184,7 +184,7 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone
log[comp+stream] = {'slices':[],'index':0}
ts_log_dates = log[comp+stream]['slices']
index = log[comp+stream]['index']
- files,dates,index = chunking.get_chunks(tper, index, size_n, stream_dates, ts_log_dates, cal, units, completechunk)
+ files,dates,index = chunking.get_chunks(tper, index, size_n, stream_dates, ts_log_dates, cal, units, completechunk, tseries_tper)
for d in dates:
log[comp+stream]['slices'].append(float(d))
log[comp+stream]['index']=index
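get_chunks now takes a trailing tseries_tper argument so the chunker can fall back to the XML-declared output frequency when a history file lacks the time_period_freq global attribute. A hypothetical call, with every value below made up for illustration:

```python
# Hypothetical values; in the generator these come from the XML spec
# and the chunking log handled above.
tper = 'month_1'          # frequency detected from the stream's time axis
tseries_tper = 'month_1'  # frequency declared for the stream in the XML
files, dates, index = chunking.get_chunks(
    tper, index, size_n, stream_dates, ts_log_dates,
    cal, units, completechunk, tseries_tper)
```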
diff --git a/timeseries/timeseries/chunking.py b/timeseries/timeseries/chunking.py
index e7a99447..6d227740 100755
--- a/timeseries/timeseries/chunking.py
+++ b/timeseries/timeseries/chunking.py
@@ -13,7 +13,6 @@ def num2date(time_value, unit, calendar):
time_value = int(round(time_value))
if ('common_year' in unit):
my_unit = unit.replace('common_year', 'day')
-## my_time_value = time_value * 365
my_time_value = int(round(time_value)) * 365
else:
my_unit = unit
@@ -130,7 +129,7 @@ def get_input_dates(glob_str, comm, rank, size):
comm.sync()
return g_stream_dates,g_file_slices,calendar.lower(),units,time_period_freq
-def get_cesm_date(fn,t=None):
+def get_cesm_date(fn,tseries_tper,t=None):
'''
Open a netcdf file and return its datestamp
@@ -156,19 +155,33 @@
# for the first lnd and rof file
if ( -1.0 < d < 0.0):
d = 0
- if t == 'bb':
+ elif t == 'bb':
d = f.variables[att['bounds']][0][0]
# for the first lnd and rof file
if ( -1.0 < d < 0.0):
d = 0
elif(d > 1):
- d = d = f.variables[att['bounds']][0][1]
+ d = f.variables[att['bounds']][0][1]
elif t == 'e':
l = len(f.variables[att['bounds']])
d = (f.variables[att['bounds']][l-1][1])-1
elif t == 'ee':
l = len(f.variables[att['bounds']])
d = (f.variables[att['bounds']][l-1][1])
+
+ # fall back to the XML-configured tseries_tper when the global
+ # attribute time_period_freq does not exist in the nc file
+ if 'time_period_freq' in f.ncattrs():
+ freq = f.time_period_freq
+ else:
+ freq = tseries_tper
+ if 'month' in freq:
+ if t == 'bb' or t == 'b':
+ d = (f.variables[att['bounds']][0][0] + f.variables[att['bounds']][0][1]) / 2
+ if t == 'ee' or t == 'e':
+ l = len(f.variables[att['bounds']])
+ d = (f.variables[att['bounds']][l-1][0] + f.variables[att['bounds']][l-1][1]) / 2
+
else:
# problem if time has only one value when units are common_year
try:
@@ -176,8 +191,6 @@ def get_cesm_date(fn,t=None):
except:
d = f.variables['time'][0]
-
-## d1 = cf_units.num2date(d,att['units'],att['calendar'].lower())
d1 = num2date(d,att['units'],att['calendar'].lower())
f.close()
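For monthly data, the new branch stamps chunk boundaries at the midpoint of the time-bounds interval instead of at a bound. A worked example with hypothetical numbers, assuming units of days and a single January average with bounds [0, 31]:

```python
# Hypothetical time bounds for one January monthly average.
bounds = [[0.0, 31.0]]
start = (bounds[0][0] + bounds[0][1]) / 2   # 15.5 -> mid-January
# Using bounds[0][1] (day 31) instead would stamp the slice as February 1.
```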
@@ -202,7 +215,6 @@ def get_chunk_range(tper, size, start, cal, units):
'''
# Get the first date
-## d1 = cf_units.num2date(start, units, cal)
d1 = num2date(start, units, cal)
# Figure out how many days each chunk should be
@@ -222,17 +234,15 @@ def get_chunk_range(tper, size, start, cal, units):
y2 = y2 + 1
m2 = m2 - 12
d2 = datetime.datetime(y2, m2, d1.day, d1.hour, d1.minute)
-## end = cf_units.date2num(d2, units, cal)
end = date2num(d2, units, cal)
elif 'year' in tper: #year
d2 = datetime.datetime(int(size)+d1.year, d1.month, d1.day, d1.hour, d1.minute)
-## end = cf_units.date2num(d2, units, cal)
end = date2num(d2, units, cal)
return start, end
-def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
+def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s, tseries_tper):
'''
Figure out what chunks there are to do for a particular CESM output stream
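For reference, the surviving month/year arithmetic in get_chunk_range rolls an overflowing month into the next year before converting back to a numeric date. A small worked sketch with hypothetical inputs:

```python
import datetime

# Hypothetical: a chunk starting 0001-02-01 with size = 12 months.
d1 = datetime.datetime(1, 2, 1, 0, 0)
size = 12
y2, m2 = d1.year, d1.month + int(size)  # month 14 overflows the year
if m2 > 12:
    y2 = y2 + 1
    m2 = m2 - 12
d2 = datetime.datetime(y2, m2, d1.day, d1.hour, d1.minute)
print(d2)  # 0002-02-01 00:00:00
```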
@@ -249,6 +259,7 @@ def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
units(string) - the units to use to figure out chunk size
s(string) - flag to determine if we need to wait until we have all data before we create a chunk or
if it's okay to do an incomplete chunk
+ tseries_tper(string) - the time_period_freq read from the XML configuration rather than from the nc file
Output:
files(dictionary) - keys->chunk, values->a list of all files needed for this chunk and the start and end dates
@@ -307,10 +318,10 @@ def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
files[chunk_n] = {}
files[chunk_n]['fn'] = sorted(cfiles)
if chunk_n > 0:
- files[chunk_n]['start'] = get_cesm_date(cfiles[0],t='bb')
+ files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='bb')
else:
- files[chunk_n]['start'] = get_cesm_date(cfiles[0],t='b')
- files[chunk_n]['end'] = get_cesm_date(cfiles[-1],t='e')
+ files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='b')
+ files[chunk_n]['end'] = get_cesm_date(cfiles[-1],tseries_tper,t='e')
for cd in sorted(cdates):
dates.append(cd)
e = True
@@ -319,8 +330,11 @@ def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
files[chunk_n] = {}
s_cdates = sorted(cdates)
files[chunk_n]['fn'] = sorted(cfiles)
- files[chunk_n]['start'] = get_cesm_date(cfiles[0],t='bb')
- files[chunk_n]['end'] = get_cesm_date(cfiles[-1],t='ee')
+ if chunk_n > 0:
+ files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='bb')
+ else:
+ files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='b')
+ files[chunk_n]['end'] = get_cesm_date(cfiles[-1],tseries_tper,t='ee')
for cd in sorted(cdates):
dates.append(cd)
chunk_n = chunk_n+1
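With this change the incomplete-chunk branch mirrors the complete-chunk branch: only a stream's first chunk stamps its start with t='b', later chunks use t='bb', and an incomplete chunk ends with t='ee' so it can be extended on a later pass. A summary of the flag conventions as inferred from the call sites and bounds arithmetic above (descriptive strings, not code from the repo):

```python
# Inferred t-flag conventions for get_cesm_date(); illustrative only.
T_FLAGS = {
    'b':  'first chunk start: lower bound of the first slice',
    'bb': 'later chunk start: lower bound (mid-interval for monthly data)',
    'e':  'complete chunk end: upper bound of the last slice minus one (in time units)',
    'ee': 'incomplete chunk end: upper bound of the last slice',
}
```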