FV3 diag table documentation ufs-community#2277 + SKEB fix with d_con = zero ufs-community#2374 (ufs-community#2278)

* UFSWM - python scripts for yaml and rocoto-xml conversion, experiment setup, and test log output
* Add python superlint option
* Documentation update: doc/UsersGuide/source/tables/fv3_diag_table.rst
* FV3
  * atmos_cubed_sphere - bugfix: allocates heat_source when skeb is True and d_con is zero
@@ -0,0 +1,28 @@
---

extends: default

rules:
  braces:
    level: warning
    max-spaces-inside: 1
  brackets:
    level: warning
    max-spaces-inside: 1
  colons:
    level: warning
  commas: disable
  comments: disable
  comments-indentation: disable
  document-start: disable
  empty-lines:
    level: warning
  hyphens:
    level: warning
  indentation:
    level: warning
    indent-sequences: consistent
  line-length: disable
  truthy: disable
  trailing-spaces: disable
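A quick way to exercise this lint configuration locally is through yamllint's Python API. The snippet below is a minimal sketch, not part of the commit; it assumes yamllint is installed and that the configuration above is saved as .yamllint (both file names here are assumptions).

# Minimal sketch: lint a YAML file against the configuration above.
# Assumes `pip install yamllint`; '.yamllint' and 'ufs_test.yaml' are
# illustrative file names, not taken from this diff.
from yamllint import linter
from yamllint.config import YamlLintConfig

conf = YamlLintConfig(file='.yamllint')
with open('ufs_test.yaml') as f:
    for problem in linter.run(f, conf):
        print(f"{problem.line}:{problem.column} {problem.level} "
              f"{problem.desc} ({problem.rule})")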
@@ -0,0 +1,13 @@
:orphan:

.. _fv3diagtable:

****************************
FV3 Weather Model Variables
****************************


.. csv-table::
   :file: fv3diagtable.csv
   :widths: 10, 15
   :header-rows: 1
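The csv-table directive above expects a two-column CSV with a single header row (:header-rows: 1). A hypothetical sketch of generating such a file follows; the row contents are placeholders, since the real fv3diagtable.csv is not rendered in this diff.

# Hypothetical sketch: write a two-column CSV consumable by the
# csv-table directive above. Rows are placeholders, not real entries.
import csv

rows = [
    ("Variable", "Description"),            # header row
    ("example_var", "illustrative entry"),  # placeholder data
]
with open("fv3diagtable.csv", "w", newline="") as f:
    csv.writer(f).writerows(rows)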
@@ -0,0 +1,104 @@
hera:
  QUEUE: batch
  COMPILE_QUEUE: batch
  PARTITION:
  dprefix: /scratch1/NCEPDEV
  DISKNM: /scratch2/NAGAPE/epic/UFS-WM_RT
  STMP: /scratch1/NCEPDEV/stmp4
  PTMP: /scratch1/NCEPDEV/stmp2
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /scratch2/NAGAPE/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /scratch2/NAGAPE/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /scratch2/NAGAPE/epic/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
orion:
  QUEUE: batch
  COMPILE_QUEUE: batch
  PARTITION: orion
  dprefix: /work/noaa/stmp/${USER}
  DISKNM: /work/noaa/epic/UFS-WM_RT
  STMP: /work/noaa/stmp/${USER}/stmp
  PTMP: /work/noaa/stmp/${USER}/stmp
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /work/noaa/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /work/noaa/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /work/noaa/epic/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
hercules:
  QUEUE: batch
  COMPILE_QUEUE: batch
  PARTITION: hercules
  dprefix: /work2/noaa/stmp/${USER}
  DISKNM: /work/noaa/epic/hercules/UFS-WM_RT
  STMP: /work2/noaa/stmp/${USER}
  PTMP: /work2/noaa/stmp/${USER}
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /work/noaa/epic/hercules/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /work/noaa/epic/hercules/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /work/noaa/epic/hercules/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
gaea:
  QUEUE: normal
  COMPILE_QUEUE: normal
  PARTITION: c5
  dprefix: /gpfs/f5/${ACCNR}/scratch/${USER}
  DISKNM: /gpfs/f5/epic/world-shared/UFS-WM_RT
  STMP: /gpfs/f5/${ACCNR}/scratch/${USER}/RT_BASELINE
  PTMP: /gpfs/f5/${ACCNR}/scratch/${USER}/RT_RUNDIR
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /gpfs/f5/epic/world-shared/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /gpfs/f5/epic/world-shared/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /gpfs/f5/epic/world-shared/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
jet:
  QUEUE: batch
  COMPILE_QUEUE: batch
  PARTITION: xjet
  dprefix: /mnt/lfs4/HFIP/hfv3gfs/${USER}
  DISKNM: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT
  STMP: /mnt/lfs4/HFIP/hfv3gfs/${USER}/RT_BASELINE
  PTMP: /mnt/lfs4/HFIP/hfv3gfs/${USER}/RT_RUNDIRS
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/BM_IC-20220207
derecho:
  QUEUE: main
  COMPILE_QUEUE: main
  PARTITION:
  dprefix: /glade/derecho/scratch
  DISKNM: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT
  STMP: /glade/derecho/scratch
  PTMP: /glade/derecho/scratch
  RUNDIR_ROOT:
  SCHEDULER: pbs
  INPUTDATA_ROOT: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/NEMSfv3gfs/BM_IC-20220207
noaacloud:
  QUEUE: batch
  COMPILE_QUEUE: batch
  PARTITION:
  dprefix: /lustre
  DISKNM: /contrib/ufs-weather-model/RT
  STMP: /lustre/stmp4
  PTMP: /lustre/stmp2
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /contrib/ufs-weather-model/RT/NEMSfv3gfs/BM_IC-20220207
s4:
  QUEUE: s4
  COMPILE_QUEUE: s4
  PARTITION: s4
  dprefix: /data/prod
  DISKNM: /data/prod/emc.nemspara/RT
  STMP: /scratch/short/users
  PTMP: /scratch/users
  RUNDIR_ROOT:
  SCHEDULER: slurm
  INPUTDATA_ROOT: /data/prod/emc.nemspara/RT/NEMSfv3gfs/input-data-20240501
  INPUTDATA_ROOT_WW3: /data/prod/emc.nemspara/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
  INPUTDATA_ROOT_BMIC: /data/prod/emc.nemspara/RT/NEMSfv3gfs/BM_IC-20220207
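These per-platform settings are read back as a plain mapping keyed by machine name. A minimal sketch of selecting the local machine's block, assuming the file is saved as baseline_setup.yaml (the name read by link_new_baselines() in ufs_test_utils.py later in this diff):

# Minimal sketch: look up this machine's settings. MACHINE_ID is assumed
# to be set in the environment (e.g. by detect_machine.sh).
import os
import yaml

with open("baseline_setup.yaml") as f:
    config = yaml.load(f, Loader=yaml.FullLoader)

machine = config[os.environ["MACHINE_ID"]]
print(machine["SCHEDULER"], machine["DISKNM"], machine["INPUTDATA_ROOT"])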
@@ -0,0 +1,209 @@
import os
import sys
import subprocess
import yaml
from datetime import datetime
from ufs_test_utils import get_testcase, write_logfile, delete_files, machine_check_off

def finish_log():
    """Collect regression test results and generate log file.
    """
    UFS_TEST_YAML = str(os.getenv('UFS_TEST_YAML'))
    PATHRT = os.getenv('PATHRT')
    MACHINE_ID = os.getenv('MACHINE_ID')
    REGRESSIONTEST_LOG = PATHRT+'/logs/RegressionTests_'+MACHINE_ID+'.log'
    filename = REGRESSIONTEST_LOG
    KEEP_RUNDIR = str(os.getenv('KEEP_RUNDIR'))
    ROCOTO = str(os.getenv('ROCOTO'))
    CREATE_BASELINE = str(os.getenv('CREATE_BASELINE'))
    COMPILE_ONLY = str(os.getenv('COMPILE_ONLY'))

    run_logs = f"""
"""
    COMPILE_PASS = 0
    COMPILE_NR = 0
    JOB_NR = 0
    PASS_NR = 0
    FAIL_NR = 0
    failed_list = []
    test_changes_list = PATHRT+'/test_changes.list'
    with open(UFS_TEST_YAML, 'r') as f:
        rt_yaml = yaml.load(f, Loader=yaml.FullLoader)
        for apps, jobs in rt_yaml.items():
            for key, val in jobs.items():
                if (str(key) == 'build'):
                    machine_check = machine_check_off(MACHINE_ID, val)
                    PASS_TESTS = False
                    if machine_check:
                        COMPILE_NR += 1
                        RT_COMPILER = val['compiler']
                        COMPILE_ID = apps
                        COMPILE_LOG = 'compile_'+COMPILE_ID+'.log'
                        COMPILE_LOG_TIME = 'compile_'+COMPILE_ID+'_timestamp.txt'
                        with open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG) as f:
                            if "[100%] Linking Fortran executable" in f.read():
                                COMPILE_PASS += 1
                                f.seek(0)
                                for line in f:
                                    if 'export RUNDIR_ROOT=' in line:
                                        RUNDIR_ROOT = line.split("=")[1]
                                        break
                                compile_err = RUNDIR_ROOT.strip('\n')+'/compile_'+COMPILE_ID+'/err'
                                with open(compile_err) as ferr:
                                    contents = ferr.read()
                                    count_warning = contents.count(": warning #")
                                    count_remarks = contents.count(": remark #")
                                ferr.close()
                                warning_log = ""
                                if count_warning > 0:
                                    warning_log = "("+str(count_warning)+" warnings"
                                if count_remarks > 0:
                                    warning_log += ","+str(count_remarks)+" remarks)"
                                flog = open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG_TIME)
                                timing_data = flog.read()
                                first_line = timing_data.split('\n', 1)[0]
                                etime = int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip())
                                btime = int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip())
                                etime_min, etime_sec = divmod(int(etime), 60)
                                etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
                                btime_min, btime_sec = divmod(int(btime), 60)
                                btime_min = f"{btime_min:02}"; btime_sec = f"{btime_sec:02}"
                                time_log = " ["+etime_min+':'+etime_sec+', '+btime_min+':'+btime_sec+"]"
                                flog.close()
                                compile_log = "PASS -- COMPILE "+COMPILE_ID+time_log+warning_log+"\n"
                            else:
                                compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
                            f.close()
                        run_logs += compile_log
                    else:
                        PASS_TESTS = True
                if (str(key) == 'tests' and COMPILE_ONLY == 'false' and not PASS_TESTS):
                    for test in val:
                        case, config = get_testcase(test)
                        machine_check = machine_check_off(MACHINE_ID, config)
                        if machine_check:
                            JOB_NR += 1
                            TEST_NAME = case
                            TEST_ID = TEST_NAME+'_'+RT_COMPILER
                            TEST_LOG = 'rt_'+TEST_ID+'.log'
                            TEST_LOG_TIME = 'run_'+TEST_ID+'_timestamp.txt'
                            if 'dependency' in config.keys():
                                DEP_RUN = str(config['dependency'])+'_'+RT_COMPILER
                            else:
                                DEP_RUN = ""
                            PASS_CHECK = 'Test '+TEST_ID+' PASS'
                            MAXS_CHECK = 'The maximum resident set size (KB)'
                            pass_flag = False
                            create_dep_flag = False
                            if (CREATE_BASELINE == 'true' and not DEP_RUN == ""):
                                create_dep_flag = True
                            if not create_dep_flag:
                                with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
                                    if PASS_CHECK in f.read():
                                        pass_flag = True
                                    f.close()
                                if pass_flag:
                                    f = open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG_TIME)
                                    timing_data = f.read()
                                    first_line = timing_data.split('\n', 1)[0]
                                    etime = str(int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip()))
                                    rtime = str(int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip()))
                                    etime_min, etime_sec = divmod(int(etime), 60)
                                    etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
                                    rtime_min, rtime_sec = divmod(int(rtime), 60)
                                    rtime_min = f"{rtime_min:02}"; rtime_sec = f"{rtime_sec:02}"
                                    time_log = " ["+etime_min+':'+etime_sec+', '+rtime_min+':'+rtime_sec+"]"
                                    f.close()
                                with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
                                    if pass_flag:
                                        rtlog_file = f.readlines()
                                        for line in rtlog_file:
                                            if MAXS_CHECK in line:
                                                memsize = line.split('=')[1].strip()
                                        test_log = 'PASS -- TEST '+TEST_ID+time_log+' ('+memsize+' MB)\n'
                                        PASS_NR += 1
                                    else:
                                        test_log = 'FAIL -- TEST '+TEST_ID+'\n'
                                        failed_list.append(TEST_NAME+' '+RT_COMPILER)
                                        FAIL_NR += 1
                                    run_logs += test_log
                                    f.close()
    run_logs += '\n'
    write_logfile(filename, "a", output=run_logs)

    TEST_START_TIME = os.getenv('TEST_START_TIME')
    TEST_END_TIME = os.getenv('TEST_END_TIME')
    start_time = datetime.strptime(TEST_START_TIME, "%Y%m%d %H:%M:%S")
    end_time = datetime.strptime(TEST_END_TIME, "%Y%m%d %H:%M:%S")
    hours, remainder = divmod((end_time - start_time).total_seconds(), 3600)
    minutes, seconds = divmod(remainder, 60)
    hours = int(hours); minutes = int(minutes); seconds = int(seconds)
    hours = f"{hours:02}"; minutes = f"{minutes:02}"; seconds = f"{seconds:02}"
    elapsed_time = hours+'h:'+minutes+'m:'+seconds+'s'

    COMPILE_PASS = str(int(COMPILE_PASS))
    COMPILE_NR = str(int(COMPILE_NR))
    JOB_NR = str(int(JOB_NR))
    PASS_NR = str(int(PASS_NR))
    FAIL_NR = str(int(FAIL_NR))
    synop_log = f"""
SYNOPSIS:
Starting Date/Time: {TEST_START_TIME}
Ending Date/Time: {TEST_END_TIME}
Total Time: {elapsed_time}
Compiles Completed: {COMPILE_PASS}/{COMPILE_NR}
Tests Completed: {PASS_NR}/{JOB_NR}
"""
    write_logfile(filename, "a", output=synop_log)

    if (int(FAIL_NR) == 0):
        if os.path.isfile(test_changes_list):
            delete_files(test_changes_list)
        open(test_changes_list, 'a').close()
        SUCCESS = "SUCCESS"
        comment_log = f"""
NOTES:
A file test_changes.list was generated but is empty.
If you are using this log as a pull request verification, please commit test_changes.list.
Result: {SUCCESS}
====END OF {MACHINE_ID} REGRESSION TESTING LOG====
"""
        write_logfile(filename, "a", output=comment_log)
    else:
        with open(test_changes_list, 'w') as listfile:
            for line in failed_list:
                listfile.write(f"{line}\n")
            listfile.close()
        SUCCESS = "FAILED"
        comment_log = f"""
NOTES:
A file test_changes.list was generated with a list of all failed tests.
You can use './rt.sh -c -b test_changes.list' to create baselines for the failed tests.
If you are using this log as a pull request verification, please commit test_changes.list.
Result: FAILURE
====END OF {MACHINE_ID} REGRESSION TESTING LOG====
"""
        write_logfile(filename, "a", output=comment_log)

    print("Performing Cleanup...")
    exefiles = PATHRT+'/fv3_*.*x*'; delete_files(exefiles)
    modfiles = PATHRT+'/modules.fv3_*'; delete_files(modfiles)
    modfiles = PATHRT+'/modulefiles/modules.fv3_*'; delete_files(modfiles)
    tmpfiles = PATHRT+'/keep_tests.tmp'; delete_files(tmpfiles)
    if KEEP_RUNDIR == 'false':
        rundir = PATHRT+'/run_dir'
        os.unlink(rundir)
    if ROCOTO == 'true':
        rocotofiles = PATHRT+'/rocoto*'
        delete_files(rocotofiles)
        lockfiles = PATHRT+'/*_lock.db'
        delete_files(lockfiles)
    print("REGRESSION TEST RESULT: SUCCESS")

#if __name__ == '__main__':
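finish_log() derives its compile and test durations from the first line of each *_timestamp.txt file. Judging from the split(",") index arithmetic above, that line is comma-separated with epoch-second values in fields 1 through 4; the field meanings in this sketch are inferred, not documented in the diff:

# Sketch of the timestamp convention finish_log() appears to assume:
# fields 1-4 are epoch seconds (submit, start, end, complete); field 0 is
# assumed to be an identifier. Inferred from the index math above.
def parse_timestamp_line(first_line):
    submit, start, end, complete = (int(p.strip()) for p in first_line.split(",")[1:5])
    elapsed = complete - submit  # matches etime: fields [4] - [1]
    run = end - start            # matches btime/rtime: fields [3] - [2]
    return elapsed, run

print(parse_timestamp_line("job01, 100, 160, 400, 460"))  # -> (360, 240)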
@@ -0,0 +1,20 @@
#!/bin/bash
set -eux

module use /glade/work/epicufsrt/contrib/derecho/rocoto/modulefiles
module load rocoto

module use -a /glade/work/epicufsrt/conda/modulefiles.derecho
module load anaconda/23.7.4

cp fv3_conf/fv3_qsub.IN_derecho fv3_conf/fv3_qsub.IN
cp fv3_conf/compile_qsub.IN_derecho fv3_conf/compile_qsub.IN

ROCOTORUN=/glade/work/epicufsrt/contrib/derecho/rocoto/bin/rocotorun
ROCOTOSTAT=/glade/work/epicufsrt/contrib/derecho/rocoto/bin/rocotostat
ROCOTOCOMPLETE=/glade/work/epicufsrt/contrib/derecho/rocoto/bin/rocotocomplete
ROCOTO_SCHEDULER=pbspro
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER
@@ -0,0 +1,15 @@
#!/bin/bash
set -eux

module use /ncrc/proj/epic/rocoto/modulefiles
module load rocoto
ROCOTORUN=/ncrc/proj/epic/rocoto/1.3.6/bin/rocotorun
ROCOTOSTAT=/ncrc/proj/epic/rocoto/1.3.6/bin/rocotostat
ROCOTOCOMPLETE=/ncrc/proj/epic/rocoto/1.3.6/bin/rocotocomplete
ROCOTO_SCHEDULER=slurm
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER

module load python/3.9
@@ -0,0 +1,14 @@
#!/bin/bash
set -eux

module load rocoto
ROCOTORUN=/apps/rocoto/1.3.7/bin/rocotorun
ROCOTOSTAT=/apps/rocoto/1.3.7/bin/rocotostat
ROCOTOCOMPLETE=/apps/rocoto/1.3.7/bin/rocotocomplete
ROCOTO_SCHEDULER=slurm
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER

module load intelpython/2023.2.0
@@ -0,0 +1,17 @@
#!/bin/bash
set -eux

module load contrib rocoto
ROCOTORUN=/apps/contrib/rocoto/1.3.7/bin/rocotorun
ROCOTOSTAT=/apps/contrib/rocoto/1.3.7/bin/rocotostat
ROCOTOCOMPLETE=/apps/contrib/rocoto/1.3.7/bin/rocotocomplete
ROCOTO_SCHEDULER=slurm
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER

module use -a /work/noaa/epic/conda/modulefiles.hercules
module load anaconda/23.7.4

@@ -0,0 +1,14 @@
#!/bin/bash
set -eux

module load rocoto
ROCOTORUN=/apps/rocoto/1.3.7/bin/rocotorun
ROCOTOSTAT=/apps/rocoto/1.3.7/bin/rocotostat
ROCOTOCOMPLETE=/apps/rocoto/1.3.7/bin/rocotocomplete
ROCOTO_SCHEDULER=slurm
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER

module load intelpython/2023.2.0
@@ -0,0 +1,15 @@
#!/bin/bash
set -eux

export PATH=/contrib/EPIC/bin:${PATH}
module use /apps/modules/modulefiles
module load rocoto/1.3.3

ROCOTORUN=/apps/rocoto/1.3.3/bin/rocotorun
ROCOTOSTAT=/apps/rocoto/1.3.3/bin/rocotostat
ROCOTOCOMPLETE=/apps/rocoto/1.3.3/bin/rocotocomplete
ROCOTO_SCHEDULER=slurm
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER
@@ -0,0 +1,17 @@
#!/bin/bash
set -eux

module load gcc/12.2.0
module load python/3.10.8

module load contrib ruby/3.2.3 rocoto/1.3.7
ROCOTORUN=/apps/contrib/rocoto/1.3.7/bin/rocotorun
ROCOTOSTAT=/apps/contrib/rocoto/1.3.7/bin/rocotostat
ROCOTOCOMPLETE=/apps/contrib/rocoto/1.3.7/bin/rocotocomplete
ROCOTO_SCHEDULER=slurm
export ROCOTORUN
export ROCOTOSTAT
export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER

@@ -0,0 +1,14 @@
#!/bin/bash
set -eux

module load rocoto/1.3.2
#ROCOTORUN=$(which rocotorun)
#ROCOTOSTAT=$(which rocotostat)
#ROCOTOCOMPLETE=$(which rocotocomplete)
ROCOTO_SCHEDULER=slurm
#export ROCOTORUN
#export ROCOTOSTAT
#export ROCOTOCOMPLETE
export ROCOTO_SCHEDULER

module load miniconda/3.8-s4
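Each platform script above exports the same ROCOTORUN/ROCOTOSTAT/ROCOTOCOMPLETE/ROCOTO_SCHEDULER set (s4 leaves the first three commented out in favor of PATH lookup). Downstream, the workflow is advanced with Rocoto's standard command line; a sketch of the typical call, using the XML/database names that appear in ufs_test.sh below (the actual driver loop lives in rt_utils.sh, which is not shown in this diff):

# Sketch: advancing the Rocoto workflow with the exported binary path.
import os
import subprocess

rocotorun = os.environ["ROCOTORUN"]
subprocess.run(
    [rocotorun, "-w", "rocoto_workflow.xml", "-d", "rocoto_workflow.db"],
    check=True,
)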
@@ -0,0 +1,20 @@
atm_dyn32_intel:
  build:
    compiler: 'intel'
    option: '-DAPP=ATM -DCCPP_SUITES=FV3_GFS_v16,FV3_GFS_v16_flake,FV3_GFS_v17_p8,FV3_GFS_v17_p8_rrtmgp,FV3_GFS_v15_thompson_mynn_lam3km,FV3_WoFS_v0,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_ugwpv1 -D32BIT=ON'
  tests:
    - control_c48.v2.sfc: {'project':['daily']}
    - fail_to_copy: {'project':['daily']}
    - fail_to_run: {'project':['daily']}
atm_dyn64_intel:
  build:
    compiler: 'intel'
    option: '-DAPP=ATM -DCCPP_SUITES=FV3_GFS_v16,FV3_GFS_v16_flake,FV3_GFS_v17_p8,FV3_GFS_v17_p8_rrtmgp,FV3_GFS_v15_thompson_mynn_lam3km,FV3_WoFS_v0,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_ugwpv1'
  tests:
    - control_c48: {'project':['daily']}
fail_to_compile_intel:
  build:
    compiler: 'intel'
    option: '--invalid-argument -DAPP=ATM -DCCPP_SUITES=whatever'
  tests:
    - dependency_unmet: {'project':['daily']}
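This sample shows the structure that create_log.py and ufs_test_utils.py walk: one top-level key per build, each holding a build mapping and a tests list of single-entry dicts. A minimal traversal sketch:

# Minimal sketch of walking ufs_test.yaml the way the scripts in this
# diff do: each app key holds a 'build' dict and a 'tests' list of
# one-entry dicts.
import yaml

with open("ufs_test.yaml") as f:
    rt_yaml = yaml.load(f, Loader=yaml.FullLoader)

for app, jobs in rt_yaml.items():
    compiler = jobs["build"]["compiler"]
    for test in jobs.get("tests", []):
        for case, config in test.items():  # exactly one entry per dict
            print(f"{case}_{compiler}", config.get("dependency", ""))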
@@ -0,0 +1,276 @@
#!/bin/bash
set -eux

SECONDS=0

hostname

die() { echo "$@" >&2; exit 1; }

usage() {
  set +x
  echo
  echo "Usage: $0 -a <account> | -b <file> | -c | -d | -e | -h | -k | -l <file> | -m | -n <name> | -o | -r | -w | -s"
  echo
  echo "  -a <account> account to use for the HPC queue"
  echo "  -b create new baselines only for tests listed in <file>"
  echo "  -c create new baseline results"
  echo "  -d delete run directories that are not used by other tests"
  echo "  -e use ecFlow workflow manager (this option is not fully functional yet)"
  echo "  -h display this help"
  echo "  -k keep run directory after ufs_test.sh is completed"
  echo "  -l run tests specified in <file>"
  echo "  -m compare against new baseline results"
  echo "  -n run single test <name>"
  echo "  -o compile only, skip tests"
  echo "  -r use Rocoto workflow manager"
  echo "  -w for weekly_test, skip comparing baseline results"
  echo "  -s for use with tests-dev, symlink sharable test scripts"
  echo
  set -x
  exit 1
}

[[ $# -eq 0 ]] && usage

rt_trap() {
  [[ ${ROCOTO:-false} == true ]] && rocoto_kill
  [[ ${ECFLOW:-false} == true ]] && ecflow_kill
  cleanup
}

cleanup() {
  PID_LOCK=$(awk '{print $2}' < "${LOCKDIR}/PID")
  [[ ${PID_LOCK} == "$$" ]] && rm -rf "${LOCKDIR}"
  [[ ${ECFLOW:-false} == true ]] && ecflow_stop
  trap 0
  exit
}

trap '{ echo "ufs_test.sh interrupted"; rt_trap ; }' INT
trap '{ echo "ufs_test.sh quit"; rt_trap ; }' QUIT
trap '{ echo "ufs_test.sh terminated"; rt_trap ; }' TERM
trap '{ echo "ufs_test.sh error on line $LINENO"; cleanup ; }' ERR
trap '{ echo "ufs_test.sh finished"; cleanup ; }' EXIT

# PATHRT - Path to regression tests directory
PATHRT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )"
readonly PATHRT
cd "${PATHRT}"
[[ -f "${PATHRT}"/detect_machine.sh ]] || cp "${PATHRT}"/../tests/detect_machine.sh "${PATHRT}"
[[ -f "${PATHRT}"/rt_utils.sh ]] || cp "${PATHRT}"/../tests/rt_utils.sh "${PATHRT}"
[[ -f "${PATHRT}"/module-setup.sh ]] || cp "${PATHRT}"/../tests/module-setup.sh "${PATHRT}"

# make sure only one instance of ufs_test.sh is running
readonly LOCKDIR="${PATHRT}"/lock
if mkdir "${LOCKDIR}" ; then
  HOSTNAME=$(hostname)
  echo "${HOSTNAME} $$" > "${LOCKDIR}/PID"
else
  echo "Only one instance of ufs_test.sh can be running at a time"
  exit 1
fi

CREATE_BASELINE=false
ROCOTO=false
ECFLOW=false
KEEP_RUNDIR=false
export skip_check_results=false
export delete_rundir=false
COMPILE_ONLY=false
RTPWD_NEW_BASELINE=false
TESTS_FILE='ufs_test.yaml'
NEW_BASELINES_FILE=''
RUN_SINGLE_TEST=false
ACCNR=${ACCNR:-""}
UFS_TEST_YAML="ufs_test.yaml"
export UFS_TEST_YAML
LINK_TESTS=false

while getopts ":a:b:cl:mn:dwkreohs" opt; do
  case ${opt} in
    a)
      ACCNR=${OPTARG}
      ;;
    b)
      NEW_BASELINES_FILE=${OPTARG}
      export NEW_BASELINES_FILE
      python -c "import ufs_test_utils; ufs_test_utils.update_testyaml_b()"
      UFS_TEST_YAML="ufs_test_temp.yaml"
      export UFS_TEST_YAML
      ;;
    c)
      CREATE_BASELINE=true
      ;;
    l)
      TESTS_FILE=${OPTARG}
      grep -q '[^[:space:]]' < "${TESTS_FILE}" || die "${TESTS_FILE} empty, exiting..."
      UFS_TEST_YAML=${TESTS_FILE}
      export UFS_TEST_YAML
      ;;
    o)
      COMPILE_ONLY=true
      ;;
    m)
      # redefine RTPWD to point to newly created baseline outputs
      RTPWD_NEW_BASELINE=true
      ;;
    n)
      RUN_SINGLE_TEST=true
      IFS=' ' read -r -a SINGLE_OPTS <<< "${OPTARG}"

      if [[ ${#SINGLE_OPTS[@]} != 2 ]]; then
        die 'The -n option needs <testname> AND <compiler> in quotes, i.e. -n "control_p8 intel"'
      fi

      SRT_NAME="${SINGLE_OPTS[0]}"
      SRT_COMPILER="${SINGLE_OPTS[1]}"

      if [[ "${SRT_COMPILER}" != "intel" ]] && [[ "${SRT_COMPILER}" != "gnu" ]]; then
        die "COMPILER MUST BE 'intel' OR 'gnu'"
      fi

      export SRT_NAME
      export SRT_COMPILER
      python -c "import ufs_test_utils; ufs_test_utils.update_testyaml_n()"
      UFS_TEST_YAML="ufs_test_temp.yaml"
      export UFS_TEST_YAML
      ;;
    d)
      export delete_rundir=true
      ;;
    w)
      export skip_check_results=true
      ;;
    k)
      KEEP_RUNDIR=true
      ;;
    r)
      ROCOTO=true
      ECFLOW=false
      ;;
    e)
      ECFLOW=true
      ROCOTO=false
      die "Support for ecFlow is a work in progress. Please use the Rocoto workflow management option (-r)"
      ;;
    s)
      LINK_TESTS=true
      ;;
    h)
      usage
      ;;
    :)
      die "Option -${OPTARG} requires an argument."
      ;;
    ?)
      die "Invalid option: -${OPTARG}"
      ;;
    *)
      die "Invalid runtime options: no parameter included with argument -${OPTARG}"
      ;;
  esac
done

source detect_machine.sh # Note: this does not set ACCNR. The "if" block below does.
source rt_utils.sh
source module-setup.sh

check_machine=false
platforms=( hera orion hercules gaea jet derecho noaacloud s4 )
for name in "${platforms[@]}"
do
  if [[ ${MACHINE_ID} == "${name}" ]]; then
    check_machine=true
    break
  fi
done

if [[ ${check_machine} == true ]]; then
  source "${PATHRT}"/machine_config/machine_"${MACHINE_ID}".config
else
  echo "*** ufs_test.sh currently supports only: ${platforms[*]} ***"
  exit 1
fi

# If -s; link sharable test scripts from tests directory
if [[ ${LINK_TESTS} == true ]]; then
  if ! python -c "import ufs_test_utils; ufs_test_utils.sync_testscripts()"
  then
    echo "*** error: python sync_testscripts! ***"
    exit 1
  fi
fi

# Check to error out if incompatible options are chosen together
[[ ${KEEP_RUNDIR} == true && ${delete_rundir} == true ]] && die "-k and -d options cannot be used at the same time"
[[ ${ECFLOW} == true && ${ROCOTO} == true ]] && die "-r and -e options cannot be used at the same time"
[[ ${CREATE_BASELINE} == true && ${RTPWD_NEW_BASELINE} == true ]] && die "-c and -m options cannot be used at the same time"

if [[ -z "${ACCNR}" ]]; then
  echo "Please use -a <account> to set group account to use on HPC"
  exit 1
fi

# Display the machine and account using the format detect_machine.sh used:
echo "Machine: ""${MACHINE_ID}"" Account: ""${ACCNR}"" "

shift $((OPTIND-1))
[[ $# -gt 1 ]] && usage

TEST_START_TIME="$(date '+%Y%m%d %T')"
export TEST_START_TIME

rm -f fail_test* fail_compile*

if [[ ${ROCOTO} == true ]]; then
  ROCOTO_XML="${PATHRT}"/rocoto_workflow.xml
  ROCOTO_STATE="${PATHRT}"/rocoto_workflow.state
  ROCOTO_DB="${PATHRT}"/rocoto_workflow.db
  rm -f "${ROCOTO_XML}" "${ROCOTO_DB}" "${ROCOTO_STATE}" ./*_lock.db*
fi

[[ -f ${TESTS_FILE} ]] || die "${TESTS_FILE} does not exist"

export ROCOTO_SCHEDULER
export ACCNR
export ROCOTO_XML
export PATHRT
export ROCOTO
export ECFLOW
export MACHINE_ID
export RTPWD_NEW_BASELINE
export CREATE_BASELINE
export RTVERBOSE

export TESTS_FILE
export NEW_BASELINES_FILE
export RUN_SINGLE_TEST
export COMPILE_ONLY
export delete_rundir
export skip_check_results
export KEEP_RUNDIR

if ! python -c "import create_xml; create_xml.xml_loop()"
then
  echo "*** experiment setup didn't run successfully! ***"
  exit 1
fi

##
## run regression test workflow (currently Rocoto or ecFlow are supported)
##
if [[ ${ROCOTO} == true ]]; then
  rocoto_run
fi

# If -c and -b; link verified baselines to NEW_BASELINE
if [[ ${CREATE_BASELINE} == true && ${NEW_BASELINES_FILE} != '' ]]; then
  python -c "import ufs_test_utils; ufs_test_utils.link_new_baselines()"
fi

TEST_END_TIME="$(date '+%Y%m%d %T')"
export TEST_END_TIME

## Let's verify all tests were run and that they passed
python -c "import create_log; create_log.finish_log()"
@@ -0,0 +1,338 @@
import os
import sys
import re
import glob
import yaml
import shutil
import subprocess

def update_testyaml(input_list):
    """Generate temporary test yaml based on list of tests received
    Args:
        input_list (list): list of tests to run
    """
    UFS_TEST_YAML = "ufs_test.yaml"  # default ufs_test.yaml
    new_yaml = {}
    yaml_item_count = None
    with open(UFS_TEST_YAML, 'r') as file_yaml:
        rt_yaml = yaml.load(file_yaml, Loader=yaml.FullLoader)
        for apps, jobs in rt_yaml.items():
            app_temp = None
            build_temp = None
            for key, val in jobs.items():
                if (str(key) == 'build'):
                    #--- build information ---
                    build_val = val
                    compiler_val = val['compiler']
                if (str(key) == 'tests'):
                    #--- search for test cases given with -n or -b option ---
                    test_list = []
                    temp_list = []
                    app_temp = None
                    build_temp = None
                    test_temp = None
                    test_temp_dep = None
                    for test in val:
                        case, config = get_testcase(test)
                        i = 0
                        ilist = None
                        #--- search input_list test cases from ufs_test.yaml ---
                        for line in input_list:
                            case_check = line.split(" ")[0]
                            compiler_check = line.split(" ")[1]
                            if case == case_check and compiler_val == compiler_check:
                                ilist = i
                                app_temp = apps
                                build_temp = build_val
                                test_temp = {case: config}
                                temp_list.append(str(case))
                                if 'dependency' in config.keys():
                                    if not str(config['dependency']) in temp_list:
                                        test_temp_dep = get_testdep(str(config['dependency']), val)
                            i += 1
                        #--- pop input_list element if a test case is found ---
                        if not ilist is None:
                            input_list.pop(ilist)
                        #--- append test cases to new test list ---
                        if not test_temp_dep is None:
                            test_list.append(test_temp_dep)
                            test_temp_dep = None
                        if not test_temp is None:
                            test_list.append(test_temp)
                            test_temp = None
                    if not app_temp is None:
                        new_yaml[app_temp] = {'build': build_temp, 'tests': test_list}
            #--- check all search is done for input_list ---
            if len(input_list) == 0:
                break
    #--- dump into temporary test yaml file ---
    if len(new_yaml) > 0:
        yaml_item_count = len(new_yaml)
    try:
        yaml_item_count
    except NameError:
        print("*** Test cases given with runtime options -n or -b are not found in ufs_test.yaml! ***")
    else:
        with open(r'ufs_test_temp.yaml', 'w') as yaml_file:
            outputs = yaml.dump(new_yaml, yaml_file)

def update_testyaml_n():
    """Update test yaml file for a single test specified in -n <test_name> <compiler>
    """
    try:
        SRT_NAME = str(os.getenv('SRT_NAME'))
        SRT_COMPILER = str(os.getenv('SRT_COMPILER'))
    except NameError:
        print("*** SRT_NAME or SRT_COMPILER are not given with runtime option -n! ***")
    input_list = [SRT_NAME+" "+SRT_COMPILER]
    update_testyaml(input_list)

def update_testyaml_b():
    """Update test yaml file for tests specified in -b <file>
    """
    NEW_BASELINES_FILE = str(os.getenv('NEW_BASELINES_FILE'))
    input_list = []
    with open(NEW_BASELINES_FILE) as input_file:
        for line in input_file:
            line = line.strip('\n')
            line = line.strip()
            input_list.append(str(line))
    update_testyaml(input_list)

def string_clean(str_in):
    """Strip out RUN or COMPILE whitespace and separate with commas.
    Args:
        str_in (str): RUN or COMPILE line read in from rt.conf
    Returns:
        str: whitespace stripped and comma separated values
    """
    return "'"+("','".join(str_in.split()))+"'"

def parse_line(str_in):
    """Parse rt.conf line into list
    Args:
        str_in (str): RUN or COMPILE line from rt.conf
    Returns:
        list: list of RUN or COMPILE test attributes
    """
    build_attr = " ".join(str_in.split()).split('|')
    build_attr = [attr.strip() for attr in build_attr]
    return build_attr

def create_yaml():
    """Parse default rt.conf into ufs_test.yaml
    """
    with open('ufs_test.yaml', 'w') as yaml_file, open("rt.conf") as conf_file:
        for line in conf_file:
            line = line.strip()
            if not line:  # skip: line is blank
                continue
            if line.startswith("#"):  # skip: comment line
                continue
            if line.startswith("COMPILE"):  # COMPILE line
                build = parse_line(line)
                apps = build[1]
                compiler = f"'{build[2]}'"
                options = f"'{build[3]}'"
                machine = build[4]
                off_machine = None
                on_machine = None
                if (machine.find('-') != -1):
                    off_machine = machine.replace("-", "").strip()
                    off_machine = string_clean(off_machine)
                if (machine.find('+') != -1):
                    on_machine = machine.replace("+", "").strip()
                    on_machine = string_clean(on_machine)
                yaml_file.write(f"{apps}_{build[2].strip()}:\n")
                yaml_file.write(f"  build: \n")
                yaml_file.write(f"    compiler: {compiler}\n")
                yaml_file.write(f"    option: {options}\n")
                if not (off_machine is None):
                    yaml_file.write(f"    turnoff: [{off_machine}]\n")
                if not (on_machine is None):
                    yaml_file.write(f"    turnon: [{on_machine}]\n")
                prev_line = 'COMPILE'
            if line.startswith("RUN"):  # RUN line
                build = parse_line(line)
                test = build[1]
                machine = build[2]
                baseline = f"'{build[3]}'"
                depend = build[4]
                if (machine.find('-') != -1):
                    off_machine = machine.replace("-", "").strip()
                    off_machine = string_clean(off_machine)
                if (machine.find('+') != -1):
                    on_machine = machine.replace("+", "").strip()
                    on_machine = string_clean(on_machine)
                tests = f"    - {test}: {{'project':['daily']"
                if baseline.isalnum():
                    tests += f",'baseline': {baseline}"
                if depend and depend.strip():
                    tests += f",'dependency':'{depend}'"
                if not (off_machine is None):
                    tests += f",'turnoff':[{off_machine}]"
                if not (on_machine is None):
                    tests += f",'turnon':[{on_machine}]"
                if prev_line == "COMPILE":
                    yaml_file.write("  tests: \n")
                yaml_file.write(tests+"}\n")
                prev_line = 'RUN'

def sync_testscripts():
    """symlink sharable rt.sh test scripts
    """
    dst = os.getcwd()
    src = os.path.split(os.getcwd())[0]+'/tests'
    for name in os.listdir(src):
        src_name = src + '/' + name
        dst_name = dst + '/' + name
        if not os.path.exists(dst_name):
            if "/compile.sh" in dst_name:
                shutil.copyfile(src_name, dst_name)
                subprocess.call(['chmod', '755', dst_name])
                with open(dst_name) as rfile:
                    buildsh = rfile.read().replace("${PATHTR}/tests/", "${PATHTR}/tests-dev/")
                with open(dst_name, "w") as wfile:
                    wfile.write(buildsh)
            else:
                os.symlink(src_name, dst_name)

def machine_check_off(machine_id, val):
    """Check turned-off machine from yaml configuration
    Args:
        machine_id (str): local machine name
        val (dict): build and test config dictionary list
    Returns:
        pass_machine: logical flag to pass local machine
    """
    pass_machine = True
    if 'turnoff' in val.keys():
        if machine_id in val['turnoff']:
            pass_machine = False
    if 'turnon' in val.keys():
        if not machine_id in val['turnon']:
            pass_machine = False
    return pass_machine

def delete_files(deletefiles):
    """Remove specified filepath
    Args:
        deletefiles (str): filepath to remove e.g. tests/rocoto.*
    """
    fileList = glob.glob(deletefiles, recursive=True)
    for filePath in fileList:
        try:
            os.remove(filePath)
        except OSError:
            print("Error while deleting ", deletefiles)

def link_new_baselines():
    """Create symlinks for newly generated baselines.
    """
    USER = str(os.environ.get('USER'))
    MACHINE_ID = os.getenv('MACHINE_ID')
    PATHRT = os.getenv('PATHRT')
    with open("baseline_setup.yaml", 'r') as f:
        exp_config = yaml.load(f, Loader=yaml.FullLoader)
        base = exp_config[MACHINE_ID]
        DISKNM = str(base['DISKNM'])
        STMP = str(base['STMP'])
        PTMP = str(base['PTMP'])
        path = STMP+'/'+USER
        RTPWD = path + '/FV3_RT/REGRESSION_TEST'
    #--- capture user's NEW_BASELINE location ----
    logfile = PATHRT+'/logs/RegressionTests_'+MACHINE_ID+'.log'
    with open(logfile, 'r') as flog:
        logheads = flog.readlines()
        for line in logheads:
            if "BASELINE DIRECTORY:" in line:
                NEW_BASELINE = line.split(" ")[1]
                break
    #--- symlink verified baseline cases to users new baseline ---
    os.environ["RTPWD"] = RTPWD
    os.environ["NEW_BASELINE"] = NEW_BASELINE
    symlink_baselines = subprocess.Popen(['bash', '-c', '. ufs_test_utils.sh; link_new_baselines'])
    symlink_baselines.wait()

def get_testdep(casename, val):
    """Retrieve test case dependencies
    Args:
        casename (str): Test case name
        val (dict): Test case attributes e.g. val['compiler']
    Returns:
        dict: Test case and config for the specified dependency
    """
    test_dep = None
    for test in val:
        case, config = get_testcase(test)
        if case == casename:
            test_dep = {case: config}
    return test_dep

def get_testcase(test):
    """Retrieve test case name and config from a given dict read from the yaml test file
    Args:
        test (dict): dict retrieved from reading in yaml test file
    Returns:
        str, dict: test name and python dict of test configuration
    """
    case_name = None
    case_config = None
    for case, configs in test.items():
        case_name = case
        case_config = configs
    return case_name, case_config

def write_logfile(logfile, openmod, output="", subproc=""):
    """Append given output into log file
    Args:
        logfile (str): Log filename
        openmod (str): mode to open file in
        output (str): Content to append to log file. Defaults to "".
        subproc (str): Command to run within the shell. Defaults to "".
    """
    with open(logfile, openmod) as rtlog:
        if (not subproc == ""):
            subprocess.call(subproc, shell=True, stdout=rtlog)
        if (not output == ""):
            rtlog.writelines(output)

def rrmdir(path):
    """Remove all files and directories in specified path.
    Args:
        path (str): File path to remove
    """
    shutil.rmtree(path)
    #for entry in os.scandir(path):
    #    if entry.is_dir():
    #        rrmdir(entry)
    #    else:
    #        os.remove(entry)
    #    os.rmdir(path)

#if __name__ == "__main__":
#    create_yaml()
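machine_check_off() implements the turnoff/turnon semantics carried over from rt.conf's -/+ machine lists; a short illustration with hypothetical configs:

# Illustration of machine_check_off() semantics (hypothetical configs):
from ufs_test_utils import machine_check_off

print(machine_check_off("hera", {"turnoff": ["hera"]}))   # False: explicitly disabled
print(machine_check_off("hera", {"turnon": ["orion"]}))   # False: not in allow-list
print(machine_check_off("hera", {"compiler": "intel"}))   # True: no restriction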
@@ -0,0 +1,34 @@
#!/bin/bash
set -eux

function set_run_task() {
  source default_vars.sh
  source rt_utils.sh
  source "${PATHRT}"/tests/"${TEST_NAME}"
  compute_petbounds_and_tasks

  TPN=$(( TPN / THRD ))
  NODES=$(( TASKS / TPN ))
  if (( NODES * TPN < TASKS )); then
    NODES=$(( NODES + 1 ))
  fi

  PPN=$(( TASKS / NODES ))
  if (( TASKS - ( PPN * NODES ) > 0 )); then
    PPN=$((PPN + 1))
  fi

  export WLCLK

  python -c "import create_xml; create_xml.write_runtest_env()"
  rocoto_create_run_task

}

function link_new_baselines() {
  for dir in "${RTPWD}"/*/; do
    dir=${dir%*/}
    [[ -d "${NEW_BASELINE}/${dir##*/}" ]] && continue
    ln -s "${dir%*/}" "${NEW_BASELINE}/"
  done
}