Commit

update SWE code
Yalan-Song committed Aug 13, 2024
1 parent 9dec1ac commit 0b96bfa
Showing 4 changed files with 65 additions and 33 deletions.
26 changes: 18 additions & 8 deletions example/snow_water_equivalent/LSTM_SWE_data_integration_testing.py
@@ -25,9 +25,10 @@
torch.cuda.set_device(traingpuid)


rootDB_s=f'/mnt/sdb/yxs275/check_code/KFOLD_inputs/SNOTEL_filter_data_1988/'
rootDB_s=f'/mnt/sdb/yxs275/check_code/SWE_data/'
modelpath = "/mnt/sdb/yxs275/snow_hydroDL/output/"

DateRange=['2000-01-01', '2019-12-31']
DateRange=['2001-01-01', '2019-12-31']
testDateRange=['2016-01-01', '2019-12-31']

var_x_list = ['pr_gridMET', 'tmmn_gridMET', 'tmmx_gridMET', 'srad_gridMET', 'vs_gridMET', 'th_gridMET',
@@ -47,6 +48,15 @@
##Can be ['SWE'] or ['snow_frac']
DI_varibale = ['SWE']

##Hyperparameters
EPOCH = 600
BATCH_SIZE = 100
RHO = 365
saveEPOCH = 50
HIDDENSIZE = 256
trainBuff = 365


### Read data:
# load forcing and target data
time_range = pd.date_range(DateRange[0], DateRange[-1], freq='d')
@@ -114,20 +124,20 @@
target = xTrain[:,DI_day:,-len(targetLst):]

## Load model
rootOut = "/mnt/sdb/yxs275/snow_hydroDL/output/"+'/LSTM_SWE_temp_DI_SWE_30/'
out = os.path.join(rootOut, f"exp_EPOCH600_BS100_RHO365_HS256_trainBuff365") # output folder to save results
if os.path.exists(out) is False:
os.mkdir(out)
rootOut = modelpath +'/LSTM_SWE_temp'+f'_DI_{DI_varibale[0]}_{DI_day}'

out = os.path.join(rootOut, f"exp_EPOCH{EPOCH}_BS{BATCH_SIZE}_RHO{RHO}_HS{HIDDENSIZE}_trainBuff{trainBuff}") # output folder to save results

with open(out + '/scaler_stat.json') as f:
stat_dict = json.load(f)

## test the model
testepoch = 600
testepoch = EPOCH ## Can check other epochs too
model_path = out
print("Load model from ", model_path)
testmodel = loadModel(model_path, epoch=testepoch)

testbatch =200 #len(indexes)
testbatch =200

filePathLst = [out+f"/SWE_norm.csv"]
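The experiment folder name is now assembled from the hyperparameters defined at the top of the script instead of being hard-coded, so testing always points at the matching training output. A minimal sketch of that convention, using only values visible in this diff (DI_day = 30 is inferred from the old hard-coded folder name and is actually set earlier in the script):

import json
import os

# Hyperparameters as defined near the top of the script
EPOCH, BATCH_SIZE, RHO, HIDDENSIZE, trainBuff = 600, 100, 365, 256, 365
DI_varibale, DI_day = ['SWE'], 30  # data-integration variable and lag (names as in the script)

modelpath = "/mnt/sdb/yxs275/snow_hydroDL/output/"
rootOut = modelpath + '/LSTM_SWE_temp' + f'_DI_{DI_varibale[0]}_{DI_day}'
out = os.path.join(rootOut, f"exp_EPOCH{EPOCH}_BS{BATCH_SIZE}_RHO{RHO}_HS{HIDDENSIZE}_trainBuff{trainBuff}")
# -> .../LSTM_SWE_temp_DI_SWE_30/exp_EPOCH600_BS100_RHO365_HS256_trainBuff365

# The folder and its normalization statistics must already exist from training;
# the testing script only reads them, which is why the old mkdir call was dropped.
with open(os.path.join(out, 'scaler_stat.json')) as f:
    stat_dict = json.load(f)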

21 changes: 13 additions & 8 deletions example/snow_water_equivalent/LSTM_SWE_data_integration_training.py
@@ -49,7 +49,12 @@
device = torch.cuda.current_device()

##Please contact us if you need the training data
rootDB_s=f'/mnt/sdb/yxs275/check_code/KFOLD_inputs/SNOTEL_filter_data_1988/'
## It can be downloaded temporarily from: https://pennstateoffice365-my.sharepoint.com/:f:/g/personal/yxs275_psu_edu/EvMxqcb1DkFDuO8KGZsy20sBq9qj4V_rAubNynpMaFBuvw?e=FF4kBb
rootDB_s=f'/mnt/sdb/yxs275/check_code/SWE_data/'

##Where to save your model
savePath = "/mnt/sdb/yxs275/snow_hydroDL/output/"


DateRange=['2001-01-01', '2019-12-31']
TrainingDateRange=['2001-01-01', '2015-12-31']
@@ -140,12 +145,12 @@
target_train_norm = target_train_norm[:,DI_day:,:]
##Hyperparameters

EPOCH = 600 # total epoches to train the mode
BATCH_SIZE = 100
RHO = 365
saveEPOCH = 50
HIDDENSIZE = 256
trainBuff = 365
EPOCH = 600 # total epochs to train the model
BATCH_SIZE = 100 ## Number of sites used in one batch
RHO = 365 ## Length (in days) of each training sequence
saveEPOCH = 50 ## Save the model every 50 epochs
HIDDENSIZE = 256 ## Hidden size of the LSTM
trainBuff = 365 ## Number of days used to warm up the model states

nx = forcing_train_norm_combined.shape[-1] + attribute_norm.shape[-1] # update nx, nx = nx + nc
ny =len(targetLst)
@@ -159,7 +164,7 @@
# loss function : NSE loss
#lossFun = crit.NSELossBatch(np.nanstd(target_train_norm, axis=1 ),device =device)

rootOut = "/mnt/sdb/yxs275/snow_hydroDL/output/"+'/LSTM_SWE_temp'+f'_DI_{DI_varibale[0]}_{DI_day}'
rootOut =savePath +'/LSTM_SWE_temp'+f'_DI_{DI_varibale[0]}_{DI_day}'
if os.path.exists(rootOut) is False:
os.mkdir(rootOut)
out = os.path.join(rootOut, f"exp_EPOCH{EPOCH}_BS{BATCH_SIZE}_RHO{RHO}_HS{HIDDENSIZE}_trainBuff{trainBuff}") # output folder to save results
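The new inline comments above spell out what each hyperparameter controls. As a rough illustration of how BATCH_SIZE and RHO shape one training batch (a sketch with made-up array sizes, not the hydroDL training routine itself):

import numpy as np

rng = np.random.default_rng(0)

# Hypothetical sizes for illustration only: sites x days x (forcings + attributes)
nSites, nDays, nFeatures = 500, 5478, 25
x = rng.normal(size=(nSites, nDays, nFeatures)).astype(np.float32)
y = rng.normal(size=(nSites, nDays, 1)).astype(np.float32)

BATCH_SIZE = 100   # number of sites drawn per batch
RHO = 365          # length (days) of each sampled sequence
trainBuff = 365    # warm-up period for the LSTM states; how it is applied depends on the training routine

# One batch: BATCH_SIZE random sites, each with a random RHO-day window
iSite = rng.integers(0, nSites, size=BATCH_SIZE)
iStart = rng.integers(0, nDays - RHO, size=BATCH_SIZE)
xBatch = np.stack([x[s, t:t + RHO] for s, t in zip(iSite, iStart)])  # (100, 365, 25)
yBatch = np.stack([y[s, t:t + RHO] for s, t in zip(iSite, iStart)])  # (100, 365, 1)
print(xBatch.shape, yBatch.shape)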
25 changes: 17 additions & 8 deletions example/snow_water_equivalent/LSTM_SWE_testing.py
@@ -25,9 +25,10 @@
torch.cuda.set_device(traingpuid)


rootDB_s=f'/mnt/sdb/yxs275/check_code/KFOLD_inputs/SNOTEL_filter_data_1988/'
rootDB_s=f'/mnt/sdb/yxs275/check_code/SWE_data/'
modelpath = "/mnt/sdb/yxs275/snow_hydroDL/output/"

DateRange=['2000-01-01', '2019-12-31']
DateRange=['2001-01-01', '2019-12-31']
testDateRange=['2016-01-01', '2019-12-31']


@@ -39,6 +40,15 @@

targetLst = ['SWE']

##Hyperparameters
EPOCH = 600
BATCH_SIZE = 100
RHO = 365
saveEPOCH = 50
HIDDENSIZE = 256
trainBuff = 365


### Read data:
# load forcing and target data
time_range = pd.date_range(DateRange[0], DateRange[-1], freq='d')
@@ -100,20 +110,19 @@
target_train_norm = xTrain_norm[:,:,len(var_x_list):]

## Load model
rootOut = "/mnt/sdb/yxs275/snow_hydroDL/output/"+'/LSTM_SWE_temp/'
out = os.path.join(rootOut, f"exp_EPOCH600_BS100_RHO365_HS256_trainBuff365") # output folder to save results
if os.path.exists(out) is False:
os.mkdir(out)
rootOut = modelpath+'/LSTM_SWE_temp/'
out = os.path.join(rootOut, f"exp_EPOCH{EPOCH}_BS{BATCH_SIZE}_RHO{RHO}_HS{HIDDENSIZE}_trainBuff{trainBuff}") # output folder to save results

with open(out + '/scaler_stat.json') as f:
stat_dict = json.load(f)

## test the model
testepoch = 600
testepoch = EPOCH ## Can check other epochs too
model_path = out
print("Load model from ", model_path)
testmodel = loadModel(model_path, epoch=testepoch)

testbatch =200 #len(indexes)
testbatch =200

filePathLst = [out+f"/SWE_norm.csv"]
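Since testepoch now defaults to EPOCH but checkpoints are written every saveEPOCH = 50 epochs, any saved epoch can be evaluated, as the comment suggests. A small sketch of that sweep (assuming loadModel and the out folder from the script above; the loop itself is not part of this commit):

EPOCH, saveEPOCH = 600, 50

for testepoch in range(saveEPOCH, EPOCH + 1, saveEPOCH):
    # loadModel(path, epoch=...) is the same helper the script already uses
    testmodel = loadModel(out, epoch=testepoch)
    print(f"Loaded checkpoint from epoch {testepoch}")
    # ... run the same test pass as for the final epoch and compare metrics ...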

26 changes: 17 additions & 9 deletions example/snow_water_equivalent/LSTM_SWE_training.py
@@ -36,6 +36,7 @@
from hydroDL.model import rnn as rnn
from hydroDL.data import scale


randomseed = 111111
random.seed(randomseed)
torch.manual_seed(randomseed)
@@ -48,9 +49,13 @@
torch.cuda.set_device(traingpuid)
device = torch.cuda.current_device()


##Please contact us if you need the training data
rootDB_s=f'/mnt/sdb/yxs275/check_code/KFOLD_inputs/SNOTEL_filter_data_1988/'
## It can be downloaded temporarily from: https://pennstateoffice365-my.sharepoint.com/:f:/g/personal/yxs275_psu_edu/EvMxqcb1DkFDuO8KGZsy20sBq9qj4V_rAubNynpMaFBuvw?e=FF4kBb

rootDB_s=f'/mnt/sdb/yxs275/check_code/SWE_data/'

##Where to save your model
savePath = "/mnt/sdb/yxs275/snow_hydroDL/output/"

DateRange=['2001-01-01', '2019-12-31']
TrainingDateRange=['2001-01-01', '2015-12-31']
@@ -70,9 +75,11 @@
startyear = time_range[0].year
endyear = time_range[-1].year
for year in range(startyear,endyear+1):

for fid, foring_ in enumerate(var_x_list+targetLst):

foring_data = pd.read_csv(rootDB_s+'/'+str(year)+'/' + foring_ + '.csv', header=None, )

foring_data = np.expand_dims(foring_data, axis = -1)
if fid==0:
xTrain_year = foring_data
@@ -126,12 +133,13 @@

##Hyperparameters

EPOCH = 600 # total epoches to train the mode
BATCH_SIZE = 100
RHO = 365
saveEPOCH = 50
HIDDENSIZE = 256
trainBuff = 365
EPOCH = 600 # total epochs to train the model
BATCH_SIZE = 100 ## Number of sites used in one batch
RHO = 365 ## Length (in days) of each training sequence
saveEPOCH = 50 ## Save the model every 50 epochs
HIDDENSIZE = 256 ## Hidden size of the LSTM
trainBuff = 365 ## Number of days used to warm up the model states

nx = forcing_train_norm.shape[-1] + attribute_norm.shape[-1] # update nx, nx = nx + nc
ny =len(targetLst)

@@ -143,7 +151,7 @@
# loss function : NSE loss
#lossFun = crit.NSELossBatch(np.nanstd(target_train_norm, axis=1 ),device =device)

rootOut = "/mnt/sdb/yxs275/snow_hydroDL/output/"+'/LSTM_SWE_temp/'
rootOut = savePath +'/LSTM_SWE_temp/'
if os.path.exists(rootOut) is False:
os.mkdir(rootOut)
out = os.path.join(rootOut, f"exp_EPOCH{EPOCH}_BS{BATCH_SIZE}_RHO{RHO}_HS{HIDDENSIZE}_trainBuff{trainBuff}") # output folder to save results
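For reference, the per-year reading loop touched above (the hunk only adjusts blank lines inside it) works as in this self-contained sketch; the forcing list is shortened here, and the concatenation of the yearly blocks is an assumption since it falls outside the lines shown:

import numpy as np
import pandas as pd

rootDB_s = '/mnt/sdb/yxs275/check_code/SWE_data/'   # new data root from this commit
var_x_list = ['pr_gridMET', 'tmmn_gridMET']         # shortened; the script lists more forcings
targetLst = ['SWE']
startyear, endyear = 2001, 2019

xTrain = None
for year in range(startyear, endyear + 1):
    for fid, foring_ in enumerate(var_x_list + targetLst):  # variable names as in the script
        # one CSV per variable per year: rows are sites, columns are days of that year
        foring_data = pd.read_csv(rootDB_s + '/' + str(year) + '/' + foring_ + '.csv', header=None)
        foring_data = np.expand_dims(foring_data, axis=-1)   # (sites, days, 1)
        if fid == 0:
            xTrain_year = foring_data
        else:
            xTrain_year = np.concatenate((xTrain_year, foring_data), axis=-1)
    # stack the yearly blocks along the time axis (assumed; not shown in the hunk)
    xTrain = xTrain_year if xTrain is None else np.concatenate((xTrain, xTrain_year), axis=1)

# xTrain: (n_sites, n_days across 2001-2019, len(var_x_list) + len(targetLst))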
