works with cam now
aradhakrishnanGFDL committed Nov 20, 2024
1 parent cac7330 commit 751c3ef
Showing 3 changed files with 14 additions and 2 deletions.
2 changes: 2 additions & 0 deletions catalogbuilder/intakebuilder/dat/gfdlcmipfreq.yaml
@@ -1,5 +1,7 @@
 monthly:
   frequency: mon
+h0:
+  frequency: mon
 daily:
   frequency: day
 hourly:
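The added h0 entry gives the CAM monthly history stream the same CMIP-style frequency mapping the existing GFDL labels already have. As a minimal sketch of how the mapping can be consumed (assuming PyYAML and the nested key layout reconstructed above; the crawler itself resolves this through getinfo.getFreqFromYAML, whose internals are not part of this commit):

import yaml

# Mapping file changed in this commit
yamlfile = "catalogbuilder/intakebuilder/dat/gfdlcmipfreq.yaml"
with open(yamlfile) as f:
    freqmap = yaml.safe_load(f)

print(freqmap["h0"]["frequency"])     # mon  (new CAM history-stream entry)
print(freqmap["daily"]["frequency"])  # day  (pre-existing GFDL entry)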
11 changes: 11 additions & 0 deletions catalogbuilder/intakebuilder/getinfo.py
@@ -134,6 +134,13 @@ def getInfoFromGFDLFilename(filename,dictInfo,logger,configyaml):
         dictInfo["table_id"] = "fx"
     return dictInfo
 
+def getRealm(dictInfo):
+    realm = ""
+    if (dictInfo["source_id"] == "cam"):
+        realm = "atmos"
+    dictInfo["realm"] = realm
+    return(dictInfo)
+
 def getInfoFromGFDLDRS(dirpath,projectdir,dictInfo,configyaml,variable_id,logger):
     '''
     Returns info from project directory and the DRS path to the file
Expand Down Expand Up @@ -193,6 +200,10 @@ def getInfoFromGFDLDRS(dirpath,projectdir,dictInfo,configyaml,variable_id,logger
print("This is likely static")
dictInfo["cell_methods"] = ""
dictInfo["member_id"] = ""
#CAM ESM: If realm is empty, ensure if there is a helper utility to populate this

if("realm" not in dictInfo.keys()):
dictInfo = getRealm(dictInfo)
return dictInfo

def getInfoFromDRS(dirpath,projectdir,dictInfo):
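For illustration, the new helper only knows about CAM so far; any other source_id comes back with an empty realm. A small usage sketch (the input dicts below are hypothetical, not taken from a real crawl):

# getRealm copied from the hunk above, exercised on made-up inputs
def getRealm(dictInfo):
    realm = ""
    if (dictInfo["source_id"] == "cam"):
        realm = "atmos"
    dictInfo["realm"] = realm
    return(dictInfo)

print(getRealm({"source_id": "cam"}))    # {'source_id': 'cam', 'realm': 'atmos'}
print(getRealm({"source_id": "other"}))  # {'source_id': 'other', 'realm': ''}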
3 changes: 1 addition & 2 deletions catalogbuilder/intakebuilder/gfdlcrawler.py
@@ -58,7 +58,6 @@ def crawlLocal(projectdir, dictFilter,dictFilterIgnore,logger,configyaml,slow):
         missingcols = [col for col in diffcols if col not in set_ftemplate]
         missingcols.remove("path") #because we get this anyway
         logger.debug("Missing cols from metadata sources:"+ (str)(missingcols))
-
         #Creating a dictionary to track the unique datasets we come across when using slow mode
         #The keys are the standard names and the values are lists tracking var_id,realm,etc..
         unique_datasets = {'':''}
Expand Down Expand Up @@ -156,6 +155,6 @@ def crawlLocal(projectdir, dictFilter,dictFilterIgnore,logger,configyaml,slow):
cmipfreq = getinfo.getFreqFromYAML(yamlfile,gfdlfreq=dictInfo['frequency'])
if(cmipfreq is not None):
dictInfo['frequency'] = cmipfreq
#print("Adjusting frequency from ", gfdlfreq ," to ",cmipfreq)
print("Adjusting frequency from ", gfdlfreq ," to ",cmipfreq)
listfiles.append(dictInfo)
return listfiles
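Taken together, the three files mean that a CAM history file whose parsed frequency is h0 leaves the crawler with CMIP frequency mon and, through the new fallback, realm atmos. A rough sketch of that flow under those assumptions (the dictInfo values are illustrative only):

# Hypothetical dictInfo as the crawler might have built it for a CAM file
dictInfo = {"source_id": "cam", "frequency": "h0"}

# gfdlcmipfreq.yaml now maps h0 -> mon (first file in this commit)
cmipfreq = "mon"
if cmipfreq is not None:
    print("Adjusting frequency from ", dictInfo["frequency"], " to ", cmipfreq)
    dictInfo["frequency"] = cmipfreq

# getinfo.getRealm fills in the realm when the path did not provide one
if "realm" not in dictInfo.keys():
    dictInfo["realm"] = "atmos"

print(dictInfo)  # {'source_id': 'cam', 'frequency': 'mon', 'realm': 'atmos'}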
