diff --git a/models/clm45/R/met2model.CLM45.R b/models/clm45/R/met2model.CLM45.R
index 27ebb38c2..57082ae1b 100644
--- a/models/clm45/R/met2model.CLM45.R
+++ b/models/clm45/R/met2model.CLM45.R
@@ -21,117 +21,114 @@
 ##' @param lst timezone offset to GMT in hours
 ##' @param overwrite should existing files be overwritten
 ##' @param verbose should the function be very verbose
-met2model.CLM45 <- function(in.path,in.prefix,outfolder,start_date, end_date, lst=0,lat,lon,..., overwrite=FALSE,verbose=FALSE){
-
+met2model.CLM45 <- function(in.path, in.prefix, outfolder, start_date, end_date, lst = 0, lat, lon, ..., overwrite = FALSE, verbose = FALSE) {
   PEcAn.logger::logger.severe("NOT IMPLEMENTED")
-  #General Structure- CLM Uses Netcdf so for now just need to rename vars.(Many not is CF standard. Need to Check that out)
-  #Get Met file from inpath.
-  #Loop over years (Open nc.file,rename vars,change dimensions as needed,close/save .nc file)
-  #close
-  #defining temporal dimension needs to be figured out. If we configure clm to use same tstep then we may not need to change dimensions
-
-#
-#  #Process start and end dates
-#  start_date<-as.POSIXlt(start.date,tz="UTC")
-#  end_date<-as.POSIXlt(end.date,tz="UTC")
-#
-#  start_year <- year(start_date)
-#  end_year <- year(end_date)
-#
-#  timestep.s<-86400 #Number of seconds in a day
-#
-#  ## Build met
-#  met <- NULL
-#  for(year in start_year:end_year){
-#
-#    met.file.y = paste(met.file,year,"nc",sep=".")
-#
-#    if(file.exists(met.file.y)){
-#
-#      ## Open netcdf file
-#      nc=ncdf4::nc_open(met.file.y)
-#
-#
-#      ## convert time to seconds
-#      sec <- nc$dim$time$vals
-#      sec = udunits2::ud.convert(sec,unlist(strsplit(nc$dim$time$units," "))[1],"seconds")
-#
-#
-#
-#      ##build day and year
-#
-#      dt <- PEcAn.utils::seconds_in_year(year) / length(sec)
-#      tstep = round(timestep.s/dt) #time steps per day
-#
-#      diy <- PEcAn.utils::days_in_year(year)
-#      doy <- rep(seq_len(diy), each=tstep)[1:length(sec)]
-#
+  # General Structure- CLM uses NetCDF, so for now we just need to rename vars. (Many are not CF standard; need to check that out.)
+  # Get Met file from inpath.
+  # Loop over years (Open nc.file, rename vars, change dimensions as needed, close/save .nc file)
+  # close
+  # defining temporal dimension needs to be figured out. If we configure clm to use the same tstep then we may not need to change dimensions
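## ----------------------------------------------------------- editor's note --#
## Illustration only, not part of the patch. The commented-out block above turns
## the NetCDF time axis into records per day and a day-of-year index. A minimal
## standalone sketch of that arithmetic, assuming the udunits2 and PEcAn.utils
## packages and a fabricated 3-hourly time axis for 2005:
time_units <- "days since 2005-01-01 00:00:00"
time_vals <- seq(0, 364.875, by = 0.125) # 8 records per day for one year
sec <- udunits2::ud.convert(time_vals, strsplit(time_units, " ")[[1]][1], "seconds")
timestep.s <- 86400 # seconds in a day
dt <- PEcAn.utils::seconds_in_year(2005) / length(sec) # seconds per record
tstep <- round(timestep.s / dt) # records per day (8 here)
doy <- rep(seq_len(PEcAn.utils::days_in_year(2005)), each = tstep)[seq_along(sec)]
## ------------------------------------------------------- end editor's note --#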
-    ## extract variables. These need to be read in and converted to CLM standards
+  #
+  #  #Process start and end dates
+  #  start_date<-as.POSIXlt(start.date,tz="UTC")
+  #  end_date<-as.POSIXlt(end.date,tz="UTC")
+  #
+  #  start_year <- year(start_date)
+  #  end_year <- year(end_date)
+  #
+  #  timestep.s<-86400 #Number of seconds in a day
+  #
+  #  ## Build met
+  #  met <- NULL
+  #  for(year in start_year:end_year){
+  #
+  #    met.file.y = paste(met.file,year,"nc",sep=".")
+  #
+  #    if(file.exists(met.file.y)){
+  #
+  #      ## Open netcdf file
+  #      nc=ncdf4::nc_open(met.file.y)
+  #
+  #
+  #      ## convert time to seconds
+  #      sec <- nc$dim$time$vals
+  #      sec = udunits2::ud.convert(sec,unlist(strsplit(nc$dim$time$units," "))[1],"seconds")
+  #
+  #
+  #
+  #      ##build day and year
+  #
+  #      dt <- PEcAn.utils::seconds_in_year(year) / length(sec)
+  #      tstep = round(timestep.s/dt) #time steps per day
+  #
+  #      diy <- PEcAn.utils::days_in_year(year)
+  #      doy <- rep(seq_len(diy), each=tstep)[1:length(sec)]
+  #
-#      ncdf4::ncvar_rename(ncfile,varid="LONGXY")
-#      ncdf4::ncvar_rename(ncfile,varid="LATIXY")
-#      # double ZBOT(time, lat, lon) ;
-#      # ZBOT:long_name = "observational height" ;
-#      # ZBOT:units = "m" ;
-#      ZBOT = ncvar_rename(ncfile,"ZBOT","ZBOT")
-#      #
-#      # double EDGEW(scalar) ;
-#      # EDGEW:long_name = "western edge in atmospheric data" ;
-#      # EDGEW:units = "degrees E" ;
-#      EDGEW = ncvar_rename(ncfile,"EDGEW","EDGEW")
-#
-#      # double EDGEE(scalar) ;
-#      # EDGEE:long_name = "eastern edge in atmospheric data" ;
-#      # EDGEE:units = "degrees E" ;
-#      EDGEE = ncvar_rename(ncfile,"EDGEE","EDGEE")
-#
-#      # double EDGES(scalar) ;
-#      # EDGES:long_name = "southern edge in atmospheric data" ;
-#      # EDGES:units = "degrees N" ;
-#      EDGES = ncvar_rename(ncfile,"EDGES","EDGES")
-#      #
-#      # double EDGEN(scalar) ;
-#      # EDGEN:long_name = "northern edge in atmospheric data" ;
-#      # EDGEN:units = "degrees N" ;
-#      EDGEN = ncvar_rename(ncfile,"EDGEN","air_temperature")
-#      # double TBOT(time, lat, lon) ;
-#      # TBOT:long_name = "temperature at the lowest atm level (TBOT)" ;
-#      # TBOT:units = "K" ;
-#      TBOT = ncvar_rename(ncfile,"TBOT","specific_humidity")
-#      # double RH(time, lat, lon) ;
-#      # RH:long_name = "relative humidity at the lowest atm level (RH)" ;
-#      # relative_humidity
-#      # RH:units = "%" ;
-#      RH = ncvar_rename(ncfile,"RH","relative_humidity")
-#      # double WIND(time, lat, lon) ;
-#      # WIND:long_name = "wind at the lowest atm level (WIND)" ;
-#      # wind_speed
-#      # WIND:units = "m/s" ;
-#      WIND = ncvar_rename(ncfile,"WIND","wind_speed")
-#      # double FSDS(time, lat, lon) ;
-#      # FSDS:long_name = "incident solar (FSDS)" ;
-#      # FSDS:units = "W/m2" ;
-#      FSDS = ncvar_rename(ncfile,"FSDS","FSDS")
-#      # double FLDS(time, lat, lon) ;
-#      # FLDS:long_name = "incident longwave (FLDS)" ;
-#      # FLDS:units = "W/m2" ;
-#      FLDS = ncvar_rename(ncfile,"FLDS","")
-#      # double PSRF(time, lat, lon) ;
-#      # PSRF:long_name = "pressure at the lowest atm level (PSRF)" ;
-#      # PSRF:units = "Pa" ;
-#      PSRF = ncvar_rename(ncfile,"PSRF","air_pressure")
-#      # double PRECTmms(time, lat, lon) ;
-#      # PRECTmms:long_name = "precipitation (PRECTmms)" ;
-#      # PRECTmms:units = "mm/s" ;
-#      PRECTmms =ncvar_rename(ncfile,"PRECTmmc","precipitation_flux")
+  ## extract variables. These need to be read in and converted to CLM standards
-      #nc_close(ncfiles)
+  #      ncdf4::ncvar_rename(ncfile,varid="LONGXY")
+  #      ncdf4::ncvar_rename(ncfile,varid="LATIXY")
+  #      # double ZBOT(time, lat, lon) ;
+  #      # ZBOT:long_name = "observational height" ;
+  #      # ZBOT:units = "m" ;
+  #      ZBOT = ncvar_rename(ncfile,"ZBOT","ZBOT")
+  #      #
+  #      # double EDGEW(scalar) ;
+  #      # EDGEW:long_name = "western edge in atmospheric data" ;
+  #      # EDGEW:units = "degrees E" ;
+  #      EDGEW = ncvar_rename(ncfile,"EDGEW","EDGEW")
+  #
+  #      # double EDGEE(scalar) ;
+  #      # EDGEE:long_name = "eastern edge in atmospheric data" ;
+  #      # EDGEE:units = "degrees E" ;
+  #      EDGEE = ncvar_rename(ncfile,"EDGEE","EDGEE")
+  #
+  #      # double EDGES(scalar) ;
+  #      # EDGES:long_name = "southern edge in atmospheric data" ;
+  #      # EDGES:units = "degrees N" ;
+  #      EDGES = ncvar_rename(ncfile,"EDGES","EDGES")
+  #      #
+  #      # double EDGEN(scalar) ;
+  #      # EDGEN:long_name = "northern edge in atmospheric data" ;
+  #      # EDGEN:units = "degrees N" ;
+  #      EDGEN = ncvar_rename(ncfile,"EDGEN","EDGEN")
+  #      # double TBOT(time, lat, lon) ;
+  #      # TBOT:long_name = "temperature at the lowest atm level (TBOT)" ;
+  #      # TBOT:units = "K" ;
+  #      TBOT = ncvar_rename(ncfile,"TBOT","air_temperature")
+  #      # double RH(time, lat, lon) ;
+  #      # RH:long_name = "relative humidity at the lowest atm level (RH)" ;
+  #      # relative_humidity
+  #      # RH:units = "%" ;
+  #      RH = ncvar_rename(ncfile,"RH","relative_humidity")
+  #      # double WIND(time, lat, lon) ;
+  #      # WIND:long_name = "wind at the lowest atm level (WIND)" ;
+  #      # wind_speed
+  #      # WIND:units = "m/s" ;
+  #      WIND = ncvar_rename(ncfile,"WIND","wind_speed")
+  #      # double FSDS(time, lat, lon) ;
+  #      # FSDS:long_name = "incident solar (FSDS)" ;
+  #      # FSDS:units = "W/m2" ;
+  #      FSDS = ncvar_rename(ncfile,"FSDS","FSDS")
+  #      # double FLDS(time, lat, lon) ;
+  #      # FLDS:long_name = "incident longwave (FLDS)" ;
+  #      # FLDS:units = "W/m2" ;
+  #      FLDS = ncvar_rename(ncfile,"FLDS","")
+  #      # double PSRF(time, lat, lon) ;
+  #      # PSRF:long_name = "pressure at the lowest atm level (PSRF)" ;
+  #      # PSRF:units = "Pa" ;
+  #      PSRF = ncvar_rename(ncfile,"PSRF","air_pressure")
+  #      # double PRECTmms(time, lat, lon) ;
+  #      # PRECTmms:long_name = "precipitation (PRECTmms)" ;
+  #      # PRECTmms:units = "mm/s" ;
+  #      PRECTmms = ncvar_rename(ncfile,"PRECTmms","precipitation_flux")
-#} ### end loop over met files
+  #      nc_close(ncfiles)
-#print("Done with met2model.CLM4")
+  #    } ### end loop over met files
+  #  print("Done with met2model.CLM45")
 } ### end met2model.CLM45
-
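## ----------------------------------------------------------- editor's note --#
## Illustration only, not part of the patch. The commented-out scaffolding above
## amounts to renaming CF-style variables in a copy of the met file to the names
## CLM4.5 expects. A minimal sketch of that idea with ncdf4; the helper name,
## file paths, and the exact CF-to-CLM name map are assumptions, not settled API:
rename_to_clm <- function(infile, outfile) {
  file.copy(infile, outfile, overwrite = TRUE)
  name_map <- c(
    air_temperature = "TBOT",
    relative_humidity = "RH",
    wind_speed = "WIND",
    air_pressure = "PSRF",
    precipitation_flux = "PRECTmms",
    surface_downwelling_shortwave_flux_in_air = "FSDS",
    surface_downwelling_longwave_flux_in_air = "FLDS"
  )
  nc <- ncdf4::nc_open(outfile, write = TRUE)
  on.exit(ncdf4::nc_close(nc), add = TRUE)
  for (cf_name in names(name_map)) {
    if (cf_name %in% names(nc$var)) {
      nc <- ncdf4::ncvar_rename(nc, cf_name, name_map[[cf_name]])
    }
  }
  invisible(outfile)
}
## ------------------------------------------------------- end editor's note --#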
diff --git a/models/maat/R/write.config.MAAT.R b/models/maat/R/write.config.MAAT.R
index b9e8d1b51..8a3abd39b 100644
--- a/models/maat/R/write.config.MAAT.R
+++ b/models/maat/R/write.config.MAAT.R
@@ -1,18 +1,18 @@
 #-------------------------------------------------------------------------------
 # Copyright (c) 2012 University of Illinois, NCSA.
 # All rights reserved. This program and the accompanying materials
-# are made available under the terms of the 
+# are made available under the terms of the
 # University of Illinois/NCSA Open Source License
 # which accompanies this distribution, and is available at
 # http://opensource.ncsa.illinois.edu/license.html
 #-------------------------------------------------------------------------------
-##-------------------------------------------------------------------------------------------------#
+## -------------------------------------------------------------------------------------------------#
 ## Functions to prepare and write out MAAT model xml files for MA, SA, and Ensemble runs
 PREFIX_XML <- "<?xml version=\"1.0\"?>\n"
-##-------------------------------------------------------------------------------------------------#
+## -------------------------------------------------------------------------------------------------#
 
-##------------------------------------------------------------------------------------------------#
+## ------------------------------------------------------------------------------------------------#
 ##' convert parameters and parameter names from PEcAn database default units/names with MAAT
 ##'
 ##' Performs model specific unit conversions on a list of trait values,
@@ -25,32 +25,32 @@ PREFIX_XML <- "<?xml version=\"1.0\"?>\n"
 ##' @export
 ##' @author Shawn Serbin, Anthony Walker
 convert.samples.MAAT <- function(trait.samples, runid) {
-  
+
   ### Convert object
   if (is.list(trait.samples)) {
     trait.samples <- as.data.frame(trait.samples)
   }
-  
+
   ### first rename variables
   trait.names <- colnames(trait.samples)
-  trait.names[trait.names == "leaf_respiration_rate_m2"] <- "atref.rd"
-  trait.names[trait.names == "Vcmax"] <- "atref.vcmax"
-  trait.names[trait.names == "Jmax"] <- "atref.jmax"
-  trait.names[trait.names == "Ev_Arrhenius"] <- "Ha.vcmax" # Arrhenius activation energy
-  trait.names[trait.names == "Ej_Arrhenius"] <- "Ha.jmax" # Arrhenius activation energy
-  trait.names[trait.names == "Ha_Modified_Arrhenius_Vcmax"] <- "Ha.vcmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
-  trait.names[trait.names == "Hd_Modified_Arrhenius_Vcmax"] <- "Hd.vcmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
-  trait.names[trait.names == "Ha_Modified_Arrhenius_Jmax"] <- "Ha.jmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
-  trait.names[trait.names == "Hd_Modified_Arrhenius_Jmax"] <- "Hd.jmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
-  trait.names[trait.names == "cuticular_cond"] <- "g0" # Medlyn and ball-berry min conductance value (i.e. g0, or the intercept of A/gs relationship)
-  trait.names[trait.names == "stomatal_slope"] <- "g1_leuning"
-  trait.names[trait.names == "stomatal_slope.g1"] <- "g1_medlyn"
-  trait.names[trait.names == "stomatal_slope.BB"] <- "g1_ball"
-  trait.names[trait.names == "f_frac"] <- "f"
-  trait.names[trait.names == "theta"] <- "theta_j" # curvature of J quadratic in Farqhuar & Wong 1984 (unitless)
-  trait.names[trait.names == "leaf_respiration_Q10"] <- "q10.rd" # Q10 of Rd (unitless)
+  trait.names[trait.names == "leaf_respiration_rate_m2"] <- "atref.rd"
+  trait.names[trait.names == "Vcmax"] <- "atref.vcmax"
+  trait.names[trait.names == "Jmax"] <- "atref.jmax"
+  trait.names[trait.names == "Ev_Arrhenius"] <- "Ha.vcmax" # Arrhenius activation energy
+  trait.names[trait.names == "Ej_Arrhenius"] <- "Ha.jmax" # Arrhenius activation energy
+  trait.names[trait.names == "Ha_Modified_Arrhenius_Vcmax"] <- "Ha.vcmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
+  trait.names[trait.names == "Hd_Modified_Arrhenius_Vcmax"] <- "Hd.vcmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
+  trait.names[trait.names == "Ha_Modified_Arrhenius_Jmax"] <- "Ha.jmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
+  trait.names[trait.names == "Hd_Modified_Arrhenius_Jmax"] <- "Hd.jmax" # !!TODO: Allow for the same prior to update both Vcmax and Jmax
+  trait.names[trait.names == "cuticular_cond"] <- "g0" # Medlyn and Ball-Berry min conductance value (i.e. g0, or the intercept of the A/gs relationship)
+  trait.names[trait.names == "stomatal_slope"] <- "g1_leuning"
+  trait.names[trait.names == "stomatal_slope.g1"] <- "g1_medlyn"
+  trait.names[trait.names == "stomatal_slope.BB"] <- "g1_ball"
+  trait.names[trait.names == "f_frac"] <- "f"
+  trait.names[trait.names == "theta"] <- "theta_j" # curvature of J quadratic in Farquhar & Wong 1984 (unitless)
+  trait.names[trait.names == "leaf_respiration_Q10"] <- "q10.rd" # Q10 of Rd (unitless)
   colnames(trait.samples) <- trait.names
-  
+
   ### Conversions -- change to only use if Collatz, should also provide standard Rd output
   if ("atref.rd" %in% names(trait.samples)) {
     ## Calculate dark_resp_factor - rd as a proportion of Vcmax, Williams & Flannagan 1998 ~ 0.1
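## ----------------------------------------------------------- editor's note --#
## Illustration only, not part of the patch. The renaming block above is a
## one-to-one map from PEcAn trait names to MAAT parameter names; the same
## mapping can be expressed as a single named lookup (subset of names shown):
name_map <- c(
  leaf_respiration_rate_m2 = "atref.rd",
  Vcmax = "atref.vcmax",
  Jmax = "atref.jmax",
  cuticular_cond = "g0",
  stomatal_slope = "g1_leuning",
  stomatal_slope.g1 = "g1_medlyn",
  stomatal_slope.BB = "g1_ball"
)
hit <- colnames(trait.samples) %in% names(name_map)
colnames(trait.samples)[hit] <- unname(name_map[colnames(trait.samples)[hit]])
## ------------------------------------------------------- end editor's note --#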
@@ -73,11 +73,11 @@ convert.samples.MAAT <- function(trait.samples, runid) {
     ## Convert from kJ mol-1 to J mol-1
     trait.samples <- transform(trait.samples, Hd.jmax = udunits2::ud.convert(Hd.jmax, "kJ", "J"))
   }
-  if ("leaf_reflect_vis" %in% names(trait.samples) & "leaf_trans_vis" %in% names(trait.samples) ){
-    leaf_abs <- 1-(trait.samples[["leaf_reflect_vis"]]+trait.samples[["leaf_trans_vis"]])
+  if ("leaf_reflect_vis" %in% names(trait.samples) & "leaf_trans_vis" %in% names(trait.samples)) {
+    leaf_abs <- 1 - (trait.samples[["leaf_reflect_vis"]] + trait.samples[["leaf_trans_vis"]])
     trait.samples[["a"]] <- leaf_abs
-    remove <- which(colnames(trait.samples)=="leaf_trans_vis" | colnames(trait.samples)=="leaf_reflect_vis")
-    trait.samples <- trait.samples[,-remove]
+    remove <- which(colnames(trait.samples) == "leaf_trans_vis" | colnames(trait.samples) == "leaf_reflect_vis")
+    trait.samples <- trait.samples[, -remove]
   }
   if ("leaf_width" %in% names(trait.samples)) {
     ## Convert from mm to m
@@ -87,17 +87,17 @@ convert.samples.MAAT <- function(trait.samples, runid) {
     ## Convert from umol H2O m-2 s-1 to mol m-2s-1
     trait.samples <- transform(trait.samples, g0 = udunits2::ud.convert(g0, "umol H2O m-2 s-1", "mol H2O m-2 s-1"))
   }
-  
-  # for debugging conversions
-  #save(trait.samples, file = file.path(settings$host$outdir,runid,'trait.samples.Rdata'))
-  
+
+  # for debugging conversions
+  # save(trait.samples, file = file.path(settings$host$outdir,runid,'trait.samples.Rdata'))
+
   ### Return trait.samples as modified by function
   return(trait.samples)
 } # convert.samples.MAAT
-##-------------------------------------------------------------------------------------------------#
+## -------------------------------------------------------------------------------------------------#
 
-##-------------------------------------------------------------------------------------------------#
+## -------------------------------------------------------------------------------------------------#
 ##' Writes a MAAT config file.
 ##'
 ##' Requires a pft xml object, a list of trait values for a single model run,
@@ -114,11 +114,11 @@ convert.samples.MAAT <- function(trait.samples, runid) {
 ##' @author Shawn Serbin, Anthony Walker, Rob Kooper, Chris Black
 ##'
 write.config.MAAT <- function(defaults = NULL, trait.values, settings, run.id) {
-  
+
   # function needed to nest parameters appropriately in the output MAAT XML. See below
-  nest_entries <- function(x, pattern, new_name = pattern){
+  nest_entries <- function(x, pattern, new_name = pattern) {
     matches <- grepl(pattern, names(x))
-    if(!any(matches)){
+    if (!any(matches)) {
       return(x)
     }
     nested <- stats::setNames(x[matches], gsub(pattern, "", names(x[matches])))
@@ -126,136 +126,151 @@ write.config.MAAT <- function(defaults = NULL, trait.values, settings, run.id) {
     x[[new_name]] <- nested
     x
   }
-  
+
   # find out where to write run/output
   rundir <- file.path(settings$host$rundir, run.id)
   outdir <- file.path(settings$host$outdir, run.id)
-  
-  ### Move model files to run dirs. Use built-in MAAT script setup_MAAT_project.bs --- May need to revise this with 
+
+  ### Move model files to run dirs. Use built-in MAAT script setup_MAAT_project.bs --- May need to revise this with
   ### latest MAAT v1.0 and changes within. This script no longer completely fits within the PEcAn logic. May be better
   ### to manually move/link needed script files within PEcAn and not use any built-in MAAT bash scripts.
   maat_mod_obj <- as.character(settings$model$config$mod_obj)
-  settings$model$config$mod_obj <- NULL # remove from final MAAT *_user_static.xml MAAT file
-  system2(file.path(settings$model$binary, "run_scripts/setup_MAAT_project.bs"),
-          c(maat_mod_obj, rundir, file.path(settings$model$binary, "run_scripts"),
-            file.path(settings$model$binary, "src")))
+  settings$model$config$mod_obj <- NULL # remove from final MAAT *_user_static.xml file
+  system2(
+    file.path(settings$model$binary, "run_scripts/setup_MAAT_project.bs"),
+    c(
+      maat_mod_obj, rundir, file.path(settings$model$binary, "run_scripts"),
+      file.path(settings$model$binary, "src")
+    )
+  )
 
   # remove leaf_user_dynamic.xml from rundir since PEcAn is not currently using dynamic variables (for now, revisit later as-needed)
   # see: https://github.com/walkeranthonyp/MAAT/issues/8 for reference
-  unlink(file.path(rundir,"leaf_user_dynamic.xml"), recursive = FALSE)
+  unlink(file.path(rundir, "leaf_user_dynamic.xml"), recursive = FALSE)
 
   # remove leaf_user_met.xml file if running without met drivers. Look for this file during model2netCDF step to select processing path
   if (is.null(settings$run$inputs$met)) {
-    unlink(file.path(rundir,"leaf_user_met.xml"), recursive = FALSE)
+    unlink(file.path(rundir, "leaf_user_met.xml"), recursive = FALSE)
   }
 
   # below is now required given that MAAT logic no longer moves or links to the run_MAAT.R script file
   run_maat_script <- file.path(settings$model$binary, "src", "run_MAAT.R")
-  
+
   ### Parse config options to XML
   if (!is.null(settings$model$config$mod_mimic)) {
-    PEcAn.logger::logger.info(paste0("Running with model mimic: ",settings$model$config$mod_mimic))
+    PEcAn.logger::logger.info(paste0("Running with model mimic: ", settings$model$config$mod_mimic))
     mod_mimic <- as.character(settings$model$config$mod_mimic)
     settings$model$config$mod_mimic <- NULL
    xml <- PEcAn.settings::listToXml(settings$model$config, "default")
   } else {
     PEcAn.logger::logger.info("*** Model mimic not selected ***")
-    mod_mimic <- 'NULL'
+    mod_mimic <- "NULL"
     xml <- PEcAn.settings::listToXml(settings$model$config, "default")
   }
-  
+
   ### Run rename and conversion function on PEcAn trait values
   PEcAn.logger::logger.info("*** Convert input trait values to MAAT parameters and units ***")
-  traits <- convert.samples.MAAT(trait.samples = trait.values[[settings$pfts$pft$name]],runid=run.id)
+  traits <- convert.samples.MAAT(trait.samples = trait.values[[settings$pfts$pft$name]], runid = run.id)
   # below for debugging
-  #save(traits, file = file.path(settings$host$outdir,run.id,'trait.samples.converted.Rdata'))
-  
+  # save(traits, file = file.path(settings$host$outdir,run.id,'trait.samples.converted.Rdata'))
+
   ### Convert traits to list
   # with MAAT v1.0 we need to generate nested lists
   # create full nested list and convert to MAAT XML format
   traits <- as.list(traits)
   traits.list <- list()
-  maat_param_prefix_list <- list(param=c("Ha.","Hd.","atref.","reftemp.","Topt.","deltaS.","a_deltaS_t.","b_deltaS_t.","q10.","a_q10_t.",
-                                         "b_q10_t.","tupp_cox.","tlow_cox.","exp_cox."),
-                                 xml=c("Ha","Hd","atref","reftemp","Topt","deltaS","a_deltaS_t","b_deltaS_t","q10","a_q10_t",
-                                       "b_q10_t","tupp_cox","tlow_cox","exp_cox"))
+  maat_param_prefix_list <- list(
+    param = c(
+      "Ha.", "Hd.", "atref.", "reftemp.", "Topt.", "deltaS.", "a_deltaS_t.", "b_deltaS_t.", "q10.", "a_q10_t.",
+      "b_q10_t.", "tupp_cox.", "tlow_cox.", "exp_cox."
+    ),
+    xml = c(
+      "Ha", "Hd", "atref", "reftemp", "Topt", "deltaS", "a_deltaS_t", "b_deltaS_t", "q10", "a_q10_t",
+      "b_q10_t", "tupp_cox", "tlow_cox", "exp_cox"
+    )
+  )
   q <- 1
   for (p in seq(seq_along(1:length(maat_param_prefix_list$param)))) {
-    if (q==1) {
+    if (q == 1) {
       traits.list <- nest_entries(traits, paste0(maat_param_prefix_list$param[p]), paste0(maat_param_prefix_list$xml[p]))
     } else {
       traits.list <- nest_entries(traits.list, paste0(maat_param_prefix_list$param[p]), paste0(maat_param_prefix_list$xml[p]))
     }
-    q <- q+1
+    q <- q + 1
   }
   traits.xml <- PEcAn.settings::listToXml(traits.list, "pars")
-  rm(p,q)
-  
+  rm(p, q)
+
   ### Finalize XML
   xml[[1]] <- XML::addChildren(xml[[1]], traits.xml)
-  
+
   ### Save final XML stack as a properly formatted MAAT parameter/option XML file
-  XML::saveXML(xml, 
-               file = file.path(settings$rundir, run.id, "leaf_user_static.xml"), 
-               indent = TRUE, 
-               prefix = PREFIX_XML)
-  
+  XML::saveXML(xml,
    file = file.path(settings$rundir, run.id, "leaf_user_static.xml"),
    indent = TRUE,
    prefix = PREFIX_XML
  )
+
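## ----------------------------------------------------------- editor's note --#
## Illustration only, not part of the patch. What the prefix loop above does:
## nest_entries() pulls every element whose name starts with a given prefix into
## a sub-list keyed by that prefix, so flat MAAT parameter names become the
## nested list that PEcAn.settings::listToXml() turns into nested XML nodes.
## A toy example, assuming nest_entries() as defined above is in scope:
traits <- list(g0 = 0.01, atref.vcmax = 60, atref.jmax = 100, Ha.vcmax = 65000)
nested <- nest_entries(traits, "atref.", "atref")
nested <- nest_entries(nested, "Ha.", "Ha")
str(nested)
## list of 3: $g0 = 0.01, $atref = list(vcmax = 60, jmax = 100), $Ha = list(vcmax = 65000)
## ------------------------------------------------------- end editor's note --#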
   ### Setup job.sh script to run MAAT model
   if (is.null(settings$run$inputs$met)) {
     PEcAn.logger::logger.info("-- No met selected. Running without a met driver --")
-    jobsh <- paste0("#!/bin/bash\n","Rscript ",run_maat_script," ",
-                    "\"srcdir <- ","'",file.path(settings$model$binary, "src"),"'","\""," ",
-                    "\"pdir <- ","'",rundir,"'","\""," ","\"mod_obj <- ","'",maat_mod_obj,"'","\""," ",
-                    "\"xml<-T","\""," ","\"uq<-F","\""," ",
-                    "\"factorial<-F","\""," ","\"mod_mimic<-",mod_mimic,"\""," ",
-                    "\"odir <- ","'",outdir,"'","\""," > ",rundir,
-                    "/logfile.txt","\n",'echo "',
-                    ' library(PEcAn.MAAT); model2netcdf.MAAT(',
-                    "'",rundir,"',","'",outdir,"',",
-                    settings$run$site$lat,",",
-                    settings$run$site$lon,", '",
-                    settings$run$start.date,"', '",
-                    settings$run$end.date,"') ",
-                    '" | R --vanilla')
-
-    # Run with met drivers
+    jobsh <- paste0(
+      "#!/bin/bash\n", "Rscript ", run_maat_script, " ",
+      "\"srcdir <- ", "'", file.path(settings$model$binary, "src"), "'", "\"", " ",
+      "\"pdir <- ", "'", rundir, "'", "\"", " ", "\"mod_obj <- ", "'", maat_mod_obj, "'", "\"", " ",
+      "\"xml<-T", "\"", " ", "\"uq<-F", "\"", " ",
+      "\"factorial<-F", "\"", " ", "\"mod_mimic<-", mod_mimic, "\"", " ",
+      "\"odir <- ", "'", outdir, "'", "\"", " > ", rundir,
+      "/logfile.txt", "\n", 'echo "',
+      " library(PEcAn.MAAT); model2netcdf.MAAT(",
+      "'", rundir, "',", "'", outdir, "',",
+      settings$run$site$lat, ",",
+      settings$run$site$lon, ", '",
+      settings$run$start.date, "', '",
+      settings$run$end.date, "') ",
+      '" | R --vanilla'
+    )
+
+    # Run with met drivers
   } else if (!is.null(settings$run$inputs$met)) {
-    
+
     ## temporary fix for #2064
-    #met.dir <- dirname(settings$run$inputs$met$path)
+    # met.dir <- dirname(settings$run$inputs$met$path)
     met.dir <- dirname(as.character(settings$run$inputs$met$path))
-    #met.file <- basename(settings$run$inputs$met$path)
+    # met.file <- basename(settings$run$inputs$met$path)
     met.file <- basename(as.character(settings$run$inputs$met$path))
-    
-    file.copy(file.path(met.dir, list.files(met.dir, "*.xml")),
-              rundir,
-              overwrite = TRUE,
-              recursive = FALSE,
-              copy.mode = TRUE,
-              copy.date = TRUE)
-    
+
+    file.copy(file.path(met.dir, list.files(met.dir, "*.xml")),
      rundir,
      overwrite = TRUE,
      recursive = FALSE,
      copy.mode = TRUE,
      copy.date = TRUE
    )
+
     PEcAn.logger::logger.info("-- Met selected. Running with a met driver --")
-    PEcAn.logger::logger.info(paste0("Running with met: ",met.file))
-    jobsh <- paste0("#!/bin/bash\n","Rscript ",run_maat_script," ",
-                    "\"srcdir <- ","'",file.path(settings$model$binary, "src"),"'","\""," ",
-                    "\"pdir <- ","'",rundir,"'","\""," ","\"mod_obj <- ","'",maat_mod_obj,"'","\""," ",
-                    "\"xml<-T","\""," ","\"uq<-F","\""," ",
-                    "\"factorial<-F","\""," ","\"mod_mimic<-",mod_mimic,"\""," ",
-                    "\"odir <- ","'",outdir,"'","\""," ","\"mdir <- ","'",met.dir,"'",
-                    "\""," ","\"metdata <- ","'",met.file,"'","\""," > ",rundir,
-                    "/logfile.txt","\n",'echo "',
-                    ' library(PEcAn.MAAT); model2netcdf.MAAT(',
-                    "'",rundir,"',","'",outdir,"',",
-                    settings$run$site$lat,",",
-                    settings$run$site$lon,", '",
-                    settings$run$start.date,"', '",
-                    settings$run$end.date,"') ",
-                    '" | R --vanilla')
-  } #End if/else
-
+    PEcAn.logger::logger.info(paste0("Running with met: ", met.file))
+    jobsh <- paste0(
+      "#!/bin/bash\n", "Rscript ", run_maat_script, " ",
+      "\"srcdir <- ", "'", file.path(settings$model$binary, "src"), "'", "\"", " ",
+      "\"pdir <- ", "'", rundir, "'", "\"", " ", "\"mod_obj <- ", "'", maat_mod_obj, "'", "\"", " ",
+      "\"xml<-T", "\"", " ", "\"uq<-F", "\"", " ",
+      "\"factorial<-F", "\"", " ", "\"mod_mimic<-", mod_mimic, "\"", " ",
+      "\"odir <- ", "'", outdir, "'", "\"", " ", "\"mdir <- ", "'", met.dir, "'",
+      "\"", " ", "\"metdata <- ", "'", met.file, "'", "\"", " > ", rundir,
+      "/logfile.txt", "\n", 'echo "',
+      " library(PEcAn.MAAT); model2netcdf.MAAT(",
+      "'", rundir, "',", "'", outdir, "',",
+      settings$run$site$lat, ",",
+      settings$run$site$lon, ", '",
+      settings$run$start.date, "', '",
+      settings$run$end.date, "') ",
+      '" | R --vanilla'
+    )
+  } # End if/else
+
   # Write the job.sh script
   writeLines(jobsh, con = file.path(settings$rundir, run.id, "job.sh"))
   Sys.chmod(file.path(settings$rundir, run.id, "job.sh"))
-
 } # write.config.MAAT
-##-------------------------------------------------------------------------------------------------#
+## -------------------------------------------------------------------------------------------------#
 ## EOF
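## ----------------------------------------------------------- editor's note --#
## Illustration only, not part of the patch. The job.sh assembly above is a long
## paste0() chain; the same Rscript line reads more easily as a template. A
## sketch for the no-met branch, reusing the variables already defined in
## write.config.MAAT (a hypothetical refactor, not the function's current code):
cmd <- sprintf(
  paste0(
    "#!/bin/bash\n",
    "Rscript %s \"srcdir <- '%s'\" \"pdir <- '%s'\" \"mod_obj <- '%s'\" ",
    "\"xml<-T\" \"uq<-F\" \"factorial<-F\" \"mod_mimic<-%s\" \"odir <- '%s'\" ",
    "> %s/logfile.txt"
  ),
  run_maat_script, file.path(settings$model$binary, "src"), rundir,
  maat_mod_obj, mod_mimic, outdir, rundir
)
## ------------------------------------------------------- end editor's note --#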