Finished external files script (BETA)
- Added missing file to external_files.py.
- Removed external file retrievals from all other scripts.
- Updated preprocess snakefile.
maartenbrinkerink committed Sep 5, 2024
1 parent b20112e commit 4df15c7
Showing 6 changed files with 34 additions and 107 deletions.
3 changes: 2 additions & 1 deletion workflow/rules/preprocess.smk
@@ -21,7 +21,8 @@ demand_figures = [

 external_files = [
     'PLEXOS_World_2015_Gold_V1.1.xlsx',
-    'All_Demand_UTC_2015.csv'
+    'All_Demand_UTC_2015.csv',
+    'PLEXOS_World_MESSAGEix_GLOBIOM_Softlink.xlsx'
 ]

 power_plant_files = [
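
For context, the external_files list above presumably feeds a retrieval rule in the preprocess workflow that delegates to external_files.py. A minimal sketch of what such a rule could look like; the rule name, the resources/data/ target directory, and the script path are assumptions for illustration, not taken from this commit:

rule retrieve_external_files:
    # Hypothetical rule: download every file named in external_files once,
    # before any preprocessing rule that consumes them.
    output:
        expand('resources/data/{file}', file=external_files)
    script:
        '../scripts/osemosys_global/external_files.py'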
21 changes: 4 additions & 17 deletions workflow/scripts/osemosys_global/TS_data.py
@@ -54,24 +54,11 @@
 region_name = config.region_name
 custom_nodes = config.get("nodes_to_add")

-# Checks whether PLEXOS-World 2015 data needs to be retrieved from the PLEXOS-World Harvard Dataverse.
-try:
-    # Open = open(r'data/All_Demand_UTC_2015.csv')
-    Open = open(os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"))
-    # demand_df = pd.read_csv(r'data/All_Demand_UTC_2015.csv' , encoding='latin-1')
-    demand_df = pd.read_csv(
-        os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"), encoding="latin-1"
-    )
-
-except IOError:
-    urllib.request.urlretrieve(
-        "https://dataverse.harvard.edu/api/access/datafile/3985039?format=original&gbrecs=true",
-        os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"),
-    )
-
-    demand_df = pd.read_csv(
-        os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"), encoding="latin-1"
-    )
+# Inputs PLEXOS-World 2015 data.
+
+demand_df = pd.read_csv(
+    os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"), encoding="latin-1"
+)

 seasons_raw = config.get("seasons")
 seasonsData = []
38 changes: 4 additions & 34 deletions workflow/scripts/osemosys_global/demand_projection.py
@@ -9,7 +9,6 @@
 import numpy as np
 import matplotlib.pyplot as plt
 import itertools
-import urllib
 import os
 from sklearn.linear_model import LinearRegression

@@ -37,22 +36,7 @@

 region_name = config.region_name

-# Checks whether PLEXOS-World 2015 data needs to be retrieved from the PLEXOS-World Harvard Dataverse.
-
-try:
-    Open = open(os.path.join(input_data_dir, "PLEXOS_World_2015_Gold_V1.1.xlsx"))
-
-except IOError:
-    urllib.request.urlretrieve(
-        "https://dataverse.harvard.edu/api/access/datafile/4008393?format=original&gbrecs=true",
-        os.path.join(input_data_dir, "PLEXOS_World_2015_Gold_V1.1.xlsx"),
-    )
-
-    Open = open(os.path.join(input_data_dir, "PLEXOS_World_2015_Gold_V1.1.xlsx"))
-
-finally:
-    Open.close()
-
 # Imports PLEXOS-World 2015 model file as basis for the spatial mapping.
 Import_memberships = pd.read_excel(
     os.path.join(input_data_dir, "PLEXOS_World_2015_Gold_V1.1.xlsx"),
     sheet_name="Memberships",
@@ -128,23 +112,9 @@
 # ### Retrieves PLEXOS-World 2015 hourly demand data incl. T&D losses for all nodes as baseline value for the demand forecasting
 # Used to be able to disaggregate regional electricity demand to the nodal level as well as calculate relative peak demand per node.

-# Checks whether PLEXOS-World 2015 data needs to be retrieved from the PLEXOS-World Harvard Dataverse.
-try:
-    Open = open(os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"))
-
-    Import_Hourly_Demand_2015 = pd.read_csv(
-        os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"), encoding="latin-1"
-    )
-
-except IOError:
-    urllib.request.urlretrieve(
-        "https://dataverse.harvard.edu/api/access/datafile/3985039?format=original&gbrecs=true",
-        os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"),
-    )
-
-    Import_Hourly_Demand_2015 = pd.read_csv(
-        os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"), encoding="latin-1"
-    )
+Import_Hourly_Demand_2015 = pd.read_csv(
+    os.path.join(input_data_dir, "All_Demand_UTC_2015.csv"), encoding="latin-1"
+)

 # ### Determines relative 2015 share of demand per sub-country node
16 changes: 11 additions & 5 deletions workflow/scripts/osemosys_global/external_files.py
@@ -15,12 +15,18 @@
 # CONFIGURATION PARAMETERS
 config_paths = ConfigPaths()
 input_data_dir = config_paths.input_data_dir
 output_data_dir = config_paths.output_data_dir

-external_files = {'PLEXOS_World_2015_Gold_V1.1.xlsx' :
-                  'https://dataverse.harvard.edu/api/access/datafile/4008393?format=original&gbrecs=true',
-                  'All_Demand_UTC_2015.csv' :
-                  'https://dataverse.harvard.edu/api/access/datafile/3985039?format=original&gbrecs=true'}
+external_files = {
+    'PLEXOS_World_2015_Gold_V1.1.xlsx' :
+    'https://dataverse.harvard.edu/api/access/datafile/4008393?format=original&gbrecs=true',
+
+    'All_Demand_UTC_2015.csv' :
+    'https://dataverse.harvard.edu/api/access/datafile/3985039?format=original&gbrecs=true',
+
+    'PLEXOS_World_MESSAGEix_GLOBIOM_Softlink.xlsx' :
+    'https://dataverse.harvard.edu/api/access/datafile/6040815'
+
+}

 if __name__ == "__main__":
     for file, url in external_files.items():
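
The body of the download loop is not shown in this diff. A minimal sketch of the retrieval pattern such a loop typically implements, assuming a urllib-based download with a skip-if-present check; external_files and input_data_dir are the names defined in the file above, but the loop body itself is illustrative, not the file's actual code:

import os
import urllib.request

for file, url in external_files.items():
    target = os.path.join(input_data_dir, file)
    # Fetch each external file once; files already on disk are left untouched.
    if not os.path.exists(target):
        urllib.request.urlretrieve(url, target)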
40 changes: 10 additions & 30 deletions workflow/scripts/osemosys_global/max_capacity.py
@@ -1,10 +1,8 @@
-import requests
 import os
 import pandas as pd
-
 # from osemosys_global.configuration import ConfigFile, ConfigPaths
 from configuration import ConfigFile, ConfigPaths
 from utils import apply_dtypes

 # from OPG_configuration import ConfigFile, ConfigPaths
 import itertools
@@ -24,6 +22,7 @@ def main():

     input_dir = config_paths.input_dir
     output_data_dir = config_paths.output_data_dir
+    input_data_dir = config_paths.input_data_dir
     custom_nodes_dir = config_paths.custom_nodes_dir
     region = config.region_name
     years = config.get_years()
@@ -33,34 +32,15 @@ def main():
     max_fuel = config.get("fuel_limits")
     calibration = config.get("calibration")
     re_targets = config.get("re_targets")

     # Imports PLEXOS-World model which includes RES resource limits.
-
-    ## Checks whether PLEXOS-World/MESSAGEix-GLOBIOM soft-link model data needs to be
-    # retrieved from the PLEXOS-World Harvard Dataverse.
-    try:
-        df_reslimit = pd.read_excel(
-            os.path.join(
-                input_dir, "data/PLEXOS_World_MESSAGEix_GLOBIOM_Softlink.xlsx"
-            ),
-            sheet_name="Properties",
-        )
-
-    except IOError:
-        url = "https://dataverse.harvard.edu/api/access/datafile/6040815"
-        r = requests.get(url)
-        with open(
-            os.path.join(
-                input_dir, "data/PLEXOS_World_MESSAGEix_GLOBIOM_Softlink.xlsx"
-            ),
-            "wb",
-        ) as outfile:
-            outfile.write(r.content)
-
-        df_reslimit = pd.read_excel(
-            os.path.join(
-                input_dir, "data/PLEXOS_World_MESSAGEix_GLOBIOM_Softlink.xlsx"
-            ),
-            sheet_name="Properties",
-        )
+    df_reslimit = pd.read_excel(
+        os.path.join(
+            input_data_dir, "PLEXOS_World_MESSAGEix_GLOBIOM_Softlink.xlsx"
+        ),
+        sheet_name="Properties",
+    )

     # TECHNOLOGY MAPPING FOR PLEXOS -> OSEMOSYS GLOBAL
@@ -508,4 +488,4 @@ def apply_re_targets(region, years, output_data_dir, re_targets, remove_nodes):

 if __name__ == "__main__":
     main()
-    logging.info(f"Max capacity limits sucessfully set")
+    logging.info("Max capacity limits sucessfully set")
23 changes: 3 additions & 20 deletions workflow/scripts/osemosys_global/powerplant_data.py
@@ -15,12 +15,10 @@
 import os
 # from osemosys_global.configuration import ConfigFile, ConfigPaths
 from configuration import ConfigFile, ConfigPaths
-import urllib
 import yaml
 from constants import SET_DTYPES
 from utils import apply_dtypes
 import logging
 logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
-import os
 from scipy import spatial

def main():
@@ -55,24 +53,8 @@ def main():

     if not os.path.exists(output_data_dir):
         os.makedirs(output_data_dir)

-    #Checks whether PLEXOS-World 2015 data needs to be retrieved from the PLEXOS-World Harvard Dataverse.
-    try:
-        Open = open(os.path.join(input_data_dir,
-                                 "PLEXOS_World_2015_Gold_V1.1.xlsx"))
-
-    except IOError:
-        urllib.request.urlretrieve("https://dataverse.harvard.edu/api/access/datafile/4008393?format=original&gbrecs=true" ,
-                                   os.path.join(input_data_dir,
-                                                "PLEXOS_World_2015_Gold_V1.1.xlsx")
-                                   )
-
-        Open = open(os.path.join(input_data_dir,
-                                 "PLEXOS_World_2015_Gold_V1.1.xlsx")
-                    )
-
-    finally:
-        Open.close()
-
+    # Inputs the PLEXOS-World 2015 dataset as basis for the powerplant data.
+
     df = pd.read_excel(os.path.join(input_data_dir,
                                     "PLEXOS_World_2015_Gold_V1.1.xlsx"),
                        sheet_name = "Properties")
@@ -84,6 +66,7 @@ def main():
     df_dict = df_dict[df_dict["parent_class"] == "Generator"].rename(
         {"parent_object": "powerplant"}, axis=1
     )
+
     df_weo_data = pd.read_csv(os.path.join(input_data_dir,
                                            "weo_2020_powerplant_costs.csv")
     )
