diff --git a/README.rst b/README.rst index 2fb6fc78..80a82e3c 100644 --- a/README.rst +++ b/README.rst @@ -17,34 +17,52 @@ energy storages. Grid expansion measures are not part of this tool and will be instead part of 'eGo' https://github.com/openego/eGo -Setup -========================= +Installation +============ +eTraGo is designed as a Python package therefore it is mandatory to have +`Python 3 `_ installed. If you have a +working Python3 environment, use pypi to install the latest eTraGo version. +We highly recommend you to use a virtual environment. Use following pip +command in order to install eTraGo: +.. code-block:: bash -Create a virtualenvironment (where you like it) and activate it: + $ pip3 install eTraGo --process-dependency-links -.. code-block:: +Installation for Developers +=========================== - $ virtualenv venv --clear -p python3.5 - $ source venv/bin/activate - $ cd venv - -Clone the source code from github +Clone the source code from github: .. code-block:: $ git clone https://github.com/openego/eTraGo +You can checkout to the dev branch and create new feature branches. +For the correct work-flow, please mind the +`Dreissen Branching Model `_ -With your activated environment `cd` to the cloned directory and run: +Use the pip -e to install eTraGo directly from the cloned repository: .. code-block:: - $ pip3 install -e eTraGo --process-dependency-links + $ pip3 install -e /path/to/eTraGo/ --process-dependency-links + +Using a virtual environment +=========================== + +Before installing eTraGo, +you create a virtual environment (where you like it) and activate it: + +.. code-block:: bash + + $ virtualenv venv --clear -p python3.5 + $ source venv/bin/activate + $ cd venv +Inside your activated virtual environment you can +install eTraGo with the pip command, as previously explained. -This will install all needed packages into your environment. Now you should be -ready to go. 
Copyleft ========================= diff --git a/doc/api/etrago.cluster.rst b/doc/api/etrago.cluster.rst index 3b1d36a8..1e2fc858 100644 --- a/doc/api/etrago.cluster.rst +++ b/doc/api/etrago.cluster.rst @@ -1,14 +1,6 @@ etrago\.cluster package ======================= -Module contents ---------------- - -.. automodule:: etrago.cluster - :members: - :undoc-members: - :show-inheritance: - Submodules ---------- @@ -28,4 +20,11 @@ etrago\.cluster\.snapshot module :undoc-members: :show-inheritance: +Module contents +--------------- + +.. automodule:: etrago.cluster + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/developer_notes.rst b/doc/developer_notes.rst index ee4aea94..f23231b4 100644 --- a/doc/developer_notes.rst +++ b/doc/developer_notes.rst @@ -18,20 +18,21 @@ Installation for Developers $ virtualenv --clear -p python3.5 etrago`` $ cd etrago/ $ source bin/activate + +2. Clone the source code from github: -2. Clone the source code from github - -.. code-block:: +.. code-block:: bash $ git clone https://github.com/openego/eTraGo - $ git checkout dev -With your activated environment `cd` to the cloned directory and run: +You can checkout to the dev branch and create new feature branches. +For the correct work-flow, please mind the +`Dreissen Branching Model `_ -.. code-block:: +3. Use the pip -e to install eTraGo directly from the cloned repository: + +.. code-block:: bash - $ pip3 install -e eTraGo/ --process-dependency-links + $ pip3 install -e /path/to/eTraGo/ --process-dependency-links -This will install all needed packages into your environment. -Now you should be ready to go. diff --git a/doc/howToUse.rst b/doc/howToUse.rst index ea5d454a..5672f454 100644 --- a/doc/howToUse.rst +++ b/doc/howToUse.rst @@ -4,21 +4,43 @@ How to use eTraGo? 
================== After you installed eTraGo you would typically start optimization runs by -executing the ‘appl.py’ wich is situated in +executing the ‘appl.py’ which is situated in ``./eTrago/etrago/`` (e.g by ``python3 appl.py``). -The ‘appl.py’ is used as a simple user interface. Here -parameters, calculation methods and scenario settings are set in a python -dictionary called 'args'. It is crucial to understand these parameters. -For example some of them contradict the usage of others. +eTraGo doesn't have a graphical user interface, +the ‘appl.py’ is used as a simple user interface which can be edited with +the preferred python-editor. +Here parameters, calculation methods and scenario settings are set in a python +dictionary called 'args'. +To run the desired calculation, it is crucial to understand these parameters. +In addition, some of them contradict the usage of others. You find the documentation of all defined parameters from the 'args' here: -:meth:`etrago.appl.etrago`. +:func:`etrago.appl.etrago`. -The appl.py contains the etrago(args) function which uses the +Alternatively, the 'args' dictionary can be edited in a json-file. +Then the path to the json-file has to be defined in the function +:meth:`etrago.tools.utilities.get_args_setting`. Once a path is given +and the get_args_setting() within the `'appl.py' `_ +is executed the 'args' dictionary within the 'appl.py' is ignored +and replaced by the 'args' of the json-file. + +The appl.py contains the :func:`etrago.appl.etrago` function which uses the defined 'args' dictionary to start the desired calculation. -Afterwards a PyPSA network will contain all results. You can use -several plotting functions from the :meth:`etrago.tools.plot` in order +To improve the performance of the optimization of the selected solver, +you might want to use solver options (part of 'args'). For gurobi +the most used ones are described +`here `_. 
+ +Moreover, if you want to change parameters apart from the options which +are provided by the 'args' you can change the default values of +the arguments used in the functions which are executed by the +:meth:`etrago.appl.etrago` function. +Lastly, for more specific or extensive changes you are highly invited +to write code and add new functionalities. + +Once the calculation has finished a PyPSA network will contain all results. +You can use several plotting functions from the :meth:`etrago.tools.plot` in order to visualize the results. For example the :meth:`etrago.tools.plot.plot_line_loading` plots the relative line loading in % of all AC lines and DC links of the network. diff --git a/doc/index.rst b/doc/index.rst index 241ab540..6f190eec 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -11,8 +11,8 @@ Welcome to eTraGo's documentation! :align: right :scale: 80% -.. warning:: Note, eTraGo and especially its documentation is still in - heavy development. +.. warning:: Note, eTraGo and its documentation is in + continuous development. .. toctree:: :maxdepth: 2 diff --git a/doc/installation.rst b/doc/installation.rst index be0390a2..9c2d272f 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -13,10 +13,12 @@ command in order to install eTraGo: -Using virtual environment -========================= +Using a virtual environment +=========================== + -Firstly, you create a virtual environment (where you like it) and activate it: +Before installing eTraGo, +you create a virtual environment (where you like it) and activate it: .. code-block:: bash @@ -24,7 +26,8 @@ Firstly, you create a virtual environment (where you like it) and activate it: $ source venv/bin/activate $ cd venv -Inside your virtual environment you can install eTraGo with the pip command. +Inside your activated virtual environment you can +install eTraGo with the pip command, as previously explained. Linux and Ubuntu ================ @@ -61,13 +64,34 @@ environments. 
Setup database connection ========================= -The package ``ego.io`` gives you a python SQL-Alchemy representations of -the _OpenEnergy-Database_ (oedb) and access to it by using the -`oedialect `_ a SQL-Alchemy binding -Python package for the REST-API used by the OpenEnergy Platform (OEP). Your API -access / login data will be saved in the folder ``.egoio`` in the file -``config.ini``. You can create a new account on -`openenergy-platform.org/login `_. +The package `ego.io `_ will be installed +automatically when eTraGo is installed. The ``egoio`` +gives you python SQL-Alchemy representations of +the `OpenEnergy-Database(oedb) `_ +and access to it by using the +`oedialect `_, which is a SQL-Alchemy binding +Python package for the REST-API used by the OpenEnergy Platform (OEP). + +In order to connect eTraGo via the oedialect with the oedb you +have to create an account at +`openenergy-platform.org/login `_. +You can name the `'db' `_ +argument of the 'args' of the :func:`etrago.appl.etrago` +as you wish. Once the :func:`etrago.appl.etrago` is executed you will be asked +to enter how you want to connect to which database. If you want to use +the oedialect enter the following connection parameter. For and + you have to take your credentials which you obtained by registering +at `openenergy-platform.org/login `_. + +Your API access / login data will be saved in the folder ``.egoio`` in the file +``config.ini``. Consequently, in the config.ini you can also change +your connection parameters or add new ones. +In the following you can see how the config.ini looks like when you use the +oedialect, a local postgresql database or the old psycopg2 developer connection. + +Once you have created a connection (which is saved in the config.ini) you do not have +to enter the connection parameter again. The software will take the connection parameter +which corresponds to the entry at the `'db' `_ argument. 
oedialect connection diff --git a/doc/theoretical_background.rst b/doc/theoretical_background.rst index d715411c..73e4d96c 100644 --- a/doc/theoretical_background.rst +++ b/doc/theoretical_background.rst @@ -110,7 +110,7 @@ optimization problem, whereby the possible extension is unlimited. With respect to the different voltage levels and lengths MVA-specific costs are considered in the linear optimization of the power flow. Besides, several planned grid expansion scenarios from the German grid development plan can be considered as -possible additional power lines. +possible additional power lines by using the 'scn_extension' argument. Miscellaneous Features @@ -122,9 +122,11 @@ marginal costs of each generator in order to prevent an optima plateau. The specific solver options depend on the applied solver like for example Gurobi, CPLEX or GLPK. Considering reproducibility, the 'load_cluster' argument enables to load a former calculated clustered network. Besides, -'line_grouping' provides a grouping of lines which connect the same buses and -the 'branch_capacity_factor' adds a factor to adapt all line capacities. The -'load_shedding' argument is used for debugging complex grids in order to avoid +'line_grouping' provides a grouping of lines which connect the same buses. +The 'branch_capacity_factor' adds a factor to adapt all line capacities in order +to consider (n-1) security. Because the average number of HV systems is much +smaller than the one of eHV lines, you can choose factors for 'HV' and 'eHV'. +The 'load_shedding' argument is used for debugging complex grids in order to avoid infeasibilities. It introduces a very expensive generator at each bus to meet the demand. When optimizing storage units and grid expansion without limiting constraints, the need for load shedding should not be existent. 
The diff --git a/doc/whatsnew.rst b/doc/whatsnew.rst index 8a5cd78e..a47558e2 100644 --- a/doc/whatsnew.rst +++ b/doc/whatsnew.rst @@ -8,6 +8,7 @@ These are new features and improvements of note in each release. :local: :backlinks: top +.. include:: whatsnew/v0_7_1.rst .. include:: whatsnew/v0_7_0.rst .. include:: whatsnew/v0_6_1.rst .. include:: whatsnew/v0_6.rst diff --git a/doc/whatsnew/v0_7_1.rst b/doc/whatsnew/v0_7_1.rst new file mode 100644 index 00000000..53eebb24 --- /dev/null +++ b/doc/whatsnew/v0_7_1.rst @@ -0,0 +1,18 @@ +Release 0.7.1 (October 25, 2018) +++++++++++++++++++++++++++++ +A minor release adding new options for additional constraints, modelling assumptions and plotting. + +Added features +-------------- + +* Two extra functionalities were introduced in order to apply constraints concerning a minimal share of renewable energy and a global upper bound for grid expansion. You can activate these functions in the 'args' of the etrago() function. +* The branch_capacity_factor can now be defined separately for the high and extra high voltage level in order to address the (n-1) criteria more accurately. +* There are some more plotting functions e.g. plotting the state-of-charge and dispatch of storage units. +* Storage capacities in foreign countries can easily be be optimized. +* By default the maximum expansion of each line and transformer is set to four times its original capacity. Being an argument of the extendable() function it can be easily adjusted. +* k-means clustered results can now also be exported to the oedb. 
+ + + + + diff --git a/etrago/appl.py b/etrago/appl.py index 5e169692..419ed548 100644 --- a/etrago/appl.py +++ b/etrago/appl.py @@ -31,6 +31,7 @@ import time import numpy as np import pandas as pd +from etrago.tools.utilities import get_args_setting __copyright__ = ( "Flensburg University of Applied Sciences, " @@ -46,18 +47,18 @@ from etrago.cluster.disaggregation import ( MiniSolverDisaggregation, UniformDisaggregation) - + from etrago.cluster.networkclustering import ( busmap_from_psql, cluster_on_extra_high_voltage, kmean_clustering) - + from etrago.tools.io import ( NetworkScenario, results_to_oedb, extension, decommissioning) - + from etrago.tools.plot import ( plot_line_loading, plot_stacked_gen, @@ -67,7 +68,7 @@ storage_distribution, storage_expansion, nodal_gen_dispatch, - network_extension) + network_expansion) from etrago.tools.utilities import ( load_shedding, @@ -87,56 +88,64 @@ crossborder_capacity, ramp_limits, geolocation_buses, - get_args_setting) - - from etrago.tools.extendable import extendable, extension_preselection + get_args_setting, + set_branch_capacity, + max_line_ext, + min_renewable_share) + + from etrago.tools.extendable import ( + extendable, + extension_preselection, + print_expansion_costs) + from etrago.cluster.snapshot import snapshot_clustering, daily_bounds from egoio.tools import db from sqlalchemy.orm import sessionmaker import oedialect - -args = { # Setup and Configuration: + +args = { + # Setup and Configuration: 'db': 'oedb', # database session 'gridversion': 'v0.4.5', # None for model_draft or Version number 'method': 'lopf', # lopf or pf - 'pf_post_lopf': True, # perform a pf after a lopf simulation - 'start_snapshot': 1, - 'end_snapshot': 2, + 'pf_post_lopf': False, # perform a pf after a lopf simulation + 'start_snapshot': 12, + 'end_snapshot': 24, 'solver': 'gurobi', # glpk, cplex or gurobi - 'solver_options': {'threads':4, 'method':2,'BarConvTol':1.e-5, - 'FeasibilityTol':1.e-6, - 'logFile':'gurobi_eTraGo.log'}, # 
{} for default or dict of solver options - 'scn_name': 'NEP 2035', # a scenario: Status Quo, NEP 2035, eGo 100 + 'solver_options': {'BarConvTol': 1.e-5, 'FeasibilityTol': 1.e-5, + 'logFile': 'solver.log'}, # {} for default options + 'scn_name': 'eGo 100', # a scenario: Status Quo, NEP 2035, eGo 100 # Scenario variations: 'scn_extension': None, # None or array of extension scenarios - 'scn_decommissioning':None, # None or decommissioning scenario + 'scn_decommissioning': None, # None or decommissioning scenario # Export options: 'lpfile': False, # save pyomo's lp file: False or /path/tofolder - 'results': './results', # save results as csv: False or /path/tofolder - 'export': False, # export the results back to the oedb + 'csv_export': False, # save results as csv: False or /path/tofolder + 'db_export': False, # export the results back to the oedb # Settings: - 'extendable': ['network', 'storages'], # Array of components to optimize + 'extendable': ['network', 'storage'], # Array of components to optimize 'generator_noise': 789456, # apply generator noise, False or seed number 'minimize_loading': False, - 'ramp_limits': False, # Choose if using ramp limit of generators + 'ramp_limits': False, # Choose if using ramp limit of generators + 'extra_functionality': None, # Choose function name or None # Clustering: - 'network_clustering_kmeans': 10, # False or the value k for clustering + 'network_clustering_kmeans': 300, # False or the value k for clustering 'load_cluster': False, # False or predefined busmap for k-means 'network_clustering_ehv': False, # clustering of HV buses to EHV buses. - 'disaggregation': 'uniform', # or None, 'mini' or 'uniform' + 'disaggregation': None, # None, 'mini' or 'uniform' 'snapshot_clustering': False, # False or the number of 'periods' # Simplifications: 'parallelisation': False, # run snapshots parallely. 
'skip_snapshots': False, 'line_grouping': False, # group lines parallel lines - 'branch_capacity_factor': 0.7, # factor to change branch capacities - 'load_shedding': False, # meet the demand at very high cost - 'foreign_lines' : {'carrier': 'AC', 'capacity': 'osmTGmod'}, # dict containing carrier and capacity settings of foreign lines + 'branch_capacity_factor': {'HV': 0.5, 'eHV': 0.7}, # p.u. branch derating + 'load_shedding': False, # meet the demand at value of loss load cost + 'foreign_lines': {'carrier': 'AC', 'capacity': 'osmTGmod'}, 'comments': None} -args = get_args_setting(args, jsonpath = None) +args = get_args_setting(args, jsonpath=None) def etrago(args): @@ -198,8 +207,8 @@ def etrago(args): Bundesnetzagentur 'nep2035_b2' includes all new lines planned by the Netzentwicklungsplan 2025 in scenario 2035 B2 - 'BE_NO_NEP 2035' includes planned lines to Belgium and Norway and adds - BE and NO as electrical neighbours + 'BE_NO_NEP 2035' includes planned lines to Belgium and Norway and + adds BE and NO as electrical neighbours scn_decommissioning : str None, @@ -214,18 +223,18 @@ def etrago(args): confirmed projects 'nep2035_b2' includes all lines that will be replaced in NEP-scenario 2035 B2 - + lpfile : obj False, State if and where you want to save pyomo's lp file. Options: False or '/path/tofolder'.import numpy as np - results : obj + csv_export : obj False, State if and where you want to save results as csv files.Options: False or '/path/tofolder'. - export : bool + db_export : bool False, State if you want to export the results of your calculation back to the database. @@ -236,7 +245,7 @@ def etrago(args): Settings can be added in /tools/extendable.py. 
The most important possibilities: 'network': set all lines, links and transformers extendable - 'german_network': set lines and transformers in German grid + 'german_network': set lines and transformers in German grid extendable 'foreign_network': set foreign lines and transformers extendable 'transformers': set all transformers extendable @@ -257,12 +266,21 @@ def etrago(args): minimize_loading : bool False, ... + ramp_limits : bool False, - State if you want to consider ramp limits of generators. + State if you want to consider ramp limits of generators. Increases time for solving significantly. Only works when calculating at least 30 snapshots. + extra_functionality : str or None + None, + Choose name of extra functionality described in etrago/utilities.py + "min_renewable_share" to set a minimal share of renewable energy or + "max_line_ext" to set an overall maximum of line expansion. + When activating snapshot_clustering or minimize_loading these + extra_funtionalities are overwritten and therefore neglected. + network_clustering_kmeans : bool or int False, State if you want to apply a clustering of all network buses down to @@ -300,8 +318,8 @@ def etrago(args): State if you want to group lines that connect the same two buses into one system. - branch_capacity_factor : numeric - 1, + branch_capacity_factor : dict + {'HV': 0.5, 'eHV' : 0.7}, Add a factor here if you want to globally change line capacities (e.g. to "consider" an (n-1) criterion or for debugging purposes). @@ -311,12 +329,12 @@ def etrago(args): is helpful when debugging: a very expensive generator is set to each bus and meets the demand when regular generators cannot do so. 
- + foreign_lines : dict - {'carrier':'AC', 'capacity': 'osm_tGmod}' - Choose transmission technology and capacity of foreign lines: + {'carrier':'AC', 'capacity': 'osmTGmod}' + Choose transmission technology and capacity of foreign lines: 'carrier': 'AC' or 'DC' - 'capacity': 'osm_tGmod', 'ntc_acer' or 'thermal_acer' + 'capacity': 'osmTGmod', 'ntc_acer' or 'thermal_acer' comments : str None @@ -326,8 +344,6 @@ def etrago(args): network : `pandas.DataFrame` eTraGo result network based on `PyPSA network `_ - - """ conn = db.connection(section=args['db']) Session = sessionmaker(bind=conn) @@ -351,21 +367,22 @@ def etrago(args): # add coordinates network = add_coordinates(network) - + # Set countrytags of buses, lines, links and transformers network = geolocation_buses(network, session) - + # Set q_sets of foreign loads - network = set_q_foreign_loads(network, cos_phi = 1) - + network = set_q_foreign_loads(network, cos_phi=1) + # Change transmission technology and/or capacity of foreign lines if args['foreign_lines']['carrier'] == 'DC': foreign_links(network) network = geolocation_buses(network, session) - + if args['foreign_lines']['capacity'] != 'osmTGmod': - crossborder_capacity(network, args['foreign_lines']['capacity'], - args['branch_capacity_factor']) + crossborder_capacity( + network, args['foreign_lines']['capacity'], + args['branch_capacity_factor']) # TEMPORARY vague adjustment due to transformer bug in data processing if args['gridversion'] == 'v0.2.11': @@ -374,21 +391,26 @@ def etrago(args): # set SOC at the beginning and end of the period to equal values network.storage_units.cyclic_state_of_charge = True - # set extra_functionality to default - extra_functionality = None - + # set extra_functionality + if args['extra_functionality'] is not None: + extra_functionality = eval(args['extra_functionality']) + elif args['extra_functionality'] is None: + extra_functionality = args['extra_functionality'] + # set disaggregated_network to default 
disaggregated_network = None # set clustering to default clustering = None - + if args['generator_noise'] is not False: # add random noise to all generators s = np.random.RandomState(args['generator_noise']) - network.generators.marginal_cost += \ - abs(s.normal(0, 0.001, len(network.generators.marginal_cost))) - + network.generators.marginal_cost[network.generators.bus.isin( + network.buses.index[network.buses.country_code == 'DE'])] += \ + abs(s.normal(0, 0.1, len(network.generators.marginal_cost[ + network.generators.bus.isin(network.buses.index[ + network.buses.country_code == 'DE'])]))) # for SH scenario run do data preperation: if (args['scn_name'] == 'SH Status Quo' or args['scn_name'] == 'SH NEP 2035'): @@ -401,56 +423,53 @@ def etrago(args): # Branch loading minimization if args['minimize_loading']: extra_functionality = loading_minimization - - # scenario extensions + + # scenario extensions if args['scn_extension'] is not None: for i in range(len(args['scn_extension'])): network = extension( network, session, - version = args['gridversion'], + version=args['gridversion'], scn_extension=args['scn_extension'][i], start_snapshot=args['start_snapshot'], end_snapshot=args['end_snapshot']) network = geolocation_buses(network, session) - + + # set Branch capacity factor for lines and transformer + if args['branch_capacity_factor']: + set_branch_capacity(network, args) + # scenario decommissioning if args['scn_decommissioning'] is not None: network = decommissioning( network, session, - version = args['gridversion'], - scn_decommissioning=args['scn_decommissioning']) + args) # Add missing lines in Munich and Stuttgart - network = add_missing_components(network) + network = add_missing_components(network) - # investive optimization strategies + # investive optimization strategies if args['extendable'] != []: network = extendable( network, - args) + args, + line_max=4) network = convert_capital_costs( network, args['start_snapshot'], args['end_snapshot']) - + # 
skip snapshots if args['skip_snapshots']: network.snapshots = network.snapshots[::args['skip_snapshots']] network.snapshot_weightings = network.snapshot_weightings[ ::args['skip_snapshots']] * args['skip_snapshots'] - + # snapshot clustering if not args['snapshot_clustering'] is False: network = snapshot_clustering( network, how='daily', clusters=args['snapshot_clustering']) extra_functionality = daily_bounds # daily_bounds or other constraint - - # set Branch capacity factor for lines and transformer - if args['branch_capacity_factor']: - network.lines.s_nom = network.lines.s_nom * \ - args['branch_capacity_factor'] - network.transformers.s_nom = network.transformers.s_nom * \ - args['branch_capacity_factor'] # load shedding in order to hunt infeasibilities if args['load_shedding']: @@ -465,28 +484,29 @@ def etrago(args): # k-mean clustering if not args['network_clustering_kmeans'] == False: - clustering = kmean_clustering(network, + clustering = kmean_clustering( + network, n_clusters=args['network_clustering_kmeans'], load_cluster=args['load_cluster'], - line_length_factor= 1, + line_length_factor=1, remove_stubs=False, use_reduced_coordinates=False, bus_weight_tocsv=None, bus_weight_fromcsv=None, - n_init=100, - max_iter=1000, - tol=1e-8, - n_jobs=-2) + n_init=10, + max_iter=100, + tol=1e-6, + n_jobs=-1) disaggregated_network = ( network.copy() if args.get('disaggregation') else None) network = clustering.network.copy() - + if args['ramp_limits']: - ramp_limits(network) - + ramp_limits(network) + # preselection of extendable lines if 'network_preselection' in args['extendable']: - extension_preselection(network, args, 'snapshot_clustering', 2) + extension_preselection(network, args, 'snapshot_clustering', 2) # parallisation if args['parallelisation']: @@ -506,7 +526,7 @@ def etrago(args): network.snapshots, solver_name=args['solver'], solver_options=args['solver_options'], - extra_functionality=extra_functionality) + extra_functionality=extra_functionality, 
formulation="angles") y = time.time() z = (y - x) / 60 # z is time for lopf in minutes @@ -527,20 +547,10 @@ def etrago(args): z = (y - x) / 60 print("Time for PF [min]:", round(z, 2)) calc_line_losses(network) - network = distribute_q(network, allocation = 'p_nom') - - # provide storage installation costs - if sum(network.storage_units.p_nom_opt) != 0: - installed_storages = \ - network.storage_units[network.storage_units.p_nom_opt != 0] - storage_costs = sum( - installed_storages.capital_cost * - installed_storages.p_nom_opt) - print( - "Investment costs for all storages in selected snapshots [EUR]:", - round( - storage_costs, - 2)) + network = distribute_q(network, allocation='p_nom') + + if not args['extendable'] == []: + print_expansion_costs(network, args) if clustering: disagg = args.get('disaggregation') @@ -564,10 +574,10 @@ def etrago(args): disaggregation.execute(scenario, solver=args['solver']) # temporal bug fix for solar generator which ar during night time - # nan instead of 0 + # nan instead of 0 disaggregated_network.generators_t.p.fillna(0, inplace=True) disaggregated_network.generators_t.q.fillna(0, inplace=True) - + disaggregated_network.results = network.results print("Time for overall desaggregation [min]: {:.2}" .format((time.time() - t) / 60)) @@ -579,40 +589,37 @@ def etrago(args): 'symbolic_solver_labels': True}) # write PyPSA results back to database - if args['export']: + if args['db_export']: username = str(conn.url).split('//')[1].split(':')[0] args['user_name'] = username - safe_results = False # default is False. - # If it is set to 'True' the result set will be saved - # to the versioned grid schema eventually apart from - # being saved to the model_draft. - # ONLY set to True if you know what you are doing. 
+ results_to_oedb( session, network, dict([("disaggregated_results", False)] + list(args.items())), grid='hv', - safe_results=safe_results) + safe_results=False) + if disaggregated_network: results_to_oedb( session, disaggregated_network, dict([("disaggregated_results", True)] + list(args.items())), grid='hv', - safe_results=safe_results) + safe_results=False) # write PyPSA results to csv to path - if not args['results'] is False: + if not args['csv_export'] is False: if not args['pf_post_lopf']: results_to_csv(network, args) else: - results_to_csv(network, args,pf_solution = pf_solution) + results_to_csv(network, args, pf_solution=pf_solution) if disaggregated_network: results_to_csv( disaggregated_network, {k: os.path.join(v, 'disaggregated') - if k == 'results' else v + if k == 'csv_export' else v for k, v in args.items()}) # close session diff --git a/etrago/args.json b/etrago/args.json index 14c4182e..f90c3c95 100644 --- a/etrago/args.json +++ b/etrago/args.json @@ -3,29 +3,36 @@ "gridversion": "v0.4.5", "method": "lopf", "pf_post_lopf": true, - "start_snapshot": 1, - "end_snapshot" : 2, + "start_snapshot": 4900, + "end_snapshot" : 5000, "solver": "gurobi", - "solver_options":{}, + "solver_options":{"threads":2, + "method":2, + "BarHomogeneous":1, + "NumericFocus": 3, + "BarConvTol":"1.e-5", + "FeasibilityTol":"1.e-6", + "logFile":"gurobi_eTraGo.log"}, "scn_name": "eGo 100", "scn_extension": null, "scn_decommissioning": null, "lpfile": false, - "results": false, - "export": false, - "extendable": "['network', 'storages']", + "csv_export": "./results/", + "db_export": false, + "extendable": ["storage"], "generator_noise": 789456, "minimize_loading": false, "ramp_limits": false, - "network_clustering_kmeans": 30, + "extra_functionality": null, + "network_clustering_kmeans": 100, "load_cluster": false, "network_clustering_ehv": false, - "disaggregation": null, + "disaggregation": "uniform", "snapshot_clustering": false, "parallelisation": false, - 
"skip_snapshots": false, + "skip_snapshots": 5, "line_grouping": false, - "branch_capacity_factor": 0.7, + "branch_capacity_factor": {"HV": 0.5, "eHV" : 0.7}, "load_shedding": false, "foreign_lines" :{"carrier": "AC", "capacity": "osmTGmod"}, "comments": "" diff --git a/etrago/cluster/disaggregation.py b/etrago/cluster/disaggregation.py index 4bdd96d3..08ca7451 100644 --- a/etrago/cluster/disaggregation.py +++ b/etrago/cluster/disaggregation.py @@ -355,25 +355,37 @@ def extra_functionality(network, snapshots): for bustype, bustype_pypsa, suffixes in [ ('storage', 'storage_units', ['_dispatch', '_spill', '_store']), ('store', 'stores',[''])]: - generators = getattr(self.original_network, bustype_pypsa).assign( + generators = getattr(self.original_network, + bustype_pypsa).assign( bus=lambda df: df.bus.map(self.clustering.busmap)) for suffix in suffixes: def construct_constraint(model, snapshot): # TODO: Optimize - buses_p = [getattr(model,bustype + '_p' + suffix)[(x, snapshot)] for x in - generators.loc[(generators.bus == cluster)].index] + buses_p = [getattr( + model, + bustype + '_p' + suffix + )[(x, snapshot)] for x in + generators.loc[( + generators.bus == cluster + )].index] if not buses_p: return Constraint.Feasible sum_bus_p = sum(buses_p) - cluster_buses = getattr(self.clustered_network, bustype_pypsa)[ - (getattr(self.clustered_network, bustype_pypsa).bus == cluster)] + cluster_buses = getattr( + self.clustered_network, bustype_pypsa)[ + (getattr(self.clustered_network, + bustype_pypsa).bus == cluster)] sum_clustered_p = sum( - getattr(self.clustered_network, bustype_pypsa + '_t')['p'].loc[snapshot,c] for + getattr(self.clustered_network, bustype_pypsa + + '_t')['p'].loc[snapshot,c] for c in cluster_buses.index) return sum_bus_p == sum_clustered_p # TODO: Generate a better name - network.model.add_component('validate_' + bustype + suffix,Constraint(list(snapshots), rule=construct_constraint)) + network.model.add_component( + 'validate_' + bustype + suffix, 
+ Constraint(list(snapshots), + rule=construct_constraint)) return extra_functionality class UniformDisaggregation(Disaggregation): diff --git a/etrago/cluster/networkclustering.py b/etrago/cluster/networkclustering.py index d479955b..a7067cda 100644 --- a/etrago/cluster/networkclustering.py +++ b/etrago/cluster/networkclustering.py @@ -470,7 +470,8 @@ def normed(x): network.transformers.bus1.map(network.buses.v_nom)], axis=1) network.import_components_from_dataframe( - network.transformers.loc[:, ['bus0', 'bus1', 'x', 's_nom', 'capital_cost']] + network.transformers.loc[:, [ + 'bus0', 'bus1', 'x', 's_nom', 'capital_cost']] .assign(x=network.transformers.x * (380. / transformer_voltages.max(axis=1))**2, length = 1) .set_index('T' + trafo_index), diff --git a/etrago/cluster/snapshot.py b/etrago/cluster/snapshot.py index 32a6e237..70f8e91d 100644 --- a/etrago/cluster/snapshot.py +++ b/etrago/cluster/snapshot.py @@ -114,7 +114,7 @@ def run(network, n_clusters=None, how='daily', # calculate clusters tsam_ts, cluster_weights, dates, hours = tsam_cluster( prepare_pypsa_timeseries(network), typical_periods=n_clusters, - how='daily') + how=how) update_data_frames(network, cluster_weights, dates, hours) diff --git a/etrago/tools/extendable.py b/etrago/tools/extendable.py index 862c5a40..1c6c2dbf 100644 --- a/etrago/tools/extendable.py +++ b/etrago/tools/extendable.py @@ -42,26 +42,67 @@ __author__ = "ulfmueller, s3pp, wolfbunke, mariusves, lukasol" -def extendable(network, args): +def extendable(network, args, line_max): + + """ + Function that sets selected components extendable + + 'network' for all lines, links and transformers + 'german_network' for all lines, links and transformers located in Germany + 'foreign_network' for all foreign lines, links and transformers + 'transformers' for all transformers + 'storages' for extendable storages + 'overlay_network' for lines, links and trafos in extension scenerio(s) + + Parameters + ---------- + network : 
:class:`pypsa.Network + Overall container of PyPSA + args : dict + Arguments set in appl.py + + + Returns + ------- + network : :class:`pypsa.Network + Overall container of PyPSA + """ if 'network' in args['extendable']: network.lines.s_nom_extendable = True network.lines.s_nom_min = network.lines.s_nom - network.lines.s_nom_max = float("inf") + + if not line_max==None: + network.lines.s_nom_max = line_max * network.lines.s_nom + + else: + network.lines.s_nom_max = float("inf") if not network.transformers.empty: network.transformers.s_nom_extendable = True network.transformers.s_nom_min = network.transformers.s_nom - network.transformers.s_nom_max = float("inf") + + if not line_max==None: + network.transformers.s_nom_max =\ + line_max * network.transformers.s_nom + + else: + network.transformers.s_nom_max = float("inf") if not network.links.empty: - network.links.loc.p_nom_extendable = True + network.links.p_nom_extendable = True network.links.p_nom_min = network.links.p_nom network.links.p_nom_max = float("inf") + if not line_max==None: + network.links.p_nom_max=\ + line_max * network.links.p_nom + + else: + network.links.p_nom_max = float("inf") + + network = set_line_costs(network, args) + network = set_trafo_costs(network, args) - network = set_line_costs(network) - network = set_trafo_costs(network) - if 'german_network' in args['extendable']: buses = network.buses[~network.buses.index.isin( buses_by_country(network).index)] @@ -75,12 +116,29 @@ def extendable(network, args): (network.lines.bus1.isin(buses.index)), 's_nom_max'] = float("inf") + if not line_max==None: + network.lines.loc[(network.lines.bus0.isin(buses.index)) & + (network.lines.bus1.isin(buses.index)), + 's_nom_max'] = line_max * network.lines.s_nom + + else: + network.lines.loc[(network.lines.bus0.isin(buses.index)) & + (network.lines.bus1.isin(buses.index)), + 's_nom_max'] = float("inf") + if not network.transformers.empty: network.transformers.loc[network.transformers.bus0.isin( 
buses.index),'s_nom_extendable'] = True network.transformers.loc[network.transformers.bus0.isin( buses.index),'s_nom_min'] = network.transformers.s_nom - network.transformers.loc[network.transformers.bus0.isin( + + if not line_max==None: + network.transformers.loc[network.transformers.bus0.isin( + buses.index),'s_nom_max'] = \ + line_max * network.transformers.s_nom + + else: + network.transformers.loc[network.transformers.bus0.isin( buses.index),'s_nom_max'] = float("inf") if not network.links.empty: @@ -90,14 +148,20 @@ def extendable(network, args): network.links.loc[(network.links.bus0.isin(buses.index)) & (network.links.bus1.isin(buses.index)), 'p_nom_min'] = network.links.p_nom - network.links.loc[(network.links.bus0.isin(buses.index)) & - (network.links.bus1.isin(buses.index)), + + if not line_max==None: + network.links.loc[(network.links.bus0.isin(buses.index)) & + (network.links.bus1.isin(buses.index)), + 'p_nom_max'] = line_max * network.links.p_nom + + else: + network.links.loc[(network.links.bus0.isin(buses.index)) & + (network.links.bus1.isin(buses.index)), 'p_nom_max'] = float("inf") - network = set_line_costs(network) - network = set_trafo_costs(network) - - + network = set_line_costs(network, args) + network = set_trafo_costs(network, args) + if 'foreign_network' in args['extendable']: buses = network.buses[network.buses.index.isin( buses_by_country(network).index)] @@ -107,9 +171,16 @@ def extendable(network, args): network.lines.loc[network.lines.bus0.isin(buses.index) | network.lines.bus1.isin(buses.index), 's_nom_min'] = network.lines.s_nom - network.lines.loc[network.lines.bus0.isin(buses.index) | + + if not line_max==None: + network.lines.loc[network.lines.bus0.isin(buses.index) | network.lines.bus1.isin(buses.index), - 's_nom_max'] = float("inf") + 's_nom_max'] = line_max * network.lines.s_nom + + else: + network.lines.loc[network.lines.bus0.isin(buses.index) | + network.lines.bus1.isin(buses.index), + 's_nom_max'] = float("inf") if not 
network.transformers.empty: network.transformers.loc[network.transformers.bus0.isin( @@ -118,9 +189,17 @@ def extendable(network, args): network.transformers.loc[network.transformers.bus0.isin( buses.index) | network.transformers.bus1.isin( buses.index) ,'s_nom_min'] = network.transformers.s_nom - network.transformers.loc[network.transformers.bus0.isin( + + if not line_max==None: + network.transformers.loc[network.transformers.bus0.isin( buses.index) | network.transformers.bus1.isin( - buses.index) ,'s_nom_max'] = float("inf") + buses.index),'s_nom_max'] = \ + line_max * network.transformers.s_nom + + else: + network.transformers.loc[network.transformers.bus0.isin( + buses.index) | network.transformers.bus1.isin( + buses.index),'s_nom_max'] = float("inf") if not network.links.empty: network.links.loc[(network.links.bus0.isin(buses.index)) | @@ -129,13 +208,19 @@ def extendable(network, args): network.links.loc[(network.links.bus0.isin(buses.index)) | (network.links.bus1.isin(buses.index)), 'p_nom_min'] = network.links.p_nom - network.links.loc[(network.links.bus0.isin(buses.index)) | + + if not line_max==None: + network.links.loc[(network.links.bus0.isin(buses.index)) | + (network.links.bus1.isin(buses.index)), + 'p_nom_max'] = line_max * network.links.p_nom + + else: + network.links.loc[(network.links.bus0.isin(buses.index)) | (network.links.bus1.isin(buses.index)), 'p_nom_max'] = float("inf") - network = set_line_costs(network) - network = set_trafo_costs(network) - + network = set_line_costs(network, args) + network = set_trafo_costs(network, args) if 'transformers' in args['extendable']: network.transformers.s_nom_extendable = True @@ -156,6 +241,41 @@ def extendable(network, args): network.generators.p_nom_extendable = True network.generators.p_nom_min = network.generators.p_nom network.generators.p_nom_max = float("inf") + + if 'foreign_storage' in args['extendable']: + network.storage_units.p_nom_extendable[(network.storage_units.bus.isin( + 
network.buses.index[network.buses.country_code != 'DE'])) & ( + network.storage_units.carrier.isin( + ['battery_storage','hydrogen_storage'] ))] = True + + network.storage_units.loc[ + network.storage_units.p_nom_max.isnull(),'p_nom_max'] = \ + network.storage_units.p_nom + + network.storage_units.loc[ + (network.storage_units.carrier == 'battery_storage'), + 'capital_cost'] = network.storage_units.loc[( + network.storage_units.carrier == 'extendable_storage') + & (network.storage_units.max_hours == 6),'capital_cost'].max() + + network.storage_units.loc[ + (network.storage_units.carrier == 'hydrogen_storage'), + 'capital_cost'] = network.storage_units.loc[( + network.storage_units.carrier == 'extendable_storage') & + (network.storage_units.max_hours == 168),'capital_cost'].max() + + network.storage_units.loc[ + (network.storage_units.carrier == 'battery_storage'), + 'marginal_cost'] = network.storage_units.loc[( + network.storage_units.carrier == 'extendable_storage') + & (network.storage_units.max_hours == 6),'marginal_cost'].max() + + network.storage_units.loc[ + (network.storage_units.carrier == 'hydrogen_storage'), + 'marginal_cost'] = network.storage_units.loc[( + network.storage_units.carrier == 'extendable_storage') & + (network.storage_units.max_hours == 168),'marginal_cost'].max() + # Extension settings for extension-NEP 2035 scenarios if 'NEP Zubaunetz' in args['extendable']: @@ -163,13 +283,13 @@ def extendable(network, args): network.lines.loc[(network.lines.project != 'EnLAG') & ( network.lines.scn_name == 'extension_' + args['scn_extension'][i]), 's_nom_extendable'] = True - + network.transformers.loc[( network.transformers.project != 'EnLAG') & ( network.transformers.scn_name == ( 'extension_'+ args['scn_extension'][i])), 's_nom_extendable'] = True - + network.links.loc[network.links.scn_name == ( 'extension_' + args['scn_extension'][i] ), 'p_nom_extendable'] = True @@ -180,6 +300,11 @@ def extendable(network, args): 'extension_' + 
args['scn_extension'][i] ), 's_nom_extendable'] = True + network.lines.loc[network.lines.scn_name == ( + 'extension_' + args['scn_extension'][i] + ), 's_nom_max'] = network.lines.s_nom[network.lines.scn_name == ( + 'extension_' + args['scn_extension'][i])] + network.links.loc[network.links.scn_name == ( 'extension_' + args['scn_extension'][i] ), 'p_nom_extendable'] = True @@ -187,6 +312,11 @@ def extendable(network, args): network.transformers.loc[network.transformers.scn_name == ( 'extension_' + args['scn_extension'][i] ), 's_nom_extendable'] = True + + network.lines.loc[network.lines.scn_name == ( + 'extension_' + args['scn_extension'][i] + ), 'capital_cost'] = network.lines.capital_cost/\ + args['branch_capacity_factor']['eHV'] if 'overlay_lines' in args['extendable']: for i in range(len(args['scn_extension'])): @@ -201,16 +331,16 @@ def extendable(network, args): network.lines.loc[network.lines.scn_name == ( 'extension_' + args['scn_extension'][i]), 'capital_cost'] = network.lines.capital_cost + (2 * 14166) - + network.lines.s_nom_min[network.lines.s_nom_extendable == False] =\ network.lines.s_nom - + network.transformers.s_nom_min[network.transformers.s_nom_extendable == \ False] = network.transformers.s_nom - + network.lines.s_nom_max[network.lines.s_nom_extendable == False] =\ network.lines.s_nom - + network.transformers.s_nom_max[network.transformers.s_nom_extendable == \ False] = network.transformers.s_nom @@ -231,9 +361,9 @@ def extension_preselection(network, args, method, days = 3): Arguments set in appl.py method: str Choose method of selection: - 'extreme_situations' for remarkable timsteps - (e.g. minimal resiudual load) - 'snapshot_clustering' for snapshot clustering with number of days + 'extreme_situations' for remarkable timsteps + (e.g. 
minimal resiudual load) + 'snapshot_clustering' for snapshot clustering with number of days days: int Number of clustered days, only used when method = 'snapshot_clustering' @@ -320,3 +450,33 @@ def extension_preselection(network, args, method, days = 3): print("Time for first LOPF [min]:", round(z1st, 2)) return network + +def print_expansion_costs(network,args): + + ext_storage=network.storage_units[network.storage_units.p_nom_extendable] + ext_lines=network.lines[network.lines.s_nom_extendable] + ext_links=network.links[network.links.p_nom_extendable] + ext_trafos=network.transformers[network.transformers.s_nom_extendable] + + if not ext_storage.empty: + storage_costs=(ext_storage.p_nom_opt*ext_storage.capital_cost).sum() + + if not ext_lines.empty: + network_costs=((ext_lines.s_nom_opt-ext_lines.s_nom_min + )*ext_lines.capital_cost +\ + (ext_links.p_nom_opt-ext_links.p_nom_min + )*ext_links.capital_cost).sum() + + if not ext_trafos.empty: + network_costs=network_costs+((ext_trafos.s_nom_opt-ext_trafos.s_nom_min + )*ext_trafos.capital_cost).sum() + + if not ext_storage.empty: + print( + "Investment costs for all storage units in selected snapshots [EUR]:", + round(storage_costs,2)) + + if not ext_lines.empty: + print( + "Investment costs for all lines and transformers in selected snapshots [EUR]:", + round(network_costs,2)) diff --git a/etrago/tools/io.py b/etrago/tools/io.py index f53e9d6b..9b272e2f 100644 --- a/etrago/tools/io.py +++ b/etrago/tools/io.py @@ -377,7 +377,8 @@ def build_network(self, network=None, *args, **kwargs): df_series = self.series_fetch_by_relname(comp_t, col) # TODO: VMagPuSet is not implemented. 
- if timevarying_override and comp == 'Generator' and not df_series.empty: + if timevarying_override and comp == 'Generator' \ + and not df_series.empty: idx = df[df.former_dispatch == 'flexible'].index idx = [i for i in idx if i in df_series.columns] df_series.drop(idx, axis=1, inplace=True) @@ -452,13 +453,43 @@ def clear_results_db(session): def results_to_oedb(session, network, args, grid='hv', safe_results=False): - """Return results obtained from PyPSA to oedb""" + """Return results obtained from PyPSA to oedb + + Parameters + ---------- + session: + network : PyPSA network container + Holds topology of grid including results from powerflow analysis + args: dict + Settings from appl.py + grid: str + Choose voltage-level, currently only 'hv' implemented + safe_results: boolean + If it is set to 'True' the result set will be saved + to the versioned grid schema eventually apart from + being saved to the model_draft by a SQL-script. + ONLY set to True if you know what you are doing. + + """ + # Update generator_ids when k_means clustering to get integer ids + if args['network_clustering_kmeans'] != False: + new_index=pd.DataFrame(index = network.generators.index) + new_index['new']=range(len(network.generators)) + + for col in (network.generators_t): + if not network.generators_t[col].empty: + network.generators_t[col].columns =\ + new_index.new[network.generators_t[col].columns] + + network.generators.index = range(len(network.generators)) + # moved this here to prevent error when not using the mv-schema import datetime if grid.lower() == 'mv': print('MV currently not implemented') elif grid.lower() == 'hv': - from egoio.db_tables.model_draft import EgoGridPfHvResultBus as BusResult,\ + from egoio.db_tables.model_draft import\ + EgoGridPfHvResultBus as BusResult,\ EgoGridPfHvResultBusT as BusTResult,\ EgoGridPfHvResultStorage as StorageResult,\ EgoGridPfHvResultStorageT as StorageTResult,\ @@ -487,7 +518,8 @@ def results_to_oedb(session, network, args, 
grid='hv', safe_results=False): res_meta = ResultMeta() meta_misc = [] for arg, value in args.items(): - if arg not in dir(res_meta) and arg not in ['db', 'lpfile', 'results', 'export']: + if arg not in dir(res_meta) and arg not in ['db', 'lpfile', + 'results', 'export']: meta_misc.append([arg, str(value)]) res_meta.result_id = new_res_id @@ -515,14 +547,17 @@ def results_to_oedb(session, network, args, grid='hv', safe_results=False): new_source.name = network.generators.carrier[gen] session.add(new_source) session.commit() - sources = pd.read_sql(session.query(Source).statement, session.bind) + sources = pd.read_sql( + session.query(Source).statement, session.bind) try: old_source_id = int( - sources.source_id[sources.name == network.generators.carrier[gen]]) + sources.source_id[ + sources.name == network.generators.carrier[gen]]) network.generators.set_value(gen, 'source', int(old_source_id)) except: print( - 'Source ' + network.generators.carrier[gen] + ' is not in the source table!') + 'Source ' + network.generators.carrier[gen] + + ' is not in the source table!') for stor in network.storage_units.index: if network.storage_units.carrier[stor] not in sources.name.values: new_source = Source() @@ -531,14 +566,17 @@ def results_to_oedb(session, network, args, grid='hv', safe_results=False): new_source.name = network.storage_units.carrier[stor] session.add(new_source) session.commit() - sources = pd.read_sql(session.query(Source).statement, session.bind) + sources = pd.read_sql( + session.query(Source).statement, session.bind) try: old_source_id = int( - sources.source_id[sources.name == network.storage_units.carrier[stor]]) + sources.source_id[ + sources.name == network.storage_units.carrier[stor]]) network.storage_units.set_value(stor, 'source', int(old_source_id)) except: print( - 'Source ' + network.storage_units.carrier[stor] + ' is not in the source table!') + 'Source ' + network.storage_units.carrier[stor] + + ' is not in the source table!') whereismyindex = 
{BusResult: network.buses.index, LoadResult: network.loads.index, @@ -573,8 +611,10 @@ def results_to_oedb(session, network, args, grid='hv', safe_results=False): 'soc_cyclic': 'cyclic_state_of_charge', 'soc_initial': 'state_of_charge_initial'} - ormclasses = [BusResult, LoadResult, LineResult, TransformerResult, GeneratorResult, StorageResult, - BusTResult, LoadTResult, LineTResult, TransformerTResult, GeneratorTResult, StorageTResult] + ormclasses = [BusResult, LoadResult, LineResult, TransformerResult, + GeneratorResult, StorageResult, BusTResult, LoadTResult, + LineTResult, TransformerTResult, GeneratorTResult, + StorageTResult] for ormclass in ormclasses: for index in whereismyindex[ormclass]: @@ -595,7 +635,8 @@ def results_to_oedb(session, network, args, grid='hv', safe_results=False): if col == 'soc_set': try: setattr(myinstance, col, getattr( - whereismydata[ormclass], 'state_of_charge_set')[index].tolist()) + whereismydata[ormclass], + 'state_of_charge_set')[index].tolist()) except: pass else: @@ -612,7 +653,8 @@ def results_to_oedb(session, network, args, grid='hv', safe_results=False): if col == 'soc_cyclic': try: setattr(myinstance, col, bool( - whereismydata[ormclass].loc[index, new_to_old_name[col]])) + whereismydata[ormclass].loc[index, + new_to_old_name[col]])) except: pass elif 'Storage' in str(ormclass) and col == 'dispatch': @@ -624,7 +666,8 @@ def results_to_oedb(session, network, args, grid='hv', safe_results=False): else: try: setattr( - myinstance, col, whereismydata[ormclass].loc[index, new_to_old_name[col]]) + myinstance, col, whereismydata[ormclass].\ + loc[index, new_to_old_name[col]]) except: pass elif col in ['s_nom_extendable', 'p_nom_extendable']: @@ -659,36 +702,37 @@ def run_sql_script(conn, scriptname='results_md2grid.sql'): -def extension (network, session, version, scn_extension, start_snapshot, end_snapshot, **kwargs): - ''' - Function that adds an additional network to the existing network container. 
- The new network can include every PyPSA-component (e.g. buses, lines, links). - To connect it to the existing network, transformers are needed. +def extension (network, session, version, scn_extension, start_snapshot, + end_snapshot, **kwargs): + """ + Function that adds an additional network to the existing network container. + The new network can include every PyPSA-component (e.g. buses, lines, links). + To connect it to the existing network, transformers are needed. - All components and its timeseries of the additional scenario need to be inserted in the fitting 'model_draft.ego_grid_pf_hv_extension_' table. - The scn_name in the tables have to be labled with 'extension_' + scn_name (e.g. 'extension_nep2035'). + All components and its timeseries of the additional scenario need to be inserted in the fitting 'model_draft.ego_grid_pf_hv_extension_' table. + The scn_name in the tables have to be labled with 'extension_' + scn_name (e.g. 'extension_nep2035'). - Until now, the tables include three additional scenarios: - 'nep2035_confirmed': all new lines and needed transformers planed in the 'Netzentwicklungsplan 2035' (NEP2035) that have been confirmed by the Bundesnetzagentur (BNetzA) + Until now, the tables include three additional scenarios: + 'nep2035_confirmed': all new lines and needed transformers planed in the 'Netzentwicklungsplan 2035' (NEP2035) that have been confirmed by the Bundesnetzagentur (BNetzA) - 'nep2035_b2': all new lines and needed transformers planned in the NEP 2035 in the scenario 2035 B2 + 'nep2035_b2': all new lines and needed transformers planned in the NEP 2035 in the scenario 2035 B2 - 'BE_NO_NEP 2035': DC-lines and transformers to connect the upcomming electrical-neighbours Belgium and Norway - Generation, loads and its timeseries in Belgium and Norway for scenario 'NEP 2035' + 'BE_NO_NEP 2035': DC-lines and transformers to connect the upcomming electrical-neighbours Belgium and Norway + Generation, loads and its timeseries in 
Belgium and Norway for scenario 'NEP 2035' - Input - ----- + Parameters + ----- network : The existing network container (e.g. scenario 'NEP 2035') session : session-data overlay_scn_name : Name of the additional scenario (WITHOUT 'extension_') start_snapshot, end_snapshot: Simulation time - Output - ------ + Returns + ------ network : Network container including existing and additional network - ''' + """ if version is None: ormcls_prefix = 'EgoGridPfHvExtension' @@ -719,86 +763,90 @@ def extension (network, session, version, scn_extension, start_snapshot, end_sna return network -def decommissioning(network, session, version, scn_decommissioning, **kwargs): - ''' - Function that removes components in a decommissioning-scenario from the existing network container. - Currently, only lines can be decommissioned. +def decommissioning(network, session, args, **kwargs): + """ + Function that removes components in a decommissioning-scenario from + the existing network container. + Currently, only lines can be decommissioned. - All components of the decommissioning scenario need to be inserted in the fitting 'model_draft.ego_grid_pf_hv_extension_' table. - The scn_name in the tables have to be labled with 'decommissioning_' + scn_name (e.g. 'decommissioning_nep2035'). + All components of the decommissioning scenario need to be inserted in + the fitting 'model_draft.ego_grid_pf_hv_extension_' table. + The scn_name in the tables have to be labled with 'decommissioning_' + + scn_name (e.g. 'decommissioning_nep2035'). - Input - ----- - network : The existing network container (e.g. scenario 'NEP 2035') - session : session-data - overlay_scn_name : Name of the decommissioning scenario (WITHOUT 'decommissioning_') + Parameters + ----- + network : The existing network container (e.g. 
scenario 'NEP 2035') + session : session-data + overlay_scn_name : Name of the decommissioning scenario - Output - ------ - network : Network container including decommissioning + Returns + ------ + network : Network container including decommissioning - ''' - """components = ['Line', 'Link'] - for comp in components: - if version == None: - ormclass = getattr(import_module('egoio.db_tables.model_draft'), ('EgoGridPfHvExtension' + comp)) - else: - ormclass = getattr(import_module('egoio.db_tables.grid'), 'EgoPfHvExtension' + comp) - - query = session.query(ormclass).filter(ormclass.scn_name ==\ - 'decommissioning_' + scn_decommissioning) - - df_decommisionning = pd.read_sql(query.statement, - session.bind, - index_col=comp.lower() + '_id')) - - df_decommisionning.index = df_decommisionning.index.astype(str) - - Wie kann network.lines, network.links in Schleife geschrieben werden? - - """ - - if version == None: - ormclass = getattr(import_module('egoio.db_tables.model_draft'), 'EgoGridPfHvExtensionLine') + """ + + if args['gridversion'] == None: + ormclass = getattr(import_module('egoio.db_tables.model_draft'), + 'EgoGridPfHvExtensionLine') else: - ormclass = getattr(import_module('egoio.db_tables.grid'), 'EgoPfHvExtensionLine') - - + ormclass = getattr(import_module('egoio.db_tables.grid'), + 'EgoPfHvExtensionLine') + query = session.query(ormclass).filter( - ormclass.scn_name == 'decommissioning_' + scn_decommissioning) - - + ormclass.scn_name == 'decommissioning_' + + args['scn_decommissioning']) + df_decommisionning = pd.read_sql(query.statement, session.bind, index_col='line_id') df_decommisionning.index = df_decommisionning.index.astype(str) - + + for idx, row in network.lines.iterrows(): + if (row['s_nom_min'] !=0) & ( + row['scn_name'] =='extension_' + args['scn_decommissioning']): + v_nom_dec = df_decommisionning['v_nom'][( + df_decommisionning.project == row['project']) & ( + df_decommisionning.project_id == row['project_id'])] + + if (v_nom_dec == 
110).any(): + network.lines.s_nom_min[network.lines.index == idx]\ + = args['branch_capacity_factor']['HV'] *\ + network.lines.s_nom_min + + else: + network.lines.s_nom_min[network.lines.index == idx] =\ + args['branch_capacity_factor']['eHV'] *\ + network.lines.s_nom_min + ### Drop decommissioning-lines from existing network - network.lines = network.lines[~network.lines.index.isin(df_decommisionning.index)] + network.lines = network.lines[~network.lines.index.isin( + df_decommisionning.index)] return network def distance(x0, x1, y0, y1): - ''' - Function that calculates the square of the distance between two points. + """ + Function that calculates the square of the distance between two points. - Input - ----- - x0: x - coordinate of point 0 - x1: x - coordinate of point 1 - y0: y - coordinate of point 0 - y1: y - coordinate of point 1 + Parameters + ----- + x0: x - coordinate of point 0 + x1: x - coordinate of point 1 + y0: y - coordinate of point 0 + y1: y - coordinate of point 1 - Output - ------ - distance : square of distance + Returns + ------ + distance : float + square of distance - ''' + """ # Calculate square of the distance between two points (Pythagoras) distance = (x1.values- x0.values)*(x1.values- x0.values)\ + (y1.values- y0.values)*(y1.values- y0.values) @@ -806,26 +854,30 @@ def distance(x0, x1, y0, y1): def calc_nearest_point(bus1, network): - ''' - Function that finds the geographical nearest point in a network from a given bus. + """ + Function that finds the geographical nearest point in a network from a given bus. 
- Input - ----- - bus1: id of bus - network: network container including the comparable buses + Parameters + ----- + bus1: float + id of bus + network: Pypsa network container + network including the comparable buses - Output - ------ - bus0 : bus_id of nearest point + Returns + ------ + bus0 : float + bus_id of nearest point - ''' + """ bus1_index = network.buses.index[network.buses.index == bus1] forbidden_buses = np.append( - bus1_index.values, network.lines.bus1[network.lines.bus0 == bus1].values) + bus1_index.values, network.lines.bus1[ + network.lines.bus0 == bus1].values) forbidden_buses = np.append( forbidden_buses, network.lines.bus0[network.lines.bus1 == bus1].values) @@ -840,7 +892,8 @@ def calc_nearest_point(bus1, network): y0 = network.buses.y[network.buses.index.isin(bus1_index)] - comparable_buses = network.buses[~network.buses.index.isin(forbidden_buses)] + comparable_buses = network.buses[~network.buses.index.isin( + forbidden_buses)] x1 = comparable_buses.x @@ -851,8 +904,8 @@ def calc_nearest_point(bus1, network): min_distance = distance.min() - bus0 = comparable_buses[(((x1.values - x0.values)*(x1.values - x0.values) + ( - y1.values - y0.values)*(y1.values - y0.values)) == min_distance)] + bus0 = comparable_buses[(((x1.values - x0.values)*(x1.values - x0.values + ) + (y1.values - y0.values)*(y1.values - y0.values)) == min_distance)] bus0 = bus0.index[bus0.index == bus0.index.max()] bus0 = ''.join(bus0.values) diff --git a/etrago/tools/plot.py b/etrago/tools/plot.py index 119c8ef3..cdf84003 100644 --- a/etrago/tools/plot.py +++ b/etrago/tools/plot.py @@ -105,6 +105,7 @@ def plot_line_loading( Plots line loading as a colored heatmap. Line loading is displayed as relative to nominal capacity in %. 
+ Parameters ---------- network : PyPSA network container @@ -220,7 +221,7 @@ def plot_line_loading( x_coords_lines[i] = network.buses.loc[str( network.lines.iloc[i, 2]), 'x'] color = colors[i] - if (x_coords_lines[i] == path[i][0][0] and loading_c[i] >= 0): + if (x_coords_lines[i] == path[i][0][0] and load_lines_rel[i] >= 0): arrowprops = dict(arrowstyle="->", color=color) else: arrowprops = dict(arrowstyle="<-", color=color) @@ -248,6 +249,7 @@ def plot_line_loading_diff(networkA, networkB, timestep=0): Positive values mean that line loading with switches is bigger than without Plot switches as small dots + Parameters ---------- networkA : PyPSA network container @@ -356,7 +358,8 @@ def shiftedColorMap( -def network_extension(network, method = 'rel', filename=None, boundaries=[]): +def network_expansion(network, method = 'rel', ext_min=0.1, + ext_width=False, filename=None, boundaries=[]): """Plot relative or absolute network extension of AC- and DC-lines. Parameters @@ -366,6 +369,11 @@ def network_extension(network, method = 'rel', filename=None, boundaries=[]): method: str Choose 'rel' for extension relative to s_nom and 'abs' for absolute extensions. + ext_min: float + Choose minimum relative line extension shown in plot in p.u.. + ext_width: float or bool + Choose if line_width respects line extension. Turn off with 'False' or + set linear factor to decremise extension line_width. 
filename: str or None Save figure in this direction boundaries: array @@ -377,9 +385,23 @@ def network_extension(network, method = 'rel', filename=None, boundaries=[]): overlay_network = network.copy() overlay_network.lines = overlay_network.lines[ - overlay_network.lines.s_nom_extendable] + overlay_network.lines.s_nom_extendable & (( + overlay_network.lines.s_nom_opt - + overlay_network.lines.s_nom_min) / + overlay_network.lines.s_nom >= ext_min)] overlay_network.links = overlay_network.links[ - overlay_network.links.p_nom_extendable] + overlay_network.links.p_nom_extendable & (( + overlay_network.links.p_nom_opt - + overlay_network.links.p_nom_min)/ + overlay_network.links.p_nom >= ext_min)] + + for i, row in overlay_network.links.iterrows(): + linked = overlay_network.links[(row['bus1'] == + overlay_network.links.bus0) & ( + row['bus0'] == overlay_network.links.bus1)] + if not linked.empty: + if row['p_nom_opt'] < linked.p_nom_opt.values[0]: + overlay_network.links.p_nom_opt[i] = linked.p_nom_opt.values[0] array_line = [['Line'] * len(overlay_network.lines), overlay_network.lines.index] @@ -413,7 +435,7 @@ def network_extension(network, method = 'rel', filename=None, boundaries=[]): extension = extension_lines.append(extension_links) - + # Plot whole network in backgroud of plot network.plot( line_colors=pd.Series("grey", index = [['Line'] * len( @@ -421,19 +443,25 @@ def network_extension(network, method = 'rel', filename=None, boundaries=[]): pd.Series("grey", index = [['Link'] * len(network.links), network.links.index])), bus_sizes=0, - line_widths=pd.Series(0.55, index = [['Line'] * len(network.lines), + line_widths=pd.Series(0.5, index = [['Line'] * len(network.lines), network.lines.index]).append( - pd.Series(0.7, index = [['Link'] * len(network.links), + pd.Series(0.55, index = [['Link'] * len(network.links), network.links.index]))) + if not ext_width: + line_widths= pd.Series(0.8, index = array_line).append( + pd.Series(0.8, index = array_link)) + + 
else: + line_widths= 0.5 + (extension / ext_width) + ll = overlay_network.plot( line_colors=extension, line_cmap=cmap, bus_sizes=0, - title="Optimized AC- and DC-line extension", - line_widths= pd.Series(0.7, index = array_line).append( - pd.Series(0.75, index = array_link))) - + title="Optimized AC- and DC-line expansion", + line_widths=line_widths) + if not boundaries: v = np.linspace(min(extension), max(extension), 101) boundaries = [min(extension), max(extension)] @@ -441,22 +469,43 @@ def network_extension(network, method = 'rel', filename=None, boundaries=[]): else: v = np.linspace(boundaries[0], boundaries[1], 101) - cb = plt.colorbar(ll[1], boundaries=v, + if not extension_links.empty: + cb_Link = plt.colorbar(ll[2], boundaries=v, ticks=v[0:101:10]) + cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1]) + + cb_Link.remove() + + cb = plt.colorbar(ll[1], boundaries=v, + ticks=v[0:101:10], fraction=0.046, pad=0.04) cb.set_clim(vmin=boundaries[0], vmax=boundaries[1]) if method == 'rel': - cb.set_label('line extension relative to s_nom in %') + cb.set_label('line expansion relative to s_nom in %') if method == 'abs': - cb.set_label('line extension in MW') + cb.set_label('line expansion in MW') if filename is None: plt.show() else: plt.savefig(filename) plt.close() -def network_extension_diff (networkA, networkB, filename=None, boundaries=[]): +def network_expansion_diff (networkA, networkB, filename=None, boundaries=[]): + """Plot relative network expansion derivation of AC- and DC-lines. 
+ + Parameters + ---------- + networkA: PyPSA network container + Holds topology of grid including results from powerflow analysis + networkB: PyPSA network container + Holds topology of grid including results from powerflow analysis + filename: str or None + Save figure in this direction + boundaries: array + Set boundaries of heatmap axis + + """ cmap = plt.cm.jet @@ -484,20 +533,25 @@ def network_extension_diff (networkA, networkB, filename=None, boundaries=[]): bus_sizes=0, title="Derivation of AC- and DC-line extension", line_widths=2) - - if not boundaries: - v = np.linspace(min(extension).round(0), max(extension).round(0), 101) - + + if not boundaries: + v = np.linspace(min(extension), max(extension), 101) + boundaries = [min(extension).round(0), max(extension).round(0)] + else: - v = np.linspace(boundaries[0], boundaries[1], 101) - - cb = plt.colorbar(ll[1], boundaries=v, + v = np.linspace(boundaries[0], boundaries[1], 101) + + if not extension_links.empty: + cb_Link = plt.colorbar(ll[2], boundaries=v, ticks=v[0:101:10]) - cb_Link = plt.colorbar(ll[2], boundaries=v, - ticks=v[0:101:10]) - - cb_Link.set_clim(vmin=min(v), vmax=max(v)) - cb_Link.remove() + cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1]) + + cb_Link.remove() + + cb = plt.colorbar(ll[1], boundaries=v, + ticks=v[0:101:10], fraction=0.046, pad=0.04) + + cb.set_clim(vmin=boundaries[0], vmax=boundaries[1]) cb.set_label('line extension derivation in %') if filename is None: @@ -507,11 +561,21 @@ def network_extension_diff (networkA, networkB, filename=None, boundaries=[]): plt.close() -def full_load_hours( - network, - boundaries=[0, 4830], - filename=None, - two_cb=False): +def full_load_hours(network, boundaries=[], filename=None, two_cb=False): + """Plot loading of lines in equivalten full load hours. 
+ + Parameters + ---------- + network: PyPSA network container + Holds topology of grid including results from powerflow analysis + filename: str or None + Save figure in this direction + boundaries: array + Set boundaries of heatmap axis + two_cb: bool + Choose if an extra colorbar for DC-lines is plotted + + """ cmap = plt.cm.jet array_line = [['Line'] * len(network.lines), network.lines.index] @@ -558,7 +622,15 @@ def full_load_hours( plt.savefig(filename) plt.close() -def plot_q_flows(network ): +def plot_q_flows(network): + """Plot maximal reactive line load. + + Parameters + ---------- + network: PyPSA network container + Holds topology of grid including results from powerflow analysis + + """ cmap_line = plt.cm.jet q_flows_max = abs(network.lines_t.q0.abs().max()/(network.lines.s_nom)) @@ -574,6 +646,21 @@ def plot_q_flows(network ): def max_load(network, boundaries=[], filename=None, two_cb=False): + + """Plot maximum loading of each line. + + Parameters + ---------- + network: PyPSA network container + Holds topology of grid including results from powerflow analysis + filename: str or None + Save figure in this direction + boundaries: array + Set boundaries of heatmap axis + two_cb: bool + Choose if an extra colorbar for DC-lines is plotted + + """ cmap_line = plt.cm.jet cmap_link = plt.cm.jet @@ -630,6 +717,22 @@ def max_load(network, boundaries=[], filename=None, two_cb=False): def load_hours(network, min_load=0.9, max_load=1, boundaries=[0, 8760]): + + """Plot number of hours with line loading in selected range. 
+ + Parameters + ---------- + network: PyPSA network container + Holds topology of grid including results from powerflow analysis + min_load: float + Choose lower bound of relative load + max_load: float + Choose upper bound of relative load + boundaries: array + Set boundaries of heatmap axis + + """ + cmap_line = plt.cm.jet cmap_link = plt.cm.jet array_line = [['Line'] * len(network.lines), network.lines.index] @@ -695,23 +798,34 @@ def plot_residual_load(network): """ renewables = network.generators[ - network.generators.former_dispatch == 'variable'] - renewables_t = network.generators.p_nom.mul(network.snapshot_weightings, - axis=0)[renewables.index] * \ - network.generators_t.p_max_pu[renewables.index] + network.generators.carrier.isin(['wind_onshore', 'wind_offshore', + 'solar', 'run_of_river', + 'wind'])] + renewables_t = network.generators.p_nom[renewables.index] * \ + network.generators_t.p_max_pu[renewables.index].mul( + network.snapshot_weightings, axis=0) load = network.loads_t.p_set.mul(network.snapshot_weightings, axis=0).\ sum(axis=1) all_renew = renewables_t.sum(axis=1) residual_load = load - all_renew - residual_load.plot( + plot = residual_load.plot( + title = 'Residual load', drawstyle='steps', lw=2, color='red', - legend='residual load') + legend=False) + plot.set_ylabel("MW") # sorted curve sorted_residual_load = residual_load.sort_values( ascending=False).reset_index() - sorted_residual_load.plot(drawstyle='steps', lw=1.4, color='red') + plot1 = sorted_residual_load.plot( + title='Sorted residual load', + drawstyle='steps', + lw=2, + color='red', + legend=False) + plot1.set_ylabel("MW") + def plot_stacked_gen(network, bus=None, resolution='GW', filename=None): @@ -781,6 +895,11 @@ def plot_stacked_gen(network, bus=None, resolution='GW', filename=None): ax.set_ylabel(resolution) ax.set_xlabel("") + + + matplotlib.rcParams.update({'font.size': 22}) + + if filename is None: plt.show() @@ -885,11 +1004,29 @@ def plot_voltage(network, 
boundaries=[]): def curtailment(network, carrier='solar', filename=None): + """ + Plot curtailment of selected carrier + + + Parameters + ---------- + network : PyPSA network container + Holds topology of grid including results from powerflow analysis + carrier: str + Plot curtailemt of this carrier + filename: str or None + Save figure in this direction + + Returns + ------- + Plot + """ p_by_carrier = network.generators_t.p.groupby\ (network.generators.carrier, axis=1).sum() capacity = network.generators.groupby("carrier").sum().at[carrier, "p_nom"] - p_available = network.generators_t.p_max_pu.multiply(network.generators["p_nom"]) + p_available = network.generators_t.p_max_pu.multiply( + network.generators["p_nom"]) p_available_by_carrier = p_available.groupby( network.generators.carrier, axis=1).sum() p_curtailed_by_carrier = p_available_by_carrier - p_by_carrier @@ -926,6 +1063,7 @@ def storage_distribution(network, scaling=1, filename=None): Plot storage distribution as circles on grid nodes Displays storage size and distribution in network. + Parameters ---------- network : PyPSA network container @@ -992,6 +1130,7 @@ def storage_expansion(network, basemap=True, scaling=1, filename=None): """ Plot storage distribution as circles on grid nodes Displays storage size and distribution in network. + Parameters ---------- network : PyPSA network container @@ -1005,18 +1144,26 @@ def storage_expansion(network, basemap=True, scaling=1, filename=None): 'extendable_storage'] batteries = stores[stores.max_hours == 6] hydrogen = stores[stores.max_hours == 168] - storage_distribution = network.storage_units.p_nom_opt[stores.index].groupby( - network.storage_units.bus).sum().reindex(network.buses.index, fill_value=0.) - battery_distribution = network.storage_units.p_nom_opt[batteries.index].groupby( - network.storage_units.bus).sum().reindex(network.buses.index, fill_value=0.) 
- hydrogen_distribution = network.storage_units.p_nom_opt[hydrogen.index].groupby( - network.storage_units.bus).sum().reindex(network.buses.index, fill_value=0.) + storage_distribution =\ + network.storage_units.p_nom_opt[stores.index].groupby( + network.storage_units.bus).sum().reindex( + network.buses.index, fill_value=0.) + battery_distribution =\ + network.storage_units.p_nom_opt[batteries.index].groupby( + network.storage_units.bus).sum().reindex( + network.buses.index, fill_value=0.) + hydrogen_distribution =\ + network.storage_units.p_nom_opt[hydrogen.index].groupby( + network.storage_units.bus).sum().reindex( + network.buses.index, fill_value=0.) sbatt = network.storage_units.index[ - (network.storage_units.p_nom_opt > 1) & (network.storage_units.capital_cost > 10) & ( + (network.storage_units.p_nom_opt > 1) & ( + network.storage_units.capital_cost > 10) & ( network.storage_units.max_hours == 6)] shydr = network.storage_units.index[ - (network.storage_units.p_nom_opt > 1) & (network.storage_units.capital_cost > 10) & ( + (network.storage_units.p_nom_opt > 1) & ( + network.storage_units.capital_cost > 10) & ( network.storage_units.max_hours == 168)] fig, ax = plt.subplots(1, 1) @@ -1045,15 +1192,22 @@ def storage_expansion(network, basemap=True, scaling=1, filename=None): battery_distribution = battery_distribution / 1000 hydrogen_distribution = hydrogen_distribution / 1000 - if network.storage_units.p_nom_opt[sbatt].sum() < 1 and network.storage_units.p_nom_opt[shydr].sum() < 1: + if network.storage_units.p_nom_opt[sbatt].sum() < 1 and\ + network.storage_units.p_nom_opt[shydr].sum() < 1: print("No storage unit to plot") - elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and network.storage_units.p_nom_opt[shydr].sum() < 1: - network.plot(bus_sizes=battery_distribution * scaling, bus_colors='orangered', ax=ax, line_widths=0.3) - elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and network.storage_units.p_nom_opt[shydr].sum() > 1: - 
network.plot(bus_sizes=hydrogen_distribution * scaling, bus_colors='teal', ax=ax, line_widths=0.3) + elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and\ + network.storage_units.p_nom_opt[shydr].sum() < 1: + network.plot(bus_sizes=battery_distribution * scaling, + bus_colors='orangered', ax=ax, line_widths=0.3) + elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and\ + network.storage_units.p_nom_opt[shydr].sum() > 1: + network.plot(bus_sizes=hydrogen_distribution * scaling, + bus_colors='teal', ax=ax, line_widths=0.3) else: - network.plot(bus_sizes=battery_distribution * scaling, bus_colors='orangered', ax=ax, line_widths=0.3) - network.plot(bus_sizes=hydrogen_distribution * scaling, bus_colors='teal', ax=ax, line_widths=0.3) + network.plot(bus_sizes=battery_distribution * scaling, + bus_colors='orangered', ax=ax, line_widths=0.3) + network.plot(bus_sizes=hydrogen_distribution * scaling, + bus_colors='teal', ax=ax, line_widths=0.3) if basemap and basemap_present: x = network.buses["x"] @@ -1074,7 +1228,9 @@ def storage_expansion(network, basemap=True, scaling=1, filename=None): plt.scatter([], [], c='orangered', s=msd_max_bat * scaling, label='= ' + str(round(msd_max_bat, 0)) + LabelUnit + ' battery storage') plt.legend(scatterpoints=1, labelspacing=1, title='Storage size and technology', borderpad=1.3, loc=2) + ax.set_title("Storage expansion") + if filename is None: plt.show() else: @@ -1093,6 +1249,8 @@ def gen_dist( filename=None): """ Generation distribution + + Parameters ---------- network : PyPSA network container Holds topology of grid including results from powerflow analysis @@ -1169,6 +1327,8 @@ def gen_dist_diff( is bigger with switches than without Blue colors mean that the generation at a location is smaller with switches than without + + Parameters ---------- networkA : PyPSA network container Holds topology of grid with switches @@ -1252,6 +1412,7 @@ def gen_dist( """ Generation distribution + Parameters ---------- network : PyPSA 
network container Holds topology of grid including results from powerflow analysis @@ -1486,6 +1647,161 @@ def nodal_production_balance( plt.close() return + +def storage_p_soc(network, mean='1H', filename = None): + """ + Plots the dispatch and state of charge (SOC) of extendable storages. + + Parameters + ---------- + network : PyPSA network container + Holds topology of grid including results from powerflow analysis + mean : str + Defines over how many snapshots the p and soc values will averaged. + filename : path to folder + + """ + + sbatt = network.storage_units.index[(network.storage_units.p_nom_opt > 1) + & (network.storage_units.capital_cost > 10) & + (network.storage_units.max_hours == 6)] + shydr = network.storage_units.index[(network.storage_units.p_nom_opt > 1) + & (network.storage_units.capital_cost > 10) & + (network.storage_units.max_hours == 168)] + + cap_batt = (network.storage_units.max_hours[sbatt] * + network.storage_units.p_nom_opt[sbatt]).sum() + cap_hydr = (network.storage_units.max_hours[shydr] * + network.storage_units.p_nom_opt[shydr]).sum() + + fig, ax = plt.subplots(1, 1) + + if network.storage_units.p_nom_opt[sbatt].sum() < 1 and \ + network.storage_units.p_nom_opt[shydr].sum() < 1: + print("No storage unit to plot") + + elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and \ + network.storage_units.p_nom_opt[shydr].sum() < 1: + + (network.storage_units_t.p[sbatt].resample(mean).mean().sum(axis=1) / \ + network.storage_units.p_nom_opt[sbatt].sum()).plot( + ax=ax, label="Battery dispatch", color='orangered') + # instantiate a second axes that shares the same x-axis + ax2 = ax.twinx() + ((network.storage_units_t.state_of_charge[sbatt].resample(mean).\ + mean().sum(axis=1) / cap_batt)*100).plot(ax=ax2, + label="Battery state of charge", color='blue') + elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and\ + network.storage_units.p_nom_opt[shydr].sum() > 1: + (network.storage_units_t.p[shydr].resample(mean).mean().sum(axis=1) /\ 
+ network.storage_units.p_nom_opt[shydr].sum()).plot( + ax=ax, label="Hydrogen dispatch", color='teal') + # instantiate a second axes that shares the same x-axis + ax2 = ax.twinx() + ((network.storage_units_t.state_of_charge[shydr].resample(mean).\ + mean().sum(axis=1) / cap_hydr)*100).plot( + ax=ax2, label="Hydrogen state of charge", color='green') + else: + (network.storage_units_t.p[sbatt].resample(mean).mean().sum(axis=1) / \ + network.storage_units.p_nom_opt[sbatt].sum()).plot( + ax=ax, label="Battery dispatch", color='orangered') + + (network.storage_units_t.p[shydr].resample(mean).mean().sum(axis=1) /\ + network.storage_units.p_nom_opt[shydr].sum()).plot( + ax=ax, label="Hydrogen dispatch", color='teal') + # instantiate a second axes that shares the same x-axis + ax2 = ax.twinx() + ((network.storage_units_t.state_of_charge[shydr].resample(mean).\ + mean().sum(axis=1) / cap_hydr)*100).plot( + ax=ax2, label="Hydrogen state of charge", color='green') + + ((network.storage_units_t.state_of_charge[sbatt].resample(mean).\ + mean().sum(axis=1) / cap_batt)*100).plot( + ax=ax2, label="Battery state of charge", color='blue') + + ax.set_xlabel("") + ax.set_ylabel("Storage dispatch in p.u. 
\n <- charge - discharge ->") + ax2.set_ylabel("Storage state of charge in % ") + ax2.set_ylim([0, 100]) + ax.set_ylim([-1,1]) + ax.legend(loc=2) + ax2.legend(loc=1) + ax.set_title("Storage dispatch and state of charge") + + + if filename is None: + plt.show() + else: + plt.savefig(filename) + plt.close() + + return + + +def storage_soc_sorted(network, filename = None): + """ + Plots the soc (state-pf-charge) of extendable storages + + Parameters + ---------- + network : PyPSA network container + Holds topology of grid including results from powerflow analysis + + filename : path to folder + + """ + sbatt = network.storage_units.index[(network.storage_units.p_nom_opt>1) & + (network.storage_units.capital_cost>10) & + (network.storage_units.max_hours==6)] + shydr = network.storage_units.index[(network.storage_units.p_nom_opt>1) & + (network.storage_units.capital_cost>10) + & (network.storage_units.max_hours==168)] + + cap_batt = (network.storage_units.max_hours[sbatt] * + network.storage_units.p_nom_opt[sbatt]).sum() + cap_hydr = (network.storage_units.max_hours[shydr] * + network.storage_units.p_nom_opt[shydr]).sum() + + fig, ax = plt.subplots(1, 1) + + if network.storage_units.p_nom_opt[sbatt].sum() < 1 and \ + network.storage_units.p_nom_opt[shydr].sum() < 1: + print("No storage unit to plot") + elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and \ + network.storage_units.p_nom_opt[shydr].sum() < 1: + (network.storage_units_t.p[sbatt].sum(axis=1).sort_values( + ascending=False).reset_index() / \ + network.storage_units.p_nom_opt[sbatt].sum())[0].plot( + ax=ax, label="Battery storage", color='orangered') + elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and \ + network.storage_units.p_nom_opt[shydr].sum() > 1: + (network.storage_units_t.p[shydr].sum(axis=1).sort_values( + ascending=False).reset_index() / \ + network.storage_units.p_nom_opt[shydr].sum())[0].plot( + ax=ax, label="Hydrogen storage", color='teal') + else: + 
(network.storage_units_t.p[sbatt].sum(axis=1).sort_values( + ascending=False).reset_index() / \ + network.storage_units.p_nom_opt[sbatt].sum())[0].plot( + ax=ax, label="Battery storage", color='orangered') + (network.storage_units_t.p[shydr].sum(axis=1).sort_values( + ascending=False).reset_index() / \ + network.storage_units.p_nom_opt[shydr].sum())[0].plot( + ax=ax, label="Hydrogen storage", color='teal') + + ax.set_xlabel("") + ax.set_ylabel("Storage dispatch in p.u. \n <- charge - discharge ->") + ax.set_ylim([-1.05,1.05]) + ax.legend() + ax.set_title("Sorted duration curve of storage dispatch") + + if filename is None: + plt.show() + else: + plt.savefig(filename,figsize=(3,4),bbox_inches='tight') + plt.close() + + return if __name__ == '__main__': diff --git a/etrago/tools/snapshot_clustering.py b/etrago/tools/snapshot_clustering.py deleted file mode 100644 index 10335b37..00000000 --- a/etrago/tools/snapshot_clustering.py +++ /dev/null @@ -1,298 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2016-2018 Flensburg University of Applied Sciences, -# Europa-Universität Flensburg, -# Centre for Sustainable Energy Systems, -# DLR-Institute for Networked Energy Systems - -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. - -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -# File description -""" -Please add Header! 
-""" -import pandas as pd -import os -if 'READTHEDOCS' not in os.environ: - import matplotlib.pyplot as plt - import numpy as np - from scipy.cluster.hierarchy import dendrogram - - -############################################################################### -compare = pd.read_csv( - ('ward-results/med-cost-high-renew/daily365/' - 'storage_units-state_of_charge.csv'), - index_col=[0], - parse_dates=[0]).sum( - axis=1) -lst = ['6', '12', '24', '42'] - -# compare = pd.DataFrame() -for i in lst: - x = pd.read_csv( - 'ward-results/med-cost-high-renew/weekly' + - i + - '/storage_units-state_of_charge.csv', - index_col=[0], - parse_dates=[0]).sum( - axis=1) - w = pd.read_csv( - 'ward-results/med-cost-high-renew/weekly' + - i + - '/snapshots.csv', - index_col=[0], - parse_dates=[0]).weightings - l = pd.read_csv( - 'ward-results/med-cost-high-renew/weekly' + - i + - '/loads-p.csv', - index_col=[0], - parse_dates=[0]) - x = x / w - compare = pd.concat([compare, x], axis=1) - -compare.columns = ['365'] + lst -# compare[compare.isnull()] = 0 -compare_new = compare.dropna() -compare_new.reset_index().plot(drawstyle='steps') - -############################################################################### -# -storage1 = pd.read_csv( - 'ward-results/med-cost-high-renew-2-nodes/storage_capacity.csv', - index_col=[0]) -storage2 = pd.read_csv('ward-results/med-cost-high-renew/storage_capacity.csv', - index_col=[0]) - - -daily1 = storage1.ix[:, storage1.columns.str.contains('daily')] -weekly1 = storage1.ix[:, ~storage1.columns.str.contains('weekly')] - -daily2 = storage2.ix[:, storage2.columns.str.contains('daily')] -weekly2 = storage2.ix[:, storage2.columns.str.contains('weekly')] - -p_d1 = (daily1.mean() - storage1['365'].mean()) / storage1['365'].mean() * 100 -p_w1 = (weekly1.mean() - storage1['365'].mean()) / storage1['365'].mean() * 100 -p_d2 = (daily2.mean() - storage2['365'].mean()) / storage2['365'].mean() * 100 -p_w2 = (weekly2.mean() - storage2['365'].mean()) / 
storage2['365'].mean() * 100 - - -ax = p_w1.plot(style='*-', color='blue', label='w-low-renew') -p_d1.plot(ax=ax, style='--', color='blue', label='d-low-renew') -p_w2.plot(ax=ax, style='*-', color='red', label='w-high-renew') -p_d2.plot(ax=ax, style='*--', color='red', label='d-high-renew') -lines = ax.get_lines() -ax.grid(True) -ax.legend(lines, [l.get_label() for l in lines]) -ax.set_title('Difference in Storage Capacities') -ax.set_ylabel('Relative Difference of Average Storage Capacities in %') -ax.set_xlabel('Clustered days (in days/weeks)') -ax.set_xticklabels([str(i) for i in [7, 21, 42, 84, 126, 168, 210, 252, 294]]) -fig = ax.get_figure() -fig.savefig('storage_capacities.eps') -# storage['365'] - - -############################################################################### - -cobj = pd.read_csv('compare-ward-results/med-cost-high-renew/objective.csv', - index_col=[0]) - - -obj = pd.read_csv('ward-results/med-cost-high-renew/objective.csv', - index_col=[0]) - -ref = obj['objective'].loc['365'] -d = obj.loc[obj.index.str.contains('daily')]['objective'] -# w = obj.loc[obj.index.str.contains('weekly')]['objective'] -o = cobj['objective'] -d.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] -# w.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] -o.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] - -op = (o - ref) / ref * 100 -op.name = 'Original clustered storages cap.' 
-dp = (d - ref) / ref * 100 -dp.name = 'Daily Clustering' -# wp = (w - ref) / ref * 100 - -df = pd.concat([op, dp], axis=1) -ax = df.plot(kind='bar') -ax2 = ax.twinx() -times = obj.loc[obj.index.str.contains('daily')]['time'] -relative_time = times / obj['time'].loc['365'] * 100 -ax.set_title('Comparison of objective values (110%-Renew), Daily') -ax.set_ylabel('Relative percentage deviation in %') -ax.set_xlabel('Clustered Days') -ax.set_xticklabels([str(i) for i in [7, 21, 42, 84, 126, 168, 210, 252, 294]]) -relative_time.plot(ax=ax2, style='*-', color='red') -ax2.set_ylabel('Relative time of clustering compared to original in %') -fig = ax.get_figure() -# fig.savefig("comparison_obj_high.eps") - -############################################################################### - -objective = pd.read_csv('ward-results/med-cost-low-renew/objective.csv', - index_col=[0])['time'] -objectiveh = pd.read_csv('ward-results/med-cost-high-renew/objective.csv', - index_col=[0])['time'] -d = objective.loc[objective.index.str.contains('daily')] -d.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] -w = objective.loc[objective.index.str.contains('weekly')] -w.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] - -dh = objectiveh.loc[objectiveh.index.str.contains('daily')] -dh.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] -wh = objectiveh.loc[objectiveh.index.str.contains('weekly')] -wh.index = [7, 21, 42, 84, 126, 168, 210, 252, 294] - -dr = d / objective.loc['365'] -dr.name = 'daily-low' -wr = w / objective.loc['365'] -wr.name = 'weekly-low' - - -drh = dh / objectiveh.loc['365'] -drh.name = 'daily-high' -wrh = wh / objectiveh.loc['365'] -wrh.name = 'weekly-high' - -df = pd.concat([dr, wr, drh, wrh], axis=1) - -ax = df.plot( - style=[ - '*-', - '--', - '*-', - '--'], - color=[ - 'blue', - 'blue', - 'red', - 'red']) -ax.set_title('Difference in solving time') -ax.grid(True) -ax.set_ylabel('Relative time compared to original %') -ax.set_xlabel('Clustered days (daily/weekly)') - -fig 
= ax.get_figure() -fig.savefig('solving_time.eps') - -############################################################################### - - -df = pd.DataFrame() -path = 'results/med/daily' -original = pd.read_csv( - 'results/med/original/storage_units.csv') # ['p_nom_opt'] -dirs = [i for i in os.listdir(path) if i != 'Z.csv'] -lst = sorted([int(i) for i in dirs]) -# df_opt = pd.DataFrame(index=lst, columns=['objective', 'time']) -for d in lst: - temp_df = pd.read_csv(os.path.join(path, str(d), 'storage_units.csv')) - temp = temp_df.get('p_nom_opt') - if temp is None: - temp_df['p_nom_opt'] = 0 - temp = temp_df['p_nom_opt'] - - temp.name = str(d) - df = pd.concat([df, temp], axis=1) - - temp_obj = pd.read_csv(os.path.join(path, str(d), 'network.csv')) - # df_opt.loc[d]['time'] = temp_obj['time'].values[0] - -me = pd.DataFrame() -for col in df: - me[col] = (original['p_nom_opt'] - df[col]) / original['p_nom_opt'] * 100 -ax = me.mean().plot() -ax.set_xlabel('Clustered days') -ax.set_ylabel('Average Mean Deveation of Storage capacities in %') -ax.grid(True) - - -########################################################################## - -# original -stor = pd.read_csv('results/med/original/storage_units.csv', index_col=0) -gen_t = pd.read_csv('results/med/original/generators-p.csv', index_col=0) -gen = pd.read_csv('results/med/original/generators.csv', index_col=0) - - -# loop -L1_d = {} -objective_d = {} -clusters = [84, 126, 168, 294] - -for c in clusters: - snapshots = pd.read_csv('results/med/daily/' + str(c) + '/snapshots.csv', - parse_dates=[0], index_col=[0]) - stor_7 = pd.read_csv( - 'results/med/daily/' + - str(c) + - '/storage_units.csv', - index_col=0) - gen_7_t = pd.read_csv( - 'results/med/daily/' + - str(c) + - '/generators-p.csv', - index_col=0) - - L1I = sum(stor.loc[s]['capital_cost'] * - abs(stor_7.loc[s].get('p_nom_opt', 0) - - stor.loc[s]['p_nom_opt']) - for s in stor.index) - - L1G = sum(gen.loc[g]['marginal_cost'] * - abs(sum(gen_t.loc[t][str(g)] 
for t in gen_t.index) - - sum(gen_7_t.loc[str(t)][str(g)] for t in snapshots.index)) - for g in gen.index) - - L1 = L1I + L1G - - L1_d[str(c)] = L1 - -############################################################################### -network = pd.read_csv('results/med/original/network.csv') -objective_d = {} -clusters = [7, 14, 21, 28, 35, 42, 49, 56] -for c in clusters: - network_c = pd.read_csv('results/med/daily/' + str(c) + '/network.csv') - - objective_d[str(c)] = abs(network_c['objective'].values[0] - - network['objective'].values[0]) - - -############################################################################### -# Distance matrix.... -############################################################################### - -Z = snapshots = pd.read_csv('results/med/daily/Z.csv').values - -last = Z[-200:, 2] -last_rev = last[::-1] -idxs = np.arange(1, len(last) + 1) -plt.plot(idxs, last_rev) -plt.show() - -# acceleration = np.diff(last, 2) # 2nd derivative of the distances -# acceleration_rev = acceleration[::-1] -# plt.plot(idxs[:-2] + 1, acceleration_rev) -# plt.show() -# k = acceleration_rev.argmax() + 2 -# if idx 0 is the max of this we want 2 clusters -# print("clusters:", k) -# dendrogram(Z, color_threshold=25) -# -# -# diff --git a/etrago/tools/utilities.py b/etrago/tools/utilities.py index 1460b9b9..e45835fe 100644 --- a/etrago/tools/utilities.py +++ b/etrago/tools/utilities.py @@ -105,9 +105,9 @@ def geolocation_buses(network, session): Use Geometries of buses x/y(lon/lat) and Polygons of Countries from RenpassGisParameterRegion in order to locate the buses - + Else: - Use x/y coordinats to locate foreign buses + Use coordinats of buses to locate foreign buses, which is less accurate. 
Parameters ---------- @@ -115,52 +115,51 @@ def geolocation_buses(network, session): eTraGo network object compiled by: meth: `etrago.appl.etrago` session: : sqlalchemy: `sqlalchemy.orm.session.Session < orm/session_basics.html >` SQLAlchemy session to the OEDB - + """ - if geopandas: - # ToDo: check eTrago stack generation plots and other in order of adaptation - # Start db connetion - # get renpassG!S scenario data - - RenpassGISRegion = RenpassGisParameterRegion + if geopandas: + # Start db connetion + # get renpassG!S scenario data + + RenpassGISRegion = RenpassGisParameterRegion - # Define regions - region_id = ['DE', 'DK', 'FR', 'BE', 'LU', 'AT', - 'NO', 'PL', 'CH', 'CZ', 'SE', 'NL'] + # Define regions + region_id = ['DE', 'DK', 'FR', 'BE', 'LU', 'AT', + 'NO', 'PL', 'CH', 'CZ', 'SE', 'NL'] - query = session.query(RenpassGISRegion.gid, - RenpassGISRegion.u_region_id, - RenpassGISRegion.stat_level, - RenpassGISRegion.geom, - RenpassGISRegion.geom_point) + query = session.query(RenpassGISRegion.gid, + RenpassGISRegion.u_region_id, + RenpassGISRegion.stat_level, + RenpassGISRegion.geom, + RenpassGISRegion.geom_point) - # get regions by query and filter - Regions = [(gid, u_region_id, stat_level, geoalchemy2.shape.to_shape( - geom),geoalchemy2.shape.to_shape(geom_point)) + # get regions by query and filter + Regions = [(gid, u_region_id, stat_level, geoalchemy2.shape.to_shape( + geom), geoalchemy2.shape.to_shape(geom_point)) for gid, u_region_id, stat_level, geom, geom_point in query.filter(RenpassGISRegion.u_region_id. 
in_(region_id)).all()] - crs = {'init': 'epsg:4326'} - # transform lon lat to shapely Points and create GeoDataFrame - points = [Point(xy) for xy in zip(network.buses.x, network.buses.y)] - bus = gpd.GeoDataFrame(network.buses, crs=crs, geometry=points) - # Transform Countries Polygons as Regions - region = pd.DataFrame( + crs = {'init': 'epsg:4326'} + # transform lon lat to shapely Points and create GeoDataFrame + points = [Point(xy) for xy in zip(network.buses.x, network.buses.y)] + bus = gpd.GeoDataFrame(network.buses, crs=crs, geometry=points) + # Transform Countries Polygons as Regions + region = pd.DataFrame( Regions, columns=['id', 'country', 'stat_level', 'Polygon', 'Point']) - re = gpd.GeoDataFrame(region, crs=crs, geometry=region['Polygon']) - # join regions and buses by geometry which intersects - busC = gpd.sjoin(bus, re, how='inner', op='intersects') - # busC - # Drop non used columns - busC = busC.drop(['index_right', 'Point', 'id', 'Polygon', + re = gpd.GeoDataFrame(region, crs=crs, geometry=region['Polygon']) + # join regions and buses by geometry which intersects + busC = gpd.sjoin(bus, re, how='inner', op='intersects') + # busC + # Drop non used columns + busC = busC.drop(['index_right', 'Point', 'id', 'Polygon', 'stat_level', 'geometry'], axis=1) - # add busC to eTraGo.buses - network.buses['country_code'] = busC['country'] - network.buses.country_code[network.buses.country_code.isnull()] = 'DE' - # close session - session.close() + # add busC to eTraGo.buses + network.buses['country_code'] = busC['country'] + network.buses.country_code[network.buses.country_code.isnull()] = 'DE' + # close session + session.close() else: @@ -215,7 +214,8 @@ def geolocation_buses(network, session): def buses_by_country(network): """ - Find buses of foreign countries and return them as Pandas Series + Find buses of foreign countries using coordinates + and return them as Pandas Series Parameters ---------- @@ -396,9 +396,9 @@ def clip_foreign(network): def 
foreign_links(network): - """ - Change transmission technology of foreign lines from AC to DC (links). - Parameters + """Change transmission technology of foreign lines from AC to DC (links). + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA @@ -444,11 +444,14 @@ def foreign_links(network): def set_q_foreign_loads(network, cos_phi=1): - """ - Set reative power timeseries of loads in neighbouring countries + """Set reative power timeseries of loads in neighbouring countries + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA + cos_phi: float + Choose ration of active and reactive power of foreign loads Returns ------- @@ -515,6 +518,8 @@ def connected_transformer(network, busids): def load_shedding(network, **kwargs): """ Implement load shedding in existing network to identify feasibility problems + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA @@ -550,6 +555,16 @@ def load_shedding(network, **kwargs): def data_manipulation_sh(network): + """ Adds missing components to run calculations with SH scenarios. + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + + + + """ from shapely.geometry import Point, LineString, MultiLineString from geoalchemy2.shape import from_shape, to_shape @@ -595,10 +610,21 @@ def _enumerate_row(row): def results_to_csv(network, args, pf_solution=None): - """ + """ Function the writes the calaculation results + in csv-files in the desired directory. + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + args: dict + Contains calculation settings of appl.py + pf_solution: pandas.Dataframe or None + If pf was calculated, df containing information of convergence else None. 
+ """ - path = args['results'] + path = args['csv_export'] if path == False: return None @@ -629,22 +655,41 @@ def results_to_csv(network, args, pf_solution=None): return -def parallelisation(network, start_snapshot, end_snapshot, group_size, - solver_name, solver_options, extra_functionality=None): +def parallelisation(network, args, group_size, extra_functionality=None): + + """ + Function that splits problem in selected number of + snapshot groups and runs optimization successive for each group. + + Not useful for calculations with storage untis or extension. + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + args: dict + Contains calculation settings of appl.py + + Returns + ------- + network : :class:`pypsa.Network + Overall container of PyPSA + """ print("Performing linear OPF, {} snapshot(s) at a time:". format(group_size)) t = time.time() - for i in range(int((end_snapshot - start_snapshot + 1) / group_size)): + for i in range(int((args['end_snapshot'] - args['start_snapshot'] + 1) + / group_size)): if i > 0: network.storage_units.state_of_charge_initial = network.\ storage_units_t.state_of_charge.loc[ network.snapshots[group_size * i - 1]] network.lopf(network.snapshots[ group_size * i:group_size * i + group_size], - solver_name=solver_name, - solver_options=solver_options, + solver_name=args['solver_name'], + solver_options=args['solver_options'], extra_functionality=extra_functionality) network.lines.s_nom = network.lines.s_nom_opt @@ -652,6 +697,23 @@ def parallelisation(network, start_snapshot, end_snapshot, group_size, return def set_slack(network): + + """ Function that chosses the bus with the maximum installed power as slack + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + + Returns + ------- + network : :class:`pypsa.Network + Overall container of PyPSA + + + + """ + old_slack = network.generators.index[network. 
generators.control == 'Slack'][0] # check if old slack was PV or PQ control: @@ -694,6 +756,34 @@ def set_slack(network): def pf_post_lopf(network, args, extra_functionality, add_foreign_lopf): + """ Function that prepares and runs non-linear load flow using PyPSA pf. + + If network has been extendable, a second lopf with reactances adapted to + new s_nom is needed. + + If crossborder lines are DC-links, pf is only applied on German network. + Crossborder flows are still considered due to the active behavior of links. + To return a network containing the whole grid, the optimised solution of the + foreign components can be added afterwards. + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + args: dict + Contains calculation settings of appl.py + extra_functionality: function or NoneType + Adds constraint to optimization (e.g. when applied snapshot clustering) + add_foreign_lopf: boolean + Choose if foreign results of lopf should be added to the network when + foreign lines are DC + + Returns + ------- + pf_solution: pandas.Dataframe + Contains information about convergence and calculation time of pf + + """ network_pf = network # Update x of extended lines and transformers @@ -768,7 +858,8 @@ def pf_post_lopf(network, args, extra_functionality, add_foreign_lopf): network_pf.links_t.p_set = network_pf.links_t.p0 # if foreign lines are DC, execute pf only on sub_network in Germany - if args['foreign_lines']['carrier'] == 'DC': + if (args['foreign_lines']['carrier'] == 'DC') or ((args['scn_extension']!= + None) and ('BE_NO_NEP 2035' in args['scn_extension'])): n_bus = pd.Series(index=network.sub_networks.index) for i in range(0, len(network.sub_networks.index)-1): @@ -834,7 +925,8 @@ def pf_post_lopf(network, args, extra_functionality, add_foreign_lopf): pf_solution = network_pf.pf(network.snapshots, use_seed=True) # if selected, copy lopf results of neighboring countries to network - if (args['foreign_lines']['carrier'] == 'DC') & 
add_foreign_lopf: + if ((args['foreign_lines']['carrier'] == 'DC') or ((args['scn_extension']!= + None) and ('BE_NO_NEP 2035' in args['scn_extension']))) and add_foreign_lopf: for comp in sorted(foreign_series): network.import_components_from_dataframe(foreign_comp[comp], comp) @@ -855,7 +947,24 @@ def pf_post_lopf(network, args, extra_functionality, add_foreign_lopf): def distribute_q(network, allocation='p_nom'): + + """ Function that distributes reactive power at bus to all installed + generators and storages. + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + allocation: str + Choose key to distribute reactive power: + 'p_nom' to dirstribute via p_nom + 'p' to distribute via p_set + + Returns + ------- + + + """ network.allocation = allocation if allocation == 'p': p_sum = network.generators_t['p'].\ @@ -919,6 +1028,8 @@ def distribute_q(network, allocation='p_nom'): def calc_line_losses(network): """ Calculate losses per line with PF result data + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA @@ -1044,11 +1155,15 @@ def group_parallel_lines(network): return -def set_line_costs(network, cost110=230, cost220=290, cost380=85, costDC=375): +def set_line_costs(network, args, + cost110=230, cost220=290, cost380=85, costDC=375): """ Set capital costs for extendable lines in respect to PyPSA [€/MVA] + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA + args: dict containing settings from appl.py cost110 : capital costs per km for 110kV lines and cables default: 230€/MVA/km, source: costs for extra circuit in dena Verteilnetzstudie, p. 
146) @@ -1067,21 +1182,29 @@ def set_line_costs(network, cost110=230, cost220=290, cost380=85, costDC=375): network.lines["v_nom"] = network.lines.bus0.map(network.buses.v_nom) network.lines.loc[(network.lines.v_nom == 110), - 'capital_cost'] = cost110 * network.lines.length + 'capital_cost'] = cost110 * network.lines.length /\ + args['branch_capacity_factor']['HV'] + network.lines.loc[(network.lines.v_nom == 220), - 'capital_cost'] = cost220 * network.lines.length + 'capital_cost'] = cost220 * network.lines.length/\ + args['branch_capacity_factor']['eHV'] + network.lines.loc[(network.lines.v_nom == 380), - 'capital_cost'] = cost380 * network.lines.length + 'capital_cost'] = cost380 * network.lines.length/\ + args['branch_capacity_factor']['eHV'] + network.links.loc[network.links.p_nom_extendable, 'capital_cost'] = costDC * network.links.length return network -def set_trafo_costs(network, cost110_220=7500, cost110_380=17333, +def set_trafo_costs(network, args, cost110_220=7500, cost110_380=17333, cost220_380=14166): """ Set capital costs for extendable transformers in respect to PyPSA [€/MVA] + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA @@ -1100,21 +1223,37 @@ def set_trafo_costs(network, cost110_220=7500, cost110_380=17333, network.buses.v_nom) network.transformers.loc[(network.transformers.v_nom0 == 110) & ( - network.transformers.v_nom1 == 220), 'capital_cost'] = cost110_220 + network.transformers.v_nom1 == 220), 'capital_cost'] = cost110_220/\ + args['branch_capacity_factor']['HV'] network.transformers.loc[(network.transformers.v_nom0 == 110) & ( - network.transformers.v_nom1 == 380), 'capital_cost'] = cost110_380 + network.transformers.v_nom1 == 380), 'capital_cost'] = cost110_380/\ + args['branch_capacity_factor']['HV'] network.transformers.loc[(network.transformers.v_nom0 == 220) & ( - network.transformers.v_nom1 == 380), 'capital_cost'] = cost220_380 + network.transformers.v_nom1 == 380), 'capital_cost'] = 
cost220_380/\ + args['branch_capacity_factor']['eHV'] return network def add_missing_components(network): # Munich - ''' - add missing transformer at Heizkraftwerk Nord in Munich: - https://www.swm.de/privatkunden/unternehmen/energieerzeugung/heizkraftwerke.html?utm_medium=301 -@@ -329,27 +334,28 @@ + """Add missing transformer at Heizkraftwerk Nord in Munich and missing + transformer in Stuttgart + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + + Returns + ------- + network : :class:`pypsa.Network + Overall container of PyPSA + + """ + + """https://www.swm.de/privatkunden/unternehmen/energieerzeugung/heizkraftwerke.html?utm_medium=301 + to bus 25096: 25369 (86) 28232 (24) @@ -1131,7 +1270,7 @@ def add_missing_components(network): 28335 to 28139 (28) Overhead lines: 16573 to 24182 (part of 4) - ''' + """ """ Installierte Leistung der Umspannungsebene Höchst- zu Hochspannung (380 kV / 110 kV): 2.750.000 kVA @@ -1289,6 +1428,8 @@ def add_220kv_line(bus0, bus1, overhead=False): def convert_capital_costs(network, start_snapshot, end_snapshot, p=0.05, T=40): """ Convert capital_costs to fit to pypsa and caluculated time + + Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA @@ -1297,30 +1438,26 @@ def convert_capital_costs(network, start_snapshot, end_snapshot, p=0.05, T=40): ------- """ - # Add costs for converter + # Add costs for DC-converter network.links.capital_cost = network.links.capital_cost + 400000 # Calculate present value of an annuity (PVA) PVA = (1 / p) - (1 / (p * (1 + p) ** T)) - # + # Apply function on lines, links, trafos and storages + # Storage costs are already annuized yearly network.lines.loc[network.lines.s_nom_extendable == True, 'capital_cost'] = (network.lines.capital_cost / - (PVA * (8760 / (end_snapshot - - start_snapshot + 1)))) + (PVA * (8760 / (end_snapshot - start_snapshot + 1)))) network.links.loc[network.links.p_nom_extendable == True, - 'capital_cost'] = 
network.\ - links.capital_cost / (PVA * (8760 // (end_snapshot - - start_snapshot + 1))) + 'capital_cost'] = network. links.capital_cost /\ + (PVA * (8760 / (end_snapshot - start_snapshot + 1))) network.transformers.loc[network.transformers.s_nom_extendable == True, - 'capital_cost'] = network.\ - transformers.capital_cost / \ - (PVA * (8760 // (end_snapshot - start_snapshot + 1))) - network.storage_units.loc[network.storage_units. - p_nom_extendable == True, - 'capital_cost'] = network.\ - storage_units.capital_cost / (8760 // (end_snapshot - - start_snapshot + 1)) + 'capital_cost'] = network.transformers.capital_cost / \ + (PVA * (8760 / (end_snapshot - start_snapshot + 1))) + network.storage_units.loc[network.storage_units.p_nom_extendable == True, + 'capital_cost'] = network.storage_units.capital_cost / \ + (8760 / (end_snapshot - start_snapshot + 1)) return network @@ -1449,6 +1586,16 @@ def get_args_setting(args, jsonpath='scenario_setting.json'): def set_line_country_tags(network): + """ + Set country tag for AC- and DC-lines. + + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + + + """ transborder_lines_0 = network.lines[network.lines['bus0'].isin( network.buses.index[network.buses['country_code'] != 'DE'])].index @@ -1492,7 +1639,7 @@ def set_line_country_tags(network): def crossborder_capacity(network, method, capacity_factor): """ - Correct interconnector capacties. + Adjust interconnector capacties. Parameters ---------- @@ -1505,9 +1652,13 @@ def crossborder_capacity(network, method, capacity_factor): 'thermal_acer' corrects certain capacities where our dataset most likely overestimates the thermal capacity. capacity_factor : float - branch capacity factor. Makes sure that new thermal values are adjusted - in the same way as the rest of the network. ntc-values are not adjusted - since they already include n-1 security. + branch capacity factor. 
Reduction by branch-capacity + factor is applied afterwards and shouldn't affect ntc-values, which + already include (n-1)-security. To exclude the ntc-capacities from the + capacity factor, the crossborder-capacities are divided by the factor + in this function. For thermal-acer this is excluded by setting branch + capacity factors to one. + """ if method == 'ntc_acer': @@ -1532,47 +1683,158 @@ def crossborder_capacity(network, method, capacity_factor): 'LUFR': 364, 'SEDK': 1928, 'DKSE': 1928} - capacity_factor = 1 + elif method == 'thermal_acer': cap_per_country = {'CH': 12000, 'DK': 4000, 'SEDK': 3500, 'DKSE': 3500} + capacity_factor = {'HV': 1, 'eHV':1} if not network.lines[network.lines.country != 'DE'].empty: weighting = network.lines.loc[network.lines.country!='DE', 's_nom'].\ groupby(network.lines.country).transform(lambda x: x/x.sum()) weighting_links = network.links.loc[network.links.country!='DE', 'p_nom'].\ groupby(network.links.country).transform(lambda x: x/x.sum()) - + network.lines["v_nom"] = network.lines.bus0.map(network.buses.v_nom) for country in cap_per_country: - index = network.lines[network.lines.country == country].index + index_HV = network.lines[(network.lines.country == country) &( + network.lines.v_nom == 110)].index + index_eHV = network.lines[(network.lines.country == country) &( + network.lines.v_nom > 110)].index index_links = network.links[network.links.country == country].index if not network.lines[network.lines.country == country].empty: - network.lines.loc[index, 's_nom'] = \ - weighting[index] * cap_per_country[country] *\ - capacity_factor + network.lines.loc[index_HV, 's_nom'] = weighting[index_HV] * \ + cap_per_country[country] / capacity_factor['HV'] + + network.lines.loc[index_eHV, 's_nom'] = \ + weighting[index_eHV] * cap_per_country[country] /\ + capacity_factor['eHV'] if not network.links[network.links.country == country].empty: network.links.loc[index_links, 'p_nom'] = \ weighting_links[index_links] * cap_per_country\ - 
[country] * capacity_factor + [country] if country == 'SE': network.links.loc[network.links.country == country, 'p_nom'] =\ cap_per_country[country] if not network.lines[network.lines.country == (country+country)].empty: - i_lines = network.lines[network.lines.country == - country+country].index - network.lines.loc[i_lines, 's_nom'] = \ - weighting[i_lines] * cap_per_country[country]*\ - capacity_factor + i_HV = network.lines[(network.lines.v_nom == 110)&( + network.lines.country ==country+country)].index + + i_eHV = network.lines[(network.lines.v_nom == 110)&( + network.lines.country ==country+country)].index + + network.lines.loc[i_HV, 's_nom'] = \ + weighting[i_HV] * cap_per_country[country]/\ + capacity_factor['HV'] + network.lines.loc[i_eHV, 's_nom'] = \ + weighting[i_eHV] * cap_per_country[country]/\ + capacity_factor['eHV'] if not network.links[network.links.country == (country+country)].empty: i_links = network.links[network.links.country == (country+country)].index network.links.loc[i_links, 'p_nom'] = \ - weighting_links[i_links] * cap_per_country\ - [country]*capacity_factor + weighting_links[i_links] * cap_per_country\ + [country]*capacity_factor + + +def set_branch_capacity(network, args): + + """ + Set branch capacity factor of lines and transformers, different factors for + HV (110kV) and eHV (220kV, 380kV). 
+ + Parameters + ---------- + network : :class:`pypsa.Network + Overall container of PyPSA + args: dict + Settings in appl.py + + """ + + network.lines["s_nom_total"] = network.lines.s_nom.copy() + + network.transformers["s_nom_total"] = network.transformers.s_nom.copy() + + network.lines["v_nom"] = network.lines.bus0.map( + network.buses.v_nom) + network.transformers["v_nom0"] = network.transformers.bus0.map( + network.buses.v_nom) + + network.lines.s_nom[network.lines.v_nom == 110] = \ + network.lines.s_nom * args['branch_capacity_factor']['HV'] + + network.lines.s_nom[network.lines.v_nom > 110] = \ + network.lines.s_nom * args['branch_capacity_factor']['eHV'] + + network.transformers.s_nom[network.transformers.v_nom0 == 110]\ + = network.transformers.s_nom * args['branch_capacity_factor']['HV'] + + network.transformers.s_nom[network.transformers.v_nom0 > 110]\ + = network.transformers.s_nom * args['branch_capacity_factor']['eHV'] + +def max_line_ext(network, snapshots, share=1.01): + + """ + Sets maximal share of overall network extension + as extra functionality in LOPF + + Parameters + ---------- + share: float + Maximal share of network extension in p.u. + """ + + lines_snom = network.lines.s_nom.sum() + links_pnom = network.links.p_nom.sum() + + def _rule(m): + + lines_opt = sum(m.passive_branch_s_nom[index] + for index + in m.passive_branch_s_nom_index) + + links_opt = sum(m.link_p_nom[index] + for index + in m.link_p_nom_index) + + return (lines_opt + links_opt) <= (lines_snom + links_pnom) * share + network.model.max_line_ext = Constraint(rule=_rule) + +def min_renewable_share(network, snapshots, share=0.72): + """ + Sets minimal renewable share of generation as extra functionality in LOPF + + Parameters + ---------- + share: float + Minimal share of renewable generation in p.u. 
+ """ + renewables = ['wind_onshore', 'wind_offshore', + 'biomass', 'solar', 'run_of_river'] + + res = list(network.generators.index[ + network.generators.carrier.isin(renewables)]) + + total = list(network.generators.index) + snapshots = network.snapshots + + def _rule(m): + """ + """ + renewable_production = sum(m.generator_p[gen, sn] + for gen + in res + for sn in snapshots) + total_production = sum(m.generator_p[gen, sn] + for gen in total + for sn in snapshots) + + return (renewable_production >= total_production * share) + network.model.min_renewable_share = Constraint(rule=_rule) diff --git a/requirements.txt b/requirements.txt index e34c23c6..2d41cfb7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ # use pip install -r requirements.txt to setup your virtualenv # PyPSA fork on featurs/clustering branch --e git+https://github.com/openego/PyPSA.git@features/snapshot_clustering#egg=PyPSA +-e git+https://github.com/openego/PyPSA.git@master#egg=PyPSA diff --git a/setup.py b/setup.py index 444228d7..35a759d7 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ author='DLR VE, ZNES Flensburg', author_email='', description="electric transmission grid optimization", - version='0.7.0', + version='0.7.1', url='https://github.com/openego/eTraGo', license="GNU Affero General Public License Version 3 (AGPL-3.0)", packages=find_packages(),