From 747441f216019226efe26ec65570bdc1ef968b08 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 30 Oct 2024 14:47:34 +0000
Subject: [PATCH 01/48] docs(contributor): contrib-readme-action has updated
 readme

---
 README.md | 167 ++++++++++++++++++++++++++----------------------------
 1 file changed, 80 insertions(+), 87 deletions(-)

diff --git a/README.md b/README.md
index c20cde919..c15921cab 100644
--- a/README.md
+++ b/README.md

[Diff body omitted: the auto-generated "Contributors" avatar grid in README.md is regenerated by the bot. Profile cells are reordered by contribution count and a few entries move or are re-added (e.g. GbotemiB, davide-f, DeniseGiub, yerbol-akhmetov move up; euronion, AnasAlgarei, LukasFrankenQ, asolavi, danielelerede-oet, carlosfv92 move down). Only HTML table markup changes.]

From d6afd52401b4ab69776e5d51be20c539ffc3f997 Mon Sep 17 00:00:00 2001
From: Ekaterina
Date: Tue, 5 Nov 2024 00:19:01 +0100
Subject: [PATCH 02/48] Update PyPSA & enable linopy (#1167)

* Add a zenodo link to natura.tiff
* Update environment
* Revise structure definition for lines
* Remove get_aggregation_strategies
* Fix typo aggregation_strategies
* Replace aggregategenerators with aggregateoneport
* Add aggregation strategies as a parameter
* Re-define aggregation strategies
* Update aggregation strategies
* Update aggregation strategies for lines
* Update aggregation strategies for buses
* Fix typo
* Put aggregation strategies into a variable
* Parametrize the aggregation strategies
* Refactor update of the aggregation strategies
* Clean-up the code
* Revert "Add a zenodo link to natura.tiff"

This reverts commit 77007598b436df510ec8ce6f29efa9d067341628.

* Define an explicit clustering strategy for v_nom
* Add a release note
* Get glpk back
* Specify v_nom for buses explicitly
* Revert "Specify v_nom for buses explicitly"

This reverts commit 20192e6b3e80a2fedbee398f8e892f776bb5b5cc.

* Add a version restriction to the environment specification
* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Adjust naming
* Move the variable definition
* Move the variable
* Upgrade PyPSA version

---------

Co-authored-by: Davide Fioriti
Co-authored-by: Davide Fioriti <67809479+davide-f@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 Snakefile                    |  3 ++
 doc/release_notes.rst        |  2 +
 envs/environment.yaml        |  3 +-
 scripts/_helpers.py          | 10 +++++
 scripts/base_network.py      | 14 -------
 scripts/build_osm_network.py | 25 ++++++++++++
 scripts/cluster_network.py   | 39 +++++++++++++------
 scripts/simplify_network.py  | 75 ++++++++++++++++++++++++------------
 8 files changed, 120 insertions(+), 51 deletions(-)

diff --git a/Snakefile b/Snakefile
index 218041c67..8088117b7 100644
--- a/Snakefile
+++ b/Snakefile
@@ -563,6 +563,7 @@ rule add_electricity:

 rule simplify_network:
     params:
+        aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
         renewable=config["renewable"],
         geo_crs=config["crs"]["geo_crs"],
         cluster_options=config["cluster_options"],
@@ -605,6 +606,7 @@ if config["augmented_line_connection"].get("add_to_snakefile", False) == True:

     rule cluster_network:
         params:
+            aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
             build_shape_options=config["build_shape_options"],
             electricity=config["electricity"],
             costs=config["costs"],
@@ -690,6 +692,7 @@ if config["augmented_line_connection"].get("add_to_snakefile", False) == False:

     rule cluster_network:
         params:
+            aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
             build_shape_options=config["build_shape_options"],
             electricity=config["electricity"],
             costs=config["costs"],

diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index 0d1b7c746..c084a7725 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -72,6 +72,8 @@ PyPSA-Earth 0.4.0
 * Add an option to use csv format for custom demand imports.
   `PR #995 `__

+* Implement changes in processing network topology to use the updated PyPSA version.
  `PR #1065 `__
+
 **Minor Changes and bug-fixing**

 * Minor bug-fixing to run the cluster wildcard min
   `PR #1019 `__

diff --git a/envs/environment.yaml b/envs/environment.yaml
index 7da885a73..dc0726ebe 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -12,7 +12,7 @@ dependencies:
 - pip
 - mamba # esp for windows build

-- pypsa>=0.24, <0.25
+- pypsa>=0.25, <0.29
 # - atlite>=0.2.4  # until https://github.com/PyPSA/atlite/issues/244 is not merged
 - dask
 - powerplantmatching
@@ -27,6 +27,7 @@ dependencies:
 - memory_profiler
 - ruamel.yaml<=0.17.26
 - pytables
+- pyscipopt # added to comply with the quadratic objective requirement of the clustering script
 - lxml
 - numpy
 - pandas

diff --git a/scripts/_helpers.py b/scripts/_helpers.py
index ce97f6171..a106f7185 100644
--- a/scripts/_helpers.py
+++ b/scripts/_helpers.py
@@ -922,6 +922,16 @@ def get_last_commit_message(path):
     return last_commit_message


+def update_config_dictionary(
+    config_dict,
+    parameter_key_to_fill="lines",
+    dict_to_use={"geometry": "first", "bounds": "first"},
+):
+    config_dict.setdefault(parameter_key_to_fill, {})
+    config_dict[parameter_key_to_fill].update(dict_to_use)
+    return config_dict
+
+
 # PYPSA-EARTH-SEC
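A minimal standalone sketch of how the new helper behaves (the strategy values below are made up for illustration): it guarantees that the requested sub-dictionary exists and then forces the given column strategies onto it, overwriting entries already present for the same columns.

def update_config_dictionary(
    config_dict,
    parameter_key_to_fill="lines",
    dict_to_use={"geometry": "first", "bounds": "first"},
):
    # Ensure the sub-dictionary exists, then set/override the given strategies.
    config_dict.setdefault(parameter_key_to_fill, {})
    config_dict[parameter_key_to_fill].update(dict_to_use)
    return config_dict


aggregation_strategies = {"generators": {"p_nom_max": "sum"}}
update_config_dictionary(
    config_dict=aggregation_strategies,
    parameter_key_to_fill="buses",
    dict_to_use={"v_nom": "first", "country": "first"},
)
assert aggregation_strategies == {
    "generators": {"p_nom_max": "sum"},
    "buses": {"v_nom": "first", "country": "first"},
}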
            return v

-    aggregation_strategies = snakemake.params.cluster_options.get(
-        "aggregation_strategies", {}
+    aggregation_strategies = snakemake.params.aggregation_strategies
+
+    # Aggregation strategies must be set for all columns
+    update_config_dictionary(
+        config_dict=aggregation_strategies,
+        parameter_key_to_fill="lines",
+        dict_to_use={"v_nom": "first", "geometry": "first", "bounds": "first"},
+    )
+    update_config_dictionary(
+        config_dict=aggregation_strategies,
+        parameter_key_to_fill="buses",
+        dict_to_use={
+            "v_nom": "first",
+            "lat": "mean",
+            "lon": "mean",
+            "tag_substation": "first",
+            "tag_area": "first",
+            "country": "first",
+        },
     )
-    # translate str entries of aggregation_strategies to pd.Series functions:
-    aggregation_strategies = {
-        p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
-        for p in aggregation_strategies.keys()
-    }
+
     custom_busmap = False  # snakemake.params.custom_busmap custom busmap is deprecated https://github.com/pypsa-meets-earth/pypsa-earth/pull/694
     if custom_busmap:
         busmap = pd.read_csv(

diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py
index 92c3dd340..502cf1b9d 100644
--- a/scripts/simplify_network.py
+++ b/scripts/simplify_network.py
@@ -96,13 +96,12 @@
 from _helpers import (
     configure_logging,
     create_logger,
-    get_aggregation_strategies,
+    update_config_dictionary,
     update_p_nom_max,
 )
 from add_electricity import load_costs
 from cluster_network import cluster_regions, clustering_for_n_clusters
 from pypsa.clustering.spatial import (
-    aggregategenerators,
     aggregateoneport,
     busmap_by_stubs,
     get_clustering_from_busmap,
@@ -276,11 +275,15 @@ def replace_components(n, c, df, pnl):

     _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)

-    _, generator_strategies = get_aggregation_strategies(aggregation_strategies)
+    generator_strategies = aggregation_strategies["generators"]

     carriers = set(n.generators.carrier) - set(exclude_carriers)
-    generators, generators_pnl = aggregategenerators(
-        n, busmap, carriers=carriers, custom_strategies=generator_strategies
+    generators, generators_pnl = aggregateoneport(
+        n,
+        busmap,
+        "Generator",
+        carriers=carriers,
+        custom_strategies=generator_strategies,
     )

     replace_components(n, "Generator", generators, generators_pnl)
@@ -588,19 +591,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
     if not dist.empty:
         busmap.loc[buses_i] = dist.idxmin(1)

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    bus_strategies = aggregation_strategies.get("buses", dict())
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=None,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=1.0,
+        line_strategies=line_strategies,
+        bus_strategies=bus_strategies,
         generator_strategies=generator_strategies,
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )
     return clustering.network, busmap
@@ -848,19 +854,22 @@ def merge_into_network(n, threshold, aggregation_strategies=dict()):
     if (busmap.index == busmap).all():
         return n, n.buses.index.to_series()

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+
line_strategies = aggregation_strategies.get("lines", dict()) + bus_strategies = aggregation_strategies.get("buses", dict()) + generator_strategies = aggregation_strategies.get("generators", dict()) + one_port_strategies = aggregation_strategies.get("one_ports", dict()) clustering = get_clustering_from_busmap( n, busmap, - bus_strategies=bus_strategies, aggregate_generators_weighted=True, aggregate_generators_carriers=None, aggregate_one_ports=["Load", "StorageUnit"], line_length_factor=1.0, + line_strategies=line_strategies, + bus_strategies=bus_strategies, generator_strategies=generator_strategies, + one_port_strategies=one_port_strategies, scale_link_capital_costs=False, ) @@ -934,19 +943,22 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()): if (busmap.index == busmap).all(): return n, n.buses.index.to_series() - bus_strategies, generator_strategies = get_aggregation_strategies( - aggregation_strategies - ) + line_strategies = aggregation_strategies.get("lines", dict()) + bus_strategies = aggregation_strategies.get("buses", dict()) + generator_strategies = aggregation_strategies.get("generators", dict()) + one_port_strategies = aggregation_strategies.get("one_ports", dict()) clustering = get_clustering_from_busmap( n, busmap, - bus_strategies=bus_strategies, aggregate_generators_weighted=True, aggregate_generators_carriers=None, aggregate_one_ports=["Load", "StorageUnit"], line_length_factor=1.0, + line_strategies=line_strategies, + bus_strategies=bus_strategies, generator_strategies=generator_strategies, + one_port_strategies=one_port_strategies, scale_link_capital_costs=False, ) @@ -976,14 +988,27 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()): "exclude_carriers", [] ) hvdc_as_lines = snakemake.params.electricity["hvdc_as_lines"] - aggregation_strategies = snakemake.params.cluster_options.get( - "aggregation_strategies", {} + aggregation_strategies = snakemake.params.aggregation_strategies + + # Aggregation strategies must be set for all columns + update_config_dictionary( + config_dict=aggregation_strategies, + parameter_key_to_fill="lines", + dict_to_use={"v_nom": "first", "geometry": "first", "bounds": "first"}, ) - # translate str entries of aggregation_strategies to pd.Series functions: - aggregation_strategies = { - p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()} - for p in aggregation_strategies.keys() - } + update_config_dictionary( + config_dict=aggregation_strategies, + parameter_key_to_fill="buses", + dict_to_use={ + "v_nom": "first", + "lat": "mean", + "lon": "mean", + "tag_substation": "first", + "tag_area": "first", + "country": "first", + }, + ) + n, trafo_map = simplify_network_to_base_voltage(n, linetype, base_voltage) Nyears = n.snapshot_weightings.objective.sum() / 8760 @@ -1088,7 +1113,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()): solver_name, cluster_config.get("algorithm", "hac"), cluster_config.get("feature", None), - aggregation_strategies, + aggregation_strategies=aggregation_strategies, ) busmaps.append(cluster_map) From bb953250dd6c1b7c992b91ac2ebfa6feb0393dda Mon Sep 17 00:00:00 2001 From: Ekaterina Date: Tue, 5 Nov 2024 21:34:24 +0100 Subject: [PATCH 03/48] Enable linopy - power model (#1169) * Add a zenodo link to natura.tiff * Update environment * Revise structure definition for lines * Remove get_aggregation_strategies * Fix typo aggregation_strategies * Replace aggregategenerators with aggregateoneport * Add aggregation strategies as a parameter * 
Re-define aggregation strategies
* Update aggregation strategies
* Update aggregation strategies for lines
* Update aggregation strategies for buses
* Fix typo
* Put aggregation strategies into a variable
* Parametrize the aggregation strategies
* Refactor update of the aggregation strategies
* Clean-up the code
* Revert "Add a zenodo link to natura.tiff"

This reverts commit 77007598b436df510ec8ce6f29efa9d067341628.

* Define an explicit clustering strategy for v_nom
* Add a release note
* Get glpk back
* Specify v_nom for buses explicitly
* Revert "Specify v_nom for buses explicitly"

This reverts commit 20192e6b3e80a2fedbee398f8e892f776bb5b5cc.

* Add a version restriction to the environment specification
* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Adjust naming
* Move the variable definition
* Move the variable
* Upgrade PyPSA version
* Update docstring
* Fix imports duplication
* Update imports
* Update the carrier-capacity constraint
* Add docstring
* Update the equity constraint
* Add docstring
* Update BAU constraint
* Update SAFE constraint
* Add docstring
* Update operational reserve margin constraint
* Add docstring
* Add a new argument to the RM constraint
* Update the update of capacity constraints
* Update adding an operational reserve margin constraint
* Update docstring
* Update battery constraint
* Add docstring
* Update a constraint related to a RES share
* Fix usage of add_RES_constraints
* Update solving script
* Update a solving run
* Fix typos

---------

Co-authored-by: Davide Fioriti
Co-authored-by: Davide Fioriti <67809479+davide-f@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 scripts/solve_network.py | 529 ++++++++++++++++++++++-----------------
 1 file changed, 302 insertions(+), 227 deletions(-)

diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index a9bbfbaa1..f52d2508b 100755
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -52,15 +52,15 @@
 linear optimal power flow (plus investment planning)
 is provided in the `documentation of PyPSA `_.

-The optimization is based on the ``pyomo=False`` setting in the :func:`network.lopf` and :func:`pypsa.linopf.ilopf` function.
-Additionally, some extra constraints specified in :mod:`prepare_network` are added.
+The optimization is based on the :func:`network.optimize` function.
+Additionally, some extra constraints specified in :mod:`prepare_network` and :mod:`solve_network` are added.

 Solving the network in multiple iterations is motivated through the dependence of transmission line capacities
 and impedances on values of corresponding flows. As lines are expanded their electrical parameters change,
 which renders the optimisation bilinear even if the power flow
 equations are linearized.
 To retain the computational advantage of continuous linear programming, a sequential linear programming technique
 is used, where in between iterations the line impedances are updated.
-Details (and errors made through this heuristic) are discussed in the paper
+Details (and errors introduced through this heuristic) are discussed in the paper
@@ -86,23 +86,17 @@ import pandas as pd import pypsa from _helpers import configure_logging, create_logger, override_component_attrs +from linopy import merge from pypsa.descriptors import get_switchable_as_dense as get_as_dense -from pypsa.linopf import ( - define_constraints, - define_variables, - get_var, - ilopf, - join_exprs, - linexpr, - network_lopf, -) -from pypsa.linopt import define_constraints, get_var, join_exprs, linexpr +from pypsa.optimization.abstract import optimize_transmission_expansion_iteratively +from pypsa.optimization.optimize import optimize +from vresutils.benchmark import memory_logger logger = create_logger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) -def prepare_network(n, solve_opts): +def prepare_network(n, solve_opts, config): if "clip_p_max_pu" in solve_opts: for df in ( n.generators_t.p_max_pu, @@ -159,6 +153,25 @@ def prepare_network(n, solve_opts): def add_CCL_constraints(n, config): + """ + Add CCL (country & carrier limit) constraint to the network. + + Add minimum and maximum levels of generator nominal capacity per carrier + for individual countries. Opts and path for agg_p_nom_minmax.csv must be defined + in config.yaml. Default file is available at data/agg_p_nom_minmax.csv. + + Parameters + ---------- + n : pypsa.Network + config : dict + + Example + ------- + scenario: + opts: [Co2L-CCL-24H] + electricity: + agg_p_nom_limits: data/agg_p_nom_minmax.csv + """ agg_p_nom_limits = config["electricity"].get("agg_p_nom_limits") try: @@ -174,32 +187,57 @@ def add_CCL_constraints(n, config): ) gen_country = n.generators.bus.map(n.buses.country) - # cc means country and carrier - p_nom_per_cc = ( - pd.DataFrame( - { - "p_nom": linexpr((1, get_var(n, "Generator", "p_nom"))), - "country": gen_country, - "carrier": n.generators.carrier, - } + capacity_variable = n.model["Generator-p_nom"] + + lhs = [] + ext_carriers = n.generators.query("p_nom_extendable").carrier.unique() + for c in ext_carriers: + ext_carrier = n.generators.query("p_nom_extendable and carrier == @c") + country_grouper = ( + ext_carrier.bus.map(n.buses.country) + .rename_axis("Generator-ext") + .rename("country") ) - .dropna(subset=["p_nom"]) - .groupby(["country", "carrier"]) - .p_nom.apply(join_exprs) + ext_carrier_per_country = capacity_variable.loc[ + country_grouper.index + ].groupby_sum(country_grouper) + lhs.append(ext_carrier_per_country) + lhs = merge(lhs, dim=pd.Index(ext_carriers, name="carrier")) + + min_matrix = agg_p_nom_minmax["min"].to_xarray().unstack().reindex_like(lhs) + max_matrix = agg_p_nom_minmax["max"].to_xarray().unstack().reindex_like(lhs) + + n.model.add_constraints( + lhs >= min_matrix, name="agg_p_nom_min", mask=min_matrix.notnull() + ) + n.model.add_constraints( + lhs <= max_matrix, name="agg_p_nom_max", mask=max_matrix.notnull() ) - minimum = agg_p_nom_minmax["min"].dropna() - if not minimum.empty: - minconstraint = define_constraints( - n, p_nom_per_cc[minimum.index], ">=", minimum, "agg_p_nom", "min" - ) - maximum = agg_p_nom_minmax["max"].dropna() - if not maximum.empty: - maxconstraint = define_constraints( - n, p_nom_per_cc[maximum.index], "<=", maximum, "agg_p_nom", "max" - ) def add_EQ_constraints(n, o, scaling=1e-1): + """ + Add equity constraints to the network. + + Currently this is only implemented for the electricity sector only. + + Opts must be specified in the config.yaml. 

 def add_EQ_constraints(n, o, scaling=1e-1):
+    """
+    Add equity constraints to the network.
+
+    Currently this is implemented for the electricity sector only.
+
+    Opts must be specified in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    o : str
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-EQ0.7-24h]
+
+    Require each country or node to on average produce a minimal share
+    of its total electricity consumption itself. Example: EQ0.7c demands each country
+    to produce on average at least 70% of its consumption; EQ0.7 demands
+    each node to produce on average at least 70% of its consumption.
+    """
     float_regex = "[0-9]*\.?[0-9]+"
     level = float(re.findall(float_regex, o)[0])
     if o[-1] == "c":
@@ -220,99 +258,150 @@
     )
     inflow = inflow.reindex(load.index).fillna(0.0)
     rhs = scaling * (level * load - inflow)
+    dispatch_variable = n.model["Generator-p"]
     lhs_gen = (
-        linexpr(
-            (n.snapshot_weightings.generators * scaling, get_var(n, "Generator", "p").T)
-        )
-        .T.groupby(ggrouper, axis=1)
-        .apply(join_exprs)
+        (dispatch_variable * (n.snapshot_weightings.generators * scaling))
+        .groupby(ggrouper.to_xarray())
+        .sum()
+        .sum("snapshot")
     )
-    lhs_spill = (
-        linexpr(
-            (
-                -n.snapshot_weightings.stores * scaling,
-                get_var(n, "StorageUnit", "spill").T,
-            )
+    # TODO: double check that this is really needed, why do we have to subtract the spillage
+    if not n.storage_units_t.inflow.empty:
+        spillage_variable = n.model["StorageUnit-spill"]
+        lhs_spill = (
+            (spillage_variable * (-n.snapshot_weightings.stores * scaling))
+            .groupby(sgrouper.to_xarray())
+            .sum()
+            .sum("snapshot")
         )
-        .T.groupby(sgrouper, axis=1)
-        .apply(join_exprs)
-    )
-    lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
-    lhs = lhs_gen + lhs_spill
-    define_constraints(n, lhs, ">=", rhs, "equity", "min")
+        lhs = lhs_gen + lhs_spill
+    else:
+        lhs = lhs_gen
+    n.model.add_constraints(lhs >= rhs, name="equity_min")
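The EQ option string carries both the share and the aggregation level; a small self-contained sketch of the parsing performed above (illustrative wildcard values):

import re

# Mirrors the float_regex logic of add_EQ_constraints(): "EQ0.7" applies the
# 70% self-sufficiency target per node, "EQ0.7c" per country.
for o in ["EQ0.7", "EQ0.7c"]:
    level = float(re.findall(r"[0-9]*\.?[0-9]+", o)[0])
    per_country = o[-1] == "c"
    print(o, "->", level, "per country" if per_country else "per node")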

 def add_BAU_constraints(n, config):
-    ext_c = n.generators.query("p_nom_extendable").carrier.unique()
-    mincaps = pd.Series(
-        config["electricity"].get("BAU_mincapacities", {key: 0 for key in ext_c})
-    )
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, ">=", mincaps[lhs.index], "Carrier", "bau_mincaps")
-
-    maxcaps = pd.Series(
-        config["electricity"].get("BAU_maxcapacities", {key: np.inf for key in ext_c})
-    )
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, "<=", maxcaps[lhs.index], "Carrier", "bau_maxcaps")
+    """
+    Add a per-carrier minimal overall capacity.
+
+    BAU_mincapacities and opts must be adjusted in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-BAU-24h]
+    electricity:
+        BAU_mincapacities:
+            solar: 0
+            onwind: 0
+            OCGT: 100000
+            offwind-ac: 0
+            offwind-dc: 0
+    Which sets minimum expansion across all nodes e.g. in Europe to 100GW.
+    OCGT bus 1 + OCGT bus 2 + ... > 100000
+    """
+    mincaps = pd.Series(config["electricity"]["BAU_mincapacities"])
+    p_nom = n.model["Generator-p_nom"]
+    ext_i = n.generators.query("p_nom_extendable")
+    ext_carrier_i = xr.DataArray(ext_i.carrier.rename_axis("Generator-ext"))
+    lhs = p_nom.groupby(ext_carrier_i).sum()
+    rhs = mincaps[lhs.indexes["carrier"]].rename_axis("carrier")
+    n.model.add_constraints(lhs >= rhs, name="bau_mincaps")


 def add_SAFE_constraints(n, config):
-    peakdemand = (
-        1.0 + config["electricity"]["SAFE_reservemargin"]
-    ) * n.loads_t.p_set.sum(axis=1).max()
-    conv_techs = config["plotting"]["conv_techs"]
+    """
+    Add a capacity reserve margin of a certain fraction above the peak demand.
+    Renewable generators and storage do not contribute. Ignores network.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    config.yaml requires specifying opts:
+
+    scenario:
+        opts: [Co2L-SAFE-24h]
+    electricity:
+        SAFE_reservemargin: 0.1
+    Which sets a reserve margin of 10% above the peak demand.
+    """
+    peakdemand = n.loads_t.p_set.sum(axis=1).max()
+    margin = 1.0 + config["electricity"]["SAFE_reservemargin"]
+    reserve_margin = peakdemand * margin
+    conventional_carriers = config["electricity"]["conventional_carriers"]
+    ext_gens_i = n.generators.query(
+        "carrier in @conventional_carriers & p_nom_extendable"
+    ).index
+    p_nom = n.model["Generator-p_nom"].loc[ext_gens_i]
+    lhs = p_nom.sum()
     exist_conv_caps = n.generators.query(
-        "~p_nom_extendable & carrier in @conv_techs"
+        "~p_nom_extendable & carrier in @conventional_carriers"
     ).p_nom.sum()
-    ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
-    lhs = linexpr((1, get_var(n, "Generator", "p_nom")[ext_gens_i])).sum()
-    rhs = peakdemand - exist_conv_caps
-    define_constraints(n, lhs, ">=", rhs, "Safe", "mintotalcap")
+    rhs = reserve_margin - exist_conv_caps
+    n.model.add_constraints(lhs >= rhs, name="safe_mintotalcap")


def add_operational_reserve_margin_constraint(n, sns, config):
+    """
+    Build reserve margin constraints based on the formulation
+    as suggested in GenX
+    https://energy.mit.edu/wp-content/uploads/2017/10/Enhanced-Decision-Support-for-a-Changing-Electricity-Landscape.pdf
+    It implies that the reserve margin also accounts for optimal
+    dispatch of distributed energy resources (DERs) and demand response
+    which is a novel feature of GenX.
+ """ reserve_config = config["electricity"]["operational_reserve"] EPSILON_LOAD = reserve_config["epsilon_load"] EPSILON_VRES = reserve_config["epsilon_vres"] CONTINGENCY = reserve_config["contingency"] # Reserve Variables - reserve = get_var(n, "Generator", "r") - lhs = linexpr((1, reserve)).sum(1) + n.model.add_variables( + 0, np.inf, coords=[sns, n.generators.index], name="Generator-r" + ) + reserve = n.model["Generator-r"] + lhs = reserve.sum("Generator") # Share of extendable renewable capacities ext_i = n.generators.query("p_nom_extendable").index vres_i = n.generators_t.p_max_pu.columns if not ext_i.empty and not vres_i.empty: capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)] - renewable_capacity_variables = get_var(n, "Generator", "p_nom")[ - vres_i.intersection(ext_i) - ] - lhs += linexpr( - (-EPSILON_VRES * capacity_factor, renewable_capacity_variables) - ).sum(1) + renewable_capacity_variables = ( + n.model["Generator-p_nom"] + .loc[vres_i.intersection(ext_i)] + .rename({"Generator-ext": "Generator"}) + ) + lhs = merge( + lhs, + (renewable_capacity_variables * (-EPSILON_VRES * capacity_factor)).sum( + ["Generator"] + ), + ) - # Total demand at t - demand = n.loads_t.p.sum(1) + # Total demand per t + demand = get_as_dense(n, "Load", "p_set").sum(axis=1) # VRES potential of non extendable generators capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)] renewable_capacity = n.generators.p_nom[vres_i.difference(ext_i)] - potential = (capacity_factor * renewable_capacity).sum(1) + potential = (capacity_factor * renewable_capacity).sum(axis=1) # Right-hand-side rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY - define_constraints(n, lhs, ">=", rhs, "Reserve margin") + n.model.add_constraints(lhs >= rhs, name="reserve_margin") def update_capacity_constraint(n): @@ -320,65 +409,84 @@ def update_capacity_constraint(n): ext_i = n.generators.query("p_nom_extendable").index fix_i = n.generators.query("not p_nom_extendable").index - dispatch = get_var(n, "Generator", "p") - reserve = get_var(n, "Generator", "r") + dispatch = n.model["Generator-p"] + reserve = n.model["Generator-r"] capacity_fixed = n.generators.p_nom[fix_i] p_max_pu = get_as_dense(n, "Generator", "p_max_pu") - lhs = linexpr((1, dispatch), (1, reserve)) + lhs = merge( + dispatch * 1, + reserve * 1, + ) if not ext_i.empty: - capacity_variable = get_var(n, "Generator", "p_nom") - lhs += linexpr((-p_max_pu[ext_i], capacity_variable)).reindex( - columns=gen_i, fill_value="" - ) + capacity_variable = n.model["Generator-p_nom"] + lhs = dispatch + reserve - capacity_variable * xr.DataArray(p_max_pu[ext_i]) rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0) - define_constraints(n, lhs, "<=", rhs, "Generators", "updated_capacity_constraint") + n.model.add_constraints( + lhs <= rhs, name="gen_updated_capacity_constraint", mask=rhs.notnull() + ) def add_operational_reserve_margin(n, sns, config): """ - Build reserve margin constraints based on the formulation given in - https://genxproject.github.io/GenX/dev/core/#Reserves. 
+    Parameters
+    ----------
+    n : pypsa.Network
+    sns: pd.DatetimeIndex
+    config : dict
+
+    Example:
+    --------
+    config.yaml requires specifying operational_reserve:
+    operational_reserve: # like https://genxproject.github.io/GenX/dev/core/#Reserves
+        activate: true
+        epsilon_load: 0.02 # percentage of load at each snapshot
+        epsilon_vres: 0.02 # percentage of VRES at each snapshot
+        contingency: 400000 # MW
     """
-    define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
-
-    add_operational_reserve_margin_constraint(n, config)
+    add_operational_reserve_margin_constraint(n, sns, config)

     update_capacity_constraint(n)


 def add_battery_constraints(n):
+    """
+    Add constraint ensuring that charger = discharger, i.e.
+    1 * charger_size - efficiency * discharger_size = 0
+    """
     nodes = n.buses.index[n.buses.carrier == "battery"]
-    if nodes.empty or ("Link", "p_nom") not in n.variables.index:
+    # TODO Check if the second part of the condition can make sense
+    # if nodes.empty or ("Link", "p_nom") not in n.variables.index:
+    if nodes.empty:
         return
-    link_p_nom = get_var(n, "Link", "p_nom")
-    lhs = linexpr(
-        (1, link_p_nom[nodes + " charger"]),
-        (
-            -n.links.loc[nodes + " discharger", "efficiency"].values,
-            link_p_nom[nodes + " discharger"].values,
-        ),
+    vars_link = n.model["Link-p_nom"]
+    eff = n.links.loc[nodes + " discharger", "efficiency"]
+    lhs = merge(
+        vars_link.sel({"Link-ext": nodes + " charger"}) * 1,
+        # for some reason, eff is one element longer than vars_link
+        vars_link.sel({"Link-ext": nodes + " discharger"}) * -eff[0],
     )
-    define_constraints(n, lhs, "=", 0, "Link", "charger_ratio")
+    n.model.add_constraints(lhs == 0, name="link_charger_ratio")


def add_RES_constraints(n, res_share, config):
     lgrouper = n.loads.bus.map(n.buses.country)
+    # TODO drop load
     ggrouper = n.generators.bus.map(n.buses.country)
     sgrouper = n.storage_units.bus.map(n.buses.country)
     cgrouper = n.links.bus0.map(n.buses.country)

     logger.warning(
-        "The add_RES_constraints functionality is still work in progress. "
+        "The add_RES_constraints() is still work in progress. "
         "Unexpected results might be incurred, particularly if "
         "temporal clustering is applied or if an unexpected change of technologies "
-        "is subject to the obtimisation."
+        "is subject to future improvements."
) load = ( @@ -388,103 +496,68 @@ def add_RES_constraints(n, res_share): rhs = res_share * load - res_techs = [ - "solar", - "onwind", - "offwind-dc", - "offwind-ac", - "battery", - "hydro", - "ror", - ] + renew_techs = config["electricity"]["renewable_carriers"] + charger = ["H2 electrolysis", "battery charger"] discharger = ["H2 fuel cell", "battery discharger"] - gens_i = n.generators.query("carrier in @res_techs").index - stores_i = n.storage_units.query("carrier in @res_techs").index + gens_i = n.generators.query("carrier in @renew_techs").index + stores_i = n.storage_units.query("carrier in @renew_techs").index + charger_i = n.links.query("carrier in @charger").index discharger_i = n.links.query("carrier in @discharger").index + stores_t_weights = n.snapshot_weightings.stores + # Generators + # TODO restore grouping by countries un-commenting calls of groupby() lhs_gen = ( - linexpr( - (n.snapshot_weightings.generators, get_var(n, "Generator", "p")[gens_i].T) - ) - .T.groupby(ggrouper, axis=1) - .apply(join_exprs) + (n.model["Generator-p"].loc[:, gens_i] * n.snapshot_weightings.generators) + # .groupby(ggrouper.to_xarray()) + .sum() ) # StorageUnits - lhs_dispatch = ( - ( - linexpr( - ( - n.snapshot_weightings.stores, - get_var(n, "StorageUnit", "p_dispatch")[stores_i].T, - ) - ) - .T.groupby(sgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + store_disp_expr = ( + n.model["StorageUnit-p_dispatch"].loc[:, stores_i] * stores_t_weights + ) + store_expr = n.model["StorageUnit-p_store"].loc[:, stores_i] * stores_t_weights + charge_expr = n.model["Link-p"].loc[:, charger_i] * stores_t_weights.apply( + lambda r: r * n.links.loc[charger_i].efficiency + ) + discharge_expr = n.model["Link-p"].loc[:, discharger_i] * stores_t_weights.apply( + lambda r: r * n.links.loc[discharger_i].efficiency ) + lhs_dispatch = ( + store_disp_expr + # .groupby(sgrouper) + .sum() + ) lhs_store = ( - ( - linexpr( - ( - -n.snapshot_weightings.stores, - get_var(n, "StorageUnit", "p_store")[stores_i].T, - ) - ) - .T.groupby(sgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + store_expr + # .groupby(sgrouper) + .sum() ) # Stores (or their resp. Link components) # Note that the variables "p0" and "p1" currently do not exist. # Thus, p0 and p1 must be derived from "p" (which exists), taking into account the link efficiency. lhs_charge = ( - ( - linexpr( - ( - -n.snapshot_weightings.stores, - get_var(n, "Link", "p")[charger_i].T, - ) - ) - .T.groupby(cgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + charge_expr + # .groupby(cgrouper) + .sum() ) lhs_discharge = ( - ( - linexpr( - ( - n.snapshot_weightings.stores.apply( - lambda r: r * n.links.loc[discharger_i].efficiency - ), - get_var(n, "Link", "p")[discharger_i], - ) - ) - .groupby(cgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + discharge_expr + # .groupby(cgrouper) + .sum() ) - # signs of resp. terms are coded in the linexpr. 
-    # todo: for links (lhs_charge and lhs_discharge), account for snapshot weightings
-    lhs = lhs_gen + lhs_dispatch + lhs_store + lhs_charge + lhs_discharge
+    lhs = lhs_gen + lhs_dispatch - lhs_store - lhs_charge + lhs_discharge

-    define_constraints(n, lhs, "=", rhs, "RES share")
+    n.model.add_constraints(lhs == rhs, name="res_share")


 def add_land_use_constraint(n):
@@ -876,7 +949,7 @@ def extra_functionality(n, snapshots):
     for o in opts:
         if "RES" in o:
             res_share = float(re.findall("[0-9]*\.?[0-9]+$", o)[0])
-            add_RES_constraints(n, res_share)
+            add_RES_constraints(n, res_share, config)
     for o in opts:
         if "EQ" in o:
             add_EQ_constraints(n, o)
@@ -927,40 +1000,44 @@
         add_co2_sequestration_limit(n, snapshots)


-def solve_network(n, config, solving={}, opts="", **kwargs):
+def solve_network(n, config, solving, **kwargs):
     set_of_options = solving["solver"]["options"]
     cf_solving = solving["options"]

-    solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
-    solver_name = solving["solver"]["name"]
+    kwargs["solver_options"] = (
+        solving["solver_options"][set_of_options] if set_of_options else {}
+    )
+    kwargs["solver_name"] = solving["solver"]["name"]

-    track_iterations = cf_solving.get("track_iterations", False)
-    min_iterations = cf_solving.get("min_iterations", 4)
-    max_iterations = cf_solving.get("max_iterations", 6)
+    skip_iterations = cf_solving.get("skip_iterations", False)
+    if not n.lines.s_nom_extendable.any():
+        skip_iterations = True
+        logger.info("No expandable lines found. Skipping iterative solving.")

     # add to network for extra_functionality
     n.config = config
     n.opts = opts

-    if cf_solving.get("skip_iterations", False):
-        network_lopf(
-            n,
-            solver_name=solver_name,
-            solver_options=solver_options,
-            extra_functionality=extra_functionality,
-            **kwargs,
-        )
+    if skip_iterations:
+        status, condition = n.optimize(**kwargs)
     else:
-        ilopf(
-            n,
-            solver_name=solver_name,
-            solver_options=solver_options,
-            track_iterations=track_iterations,
-            min_iterations=min_iterations,
-            max_iterations=max_iterations,
-            extra_functionality=extra_functionality,
-            **kwargs,
+        kwargs["track_iterations"] = cf_solving.get("track_iterations", False)
+        kwargs["min_iterations"] = cf_solving.get("min_iterations", 4)
+        kwargs["max_iterations"] = cf_solving.get("max_iterations", 6)
+        status, condition = n.optimize.optimize_transmission_expansion_iteratively(
+            **kwargs
         )
+
+    if status != "ok":  # and not rolling_horizon:
+        logger.warning(
+            f"Solving status '{status}' with termination condition '{condition}'"
+        )
+    if "infeasible" in condition:
+        labels = n.model.compute_infeasibilities()
+        logger.info(f"Labels:\n{labels}")
+        n.model.print_infeasibilities()
+        raise RuntimeError("Solving status 'infeasible'")
+
     return n
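For context, a toy end-to-end run of the new linopy-based entry point (a minimal sketch: the network data and the glpk solver choice below are illustrative, not part of the patch):

import pypsa

# n.optimize() replaces network_lopf()/ilopf() and returns a
# (status, termination_condition) pair, as unpacked in solve_network() above.
n = pypsa.Network()
n.set_snapshots(range(4))
n.add("Bus", "bus0")
n.add("Generator", "gen0", bus="bus0", p_nom=100.0, marginal_cost=10.0)
n.add("Load", "load0", bus="bus0", p_set=60.0)

status, condition = n.optimize(solver_name="glpk")
assert status == "ok", condition
print(n.objective)  # 4 snapshots * 60 MW * 10 EUR/MWh = 2400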
@@ -978,11 +1055,8 @@ def solve_network(n, config, solving={}, opts="", **kwargs):
     configure_logging(snakemake)

-    tmpdir = snakemake.params.solving.get("tmpdir")
-    if tmpdir is not None:
-        Path(tmpdir).mkdir(parents=True, exist_ok=True)
     opts = snakemake.wildcards.opts.split("-")
-    solving = snakemake.params.solving
+    solve_opts = snakemake.config["solving"]["options"]

     is_sector_coupled = "sopts" in snakemake.wildcards.keys()

@@ -992,10 +1066,11 @@
     else:
         n = pypsa.Network(snakemake.input.network)

-    if snakemake.params.augmented_line_connection.get("add_to_snakefile"):
-        n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = (
-            snakemake.params.augmented_line_connection.get("min_expansion")
-        )
+    # TODO Double-check handling the augmented case
+    # if snakemake.params.augmented_line_connection.get("add_to_snakefile"):
+    #     n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = (
+    #         snakemake.params.augmented_line_connection.get("min_expansion")
+    #     )

     if (
         snakemake.config["custom_data"]["add_existing"]
@@ -1016,15 +1091,15 @@
     else:
         n_ref = None

-    n = prepare_network(n, solving["options"])
+    # needed to get `n.model` property
+    n.optimize.create_model()

+    n = prepare_network(n, solve_opts, config=solve_opts)
     n = solve_network(
         n,
         config=snakemake.config,
-        solving=solving,
-        opts=opts,
-        solver_dir=tmpdir,
-        solver_logfile=snakemake.log.solver,
+        solving=snakemake.params.solving,
+        log_fn=snakemake.log.solver,
     )
     n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
     n.export_to_netcdf(snakemake.output[0])

From 5c48d605bcd8f529e1804b8a639770f4aeba3aac Mon Sep 17 00:00:00 2001
From: ekatef
Date: Tue, 5 Nov 2024 21:42:59 +0100
Subject: [PATCH 04/48] Get back CI for Windows

---
 .github/workflows/ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5887ac014..08f69a702 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,7 +22,7 @@ jobs:
         os:
         - ubuntu-latest
         - macos-latest
-        # - windows-latest
+        - windows-latest

     runs-on: ${{ matrix.os }}

From 35517e96a03470195afacf046a17bfd06bf68e54 Mon Sep 17 00:00:00 2001
From: ekatef
Date: Tue, 5 Nov 2024 21:43:38 +0100
Subject: [PATCH 05/48] Update environment

---
 envs/environment.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/envs/environment.yaml b/envs/environment.yaml
index dc0726ebe..e263e7a78 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -78,7 +78,6 @@ dependencies:

   # Default solver for tests (required for CI)
 - glpk
-- ipopt
 - gurobi

 - pip:

From c71c538b41b04d395a96c49b3e6a930fc2f79347 Mon Sep 17 00:00:00 2001
From: =
Date: Tue, 15 Oct 2024 07:40:45 +0000
Subject: [PATCH 06/48] Add H2 carrier if not exists

---
 scripts/prepare_sector_network.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 40b3400bf..786d546bb 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -245,7 +245,8 @@ def H2_liquid_fossil_conversions(n, costs):

 def add_hydrogen(n, costs):
     "function to add hydrogen as an energy carrier with its conversion technologies from and to AC"
-    n.add("Carrier", "H2")
+    if "H2" not in n.carriers.index:
+        n.add("Carrier", "H2")

     n.madd(
         "Bus",

From b40e643060e5a9253bf5b89cac8a6fb03505f4e3 Mon Sep 17 00:00:00 2001
From: =
Date: Tue, 15 Oct 2024 07:58:05 +0000
Subject: [PATCH 07/48] Add battery carrier if not exists only

---
 scripts/prepare_sector_network.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 786d546bb..6cb4be6ed 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -1089,7 +1089,9 @@ def add_aviation(n, cost):

 def add_storage(n, costs):
     "function to add the different types of storage systems"
-    n.add("Carrier", "battery")
+
+    if "battery" not in n.carriers.index:
+        n.add("Carrier", "battery")

     n.madd(
         "Bus",
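The two patches above make the carrier additions idempotent; a minimal standalone sketch of the guarded pattern (toy network, illustrative only):

import pypsa

# Re-adding an existing carrier is the bug addressed here (see the release
# note added in PATCH 10); guarding with an index check makes repeated calls
# of add_hydrogen()/add_storage() safe.
n = pypsa.Network()
for _ in range(2):  # the second pass must be a no-op
    if "H2" not in n.carriers.index:
        n.add("Carrier", "H2")
assert list(n.carriers.index) == ["H2"]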
From fc11c023aea47b3078cdbd4041defcd9c5099cb1 Mon Sep 17 00:00:00 2001
From: =
Date: Tue, 22 Oct 2024 19:04:21 +0000
Subject: [PATCH 08/48] use H2 in both AC and DC buses in h2_hc_conversions
 function

---
 scripts/prepare_sector_network.py | 40 +++++++++++++++----------------
 1 file changed, 20 insertions(+), 20 deletions(-)

diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 6cb4be6ed..a00d224fe 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -1180,11 +1180,11 @@ def h2_hc_conversions(n, costs):
         if snakemake.config["sector"]["hydrogen"]["hydrogen_colors"]:
             n.madd(
                 "Bus",
-                nodes + " blue H2",
-                location=nodes,
+                spatial.nodes + " blue H2",
+                location=spatial.nodes,
                 carrier="blue H2",
-                x=n.buses.loc[list(nodes)].x.values,
-                y=n.buses.loc[list(nodes)].y.values,
+                x=n.buses.loc[list(spatial.nodes)].x.values,
+                y=n.buses.loc[list(spatial.nodes)].y.values,
             )

             n.madd(
@@ -1192,7 +1192,7 @@
                 spatial.nodes,
                 suffix=" SMR CC",
                 bus0=spatial.gas.nodes,
-                bus1=nodes + " blue H2",
+                bus1=spatial.nodes + " blue H2",
                 bus2="co2 atmosphere",
                 bus3=spatial.co2.nodes,
                 p_nom_extendable=True,
@@ -1207,9 +1207,9 @@

             n.madd(
                 "Link",
-                nodes + " blue H2",
-                bus0=nodes + " blue H2",
-                bus1=nodes + " H2",
+                spatial.nodes + " blue H2",
+                bus0=spatial.nodes + " blue H2",
+                bus1=spatial.nodes + " H2",
                 carrier="blue H2",
                 capital_cost=0,
                 p_nom_extendable=True,
@@ -1222,7 +1222,7 @@
                 spatial.nodes,
                 suffix=" SMR CC",
                 bus0=spatial.gas.nodes,
-                bus1=nodes + " H2",
+                bus1=spatial.nodes + " H2",
                 bus2="co2 atmosphere",
                 bus3=spatial.co2.nodes,
                 p_nom_extendable=True,
@@ -1239,18 +1239,18 @@
         if snakemake.config["sector"]["hydrogen"]["hydrogen_colors"]:
             n.madd(
                 "Bus",
-                nodes + " grey H2",
-                location=nodes,
+                spatial.nodes + " grey H2",
+                location=spatial.nodes,
                 carrier="grey H2",
-                x=n.buses.loc[list(nodes)].x.values,
-                y=n.buses.loc[list(nodes)].y.values,
+                x=n.buses.loc[list(spatial.nodes)].x.values,
+                y=n.buses.loc[list(spatial.nodes)].y.values,
             )

             n.madd(
                 "Link",
-                nodes + " SMR",
+                spatial.nodes + " SMR",
                 bus0=spatial.gas.nodes,
-                bus1=nodes + " grey H2",
+                bus1=spatial.nodes + " grey H2",
                 bus2="co2 atmosphere",
                 p_nom_extendable=True,
                 carrier="SMR",
@@ -1262,9 +1262,9 @@

             n.madd(
                 "Link",
-                nodes + " grey H2",
-                bus0=nodes + " grey H2",
-                bus1=nodes + " H2",
+                spatial.nodes + " grey H2",
+                bus0=spatial.nodes + " grey H2",
+                bus1=spatial.nodes + " H2",
                 carrier="grey H2",
                 capital_cost=0,
                 p_nom_extendable=True,
@@ -1274,9 +1274,9 @@
         else:
             n.madd(
                 "Link",
-                nodes + " SMR",
+                spatial.nodes + " SMR",
                 bus0=spatial.gas.nodes,
-                bus1=nodes + " H2",
+                bus1=spatial.nodes + " H2",
                 bus2="co2 atmosphere",
                 p_nom_extendable=True,
                 carrier="SMR",
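PATCH 09 below continues this cleanup; the point of switching uniformly to spatial.nodes is that n.madd() broadcasts all per-component arguments against a single names index. A toy sketch of that requirement (bus names and coordinates are made up):

import pypsa

# All list-like arguments to n.madd() must align with the names index; mixing
# an AC-only node list with AC+DC-based attributes raises a shape mismatch,
# which using one consistent bus index everywhere avoids.
n = pypsa.Network()
nodes = ["AC0", "AC1", "DC0"]  # hypothetical AC and DC buses
n.madd("Bus", nodes, suffix=" H2", carrier="H2", x=[0.0, 1.0, 2.0], y=[0.0, 0.0, 0.0])
print(n.buses.index.tolist())  # ['AC0 H2', 'AC1 H2', 'DC0 H2']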
From 38f6090a1b6b3d925b97b96371ebabcd0fa53805 Mon Sep 17 00:00:00 2001
From: =
Date: Tue, 22 Oct 2024 19:22:54 +0000
Subject: [PATCH 09/48] use AC and DC based H2 to make parameters of n.madd
 consistent shape

---
 scripts/prepare_sector_network.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index a00d224fe..acb6394bb 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -1344,7 +1344,7 @@ def add_shipping(n, costs):
     if options["shipping_hydrogen_liquefaction"]:
         n.madd(
             "Bus",
-            nodes,
+            spatial.nodes,
             suffix=" H2 liquid",
             carrier="H2 liquid",
             location=spatial.nodes,
@@ -1373,7 +1373,7 @@ def add_shipping(n, costs):
     ):
         n.madd(
             "Load",
-            nodes,
+            spatial.nodes,
             suffix=" H2 for shipping",
             bus=shipping_bus,
             carrier="H2 for shipping",

From 8c9077eb1ac395663d0d599c2ceffb03a9fb33ff Mon Sep 17 00:00:00 2001
From: yerbol-akhmetov
Date: Wed, 30 Oct 2024 19:40:38 +0500
Subject: [PATCH 10/48] add release notes

---
 doc/release_notes.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index 5b24e23d8..0d1b7c746 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -40,6 +40,7 @@ E.g. if a new rule becomes available describe how to use it `make test` and in o

 * The computation of `hydro_profile.nc` in `build_renewable_profiles.py` is not differentiated whether alternative clustering is applied or not; the indexing of the different power plants in `add_electricity.py` is performed according to the bus either in case alternative clustering is applied or not and a `hydro_inflow_factor` is computed prior to the computation of `inflow_t` to split the inflow according to the capacity of each different unit of each power plant (if more units are present). `PR #1119 `_

+* Fix bugs in `prepare_sector_network.py` related to links with H2 buses and a bug of re-adding already-present H2 and battery carriers `PR #1145 `_

 PyPSA-Earth 0.4.1
 =================

From 5f52c502be7b353886988b411e0e379b957e8c13 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Fri, 1 Nov 2024 00:17:54 +0100
Subject: [PATCH 11/48] docs(contributor): contrib-readme-action has updated
 readme (#1162)

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 README.md | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index c15921cab..654c81026 100644
--- a/README.md
+++ b/README.md

[Diff body omitted: another regeneration of the auto-generated "Contributors" avatar grid in README.md — the bot reshuffles a handful of profile cells (koen-vg, danielelerede-oet, carlosfv92, juli-a-ko, squoilin, jessLryan and others); only HTML table markup changes.]
`PR #1065 `__ + **Minor Changes and bug-fixing** * Minor bug-fixing to run the cluster wildcard min `PR #1019 `__ diff --git a/envs/environment.yaml b/envs/environment.yaml index 7da885a73..dc0726ebe 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -12,7 +12,7 @@ dependencies: - pip - mamba # esp for windows build -- pypsa>=0.24, <0.25 +- pypsa>=0.25, <0.29 # - atlite>=0.2.4 # until https://github.com/PyPSA/atlite/issues/244 is not merged - dask - powerplantmatching @@ -27,6 +27,7 @@ dependencies: - memory_profiler - ruamel.yaml<=0.17.26 - pytables +- pyscipopt # added to compy with the quadratic objective requirement of the clustering script - lxml - numpy - pandas diff --git a/scripts/_helpers.py b/scripts/_helpers.py index ce97f6171..a106f7185 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -922,6 +922,16 @@ def get_last_commit_message(path): return last_commit_message +def update_config_dictionary( + config_dict, + parameter_key_to_fill="lines", + dict_to_use={"geometry": "first", "bounds": "first"}, +): + config_dict.setdefault(parameter_key_to_fill, {}) + config_dict[parameter_key_to_fill].update(dict_to_use) + return config_dict + + # PYPSA-EARTH-SEC diff --git a/scripts/base_network.py b/scripts/base_network.py index 65d640d44..e11ff83c6 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -523,20 +523,6 @@ def base_network( result_type="reduce", ) n.import_components_from_dataframe(lines_ac, "Line") - # The columns which names starts with "bus" are mixed up with the third-bus specification - # when executing additional_linkports() - lines_dc.drop( - labels=[ - "bus0_lon", - "bus0_lat", - "bus1_lon", - "bus1_lat", - "bus_0_coors", - "bus_1_coors", - ], - axis=1, - inplace=True, - ) n.import_components_from_dataframe(lines_dc, "Link") n.import_components_from_dataframe(transformers, "Transformer") diff --git a/scripts/build_osm_network.py b/scripts/build_osm_network.py index 867262abc..d8584cf4e 100644 --- a/scripts/build_osm_network.py +++ b/scripts/build_osm_network.py @@ -24,6 +24,27 @@ logger = create_logger(__name__) +# Keep only a predefined set of columns, as otherwise conflicts are possible +# e.g. 
diff --git a/scripts/build_osm_network.py b/scripts/build_osm_network.py
index 867262abc..d8584cf4e 100644
--- a/scripts/build_osm_network.py
+++ b/scripts/build_osm_network.py
@@ -24,6 +24,27 @@
 logger = create_logger(__name__)


+# Keep only a predefined set of columns, as otherwise conflicts are possible,
+# e.g. the columns whose names start with "bus" are mixed up with
+# the third-bus specification when executing additional_linkports()
+LINES_COLUMNS = [
+    "line_id",
+    "circuits",
+    "tag_type",
+    "voltage",
+    "bus0",
+    "bus1",
+    "length",
+    "underground",
+    "under_construction",
+    "tag_frequency",
+    "dc",
+    "country",
+    "geometry",
+    "bounds",
+]
+
+
 def line_endings_to_bus_conversion(lines):
     # Assign to every line a start and end point

@@ -813,6 +834,7 @@ def built_network(
     countries_config,
     geo_crs,
     distance_crs,
+    lines_cols_standard,
     force_ac=False,
 ):
     logger.info("Stage 1/5: Read input data")
@@ -877,6 +899,8 @@ def built_network(
     if not os.path.exists(outputs["lines"]):
         os.makedirs(os.path.dirname(outputs["lines"]), exist_ok=True)

+    lines = lines[lines_cols_standard]
+
     to_csv_nafix(lines, outputs["lines"])  # Generate CSV
     to_csv_nafix(converters, outputs["converters"])  # Generate CSV
     to_csv_nafix(transformers, outputs["transformers"])  # Generate CSV
@@ -912,5 +936,6 @@ def built_network(
         countries,
         geo_crs,
         distance_crs,
+        lines_cols_standard=LINES_COLUMNS,
         force_ac=force_ac,
     )

diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py
index eeaa2a98a..74d284fb7 100644
--- a/scripts/cluster_network.py
+++ b/scripts/cluster_network.py
@@ -134,6 +134,7 @@
     configure_logging,
     create_logger,
     get_aggregation_strategies,
+    update_config_dictionary,
     update_p_nom_max,
 )
 from add_electricity import load_costs
@@ -575,9 +576,10 @@ def clustering_for_n_clusters(
     extended_link_costs=0,
     focus_weights=None,
 ):
-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    bus_strategies = aggregation_strategies.get("buses", dict())
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     if not isinstance(custom_busmap, pd.Series):
         if alternative_clustering:
@@ -603,12 +605,14 @@ def clustering_for_n_clusters(
     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=aggregate_carriers,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=line_length_factor,
+        line_strategies=line_strategies,
+        bus_strategies=bus_strategies,
         generator_strategies=generator_strategies,
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )
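To make the per-column strategies concrete, a small self-contained pandas illustration (not part of the patch) of what "first" and "mean" mean when several buses collapse into one cluster:

    import pandas as pd

    # three buses that fall into the same cluster
    buses = pd.DataFrame(
        {
            "cluster": ["c1", "c1", "c1"],
            "v_nom": [220.0, 220.0, 220.0],
            "lat": [9.0, 9.2, 9.4],
            "country": ["NG", "NG", "NG"],
        }
    )
    strategies = {"v_nom": "first", "lat": "mean", "country": "first"}
    print(buses.groupby("cluster").agg(strategies))
    #          v_nom  lat country
    # cluster
    # c1       220.0  9.2      NG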
@@ -727,14 +731,27 @@ def consense(x):
             ).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!"
             return v

-    aggregation_strategies = snakemake.params.cluster_options.get(
-        "aggregation_strategies", {}
+    aggregation_strategies = snakemake.params.aggregation_strategies
+
+    # Aggregation strategies must be set for all columns
+    update_config_dictionary(
+        config_dict=aggregation_strategies,
+        parameter_key_to_fill="lines",
+        dict_to_use={"v_nom": "first", "geometry": "first", "bounds": "first"},
+    )
+    update_config_dictionary(
+        config_dict=aggregation_strategies,
+        parameter_key_to_fill="buses",
+        dict_to_use={
+            "v_nom": "first",
+            "lat": "mean",
+            "lon": "mean",
+            "tag_substation": "first",
+            "tag_area": "first",
+            "country": "first",
+        },
     )
-    # translate str entries of aggregation_strategies to pd.Series functions:
-    aggregation_strategies = {
-        p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
-        for p in aggregation_strategies.keys()
-    }
+
     custom_busmap = False  # snakemake.params.custom_busmap custom busmap is deprecated https://github.com/pypsa-meets-earth/pypsa-earth/pull/694
     if custom_busmap:
         busmap = pd.read_csv(

diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py
index 92c3dd340..502cf1b9d 100644
--- a/scripts/simplify_network.py
+++ b/scripts/simplify_network.py
@@ -96,13 +96,12 @@
 from _helpers import (
     configure_logging,
     create_logger,
-    get_aggregation_strategies,
+    update_config_dictionary,
     update_p_nom_max,
 )
 from add_electricity import load_costs
 from cluster_network import cluster_regions, clustering_for_n_clusters
 from pypsa.clustering.spatial import (
-    aggregategenerators,
     aggregateoneport,
     busmap_by_stubs,
     get_clustering_from_busmap,
 )
@@ -276,11 +275,15 @@ def replace_components(n, c, df, pnl):

     _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)

-    _, generator_strategies = get_aggregation_strategies(aggregation_strategies)
+    generator_strategies = aggregation_strategies["generators"]

     carriers = set(n.generators.carrier) - set(exclude_carriers)
-    generators, generators_pnl = aggregategenerators(
-        n, busmap, carriers=carriers, custom_strategies=generator_strategies
+    generators, generators_pnl = aggregateoneport(
+        n,
+        busmap,
+        "Generator",
+        carriers=carriers,
+        custom_strategies=generator_strategies,
     )

     replace_components(n, "Generator", generators, generators_pnl)

@@ -588,19 +591,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
     if not dist.empty:
         busmap.loc[buses_i] = dist.idxmin(1)

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+    line_strategies = aggregation_strategies.get("lines", dict())
+    bus_strategies = aggregation_strategies.get("buses", dict())
+    generator_strategies = aggregation_strategies.get("generators", dict())
+    one_port_strategies = aggregation_strategies.get("one_ports", dict())

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        bus_strategies=bus_strategies,
         aggregate_generators_weighted=True,
         aggregate_generators_carriers=None,
         aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=1.0,
+        line_strategies=line_strategies,
+        bus_strategies=bus_strategies,
         generator_strategies=generator_strategies,
+        one_port_strategies=one_port_strategies,
         scale_link_capital_costs=False,
     )
     return clustering.network, busmap

@@ -848,19 +854,22 @@ def merge_into_network(n, threshold, aggregation_strategies=dict()):
     if (busmap.index == busmap).all():
         return n, n.buses.index.to_series()

-    bus_strategies, generator_strategies = get_aggregation_strategies(
-        aggregation_strategies
-    )
+
line_strategies = aggregation_strategies.get("lines", dict()) + bus_strategies = aggregation_strategies.get("buses", dict()) + generator_strategies = aggregation_strategies.get("generators", dict()) + one_port_strategies = aggregation_strategies.get("one_ports", dict()) clustering = get_clustering_from_busmap( n, busmap, - bus_strategies=bus_strategies, aggregate_generators_weighted=True, aggregate_generators_carriers=None, aggregate_one_ports=["Load", "StorageUnit"], line_length_factor=1.0, + line_strategies=line_strategies, + bus_strategies=bus_strategies, generator_strategies=generator_strategies, + one_port_strategies=one_port_strategies, scale_link_capital_costs=False, ) @@ -934,19 +943,22 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()): if (busmap.index == busmap).all(): return n, n.buses.index.to_series() - bus_strategies, generator_strategies = get_aggregation_strategies( - aggregation_strategies - ) + line_strategies = aggregation_strategies.get("lines", dict()) + bus_strategies = aggregation_strategies.get("buses", dict()) + generator_strategies = aggregation_strategies.get("generators", dict()) + one_port_strategies = aggregation_strategies.get("one_ports", dict()) clustering = get_clustering_from_busmap( n, busmap, - bus_strategies=bus_strategies, aggregate_generators_weighted=True, aggregate_generators_carriers=None, aggregate_one_ports=["Load", "StorageUnit"], line_length_factor=1.0, + line_strategies=line_strategies, + bus_strategies=bus_strategies, generator_strategies=generator_strategies, + one_port_strategies=one_port_strategies, scale_link_capital_costs=False, ) @@ -976,14 +988,27 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()): "exclude_carriers", [] ) hvdc_as_lines = snakemake.params.electricity["hvdc_as_lines"] - aggregation_strategies = snakemake.params.cluster_options.get( - "aggregation_strategies", {} + aggregation_strategies = snakemake.params.aggregation_strategies + + # Aggregation strategies must be set for all columns + update_config_dictionary( + config_dict=aggregation_strategies, + parameter_key_to_fill="lines", + dict_to_use={"v_nom": "first", "geometry": "first", "bounds": "first"}, ) - # translate str entries of aggregation_strategies to pd.Series functions: - aggregation_strategies = { - p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()} - for p in aggregation_strategies.keys() - } + update_config_dictionary( + config_dict=aggregation_strategies, + parameter_key_to_fill="buses", + dict_to_use={ + "v_nom": "first", + "lat": "mean", + "lon": "mean", + "tag_substation": "first", + "tag_area": "first", + "country": "first", + }, + ) + n, trafo_map = simplify_network_to_base_voltage(n, linetype, base_voltage) Nyears = n.snapshot_weightings.objective.sum() / 8760 @@ -1088,7 +1113,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()): solver_name, cluster_config.get("algorithm", "hac"), cluster_config.get("feature", None), - aggregation_strategies, + aggregation_strategies=aggregation_strategies, ) busmaps.append(cluster_map) From 994804a4b171eda0479813299a19b8d6cb4f2c4d Mon Sep 17 00:00:00 2001 From: Ekaterina Date: Tue, 5 Nov 2024 21:34:24 +0100 Subject: [PATCH 13/48] Enable linopy - power model (#1169) * Add a zenodo link to natura.tiff * Update environment * Revise structure definition for lines * Remove get_aggregation_strategies * Fix typo aggregation_strategies * Replace aggregategenerators with aggregateoneport * Add aggregation strategies as a parameter * 
Re-define aggregation strategies * Update aggregation strategies * Update aggregation strategies for lines * Update aggregation strategies for buses * Fix typo * Put aggregation strategies into a variable * Parametrize the aggregation strategies * Refactor update of the aggregation strategies * Clean-up the code * Revert "Add a zenodo link to natura.tiff" This reverts commit 77007598b436df510ec8ce6f29efa9d067341628. * Define an explicit clustering strategy for v_nom * Add a release note * Get glpk back * Specify v_nom for buses explicitly * Revert "Specify v_nom for buses explicitly" This reverts commit 20192e6b3e80a2fedbee398f8e892f776bb5b5cc. * Add a version restriction to the environment specification * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Adjust naming * Move the variable definition * Move the variable * Upgrade PyPSA version * Update docstring * Fix imports duplication * Update imports * Update the carrier-capacity constraint * Add docstring * Update the equity constraint * Add docstring * Update BAU constraint * Update SAFE constraint * Add docstring * Update operational reserve margin constraint * Add docstring * Add an new argument to the RM constraint * Update the update of capacity constraints * Update adding an operational reserve margin constraint * Update docstring * Update battery constraint * Add docstring * Update a constraint related to a RES share * Fix usage of add_ERS_constraints * Update solving script * Update a solving run * Fix typos --------- Co-authored-by: Davide Fioriti Co-authored-by: Davide Fioriti <67809479+davide-f@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- scripts/solve_network.py | 529 ++++++++++++++++++++++----------------- 1 file changed, 302 insertions(+), 227 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a9bbfbaa1..f52d2508b 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -52,15 +52,15 @@ linear optimal power flow (plus investment planning) is provided in the `documentation of PyPSA `_. -The optimization is based on the ``pyomo=False`` setting in the :func:`network.lopf` and :func:`pypsa.linopf.ilopf` function. -Additionally, some extra constraints specified in :mod:`prepare_network` are added. +The optimization is based on the :func:`network.optimize` function. +Additionally, some extra constraints specified in :mod:`prepare_network` and :mod:`solve_network` are added. Solving the network in multiple iterations is motivated through the dependence of transmission line capacities and impedances on values of corresponding flows. As lines are expanded their electrical parameters change, which renders the optimisation bilinear even if the power flow equations are linearized. To retain the computational advantage of continuous linear programming, a sequential linear programming technique is used, where in between iterations the line impedances are updated. -Details (and errors made through this heuristic) are discussed in the paper +Details (and errors introduced through this heuristic) are discussed in the paper - Fabian Neumann and Tom Brown. `Heuristics for Transmission Expansion Planning in Low-Carbon Energy System Models `_), *16th International Conference on the European Energy Market*, 2019. `arXiv:1907.10548 `_. 
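For orientation on the iterative scheme the docstring above describes, a minimal runnable sketch of the PyPSA entry point this patch series switches to; the toy network and solver choice are illustrative assumptions, not part of the patch:

    import pypsa

    n = pypsa.examples.ac_dc_meshed()  # small test network shipped with PyPSA
    n.lines["s_nom_extendable"] = True
    # re-linearises line impedances between iterations, as described above
    n.optimize.optimize_transmission_expansion_iteratively(
        max_iterations=4, solver_name="glpk"  # any installed LP solver works
    )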
@@ -86,23 +86,17 @@
 import pandas as pd
 import pypsa
 from _helpers import configure_logging, create_logger, override_component_attrs
+from linopy import merge
 from pypsa.descriptors import get_switchable_as_dense as get_as_dense
-from pypsa.linopf import (
-    define_constraints,
-    define_variables,
-    get_var,
-    ilopf,
-    join_exprs,
-    linexpr,
-    network_lopf,
-)
-from pypsa.linopt import define_constraints, get_var, join_exprs, linexpr
+from pypsa.optimization.abstract import optimize_transmission_expansion_iteratively
+from pypsa.optimization.optimize import optimize
+from vresutils.benchmark import memory_logger

 logger = create_logger(__name__)
 pypsa.pf.logger.setLevel(logging.WARNING)


-def prepare_network(n, solve_opts):
+def prepare_network(n, solve_opts, config):
     if "clip_p_max_pu" in solve_opts:
         for df in (
             n.generators_t.p_max_pu,
@@ -159,6 +153,25 @@ def prepare_network(n, solve_opts):


 def add_CCL_constraints(n, config):
+    """
+    Add CCL (country & carrier limit) constraint to the network.
+
+    Add minimum and maximum levels of generator nominal capacity per carrier
+    for individual countries. Opts and path for agg_p_nom_minmax.csv must be defined
+    in config.yaml. Default file is available at data/agg_p_nom_minmax.csv.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-CCL-24H]
+    electricity:
+        agg_p_nom_limits: data/agg_p_nom_minmax.csv
+    """
     agg_p_nom_limits = config["electricity"].get("agg_p_nom_limits")

     try:
@@ -174,32 +187,57 @@ def add_CCL_constraints(n, config):
     )

     gen_country = n.generators.bus.map(n.buses.country)
-    # cc means country and carrier
-    p_nom_per_cc = (
-        pd.DataFrame(
-            {
-                "p_nom": linexpr((1, get_var(n, "Generator", "p_nom"))),
-                "country": gen_country,
-                "carrier": n.generators.carrier,
-            }
+    capacity_variable = n.model["Generator-p_nom"]
+
+    lhs = []
+    ext_carriers = n.generators.query("p_nom_extendable").carrier.unique()
+    for c in ext_carriers:
+        ext_carrier = n.generators.query("p_nom_extendable and carrier == @c")
+        country_grouper = (
+            ext_carrier.bus.map(n.buses.country)
+            .rename_axis("Generator-ext")
+            .rename("country")
         )
-        .dropna(subset=["p_nom"])
-        .groupby(["country", "carrier"])
-        .p_nom.apply(join_exprs)
+        ext_carrier_per_country = capacity_variable.loc[
+            country_grouper.index
+        ].groupby_sum(country_grouper)
+        lhs.append(ext_carrier_per_country)
+    lhs = merge(lhs, dim=pd.Index(ext_carriers, name="carrier"))
+
+    min_matrix = agg_p_nom_minmax["min"].to_xarray().unstack().reindex_like(lhs)
+    max_matrix = agg_p_nom_minmax["max"].to_xarray().unstack().reindex_like(lhs)
+
+    n.model.add_constraints(
+        lhs >= min_matrix, name="agg_p_nom_min", mask=min_matrix.notnull()
+    )
+    n.model.add_constraints(
+        lhs <= max_matrix, name="agg_p_nom_max", mask=max_matrix.notnull()
     )
-    minimum = agg_p_nom_minmax["min"].dropna()
-    if not minimum.empty:
-        minconstraint = define_constraints(
-            n, p_nom_per_cc[minimum.index], ">=", minimum, "agg_p_nom", "min"
-        )
-    maximum = agg_p_nom_minmax["max"].dropna()
-    if not maximum.empty:
-        maxconstraint = define_constraints(
-            n, p_nom_per_cc[maximum.index], "<=", maximum, "agg_p_nom", "max"
-        )
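The hunk above is representative of the whole migration: `get_var`/`linexpr`/`define_constraints` become keyed access on `n.model`. A minimal sketch of the pattern (illustrative only, using a toy network; not part of the patch):

    import pypsa
    import xarray as xr

    n = pypsa.examples.ac_dc_meshed()
    n.generators["p_nom_extendable"] = True
    n.optimize.create_model()  # builds n.model, a linopy.Model

    p_nom = n.model["Generator-p_nom"]  # was: get_var(n, "Generator", "p_nom")
    carrier = xr.DataArray(n.generators.carrier.rename_axis("Generator-ext"))
    lhs = p_nom.groupby(carrier).sum()  # was: linexpr(...).groupby(...).apply(join_exprs)
    n.model.add_constraints(lhs >= 0, name="demo_min_caps")  # was: define_constraints(...)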

 def add_EQ_constraints(n, o, scaling=1e-1):
+    """
+    Add equity constraints to the network.
+
+    Currently this is implemented for the electricity sector only.
+
+    Opts must be specified in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    o : str
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-EQ0.7-24h]
+
+    Require each country or node to on average produce a minimal share
+    of its total electricity consumption itself. Example: EQ0.7c demands each country
+    to produce on average at least 70% of its consumption; EQ0.7 demands
+    each node to produce on average at least 70% of its consumption.
+    """
     float_regex = "[0-9]*\.?[0-9]+"
     level = float(re.findall(float_regex, o)[0])
@@ -220,99 +258,150 @@ def add_EQ_constraints(n, o, scaling=1e-1):
     )
     inflow = inflow.reindex(load.index).fillna(0.0)
     rhs = scaling * (level * load - inflow)
+    dispatch_variable = n.model["Generator-p"]
     lhs_gen = (
-        linexpr(
-            (n.snapshot_weightings.generators * scaling, get_var(n, "Generator", "p").T)
-        )
-        .T.groupby(ggrouper, axis=1)
-        .apply(join_exprs)
+        (dispatch_variable * (n.snapshot_weightings.generators * scaling))
+        .groupby(ggrouper.to_xarray())
+        .sum()
+        .sum("snapshot")
     )
-    lhs_spill = (
-        linexpr(
-            (
-                -n.snapshot_weightings.stores * scaling,
-                get_var(n, "StorageUnit", "spill").T,
-            )
+    # TODO: double check that this is really needed, why do have to subtract the spillage
+    if not n.storage_units_t.inflow.empty:
+        spillage_variable = n.model["StorageUnit-spill"]
+        lhs_spill = (
+            (spillage_variable * (-n.snapshot_weightings.stores * scaling))
+            .groupby(sgrouper.to_xarray())
+            .sum()
+            .sum("snapshot")
         )
-        .T.groupby(sgrouper, axis=1)
-        .apply(join_exprs)
-    )
-    lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
-    lhs = lhs_gen + lhs_spill
-    define_constraints(n, lhs, ">=", rhs, "equity", "min")
+        lhs = lhs_gen + lhs_spill
+    else:
+        lhs = lhs_gen
+    n.model.add_constraints(lhs >= rhs, name="equity_min")
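As a side note on the wildcard parsing above, the EQ level and scope are decoded from the opts token like this (a runnable illustration, not part of the patch):

    import re

    o = "EQ0.7c"  # example opts token from the scenario config
    level = float(re.findall(r"[0-9]*\.?[0-9]+", o)[0])  # -> 0.7
    per_country = o.endswith("c")  # trailing "c": per-country; otherwise per-node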

 def add_BAU_constraints(n, config):
-    ext_c = n.generators.query("p_nom_extendable").carrier.unique()
-    mincaps = pd.Series(
-        config["electricity"].get("BAU_mincapacities", {key: 0 for key in ext_c})
-    )
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, ">=", mincaps[lhs.index], "Carrier", "bau_mincaps")
-
-    maxcaps = pd.Series(
-        config["electricity"].get("BAU_maxcapacities", {key: np.inf for key in ext_c})
-    )
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, "<=", maxcaps[lhs.index], "Carrier", "bau_maxcaps")
+    """
+    Add a per-carrier minimal overall capacity.
+
+    BAU_mincapacities and opts must be adjusted in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-BAU-24h]
+    electricity:
+        BAU_mincapacities:
+            solar: 0
+            onwind: 0
+            OCGT: 100000
+            offwind-ac: 0
+            offwind-dc: 0
+    Which sets minimum expansion across all nodes e.g. in Europe to 100GW.
+    OCGT bus 1 + OCGT bus 2 + ... > 100000
+    """
+    mincaps = pd.Series(config["electricity"]["BAU_mincapacities"])
+    p_nom = n.model["Generator-p_nom"]
+    ext_i = n.generators.query("p_nom_extendable")
+    ext_carrier_i = xr.DataArray(ext_i.carrier.rename_axis("Generator-ext"))
+    lhs = p_nom.groupby(ext_carrier_i).sum()
+    rhs = mincaps[lhs.indexes["carrier"]].rename_axis("carrier")
+    n.model.add_constraints(lhs >= rhs, name="bau_mincaps")


 def add_SAFE_constraints(n, config):
-    peakdemand = (
-        1.0 + config["electricity"]["SAFE_reservemargin"]
-    ) * n.loads_t.p_set.sum(axis=1).max()
-    conv_techs = config["plotting"]["conv_techs"]
+    """
+    Add a capacity reserve margin of a certain fraction above the peak demand.
+    Renewable generators and storage do not contribute. Ignores network.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    config.yaml requires specifying opts:
+
+    scenario:
+        opts: [Co2L-SAFE-24h]
+    electricity:
+        SAFE_reservemargin: 0.1
+    Which sets a reserve margin of 10% above the peak demand.
+    """
+    peakdemand = n.loads_t.p_set.sum(axis=1).max()
+    margin = 1.0 + config["electricity"]["SAFE_reservemargin"]
+    reserve_margin = peakdemand * margin
+    conventional_carriers = config["electricity"]["conventional_carriers"]
+    ext_gens_i = n.generators.query(
+        "carrier in @conventional_carriers & p_nom_extendable"
+    ).index
+    capacity_variable = n.model["Generator-p_nom"]
+    p_nom = n.model["Generator-p_nom"].loc[ext_gens_i]
+    lhs = p_nom.sum()
     exist_conv_caps = n.generators.query(
-        "~p_nom_extendable & carrier in @conv_techs"
+        "~p_nom_extendable & carrier in @conventional_carriers"
     ).p_nom.sum()
-    ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
-    lhs = linexpr((1, get_var(n, "Generator", "p_nom")[ext_gens_i])).sum()
-    rhs = peakdemand - exist_conv_caps
-    define_constraints(n, lhs, ">=", rhs, "Safe", "mintotalcap")
+    rhs = reserve_margin - exist_conv_caps
+    n.model.add_constraints(lhs >= rhs, name="safe_mintotalcap")


-def add_operational_reserve_margin_constraint(n, config):
+def add_operational_reserve_margin_constraint(n, sns, config):
+    """
+    Build reserve margin constraints based on the formulation
+    as suggested in GenX
+    https://energy.mit.edu/wp-content/uploads/2017/10/Enhanced-Decision-Support-for-a-Changing-Electricity-Landscape.pdf
+    It implies that the reserve margin also accounts for optimal
+    dispatch of distributed energy resources (DERs) and demand response,
+    which is a novel feature of GenX.
+ """ reserve_config = config["electricity"]["operational_reserve"] EPSILON_LOAD = reserve_config["epsilon_load"] EPSILON_VRES = reserve_config["epsilon_vres"] CONTINGENCY = reserve_config["contingency"] # Reserve Variables - reserve = get_var(n, "Generator", "r") - lhs = linexpr((1, reserve)).sum(1) + n.model.add_variables( + 0, np.inf, coords=[sns, n.generators.index], name="Generator-r" + ) + reserve = n.model["Generator-r"] + lhs = reserve.sum("Generator") # Share of extendable renewable capacities ext_i = n.generators.query("p_nom_extendable").index vres_i = n.generators_t.p_max_pu.columns if not ext_i.empty and not vres_i.empty: capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)] - renewable_capacity_variables = get_var(n, "Generator", "p_nom")[ - vres_i.intersection(ext_i) - ] - lhs += linexpr( - (-EPSILON_VRES * capacity_factor, renewable_capacity_variables) - ).sum(1) + renewable_capacity_variables = ( + n.model["Generator-p_nom"] + .loc[vres_i.intersection(ext_i)] + .rename({"Generator-ext": "Generator"}) + ) + lhs = merge( + lhs, + (renewable_capacity_variables * (-EPSILON_VRES * capacity_factor)).sum( + ["Generator"] + ), + ) - # Total demand at t - demand = n.loads_t.p.sum(1) + # Total demand per t + demand = get_as_dense(n, "Load", "p_set").sum(axis=1) # VRES potential of non extendable generators capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)] renewable_capacity = n.generators.p_nom[vres_i.difference(ext_i)] - potential = (capacity_factor * renewable_capacity).sum(1) + potential = (capacity_factor * renewable_capacity).sum(axis=1) # Right-hand-side rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY - define_constraints(n, lhs, ">=", rhs, "Reserve margin") + n.model.add_constraints(lhs >= rhs, name="reserve_margin") def update_capacity_constraint(n): @@ -320,65 +409,84 @@ def update_capacity_constraint(n): ext_i = n.generators.query("p_nom_extendable").index fix_i = n.generators.query("not p_nom_extendable").index - dispatch = get_var(n, "Generator", "p") - reserve = get_var(n, "Generator", "r") + dispatch = n.model["Generator-p"] + reserve = n.model["Generator-r"] capacity_fixed = n.generators.p_nom[fix_i] p_max_pu = get_as_dense(n, "Generator", "p_max_pu") - lhs = linexpr((1, dispatch), (1, reserve)) + lhs = merge( + dispatch * 1, + reserve * 1, + ) if not ext_i.empty: - capacity_variable = get_var(n, "Generator", "p_nom") - lhs += linexpr((-p_max_pu[ext_i], capacity_variable)).reindex( - columns=gen_i, fill_value="" - ) + capacity_variable = n.model["Generator-p_nom"] + lhs = dispatch + reserve - capacity_variable * xr.DataArray(p_max_pu[ext_i]) rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0) - define_constraints(n, lhs, "<=", rhs, "Generators", "updated_capacity_constraint") + n.model.add_constraints( + lhs <= rhs, name="gen_updated_capacity_constraint", mask=rhs.notnull() + ) def add_operational_reserve_margin(n, sns, config): """ - Build reserve margin constraints based on the formulation given in - https://genxproject.github.io/GenX/dev/core/#Reserves. 
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    sns : pd.DatetimeIndex
+    config : dict
+
+    Example
+    -------
+    config.yaml requires specifying operational_reserve:
+    operational_reserve: # like https://genxproject.github.io/GenX/dev/core/#Reserves
+        activate: true
+        epsilon_load: 0.02 # percentage of load at each snapshot
+        epsilon_vres: 0.02 # percentage of VRES at each snapshot
+        contingency: 400000 # MW
     """
-    define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
-
-    add_operational_reserve_margin_constraint(n, config)
+    add_operational_reserve_margin_constraint(n, sns, config)

     update_capacity_constraint(n)


 def add_battery_constraints(n):
+    """
+    Add constraint ensuring that charger = discharger, i.e.
+    1 * charger_size - efficiency * discharger_size = 0
+    """
     nodes = n.buses.index[n.buses.carrier == "battery"]
-    if nodes.empty or ("Link", "p_nom") not in n.variables.index:
+    # TODO Check if the second part of the condition can make sense
+    # if nodes.empty or ("Link", "p_nom") not in n.variables.index:
+    if nodes.empty:
         return
-    link_p_nom = get_var(n, "Link", "p_nom")
-    lhs = linexpr(
-        (1, link_p_nom[nodes + " charger"]),
-        (
-            -n.links.loc[nodes + " discharger", "efficiency"].values,
-            link_p_nom[nodes + " discharger"].values,
-        ),
+    vars_link = n.model["Link-p_nom"]
+    eff = n.links.loc[nodes + " discharger", "efficiency"]
+    lhs = merge(
+        vars_link.sel({"Link-ext": nodes + " charger"}) * 1,
+        # for some reason, eff is one element longer than vars_link
+        vars_link.sel({"Link-ext": nodes + " discharger"}) * -eff[0],
    )
-    define_constraints(n, lhs, "=", 0, "Link", "charger_ratio")
+    n.model.add_constraints(lhs == 0, name="link_charger_ratio")


-def add_RES_constraints(n, res_share):
+def add_RES_constraints(n, res_share, config):
     lgrouper = n.loads.bus.map(n.buses.country)
+    # TODO drop load
     ggrouper = n.generators.bus.map(n.buses.country)
     sgrouper = n.storage_units.bus.map(n.buses.country)
     cgrouper = n.links.bus0.map(n.buses.country)

     logger.warning(
-        "The add_RES_constraints functionality is still work in progress. "
+        "The add_RES_constraints() is still work in progress. "
         "Unexpected results might be incurred, particularly if "
         "temporal clustering is applied or if an unexpected change of technologies "
-        "is subject to the obtimisation."
+        "is subject to future improvements."
) load = ( @@ -388,103 +496,68 @@ def add_RES_constraints(n, res_share): rhs = res_share * load - res_techs = [ - "solar", - "onwind", - "offwind-dc", - "offwind-ac", - "battery", - "hydro", - "ror", - ] + renew_techs = config["electricity"]["renewable_carriers"] + charger = ["H2 electrolysis", "battery charger"] discharger = ["H2 fuel cell", "battery discharger"] - gens_i = n.generators.query("carrier in @res_techs").index - stores_i = n.storage_units.query("carrier in @res_techs").index + gens_i = n.generators.query("carrier in @renew_techs").index + stores_i = n.storage_units.query("carrier in @renew_techs").index + charger_i = n.links.query("carrier in @charger").index discharger_i = n.links.query("carrier in @discharger").index + stores_t_weights = n.snapshot_weightings.stores + # Generators + # TODO restore grouping by countries un-commenting calls of groupby() lhs_gen = ( - linexpr( - (n.snapshot_weightings.generators, get_var(n, "Generator", "p")[gens_i].T) - ) - .T.groupby(ggrouper, axis=1) - .apply(join_exprs) + (n.model["Generator-p"].loc[:, gens_i] * n.snapshot_weightings.generators) + # .groupby(ggrouper.to_xarray()) + .sum() ) # StorageUnits - lhs_dispatch = ( - ( - linexpr( - ( - n.snapshot_weightings.stores, - get_var(n, "StorageUnit", "p_dispatch")[stores_i].T, - ) - ) - .T.groupby(sgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + store_disp_expr = ( + n.model["StorageUnit-p_dispatch"].loc[:, stores_i] * stores_t_weights + ) + store_expr = n.model["StorageUnit-p_store"].loc[:, stores_i] * stores_t_weights + charge_expr = n.model["Link-p"].loc[:, charger_i] * stores_t_weights.apply( + lambda r: r * n.links.loc[charger_i].efficiency + ) + discharge_expr = n.model["Link-p"].loc[:, discharger_i] * stores_t_weights.apply( + lambda r: r * n.links.loc[discharger_i].efficiency ) + lhs_dispatch = ( + store_disp_expr + # .groupby(sgrouper) + .sum() + ) lhs_store = ( - ( - linexpr( - ( - -n.snapshot_weightings.stores, - get_var(n, "StorageUnit", "p_store")[stores_i].T, - ) - ) - .T.groupby(sgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + store_expr + # .groupby(sgrouper) + .sum() ) # Stores (or their resp. Link components) # Note that the variables "p0" and "p1" currently do not exist. # Thus, p0 and p1 must be derived from "p" (which exists), taking into account the link efficiency. lhs_charge = ( - ( - linexpr( - ( - -n.snapshot_weightings.stores, - get_var(n, "Link", "p")[charger_i].T, - ) - ) - .T.groupby(cgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + charge_expr + # .groupby(cgrouper) + .sum() ) lhs_discharge = ( - ( - linexpr( - ( - n.snapshot_weightings.stores.apply( - lambda r: r * n.links.loc[discharger_i].efficiency - ), - get_var(n, "Link", "p")[discharger_i], - ) - ) - .groupby(cgrouper, axis=1) - .apply(join_exprs) - ) - .reindex(lhs_gen.index) - .fillna("") + discharge_expr + # .groupby(cgrouper) + .sum() ) - # signs of resp. terms are coded in the linexpr. 
- # todo: for links (lhs_charge and lhs_discharge), account for snapshot weightings - lhs = lhs_gen + lhs_dispatch + lhs_store + lhs_charge + lhs_discharge + lhs = lhs_gen + lhs_dispatch - lhs_store - lhs_charge + lhs_discharge - define_constraints(n, lhs, "=", rhs, "RES share") + n.model.add_constraints(lhs == rhs, name="res_share") def add_land_use_constraint(n): @@ -876,7 +949,7 @@ def extra_functionality(n, snapshots): for o in opts: if "RES" in o: res_share = float(re.findall("[0-9]*\.?[0-9]+$", o)[0]) - add_RES_constraints(n, res_share) + add_RES_constraints(n, res_share, config) for o in opts: if "EQ" in o: add_EQ_constraints(n, o) @@ -927,40 +1000,44 @@ def extra_functionality(n, snapshots): add_co2_sequestration_limit(n, snapshots) -def solve_network(n, config, solving={}, opts="", **kwargs): +def solve_network(n, config, solving, **kwargs): set_of_options = solving["solver"]["options"] cf_solving = solving["options"] - solver_options = solving["solver_options"][set_of_options] if set_of_options else {} - solver_name = solving["solver"]["name"] + kwargs["solver_options"] = ( + solving["solver_options"][set_of_options] if set_of_options else {} + ) + kwargs["solver_name"] = solving["solver"]["name"] - track_iterations = cf_solving.get("track_iterations", False) - min_iterations = cf_solving.get("min_iterations", 4) - max_iterations = cf_solving.get("max_iterations", 6) + skip_iterations = cf_solving.get("skip_iterations", False) + if not n.lines.s_nom_extendable.any(): + skip_iterations = True + logger.info("No expandable lines found. Skipping iterative solving.") # add to network for extra_functionality n.config = config n.opts = opts - if cf_solving.get("skip_iterations", False): - network_lopf( - n, - solver_name=solver_name, - solver_options=solver_options, - extra_functionality=extra_functionality, - **kwargs, - ) + if skip_iterations: + status, condition = n.optimize(**kwargs) else: - ilopf( - n, - solver_name=solver_name, - solver_options=solver_options, - track_iterations=track_iterations, - min_iterations=min_iterations, - max_iterations=max_iterations, - extra_functionality=extra_functionality, - **kwargs, + kwargs["track_iterations"] = (cf_solving.get("track_iterations", False),) + kwargs["min_iterations"] = (cf_solving.get("min_iterations", 4),) + kwargs["max_iterations"] = (cf_solving.get("max_iterations", 6),) + status, condition = n.optimize.optimize_transmission_expansion_iteratively( + **kwargs ) + + if status != "ok": # and not rolling_horizon: + logger.warning( + f"Solving status '{status}' with termination condition '{condition}'" + ) + if "infeasible" in condition: + labels = n.model.compute_infeasibilities() + logger.info(f"Labels:\n{labels}") + n.model.print_infeasibilities() + raise RuntimeError("Solving status 'infeasible'") + return n @@ -978,11 +1055,8 @@ def solve_network(n, config, solving={}, opts="", **kwargs): configure_logging(snakemake) - tmpdir = snakemake.params.solving.get("tmpdir") - if tmpdir is not None: - Path(tmpdir).mkdir(parents=True, exist_ok=True) opts = snakemake.wildcards.opts.split("-") - solving = snakemake.params.solving + solve_opts = snakemake.config["solving"]["options"] is_sector_coupled = "sopts" in snakemake.wildcards.keys() @@ -992,10 +1066,11 @@ def solve_network(n, config, solving={}, opts="", **kwargs): else: n = pypsa.Network(snakemake.input.network) - if snakemake.params.augmented_line_connection.get("add_to_snakefile"): - n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = ( - 
snakemake.params.augmented_line_connection.get("min_expansion") - ) + # TODO Double-check handling the augmented case + # if snakemake.params.augmented_line_connection.get("add_to_snakefile"): + # n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = ( + # snakemake.params.augmented_line_connection.get("min_expansion") + # ) if ( snakemake.config["custom_data"]["add_existing"] @@ -1016,15 +1091,15 @@ def solve_network(n, config, solving={}, opts="", **kwargs): else: n_ref = None - n = prepare_network(n, solving["options"]) + # needed to get `n.model` property + n.optimize.create_model() + n = prepare_network(n, solve_opts, config=solve_opts) n = solve_network( n, config=snakemake.config, - solving=solving, - opts=opts, - solver_dir=tmpdir, - solver_logfile=snakemake.log.solver, + solving=snakemake.params.solving, + log_fn=snakemake.log.solver, ) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 0d3ebdf454b8893c623d96e20fda4a257667d9a4 Mon Sep 17 00:00:00 2001 From: ekatef Date: Tue, 5 Nov 2024 21:42:59 +0100 Subject: [PATCH 14/48] Get back CI for Windows --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5887ac014..08f69a702 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: os: - ubuntu-latest - macos-latest - # - windows-latest + - windows-latest runs-on: ${{ matrix.os }} From 34b616894ca693e1c822d385969ed651f55e80ce Mon Sep 17 00:00:00 2001 From: ekatef Date: Tue, 5 Nov 2024 21:43:38 +0100 Subject: [PATCH 15/48] Update environment --- envs/environment.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index dc0726ebe..e263e7a78 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -78,7 +78,6 @@ dependencies: # Default solver for tests (required for CI) - glpk -- ipopt - gurobi - pip: From 268851b280933ab0c3b3adce4334de75cd89d14c Mon Sep 17 00:00:00 2001 From: Emmanuel Bolarinwa Date: Thu, 7 Nov 2024 17:16:15 +0100 Subject: [PATCH 16/48] revise implementation to accomodate linopy for sec --- scripts/solve_network.py | 148 ++++++++++++++++++++------------------- 1 file changed, 75 insertions(+), 73 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index f52d2508b..0bd087016 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -621,16 +621,21 @@ def _add_land_use_constraint_m(n): def add_h2_network_cap(n, cap): h2_network = n.links.loc[n.links.carrier == "H2 pipeline"] - if h2_network.index.empty or ("Link", "p_nom") not in n.variables.index: + if h2_network.index.empty: return - h2_network_cap = get_var(n, "Link", "p_nom") - subset_index = h2_network.index.intersection(h2_network_cap.index) - lhs = linexpr( - (h2_network.loc[subset_index, "length"], h2_network_cap[subset_index]) + h2_network_cap = n.model["Link-p_nom"] + h2_network_cap_index = h2_network_cap.indexes["Link-ext"] + subset_index = h2_network.index.intersection(h2_network_cap_index) + diff_index = h2_network_cap_index.difference(subset_index) + if len(diff_index) > 0: + logger.warning( + f"Impossible to set H2 cap for the following links: {diff_index}" + ) + lhs = ( + h2_network_cap.loc[subset_index] * h2_network.loc[subset_index, "length"] ).sum() - # lhs = linexpr((1, h2_network_cap[h2_network.index])).sum() rhs = cap * 1000 - define_constraints(n, lhs, "<=", rhs, "h2_network_cap") + 
n.model.add_constraints(lhs <= rhs, name="h2_network_cap") def H2_export_yearly_constraint(n): @@ -651,9 +656,10 @@ def H2_export_yearly_constraint(n): index=n.snapshots, columns=res_index, ) - res = join_exprs( - linexpr((weightings, get_var(n, "Generator", "p")[res_index])) - ) # single line sum + capacity_variable = n.model["Generator-p"] + + # single line sum + res = (weightings * capacity_variable.loc[res_index]).sum() load_ind = n.loads[n.loads.carrier == "AC"].index.intersection( n.loads_t.p_set.columns @@ -681,7 +687,7 @@ def H2_export_yearly_constraint(n): else: rhs = h2_export * (1 / 0.7) - con = define_constraints(n, lhs, ">=", rhs, "H2ExportConstraint", "RESproduction") + n.model.add_constraints(lhs >= rhs, name="H2ExportConstraint-RESproduction") def monthly_constraints(n, n_ref): @@ -704,15 +710,17 @@ def monthly_constraints(n, n_ref): index=n.snapshots, columns=res_index, ) + capacity_variable = n.model["Generator-p"] - res = linexpr((weightings, get_var(n, "Generator", "p")[res_index])).sum( - axis=1 - ) # single line sum + # single line sum + res = (weightings * capacity_variable[res_index]).sum(axis=1) res = res.groupby(res.index.month).sum() - electrolysis = get_var(n, "Link", "p")[ + link_p = n.model["Link-p"] + electrolysis = link_p.loc[ n.links.index[n.links.index.str.contains("H2 Electrolysis")] ] + weightings_electrolysis = pd.DataFrame( np.outer( n.snapshot_weightings["generators"], [1.0] * len(electrolysis.columns) @@ -721,7 +729,7 @@ def monthly_constraints(n, n_ref): columns=electrolysis.columns, ) - elec_input = linexpr((-allowed_excess * weightings_electrolysis, electrolysis)).sum( + elec_input = ((-allowed_excess * weightings_electrolysis) * electrolysis).sum( axis=1 ) @@ -744,16 +752,16 @@ def monthly_constraints(n, n_ref): for i in range(len(res.index)): lhs = res.iloc[i] + "\n" + elec_input.iloc[i] rhs = res_ref.iloc[i] + elec_input_ref.iloc[i] - con = define_constraints( - n, lhs, ">=", rhs, f"RESconstraints_{i}", f"REStarget_{i}" + n.model.add_constraints( + lhs >= rhs, name=f"RESconstraints_{i}-REStarget_{i}" ) else: for i in range(len(res.index)): lhs = res.iloc[i] + "\n" + elec_input.iloc[i] - con = define_constraints( - n, lhs, ">=", 0.0, f"RESconstraints_{i}", f"REStarget_{i}" + n.model.add_constraints( + lhs >= 0.0, name=f"RESconstraints_{i}-REStarget_{i}" ) # else: # logger.info("ignoring H2 export constraint as wildcard is set to 0") @@ -774,84 +782,72 @@ def add_chp_constraints(n): electric = n.links.index[electric_bool] heat = n.links.index[heat_bool] - electric_ext = n.links.index[electric_bool & n.links.p_nom_extendable] - heat_ext = n.links.index[heat_bool & n.links.p_nom_extendable] + electric_ext = n.links[electric_bool].query("p_nom_extendable").index + heat_ext = n.links[heat_bool].query("p_nom_extendable").index - electric_fix = n.links.index[electric_bool & ~n.links.p_nom_extendable] - heat_fix = n.links.index[heat_bool & ~n.links.p_nom_extendable] + electric_fix = n.links[electric_bool].query("~p_nom_extendable").index + heat_fix = n.links[heat_bool].query("~p_nom_extendable").index - link_p = get_var(n, "Link", "p") + p = n.model["Link-p"] # dimension: [time, link] + # output ratio between heat and electricity and top_iso_fuel_line for extendable if not electric_ext.empty: - link_p_nom = get_var(n, "Link", "p_nom") - - # ratio of output heat to electricity set by p_nom_ratio - lhs = linexpr( - ( - n.links.loc[electric_ext, "efficiency"] - * n.links.loc[electric_ext, "p_nom_ratio"], - link_p_nom[electric_ext], - ), - 
-            (-n.links.loc[heat_ext, "efficiency"].values, link_p_nom[heat_ext].values),
-        )
-
-        define_constraints(n, lhs, "=", 0, "chplink", "fix_p_nom_ratio")
-
-        # top_iso_fuel_line for extendable
-        lhs = linexpr(
-            (1, link_p[heat_ext]),
-            (1, link_p[electric_ext].values),
-            (-1, link_p_nom[electric_ext].values),
-        )
-
-        define_constraints(n, lhs, "<=", 0, "chplink", "top_iso_fuel_line_ext")
+        p_nom = n.model["Link-p_nom"]
+
+        lhs = (
+            p_nom.loc[electric_ext]
+            * (n.links.p_nom_ratio * n.links.efficiency)[electric_ext].values
+            - p_nom.loc[heat_ext] * n.links.efficiency[heat_ext].values
+        )
+        n.model.add_constraints(lhs == 0, name="chplink-fix_p_nom_ratio")
+
+        rename = {"Link-ext": "Link"}
+        lhs = (
+            p.loc[:, electric_ext]
+            + p.loc[:, heat_ext]
+            - p_nom.rename(rename).loc[electric_ext]
+        )
+        n.model.add_constraints(lhs <= 0, name="chplink-top_iso_fuel_line_ext")

+    # top_iso_fuel_line for fixed
     if not electric_fix.empty:
-        # top_iso_fuel_line for fixed
-        lhs = linexpr((1, link_p[heat_fix]), (1, link_p[electric_fix].values))
-
-        rhs = n.links.loc[electric_fix, "p_nom"].values
-
-        define_constraints(n, lhs, "<=", rhs, "chplink", "top_iso_fuel_line_fix")
+        lhs = p.loc[:, electric_fix] + p.loc[:, heat_fix]
+        rhs = n.links.p_nom[electric_fix]
+        n.model.add_constraints(lhs <= rhs, name="chplink-top_iso_fuel_line_fix")

+    # back-pressure
     if not electric.empty:
-        # backpressure
-        lhs = linexpr(
-            (
-                n.links.loc[electric, "c_b"].values * n.links.loc[heat, "efficiency"],
-                link_p[heat],
-            ),
-            (-n.links.loc[electric, "efficiency"].values, link_p[electric].values),
-        )
-
-        define_constraints(n, lhs, "<=", 0, "chplink", "backpressure")
+        lhs = (
+            p.loc[:, heat] * (n.links.efficiency[heat] * n.links.c_b[electric].values)
+            - p.loc[:, electric] * n.links.efficiency[electric]
        )
+        n.model.add_constraints(lhs <= 0, name="chplink-backpressure")


 def add_co2_sequestration_limit(n, sns):
     co2_stores = n.stores.loc[n.stores.carrier == "co2 stored"].index

-    if co2_stores.empty or ("Store", "e") not in n.variables.index:
+    if co2_stores.empty:  # or ("Store", "e") not in n.variables.index:
         return

-    vars_final_co2_stored = get_var(n, "Store", "e").loc[sns[-1], co2_stores]
+    vars_final_co2_stored = n.model["Store-e"].loc[sns[-1], co2_stores]

-    lhs = linexpr((1, vars_final_co2_stored)).sum()
+    lhs = (1 * vars_final_co2_stored).sum()

     rhs = (
         n.config["sector"].get("co2_sequestration_potential", 5) * 1e6
     )  # TODO change 200 limit (Europe)

     name = "co2_sequestration_limit"
-    define_constraints(
-        n, lhs, "<=", rhs, "GlobalConstraint", "mu", axes=pd.Index([name]), spec=name
-    )
+
+    n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}")


 def set_h2_colors(n):
-    blue_h2 = get_var(n, "Link", "p")[
+    blue_h2 = n.model["Link-p"].loc[
         n.links.index[n.links.index.str.contains("blue H2")]
     ]

-    pink_h2 = get_var(n, "Link", "p")[
+    pink_h2 = n.model["Link-p"].loc[
         n.links.index[n.links.index.str.contains("pink H2")]
     ]
@@ -883,16 +879,16 @@ def set_h2_colors(n):
         columns=pink_h2.columns,
     )

-    total_blue = linexpr((weightings_blue, blue_h2)).sum().sum()
+    total_blue = (weightings_blue * blue_h2).sum().sum()

-    total_pink = linexpr((weightings_pink, pink_h2)).sum().sum()
+    total_pink = (weightings_pink * pink_h2).sum().sum()

     rhs_blue = load_h2 * snakemake.config["sector"]["hydrogen"]["blue_share"]
     rhs_pink = load_h2 * snakemake.config["sector"]["hydrogen"]["pink_share"]

-    define_constraints(n, total_blue, "=", rhs_blue, "blue_h2_share")
+    n.model.add_constraints(total_blue == rhs_blue, name="blue_h2_share")

-    define_constraints(n, total_pink,
"=", rhs_pink, "pink_h2_share") + n.model.add_constraints(total_pink == rhs_pink, name="pink_h2_share") def add_existing(n): @@ -953,8 +949,13 @@ def extra_functionality(n, snapshots): for o in opts: if "EQ" in o: add_EQ_constraints(n, o) + add_battery_constraints(n) + if snakemake.config["sector"]["chp"]: + logger.info("setting CHP constraints") + add_chp_constraints(n) + if ( snakemake.config["policy_config"]["hydrogen"]["temporal_matching"] == "h2_yearly_matching" @@ -1008,6 +1009,7 @@ def solve_network(n, config, solving, **kwargs): solving["solver_options"][set_of_options] if set_of_options else {} ) kwargs["solver_name"] = solving["solver"]["name"] + kwargs["extra_functionality"] = extra_functionality skip_iterations = cf_solving.get("skip_iterations", False) if not n.lines.s_nom_extendable.any(): @@ -1048,9 +1050,9 @@ def solve_network(n, config, solving, **kwargs): snakemake = mock_snakemake( "solve_network", simpl="", - clusters="54", + clusters="10", ll="copt", - opts="Co2L-1H", + opts="Co2L-3H", ) configure_logging(snakemake) From 9fcee64a2e9675a09488b102e64d7b7d5c6299c5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 13 Nov 2024 12:51:37 +0000 Subject: [PATCH 17/48] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/solve_network.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index cc16a016b..0bd087016 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1011,7 +1011,6 @@ def solve_network(n, config, solving, **kwargs): kwargs["solver_name"] = solving["solver"]["name"] kwargs["extra_functionality"] = extra_functionality - skip_iterations = cf_solving.get("skip_iterations", False) if not n.lines.s_nom_extendable.any(): skip_iterations = True From 08bd2bde9f01a78438e180f41c08c364372a2317 Mon Sep 17 00:00:00 2001 From: ekatef Date: Fri, 15 Nov 2024 23:37:35 +0100 Subject: [PATCH 18/48] Update PyPSA version --- envs/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index e263e7a78..6b8da543b 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -12,7 +12,7 @@ dependencies: - pip - mamba # esp for windows build -- pypsa>=0.25, <0.29 +- pypsa>=0.25, <0.31 # - atlite>=0.2.4 # until https://github.com/PyPSA/atlite/issues/244 is not merged - dask - powerplantmatching From 5264e625d848456b0275d79679cb23c2d63ca1cc Mon Sep 17 00:00:00 2001 From: ekatef Date: Tue, 19 Nov 2024 23:24:11 +0100 Subject: [PATCH 19/48] Lift a version restriction --- envs/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 6b8da543b..f345839ff 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -12,7 +12,7 @@ dependencies: - pip - mamba # esp for windows build -- pypsa>=0.25, <0.31 +- pypsa>=0.25 # - atlite>=0.2.4 # until https://github.com/PyPSA/atlite/issues/244 is not merged - dask - powerplantmatching From cedfda8aa5e3455579640e675eb4ad5602f96e3f Mon Sep 17 00:00:00 2001 From: ekatef Date: Tue, 19 Nov 2024 23:24:50 +0100 Subject: [PATCH 20/48] Enable augmented network --- scripts/solve_network.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index f52d2508b..a446d8787 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ 
-1067,10 +1067,10 @@ def solve_network(n, config, solving, **kwargs): n = pypsa.Network(snakemake.input.network) # TODO Double-check handling the augmented case - # if snakemake.params.augmented_line_connection.get("add_to_snakefile"): - # n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = ( - # snakemake.params.augmented_line_connection.get("min_expansion") - # ) + if snakemake.params.augmented_line_connection.get("add_to_snakefile"): + n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = ( + snakemake.params.augmented_line_connection.get("min_expansion") + ) if ( snakemake.config["custom_data"]["add_existing"] From f61058aed892f272f841ea0c2f02a086145688f7 Mon Sep 17 00:00:00 2001 From: ekatef Date: Wed, 20 Nov 2024 10:35:17 +0100 Subject: [PATCH 21/48] Add a requirement to update countrycode --- envs/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/envs/environment.yaml b/envs/environment.yaml index f345839ff..1e2f43e20 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -88,3 +88,4 @@ dependencies: - tsam>=1.1.0 - chaospy # lastest version only available on pip - fake_useragent + - countrycode==0.4.0 # a sub-dependency; the restriction is needed to deal with an encoding error for windows From c5a47646073462ff71f85425ce51e9fb63841c4e Mon Sep 17 00:00:00 2001 From: ekatef Date: Wed, 20 Nov 2024 12:09:36 +0100 Subject: [PATCH 22/48] Use an installation from the source for countrycode --- envs/environment.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 1e2f43e20..b23ab6d44 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -85,7 +85,8 @@ dependencies: - powerplantmatching>=0.5.19 # until conda release it out - git+https://github.com/davide-f/google-drive-downloader@master # google drive with fix for virus scan - git+https://github.com/FRESNA/vresutils@master # until new pip release > 0.3.1 (strictly) + - git+https://github.com/vincentarelbundock/pycountrycode@main # until the fix will be available via https://github.com/vincentarelbundock/pycountrycode/pull/13 - tsam>=1.1.0 - chaospy # lastest version only available on pip - fake_useragent - - countrycode==0.4.0 # a sub-dependency; the restriction is needed to deal with an encoding error for windows + #- countrycode==0.4.0 # a sub-dependency; the restriction is needed to deal with an encoding error for windows From acc94319900448c4a42a9407b6ee6a35cf164a66 Mon Sep 17 00:00:00 2001 From: ekatef Date: Wed, 20 Nov 2024 21:58:42 +0100 Subject: [PATCH 23/48] Remove redundand initialisation --- scripts/solve_network.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a446d8787..324cda356 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1091,8 +1091,6 @@ def solve_network(n, config, solving, **kwargs): else: n_ref = None - # needed to get `n.model` property - n.optimize.create_model() n = prepare_network(n, solve_opts, config=solve_opts) n = solve_network( From 04065eb5c7fb931e9c27bdac2f865acdd939668f Mon Sep 17 00:00:00 2001 From: ekatef Date: Wed, 20 Nov 2024 23:15:08 +0100 Subject: [PATCH 24/48] Switch on extra functionality --- scripts/solve_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 324cda356..845acd64b 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1008,6 +1008,7 @@ def solve_network(n, config, solving, **kwargs): 
solving["solver_options"][set_of_options] if set_of_options else {} ) kwargs["solver_name"] = solving["solver"]["name"] + kwargs["extra_functionality"] = extra_functionality skip_iterations = cf_solving.get("skip_iterations", False) if not n.lines.s_nom_extendable.any(): From dc4dba3309ee749f8bd8a728a747512117ef5767 Mon Sep 17 00:00:00 2001 From: ekatef Date: Wed, 20 Nov 2024 23:29:53 +0100 Subject: [PATCH 25/48] Fix grouping in add_res_constraints --- scripts/solve_network.py | 64 ++++++++++++++++------------------------ 1 file changed, 25 insertions(+), 39 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 845acd64b..41b5f42cb 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -476,12 +476,6 @@ def add_battery_constraints(n): def add_RES_constraints(n, res_share, config): - lgrouper = n.loads.bus.map(n.buses.country) - # TODO drop load - ggrouper = n.generators.bus.map(n.buses.country) - sgrouper = n.storage_units.bus.map(n.buses.country) - cgrouper = n.links.bus0.map(n.buses.country) - logger.warning( "The add_RES_constraints() is still work in progress. " "Unexpected results might be incurred, particularly if " @@ -489,31 +483,39 @@ def add_RES_constraints(n, res_share, config): "is subject to future improvements." ) - load = ( - n.snapshot_weightings.generators - @ n.loads_t.p_set.groupby(lgrouper, axis=1).sum() - ) - - rhs = res_share * load - renew_techs = config["electricity"]["renewable_carriers"] charger = ["H2 electrolysis", "battery charger"] discharger = ["H2 fuel cell", "battery discharger"] - gens_i = n.generators.query("carrier in @renew_techs").index - stores_i = n.storage_units.query("carrier in @renew_techs").index + ren_gen = n.generators.query("carrier in @renew_techs") + ren_stores = n.storage_units.query("carrier in @renew_techs") + ren_charger = n.links.query("carrier in @charger") + ren_discharger = n.links.query("carrier in @discharger") - charger_i = n.links.query("carrier in @charger").index - discharger_i = n.links.query("carrier in @discharger").index + gens_i = ren_gen.index + stores_i = ren_stores.index + charger_i = ren_charger.index + discharger_i = ren_discharger.index stores_t_weights = n.snapshot_weightings.stores + lgrouper = n.loads.bus.map(n.buses.country) + ggrouper = ren_gen.bus.map(n.buses.country) + sgrouper = ren_stores.bus.map(n.buses.country) + cgrouper = ren_charger.bus0.map(n.buses.country) + dgrouper = ren_discharger.bus0.map(n.buses.country) + + load = ( + n.snapshot_weightings.generators + @ n.loads_t.p_set.groupby(lgrouper, axis=1).sum() + ) + rhs = res_share * load + # Generators - # TODO restore grouping by countries un-commenting calls of groupby() lhs_gen = ( (n.model["Generator-p"].loc[:, gens_i] * n.snapshot_weightings.generators) - # .groupby(ggrouper.to_xarray()) + .groupby(ggrouper.to_xarray()) .sum() ) @@ -529,31 +531,15 @@ def add_RES_constraints(n, res_share, config): lambda r: r * n.links.loc[discharger_i].efficiency ) - lhs_dispatch = ( - store_disp_expr - # .groupby(sgrouper) - .sum() - ) - lhs_store = ( - store_expr - # .groupby(sgrouper) - .sum() - ) + lhs_dispatch = store_disp_expr.groupby(sgrouper).sum() + lhs_store = store_expr.groupby(sgrouper).sum() # Stores (or their resp. Link components) # Note that the variables "p0" and "p1" currently do not exist. # Thus, p0 and p1 must be derived from "p" (which exists), taking into account the link efficiency. 
- lhs_charge = ( - charge_expr - # .groupby(cgrouper) - .sum() - ) + lhs_charge = charge_expr.groupby(cgrouper).sum() - lhs_discharge = ( - discharge_expr - # .groupby(cgrouper) - .sum() - ) + lhs_discharge = discharge_expr.groupby(cgrouper).sum() lhs = lhs_gen + lhs_dispatch - lhs_store - lhs_charge + lhs_discharge From 25a9b55a6290ee586ffa1ec7356d6fdada642d36 Mon Sep 17 00:00:00 2001 From: ekatef Date: Wed, 20 Nov 2024 23:46:41 +0100 Subject: [PATCH 26/48] Add missed import --- scripts/solve_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 41b5f42cb..2959c3d04 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -85,6 +85,7 @@ import numpy as np import pandas as pd import pypsa +import xarray as xr from _helpers import configure_logging, create_logger, override_component_attrs from linopy import merge from pypsa.descriptors import get_switchable_as_dense as get_as_dense From b6343e2de887875fc4366210399df80cb3c51448 Mon Sep 17 00:00:00 2001 From: ekatef Date: Thu, 21 Nov 2024 00:29:59 +0100 Subject: [PATCH 27/48] Fix refuse aligning dimensions --- scripts/solve_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2959c3d04..be04cff17 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -428,9 +428,7 @@ def update_capacity_constraint(n): rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0) - n.model.add_constraints( - lhs <= rhs, name="gen_updated_capacity_constraint", mask=rhs.notnull() - ) + n.model.add_constraints(lhs <= rhs, name="gen_updated_capacity_constraint") def add_operational_reserve_margin(n, sns, config): From 129db46e6c3c63f348fc0ff750a9eaf040d29974 Mon Sep 17 00:00:00 2001 From: ekatef Date: Thu, 21 Nov 2024 00:30:29 +0100 Subject: [PATCH 28/48] Improve naming --- scripts/solve_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index be04cff17..309569ec6 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -423,7 +423,9 @@ def update_capacity_constraint(n): ) if not ext_i.empty: - capacity_variable = n.model["Generator-p_nom"] + capacity_variable = n.model["Generator-p_nom"].rename( + {"Generator-ext": "Generator"} + ) lhs = dispatch + reserve - capacity_variable * xr.DataArray(p_max_pu[ext_i]) rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0) From 6770144fab547cbea66c59a4adce243691ed21f4 Mon Sep 17 00:00:00 2001 From: ekatef Date: Thu, 21 Nov 2024 00:52:46 +0100 Subject: [PATCH 29/48] Remove outdated n.variables --- scripts/solve_network.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 309569ec6..f253366f3 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -462,8 +462,7 @@ def add_battery_constraints(n): 1 * charger_size - efficiency * discharger_size = 0 """ nodes = n.buses.index[n.buses.carrier == "battery"] - # TODO Check if the second part of the condition can make sense - # if nodes.empty or ("Link", "p_nom") not in n.variables.index: + if nodes.empty: return vars_link = n.model["Link-p_nom"] @@ -608,7 +607,7 @@ def _add_land_use_constraint_m(n): def add_h2_network_cap(n, cap): h2_network = n.links.loc[n.links.carrier == "H2 pipeline"] - if h2_network.index.empty or ("Link", "p_nom") not in n.variables.index: + if 
h2_network.index.empty: return h2_network_cap = get_var(n, "Link", "p_nom") subset_index = h2_network.index.intersection(h2_network_cap.index) @@ -817,7 +816,7 @@ def add_chp_constraints(n): def add_co2_sequestration_limit(n, sns): co2_stores = n.stores.loc[n.stores.carrier == "co2 stored"].index - if co2_stores.empty or ("Store", "e") not in n.variables.index: + if co2_stores.empty: return vars_final_co2_stored = get_var(n, "Store", "e").loc[sns[-1], co2_stores] From aa92c9abc7e5f9ae1856b2912402d3930dcc905e Mon Sep 17 00:00:00 2001 From: Emmanuel Bolarinwa Date: Thu, 21 Nov 2024 16:35:06 +0100 Subject: [PATCH 30/48] adjust warning for h2_cap --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 084652a02..15f04023a 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -629,7 +629,7 @@ def add_h2_network_cap(n, cap): diff_index = h2_network_cap_index.difference(subset_index) if len(diff_index) > 0: logger.warning( - f"Impossible to set H2 cap for the following links: {diff_index}" + f"Impossible to set a limit for H2 pipelines extension for the following links: {diff_index}" ) lhs = ( h2_network_cap.loc[subset_index] * h2_network.loc[subset_index, "length"] From 1ba8d94bf51d8ba5c7c37cf6461179717a29f756 Mon Sep 17 00:00:00 2001 From: Emmanuel Bolarinwa Date: Thu, 21 Nov 2024 16:35:52 +0100 Subject: [PATCH 31/48] revert wildcards rules --- scripts/solve_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 15f04023a..6e848c719 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1050,9 +1050,9 @@ def solve_network(n, config, solving, **kwargs): snakemake = mock_snakemake( "solve_network", simpl="", - clusters="10", + clusters="54", ll="copt", - opts="Co2L-3H", + opts="Co2L-1H", ) configure_logging(snakemake) From e909f3dfb67b654dc540b35f7027768ad0eaa59d Mon Sep 17 00:00:00 2001 From: Emmanuel Bolarinwa Date: Thu, 21 Nov 2024 16:37:58 +0100 Subject: [PATCH 32/48] remove comment for .variables --- scripts/solve_network.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 6e848c719..aad3f6471 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -462,7 +462,6 @@ def add_battery_constraints(n): """ nodes = n.buses.index[n.buses.carrier == "battery"] # TODO Check if the second part of the condition can make sense - # if nodes.empty or ("Link", "p_nom") not in n.variables.index: if nodes.empty: return vars_link = n.model["Link-p_nom"] @@ -827,7 +826,7 @@ def add_chp_constraints(n): def add_co2_sequestration_limit(n, sns): co2_stores = n.stores.loc[n.stores.carrier == "co2 stored"].index - if co2_stores.empty: # or ("Store", "e") not in n.variables.index: + if co2_stores.empty: return vars_final_co2_stored = n.model["Store-e"].loc[sns[-1], co2_stores] From ffdf4a420b214339587aa8ae1bed9096793613c8 Mon Sep 17 00:00:00 2001 From: ekatef Date: Sat, 21 Dec 2024 00:16:24 +0100 Subject: [PATCH 33/48] Remove outdated dependency hint --- envs/environment.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 5228000cc..a283f802c 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -92,4 +92,3 @@ dependencies: - tsam>=1.1.0 - chaospy # lastest version only available on pip - fake_useragent - #- countrycode==0.4.0 # a sub-dependency; 
the restriction is needed to deal with an encoding error for windows From 95d7a294715901c6eb1787b926747edfd0f5ab4c Mon Sep 17 00:00:00 2001 From: ekatef Date: Sat, 21 Dec 2024 00:17:09 +0100 Subject: [PATCH 34/48] Update comments --- scripts/solve_network.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 5fc630b59..8750ee17f 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -266,7 +266,9 @@ def add_EQ_constraints(n, o, scaling=1e-1): .sum() .sum("snapshot") ) - # TODO: double check that this is really needed, why do have to subtract the spillage + # the current formulation implies that the available hydro power is (inflow - spillage) + # it implies efficiency_dispatch is 1 which is not quite general + # see https://github.com/pypsa-meets-earth/pypsa-earth/issues/1245 for possible improvements if not n.storage_units_t.inflow.empty: spillage_variable = n.model["StorageUnit-spill"] lhs_spill = ( @@ -1053,7 +1055,6 @@ def solve_network(n, config, solving, **kwargs): else: n = pypsa.Network(snakemake.input.network) - # TODO Double-check handling the augmented case if snakemake.params.augmented_line_connection.get("add_to_snakefile"): n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = ( snakemake.params.augmented_line_connection.get("min_expansion") From ff939d40d8588fdfeebdb175965b2356fc1eacbf Mon Sep 17 00:00:00 2001 From: ekatef Date: Sat, 21 Dec 2024 00:17:45 +0100 Subject: [PATCH 35/48] Add a docstring --- scripts/solve_network.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 8750ee17f..d53256155 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -477,6 +477,17 @@ def add_battery_constraints(n): def add_RES_constraints(n, res_share, config): + """ + The constraint ensures that a predefined share of power is generated + by renewable sources + + Parameters + ---------- + n : pypsa.Network + res_share: float + config : dict + """ + logger.warning( "The add_RES_constraints() is still work in progress. " "Unexpected results might be incurred, particularly if " From 81721127d45caa4b54d175ec34efb70e44d8d8bc Mon Sep 17 00:00:00 2001 From: ekatef Date: Sat, 21 Dec 2024 00:24:58 +0100 Subject: [PATCH 36/48] Update the release notes --- doc/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 4a78bb4c6..f6d062413 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -15,6 +15,7 @@ This part of documentation collects descriptive release notes to capture the mai * Include option in the config to allow for custom airport data `PR #1241 `__ +* Implement changes in processing network topology to use the updated PyPSA version. `PR #1065 `__ **Minor Changes and bug-fixing** @@ -115,7 +116,6 @@ PyPSA-Earth 0.4.0 * Add an option to use csv format for custom demand imports. `PR #995 `__ -* Implement changes in processing network topology to use the updated PyPSA version. 
`PR #1065 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1065>`__ **Minor Changes and bug-fixing** From 6ca41778ea5f89eacb2fcb08c5228efb73ca31da Mon Sep 17 00:00:00 2001 From: Ekaterina <30229437+ekatef@users.noreply.github.com> Date: Sat, 21 Dec 2024 00:29:01 +0100 Subject: [PATCH 37/48] Update scripts/solve_network.py Co-authored-by: Davide Fioriti <67809479+davide-f@users.noreply.github.com> --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index d53256155..7574d2275 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -349,7 +349,7 @@ def add_SAFE_constraints(n, config): p_nom = n.model["Generator-p_nom"].loc[ext_gens_i] lhs = p_nom.sum() exist_conv_caps = n.generators.query( - "~p_nom_extendable & carrier in @entional_carriers" + "~p_nom_extendable & carrier in @conventional_carriers" ).p_nom.sum() rhs = reserve_margin - exist_conv_caps n.model.add_constraints(lhs >= rhs, name="safe_mintotalcap") From eafc486b7c797f8216307489e4837e644d6b56f2 Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Sat, 21 Dec 2024 15:40:16 +0100 Subject: [PATCH 38/48] Add a workflow to update environments in PR --- .github/workflows/update-pinned-env-pr.yml | 82 ++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 .github/workflows/update-pinned-env-pr.yml diff --git a/.github/workflows/update-pinned-env-pr.yml b/.github/workflows/update-pinned-env-pr.yml new file mode 100644 index 000000000..67e2a4b10 --- /dev/null +++ b/.github/workflows/update-pinned-env-pr.yml @@ -0,0 +1,82 @@ +name: Update pinned envs + +on: + push: + paths: + - envs/environment.yaml + workflow_dispatch: + + +jobs: + update-pinned-environment-pr: + # if: ${{ github.ref == 'refs/heads/main' }} + name: Update pinned envs PR + runs-on: ${{ matrix.os }}-latest + strategy: + fail-fast: false + matrix: + os: [ubuntu, macos, windows] + include: + - os: ubuntu + suffix: "linux" + - os: macos + suffix: "macos" + - os: windows + suffix: "windows" + + steps: + - uses: actions/checkout@v4 + + - name: Setup conda + uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ${{ github.event.repository.name }} + environment-file: envs/environment.yaml + + - name: Update pinned environment per OS + run: | + conda env export --name ${{ github.event.repository.name }} --no-builds > envs/${{ matrix.suffix }}-pinned.yaml + + - name: Add SPDX header + if: ${{ matrix.suffix != 'windows' }} + run: | + SPDX_HEADER="# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors\n#\n# SPDX-License-Identifier: CC0-1.0\n" + echo -e "$SPDX_HEADER" | cat - envs/${{ matrix.suffix }}-pinned.yaml > temp && mv temp envs/${{ matrix.suffix }}-pinned.yaml + + - name: Add SPDX header (windows) + if: ${{ matrix.suffix == 'windows' }} + run: | + $SPDX_HEADER = "# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur`r`n#`r`n# SPDX-License-Identifier: CC0-1.0`r`n`r`n" + $CurrentContent = Get-Content "envs/${{ matrix.suffix }}-pinned.yaml" -Raw + $NewContent = $SPDX_HEADER + $CurrentContent + $NewContent | Set-Content "envs/${{ matrix.suffix }}-pinned.yaml" + + - name: Cache environment files + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.suffix }}-pinned + path: envs/${{ matrix.suffix }}-pinned.yaml + + create-pull-request: + needs: update-pinned-environment-pr + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Download all artifacts + uses: actions/download-artifact@v4 + + - name: Prepare files for commit + run: | + mkdir -p envs + mv linux-pinned/* envs/linux-pinned.yaml + mv macos-pinned/*
envs/macos-pinned.yaml + mv windows-pinned/* envs/windows-pinned.yaml + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v7 + with: + token: ${{ secrets.GITHUB_TOKEN }} + branch: update-pinned-environment + title: "[github-actions.ci] Update pinned envs" + body: "Automatically generated PR to update pinned environment files for Windows, macOS, and Linux." + commit-message: "Update pinned environment files for all platforms" From 3ff85e7e4741caae90368de044d617bec318aaf2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 21 Dec 2024 20:28:14 +0100 Subject: [PATCH 39/48] [github-actions.ci] Update pinned envs (#1253) * Update pinned environment files for all platforms --------- Co-authored-by: ekatef <30229437+ekatef@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- envs/linux-pinned.yaml | 101 ++++++++++++++++++++------------------- envs/macos-pinned.yaml | 92 ++++++++++++++++++----------------- envs/windows-pinned.yaml | 95 +++++++++++++++++++----------------- 3 files changed, 154 insertions(+), 134 deletions(-) diff --git a/envs/linux-pinned.yaml b/envs/linux-pinned.yaml index fadd8a2fe..38cbb18fc 100644 --- a/envs/linux-pinned.yaml +++ b/envs/linux-pinned.yaml @@ -27,16 +27,16 @@ dependencies: - atk-1.0=2.38.0 - atlite=0.3.0 - attr=2.5.1 -- attrs=24.2.0 +- attrs=24.3.0 - aws-c-auth=0.8.0 - aws-c-cal=0.8.1 -- aws-c-common=0.10.5 +- aws-c-common=0.10.6 - aws-c-compression=0.3.0 - aws-c-event-stream=0.5.0 - aws-c-http=0.9.2 - aws-c-io=0.15.3 - aws-c-mqtt=0.11.0 -- aws-c-s3=0.7.5 +- aws-c-s3=0.7.7 - aws-c-sdkutils=0.2.1 - aws-checksums=0.2.2 - aws-crt-cpp=0.29.7 @@ -50,24 +50,24 @@ dependencies: - beautifulsoup4=4.12.3 - bleach=6.2.0 - blosc=1.21.6 -- bokeh=3.5.2 +- bokeh=3.6.2 - bottleneck=1.4.2 -- branca=0.7.2 +- branca=0.8.1 - brotli=1.1.0 - brotli-bin=1.1.0 - brotli-python=1.1.0 - brotlicffi=1.1.0.0 - bzip2=1.0.8 -- c-ares=1.34.3 +- c-ares=1.34.4 - c-blosc2=2.15.2 -- ca-certificates=2024.8.30 +- ca-certificates=2024.12.14 - cached-property=1.5.2 - cached_property=1.5.2 - cairo=1.18.2 - capnproto=1.0.2 - cartopy=0.23.0 - cdsapi=0.7.5 -- certifi=2024.8.30 +- certifi=2024.12.14 - cffi=1.17.1 - cfgv=3.3.1 - cfitsio=4.4.1 @@ -92,23 +92,24 @@ dependencies: - contourpy=1.3.1 - country_converter=1.2 - cpp-expected=1.1.0 +- cppad=20240000.7 - cycler=0.12.1 - cyrus-sasl=2.1.27 -- cytoolz=1.0.0 -- dask=2024.12.0 -- dask-core=2024.12.0 -- dask-expr=1.1.20 +- cytoolz=1.0.1 +- dask=2024.12.1 +- dask-core=2024.12.1 +- dask-expr=1.1.21 - datapi=0.1.1 - datashader=0.16.3 - datrie=0.8.2 - dbus=1.13.6 -- debugpy=1.8.10 +- debugpy=1.8.11 - decorator=5.1.1 - defusedxml=0.7.1 - deprecation=2.1.0 - descartes=1.1.0 - distlib=0.3.9 -- distributed=2024.12.0 +- distributed=2024.12.1 - docutils=0.21.2 - dpath=2.2.0 - entrypoints=0.4 @@ -120,7 +121,7 @@ dependencies: - filelock=3.16.1 - fiona=1.9.6 - fmt=11.0.2 -- folium=0.19.0 +- folium=0.19.2 - font-ttf-dejavu-sans-mono=2.37 - font-ttf-inconsolata=3.000 - font-ttf-source-code-pro=2.038 @@ -133,7 +134,7 @@ dependencies: - freetype=2.12.1 - freexl=2.0.0 - fribidi=1.0.10 -- fsspec=2024.10.0 +- fsspec=2024.12.0 - future=1.0.0 - gdal=3.9.3 - gdk-pixbuf=2.42.12 @@ -144,8 +145,8 @@ dependencies: - geopy=2.4.1 - geos=3.13.0 - geotiff=1.7.3 -- geoviews=1.13.1 -- geoviews-core=1.13.1 +- geoviews=1.14.0 +- geoviews-core=1.14.0 - gettext=0.22.5 - gettext-tools=0.22.5 - gflags=2.2.2 @@ -174,7 +175,7 @@ dependencies: - 
httpcore=1.0.7 - httpx=0.28.1 - humanfriendly=10.0 -- hvplot=0.11.1 +- hvplot=0.11.2 - hyperframe=6.0.1 - icu=75.1 - identify=2.6.3 @@ -184,9 +185,9 @@ dependencies: - importlib_resources=6.4.5 - inflate64=1.0.0 - iniconfig=2.0.0 -- ipopt=3.14.16 +- ipopt=3.14.17 - ipykernel=6.29.5 -- ipython=8.30.0 +- ipython=8.31.0 - isoduration=20.11.0 - jedi=0.19.2 - jinja2=3.1.4 @@ -200,10 +201,10 @@ dependencies: - jupyter-lsp=2.2.5 - jupyter_client=8.6.3 - jupyter_core=5.7.2 -- jupyter_events=0.10.0 -- jupyter_server=2.14.2 +- jupyter_events=0.11.0 +- jupyter_server=2.15.0 - jupyter_server_terminals=0.5.3 -- jupyterlab=4.3.3 +- jupyterlab=4.3.4 - jupyterlab_pygments=0.3.0 - jupyterlab_server=2.27.3 - kealib=1.6.0 @@ -229,12 +230,12 @@ dependencies: - libbrotlienc=1.1.0 - libcap=2.71 - libcblas=3.9.0 -- libclang-cpp19.1=19.1.5 -- libclang13=19.1.5 +- libclang-cpp19.1=19.1.6 +- libclang13=19.1.6 - libcrc32c=1.1.2 - libcups=2.3.3 - libcurl=8.11.1 -- libdeflate=1.22 +- libdeflate=1.23 - libdrm=2.4.124 - libedit=3.1.20191231 - libegl=1.7.0 @@ -282,14 +283,14 @@ dependencies: - liblapack=3.9.0 - liblapacke=3.9.0 - libllvm14=14.0.6 -- libllvm19=19.1.5 +- libllvm19=19.1.6 - liblzma=5.6.3 - liblzma-devel=5.6.3 -- libmamba=2.0.4 +- libmamba=2.0.5 - libnetcdf=4.9.2 - libnghttp2=1.64.0 - libnsl=2.0.1 -- libntlm=1.4 +- libntlm=1.8 - libogg=1.3.5 - libopenblas=0.3.28 - libopus=1.3.1 @@ -318,7 +319,7 @@ dependencies: - libutf8proc=2.9.0 - libuuid=2.38.1 - libvorbis=1.3.7 -- libwebp-base=1.4.0 +- libwebp-base=1.5.0 - libxcb=1.17.0 - libxcrypt=4.4.36 - libxkbcommon=1.7.0 @@ -334,7 +335,7 @@ dependencies: - lz4=4.3.3 - lz4-c=1.10.0 - lzo=2.10 -- mamba=2.0.4 +- mamba=2.0.5 - mapclassify=2.8.1 - markdown=3.6 - markdown-it-py=3.0.0 @@ -349,6 +350,7 @@ dependencies: - metis=5.1.0 - minizip=4.0.7 - mistune=3.0.2 +- mpfr=4.2.1 - mpg123=1.32.9 - msgpack-python=1.1.0 - multipledispatch=0.6.0 @@ -359,7 +361,7 @@ dependencies: - munkres=1.1.4 - mysql-common=9.0.1 - mysql-libs=9.0.1 -- nbclient=0.10.1 +- nbclient=0.10.2 - nbconvert-core=7.16.4 - nbformat=5.10.4 - ncurses=6.5 @@ -385,9 +387,9 @@ dependencies: - packaging=24.2 - pandas=2.2.2 - pandocfilters=1.5.0 -- panel=1.5.4 +- panel=1.5.5 - pango=1.54.0 -- param=2.1.1 +- param=2.2.0 - parso=0.8.4 - partd=1.4.2 - patsy=1.0.1 @@ -430,13 +432,14 @@ dependencies: - pydoe2=1.3.0 - pygments=2.18.0 - pyogrio=0.10.0 -- pyomo=6.8.2 +- pyomo=6.6.1 - pyparsing=3.2.0 - pyppmd=1.1.0 - pyproj=3.7.0 -- pypsa=0.24.0 +- pypsa=0.28.0 - pyqt=5.15.9 - pyqt5-sip=12.12.2 +- pyscipopt=5.2.1 - pyshp=2.3.1 - pysocks=1.7.1 - pytables=3.10.1 @@ -470,18 +473,19 @@ dependencies: - rtree=1.3.0 - ruamel.yaml=0.17.26 - ruamel.yaml.clib=0.2.8 -- s2n=1.5.9 +- s2n=1.5.10 - scikit-learn=1.6.0 +- scip=9.2.0 - scipy=1.14.1 - seaborn=0.13.2 - seaborn-base=0.13.2 - send2trash=1.8.3 - setuptools=75.6.0 - shapely=2.0.6 -- simdjson=3.10.1 +- simdjson=3.11.3 - sip=6.7.12 - six=1.17.0 -- smart_open=7.0.5 +- smart_open=7.1.0 - smmap=5.0.0 - snakemake-minimal=7.32.4 - snappy=1.2.1 @@ -495,12 +499,13 @@ dependencies: - statsmodels=0.14.4 - stopit=1.1.2 - tabulate=0.9.0 +- tbb=2022.0.0 - tblib=3.0.0 - terminado=0.18.1 - texttable=1.7.0 - threadpoolctl=3.5.0 - throttler=1.2.2 -- tiledb=2.26.2 +- tiledb=2.27.0 - tinycss2=1.4.0 - tk=8.6.13 - toml=0.10.2 @@ -540,16 +545,16 @@ dependencies: - xerces-c=3.2.5 - xkeyboard-config=2.43 - xlrd=2.0.1 -- xorg-libice=1.1.1 -- xorg-libsm=1.2.4 +- xorg-libice=1.1.2 +- xorg-libsm=1.2.5 - xorg-libx11=1.8.10 -- xorg-libxau=1.0.11 +- xorg-libxau=1.0.12 - xorg-libxdamage=1.1.6 - 
xorg-libxdmcp=1.1.5 - xorg-libxext=1.3.6 - xorg-libxfixes=6.0.1 -- xorg-libxrender=0.9.11 -- xorg-libxxf86vm=1.1.5 +- xorg-libxrender=0.9.12 +- xorg-libxxf86vm=1.1.6 - xorg-xf86vidmodeproto=2.3.1 - xyzservices=2024.9.0 - xz=5.6.3 @@ -571,7 +576,7 @@ dependencies: - earth-osm==2.2 - fake-useragent==2.0.3 - googledrivedownloader==0.4 - - highspy==1.8.1 - - protobuf==5.29.1 + - highspy==1.9.0 + - protobuf==5.29.2 - tsam==2.3.6 prefix: /usr/share/miniconda/envs/pypsa-earth diff --git a/envs/macos-pinned.yaml b/envs/macos-pinned.yaml index 96e4a8c09..d872f920c 100644 --- a/envs/macos-pinned.yaml +++ b/envs/macos-pinned.yaml @@ -24,16 +24,16 @@ dependencies: - async-lru=2.0.4 - atk-1.0=2.38.0 - atlite=0.3.0 -- attrs=24.2.0 +- attrs=24.3.0 - aws-c-auth=0.8.0 - aws-c-cal=0.8.1 -- aws-c-common=0.10.5 +- aws-c-common=0.10.6 - aws-c-compression=0.3.0 - aws-c-event-stream=0.5.0 - aws-c-http=0.9.2 - aws-c-io=0.15.3 - aws-c-mqtt=0.11.0 -- aws-c-s3=0.7.5 +- aws-c-s3=0.7.7 - aws-c-sdkutils=0.2.1 - aws-checksums=0.2.2 - aws-crt-cpp=0.29.7 @@ -47,24 +47,24 @@ dependencies: - beautifulsoup4=4.12.3 - bleach=6.2.0 - blosc=1.21.6 -- bokeh=3.5.2 +- bokeh=3.6.2 - bottleneck=1.4.2 -- branca=0.7.2 +- branca=0.8.1 - brotli=1.1.0 - brotli-bin=1.1.0 - brotli-python=1.1.0 - brotlicffi=1.1.0.0 - bzip2=1.0.8 -- c-ares=1.34.3 +- c-ares=1.34.4 - c-blosc2=2.15.2 -- ca-certificates=2024.8.30 +- ca-certificates=2024.12.14 - cached-property=1.5.2 - cached_property=1.5.2 - cairo=1.18.2 - capnproto=1.0.2 - cartopy=0.23.0 - cdsapi=0.7.5 -- certifi=2024.8.30 +- certifi=2024.12.14 - cffi=1.17.1 - cfgv=3.3.1 - cfitsio=4.4.1 @@ -89,22 +89,23 @@ dependencies: - contourpy=1.3.1 - country_converter=1.2 - cpp-expected=1.1.0 +- cppad=20240000.7 - cycler=0.12.1 - cyrus-sasl=2.1.27 -- cytoolz=1.0.0 -- dask=2024.12.0 -- dask-core=2024.12.0 -- dask-expr=1.1.20 +- cytoolz=1.0.1 +- dask=2024.12.1 +- dask-core=2024.12.1 +- dask-expr=1.1.21 - datapi=0.1.1 - datashader=0.16.3 - datrie=0.8.2 -- debugpy=1.8.10 +- debugpy=1.8.11 - decorator=5.1.1 - defusedxml=0.7.1 - deprecation=2.1.0 - descartes=1.1.0 - distlib=0.3.9 -- distributed=2024.12.0 +- distributed=2024.12.1 - docutils=0.21.2 - dpath=2.2.0 - entrypoints=0.4 @@ -115,7 +116,7 @@ dependencies: - filelock=3.16.1 - fiona=1.9.6 - fmt=11.0.2 -- folium=0.19.0 +- folium=0.19.2 - font-ttf-dejavu-sans-mono=2.37 - font-ttf-inconsolata=3.000 - font-ttf-source-code-pro=2.038 @@ -128,7 +129,7 @@ dependencies: - freetype=2.12.1 - freexl=2.0.0 - fribidi=1.0.10 -- fsspec=2024.10.0 +- fsspec=2024.12.0 - future=1.0.0 - gdal=3.9.3 - gdk-pixbuf=2.42.12 @@ -139,8 +140,8 @@ dependencies: - geopy=2.4.1 - geos=3.13.0 - geotiff=1.7.3 -- geoviews=1.13.1 -- geoviews-core=1.13.1 +- geoviews=1.14.0 +- geoviews-core=1.14.0 - gflags=2.2.2 - giflib=5.2.2 - gitdb=4.0.11 @@ -155,7 +156,7 @@ dependencies: - gurobi=12.0.0 - h11=0.14.0 - h2=4.1.0 -- harfbuzz=9.0.0 +- harfbuzz=10.1.0 - hdf4=4.2.15 - hdf5=1.14.3 - holoviews=1.20.0 @@ -163,7 +164,7 @@ dependencies: - httpcore=1.0.7 - httpx=0.28.1 - humanfriendly=10.0 -- hvplot=0.11.1 +- hvplot=0.11.2 - hyperframe=6.0.1 - icu=75.1 - identify=2.6.3 @@ -173,9 +174,9 @@ dependencies: - importlib_resources=6.4.5 - inflate64=1.0.0 - iniconfig=2.0.0 -- ipopt=3.14.16 +- ipopt=3.14.17 - ipykernel=6.29.5 -- ipython=8.30.0 +- ipython=8.31.0 - isoduration=20.11.0 - jedi=0.19.2 - jinja2=3.1.4 @@ -189,10 +190,10 @@ dependencies: - jupyter-lsp=2.2.5 - jupyter_client=8.6.3 - jupyter_core=5.7.2 -- jupyter_events=0.10.0 -- jupyter_server=2.14.2 +- jupyter_events=0.11.0 +- jupyter_server=2.15.0 - 
jupyter_server_terminals=0.5.3 -- jupyterlab=4.3.3 +- jupyterlab=4.3.4 - jupyterlab_pygments=0.3.0 - jupyterlab_server=2.27.3 - kealib=1.6.0 @@ -214,8 +215,8 @@ dependencies: - libcblas=3.9.0 - libcrc32c=1.1.2 - libcurl=8.11.1 -- libcxx=19.1.5 -- libdeflate=1.22 +- libcxx=19.1.6 +- libdeflate=1.23 - libedit=3.1.20191231 - libev=4.33 - libevent=2.1.12 @@ -242,6 +243,7 @@ dependencies: - libgoogle-cloud=2.32.0 - libgoogle-cloud-storage=2.32.0 - libgrpc=1.67.1 +- libhwloc=2.11.2 - libiconv=1.17 - libintl=0.22.5 - libjpeg-turbo=3.0.0 @@ -250,10 +252,10 @@ dependencies: - liblapacke=3.9.0 - libllvm14=14.0.6 - liblzma=5.6.3 -- libmamba=2.0.4 +- libmamba=2.0.5 - libnetcdf=4.9.2 - libnghttp2=1.64.0 -- libntlm=1.4 +- libntlm=1.8 - libopenblas=0.3.28 - libparquet=18.1.0 - libpng=1.6.44 @@ -272,7 +274,7 @@ dependencies: - libthrift=0.21.0 - libtiff=4.7.0 - libutf8proc=2.9.0 -- libwebp-base=1.4.0 +- libwebp-base=1.5.0 - libxcb=1.17.0 - libxml2=2.13.5 - libxslt=1.1.39 @@ -280,14 +282,14 @@ dependencies: - libzlib=1.3.1 - linkify-it-py=2.0.3 - linopy=0.3.11 -- llvm-openmp=19.1.5 +- llvm-openmp=19.1.6 - llvmlite=0.43.0 - locket=1.0.0 - lxml=5.3.0 - lz4=4.3.3 - lz4-c=1.10.0 - lzo=2.10 -- mamba=2.0.4 +- mamba=2.0.5 - mapclassify=2.8.1 - markdown=3.6 - markdown-it-py=3.0.0 @@ -302,6 +304,7 @@ dependencies: - metis=5.1.0 - minizip=4.0.7 - mistune=3.0.2 +- mpfr=4.2.1 - msgpack-python=1.1.0 - multipledispatch=0.6.0 - multiurl=0.3.3 @@ -309,7 +312,7 @@ dependencies: - mumps-include=5.7.3 - mumps-seq=5.7.3 - munkres=1.1.4 -- nbclient=0.10.1 +- nbclient=0.10.2 - nbconvert-core=7.16.4 - nbformat=5.10.4 - ncurses=6.5 @@ -334,9 +337,9 @@ dependencies: - packaging=24.2 - pandas=2.2.2 - pandocfilters=1.5.0 -- panel=1.5.4 +- panel=1.5.5 - pango=1.54.0 -- param=2.1.1 +- param=2.2.0 - parso=0.8.4 - partd=1.4.2 - patsy=1.0.1 @@ -380,11 +383,12 @@ dependencies: - pyobjc-core=10.3.2 - pyobjc-framework-cocoa=10.3.2 - pyogrio=0.10.0 -- pyomo=6.8.2 +- pyomo=6.6.1 - pyparsing=3.2.0 - pyppmd=1.1.0 - pyproj=3.7.0 -- pypsa=0.24.0 +- pypsa=0.28.0 +- pyscipopt=5.2.1 - pyshp=2.3.1 - pysocks=1.7.1 - pytables=3.10.1 @@ -418,15 +422,16 @@ dependencies: - ruamel.yaml=0.17.26 - ruamel.yaml.clib=0.2.8 - scikit-learn=1.6.0 +- scip=9.2.0 - scipy=1.14.1 - seaborn=0.13.2 - seaborn-base=0.13.2 - send2trash=1.8.3 - setuptools=75.6.0 - shapely=2.0.6 -- simdjson=3.10.1 +- simdjson=3.11.3 - six=1.17.0 -- smart_open=7.0.5 +- smart_open=7.1.0 - smmap=5.0.0 - snakemake-minimal=7.32.4 - snappy=1.2.1 @@ -440,12 +445,13 @@ dependencies: - statsmodels=0.14.4 - stopit=1.1.2 - tabulate=0.9.0 +- tbb=2022.0.0 - tblib=3.0.0 - terminado=0.18.1 - texttable=1.7.0 - threadpoolctl=3.5.0 - throttler=1.2.2 -- tiledb=2.26.2 +- tiledb=2.27.0 - tinycss2=1.4.0 - tk=8.6.13 - tomli=2.2.1 @@ -478,7 +484,7 @@ dependencies: - xarray=2023.11.0 - xerces-c=3.2.5 - xlrd=2.0.1 -- xorg-libxau=1.0.11 +- xorg-libxau=1.0.12 - xorg-libxdmcp=1.1.5 - xyzservices=2024.9.0 - yaml=0.2.5 @@ -497,7 +503,7 @@ dependencies: - earth-osm==2.2 - fake-useragent==2.0.3 - googledrivedownloader==0.4 - - highspy==1.8.1 - - protobuf==5.29.1 + - highspy==1.9.0 + - protobuf==5.29.2 - tsam==2.3.6 prefix: /Users/runner/miniconda3/envs/pypsa-earth diff --git a/envs/windows-pinned.yaml b/envs/windows-pinned.yaml index ca45bea29..b3109fb5c 100644 --- a/envs/windows-pinned.yaml +++ b/envs/windows-pinned.yaml @@ -14,6 +14,7 @@ channels: dependencies: - _openmp_mutex=4.5 - affine=2.4.0 +- ampl-asl=1.0.0 - amply=0.1.6 - anyio=4.7.0 - appdirs=1.4.4 @@ -23,16 +24,16 @@ dependencies: - asttokens=3.0.0 - 
async-lru=2.0.4 - atlite=0.3.0 -- attrs=24.2.0 +- attrs=24.3.0 - aws-c-auth=0.8.0 - aws-c-cal=0.8.1 -- aws-c-common=0.10.5 +- aws-c-common=0.10.6 - aws-c-compression=0.3.0 - aws-c-event-stream=0.5.0 - aws-c-http=0.9.2 - aws-c-io=0.15.3 - aws-c-mqtt=0.11.0 -- aws-c-s3=0.7.5 +- aws-c-s3=0.7.7 - aws-c-sdkutils=0.2.1 - aws-checksums=0.2.2 - aws-crt-cpp=0.29.7 @@ -45,24 +46,24 @@ dependencies: - beautifulsoup4=4.12.3 - bleach=6.2.0 - blosc=1.21.6 -- bokeh=3.5.2 +- bokeh=3.6.2 - bottleneck=1.4.2 -- branca=0.7.2 +- branca=0.8.1 - brotli=1.1.0 - brotli-bin=1.1.0 - brotli-python=1.1.0 - brotlicffi=1.1.0.0 - bzip2=1.0.8 -- c-ares=1.34.3 +- c-ares=1.34.4 - c-blosc2=2.15.2 -- ca-certificates=2024.8.30 +- ca-certificates=2024.12.14 - cached-property=1.5.2 - cached_property=1.5.2 - cairo=1.18.2 - capnproto=1.0.2 - cartopy=0.23.0 - cdsapi=0.7.5 -- certifi=2024.8.30 +- certifi=2024.12.14 - cffi=1.17.1 - cfgv=3.3.1 - cfitsio=4.4.1 @@ -81,22 +82,23 @@ dependencies: - contourpy=1.3.1 - country_converter=1.2 - cpp-expected=1.1.0 +- cppad=20240000.7 - cpython=3.10.16 - cycler=0.12.1 -- cytoolz=1.0.0 -- dask=2024.12.0 -- dask-core=2024.12.0 -- dask-expr=1.1.20 +- cytoolz=1.0.1 +- dask=2024.12.1 +- dask-core=2024.12.1 +- dask-expr=1.1.21 - datapi=0.1.1 - datashader=0.16.3 - datrie=0.8.2 -- debugpy=1.8.10 +- debugpy=1.8.11 - decorator=5.1.1 - defusedxml=0.7.1 - deprecation=2.1.0 - descartes=1.1.0 - distlib=0.3.9 -- distributed=2024.12.0 +- distributed=2024.12.1 - docutils=0.21.2 - dpath=2.2.0 - entrypoints=0.4 @@ -107,7 +109,7 @@ dependencies: - filelock=3.16.1 - fiona=1.9.6 - fmt=11.0.2 -- folium=0.19.0 +- folium=0.19.2 - font-ttf-dejavu-sans-mono=2.37 - font-ttf-inconsolata=3.000 - font-ttf-source-code-pro=2.038 @@ -120,7 +122,7 @@ dependencies: - freetype=2.12.1 - freexl=2.0.0 - fribidi=1.0.10 -- fsspec=2024.10.0 +- fsspec=2024.12.0 - future=1.0.0 - gdal=3.9.3 - geographiclib=2.0 @@ -130,14 +132,15 @@ dependencies: - geopy=2.4.1 - geos=3.13.0 - geotiff=1.7.3 -- geoviews=1.13.1 -- geoviews-core=1.13.1 +- geoviews=1.14.0 +- geoviews-core=1.14.0 - getopt-win32=0.1 - gitdb=4.0.11 - gitpython=3.1.43 - glib=2.82.2 - glib-tools=2.82.2 - glpk=5.0 +- gmp=6.3.0 - graphite2=1.3.13 - graphviz=12.0.0 - gst-plugins-base=1.24.7 @@ -146,7 +149,7 @@ dependencies: - gurobi=12.0.0 - h11=0.14.0 - h2=4.1.0 -- harfbuzz=9.0.0 +- harfbuzz=10.1.0 - hdf4=4.2.15 - hdf5=1.14.3 - holoviews=1.20.0 @@ -154,7 +157,7 @@ dependencies: - httpcore=1.0.7 - httpx=0.28.1 - humanfriendly=10.0 -- hvplot=0.11.1 +- hvplot=0.11.2 - hyperframe=6.0.1 - icu=75.1 - identify=2.6.3 @@ -164,9 +167,9 @@ dependencies: - importlib_resources=6.4.5 - inflate64=1.0.0 - iniconfig=2.0.0 -- ipopt=3.14.16 +- ipopt=3.14.17 - ipykernel=6.29.5 -- ipython=8.30.0 +- ipython=8.31.0 - isoduration=20.11.0 - jedi=0.19.2 - jinja2=3.1.4 @@ -179,10 +182,10 @@ dependencies: - jupyter-lsp=2.2.5 - jupyter_client=8.6.3 - jupyter_core=5.7.2 -- jupyter_events=0.10.0 -- jupyter_server=2.14.2 +- jupyter_events=0.11.0 +- jupyter_server=2.15.0 - jupyter_server_terminals=0.5.3 -- jupyterlab=4.3.3 +- jupyterlab=4.3.4 - jupyterlab_pygments=0.3.0 - jupyterlab_server=2.27.3 - kealib=1.6.0 @@ -198,14 +201,15 @@ dependencies: - libarrow-dataset=18.1.0 - libarrow-substrait=18.1.0 - libblas=3.9.0 +- libboost=1.86.0 - libbrotlicommon=1.1.0 - libbrotlidec=1.1.0 - libbrotlienc=1.1.0 - libcblas=3.9.0 -- libclang13=19.1.5 +- libclang13=19.1.6 - libcrc32c=1.1.2 - libcurl=8.11.1 -- libdeflate=1.22 +- libdeflate=1.23 - libevent=2.1.12 - libexpat=2.6.4 - libffi=3.4.2 @@ -231,6 +235,7 @@ dependencies: - 
libgoogle-cloud=2.32.0 - libgoogle-cloud-storage=2.32.0 - libgrpc=1.67.1 +- libhwloc=2.11.2 - libiconv=1.17 - libintl=0.22.5 - libintl-devel=0.22.5 @@ -238,7 +243,7 @@ dependencies: - libkml=1.3.0 - liblapack=3.9.0 - liblzma=5.6.3 -- libmamba=2.0.4 +- libmamba=2.0.5 - libnetcdf=4.9.2 - libogg=1.3.5 - libopenblas=0.3.28 @@ -258,7 +263,7 @@ dependencies: - libtiff=4.7.0 - libutf8proc=2.9.0 - libvorbis=1.3.7 -- libwebp-base=1.4.0 +- libwebp-base=1.5.0 - libwinpthread=12.0.0.r4.gg4f2fc60ca - libxcb=1.17.0 - libxml2=2.13.5 @@ -274,7 +279,7 @@ dependencies: - lz4=4.3.3 - lz4-c=1.10.0 - lzo=2.10 -- mamba=2.0.4 +- mamba=2.0.5 - mapclassify=2.8.1 - markdown=3.6 - markdown-it-py=3.0.0 @@ -288,13 +293,14 @@ dependencies: - mercantile=1.2.1 - minizip=4.0.7 - mistune=3.0.2 +- mpfr=4.2.1 - msgpack-python=1.1.0 - multipledispatch=0.6.0 - multiurl=0.3.3 - multivolumefile=0.2.3 - mumps-seq=5.7.3 - munkres=1.1.4 -- nbclient=0.10.1 +- nbclient=0.10.2 - nbconvert-core=7.16.4 - nbformat=5.10.4 - nest-asyncio=1.6.0 @@ -317,9 +323,9 @@ dependencies: - packaging=24.2 - pandas=2.2.2 - pandocfilters=1.5.0 -- panel=1.5.4 +- panel=1.5.5 - pango=1.54.0 -- param=2.1.1 +- param=2.2.0 - parso=0.8.4 - partd=1.4.2 - patsy=1.0.1 @@ -358,14 +364,15 @@ dependencies: - pydoe2=1.3.0 - pygments=2.18.0 - pyogrio=0.10.0 -- pyomo=6.8.2 +- pyomo=6.6.1 - pyparsing=3.2.0 - pyppmd=1.1.0 - pyproj=3.7.0 -- pypsa=0.24.0 +- pypsa=0.28.0 - pyqt=5.15.9 - pyqt5-sip=12.12.2 - pyreadline3=3.5.4 +- pyscipopt=5.2.1 - pyshp=2.3.1 - pysocks=1.7.1 - pytables=3.10.1 @@ -401,16 +408,17 @@ dependencies: - ruamel.yaml=0.17.26 - ruamel.yaml.clib=0.2.8 - scikit-learn=1.6.0 +- scip=9.2.0 - scipy=1.14.1 - seaborn=0.13.2 - seaborn-base=0.13.2 - send2trash=1.8.3 - setuptools=75.6.0 - shapely=2.0.6 -- simdjson=3.10.1 +- simdjson=3.11.3 - sip=6.7.12 - six=1.17.0 -- smart_open=7.0.5 +- smart_open=7.1.0 - smmap=5.0.0 - snakemake-minimal=7.32.4 - snappy=1.2.1 @@ -424,12 +432,13 @@ dependencies: - statsmodels=0.14.4 - stopit=1.1.2 - tabulate=0.9.0 +- tbb=2022.0.0 - tblib=3.0.0 - terminado=0.18.1 - texttable=1.7.0 - threadpoolctl=3.5.0 - throttler=1.2.2 -- tiledb=2.26.2 +- tiledb=2.27.0 - tinycss2=1.4.0 - tk=8.6.13 - toml=0.10.2 @@ -468,10 +477,10 @@ dependencies: - xarray=2023.11.0 - xerces-c=3.2.5 - xlrd=2.0.1 -- xorg-libice=1.1.1 -- xorg-libsm=1.2.4 +- xorg-libice=1.1.2 +- xorg-libsm=1.2.5 - xorg-libx11=1.8.10 -- xorg-libxau=1.0.11 +- xorg-libxau=1.0.12 - xorg-libxdmcp=1.1.5 - xorg-libxext=1.3.6 - xorg-libxpm=3.5.17 @@ -493,8 +502,8 @@ dependencies: - earth-osm==2.2 - fake-useragent==2.0.3 - googledrivedownloader==0.4 - - highspy==1.8.1 + - highspy==1.9.0 - polars==1.17.1 - - protobuf==5.29.1 + - protobuf==5.29.2 - tsam==2.3.6 prefix: C:\Miniconda\envs\pypsa-earth From 36f45f07ce6154cef5aaddad72cfe01a7273bf56 Mon Sep 17 00:00:00 2001 From: ekatef Date: Sat, 21 Dec 2024 21:38:09 +0100 Subject: [PATCH 40/48] Remove an outdated dependency --- scripts/solve_network.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2464d20e1..fde668f2b 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -91,7 +91,6 @@ from pypsa.descriptors import get_switchable_as_dense as get_as_dense from pypsa.optimization.abstract import optimize_transmission_expansion_iteratively from pypsa.optimization.optimize import optimize -from vresutils.benchmark import memory_logger logger = create_logger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) From 25f6d7369e56b4839b99e4fd9ec037a783618c39 Mon Sep 17 00:00:00 2001 
From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Sat, 21 Dec 2024 21:38:36 +0100 Subject: [PATCH 41/48] Remove a redundant workflow --- .github/workflows/update-pinned-env-pr.yml | 82 ---------------------- 1 file changed, 82 deletions(-) delete mode 100644 .github/workflows/update-pinned-env-pr.yml diff --git a/.github/workflows/update-pinned-env-pr.yml b/.github/workflows/update-pinned-env-pr.yml deleted file mode 100644 index 67e2a4b10..000000000 --- a/.github/workflows/update-pinned-env-pr.yml +++ /dev/null @@ -1,82 +0,0 @@ -name: Update pinned envs - -on: - push: - paths: - - envs/environment.yaml - workflow_dispatch: - - -jobs: - update-pinned-environment-pr: - # if: ${{ github.ref == 'refs/heads/main' }} - name: Update pinned envs PR - runs-on: ${{ matrix.os }}-latest - strategy: - fail-fast: false - matrix: - os: [ubuntu, macos, windows] - include: - - os: ubuntu - suffix: "linux" - - os: macos - suffix: "macos" - - os: windows - suffix: "windows" - - steps: - - uses: actions/checkout@v4 - - - name: Setup conda - uses: conda-incubator/setup-miniconda@v3 - with: - activate-environment: ${{ github.event.repository.name }} - environment-file: envs/environment.yaml - - - name: Update pinned environment per OS - run: | - conda env export --name ${{ github.event.repository.name }} --no-builds > envs/${{ matrix.suffix }}-pinned.yaml - - - name: Add SPDX header - if: ${{ matrix.suffix != 'windows' }} - run: | - SPDX_HEADER="# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors\n#\n# SPDX-License-Identifier: CC0-1.0\n" - echo -e "$SPDX_HEADER" | cat - envs/${{ matrix.suffix }}-pinned.yaml > temp && mv temp envs/${{ matrix.suffix }}-pinned.yaml - - - name: Add SPDX header (windows) - if: ${{ matrix.suffix == 'windows' }} - run: | - $SPDX_HEADER = "# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur`r`n#`r`n# SPDX-License-Identifier: CC0-1.0`r`n`r`n" - $CurrentContent = Get-Content "envs/${{ matrix.suffix }}-pinned.yaml" -Raw - $NewContent = $SPDX_HEADER + $CurrentContent - $NewContent | Set-Content "envs/${{ matrix.suffix }}-pinned.yaml" - - - name: Cache environment files - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.suffix }}-pinned - path: envs/${{ matrix.suffix }}-pinned.yaml - - create-pull-request: - needs: update-pinned-environment-pr - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Download all artifacts - uses: actions/download-artifact@v4 - - - name: Prepare files for commit - run: | - mkdir -p envs - mv linux-pinned/* envs/linux-pinned.yaml - mv macos-pinned/* envs/macos-pinned.yaml - mv windows-pinned/* envs/windows-pinned.yaml - - - name: Create Pull Request - uses: peter-evans/create-pull-request@v7 - with: - token: ${{ secrets.GITHUB_TOKEN }} - branch: update-pinned-environment - title: "[github-actions.ci] Update pinned envs" - body: "Automatically generated PR to update pinned environment files for Windows, macOS, and Linux."
- commit-message: "Update pinned environment files for all platforms" From 0f1d8ce2673a0788c0ca963c75a53338bd8ccff1 Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Sun, 22 Dec 2024 10:49:11 +0100 Subject: [PATCH 42/48] Re-enable CI for Windows --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a243a304b..8ab61e1e7 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,8 +31,8 @@ jobs: env_file: envs/linux-pinned.yaml - os: macos env_file: envs/macos-pinned.yaml # - os: windows # env_file: envs/windows-pinned.yaml + - os: windows + env_file: envs/windows-pinned.yaml defaults: run: From 9dcfd1e223364f37eaeb436b8820b7f71018ab9b Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Tue, 24 Dec 2024 09:07:11 +0100 Subject: [PATCH 43/48] Minor refactoring --- scripts/solve_network.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index fde668f2b..2dd80b17a 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -373,14 +373,14 @@ def add_operational_reserve_margin_constraint(n, sns, config): 0, np.inf, coords=[sns, n.generators.index], name="Generator-r" ) reserve = n.model["Generator-r"] - lhs = reserve.sum("Generator") + summed_reserve = reserve.sum("Generator") # Share of extendable renewable capacities ext_i = n.generators.query("p_nom_extendable").index vres_i = n.generators_t.p_max_pu.columns if not ext_i.empty and not vres_i.empty: capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)] - renewable_capacity_variables = ( + p_nom_vres = ( n.model["Generator-p_nom"] .loc[vres_i.intersection(ext_i)] .rename({"Generator-ext": "Generator"}) ) @@ -472,7 +472,8 @@ def add_battery_constraints(n): # for some reasons, eff is one element longer as compared with vars_link vars_link.sel({"Link-ext": nodes + " discharger"}) * -eff[0], ) - n.model.add_constraints(lhs == 0, name="link_charger_ratio") + + n.model.add_constraints(lhs == 0, name="Link-charger_ratio") From 82e431f85675cac07b8e20f49c14b0b335deffae Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Tue, 24 Dec 2024 09:07:42 +0100 Subject: [PATCH 44/48] Replace merge functions --- scripts/solve_network.py | 34 ++++++++++++++++------------------ 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2dd80b17a..b03a2a101 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -385,12 +385,9 @@ def add_operational_reserve_margin_constraint(n, sns, config): .loc[vres_i.intersection(ext_i)] .rename({"Generator-ext": "Generator"}) ) - lhs = merge( - lhs, - (renewable_capacity_variables * (-EPSILON_VRES * capacity_factor)).sum( - ["Generator"] - ), - ) + lhs = summed_reserve + ( + p_nom_vres * (-EPSILON_VRES * xr.DataArray(capacity_factor)), + ).sum("Generator") # Total demand per t demand = get_as_dense(n, "Load", "p_set").sum(axis=1) @@ -418,10 +415,7 @@ def update_capacity_constraint(n): p_max_pu = get_as_dense(n, "Generator", "p_max_pu") - lhs = merge( - dispatch * 1, - reserve * 1, - ) + lhs = dispatch + reserve if not ext_i.empty: capacity_variable = n.model["Generator-p_nom"].rename( @@ -462,15 +456,19 @@ def add_battery_constraints(n): Add constraint ensuring that charger = discharger, i.e.
1 * charger_size - efficiency * discharger_size = 0 """ - nodes = n.buses.index[n.buses.carrier == "battery"] - if nodes.empty: + if not n.links.p_nom_extendable.any(): return - vars_link = n.model["Link-p_nom"] - eff = n.links.loc[nodes + " discharger", "efficiency"] - lhs = merge( - vars_link.sel({"Link-ext": nodes + " charger"}) * 1, - # for some reasons, eff is one element longer as compared with vars_link - vars_link.sel({"Link-ext": nodes + " discharger"}) * -eff[0], + + discharger_bool = n.links.index.str.contains("battery discharger") + charger_bool = n.links.index.str.contains("battery charger") + + dischargers_ext = n.links[discharger_bool].query("p_nom_extendable").index + chargers_ext = n.links[charger_bool].query("p_nom_extendable").index + + eff = n.links.efficiency[dischargers_ext].values + lhs = ( + n.model["Link-p_nom"].loc[chargers_ext] + - n.model["Link-p_nom"].loc[dischargers_ext] * eff ) n.model.add_constraints(lhs == 0, name="Link-charger_ratio") From d6b36591636f1cc98448a59602e6bc0430ba5796 Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Tue, 24 Dec 2024 09:56:06 +0100 Subject: [PATCH 45/48] Add a TODO --- scripts/solve_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index b03a2a101..32e25795e 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -417,6 +417,7 @@ def update_capacity_constraint(n): lhs = dispatch + reserve + # TODO check if `p_max_pu[ext_i]` is safe for an empty `ext_i` and drop the if clause in that case if not ext_i.empty: capacity_variable = n.model["Generator-p_nom"].rename( {"Generator-ext": "Generator"} ) From e4386fcc6611d0ccbc734514fa0749371ff844fb Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Tue, 24 Dec 2024 10:41:47 +0100 Subject: [PATCH 46/48] Fix typo --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 32e25795e..398ca6656 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -386,7 +386,7 @@ def add_operational_reserve_margin_constraint(n, sns, config): .rename({"Generator-ext": "Generator"}) ) lhs = summed_reserve + ( - p_nom_vres * (-EPSILON_VRES * xr.DataArray(capacity_factor)), + p_nom_vres * (-EPSILON_VRES * xr.DataArray(capacity_factor)) ).sum("Generator") # Total demand per t From 4b05b025ee13ad4938d2b95bf78c61832a9e9ec5 Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Sat, 28 Dec 2024 00:23:06 +0100 Subject: [PATCH 47/48] Update configuration strategies --- config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config.default.yaml b/config.default.yaml index ed8554002..9a1e6ffd6 100644 --- a/config.default.yaml +++ b/config.default.yaml @@ -99,6 +99,7 @@ cluster_options: p_nom_max: sum p_nom_min: sum p_min_pu: mean + p_max_pu: weighted_average marginal_cost: mean committable: any ramp_limit_up: max From 2a5c930b76f24933d86f9734f74941779ff62365 Mon Sep 17 00:00:00 2001 From: ekatef <30229437+ekatef@users.noreply.github.com> Date: Mon, 6 Jan 2025 10:47:13 +0100 Subject: [PATCH 48/48] Update the argument list for a special case --- scripts/cluster_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 3852a2221..7d3455120 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -707,9 +707,7 @@ def cluster_regions(busmaps, inputs, output): # Fast-path if no clustering is necessary busmap = n.buses.index.to_series() linemap = n.lines.index.to_series() - clustering =
pypsa.clustering.spatial.Clustering( - n, busmap, linemap, linemap, pd.Series(dtype="O") - ) + clustering = pypsa.clustering.spatial.Clustering(n, busmap, linemap) elif len(n.buses) < n_clusters: logger.error( f"Desired number of clusters ({n_clusters}) higher than the number of buses ({len(n.buses)})"
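A note on the linopy idiom adopted across PATCHES 25-48: custom constraints are now assembled from `n.model` variables and registered with `n.model.add_constraints()` instead of the old `get_var`/pyomo-style helpers. The following minimal sketch mirrors the country-wise grouping of `add_RES_constraints()` from PATCH 25; it is illustrative only, and the network path "elec.nc", the carrier list and the 0.3 target share are assumed placeholders, not values taken from these patches.

import pypsa

# Any PyPSA-Earth electricity network would do; "elec.nc" is a placeholder.
n = pypsa.Network("elec.nc")

# Build the linopy model without solving; this populates n.model with
# variables such as "Generator-p" and "Generator-p_nom".
n.optimize.create_model()

res_share = 0.3  # assumed target share of renewable generation per country
gens_i = n.generators.query("carrier in ['solar', 'onwind']").index

# Map generators and loads to countries, as add_RES_constraints() does.
ggrouper = n.generators.loc[gens_i].bus.map(n.buses.country)
lgrouper = n.loads.bus.map(n.buses.country)

# Snapshot-weighted demand per country (right-hand side of the constraint).
load = (
    n.snapshot_weightings.generators
    @ n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
)

# Snapshot-weighted renewable dispatch per country, aggregated directly on
# the linopy variable (left-hand side of the constraint).
lhs = (
    (n.model["Generator-p"].loc[:, gens_i] * n.snapshot_weightings.generators)
    .groupby(ggrouper.to_xarray())
    .sum()
)

n.model.add_constraints(lhs >= res_share * load, name="RES_share_sketch")

The same three steps (slice the model variable, aggregate it with groupby over a mapped index, register the result via add_constraints) recur in the battery charger/discharger coupling of PATCH 44 and the reserve-margin constraints of PATCH 43 above.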