diff --git a/README.md b/README.md index c4832758b..d860c27e4 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,7 @@ PyPSA-DE is a softfork of PyPSA-EUR. As such, large parts of the functionality a - Additional constraints that limit maximum capacity of specific technologies - Import constraints on Efuels, hydrogen and electricity - Renewable build out according to the Wind-an-Land, Wind-auf-See and Solarstrategie laws -- A comprehensive reporting module that exports Capacity Expansion, Primary/Secondary/Final Energy, CO2 Emissions per Sector, Trade, Investments, and more. +- A comprehensive reporting module that exports Capacity Expansion, Primary/Secondary/Final Energy, CO2 Emissions per Sector, Trade, Investments, and more. Including a new `STRANSIENT` utility script for power systems stability exports. - Plotting functionality to compare different scenarios - Electricity Network development until 2030 (and for AC beyond) according to the NEP23 - Offshore development until 2030 according to the Offshore NEP23 diff --git a/config/config.de.yaml b/config/config.de.yaml index 42e014928..8ab1e2dfb 100644 --- a/config/config.de.yaml +++ b/config/config.de.yaml @@ -445,7 +445,10 @@ solving: gas pipeline new: 0.3 H2 pipeline: 0.05 H2 pipeline retrofitted: 0.05 - fractional_last_unit_size: true + fractional_last_unit_size: true + solver: + name: highs + options: highs-default constraints: # The default CO2 budget uses the KSG targets, and the non CO2 emissions from the REMIND model in the KN2045_Mix scenario co2_budget_national: diff --git a/scripts/export_stransient.py b/scripts/export_stransient.py new file mode 100644 index 000000000..e2e2eefda --- /dev/null +++ b/scripts/export_stransient.py @@ -0,0 +1,113 @@ +""" +Utility script to extract network components from PyPSA-DE exports and format +them into STRANSIENT-compatible CSV files. + +This can be run standalone via the CLI, or automatically as part of a Snakemake workflow. 
+""" + +import argparse +from pathlib import Path + +import pandas as pd + + +def export_stransient(base: Path, out: Path): + """ + Parses PyPSA network CSV exports and translates them into the STRANSIENT format. + + Parameters + ---------- + base : Path + Input directory containing standard PyPSA exports (buses.csv, lines.csv, generators.csv, loads.csv). + out : Path + Output directory to save the formatted STRANSIENT CSV files. + """ + out.mkdir(parents=True, exist_ok=True) + bus_df = pd.read_csv(base / "buses.csv").rename( + columns={"name": "bus_id", "v_nom": "vn_kv"} + ) + bus_df["vm_pu"] = bus_df["v_mag_pu_set"] + strans_bus = bus_df[["bus_id", "vn_kv", "type", "vm_pu"]].copy() + strans_bus["area"] = "DE" + strans_bus.to_csv(out / "stransient_bus.csv", index=False) + lines = pd.read_csv(base / "lines.csv").rename(columns={"name": "branch_id"}) + lines["type"] = "AC_line" + lines[ + ["branch_id", "bus0", "bus1", "r_pu", "x_pu", "length", "i_nom", "type"] + ].to_csv(out / "stransient_branch.csv", index=False) + gens = pd.read_csv(base / "generators.csv") + q_source = ( + "q_nom" + if "q_nom" in gens.columns + else "q_set" + if "q_set" in gens.columns + else None + ) + rename_map = {"name": "gen_id", "p_nom": "p_max_mw", "carrier": "type"} + if q_source is not None: + rename_map[q_source] = "q_max_mvar" + else: + gens["q_max_mvar"] = 0.0 + gens = gens.rename(columns=rename_map) + required_cols = ["gen_id", "bus", "p_max_mw", "q_max_mvar", "type"] + gens[required_cols].to_csv(out / "stransient_gen.csv", index=False) + loads = pd.read_csv(base / "loads.csv") + p_source = ( + "p_mw" + if "p_mw" in loads.columns + else "p_set" + if "p_set" in loads.columns + else None + ) + q_source = ( + "q_mvar" + if "q_mvar" in loads.columns + else "q_set" + if "q_set" in loads.columns + else None + ) + load_rename = {"name": "load_id"} + if p_source: + load_rename[p_source] = "p_mw" + else: + loads["p_mw"] = 0.0 + if q_source: + load_rename[q_source] = "q_mvar" + else: + 
loads["q_mvar"] = 0.0 + loads = loads.rename(columns=load_rename) + loads[["load_id", "bus", "p_mw", "q_mvar"]].to_csv( + out / "stransient_load.csv", index=False + ) + print("wired exports done") + + +if __name__ == "__main__": + if "snakemake" in globals(): + snakemake = globals()["snakemake"] + base_dir = Path(snakemake.input.exports_dir) + out_dir = Path(snakemake.output.stransient_dir) + export_stransient(base_dir, out_dir) + else: + parser = argparse.ArgumentParser( + description="Export STRANSIENT grids from PyPSA" + ) + parser.add_argument( + "--exports-dir", + type=Path, + default=Path( + "results/20260114_limit_cross_border_flows/KN2045_Mix/exports" + ), + help="Directory containing PyPSA export CSVs", + ) + parser.add_argument( + "--out-dir", + type=Path, + default=None, + help="Output directory. Defaults to /../stransient", + ) + args = parser.parse_args() + + base_dir = args.exports_dir + out_dir = args.out_dir if args.out_dir else base_dir.parent / "stransient" + export_stransient(base_dir, out_dir) diff --git a/scripts/pypsa-de/additional_functionality.py b/scripts/pypsa-de/additional_functionality.py index e49bd55c3..8b0e5a95b 100644 --- a/scripts/pypsa-de/additional_functionality.py +++ b/scripts/pypsa-de/additional_functionality.py @@ -9,6 +9,33 @@ logger = logging.getLogger(__name__) +def h2_import_limits_enabled(config): + return config.get("pypsa-de", {}).get("h2_import_limits", {}).get("enable", True) + + +def safe_add_constraint(model, expr, rhs, sense, name): + """Wrap solver call to skip constant-constant constraints.""" + try: + if sense == "<=": + model.add_constraints(expr <= rhs, name=name) + elif sense == ">=": + model.add_constraints(expr >= rhs, name=name) + else: + raise ValueError(f"Unsupported sense '{sense}'") + return True + except ValueError as exc: + if "Both sides of the constraint are constant" in str(exc): + logger.debug( + "Skipping constraint %s because both sides are constant (%s %s %s)", + name, + expr, + sense, + rhs, 
+ ) + return False + raise + + + def add_capacity_limits(n, investment_year, limits_capacity, sense="maximum"): for c in n.iterate_components(limits_capacity): logger.info(f"Adding {sense} constraints for {c.list_name}") @@ -208,6 +235,12 @@ def add_pos_neg_aux_variables(n, idx, var_name, infix): def h2_import_limits(n, investment_year, limits_volume_max): + if not h2_import_limits_enabled(n.config): + logger.info( + "Skipping H2 import limit constraints because pypsa-de.h2_import_limits.enable is False." + ) + return + for ct in limits_volume_max["h2_import"]: limit = limits_volume_max["h2_import"][ct][investment_year] * 1e6 @@ -228,6 +261,12 @@ def h2_import_limits(n, investment_year, limits_volume_max): & (n.links.bus1.str[:2] != ct) ] + if incoming.empty and outgoing.empty: + logger.warning( + f"No hydrogen import/export links found for {ct}; skipping limit enforcement." 
+ ) + continue + incoming_p = ( n.model["Link-p"].loc[:, incoming] * n.snapshot_weightings.generators ).sum() @@ -239,7 +284,13 @@ def h2_import_limits(n, investment_year, limits_volume_max): cname = f"H2_import_limit-{ct}" - n.model.add_constraints(lhs <= limit, name=f"GlobalConstraint-{cname}") + added = safe_add_constraint( + n.model, + lhs, + limit, + "<=", + name=f"GlobalConstraint-{cname}", + ) if cname in n.global_constraints.index: logger.warning( @@ -247,20 +298,27 @@ def h2_import_limits(n, investment_year, limits_volume_max): ) n.global_constraints.drop(cname, inplace=True) - n.add( - "GlobalConstraint", - cname, - constant=limit, - sense="<=", - type="", - carrier_attribute="", - ) + if added: + n.add( + "GlobalConstraint", + cname, + constant=limit, + sense="<=", + type="", + carrier_attribute="", + ) logger.info("Adding H2 export ban") cname = f"H2_export_ban-{ct}" - n.model.add_constraints(lhs >= 0, name=f"GlobalConstraint-{cname}") + added_export = safe_add_constraint( + n.model, + lhs, + 0, + ">=", + name=f"GlobalConstraint-{cname}", + ) if cname in n.global_constraints.index: logger.warning( @@ -268,14 +326,15 @@ def h2_import_limits(n, investment_year, limits_volume_max): ) n.global_constraints.drop(cname, inplace=True) - n.add( - "GlobalConstraint", - cname, - constant=0, - sense=">=", - type="", - carrier_attribute="", - ) + if added_export: + n.add( + "GlobalConstraint", + cname, + constant=0, + sense=">=", + type="", + carrier_attribute="", + ) def h2_production_limits(n, investment_year, limits_volume_min, limits_volume_max): diff --git a/stransient_loader.py b/stransient_loader.py new file mode 100644 index 000000000..b3fd7814f --- /dev/null +++ b/stransient_loader.py @@ -0,0 +1,133 @@ +""" +Provides utilities to load STRANSIENT export CSV files generated by PyPSA-DE +and translate them back into an executable Pandapower network. 
+""" + +from pathlib import Path + +import pandas as pd + +try: + import pandapower as pp +except ( + ImportError +) as exc: # pragma: no cover - fallback for environments without pandapower + raise ImportError( + "pandapower is required to build a STRANSIENT grid but is not installed." + ) from exc + + +def _safe_per_km(value: float, length_km: float) -> float: + return float(value) / max(float(length_km), 1e-6) + + +def build_net_from_stransient( + folder: Path, + slack_bus_id: str | None = None, + vm_pu: float = 1.02, +) -> pp.pandapowerNet: + """ + Translate exported STRANSIENT CSVs into a runnable pandapower network. + + This function expects four specific CSV exports from the export_stransient script: + - stransient_bus.csv + - stransient_branch.csv + - stransient_gen.csv + - stransient_load.csv + + Parameters + ---------- + folder : Path + Directory where the STRANSIENT CSV files are located. + slack_bus_id : str | None, optional + ID of the bus to assign as the external grid (slack node). If None, the + first bus found in the dataset is used. Default is None. + vm_pu : float, optional + Voltage magnitude setpoint (in p.u.) for the slack node. Default is 1.02. + + Returns + ------- + pp.pandapowerNet + A fully constructed Pandapower network. 
+ """ + folder = Path(folder) + if not folder.exists(): + raise FileNotFoundError(f"STRANSIENT folder not found: {folder}") + + # Required files + files = { + "buses": folder / "stransient_bus.csv", + "branches": folder / "stransient_branch.csv", + "generators": folder / "stransient_gen.csv", + "loads": folder / "stransient_load.csv", + } + + for name, path in files.items(): + if not path.exists(): + raise FileNotFoundError(f"Missing {name} export at {path}") + + bus_df = pd.read_csv(files["buses"]) + line_df = pd.read_csv(files["branches"]) + gen_df = pd.read_csv(files["generators"]) + load_df = pd.read_csv(files["loads"]) + + net = pp.create_empty_network() + bus_map: dict[str, int] = {} + for _, row in bus_df.iterrows(): + bus_id = row["bus_id"] + bus_map[bus_id] = pp.create_bus(net, vn_kv=row["vn_kv"], name=bus_id) + + slack_bus_id = slack_bus_id or (bus_df["bus_id"].iloc[0] if len(bus_df) else None) + if slack_bus_id and slack_bus_id in bus_map: + pp.create_ext_grid( + net, bus=bus_map[slack_bus_id], vm_pu=vm_pu, name="STRANSIENT slack" + ) + elif bus_map: + first_bus = next(iter(bus_map.values())) + pp.create_ext_grid( + net, bus=first_bus, vm_pu=vm_pu, name="STRANSIENT slack (default)" + ) + + for _, row in gen_df.iterrows(): + bus_name = row["bus"] + if bus_name not in bus_map: + continue + pp.create_sgen( + net, + bus=bus_map[bus_name], + p_mw=row.get("p_max_mw", 0.0), + q_mvar=row.get("q_max_mvar", 0.0), + name=row.get("gen_id", f"sgen-{bus_name}"), + ) + + for _, row in load_df.iterrows(): + bus_name = row["bus"] + if bus_name not in bus_map: + continue + pp.create_load( + net, + bus=bus_map[bus_name], + p_mw=row.get("p_mw", 0.0), + q_mvar=row.get("q_mvar", 0.0), + name=row.get("load_id", f"load-{bus_name}"), + ) + + for _, row in line_df.iterrows(): + bus0 = row["bus0"] + bus1 = row["bus1"] + if bus0 not in bus_map or bus1 not in bus_map: + continue + length_km = float(row.get("length", 1.0)) + pp.create_line_from_parameters( + net, + bus_map[bus0], + 
bus_map[bus1], + length_km=length_km, + r_ohm_per_km=_safe_per_km(row.get("r_pu", 0.0), length_km), + x_ohm_per_km=_safe_per_km(row.get("x_pu", 0.0), length_km), + c_nf_per_km=float(row.get("c_nf_per_km", 0.0)), + max_i_ka=float(row.get("i_nom", 1.0)), + name=row.get("branch_id", f"branch-{bus0}-{bus1}"), + ) + + return net