Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -175,4 +175,6 @@ test_project
__dsgrid_scratch__
journal*.json5
dev_project
dev_project.json5
dev_project.json5
*DS_Store*
equinor*
149 changes: 147 additions & 2 deletions src/stride/cli/stride.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,15 @@
from typing import Any, Callable

import rich_click as click
from chronify.exceptions import ChronifyExceptionBase
from chronify.exceptions import ChronifyExceptionBase, InvalidParameter
from chronify.loggers import setup_logging
from dsgrid.cli.common import path_callback
from dsgrid.exceptions import DSGBaseException
from loguru import logger

from stride import Project
from stride.config import CACHED_PROJECTS_UPPER_BOUND
from stride.models import CalculatedTableOverride
from stride.models import CalculatedTableOverride, CustomDemandComponent
from stride.project import list_valid_countries, list_valid_model_years, list_valid_weather_years
from stride.ui.palette_utils import list_user_palettes, set_palette_priority
from stride.dataset_download import (
Expand Down Expand Up @@ -191,6 +191,14 @@ def create_project(
)
if res[1] != 0:
ctx.exit(res[1])
project = res[0]
if project is not None:
try:
from stride.ui.project_manager import add_recent_project

add_recent_project(project.path, project.config.project_id)
except Exception:
logger.exception("Could not add to recent projects")


_export_ep_epilog = """
Expand Down Expand Up @@ -1240,6 +1248,139 @@ def refresh_palette(ctx: click.Context, project_path: Path) -> None:
print("\nPalette colors refreshed and saved!")


# Parent group for the `stride custom-demand` subcommands (add / list / remove).
# Note: the docstring doubles as the click help text, so it is user-facing.
@click.group(name="custom-demand")
def custom_demand() -> None:
    """Custom demand component commands"""


# Epilog shown by `stride custom-demand add --help`.
# The literal \n sequences force hard line breaks in rich_click's rendering.
_custom_demand_add_epilog = """
Examples:\n
Add a flat-profile data center component:\n
$ stride custom-demand add my_project --name data_centers --sector "Data Centers" --data-file data/dc_annual.csv\n
\n
Add a heat pump component using the residential load shape:\n
$ stride custom-demand add my_project --name heat_pumps --sector "Heat Pumps" --data-file hp.csv --load-profile "sector:Residential" --metric heating\n
"""


@click.command(name="add", epilog=_custom_demand_add_epilog)
@click.argument("project-path", type=click.Path(exists=True), callback=path_callback)
@click.option("--name", type=str, required=True, help="Unique identifier (e.g., 'heat_pumps')")
@click.option("--sector", type=str, required=True, help="Sector label for UI (e.g., 'Heat Pumps')")
@click.option(
    "--data-file",
    type=click.Path(exists=True),
    required=True,
    help="CSV/Parquet with model_year and value columns",
    callback=path_callback,
)
@click.option(
    "--load-profile",
    type=str,
    default="flat",
    show_default=True,
    help="Profile: 'flat', 'sector:<name>', 'enduse:<name>', or path to 8760 CSV",
)
@click.option(
    "--metric",
    type=str,
    default="other",
    show_default=True,
    help="End-use/metric label (e.g., 'heating', 'cooling', 'other')",
)
@click.pass_context
def add_custom_demand(
    ctx: click.Context,
    project_path: Path,
    name: str,
    sector: str,
    data_file: Path,
    load_profile: str,
    metric: str,
) -> None:
    """Add a custom demand component and recompute the energy projection."""
    # Delegate to the helper through the shared exception handler, which
    # returns a (result, exit_code) pair; exit the CLI on a non-zero code.
    outcome = handle_stride_exception(
        ctx,
        _add_custom_demand,
        project_path,
        name,
        sector,
        data_file,
        load_profile,
        metric,
    )
    exit_code = outcome[1]
    if exit_code != 0:
        ctx.exit(exit_code)


def _add_custom_demand(
    project_path: Path,
    name: str,
    sector: str,
    data_file: Path,
    load_profile: str,
    metric: str,
) -> None:
    """Append a new CustomDemandComponent to the project config and recompute.

    Raises InvalidParameter when a component with the same name already exists.
    """
    project = Project.load(project_path)

    # Reject duplicates up front so the config never holds two components
    # with the same identifier.
    if any(existing.name == name for existing in project.config.custom_demand_components):
        msg = f"Custom demand component '{name}' already exists. Remove it first."
        raise InvalidParameter(msg)

    new_component = CustomDemandComponent(
        name=name,
        sector=sector,
        data_file=data_file.resolve(),  # store an absolute path in the config
        load_profile=load_profile,
        metric=metric,
    )
    project.config.custom_demand_components.append(new_component)
    project.persist()
    project.compute_energy_projection()
    print(f"Added custom demand component '{name}' and recomputed energy projection.")


@click.command(name="list")
@click.argument("project-path", type=click.Path(exists=True), callback=path_callback)
@click.pass_context
def list_custom_demand(ctx: click.Context, project_path: Path) -> None:
    """List custom demand components in the project."""
    # Read-only access is enough for listing; avoids taking a write lock.
    project = safe_get_project_from_context(ctx, project_path, read_only=True)
    components = project.config.custom_demand_components
    if not components:
        print("No custom demand components configured.")
        return
    print(f"Custom demand components ({len(components)}):")
    for comp in components:
        summary = (
            f"  {comp.name}: sector={comp.sector!r}, profile={comp.load_profile!r}, "
            f"metric={comp.metric!r}, data_file={comp.data_file}"
        )
        print(summary)


@click.command(name="remove")
@click.argument("project-path", type=click.Path(exists=True), callback=path_callback)
@click.option("--name", type=str, required=True, help="Name of the component to remove")
@click.pass_context
def remove_custom_demand(ctx: click.Context, project_path: Path, name: str) -> None:
    """Remove a custom demand component and recompute the energy projection."""
    # handle_stride_exception returns a (result, exit_code) pair.
    outcome = handle_stride_exception(ctx, _remove_custom_demand, project_path, name)
    exit_code = outcome[1]
    if exit_code != 0:
        ctx.exit(exit_code)


def _remove_custom_demand(project_path: Path, name: str) -> None:
    """Drop the named component from the project config and recompute.

    Raises InvalidParameter when no component with that name exists.
    """
    project = Project.load(project_path)
    current = project.config.custom_demand_components
    # Keep everything except the named component; an unchanged length
    # means the name was never present.
    surviving = [component for component in current if component.name != name]
    if len(surviving) == len(current):
        msg = f"Custom demand component '{name}' not found."
        raise InvalidParameter(msg)
    project.config.custom_demand_components = surviving
    project.persist()
    project.compute_energy_projection()
    print(f"Removed custom demand component '{name}' and recomputed energy projection.")


def handle_stride_exception(
ctx: click.Context, func: Callable[..., Any], *args: Any, **kwargs: Any
) -> Any:
Expand Down Expand Up @@ -1277,6 +1418,7 @@ def safe_get_project_from_context(
# Register top-level command groups on the root CLI, then attach
# project-lifecycle subcommands to the `projects` group.
cli.add_command(calculated_tables)
cli.add_command(palette)
cli.add_command(view)
cli.add_command(custom_demand)
projects.add_command(init_project)
projects.add_command(create_project)
projects.add_command(export_energy_projection)
Expand All @@ -1300,3 +1442,6 @@ def safe_get_project_from_context(
# Attach subcommands to the `palette` and `custom-demand` groups.
palette.add_command(set_priority)
palette.add_command(get_priority)
palette.add_command(refresh_palette)
custom_demand.add_command(add_custom_demand)
custom_demand.add_command(list_custom_demand)
custom_demand.add_command(remove_custom_demand)
Original file line number Diff line number Diff line change
Expand Up @@ -61,12 +61,14 @@ ev_annual_energy AS (
stride_annual_energy AS (
-- Combine base energy intensity projections with optional EV projections
-- If use_ev_projection is true, replace Transportation + Road with EV-based calculation
-- Tag each row with energy_source so we can assign distinct metrics later
SELECT
geography,
model_year,
sector,
subsector,
stride_annual_total
stride_annual_total,
'base' AS energy_source
FROM stride_annual_energy_base
WHERE NOT (sector = 'Transportation' AND subsector = 'Road' AND {{ var("use_ev_projection", False) }})

Expand All @@ -77,7 +79,8 @@ stride_annual_energy AS (
model_year,
sector,
subsector,
stride_annual_total
stride_annual_total,
'ev' AS energy_source
FROM ev_annual_energy
),

Expand All @@ -86,10 +89,12 @@ scaling_factors AS (
-- This scales the temperature-adjusted load shapes to match STRIDE totals
-- Same scaling factor applies to all enduses within a sector
-- Note: Load shapes are at sector level, so we aggregate subsectors
-- When EV is enabled, Transportation gets two scaling factors (base vs ev)
SELECT
ls.geography,
ls.model_year,
ls.sector,
stride.energy_source,
CASE
WHEN ls.load_shape_annual_total > 0
THEN SUM(stride.stride_annual_total) / ls.load_shape_annual_total
Expand All @@ -100,10 +105,11 @@ scaling_factors AS (
ON ls.geography = stride.geography
AND ls.model_year = stride.model_year
AND ls.sector = stride.sector
GROUP BY ls.geography, ls.model_year, ls.sector, ls.load_shape_annual_total
GROUP BY ls.geography, ls.model_year, ls.sector,
ls.load_shape_annual_total, stride.energy_source
)

-- Apply scaling factors to create final hourly energy projections
-- Non-EV rows: use base scaling factor, keep original end-use metric
SELECT
ls.timestamp,
ls.model_year,
Expand All @@ -116,3 +122,25 @@ JOIN scaling_factors sf
ON ls.geography = sf.geography
AND ls.model_year = sf.model_year
AND ls.sector = sf.sector
WHERE sf.energy_source = 'base'

UNION ALL

-- EV rows: use EV scaling factor, single 'ev_charging' metric per hour.
-- Aggregate load shape across enduses to avoid per-enduse duplication.
-- Only applies to Transportation sector when EV projection is enabled.
SELECT
ls.timestamp,
ls.model_year,
ls.geography,
ls.sector,
'ev_charging' AS metric,
SUM(ls.adjusted_value) * sf.scaling_factor AS value
FROM load_shapes_filtered ls
JOIN scaling_factors sf
ON ls.geography = sf.geography
AND ls.model_year = sf.model_year
AND ls.sector = sf.sector
WHERE sf.energy_source = 'ev'
AND ls.sector = 'Transportation'
GROUP BY ls.timestamp, ls.model_year, ls.geography, ls.sector, sf.scaling_factor
19 changes: 18 additions & 1 deletion src/stride/dsgrid_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
)
from dsgrid.registry.bulk_register import bulk_register
from dsgrid.registry.common import DataStoreType, DatabaseConnection
from dsgrid.exceptions import DSGValueNotRegistered
from dsgrid.registry.registry_manager import RegistryManager
from loguru import logger

Expand Down Expand Up @@ -108,9 +109,25 @@ def make_mapped_datasets(
)
continue

# Use scenario-specific dataset if one was registered (e.g., for overrides).
# dimension_mappings.json5 only contains baseline__ dataset IDs, so we must
# check whether a scenario-specific dataset exists and substitute it.
scenario_dataset_id = f"{scenario}__{table_name}"
try:
mgr.dataset_manager.get_by_id(scenario_dataset_id)
effective_mapping = dict(mapping)
effective_mapping["dataset_id"] = scenario_dataset_id
logger.info(
"Using scenario-specific dataset {} instead of {}",
scenario_dataset_id,
dataset_id,
)
except DSGValueNotRegistered:
effective_mapping = mapping

_process_dataset_mapping(
con=con,
mapping=mapping,
mapping=effective_mapping,
mappings_dir=mappings_dir,
mgr=mgr,
scenario=scenario,
Expand Down
Loading
Loading