From b25a65c7047d9e550a81fe3f65c4232be11c69b8 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 25 Mar 2025 11:42:07 +0000 Subject: [PATCH 01/28] Limit search space to relevant techs (experimental) --- src/muse/filters.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/muse/filters.py b/src/muse/filters.py index 1b9d351a..686417af 100644 --- a/src/muse/filters.py +++ b/src/muse/filters.py @@ -436,13 +436,25 @@ def initialize_from_technologies( ("asset", demand.asset.values), ("replacement", technologies.technology.values), ) - return xr.DataArray( + search_space = xr.DataArray( np.ones(tuple(len(u[1]) for u in coords), dtype=bool), coords=coords, dims=[u[0] for u in coords], name="search_space", ) + # Only consider technologies that produce demanded commodities + demanded_commodities = (demand > 0).any("timeslice") + produces_commodity = ( + (technologies.fixed_outputs > 0) + .any(dim="region") + .rename(technology="replacement") + ) + produces_demanded_commodity = (produces_commodity * demanded_commodities).any( + "commodity" + ) + return search_space & produces_demanded_commodity + @register_initializer(name="from_assets") def initialize_from_assets( From dbbd18205489cb44d40e28e48545c3f9d6a83274 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 26 Mar 2025 15:34:46 +0000 Subject: [PATCH 02/28] New test --- tests/test_filters.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/test_filters.py b/tests/test_filters.py index 40edc423..c98744a3 100644 --- a/tests/test_filters.py +++ b/tests/test_filters.py @@ -194,12 +194,18 @@ def test_init_from_tech(demand_share, technologies, agent_market): agent = namedtuple("DummyAgent", ["tolerance"])(tolerance=1e-8) + # All technologies produce demanded commodities space = initialize_from_technologies(agent, demand_share, technologies=technologies) assert set(space.dims) == {"asset", "replacement"} assert (space.asset.values == demand_share.asset.values).all() assert (space.replacement.values == technologies.technology.values).all() assert space.all() + # No technology produces demanded commodities + technologies.fixed_outputs[:] = 0 + space = initialize_from_technologies(agent, demand_share, technologies=technologies) + assert not space.any() + def test_init_from_asset(technologies, rng): from collections import namedtuple From 85579104b1ad7e2b7be6ed081ee48c88d87cbc57 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 26 Mar 2025 15:35:19 +0000 Subject: [PATCH 03/28] Attempt to fix failing test --- src/muse/agents/agent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 15801bd9..7369b8ba 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -72,7 +72,7 @@ def filter_input( """ if "region" in dataset.dims and "region" not in kwargs: kwargs["region"] = self.region - return dataset.sel(**kwargs) + return dataset.sel(**kwargs, drop=True) @abstractmethod def next( From 783f06da9bbe4ca15f61fcc71e44d9ad50293f13 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 26 Mar 2025 15:58:41 +0000 Subject: [PATCH 04/28] Another attempt at keeping test happy --- src/muse/agents/agent.py | 2 +- src/muse/filters.py | 8 ++++---- tests/test_agents.py | 6 +++++- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 7369b8ba..15801bd9 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -72,7 +72,7 @@ def filter_input( """ if "region" in dataset.dims and "region" not in kwargs: kwargs["region"] = self.region - return dataset.sel(**kwargs, drop=True) + return dataset.sel(**kwargs) @abstractmethod def next( diff --git a/src/muse/filters.py b/src/muse/filters.py index 686417af..29942c80 100644 --- a/src/muse/filters.py +++ b/src/muse/filters.py @@ -445,11 +445,11 @@ def initialize_from_technologies( # Only consider technologies that produce demanded commodities demanded_commodities = (demand > 0).any("timeslice") - produces_commodity = ( - (technologies.fixed_outputs > 0) - .any(dim="region") - .rename(technology="replacement") + produces_commodity = (technologies.fixed_outputs > 0).rename( + technology="replacement" ) + if "region" in produces_commodity.dims: + produces_commodity = produces_commodity.any("region") produces_demanded_commodity = (produces_commodity * demanded_commodities).any( "commodity" ) diff --git a/tests/test_agents.py b/tests/test_agents.py index 6c6b9689..c7c582ea 100644 --- a/tests/test_agents.py +++ b/tests/test_agents.py @@ -144,7 +144,11 @@ def test_run_retro_agent(retro_agent, technologies, agent_market, demand_share): technologies.max_capacity_growth[:] = retro_agent.assets.capacity.sum() * 100 investment_year = int(agent_market.year[1]) - retro_agent.next(technologies.sel(year=investment_year), agent_market, demand_share) + retro_agent.next( + technologies.sel(year=investment_year).isel(region=0), + agent_market.isel(region=0), + demand_share, + ) def test_merge_assets(assets): From 3e4e985e3ccc505ae63c5490a325f0891882247c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 25 Mar 2025 09:37:02 +0000 Subject: [PATCH 05/28] Add inputs check, fix tests --- src/muse/filters.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/muse/filters.py b/src/muse/filters.py index 29942c80..5097e5b8 100644 --- a/src/muse/filters.py +++ b/src/muse/filters.py @@ -87,6 +87,7 @@ def search_space_initializer( "with_asset_technology", ] +import inspect from collections.abc import Mapping, MutableMapping, Sequence from typing import ( Any, From 4a0d5aaf1678aab5380f300fe5b85d60776689a8 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 24 Mar 2025 12:46:21 +0000 Subject: [PATCH 06/28] Refactor demand_share module to take demand directly --- src/muse/demand_share.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 1d5df20a..50bf32e2 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -486,7 +486,6 @@ def unmet_forecasted_demand( timeslice_level: str | None = None, ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" - from muse.commodities import is_enduse from muse.utilities import ( broadcast_over_assets, interpolate_capacity, @@ -495,10 +494,6 @@ def unmet_forecasted_demand( current_year, investment_year = map(int, demand.year.values) - demand = demand.where( - is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)), 0 - ) - # Calculate existing capacity capacity = interpolate_capacity( reduce_assets([agent.assets.capacity for agent in agents]), From d915db87a4dc27398950062ddd7ba420fdd7af77 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 24 Mar 2025 12:58:49 +0000 Subject: [PATCH 07/28] Split demands between subsectors --- src/muse/sectors/subsector.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 06aa3c59..76aec9b6 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -76,10 +76,13 @@ def aggregate_lp( assert "year" not in technologies.dims assert len(market.year) == 2 + # Select demands for the subsector + demands = market.consumption.sel(commodity=self.commodities) + # Split demand across agents demands = self.demand_share( agents=self.agents, - demand=market.consumption, + demand=demands, technologies=technologies, timeslice_level=self.timeslice_level, ) From be8b08fe3e48e8c39787b369d02969f690e9e795 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 24 Mar 2025 14:13:26 +0000 Subject: [PATCH 08/28] Different method for selecting commodity demands --- src/muse/sectors/subsector.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 76aec9b6..412f5029 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -76,8 +76,8 @@ def aggregate_lp( assert "year" not in technologies.dims assert len(market.year) == 2 - # Select demands for the subsector - demands = market.consumption.sel(commodity=self.commodities) + # Select commodity demands for the subsector + demands = market.consumption.where(market.commodity.isin(self.commodities), 0) # Split demand across agents demands = self.demand_share( From e59d4a8a4ed4547d48232d0442d63de844b4cb72 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 24 Mar 2025 14:45:29 +0000 Subject: [PATCH 09/28] Remove unnecessary filtering --- src/muse/demand_share.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 50bf32e2..af4bf267 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -241,7 +241,6 @@ def new_and_retro( """ from functools import partial - from muse.commodities import is_enduse from muse.quantities import maximum_production from muse.utilities import ( agent_concatenation, @@ -276,10 +275,6 @@ def decommissioning(capacity, technologies): timeslice_level=timeslice_level, ) - demands = demands.where( - is_enduse(technologies.comm_usage.sel(commodity=demands.commodity)), 0 - ) - quantity = { agent.name: agent.quantity for agent in agents if agent.category != "retrofit" } @@ -380,7 +375,6 @@ def standard_demand( """ from functools import partial - from muse.commodities import is_enduse from muse.quantities import maximum_production from muse.utilities import ( agent_concatenation, @@ -422,11 +416,6 @@ def decommissioning(capacity, technologies): timeslice_level=timeslice_level, ) - # Only consider end-use commodities - demands = demands.where( - is_enduse(technologies.comm_usage.sel(commodity=demands.commodity)), 0 - ) - id_to_share: MutableMapping[Hashable, xr.DataArray] = {} for region in demands.region.values: # Calculate current capacity From e8caf922ec8440a89e0c3596c5a084a0246f06dd Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 24 Mar 2025 15:25:25 +0000 Subject: [PATCH 10/28] Revert "Remove unnecessary filtering" This reverts commit a66388d0426c8fe186f9ef521178fd4960a72603. --- src/muse/demand_share.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index af4bf267..50bf32e2 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -241,6 +241,7 @@ def new_and_retro( """ from functools import partial + from muse.commodities import is_enduse from muse.quantities import maximum_production from muse.utilities import ( agent_concatenation, @@ -275,6 +276,10 @@ def decommissioning(capacity, technologies): timeslice_level=timeslice_level, ) + demands = demands.where( + is_enduse(technologies.comm_usage.sel(commodity=demands.commodity)), 0 + ) + quantity = { agent.name: agent.quantity for agent in agents if agent.category != "retrofit" } @@ -375,6 +380,7 @@ def standard_demand( """ from functools import partial + from muse.commodities import is_enduse from muse.quantities import maximum_production from muse.utilities import ( agent_concatenation, @@ -416,6 +422,11 @@ def decommissioning(capacity, technologies): timeslice_level=timeslice_level, ) + # Only consider end-use commodities + demands = demands.where( + is_enduse(technologies.comm_usage.sel(commodity=demands.commodity)), 0 + ) + id_to_share: MutableMapping[Hashable, xr.DataArray] = {} for region in demands.region.values: # Calculate current capacity From 87b23740214814eccf8e2d253573e17ffe1cb506 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 24 Mar 2025 15:30:41 +0000 Subject: [PATCH 11/28] Undo change --- src/muse/demand_share.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 50bf32e2..1d5df20a 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -486,6 +486,7 @@ def unmet_forecasted_demand( timeslice_level: str | None = None, ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" + from muse.commodities import is_enduse from muse.utilities import ( broadcast_over_assets, interpolate_capacity, @@ -494,6 +495,10 @@ def unmet_forecasted_demand( current_year, investment_year = map(int, demand.year.values) + demand = demand.where( + is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)), 0 + ) + # Calculate existing capacity capacity = interpolate_capacity( reduce_assets([agent.assets.capacity for agent in agents]), From 23cc31d26bf1e4a8d43a118a90c96e3d498d1d1e Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 25 Mar 2025 15:13:13 +0000 Subject: [PATCH 12/28] Shrink lp problem --- src/muse/agents/agent.py | 5 +++++ src/muse/constraints.py | 23 +++++++++-------------- src/muse/sectors/subsector.py | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 15801bd9..920ca26f 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -320,6 +320,11 @@ def next( .astype(int) ) + # Select technologies in the search space + technologies = technologies.sel( + technology=technologies.technology.isin(search_space.replacement) + ) + # Skip forward if the search space is empty if any(u == 0 for u in search_space.shape): getLogger(__name__).critical("Search space is empty") diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 64c5591c..a1c23316 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -405,10 +405,7 @@ def demand( **kwargs, ) -> Constraint: """Constraints production to meet demand.""" - from muse.commodities import is_enduse - - enduse = technologies.commodity.sel(commodity=is_enduse(technologies.comm_usage)) - b = demand.sel(commodity=demand.commodity.isin(enduse)) + b = demand if "region" in b.dims and "dst_region" in technologies.dims: b = b.rename(region="dst_region") assert "year" not in b.dims @@ -451,16 +448,8 @@ def max_production( """ from xarray import ones_like, zeros_like - from muse.commodities import is_enduse - - commodities = technologies.commodity.sel( - commodity=is_enduse(technologies.comm_usage) - ) - replacement = search_space.replacement - replacement = replacement.drop_vars( - [u for u in replacement.coords if u not in replacement.dims] - ) - kwargs = dict(technology=replacement, commodity=commodities) + commodities = demand.commodity + kwargs = dict(commodity=commodities) if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = search_space.region techs = ( @@ -1188,6 +1177,12 @@ def _unified_dataset( data[f"capacity{i}"] = -data[f"capacity{i}"] data[f"production{i}"] = -data[f"production{i}"] + # A bit of a hack as lpcosts is calculated for too many commodities, so data + # will have nans for commodities not in the constraints, which are limited to + # commodities demanded in the subsector. This seems safe for now, although + # better to limit the commodities in the lpcosts calculation. + data = data.dropna(dim="d(commodity)", how="all") + # Enusure consistent ordering of dimensions return data.transpose(*data.dims) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 412f5029..3b2ddee5 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -77,7 +77,7 @@ def aggregate_lp( assert len(market.year) == 2 # Select commodity demands for the subsector - demands = market.consumption.where(market.commodity.isin(self.commodities), 0) + demands = market.consumption.sel(commodity=self.commodities) # Split demand across agents demands = self.demand_share( From 6d65655625ff748512192d6799cddd59532f937f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 25 Mar 2025 15:23:26 +0000 Subject: [PATCH 13/28] Fix error with dimension name --- src/muse/constraints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index a1c23316..507df2ff 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -455,7 +455,7 @@ def max_production( techs = ( technologies[["fixed_outputs", "utilization_factor"]] .sel(**kwargs) - .drop_vars("technology") + .rename(technology="replacement") ) capa = distribute_timeslice( techs.fixed_outputs, level=timeslice_level From 9cfc2f0a763c2e2d0f628bc198518d84366e4dcc Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 25 Mar 2025 20:40:59 +0000 Subject: [PATCH 14/28] Simplify lp_costs --- src/muse/constraints.py | 109 +++++++++------------------------------- src/muse/investments.py | 2 - 2 files changed, 23 insertions(+), 88 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 507df2ff..3dca9af9 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -738,80 +738,29 @@ def minimum_service( ) -def lp_costs( - technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: str | None = None -) -> xr.Dataset: - """Creates costs for solving with scipy's LP solver. +def lp_costs(capacity_costs: xr.DataArray, *constraints: Constraint) -> xr.Dataset: + """Creates dataset of costs for solving with scipy's LP solver. - Example: - We can now construct example inputs to the function from the sample model. The - costs will be a matrix where each assets has a candidate replacement technology. - - >>> from muse import examples - >>> technologies = examples.technodata("residential", model="medium") - >>> search_space = examples.search_space("residential", model="medium") - >>> costs = ( - ... search_space - ... * np.arange(np.prod(search_space.shape)).reshape(search_space.shape) - ... ) - - The function returns the LP vector split along capacity and production - variables. - - >>> from muse.constraints import lp_costs - >>> lpcosts = lp_costs( - ... technologies.sel(year=2020, region="R1"), costs - ... ) - >>> assert "capacity" in lpcosts.data_vars - >>> assert "production" in lpcosts.data_vars - - The capacity costs correspond exactly to the input costs: - - >>> assert (costs == lpcosts.capacity).all() - - The production is zero in this context. It does not enter the cost function of - the LP problem: - - >>> assert (lpcosts.production == 0).all() - - They should correspond to a data-array with dimensions ``(asset, replacement)`` - (and possibly ``region`` as well). - - >>> lpcosts.capacity.dims - ('asset', 'replacement') - - The production costs are zero by default. However, the production expands over - not only the dimensions of the capacity, but also the ``timeslice`` during - which production occurs and the ``commodity`` produced. - - >>> lpcosts.production.dims - ('timeslice', 'asset', 'replacement', 'commodity') + The costs applied to the capacity decision variables are provided. No cost is + applied to the production decision variables. Thus, the production component of + the costs dataset is zero, with dimensions determined by the constraints. """ - from xarray import zeros_like - - from muse.commodities import is_enduse - - assert "year" not in technologies.dims - - selection = dict( - commodity=is_enduse(technologies.comm_usage), - technology=technologies.technology.isin(costs.replacement), - ) - - if "region" in technologies.fixed_outputs.dims and "region" in costs.coords: - selection["region"] = costs.region - fouts = technologies.fixed_outputs.sel(selection).rename(technology="replacement") - - production = zeros_like( - broadcast_timeslice(costs, level=timeslice_level) - * distribute_timeslice(fouts, level=timeslice_level) - ) - for dim in production.dims: - if isinstance(production.get_index(dim), pd.MultiIndex): - production = drop_timeslice(production) - production[dim] = pd.Index(production.get_index(dim), tupleize_cols=False) + # Get production decision variables + # This is the union of all coordinates in the production constraints + production_vars = xr.broadcast(*[c.production for c in constraints])[0] + + # Production costs are zero + production_costs = xr.zeros_like(production_vars) + + # Deal with timeslice multiindex + if "timeslice" in production_costs.dims: + production_costs = drop_timeslice(production_costs) + production_costs["timeslice"] = pd.Index( + production_costs.get_index("timeslice"), tupleize_cols=False + ) - return xr.Dataset(dict(capacity=costs, production=production)) + # Result is dataset of provided capacity costs and zero production costs + return xr.Dataset(dict(capacity=capacity_costs, production=production_costs)) def lp_constraint(constraint: Constraint, lpcosts: xr.Dataset) -> Constraint: @@ -1119,14 +1068,12 @@ class ScipyAdapter: @classmethod def factory( cls, - technologies: xr.Dataset, costs: xr.DataArray, *constraints: Constraint, - timeslice_level: str | None = None, ) -> ScipyAdapter: - lpcosts = lp_costs(technologies, costs, timeslice_level=timeslice_level) + lpcosts = lp_costs(costs, *constraints) - data = cls._unified_dataset(technologies, lpcosts, *constraints) + data = cls._unified_dataset(lpcosts, *constraints) capacities = cls._selected_quantity(data, "capacity") @@ -1153,14 +1100,10 @@ def kwargs(self): } @staticmethod - def _unified_dataset( - technologies: xr.Dataset, lpcosts: xr.Dataset, *constraints: Constraint - ) -> xr.Dataset: + def _unified_dataset(lpcosts: xr.Dataset, *constraints: Constraint) -> xr.Dataset: """Creates single xr.Dataset from costs and constraints.""" from xarray import merge - assert "year" not in technologies.dims - data = merge( [lpcosts.rename({k: f"d({k})" for k in lpcosts.dims})] + [ @@ -1177,12 +1120,6 @@ def _unified_dataset( data[f"capacity{i}"] = -data[f"capacity{i}"] data[f"production{i}"] = -data[f"production{i}"] - # A bit of a hack as lpcosts is calculated for too many commodities, so data - # will have nans for commodities not in the constraints, which are limited to - # commodities demanded in the subsector. This seems safe for now, although - # better to limit the commodities in the lpcosts calculation. - data = data.dropna(dim="d(commodity)", how="all") - # Enusure consistent ordering of dimensions return data.transpose(*data.dims) diff --git a/src/muse/investments.py b/src/muse/investments.py index a62097a9..65b3f990 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -286,10 +286,8 @@ def scipy_match_demand( # Run scipy optimization with highs solver adapter = ScipyAdapter.factory( - technologies, cast(np.ndarray, costs), *constraints, - timeslice_level=timeslice_level, ) res = linprog(**adapter.kwargs, method="highs") From 0cdca861ddf53451af8b5c1e3e2d4b2177d2d131 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 26 Mar 2025 09:28:11 +0000 Subject: [PATCH 15/28] Fix some of the tests --- src/muse/constraints.py | 77 ++++++++++++++++++++++----------------- tests/test_constraints.py | 66 ++++++++++++++++----------------- 2 files changed, 76 insertions(+), 67 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 3dca9af9..143a2c60 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -187,21 +187,27 @@ def decorated( **kwargs, ) - # Standardize constraint if constraint is not None: - if "kind" not in constraint.attrs: - constraint.attrs["kind"] = ConstraintKind.UPPER_BOUND + # Check constraint + if "b" not in constraint.data_vars: + raise RuntimeError("Constraint must contain a right-hand-side vector") if ( "capacity" not in constraint.data_vars and "production" not in constraint.data_vars ): - raise RuntimeError("Invalid constraint format") + raise RuntimeError("Constraint must contain a left-hand-side matrix") + if "capacity" in constraint.data_vars: + assert not constraint.capacity.dims == () + if "production" in constraint.data_vars: + assert not constraint.production.dims == () + + # Standardize constraint + if "kind" not in constraint.attrs: + constraint.attrs["kind"] = ConstraintKind.UPPER_BOUND if "capacity" not in constraint.data_vars: constraint["capacity"] = 0 if "production" not in constraint.data_vars: constraint["production"] = 0 - if "b" not in constraint.data_vars: - constraint["b"] = 0 if "name" not in constraint.data_vars and "name" not in constraint.attrs: constraint.attrs["name"] = function.__name__ @@ -385,7 +391,7 @@ def max_capacity_expansion( ) if b.region.dims == (): - capa = 1 + capa = xr.ones_like(b) elif "dst_region" in b.dims: b = b.rename(region="src_region") capa = search_space.agent.region == b.src_region @@ -410,7 +416,8 @@ def demand( b = b.rename(region="dst_region") assert "year" not in b.dims return xr.Dataset( - dict(b=b, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND) + dict(b=b, production=xr.ones_like(b)), + attrs=dict(kind=ConstraintKind.LOWER_BOUND), ) @@ -464,6 +471,7 @@ def max_production( capa = capa.expand_dims(asset=search_space.asset) production = ones_like(capa) b = zeros_like(production) + # Include maxaddition constraint in max production to match region-dst_region if "dst_region" in technologies.dims: b = b.expand_dims(dst_region=technologies.dst_region) @@ -476,8 +484,9 @@ def max_production( capa = capa * broadcast_timeslice(maxadd, level=timeslice_level) production = production * broadcast_timeslice(maxadd, level=timeslice_level) b = b.rename(region="src_region") + return xr.Dataset( - dict(capacity=-cast(np.ndarray, capa), production=production, b=b), + dict(capacity=-capa, production=production, b=b), attrs=dict(kind=ConstraintKind.UPPER_BOUND), ) @@ -704,22 +713,20 @@ def minimum_service( """Constructs constraint between capacity and minimum service.""" from xarray import ones_like, zeros_like - from muse.commodities import is_enduse - if "minimum_service_factor" not in technologies.data_vars: return None if np.all(technologies["minimum_service_factor"] == 0): return None - commodities = technologies.commodity.sel( - commodity=is_enduse(technologies.comm_usage) - ) - replacement = search_space.replacement - replacement = replacement.drop_vars( - [u for u in replacement.coords if u not in replacement.dims] - ) - kwargs = dict(technology=replacement, commodity=commodities) + + commodities = demand.commodity + kwargs = dict(commodity=commodities) if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = search_space.region + techs = ( + technologies[["fixed_outputs", "utilization_factor"]] + .sel(**kwargs) + .rename(technology="replacement") + ) techs = ( technologies[["fixed_outputs", "minimum_service_factor"]] .sel(**kwargs) @@ -732,8 +739,9 @@ def minimum_service( capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) b = zeros_like(production) + return xr.Dataset( - dict(capacity=-cast(np.ndarray, capacity), production=production, b=b), + dict(capacity=-capacity, production=production, b=b), attrs=dict(kind=ConstraintKind.LOWER_BOUND), ) @@ -745,19 +753,22 @@ def lp_costs(capacity_costs: xr.DataArray, *constraints: Constraint) -> xr.Datas applied to the production decision variables. Thus, the production component of the costs dataset is zero, with dimensions determined by the constraints. """ - # Get production decision variables - # This is the union of all coordinates in the production constraints - production_vars = xr.broadcast(*[c.production for c in constraints])[0] - - # Production costs are zero - production_costs = xr.zeros_like(production_vars) - - # Deal with timeslice multiindex - if "timeslice" in production_costs.dims: - production_costs = drop_timeslice(production_costs) - production_costs["timeslice"] = pd.Index( - production_costs.get_index("timeslice"), tupleize_cols=False - ) + if constraints: + # Get production decision variables + # This is the union of all coordinates in the production constraints + production_vars = xr.broadcast(*[c.production for c in constraints])[0] + + # Production costs are zero + production_costs = xr.zeros_like(production_vars) + + # Deal with timeslice multiindex + if "timeslice" in production_costs.dims: + production_costs = drop_timeslice(production_costs) + production_costs["timeslice"] = pd.Index( + production_costs.get_index("timeslice"), tupleize_cols=False + ) + else: + production_costs = None # Result is dataset of provided capacity costs and zero production costs return xr.Dataset(dict(capacity=capacity_costs, production=production_costs)) diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 67b4f45b..9bceccc9 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -79,10 +79,10 @@ def test_fixtures(technologies, search_space, costs, assets, capacity, market_de @fixture -def lpcosts(technologies, costs): +def lpcosts(costs, constraints): from muse.constraints import lp_costs - return lp_costs(technologies, costs=costs) + return lp_costs(costs, *constraints) @fixture @@ -216,10 +216,10 @@ def test_lp_constraint(constraint, lpcosts): assert result.b.values == approx(0) -def test_to_scipy_adapter_maxprod(technologies, costs, max_production): +def test_to_scipy_adapter_maxprod(costs, max_production): from muse.constraints import ScipyAdapter, lp_costs - adapter = ScipyAdapter.factory(technologies, costs, max_production) + adapter = ScipyAdapter.factory(costs, max_production) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_eq is None @@ -230,7 +230,7 @@ def test_to_scipy_adapter_maxprod(technologies, costs, max_production): assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(costs, max_production) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -239,10 +239,10 @@ def test_to_scipy_adapter_maxprod(technologies, costs, max_production): assert adapter.A_ub[:, capsize:] == approx(np.eye(prodsize)) -def test_to_scipy_adapter_demand(technologies, costs, demand_constraint): +def test_to_scipy_adapter_demand(costs, demand_constraint): from muse.constraints import ScipyAdapter, lp_costs - adapter = ScipyAdapter.factory(technologies, costs, demand_constraint) + adapter = ScipyAdapter.factory(costs, demand_constraint) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -255,7 +255,7 @@ def test_to_scipy_adapter_demand(technologies, costs, demand_constraint): assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(costs, demand_constraint) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -264,16 +264,14 @@ def test_to_scipy_adapter_demand(technologies, costs, demand_constraint): == lpcosts.commodity.size * lpcosts.timeslice.size * lpcosts.asset.size ) assert adapter.A_ub[:, :capsize] == approx(0) - assert adapter.A_ub[:, capsize:].sum(axis=1) == approx(-lpcosts.replacement.size) + assert adapter.A_ub[:, capsize:].shape[0] == lpcosts.production.size assert set(adapter.A_ub[:, capsize:].flatten()) == {0.0, -1.0} -def test_to_scipy_adapter_max_capacity_expansion( - technologies, costs, max_capacity_expansion -): +def test_to_scipy_adapter_max_capacity_expansion(costs, max_capacity_expansion): from muse.constraints import ScipyAdapter, lp_costs - adapter = ScipyAdapter.factory(technologies, costs, max_capacity_expansion) + adapter = ScipyAdapter.factory(costs, max_capacity_expansion) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -287,7 +285,7 @@ def test_to_scipy_adapter_max_capacity_expansion( assert adapter.c.size == adapter.A_ub.shape[1] assert adapter.c.ndim == 1 - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(costs, max_capacity_expansion) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -297,10 +295,10 @@ def test_to_scipy_adapter_max_capacity_expansion( assert set(adapter.A_ub[:, :capsize].flatten()) == {0.0, 1.0} -def test_to_scipy_adapter_no_constraint(technologies, costs): +def test_to_scipy_adapter_no_constraint(costs): from muse.constraints import ScipyAdapter, lp_costs - adapter = ScipyAdapter.factory(technologies, costs) + adapter = ScipyAdapter.factory(costs) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is None @@ -309,17 +307,17 @@ def test_to_scipy_adapter_no_constraint(technologies, costs): assert adapter.b_eq is None assert adapter.c.ndim == 1 - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize -def test_back_to_muse_capacity(technologies, costs): +def test_back_to_muse_capacity(costs, constraints): from muse.constraints import ScipyAdapter, lp_costs - lpcosts = lp_costs(technologies, costs) - data = ScipyAdapter._unified_dataset(technologies, lpcosts) + lpcosts = lp_costs(costs, *constraints) + data = ScipyAdapter._unified_dataset(lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "capacity") assert set(lpquantity.dims) == {"d(asset)", "d(replacement)"} copy = ScipyAdapter._back_to_muse_quantity( @@ -328,11 +326,11 @@ def test_back_to_muse_capacity(technologies, costs): assert (copy == lpcosts.capacity).all() -def test_back_to_muse_production(technologies, costs): +def test_back_to_muse_production(costs, constraints): from muse.constraints import ScipyAdapter, lp_costs - lpcosts = lp_costs(technologies, costs) - data = ScipyAdapter._unified_dataset(technologies, lpcosts) + lpcosts = lp_costs(costs, *constraints) + data = ScipyAdapter._unified_dataset(lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "production") assert set(lpquantity.dims) == { "d(asset)", @@ -346,12 +344,12 @@ def test_back_to_muse_production(technologies, costs): assert (copy == lpcosts.production).all() -def test_back_to_muse_all(technologies, costs, rng: np.random.Generator): +def test_back_to_muse_all(costs, rng: np.random.Generator, constraints): from muse.constraints import ScipyAdapter, lp_costs - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(costs, *constraints) - data = ScipyAdapter._unified_dataset(technologies, lpcosts) + data = ScipyAdapter._unified_dataset(lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") lpproduction = ScipyAdapter._selected_quantity(data, "production") @@ -376,12 +374,12 @@ def test_back_to_muse_all(technologies, costs, rng: np.random.Generator): assert (copy.production == lpcosts.production).all() -def test_scipy_adapter_back_to_muse(technologies, costs, rng): +def test_scipy_adapter_back_to_muse(costs, rng, constraints): from muse.constraints import ScipyAdapter, lp_costs - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(costs, *constraints) - data = ScipyAdapter._unified_dataset(technologies, lpcosts) + data = ScipyAdapter._unified_dataset(lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") lpproduction = ScipyAdapter._selected_quantity(data, "production") @@ -398,7 +396,7 @@ def test_scipy_adapter_back_to_muse(technologies, costs, rng): ) ) - adapter = ScipyAdapter.factory(technologies, costs) + adapter = ScipyAdapter.factory(costs) assert (adapter.to_muse(x).capacity == lpcosts.capacity).all() assert (adapter.to_muse(x).production == lpcosts.production).all() @@ -414,10 +412,10 @@ def _as_list(data: Union[xr.DataArray, xr.Dataset]) -> Union[xr.DataArray, xr.Da return data -def test_scipy_adapter_standard_constraints(technologies, costs, constraints): +def test_scipy_adapter_standard_constraints(costs, constraints): from muse.constraints import ScipyAdapter - adapter = ScipyAdapter.factory(technologies, costs, *constraints) + adapter = ScipyAdapter.factory(costs, *constraints) maxprod = next(cs for cs in constraints if cs.name == "max_production") maxcapa = next(cs for cs in constraints if cs.name == "max capacity expansion") demand = next(cs for cs in constraints if cs.name == "demand") @@ -442,7 +440,7 @@ def test_scipy_solver(technologies, costs, constraints): def test_minimum_service( - market_demand, capacity, search_space, technologies, costs, constraints + market_demand, capacity, search_space, technologies, constraints ): from muse.constraints import minimum_service @@ -468,7 +466,7 @@ def test_minimum_service( def test_max_capacity_expansion(max_capacity_expansion): - assert max_capacity_expansion.capacity == 1 + assert (max_capacity_expansion.capacity == 1).all() assert max_capacity_expansion.production == 0 assert max_capacity_expansion.b.dims == ("replacement",) assert max_capacity_expansion.b.shape == (4,) From 44f3d299eb5f301497785ac80cf1c5e528ccc998 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 26 Mar 2025 10:40:40 +0000 Subject: [PATCH 16/28] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/muse/filters.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/muse/filters.py b/src/muse/filters.py index 5097e5b8..29942c80 100644 --- a/src/muse/filters.py +++ b/src/muse/filters.py @@ -87,7 +87,6 @@ def search_space_initializer( "with_asset_technology", ] -import inspect from collections.abc import Mapping, MutableMapping, Sequence from typing import ( Any, From a8407481dad940f0fb6992673a188d5bd3539901 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 26 Mar 2025 11:00:37 +0000 Subject: [PATCH 17/28] Mandatory kwargs --- src/muse/constraints.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 143a2c60..3cb22d51 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -257,7 +257,7 @@ def constraints( capacity: xr.DataArray, search_space: xr.DataArray, technologies: xr.Dataset, - timeslice_level: str | None = None, + **kwargs, ) -> list[Constraint]: constraints = [ function( @@ -265,7 +265,7 @@ def constraints( capacity=capacity, search_space=search_space, technologies=technologies, - timeslice_level=timeslice_level, + **kwargs, ) for function in constraint_closures ] @@ -445,6 +445,7 @@ def max_production( capacity: xr.DataArray, search_space: xr.DataArray, technologies: xr.Dataset, + *, timeslice_level: str | None = None, **kwargs, ) -> Constraint: @@ -497,6 +498,7 @@ def demand_limiting_capacity( capacity: xr.DataArray, search_space: xr.DataArray, technologies: xr.Dataset, + *, timeslice_level: str | None = None, **kwargs, ) -> Constraint: @@ -707,6 +709,7 @@ def minimum_service( capacity: xr.DataArray, search_space: xr.DataArray, technologies: xr.Dataset, + *, timeslice_level: str | None = None, **kwargs, ) -> Constraint | None: From 3cfff5e196461852a77361f39a5cac368e212a3d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 26 Mar 2025 19:38:52 +0000 Subject: [PATCH 18/28] Revert some changes --- src/muse/constraints.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 3cb22d51..4a7638b3 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -196,10 +196,6 @@ def decorated( and "production" not in constraint.data_vars ): raise RuntimeError("Constraint must contain a left-hand-side matrix") - if "capacity" in constraint.data_vars: - assert not constraint.capacity.dims == () - if "production" in constraint.data_vars: - assert not constraint.production.dims == () # Standardize constraint if "kind" not in constraint.attrs: @@ -391,7 +387,7 @@ def max_capacity_expansion( ) if b.region.dims == (): - capa = xr.ones_like(b) + capa = 1 elif "dst_region" in b.dims: b = b.rename(region="src_region") capa = search_space.agent.region == b.src_region @@ -416,7 +412,7 @@ def demand( b = b.rename(region="dst_region") assert "year" not in b.dims return xr.Dataset( - dict(b=b, production=xr.ones_like(b)), + dict(b=b, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND), ) From f9016861dee2461c13b7858be64da3a85ec41fed Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 27 Mar 2025 14:05:50 +0000 Subject: [PATCH 19/28] Small changes --- src/muse/constraints.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 4a7638b3..797ea6b0 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -412,8 +412,7 @@ def demand( b = b.rename(region="dst_region") assert "year" not in b.dims return xr.Dataset( - dict(b=b, production=1), - attrs=dict(kind=ConstraintKind.LOWER_BOUND), + dict(b=b, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND) ) @@ -726,11 +725,6 @@ def minimum_service( .sel(**kwargs) .rename(technology="replacement") ) - techs = ( - technologies[["fixed_outputs", "minimum_service_factor"]] - .sel(**kwargs) - .drop_vars("technology") - ) capacity = distribute_timeslice( techs.fixed_outputs, level=timeslice_level ) * broadcast_timeslice(techs.minimum_service_factor, level=timeslice_level) From 2405d7f6f4e56ff1c572ef766b53d24aaae2d445 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 27 Mar 2025 14:07:57 +0000 Subject: [PATCH 20/28] Fix msf mistake --- src/muse/constraints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index e364ecdd..c6f90951 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -739,7 +739,7 @@ def minimum_service( if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = search_space.region techs = ( - technologies[["fixed_outputs", "utilization_factor"]] + technologies[["fixed_outputs", "minimum_service_factor"]] .sel(**kwargs) .rename(technology="replacement") ) From c417a2323761c7929be6e7120c50a44820c387b7 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 27 Mar 2025 16:25:53 +0000 Subject: [PATCH 21/28] Fix some tests --- src/muse/constraints.py | 4 +++- tests/test_constraints.py | 25 ++----------------------- 2 files changed, 5 insertions(+), 24 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index c6f90951..f7b57598 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -191,6 +191,7 @@ def decorated( # Check constraint if "b" not in constraint.data_vars: raise RuntimeError("Constraint must contain a right-hand-side vector") + assert not constraint.b.dims == () if ( "capacity" not in constraint.data_vars and "production" not in constraint.data_vars @@ -430,7 +431,8 @@ def demand( b = b.rename(region="dst_region") assert "year" not in b.dims return xr.Dataset( - dict(b=b, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND) + dict(b=b, production=xr.ones_like(b)), + attrs=dict(kind=ConstraintKind.LOWER_BOUND), ) diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 374f45ab..9ee99d5f 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -167,7 +167,7 @@ def test_constraints_dimensions( # Demand constraint assert set(demand_constraint.capacity.dims) == set() - assert set(demand_constraint.production.dims) == set() + assert set(demand_constraint.production.dims) == {"asset", "commodity", "timeslice"} assert set(demand_constraint.b.dims) == {"asset", "commodity", "timeslice"} # Demand limiting capacity constraint @@ -185,27 +185,6 @@ def test_constraints_dimensions( assert set(max_capacity_expansion.b.dims) == {"replacement"} -def test_lp_constraints_matrix_b_is_scalar(constraint, lpcosts): - """B is a scalar. - - When ``b`` is a scalar, the output should be equivalent to a single row matrix, or a - single vector with only decision variables. - """ - from muse.constraints import lp_constraint_matrix - - lpconstraint = lp_constraint_matrix( - xr.DataArray(1), constraint.capacity, lpcosts.capacity - ) - assert lpconstraint.values == approx(-1) - assert set(lpconstraint.dims) == {f"d({x})" for x in lpcosts.capacity.dims} - - lpconstraint = lp_constraint_matrix( - xr.DataArray(1), constraint.production, lpcosts.production - ) - assert lpconstraint.values == approx(1) - assert set(lpconstraint.dims) == {f"d({x})" for x in lpcosts.production.dims} - - def test_max_production_constraint_diagonal(constraint, lpcosts): """Production side of max capacity production is diagonal. @@ -439,7 +418,7 @@ def test_scipy_adapter_back_to_muse(costs, rng, constraints): ) ) - adapter = ScipyAdapter.factory(costs) + adapter = ScipyAdapter.factory(costs, *constraints) assert (adapter.to_muse(x).capacity == lpcosts.capacity).all() assert (adapter.to_muse(x).production == lpcosts.production).all() From a233991afc5d22558fd58cb0d2abd3dd916bcb5d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 27 Mar 2025 17:03:14 +0000 Subject: [PATCH 22/28] Suppress some tests --- tests/test_agents.py | 3 ++- tests/test_trade.py | 28 ---------------------------- 2 files changed, 2 insertions(+), 29 deletions(-) diff --git a/tests/test_agents.py b/tests/test_agents.py index 49dab110..d1b97b44 100644 --- a/tests/test_agents.py +++ b/tests/test_agents.py @@ -1,6 +1,6 @@ """Test buildings agents.""" -from pytest import fixture +from pytest import fixture, mark @fixture @@ -137,6 +137,7 @@ def test_issue_835_and_842(agent_args, technologies, stock): assert "commodity" not in agent.assets.dims +@mark.xfail(reason="Retrofit agents will be deprecated.") def test_run_retro_agent(retro_agent, technologies, agent_market, demand_share): # make sure capacity limits are not reached technologies.total_capacity_limit[:] = retro_agent.assets.capacity.sum() * 100 diff --git a/tests/test_trade.py b/tests/test_trade.py index 3db85642..14f51c73 100644 --- a/tests/test_trade.py +++ b/tests/test_trade.py @@ -1,8 +1,6 @@ from collections.abc import Mapping from typing import Any -import numpy as np -import xarray as xr from pytest import approx, fixture @@ -92,32 +90,6 @@ def test_search_space(constraints_args): assert set(constraint.agent.coords) == {"region", "agent"} -def test_lp_costs(): - from muse import examples - from muse.constraints import lp_costs - - technologies = examples.technodata("power", model="trade") - search_space = examples.search_space("power", model="trade") - costs = ( - search_space - * np.arange(np.prod(search_space.shape)).reshape(search_space.shape) - * xr.ones_like(technologies.dst_region) - ) - - lpcosts = lp_costs(technologies.sel(year=2020, drop=True), costs) - assert "capacity" in lpcosts.data_vars - assert "production" in lpcosts.data_vars - assert set(lpcosts.capacity.dims) == {"agent", "replacement", "dst_region"} - assert set(lpcosts.production.dims) == { - "agent", - "replacement", - "dst_region", - "timeslice", - "commodity", - } - assert set(lpcosts.agent.coords) == {"region", "agent"} - - def test_power_sector_no_investment(): from muse import examples from muse.utilities import agent_concatenation From 86e5a9c9b1b1fe4bd97792c61baf73ccf6328339 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 28 Mar 2025 09:25:15 +0000 Subject: [PATCH 23/28] Inline comments and docstrings to clarify code --- src/muse/constraints.py | 171 +++++++++++++++++++++++++++++++--------- 1 file changed, 135 insertions(+), 36 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index f7b57598..0b66c05f 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -809,21 +809,33 @@ def lp_constraint(constraint: Constraint, lpcosts: xr.Dataset) -> Constraint: of the transformations applied here. """ constraint = constraint.copy(deep=False) - for dim in constraint.dims: - if isinstance(constraint.get_index(dim), pd.MultiIndex): - constraint = drop_timeslice(constraint) - constraint[dim] = pd.Index(constraint.get_index(dim), tupleize_cols=False) + + # Deal with timeslice multiindex + if "timeslice" in constraint.dims: + constraint = drop_timeslice(constraint) + constraint["timeslice"] = pd.Index( + constraint.get_index("timeslice"), tupleize_cols=False + ) + + # Rename dimensions in b b = constraint.b.drop_vars(set(constraint.b.coords) - set(constraint.b.dims)) b = b.rename({k: f"c({k})" for k in b.dims}) + + # Create capacity constraint matrix capacity = lp_constraint_matrix(constraint.b, constraint.capacity, lpcosts.capacity) capacity = capacity.drop_vars(set(capacity.coords) - set(capacity.dims)) + + # Create production constraint matrix production = lp_constraint_matrix( constraint.b, constraint.production, lpcosts.production ) production = production.drop_vars(set(production.coords) - set(production.dims)) - return xr.Dataset( + + # Combine data + result = xr.Dataset( {"b": b, "capacity": capacity, "production": production}, attrs=constraint.attrs ) + return result def lp_constraint_matrix( @@ -928,30 +940,33 @@ def lp_constraint_matrix( from numpy import eye + # Sum over all dimensions that are not in the constraint or the decision variables result = constraint.sum(set(constraint.dims) - set(lpcosts.dims) - set(b.dims)) + # Rename dimensions for decision variables result = result.rename( {k: f"d({k})" for k in set(result.dims).intersection(lpcosts.dims)} ) + + # Rename dimensions for constraints result = result.rename( {k: f"c({k})" for k in set(result.dims).intersection(b.dims)} ) - expand = set(lpcosts.dims) - set(constraint.dims) - set(b.dims) - - if expand == {"timeslice", "asset", "commodity"}: - expand = ["asset", "timeslice", "commodity"] + # Expand dimensions that are in the decision variables but not in the constraint + expand = set(lpcosts.dims) - set(constraint.dims) - set(b.dims) result = result.expand_dims( {f"d({k})": lpcosts[k].rename({k: f"d({k})"}).set_index() for k in expand} ) - expand = set(b.dims) - set(constraint.dims) - set(lpcosts.dims) + # Expand dimensions that are in the constraint but not in the decision variables + expand = set(b.dims) - set(constraint.dims) - set(lpcosts.dims) result = result.expand_dims( {f"c({k})": b[k].rename({k: f"c({k})"}).set_index() for k in expand} ) + # Dimensions that are in both the decision variables and the constraint diag_dims = set(b.dims).intersection(lpcosts.dims) - diag_dims = sorted(diag_dims) if diag_dims: @@ -1094,20 +1109,30 @@ def factory( costs: xr.DataArray, *constraints: Constraint, ) -> ScipyAdapter: + # Calculate costs for the linear problem lpcosts = lp_costs(costs, *constraints) + # Create dataset from costs and constraints data = cls._unified_dataset(lpcosts, *constraints) + # Get capacity constraint matrix / costs capacities = cls._selected_quantity(data, "capacity") + # Get production constraint matrix / costs productions = cls._selected_quantity(data, "production") + # Get constraint vector bs = cls._selected_quantity(data, "b") + # Prepare scipy adapter from constraints kwargs = cls._to_scipy_adapter(capacities, productions, bs, *constraints) def to_muse(x: np.ndarray) -> xr.Dataset: - return ScipyAdapter._back_to_muse(x, capacities.costs, productions.costs) + return ScipyAdapter._back_to_muse( + x, + capacity_template=capacities.costs, + production_template=productions.costs, + ) return ScipyAdapter(to_muse=to_muse, **kwargs) @@ -1127,16 +1152,26 @@ def _unified_dataset(lpcosts: xr.Dataset, *constraints: Constraint) -> xr.Datase """Creates single xr.Dataset from costs and constraints.""" from xarray import merge - data = merge( - [lpcosts.rename({k: f"d({k})" for k in lpcosts.dims})] - + [ - lp_constraint(constraint, lpcosts).rename( - b=f"b{i}", capacity=f"capacity{i}", production=f"production{i}" - ) - for i, constraint in enumerate(constraints) - ] - ) + # Reformat constraints to lp format + lp_constraints = [ + lp_constraint(constraint, lpcosts) for constraint in constraints + ] + # Rename variables in lp constraints + lp_constraints = [ + constraint.rename( + b=f"b{i}", capacity=f"capacity{i}", production=f"production{i}" + ) + for i, constraint in enumerate(lp_constraints) + ] + + # Rename dimensions in lpcosts + lpcosts = lpcosts.rename({k: f"d({k})" for k in lpcosts.dims}) + + # Merge data + data = merge([lpcosts, *lp_constraints]) + + # An adjustment is required for lower bound constraints for i, constraint in enumerate(constraints): if constraint.kind == ConstraintKind.LOWER_BOUND: data[f"b{i}"] = -data[f"b{i}"] @@ -1148,10 +1183,12 @@ def _unified_dataset(lpcosts: xr.Dataset, *constraints: Constraint) -> xr.Datase @staticmethod def _selected_quantity(data: xr.Dataset, name: str) -> xr.Dataset: + # Select data for the specified quantity ("capacity", "production", or "b") result = cast( xr.Dataset, data[[u for u in data.data_vars if str(u).startswith(name)]] ) + # Rename variables ("costs" for the costs variable, 0/1/2 etc. for constraints) return result.rename( { k: ("costs" if k == name else int(str(k).replace(name, ""))) @@ -1163,52 +1200,87 @@ def _selected_quantity(data: xr.Dataset, name: str) -> xr.Dataset: def _to_scipy_adapter( capacities: xr.Dataset, productions: xr.Dataset, bs: xr.Dataset, *constraints ): + """Converts constraints to scipy format. + + The constraints are converted to a format that can be used by scipy's linear + programming solver. The constraints are converted to a 2D matrix of constraints + vs decision variables. The constraints are then converted to a dictionary that + can be used by scipy's linear programming solver. + + Args: + capacities: Dataset with decision variables for capacity constraints. + productions: Dataset with decision variables for production constraints. + bs: Dataset with constraints. + *constraints: List of constraints. + + Returns: + Dictionary with constraints in a format that can be used by scipy's linear + programming solver. + """ + def reshape(matrix: xr.DataArray) -> np.ndarray: + """Convert constraints matrix to a 2D np array.""" + # Before building LP we need to sort dimensions for consistency if list(matrix.dims) != sorted(matrix.dims): - new_dims = sorted(matrix.dims) - matrix = matrix.transpose(*new_dims) - - # before building LP we need to sort dimensions for consistency + matrix = matrix.transpose(*sorted(matrix.dims)) + # Size of the first dimension + # This dimension represents the number of constraints size = np.prod( [matrix[u].shape[0] for u in matrix.dims if str(u).startswith("c")] ) + # Reshape into a 2D array: N constrians x N decision variables return matrix.values.reshape((size, -1)) - def extract_bA(constraints, *kinds): + def extract_bA(constraints, *kinds: ConstraintKind): + """Extracts A and b for constraints of specified kinds.""" + # Get indices of constraints of the specified kind indices = [i for i in range(len(bs)) if constraints[i].kind in kinds] + + # Convert constraints matrices to 2d np arrays capa_constraints = [reshape(capacities[i]) for i in indices] prod_constraints = [reshape(productions[i]) for i in indices] + + # Convert constraints vectors to 1d + constraints_vectors = [ + bs[i].stack(constraint=sorted(bs[i].dims)) for i in indices + ] + + # Concatenate constraints if capa_constraints: - A: np.ndarray | None = np.concatenate( + A = np.concatenate( ( np.concatenate(capa_constraints, axis=0), np.concatenate(prod_constraints, axis=0), ), axis=1, ) - b: np.ndarray | None = np.concatenate( - [bs[i].stack(constraint=sorted(bs[i].dims)) for i in indices], - axis=0, - ) + b = np.concatenate(constraints_vectors, axis=0) else: + # If there are no constraints of the given kind, return None A = None b = None return A, b + # Create costs vector by concatenating capacity and production costs c = np.concatenate( ( - cast(np.ndarray, capacities["costs"].values).flatten(), - cast(np.ndarray, productions["costs"].values).flatten(), + capacities["costs"].values.flatten(), + productions["costs"].values.flatten(), ), axis=0, ) + + # Extract A and b for inequality constraints A_ub, b_ub = extract_bA( constraints, ConstraintKind.UPPER_BOUND, ConstraintKind.LOWER_BOUND ) + + # Extract A and b for equality constraints A_eq, b_eq = extract_bA(constraints, ConstraintKind.EQUALITY) + # Prepare scipy adapter return { "c": c, "A_ub": A_ub, @@ -1222,15 +1294,42 @@ def extract_bA(constraints, *kinds): def _back_to_muse_quantity( x: np.ndarray, template: xr.DataArray | xr.Dataset ) -> xr.DataArray: + """Convert a vector of decision variables to a DataArray. + + Args: + x: 1D vector of decision variables, outputted from the scipy solver. + template: Template for the decision variables. This may be for either + capacity or production variables. + """ + # First create a multidimensional dataarray based on the template result = xr.DataArray( x.reshape(template.shape), coords=template.coords, dims=template.dims ) + + # Then rename the dimensions (e.g. "d(asset)" -> "asset") return result.rename({k: str(k)[2:-1] for k in result.dims}) @staticmethod def _back_to_muse( - x: np.ndarray, capacity: xr.DataArray, production: xr.DataArray + x: np.ndarray, + capacity_template: xr.DataArray, + production_template: xr.DataArray, ) -> xr.Dataset: - capa = ScipyAdapter._back_to_muse_quantity(x[: capacity.size], capacity) - prod = ScipyAdapter._back_to_muse_quantity(x[capacity.size :], production) + """Convert the full set of decision variables to a Dataset. + + This must have capacity variables first, followed by production variables. + + Args: + x: 1D vector of decision variables, outputted from the scipy solver. + capacity_template: Template for the capacity decision variables. + production_template: Template for the production decision variables. + """ + n_capa = capacity_template.size # number of capacity decision variables + + capa = ScipyAdapter._back_to_muse_quantity( + x[:n_capa], template=capacity_template + ) + prod = ScipyAdapter._back_to_muse_quantity( + x[n_capa:], template=production_template + ) return xr.Dataset({"capacity": capa, "production": prod}) From 1efae06c937ca98627886aa7ac110743ecf4e6fe Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 28 Mar 2025 10:56:25 +0000 Subject: [PATCH 24/28] New lpcosts method --- src/muse/agents/agent.py | 1 + src/muse/constraints.py | 76 +++++++++++++++++++++++++------------ src/muse/investments.py | 25 ++++++++---- tests/test_constraints.py | 80 +++++++++++++++++++-------------------- 4 files changed, 111 insertions(+), 71 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 920ca26f..d5f08549 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -366,6 +366,7 @@ def next( search=search[["search_space", "decision"]], technologies=technologies, constraints=constraints, + commodities=list(demand.commodity.values), timeslice_level=self.timeslice_level, ) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 0b66c05f..90651d56 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -431,8 +431,7 @@ def demand( b = b.rename(region="dst_region") assert "year" not in b.dims return xr.Dataset( - dict(b=b, production=xr.ones_like(b)), - attrs=dict(kind=ConstraintKind.LOWER_BOUND), + dict(b=b, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND) ) @@ -759,29 +758,52 @@ def minimum_service( ) -def lp_costs(capacity_costs: xr.DataArray, *constraints: Constraint) -> xr.Dataset: +def lp_costs( + capacity_costs: xr.DataArray, + commodities: list[str], + timeslice_level: str | None = None, +) -> xr.Dataset: """Creates dataset of costs for solving with scipy's LP solver. - The costs applied to the capacity decision variables are provided. No cost is - applied to the production decision variables. Thus, the production component of - the costs dataset is zero, with dimensions determined by the constraints. + Importantly, this also defines the decision variables in the linear program. + + The costs applied to the capacity decision variables are provided. This should + have dimensions "asset" and "replacement". In other words, capacity addition + is solved for each replacement technology for each existing asset. + + No cost is applied to the production decision variables. Thus, the production + component of the resulting dataset is zero, with dimensions determining the + production decision variables. This will have dimensions "asset", "replacement", + "commodity", and "timeslice". In other words, production is solved for each + replacement technology for each existing asset, for each commodity, and for each + timeslice. + + Args: + capacity_costs: DataArray with dimensions "asset" and "replacement" defining the + costs of adding capacity to the system. + commodities: List of commodities to create production decision variables for. + timeslice_level: The timeslice level of the linear problem. """ - if constraints: - # Get production decision variables - # This is the union of all coordinates in the production constraints - production_vars = xr.broadcast(*[c.production for c in constraints])[0] - - # Production costs are zero - production_costs = xr.zeros_like(production_vars) - - # Deal with timeslice multiindex - if "timeslice" in production_costs.dims: - production_costs = drop_timeslice(production_costs) - production_costs["timeslice"] = pd.Index( - production_costs.get_index("timeslice"), tupleize_cols=False - ) - else: - production_costs = None + assert set(capacity_costs.dims) == {"asset", "replacement"} + + # Start with capacity costs as template (defines "asset" and "replacement" dims) + production_costs = xr.zeros_like(capacity_costs) + + # Add a "timeslice" dimension, convert multiindex to single index + production_costs = broadcast_timeslice(production_costs, level=timeslice_level) + production_costs = drop_timeslice(production_costs) + production_costs["timeslice"] = pd.Index( + production_costs.get_index("timeslice"), tupleize_cols=False + ) + + # Add a "commodity" dimension + production_costs = production_costs.expand_dims(commodity=commodities) + assert set(production_costs.dims) == { + "asset", + "replacement", + "commodity", + "timeslice", + } # Result is dataset of provided capacity costs and zero production costs return xr.Dataset(dict(capacity=capacity_costs, production=production_costs)) @@ -1107,10 +1129,16 @@ class ScipyAdapter: def factory( cls, costs: xr.DataArray, - *constraints: Constraint, + constraints: list[Constraint], + commodities: list[str], + timeslice_level: str | None = None, ) -> ScipyAdapter: # Calculate costs for the linear problem - lpcosts = lp_costs(costs, *constraints) + lpcosts = lp_costs( + capacity_costs=costs, + commodities=commodities, + timeslice_level=timeslice_level, + ) # Create dataset from costs and constraints data = cls._unified_dataset(lpcosts, *constraints) diff --git a/src/muse/investments.py b/src/muse/investments.py index 65b3f990..d8c38203 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -95,7 +95,13 @@ def decorated( constraints: list[Constraint], **kwargs, ) -> xr.DataArray: - result = function(costs, search_space, technologies, constraints, **kwargs) + result = function( + costs=costs, + search_space=search_space, + technologies=technologies, + constraints=constraints, + **kwargs, + ) if isinstance(result, xr.Dataset): investment = result["capacity"].rename("investment") @@ -146,10 +152,10 @@ def compute_investment( # Otherwise, compute the investment return investment( - search.decision, - search.search_space, - technologies, - constraints, + costs=search.decision, + search_space=search.search_space, + technologies=technologies, + constraints=constraints, **params, **kwargs, ).rename("investment") @@ -220,6 +226,7 @@ def adhoc_match_demand( search_space: xr.DataArray, technologies: xr.Dataset, constraints: list[Constraint], + *, timeslice_level: str | None = None, ) -> xr.DataArray: from muse.demand_matching import demand_matching @@ -271,6 +278,8 @@ def scipy_match_demand( search_space: xr.DataArray, technologies: xr.Dataset, constraints: list[Constraint], + *, + commodities: list[str], timeslice_level: str | None = None, ) -> xr.DataArray: from logging import getLogger @@ -286,8 +295,10 @@ def scipy_match_demand( # Run scipy optimization with highs solver adapter = ScipyAdapter.factory( - cast(np.ndarray, costs), - *constraints, + costs=costs, + constraints=constraints, + commodities=commodities, + timeslice_level=timeslice_level, ) res = linprog(**adapter.kwargs, method="highs") diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 9ee99d5f..cf08e9ad 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -69,6 +69,11 @@ def market_demand(assets, technologies): ) +@fixture +def commodities(market_demand): + return list(market_demand.commodity.values) + + def test_fixtures(technologies, search_space, costs, assets, capacity, market_demand): assert set(technologies.dims) == {"technology", "commodity"} assert set(search_space.dims) == {"asset", "replacement"} @@ -79,10 +84,10 @@ def test_fixtures(technologies, search_space, costs, assets, capacity, market_de @fixture -def lpcosts(costs, constraints): +def lpcosts(costs, commodities): from muse.constraints import lp_costs - return lp_costs(costs, *constraints) + return lp_costs(costs, commodities=commodities) @fixture @@ -238,10 +243,12 @@ def test_lp_constraint(constraint, lpcosts): assert result.b.values == approx(0) -def test_to_scipy_adapter_maxprod(costs, max_production): - from muse.constraints import ScipyAdapter, lp_costs +def test_to_scipy_adapter_maxprod(costs, max_production, commodities, lpcosts): + from muse.constraints import ScipyAdapter - adapter = ScipyAdapter.factory(costs, max_production) + adapter = ScipyAdapter.factory( + costs, constraints=[max_production], commodities=commodities + ) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_eq is None @@ -252,7 +259,6 @@ def test_to_scipy_adapter_maxprod(costs, max_production): assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(costs, max_production) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -261,10 +267,12 @@ def test_to_scipy_adapter_maxprod(costs, max_production): assert adapter.A_ub[:, capsize:] == approx(np.eye(prodsize)) -def test_to_scipy_adapter_demand(costs, demand_constraint): - from muse.constraints import ScipyAdapter, lp_costs +def test_to_scipy_adapter_demand(costs, demand_constraint, commodities, lpcosts): + from muse.constraints import ScipyAdapter - adapter = ScipyAdapter.factory(costs, demand_constraint) + adapter = ScipyAdapter.factory( + costs, constraints=[demand_constraint], commodities=commodities + ) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -277,7 +285,6 @@ def test_to_scipy_adapter_demand(costs, demand_constraint): assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(costs, demand_constraint) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -290,10 +297,14 @@ def test_to_scipy_adapter_demand(costs, demand_constraint): assert set(adapter.A_ub[:, capsize:].flatten()) == {0.0, -1.0} -def test_to_scipy_adapter_max_capacity_expansion(costs, max_capacity_expansion): - from muse.constraints import ScipyAdapter, lp_costs +def test_to_scipy_adapter_max_capacity_expansion( + costs, max_capacity_expansion, commodities, lpcosts +): + from muse.constraints import ScipyAdapter - adapter = ScipyAdapter.factory(costs, max_capacity_expansion) + adapter = ScipyAdapter.factory( + costs, constraints=[max_capacity_expansion], commodities=commodities + ) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -307,7 +318,6 @@ def test_to_scipy_adapter_max_capacity_expansion(costs, max_capacity_expansion): assert adapter.c.size == adapter.A_ub.shape[1] assert adapter.c.ndim == 1 - lpcosts = lp_costs(costs, max_capacity_expansion) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -317,10 +327,10 @@ def test_to_scipy_adapter_max_capacity_expansion(costs, max_capacity_expansion): assert set(adapter.A_ub[:, :capsize].flatten()) == {0.0, 1.0} -def test_to_scipy_adapter_no_constraint(costs): - from muse.constraints import ScipyAdapter, lp_costs +def test_to_scipy_adapter_no_constraint(costs, commodities, lpcosts): + from muse.constraints import ScipyAdapter - adapter = ScipyAdapter.factory(costs) + adapter = ScipyAdapter.factory(costs, constraints=[], commodities=commodities) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is None @@ -329,16 +339,14 @@ def test_to_scipy_adapter_no_constraint(costs): assert adapter.b_eq is None assert adapter.c.ndim == 1 - lpcosts = lp_costs(costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize -def test_back_to_muse_capacity(costs, constraints): - from muse.constraints import ScipyAdapter, lp_costs +def test_back_to_muse_capacity(lpcosts): + from muse.constraints import ScipyAdapter - lpcosts = lp_costs(costs, *constraints) data = ScipyAdapter._unified_dataset(lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "capacity") assert set(lpquantity.dims) == {"d(asset)", "d(replacement)"} @@ -348,10 +356,9 @@ def test_back_to_muse_capacity(costs, constraints): assert (copy == lpcosts.capacity).all() -def test_back_to_muse_production(costs, constraints): - from muse.constraints import ScipyAdapter, lp_costs +def test_back_to_muse_production(lpcosts): + from muse.constraints import ScipyAdapter - lpcosts = lp_costs(costs, *constraints) data = ScipyAdapter._unified_dataset(lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "production") assert set(lpquantity.dims) == { @@ -366,17 +373,13 @@ def test_back_to_muse_production(costs, constraints): assert (copy == lpcosts.production).all() -def test_back_to_muse_all(costs, rng: np.random.Generator, constraints): - from muse.constraints import ScipyAdapter, lp_costs - - lpcosts = lp_costs(costs, *constraints) +def test_back_to_muse_all(lpcosts): + from muse.constraints import ScipyAdapter data = ScipyAdapter._unified_dataset(lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") lpproduction = ScipyAdapter._selected_quantity(data, "production") - lpcosts.capacity.values[:] = rng.integers(0, 10, lpcosts.capacity.shape) - lpcosts.production.values[:] = rng.integers(0, 10, lpcosts.production.shape) x = np.concatenate( ( lpcosts.capacity.transpose( @@ -396,17 +399,13 @@ def test_back_to_muse_all(costs, rng: np.random.Generator, constraints): assert (copy.production == lpcosts.production).all() -def test_scipy_adapter_back_to_muse(costs, rng, constraints): - from muse.constraints import ScipyAdapter, lp_costs - - lpcosts = lp_costs(costs, *constraints) +def test_scipy_adapter_back_to_muse(costs, constraints, commodities, lpcosts): + from muse.constraints import ScipyAdapter data = ScipyAdapter._unified_dataset(lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") lpproduction = ScipyAdapter._selected_quantity(data, "production") - lpcosts.capacity.values[:] = rng.integers(0, 10, lpcosts.capacity.shape) - lpcosts.production.values[:] = rng.integers(0, 10, lpcosts.production.shape) x = np.concatenate( ( lpcosts.capacity.transpose( @@ -418,7 +417,7 @@ def test_scipy_adapter_back_to_muse(costs, rng, constraints): ) ) - adapter = ScipyAdapter.factory(costs, *constraints) + adapter = ScipyAdapter.factory(costs, constraints, commodities=commodities) assert (adapter.to_muse(x).capacity == lpcosts.capacity).all() assert (adapter.to_muse(x).production == lpcosts.production).all() @@ -434,10 +433,10 @@ def _as_list(data: Union[xr.DataArray, xr.Dataset]) -> Union[xr.DataArray, xr.Da return data -def test_scipy_adapter_standard_constraints(costs, constraints): +def test_scipy_adapter_standard_constraints(costs, constraints, commodities): from muse.constraints import ScipyAdapter - adapter = ScipyAdapter.factory(costs, *constraints) + adapter = ScipyAdapter.factory(costs, constraints, commodities=commodities) maxprod = next(cs for cs in constraints if cs.name == "max_production") maxcapa = next(cs for cs in constraints if cs.name == "max capacity expansion") demand = next(cs for cs in constraints if cs.name == "demand") @@ -448,7 +447,7 @@ def test_scipy_adapter_standard_constraints(costs, constraints): assert adapter.b_ub.size == demand.b.size + maxprod.b.size + maxcapa.b.size -def test_scipy_solver(technologies, costs, constraints): +def test_scipy_solver(technologies, costs, constraints, commodities): from muse.investments import scipy_match_demand solution = scipy_match_demand( @@ -456,6 +455,7 @@ def test_scipy_solver(technologies, costs, constraints): search_space=xr.ones_like(costs), technologies=technologies, constraints=constraints, + commodities=commodities, ) assert isinstance(solution, xr.DataArray) assert set(solution.dims) == {"asset", "replacement"} From a3480246809b84a767b5a3fc7dd307e48c9d992c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 28 Mar 2025 13:24:22 +0000 Subject: [PATCH 25/28] Fix tests --- src/muse/constraints.py | 8 ++- src/muse/investments.py | 2 + tests/test_constraints.py | 109 +++++++++++++++++++++++--------------- 3 files changed, 73 insertions(+), 46 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 90651d56..91576c0f 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -1247,7 +1247,11 @@ def _to_scipy_adapter( """ def reshape(matrix: xr.DataArray) -> np.ndarray: - """Convert constraints matrix to a 2D np array.""" + """Convert constraints matrix to a 2D np array. + + The rows of the constraaints matrix will represent the constraints, and the + columns will represent the decision variables. + """ # Before building LP we need to sort dimensions for consistency if list(matrix.dims) != sorted(matrix.dims): matrix = matrix.transpose(*sorted(matrix.dims)) @@ -1258,7 +1262,7 @@ def reshape(matrix: xr.DataArray) -> np.ndarray: [matrix[u].shape[0] for u in matrix.dims if str(u).startswith("c")] ) - # Reshape into a 2D array: N constrians x N decision variables + # Reshape into a 2D array: N constraints x N decision variables return matrix.values.reshape((size, -1)) def extract_bA(constraints, *kinds: ConstraintKind): diff --git a/src/muse/investments.py b/src/muse/investments.py index d8c38203..f5467d91 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -228,6 +228,7 @@ def adhoc_match_demand( constraints: list[Constraint], *, timeslice_level: str | None = None, + **kwargs, ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production @@ -281,6 +282,7 @@ def scipy_match_demand( *, commodities: list[str], timeslice_level: str | None = None, + **kwargs, ) -> xr.DataArray: from logging import getLogger diff --git a/tests/test_constraints.py b/tests/test_constraints.py index cf08e9ad..3593e3f9 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -61,13 +61,18 @@ def market_demand(assets, technologies): from muse.quantities import maximum_production from muse.utilities import broadcast_over_assets - return 0.8 * maximum_production( + # Set demand just below the maximum production of existing assets + res = 0.8 * maximum_production( broadcast_over_assets(technologies, assets), assets.capacity, ).sel(year=INVESTMENT_YEAR).groupby("technology").sum("asset").rename( technology="asset" ) + # Remove un-demanded commodities + res = res.sel(commodity=(res > 0).any(dim=["timeslice", "asset"])) + return res + @fixture def commodities(market_demand): @@ -97,11 +102,6 @@ def max_production(market_demand, capacity, search_space, technologies): return max_production(market_demand, capacity, search_space, technologies) -@fixture -def constraint(max_production): - return max_production - - @fixture def demand_constraint(market_demand, capacity, search_space, technologies): from muse.constraints import demand @@ -128,19 +128,24 @@ def demand_limiting_capacity(market_demand, capacity, search_space, technologies return demand_limiting_capacity(market_demand, capacity, search_space, technologies) -@fixture(params=["timeslice_as_list", "timeslice_as_multindex"]) -def constraints(request, market_demand, capacity, search_space, technologies): - from muse import constraints as cs +@fixture +def constraint(max_production): + return max_production + +@fixture(params=["timeslice_as_list", "timeslice_as_multindex"]) +def constraints( + request, + max_production, + demand_constraint, + demand_limiting_capacity, + max_capacity_expansion, +): constraints = [ - cs.max_production(market_demand, capacity, search_space, technologies), - cs.demand(market_demand, capacity, search_space, technologies), - cs.max_capacity_expansion( - market_demand, - capacity, - search_space, - technologies, - ), + max_production, + demand_limiting_capacity, + demand_constraint, + max_capacity_expansion, ] if request.param == "timeslice_as_multindex": constraints = [_as_list(cs) for cs in constraints] @@ -172,7 +177,7 @@ def test_constraints_dimensions( # Demand constraint assert set(demand_constraint.capacity.dims) == set() - assert set(demand_constraint.production.dims) == {"asset", "commodity", "timeslice"} + assert set(demand_constraint.production.dims) == set() assert set(demand_constraint.b.dims) == {"asset", "commodity", "timeslice"} # Demand limiting capacity constraint @@ -251,18 +256,29 @@ def test_to_scipy_adapter_maxprod(costs, max_production, commodities, lpcosts): ) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) + assert adapter.A_ub is not None + assert adapter.b_ub is not None assert adapter.A_eq is None assert adapter.b_eq is None assert adapter.c.ndim == 1 assert adapter.b_ub.ndim == 1 assert adapter.A_ub.ndim == 2 - assert adapter.b_ub.size == adapter.A_ub.shape[0] - assert adapter.c.size == adapter.A_ub.shape[1] - capsize = lpcosts.capacity.size - prodsize = lpcosts.production.size + capsize = lpcosts.capacity.size # number of capacity decision variables + prodsize = lpcosts.production.size # number of production decision variables + bsize = adapter.b_ub.size # number of constraints + assert adapter.c.size == capsize + prodsize - assert adapter.b_ub.size == prodsize + assert adapter.A_ub.shape[0] == bsize + assert adapter.A_ub.shape[1] == capsize + prodsize + + assert ( + bsize + == lpcosts.commodity.size + * lpcosts.timeslice.size + * lpcosts.asset.size + * lpcosts.replacement.size + ) assert adapter.b_ub == approx(0) assert adapter.A_ub[:, capsize:] == approx(np.eye(prodsize)) @@ -282,18 +298,17 @@ def test_to_scipy_adapter_demand(costs, demand_constraint, commodities, lpcosts) assert adapter.c.ndim == 1 assert adapter.b_ub.ndim == 1 assert adapter.A_ub.ndim == 2 - assert adapter.b_ub.size == adapter.A_ub.shape[0] - assert adapter.c.size == adapter.A_ub.shape[1] - capsize = lpcosts.capacity.size - prodsize = lpcosts.production.size + capsize = lpcosts.capacity.size # number of capacity decision variables + prodsize = lpcosts.production.size # number of production decision variables + bsize = adapter.b_ub.size # number of constraints + assert adapter.c.size == capsize + prodsize - assert ( - adapter.b_ub.size - == lpcosts.commodity.size * lpcosts.timeslice.size * lpcosts.asset.size - ) + assert adapter.A_ub.shape[0] == bsize + assert adapter.A_ub.shape[1] == capsize + prodsize + + assert bsize == lpcosts.commodity.size * lpcosts.timeslice.size * lpcosts.asset.size assert adapter.A_ub[:, :capsize] == approx(0) - assert adapter.A_ub[:, capsize:].shape[0] == lpcosts.production.size assert set(adapter.A_ub[:, capsize:].flatten()) == {0.0, -1.0} @@ -314,16 +329,17 @@ def test_to_scipy_adapter_max_capacity_expansion( assert adapter.c.ndim == 1 assert adapter.b_ub.ndim == 1 assert adapter.A_ub.ndim == 2 - assert adapter.b_ub.size == adapter.A_ub.shape[0] - assert adapter.c.size == adapter.A_ub.shape[1] - assert adapter.c.ndim == 1 - capsize = lpcosts.capacity.size - prodsize = lpcosts.production.size + capsize = lpcosts.capacity.size # number of capacity decision variables + prodsize = lpcosts.production.size # number of production decision variables + bsize = adapter.b_ub.size # number of constraints + assert adapter.c.size == capsize + prodsize - assert adapter.b_ub.size == lpcosts.replacement.size + assert adapter.A_ub.shape[0] == bsize + assert adapter.A_ub.shape[1] == capsize + prodsize + + assert bsize == lpcosts.replacement.size assert adapter.A_ub[:, capsize:] == approx(0) - assert adapter.A_ub[:, :capsize].sum(axis=1) == approx(lpcosts.asset.size) assert set(adapter.A_ub[:, :capsize].flatten()) == {0.0, 1.0} @@ -339,8 +355,8 @@ def test_to_scipy_adapter_no_constraint(costs, commodities, lpcosts): assert adapter.b_eq is None assert adapter.c.ndim == 1 - capsize = lpcosts.capacity.size - prodsize = lpcosts.production.size + capsize = lpcosts.capacity.size # number of capacity decision variables + prodsize = lpcosts.production.size # number of production decision variables assert adapter.c.size == capsize + prodsize @@ -440,11 +456,16 @@ def test_scipy_adapter_standard_constraints(costs, constraints, commodities): maxprod = next(cs for cs in constraints if cs.name == "max_production") maxcapa = next(cs for cs in constraints if cs.name == "max capacity expansion") demand = next(cs for cs in constraints if cs.name == "demand") - assert adapter.c.size == costs.size + maxprod.production.size + dlc = next(cs for cs in constraints if cs.name == "demand_limiting_capacity") + + n_constraints = adapter.b_ub.size + n_decision_vars = adapter.c.size + + assert n_decision_vars == costs.size + maxprod.production.size assert adapter.b_eq is None assert adapter.A_eq is None - assert adapter.A_ub.shape == (adapter.b_ub.size, adapter.c.size) - assert adapter.b_ub.size == demand.b.size + maxprod.b.size + maxcapa.b.size + assert adapter.A_ub.shape == (n_constraints, n_decision_vars) + assert n_constraints == demand.b.size + maxprod.b.size + maxcapa.b.size + dlc.b.size def test_scipy_solver(technologies, costs, constraints, commodities): From 882414684c8bb1f9c8d880f64456feda113cf723 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 28 Mar 2025 14:08:10 +0000 Subject: [PATCH 26/28] Restore test --- src/muse/constraints.py | 147 +++++++++++++++++++------------------- tests/test_constraints.py | 21 ++++++ 2 files changed, 95 insertions(+), 73 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 91576c0f..a0bfd988 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -191,7 +191,6 @@ def decorated( # Check constraint if "b" not in constraint.data_vars: raise RuntimeError("Constraint must contain a right-hand-side vector") - assert not constraint.b.dims == () if ( "capacity" not in constraint.data_vars and "production" not in constraint.data_vars @@ -885,78 +884,80 @@ def lp_constraint_matrix( conditions above. Example: - Lets first setup a constraint and a cost matrix: - - >>> from muse import examples - >>> from muse import constraints as cs - >>> from muse.utilities import reduce_assets - >>> res = examples.sector("residential", model="medium") - >>> market = examples.residential_market("medium") - >>> technologies = res.technologies.sel(year=2025) - >>> search = examples.search_space("residential", model="medium") - >>> assets = next(a.assets for a in res.agents) - >>> capacity = reduce_assets(assets.capacity, coords=("region", "technology")) - >>> demand = None # not used in max production - >>> constraint = cs.max_production(demand, capacity.sel(year=[2020, 2025]), - ... search, technologies) # noqa: E501 - >>> lpcosts = cs.lp_costs( - ... ( - ... technologies - ... .sel(region=assets.region) - ... ), - ... costs=search * np.arange(np.prod(search.shape)).reshape(search.shape), - ... ) - - For a simple example, we can first check the case where b is scalar. The result - ought to be a single row of a matrix, or a vector with only decision variables: - - >>> from pytest import approx - >>> result = cs.lp_constraint_matrix( - ... xr.DataArray(1), constraint.capacity, lpcosts.capacity - ... ) - >>> assert result.values == approx(-1) - >>> assert set(result.dims) == {f"d({x})" for x in lpcosts.capacity.dims} - >>> result = cs.lp_constraint_matrix( - ... xr.DataArray(1), constraint.production, lpcosts.production - ... ) - >>> assert set(result.dims) == {f"d({x})" for x in lpcosts.production.dims} - >>> assert result.values == approx(1) - - As expected, the capacity vector is 1, whereas the production vector is -1. - These are the values the :py:func:`~muse.constraints.max_production` is set up - to create. - - Now, let's check the case where ``b`` is the one from the - :py:func:`~muse.constraints.max_production` constraint. In that case, all the - dimensions should end up as constraint dimensions: the production for each - timeslice, region, asset, and replacement technology should not outstrip the - capacity assigned for the asset and replacement technology. - - >>> result = cs.lp_constraint_matrix( - ... constraint.b, constraint.capacity, lpcosts.capacity - ... ) - >>> decision_dims = {f"d({x})" for x in lpcosts.capacity.dims} - >>> constraint_dims = { - ... f"c({x})" - ... for x in set(lpcosts.production.dims).union(constraint.b.dims) - ... } - >>> assert set(result.dims) == decision_dims.union(constraint_dims) - - The :py:func:`~muse.constraints.max_production` constraint on the production - side is the identy matrix with a factor :math:`-1`. We can easily check this - by stacking the decision and constraint dimensions in the result: - - >>> result = cs.lp_constraint_matrix( - ... constraint.b, constraint.production, lpcosts.production - ... ) - >>> decision_dims = {f"d({x})" for x in lpcosts.production.dims} - >>> assert set(result.dims) == decision_dims.union(constraint_dims) - >>> result = result.reset_index("d(timeslice)", drop=True).assign_coords( - ... {"d(timeslice)": result["d(timeslice)"].values} - ... ) - >>> stacked = result.stack(d=sorted(decision_dims), c=sorted(constraint_dims)) - >>> assert stacked.shape[0] == stacked.shape[1] - >>> assert stacked.values == approx(np.eye(stacked.shape[0])) + Lets first setup a constraint and a cost matrix: + + >>> from muse import examples + >>> from muse import constraints as cs + >>> from muse.utilities import reduce_assets + >>> from muse.quantities import maximum_production + >>> res = examples.sector("residential", model="medium") + >>> market = examples.residential_market("medium") + >>> technologies = res.technologies.sel(year=2025) + >>> search = examples.search_space("residential", model="medium") + >>> assets = next(a.assets for a in res.agents) + >>> capacity = reduce_assets(assets.capacity, coords=("region", "technology")) + >>> market_demand = 0.8 * maximum_production( + ... technologies, + ... assets.capacity, + ... ).sel(year=2025) + >>> constraint = cs.max_production(market_demand, + ... capacity.sel(year=[2020, 2025]), + ... search, technologies) # noqa: E501 + >>> lpcosts = cs.lp_costs( + ... search * np.arange(np.prod(search.shape)).reshape(search.shape), + ... commodities=["heat", "cook"], + ... ) + + For a simple example, we can first check the case where b is scalar. The result + ought to be a single row of a matrix, or a vector with only decision variables: + + >>> from pytest import approx + >>> result = cs.lp_constraint_matrix( + ... xr.DataArray(1), constraint.capacity, lpcosts.capacity + ... ) + >>> assert result.values == approx(-1) + >>> assert set(result.dims) == {f"d({x})" for x in lpcosts.capacity.dims} + >>> result = cs.lp_constraint_matrix( + ... xr.DataArray(1), constraint.production, lpcosts.production + ... ) + >>> assert set(result.dims) == {f"d({x})" for x in lpcosts.production.dims} + >>> assert result.values == approx(1) + + As expected, the capacity vector is 1, whereas the production vector is -1. + These are the values the :py:func:`~muse.constraints.max_production` is set up + to create. + + Now, let's check the case where ``b`` is the one from the + :py:func:`~muse.constraints.max_production` constraint. In that case, all the + dimensions should end up as constraint dimensions: the production for each + timeslice, region, asset, and replacement technology should not outstrip the + capacity assigned for the asset and replacement technology. + + >>> result = cs.lp_constraint_matrix( + ... constraint.b, constraint.capacity, lpcosts.capacity + ... ) + >>> decision_dims = {f"d({x})" for x in lpcosts.capacity.dims} + >>> constraint_dims = { + ... f"c({x})" + ... for x in set(lpcosts.production.dims).union(constraint.b.dims) + ... } + >>> assert set(result.dims) == decision_dims.union(constraint_dims) + + The :py:func:`~muse.constraints.max_production` constraint on the production + side is the identy matrix with a factor :math:`-1`. We can easily check this + by stacking the decision and constraint dimensions in the result: + + >>> result = cs.lp_constraint_matrix( + ... constraint.b, constraint.production, lpcosts.production + ... ) + >>> decision_dims = {f"d({x})" for x in lpcosts.production.dims} + >>> assert set(result.dims) == decision_dims.union(constraint_dims) + >>> result = result.reset_index("d(timeslice)", drop=True).assign_coords( + ... {"d(timeslice)": result["d(timeslice)"].values} + ... ) + >>> stacked = result.stack(d=sorted(decision_dims), c=sorted(constraint_dims)) + >>> assert stacked.shape[0] == stacked.shape[1] + >>> assert stacked.values == approx(np.eye(stacked.shape[0])) """ from functools import reduce diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 3593e3f9..8f8b83fc 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -195,6 +195,27 @@ def test_constraints_dimensions( assert set(max_capacity_expansion.b.dims) == {"replacement"} +def test_lp_constraints_matrix_b_is_scalar(constraint, lpcosts): + """B is a scalar. + + When ``b`` is a scalar, the output should be equivalent to a single row matrix, or a + single vector with only decision variables. + """ + from muse.constraints import lp_constraint_matrix + + lpconstraint = lp_constraint_matrix( + xr.DataArray(1), constraint.capacity, lpcosts.capacity + ) + assert lpconstraint.values == approx(-1) + assert set(lpconstraint.dims) == {f"d({x})" for x in lpcosts.capacity.dims} + + lpconstraint = lp_constraint_matrix( + xr.DataArray(1), constraint.production, lpcosts.production + ) + assert lpconstraint.values == approx(1) + assert set(lpconstraint.dims) == {f"d({x})" for x in lpcosts.production.dims} + + def test_max_production_constraint_diagonal(constraint, lpcosts): """Production side of max capacity production is diagonal. From 125594534bff54b59088de2a2e4f1468b0f6af15 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 28 Mar 2025 14:09:28 +0000 Subject: [PATCH 27/28] Delete doctests --- src/muse/constraints.py | 174 +--------------------------------------- 1 file changed, 1 insertion(+), 173 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index a0bfd988..4097f6dc 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -883,81 +883,6 @@ def lp_constraint_matrix( The result is the constraint matrix, expanded, reduced and diagonalized for the conditions above. - Example: - Lets first setup a constraint and a cost matrix: - - >>> from muse import examples - >>> from muse import constraints as cs - >>> from muse.utilities import reduce_assets - >>> from muse.quantities import maximum_production - >>> res = examples.sector("residential", model="medium") - >>> market = examples.residential_market("medium") - >>> technologies = res.technologies.sel(year=2025) - >>> search = examples.search_space("residential", model="medium") - >>> assets = next(a.assets for a in res.agents) - >>> capacity = reduce_assets(assets.capacity, coords=("region", "technology")) - >>> market_demand = 0.8 * maximum_production( - ... technologies, - ... assets.capacity, - ... ).sel(year=2025) - >>> constraint = cs.max_production(market_demand, - ... capacity.sel(year=[2020, 2025]), - ... search, technologies) # noqa: E501 - >>> lpcosts = cs.lp_costs( - ... search * np.arange(np.prod(search.shape)).reshape(search.shape), - ... commodities=["heat", "cook"], - ... ) - - For a simple example, we can first check the case where b is scalar. The result - ought to be a single row of a matrix, or a vector with only decision variables: - - >>> from pytest import approx - >>> result = cs.lp_constraint_matrix( - ... xr.DataArray(1), constraint.capacity, lpcosts.capacity - ... ) - >>> assert result.values == approx(-1) - >>> assert set(result.dims) == {f"d({x})" for x in lpcosts.capacity.dims} - >>> result = cs.lp_constraint_matrix( - ... xr.DataArray(1), constraint.production, lpcosts.production - ... ) - >>> assert set(result.dims) == {f"d({x})" for x in lpcosts.production.dims} - >>> assert result.values == approx(1) - - As expected, the capacity vector is 1, whereas the production vector is -1. - These are the values the :py:func:`~muse.constraints.max_production` is set up - to create. - - Now, let's check the case where ``b`` is the one from the - :py:func:`~muse.constraints.max_production` constraint. In that case, all the - dimensions should end up as constraint dimensions: the production for each - timeslice, region, asset, and replacement technology should not outstrip the - capacity assigned for the asset and replacement technology. - - >>> result = cs.lp_constraint_matrix( - ... constraint.b, constraint.capacity, lpcosts.capacity - ... ) - >>> decision_dims = {f"d({x})" for x in lpcosts.capacity.dims} - >>> constraint_dims = { - ... f"c({x})" - ... for x in set(lpcosts.production.dims).union(constraint.b.dims) - ... } - >>> assert set(result.dims) == decision_dims.union(constraint_dims) - - The :py:func:`~muse.constraints.max_production` constraint on the production - side is the identy matrix with a factor :math:`-1`. We can easily check this - by stacking the decision and constraint dimensions in the result: - - >>> result = cs.lp_constraint_matrix( - ... constraint.b, constraint.production, lpcosts.production - ... ) - >>> decision_dims = {f"d({x})" for x in lpcosts.production.dims} - >>> assert set(result.dims) == decision_dims.union(constraint_dims) - >>> result = result.reset_index("d(timeslice)", drop=True).assign_coords( - ... {"d(timeslice)": result["d(timeslice)"].values} - ... ) - >>> stacked = result.stack(d=sorted(decision_dims), c=sorted(constraint_dims)) - >>> assert stacked.shape[0] == stacked.shape[1] - >>> assert stacked.values == approx(np.eye(stacked.shape[0])) """ from functools import reduce @@ -1019,104 +944,7 @@ def get_dimension(dim): @dataclass class ScipyAdapter: - """Creates the input for the scipy solvers. - - Example: - Lets give a fist simple example. The constraint - :py:func:`~muse.constraints.max_capacity_expansion` limits how much each - capacity can be expanded in a given year. - - >>> from muse import examples - >>> from muse.quantities import maximum_production - >>> from muse.utilities import reduce_assets - >>> from muse import constraints as cs - >>> res = examples.sector("residential", model="medium") - >>> market = examples.residential_market("medium") - >>> technologies = res.technologies.sel(year=2025) - >>> search = examples.search_space("residential", model="medium") - >>> assets = next(a.assets for a in res.agents) - >>> capacity = reduce_assets(assets.capacity, coords=("region", "technology")) - >>> market_demand = 0.8 * maximum_production( - ... technologies, - ... assets.capacity, - ... ).sel(year=2025) - >>> costs = search * np.arange(np.prod(search.shape)).reshape(search.shape) - >>> constraint = cs.max_capacity_expansion( - ... market_demand, capacity.sel(year=[2020, 2025]), search, technologies) - - The constraint acts over capacity decision variables only: - - >>> assert constraint.production.data == np.array(0) - >>> assert len(constraint.production.dims) == 0 - - It is an upper bound for a straightforward sum over the capacities for a given - technology. The matrix operator is simply the identity: - - >>> assert constraint.capacity.data == np.array(1) - >>> assert len(constraint.capacity.dims) == 0 - - And the upperbound is expanded over the replacement technologies, - but not over the assets. Hence the assets will be summed over in the final - constraint: - - >>> assert set(constraint.b.dims) == {"replacement"} - >>> assert constraint.kind == cs.ConstraintKind.UPPER_BOUND - - As shown above, it does not bind the production decision variables. Hence, - production is zero. The matrix operator for the capacity is simply the identity. - The upper bound is simply the maximum for replacement technology (and region, if - that particular dimension exists in the problem). - - The lp problem then becomes: - - >>> technologies = res.technologies.interp(year=market.year.min() + 5) - >>> inputs = cs.ScipyAdapter.factory( - ... technologies, costs, constraint - ... ) - - The decision variables are always constrained between zero and infinity: - - >>> assert inputs.bounds == (0, np.inf) - - The problem is an upper-bound one. There are no equality constraints: - - >>> assert inputs.A_eq is None - >>> assert inputs.b_eq is None - - The upper bound matrix and vector, and the costs are consistent in their - dimensions: - - >>> assert inputs.c.ndim == 1 - >>> assert inputs.b_ub.ndim == 1 - >>> assert inputs.A_ub.ndim == 2 - >>> assert inputs.b_ub.size == inputs.A_ub.shape[0] - >>> assert inputs.c.size == inputs.A_ub.shape[1] - >>> assert inputs.c.ndim == 1 - - In practice, :py:func:`~muse.constraints.lp_costs` helps us define the decision - variables (and ``c``). We can verify that the sizes are consistent: - - >>> lpcosts = cs.lp_costs(technologies, costs) - >>> capsize = lpcosts.capacity.size - >>> prodsize = lpcosts.production.size - >>> assert inputs.c.size == capsize + prodsize - - The upper bound itself is over each replacement technology: - - >>> assert inputs.b_ub.size == lpcosts.replacement.size - - The production decision variables are not involved: - - >>> from pytest import approx - >>> assert inputs.A_ub[:, capsize:] == approx(0) - - The matrix for the capacity decision variables is a sum over assets for a given - replacement technology. Hence, each row is constituted of zeros and ones and - sums to the number of assets: - - >>> assert inputs.A_ub[:, :capsize].sum(axis=1) == approx(lpcosts.asset.size) - >>> assert set(inputs.A_ub[:, :capsize].flatten()) == {0.0, 1.0} - """ + """Creates the input for the scipy solvers.""" c: np.ndarray to_muse: Callable[[np.ndarray], xr.Dataset] From d7ed991a0b17bc296ca667e7db3c047886eb1d6a Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 1 Apr 2025 10:45:14 +0100 Subject: [PATCH 28/28] Apply suggested changes --- src/muse/constraints.py | 56 +++++++++++++++++++++++++++++++++-------- 1 file changed, 45 insertions(+), 11 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 4097f6dc..be0dcbc3 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -176,6 +176,7 @@ def decorated( """Computes and standardizes a constraint.""" # Check inputs assert "year" not in technologies.dims + assert "year" not in demand.dims assert len(capacity.year) == 2 # current year and investment year # Calculate constraint @@ -425,12 +426,10 @@ def demand( **kwargs, ) -> Constraint: """Constraints production to meet demand.""" - b = demand - if "region" in b.dims and "dst_region" in technologies.dims: - b = b.rename(region="dst_region") - assert "year" not in b.dims + if "region" in demand.dims and "dst_region" in technologies.dims: + demand = demand.rename(region="dst_region") return xr.Dataset( - dict(b=b, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND) + dict(b=demand, production=1), attrs=dict(kind=ConstraintKind.LOWER_BOUND) ) @@ -469,8 +468,7 @@ def max_production( """ from xarray import ones_like, zeros_like - commodities = demand.commodity - kwargs = dict(commodity=commodities) + kwargs = dict(commodity=demand.commodity) if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = search_space.region techs = ( @@ -734,8 +732,7 @@ def minimum_service( if np.all(technologies["minimum_service_factor"] == 0): return None - commodities = demand.commodity - kwargs = dict(commodity=commodities) + kwargs = dict(commodity=demand.commodity) if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = search_space.region techs = ( @@ -944,7 +941,40 @@ def get_dimension(dim): @dataclass class ScipyAdapter: - """Creates the input for the scipy solvers.""" + """Creates the input for the scipy solvers. + + This adapter is required to convert data (costs and constraints) from a series of + dataarrays to a format that can be used by scipy's linear programming solver. + + The scipy solver requires the following inputs as a set of 1D or 2D numpy arrays: + c: Coefficients of the linear objective function to be minimized. This is a 1D + vector, each element representing the cost of a decision variable. + A_ub: The inequality constraint matrix. This is a 2D matrix containing + coefficients of the linear inequality constraints, with constraints as rows + and decision variables as columns. + b_ub: The inequality constraint vector. This is a 1D vector, each element + representing an upper bound on the corresponding constraint in A_ub. + A_eq: The equality constraint matrix. In practice, since all of the constraints + currently implemented are inequality constraints, this will always be zeros. + However, this may be changed in the future. + b_eq: The equality constraint vector. As above, this will currently always be + zeros. + + See the documentation for `scipy.optimize.linprog` for more details: + https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linprog.html + + This class takes in costs and constraints as xarray dataarrays, and performs the + necessary conversions to create the above inputs. These are saved together in the + `kwargs` property, which is passed to the scipy solver. This has an additional + attribute `bounds`, which is a tuple of the lower and upper bounds for the + decision variables. In practice, we will always use (0, np.inf) as the bounds. + + The output of the scipy solver is a 1D array of decision variables, which must then + be converted back into a format that MUSE can understand. To aid this, we + save templates for the capacity and production decision variables, which are used + to convert the output back into a labelled xarray dataset using the helper function + `to_muse`. + """ c: np.ndarray to_muse: Callable[[np.ndarray], xr.Dataset] @@ -1095,7 +1125,11 @@ def reshape(matrix: xr.DataArray) -> np.ndarray: return matrix.values.reshape((size, -1)) def extract_bA(constraints, *kinds: ConstraintKind): - """Extracts A and b for constraints of specified kinds.""" + """Extracts A and b for constraints of specified kinds. + + These will end up as A_ub and b_ub for inequality constraints, and + A_eq and b_eq for equality constraints (see ScipyAdapter). + """ # Get indices of constraints of the specified kind indices = [i for i in range(len(bs)) if constraints[i].kind in kinds]