Code Reference

framcore

check_type(value: object, expected: type | tuple[type], caller: Callable | None = None) -> None

Check a value matches expected type(s).

Parameters:

value (object): Value being checked. Required.
expected (type | tuple[type]): Expected types. Required.
caller (Callable | None): The origin of the check. Default None.

Raises:

TypeError: When value does not match expected types.

Source code in framcore/Base.py
def check_type(value: object, expected: type | tuple[type], caller: Callable | None = None) -> None:
    """
    Check a value matches expected type(s).

    Args:
        value (object): Value being checked.
        expected (type | tuple[type]): Expected types.
        caller (Callable): The origin of the check.

    Raises:
        TypeError: When value does not match expected types.

    """
    if not isinstance(value, expected):
        message = f"{expected}, got {type(value).__name__}"
        message = "Expected " + message if caller is None else f"{caller} expected " + message
        raise TypeError(message)
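
A minimal usage sketch (import path per the source location above):

from framcore.Base import check_type

check_type(42, int)              # passes silently
check_type(42, (int, float))     # a tuple of types is accepted
check_type("42", int)            # raises TypeError: Expected <class 'int'>, got str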

Base

Base

Core base class to share methods.

Source code in framcore/Base.py
class Base:
    """Core base class to share methods."""

    def _check_type(self, value, class_or_tuple) -> None:  # noqa: ANN001
        check_type(value, class_or_tuple, caller=self)

    def _ensure_float(self, value: object) -> float:
        with contextlib.suppress(Exception):
            return float(value)
        message = f"Unable to convert {value} to float."
        raise ValueError(message)

    def _check_int(self, value: int, lower_bound: int | None, upper_bound: int | None) -> None:
        if lower_bound is not None and value < lower_bound:
            message = f"Value {value} is less than lower_bound {lower_bound}."
            raise ValueError(message)
        if upper_bound is not None and value > upper_bound:
            message = f"Value {value} is greater than upper_bound {upper_bound}."
            raise ValueError(message)

    def _check_float(self, value: float, lower_bound: float | None, upper_bound: float | None) -> None:
        if lower_bound is not None and value < lower_bound:
            message = f"Value {value} is less than lower_bound {lower_bound}."
            raise ValueError(message)
        if upper_bound is not None and value > upper_bound:
            message = f"Value {value} is greater than upper_bound {upper_bound}."
            raise ValueError(message)

    def _report_errors(self, errors: set[str]) -> None:
        if errors:
            n = len(errors)
            s = "s" if n > 1 else ""
            error_str = "\n".join(errors)
            message = f"Found {n} error{s}:\n{error_str}"
            raise RuntimeError(message)

    def send_event(self, event_type: str, **kwargs: dict[str, Any]) -> None:
        """All events in core should use this."""
        send_event(sender=self, event_type=event_type, **kwargs)

    def send_warning_event(self, message: str) -> None:
        """Use this to send warning event."""
        send_warning_event(sender=self, message=message)

    def send_error_event(self, message: str, exception_type_name: str, traceback: str) -> None:
        """Use this to send error event."""
        send_error_event(sender=self, message=message, exception_type_name=exception_type_name, traceback=traceback)

    def send_info_event(self, message: str) -> None:
        """Use this to send info event."""
        send_info_event(sender=self, message=message)

    def send_debug_event(self, message: str) -> None:
        """Use this to send debug event."""
        send_debug_event(sender=self, message=message)

    def get_fingerprint_default(
        self,
        refs: dict[str, str] | None = None,
        excludes: set[str] | None = None,
    ) -> Fingerprint:
        """
        Generate a Fingerprint for the object, optionally including references and excluding specified properties.

        Parameters
        ----------
        refs : dict[str, str] | None, optional
            Dictionary mapping property names to reference keys to include as references in the fingerprint.
        excludes : set[str] | None, optional
            Set of property names to exclude from the fingerprint.

        Returns
        -------
        Fingerprint
            The generated fingerprint for the object.

        """
        fingerprint = Fingerprint(source=self)

        if refs:
            for ref_prop, ref_key in refs.items():
                if ref_key is not None:
                    fingerprint.add_ref(ref_prop, ref_key)

        default_excludes = {"_parent"}

        for prop_name, prop_value in self.__dict__.items():
            if callable(prop_value) or (refs and prop_name in refs) or (excludes and prop_name in excludes) or prop_name in default_excludes:
                continue

            if prop_value is None:
                continue

            fingerprint.add(prop_name, prop_value)

        return fingerprint

    def _get_property_name(self, property_reference) -> str | None:  # noqa: ANN001
        for name, value in inspect.getmembers(self):
            if value is property_reference:
                return name
        return None

    def __repr__(self) -> str:
        """Display type and non-None fields."""
        type_name = type(self).__name__
        value_fields = []
        for k, v in vars(self).items():
            display_value = self._get_attr_str(k, v)
            if display_value is not None:
                value_fields.append(f"{k}={display_value}")
        value_fields = ", ".join(value_fields)
        return f"{type_name}({value_fields})"

    def _get_attr_str(self, key: str, value: object) -> str | None:
        if value is None:
            return None
        if isinstance(value, int | float | str | bool):
            return value
        try:
            return value._get_attr_str()  # noqa: SLF001
        except Exception:
            pass
        return type(value).__name__
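
A short sketch of how a subclass can use these validation helpers (Battery is hypothetical, not part of framcore):

from framcore.Base import Base

class Battery(Base):
    def __init__(self, capacity) -> None:  # capacity may be int, float or numeric str
        self._capacity = self._ensure_float(capacity)  # ValueError if not convertible
        self._check_float(self._capacity, lower_bound=0.0, upper_bound=None)
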
__repr__() -> str

Display type and non-None fields.

Source code in framcore/Base.py
def __repr__(self) -> str:
    """Display type and non-None fields."""
    type_name = type(self).__name__
    value_fields = []
    for k, v in vars(self).items():
        display_value = self._get_attr_str(k, v)
        if display_value is not None:
            value_fields.append(f"{k}={display_value}")
    value_fields = ", ".join(value_fields)
    return f"{type_name}({value_fields})"
get_fingerprint_default(refs: dict[str, str] | None = None, excludes: set[str] | None = None) -> Fingerprint

Generate a Fingerprint for the object, optionally including references and excluding specified properties.

Parameters

refs : dict[str, str] | None, optional
    Dictionary mapping property names to reference keys to include as references in the fingerprint.
excludes : set[str] | None, optional
    Set of property names to exclude from the fingerprint.

Returns

Fingerprint
    The generated fingerprint for the object.

Source code in framcore/Base.py
def get_fingerprint_default(
    self,
    refs: dict[str, str] | None = None,
    excludes: set[str] | None = None,
) -> Fingerprint:
    """
    Generate a Fingerprint for the object, optionally including references and excluding specified properties.

    Parameters
    ----------
    refs : dict[str, str] | None, optional
        Dictionary mapping property names to reference keys to include as references in the fingerprint.
    excludes : set[str] | None, optional
        Set of property names to exclude from the fingerprint.

    Returns
    -------
    Fingerprint
        The generated fingerprint for the object.

    """
    fingerprint = Fingerprint(source=self)

    if refs:
        for ref_prop, ref_key in refs.items():
            if ref_key is not None:
                fingerprint.add_ref(ref_prop, ref_key)

    default_excludes = {"_parent"}

    for prop_name, prop_value in self.__dict__.items():
        if callable(prop_value) or (refs and prop_name in refs) or (excludes and prop_name in excludes) or prop_name in default_excludes:
            continue

        if prop_value is None:
            continue

        fingerprint.add(prop_name, prop_value)

    return fingerprint
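
A minimal sketch (Demand, its attribute names, and the "profile_id" reference key are hypothetical):

from framcore.Base import Base

class Demand(Base):
    def __init__(self) -> None:
        self._capacity = 100.0        # fingerprinted by value
        self._profile = "profile_id"  # added as a reference instead of a value
        self._note = None             # None values are always skipped

fp = Demand().get_fingerprint_default(refs={"_profile": "profile_id"}, excludes={"_note"})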
send_debug_event(message: str) -> None

Use this to send debug event.

Source code in framcore/Base.py
def send_debug_event(self, message: str) -> None:
    """Use this to send debug event."""
    send_debug_event(sender=self, message=message)
send_error_event(message: str, exception_type_name: str, traceback: str) -> None

Use this to send error event.

Source code in framcore/Base.py
def send_error_event(self, message: str, exception_type_name: str, traceback: str) -> None:
    """Use this to send error event."""
    send_error_event(sender=self, message=message, exception_type_name=exception_type_name, traceback=traceback)
send_event(event_type: str, **kwargs: dict[str, Any]) -> None

All events in core should use this.

Source code in framcore/Base.py
def send_event(self, event_type: str, **kwargs: dict[str, Any]) -> None:
    """All events in core should use this."""
    send_event(sender=self, event_type=event_type, **kwargs)
send_info_event(message: str) -> None

Use this to send info event.

Source code in framcore/Base.py
def send_info_event(self, message: str) -> None:
    """Use this to send info event."""
    send_info_event(sender=self, message=message)
send_warning_event(message: str) -> None

Use this to send warning event.

Source code in framcore/Base.py
def send_warning_event(self, message: str) -> None:
    """Use this to send warning event."""
    send_warning_event(sender=self, message=message)
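
A sketch of typical use from a subclass (Loader and the messages are hypothetical):

from framcore.Base import Base

class Loader(Base):
    def load(self, path: str) -> None:
        self.send_info_event(f"loading {path}")
        self.send_debug_event("parsing header")
        self.send_warning_event("no data rows found")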

Model

Model

Bases: Base

Model stores the representation of the energy system with Components, TimeVectors, Expressions, and the Aggregators applied to the Model.

  • Components describe the main elements in the energy system. Can have additional Attributes.
  • TimeVector and Curve hold the time series data.
  • Expressions for data manipulation of TimeVectors and Curves. Can be queried.
  • Aggregators handle aggregation and disaggregation of Components. Aggregators are added to Model when used (Aggregator.aggregate(model)), and can be undone in LIFO order with disaggregate().

Methods:

get_data(): Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified.
disaggregate(): Undo all aggregations applied to Model in LIFO order.
get_content_counts(): Return number of objects stored in model organized into concepts and types.

Source code in framcore/Model.py
class Model(Base):
    """
    Model stores the representation of the energy system with Components, TimeVectors, Expressions, and the Aggregators applied to the Model.

    - Components describe the main elements in the energy system. Can have additional Attributes.
    - TimeVector and Curve hold the time series data.
    - Expressions for data manipulation of TimeVectors and Curves. Can be queried.
    - Aggregators handle aggregation and disaggregation of Components. Aggregators are added to Model when used (Aggregator.aggregate(model)),
    and can be undone in LIFO order with disaggregate().

    Methods:
        get_data(): Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified.
        disaggregate(): Undo all aggregations applied to Model in LIFO order.
        get_content_counts(): Return number of objects stored in model organized into concepts and types.

    """

    def __init__(self) -> None:
        """Create a new model instance with empty data and no aggregators."""
        self._data = ModelDict()
        self._aggregators: list[Aggregator] = []

    def get_data(self) -> ModelDict:
        """Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified."""
        return self._data

    def disaggregate(self) -> None:
        """Undo all aggregations applied to Model in LIFO order."""
        while self._aggregators:
            aggregator = self._aggregators.pop(-1)  # last item
            aggregator.disaggregate(self)

    def get_content_counts(self) -> dict[str, Counter]:
        """Return number of objects stored in model organized into concepts and types."""
        data_values = self.get_data().values()
        counts = {
            "components": Counter(),
            "timevectors": Counter(),
            "curves": Counter(),
            "expressions": Counter(),
        }
        for obj in data_values:
            if isinstance(obj, Component):
                key = "components"
            elif isinstance(obj, TimeVector):
                key = "timevectors"
            elif isinstance(obj, Curve):
                key = "curves"
            elif isinstance(obj, Expr):
                key = "expressions"
            else:
                key = "unexpected"
                if key not in counts:
                    counts[key] = Counter()
            counts[key][type(obj).__name__] += 1

        assert len(data_values) == sum(c.total() for c in counts.values())

        counts["aggregators"] = Counter()
        for a in self._aggregators:
            counts["aggregators"][type(a).__name__] += 1

        return counts
__init__() -> None

Create a new model instance with empty data and no aggregators.

Source code in framcore/Model.py
def __init__(self) -> None:
    """Create a new model instance with empty data and no aggregators."""
    self._data = ModelDict()
    self._aggregators: list[Aggregator] = []
disaggregate() -> None

Undo all aggregations applied to Model in LIFO order.

Source code in framcore/Model.py
def disaggregate(self) -> None:
    """Undo all aggregations applied to Model in LIFO order."""
    while self._aggregators:
        aggregator = self._aggregators.pop(-1)  # last item
        aggregator.disaggregate(self)
get_content_counts() -> dict[str, Counter]

Return number of objects stored in model organized into concepts and types.

Source code in framcore/Model.py
def get_content_counts(self) -> dict[str, Counter]:
    """Return number of objects stored in model organized into concepts and types."""
    data_values = self.get_data().values()
    counts = {
        "components": Counter(),
        "timevectors": Counter(),
        "curves": Counter(),
        "expressions": Counter(),
    }
    for obj in data_values:
        if isinstance(obj, Component):
            key = "components"
        elif isinstance(obj, TimeVector):
            key = "timevectors"
        elif isinstance(obj, Curve):
            key = "curves"
        elif isinstance(obj, Expr):
            key = "expressions"
        else:
            key = "unexpected"
            if key not in counts:
                counts[key] = Counter()
        counts[key][type(obj).__name__] += 1

    assert len(data_values) == sum(c.total() for c in counts.values())

    counts["aggregators"] = Counter()
    for a in self._aggregators:
        counts["aggregators"][type(a).__name__] += 1

    return counts
get_data() -> ModelDict

Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified.

Source code in framcore/Model.py
def get_data(self) -> ModelDict:
    """Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified."""
    return self._data
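
A usage sketch (tv stands for an already constructed TimeVector; its construction is not shown here):

from framcore.Model import Model

model = Model()
data = model.get_data()      # the live ModelDict; mutating it mutates the model
data["inflow_profile"] = tv  # keys must be str, values Component | Expr | TimeVector | Curve
print(model.get_content_counts())  # e.g. {"components": Counter(), "timevectors": Counter({...}), ...}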

ModelDict

Bases: dict

Dict storing only values of type Component | Expr | TimeVector | Curve.

Source code in framcore/Model.py
class ModelDict(dict):
    """Dict storing only values of type Component | Expr | TimeVector | Curve."""

    def __setitem__(self, key: str, value: Component | Expr | TimeVector | Curve) -> None:
        """Set item with type checking."""
        if not isinstance(key, str):
            message = f"Expected str for key {key}, got {type(key).__name__}"
            raise TypeError(message)
        if not isinstance(value, Component | Expr | TimeVector | Curve):
            message = f"Expected Component | Expr | TimeVector | Curve for key {key}, got {type(value).__name__}"
            raise TypeError(message)
        return super().__setitem__(key, value)
__setitem__(key: str, value: Component | Expr | TimeVector | Curve) -> None

Set item with type checking.

Source code in framcore/Model.py
def __setitem__(self, key: str, value: Component | Expr | TimeVector | Curve) -> None:
    """Set item with type checking."""
    if not isinstance(key, str):
        message = f"Expected str for key {key}, got {type(key).__name__}"
        raise TypeError(message)
    if not isinstance(value, Component | Expr | TimeVector | Curve):
        message = f"Expected Component | Expr | TimeVector | Curve for key {key}, got {type(value).__name__}"
        raise TypeError(message)
    return super().__setitem__(key, value)
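
A short sketch of the type checking (component is an already constructed Component, hypothetical here):

from framcore.Model import ModelDict

d = ModelDict()
d["gen1"] = component  # ok
d[42] = component      # TypeError: Expected str for key 42, got int
d["g2"] = "oops"       # TypeError: Expected Component | Expr | TimeVector | Curve for key g2, got str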

aggregators

SolarAggregator

Bases: _WindSolarAggregator

Aggregate Solar components into groups based on their power nodes.

Aggregation steps (self._aggregate):

  1. Group components based on their power nodes (self._group_by_power_node).
  2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
    • Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
    • Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity is supported).
    • TODO: Add support for additional weighting methods (e.g. production instead of capacity).
    • Production is aggregated as the sum of production levels with weighted profiles.
  2a. Make the new aggregated component and delete the original components from the model data.
  3. Add mapping from detailed to aggregated components to self._aggregation_map.

Disaggregation steps (self._disaggregate):

  1. Restore original components from self._original_data. NB! Changes to aggregated components are lost, except for results.
  2. Distribute production from aggregated components back to the original components:
    • Results are weighted based on the weighting method (currently only max_capacity is supported).
  3. Delete aggregated components from the model.

See Aggregator for general design notes and rules to follow when using Aggregators.

Attributes:

_data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
_scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
_grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components (agg to detailed).

Parent Attributes (see framcore.aggregators.Aggregator):

_is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
_original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
_aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).
Source code in framcore/aggregators/WindSolarAggregator.py
class SolarAggregator(_WindSolarAggregator):
    """
    Aggregate Solar components into groups based on their power nodes.

    Aggregation steps (self._aggregate):

    1. Group components based on their power nodes (self._group_by_power_node):
    2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
        - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
        - Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity is supported).
        - TODO: Add support for additional weighting methods (e.g. production instead of capacity).
        - Production is aggregated as the sum of production levels with weighted profiles.
    2a. Make the new aggregated component and delete the original components from the model data.
    3. Add mapping from detailed to aggregated components to self._aggregation_map.


    Disaggregation steps (self._disaggregate):

    1. Restore original components from self._original_data. NB! Changes to aggregated components are lost, except for results.
    2. Distribute production from aggregated components back to the original components:
        - Results are weighted based on the weighting method (currently only max_capacity is supported).
    3. Delete aggregated components from the model.


    See Aggregator for general design notes and rules to follow when using Aggregators.

    Attributes:
        _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
        _scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
        _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components.  agg to detailed


    Parent Attributes (see framcore.aggregators.Aggregator):

        _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
        _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
        _aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).

    """

    _component_type = Solar

WindAggregator

Bases: _WindSolarAggregator

Aggregate Wind components into groups based on their power nodes.

Aggregation steps (self._aggregate):

  1. Group components based on their power nodes (self._group_by_power_node).
  2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
    • Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
    • Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity is supported).
    • TODO: Add support for additional weighting methods (e.g. production instead of capacity).
    • Production is aggregated as the sum of production levels with weighted profiles.
  2a. Make the new aggregated component and delete the original components from the model data.
  3. Add mapping from detailed to aggregated components to self._aggregation_map.

Disaggregation steps (self._disaggregate):

  1. Restore original components from self._original_data. NB! Changes to aggregated components are lost, except for results.
  2. Distribute production from aggregated components back to the original components:
    • Results are weighted based on the weighting method (currently only max_capacity is supported).
  3. Delete aggregated components from the model.

See Aggregator for general design notes and rules to follow when using Aggregators.

Attributes:

_data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
_scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
_grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components (agg to detailed).

Parent Attributes (see framcore.aggregators.Aggregator):

_is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
_original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
_aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).
Source code in framcore/aggregators/WindSolarAggregator.py
class WindAggregator(_WindSolarAggregator):
    """
    Aggregate Wind components into groups based on their power nodes.

    Aggregation steps (self._aggregate):

    1. Group components based on their power nodes (self._group_by_power_node):
    2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
        - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
        - Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity is supported).
        - TODO: Add support for additional weighting methods (e.g. production instead of capacity).
        - Production is aggregated as the sum of production levels with weighted profiles.
    2a. Make the new aggregated component and delete the original components from the model data.
    3. Add mapping from detailed to aggregated components to self._aggregation_map.


    Disaggregation steps (self._disaggregate):

    1. Restore original components from self._original_data. NB! Changes to aggregated components are lost, except for results.
    2. Distribute production from aggregated components back to the original components:
        - Results are weighted based on the weighting method (currently only max_capacity is supported).
    3. Delete aggregated components from the model.


    See Aggregator for general design notes and rules to follow when using Aggregators.

    Attributes:
        _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
        _scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
        _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components.  agg to detailed


    Parent Attributes (see framcore.aggregators.Aggregator):

        _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
        _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
        _aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).

    """

    _component_type = Wind
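
A usage sketch (constructor arguments come from the shared _WindSolarAggregator base and are not documented here, so they are elided; model is an existing Model):

wind_agg = WindAggregator(...)  # construction elided; see _WindSolarAggregator
wind_agg.aggregate(model)       # one aggregated Wind component per power node
model.disaggregate()            # restores the detailed components (LIFO across all aggregators)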

Aggregator

Aggregator

Bases: Base, ABC

Aggregator interface class.

Aggregators handle aggregation and disaggregation of Components.

  • The general approach for aggregation is to group Components, aggregate Components in the same group to (a) new Component(s), delete the detailed Components, and add the mapping to self._aggregation_map.
  • The general approach for disaggregation is to restore the detailed Components, move results from aggregated Components to detailed Components, and delete the aggregated Components.

Concrete Aggregators must implement the abstract methods _aggregate() and _disaggregate().

Some rules for using Aggregators:

  1. Disaggregate can only be called after aggregate has been called.
  2. Calling aggregate twice is not allowed. disaggregate must be called before aggregate can be called again.
  3. Aggregators are stored in Model when aggregate is called. Disaggregate by calling Model.disaggregate(), which will disaggregate all Aggregators in LIFO order.
  4. At the moment we allow changes to the aggregated Components, which are ignored during disaggregation. TODO: Handle this.
  5. It is recommended to use the same Aggregator type only once on the same components of a Model. To go from one aggregation level to another, it is better to call Model.disaggregate first and then aggregate again. This keeps the logic simple and avoids complex expressions.

Some design notes:

  • Levels and profiles are aggregated separately and then combined into attributes.
  • We have chosen to eagerly evaluate weights for aggregation (weighted averages) and disaggregation of levels and profiles. This approach supports any form of aggregation by varying the weights, and complex weights can be created by eagerly evaluating expressions and using the result to compute those weights.
  • This is a balance between eagerly evaluating everything and setting up complex expressions. Eagerly evaluating everything would require setting up new TimeVectors after evaluation, which is not ideal, while setting up complex expressions gives expressions that are harder to work with and slower to query.
  • This trade-off simplifies adding logic that recognises whether result expressions come from aggregations or disaggregations. When aggregating or disaggregating these, we can go back to the original results rather than setting up complex expressions that, for example, aggregate the disaggregated results.

Source code in framcore/aggregators/Aggregator.py
class Aggregator(Base, ABC):
    """
    Aggregator interface class.

    Aggregators handle aggregation and disaggregation of Components.
    - The general approach for aggregation is to group Components, aggregate Components in the same group to (a) new Component(s),
    delete the detailed Components, and add the mapping to self._aggregation_map.
    - The general approach for disaggregation is to restore the detailed Components, move results from aggregated
    Components to detailed Components, and delete the aggregated Components.

    Concrete Aggregators must implement the abstract methods _aggregate() and _disaggregate().

    Some rules for using Aggregators:
    1. Disaggregate can only be called after aggregate has been called.
    2. Not allowed to call aggregate twice. Must call disaggregate before aggregate can be called again.
    3. Aggregators are stored in Model when aggregate is called. Disaggregate by calling Model.disaggregate(),
         which will disaggregate all Aggregators in LIFO order.
    4. At the moment we allow changes to the aggregated Components, which are ignored during disaggregation. TODO: Handle this
    5. It is recommended to only use the same Aggregator type once on the same components of a Model.
        If you want to go from one aggregation level to another, it is better to use Model.disaggregate first and then aggregate again.
        This is to keep the logic simple and avoid complex expressions.

    Some design notes:
    - Levels and profiles are aggregated separately and then combined into attributes.
    - We have chosen to eagerly evaluate weights for aggregation (weighted averages) and disaggregation of levels and profiles.
        This approach supports any form of aggregation by varying the weights, and complex weights can be created by eagerly evaluating
        expressions and using the result to compute those weights.
    - This is a balance between eagerly evaluating everything and setting up complex expressions.
        Eagerly evaluating everything would require setting up new TimeVectors after evaluation, which is not ideal.
        While setting up complex expressions gives expressions that are harder to work with and slower to query from.
    - This trade-off simplifies adding logic that recognises if result expressions come from aggregations or disaggregations.
        When aggregating or disaggregating these, we can go back to the original results rather than setting up complex expressions
        that, for example, aggregate the disaggregated results.

    """

    def __init__(self) -> None:
        """Initialize the Aggregator with default state for aggregation tracking and data storage."""
        self._is_last_call_aggregate = None
        self._original_data: dict[str, Component | TimeVector | Curve | Expr] | None = None
        self._aggregation_map: dict[str, set[str]] | None = None

    def aggregate(self, model: Model) -> None:
        """Aggregate model. Keep original data in case disaggregate is called."""
        self._check_type(model, Model)

        if self._is_last_call_aggregate is True:
            message = "Will overwrite existing aggregation."
            self.send_warning_event(message)

        self._original_data = deepcopy(model.get_data())
        self._aggregate(model)
        self._is_last_call_aggregate = True
        if self in model._aggregators:  # noqa: SLF001
            message = f"{model} has already been aggregated with {self}. Cannot perform the same Aggregation more than once on a Model object."
            raise ValueError(message)

        # transfer_unambigous_memberships to aggregated components to support further aggregation
        mapping = self.get_aggregation_map()
        reversed_mapping = defaultdict(set)
        new_data = model.get_data()
        for member_id, group_ids in mapping.items():
            self._check_type(group_ids, set)
            for group_id in group_ids:
                self._check_type(group_id, str)
                member_component = self._original_data[member_id]
                group_component = new_data[group_id]
                reversed_mapping[group_component].add(member_component)
        for group_component, member_components in reversed_mapping.items():
            transfer_unambigous_memberships(group_component, member_components)

        model._aggregators.append(deepcopy(self))  # noqa: SLF001

    def disaggregate(self, model: Model) -> None:
        """Disaggregate model back to pre-aggregate form. Move results into the disaggregated objects."""
        self._check_type(model, Model)
        self._check_is_aggregated()
        self._disaggregate(model, self._original_data)
        self._is_last_call_aggregate = False
        self._original_data = None
        self._aggregation_map = None

    def get_aggregation_map(self) -> dict[str, set[str]]:
        """
        Return dictionary mapping from disaggregated to aggregated Component IDs.

        The mapping should tell you which of the original Components were aggregated into which new Components.
        Components which are left as is should not be in the mapping.
        Components which are deleted without being aggregated are mapped to an empty set.
        """
        if self._aggregation_map is None:
            message = f"{self} has not yet performed an aggregation or the aggregation map was not created during aggregation."
            raise ValueError(message)
        return self._aggregation_map

    @abstractmethod
    def _aggregate(self, model: Model) -> None:
        """Modify model inplace. Replace components with aggregated components according to some method."""
        pass

    @abstractmethod
    def _disaggregate(
        self,
        model: Model,
        original_data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        """
        Modify model inplace. Restore from aggregated to original components.

        Transfer any results from aggregated components to restored (disaggregated) components.

        Implementers should document and handle changes in model instance between aggregation and disaggregation.
        E.g. what to do if an aggregated component has been deleted prior to disaggregate call.
        """
        pass

    def _check_is_aggregated(self) -> None:
        if self._is_last_call_aggregate in [False, None]:
            message = "Not aggregated. Must call aggregate and disaggregate in pairs."
            raise RuntimeError(message)
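
A lifecycle sketch (MyAggregator is a hypothetical concrete subclass implementing _aggregate and _disaggregate; model is an existing Model):

agg = MyAggregator()
agg.aggregate(model)                 # deep-copies the detailed data, then stores a copy of agg on the model
mapping = agg.get_aggregation_map()  # detailed component id -> set of aggregated component ids
model.disaggregate()                 # undoes every stored Aggregator in LIFO order
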
__init__() -> None

Initialize the Aggregator with default state for aggregation tracking and data storage.

Source code in framcore/aggregators/Aggregator.py
def __init__(self) -> None:
    """Initialize the Aggregator with default state for aggregation tracking and data storage."""
    self._is_last_call_aggregate = None
    self._original_data: dict[str, Component | TimeVector | Curve | Expr] | None = None
    self._aggregation_map: dict[str, set[str]] | None = None
aggregate(model: Model) -> None

Aggregate model. Keep original data in case disaggregate is called.

Source code in framcore/aggregators/Aggregator.py
def aggregate(self, model: Model) -> None:
    """Aggregate model. Keep original data in case disaggregate is called."""
    self._check_type(model, Model)

    if self._is_last_call_aggregate is True:
        message = "Will overwrite existing aggregation."
        self.send_warning_event(message)

    self._original_data = deepcopy(model.get_data())
    self._aggregate(model)
    self._is_last_call_aggregate = True
    if self in model._aggregators:  # noqa: SLF001
        message = f"{model} has already been aggregated with {self}. Cannot perform the same Aggregation more than once on a Model object."
        raise ValueError(message)

    # transfer_unambigous_memberships to aggregated components to support further aggregation
    mapping = self.get_aggregation_map()
    reversed_mapping = defaultdict(set)
    new_data = model.get_data()
    for member_id, group_ids in mapping.items():
        self._check_type(group_ids, set)
        for group_id in group_ids:
            self._check_type(group_id, str)
            member_component = self._original_data[member_id]
            group_component = new_data[group_id]
            reversed_mapping[group_component].add(member_component)
    for group_component, member_components in reversed_mapping.items():
        transfer_unambigous_memberships(group_component, member_components)

    model._aggregators.append(deepcopy(self))  # noqa: SLF001
disaggregate(model: Model) -> None

Disaggregate model back to pre-aggregate form. Move results into the disaggregated objects.

Source code in framcore/aggregators/Aggregator.py
def disaggregate(self, model: Model) -> None:
    """Disaggregate model back to pre-aggregate form. Move results into the disaggregated objects."""
    self._check_type(model, Model)
    self._check_is_aggregated()
    self._disaggregate(model, self._original_data)
    self._is_last_call_aggregate = False
    self._original_data = None
    self._aggregation_map = None
get_aggregation_map() -> dict[str, set[str]]

Return dictionary mapping from disaggregated to aggregated Component IDs.

The mapping should tell you which of the original Components were aggregated into which new Components. Components which are left as is should not be in the mapping. Components which are deleted without being aggregated are mapped to an empty set.

Source code in framcore/aggregators/Aggregator.py
def get_aggregation_map(self) -> dict[str, set[str]]:
    """
    Return dictionary mapping from disaggregated to aggregated Component IDs.

    The mapping should tell you which of the original Components were aggregated into which new Components.
    Components which are left as is should not be in the mapping.
    Components which are deleted without being aggregated are mapped to an empty set.
    """
    if self._aggregation_map is None:
        message = f"{self} has not yet performed an aggregation or the aggregation map was not created during aggregation."
        raise ValueError(message)
    return self._aggregation_map
transfer_unambigous_memberships(group_component: Component, member_components: Iterable[Component]) -> None

Transfer unambiguous membership metadata from member components to a group component.

Parameters

group_component : Component
    The component to which unambiguous membership metadata will be transferred.
member_components : Iterable[Component]
    The components from which membership metadata is collected.

Notes

Only metadata keys with a single unique Member value among all member components are transferred. Existing metadata on the group component is not overwritten.

Source code in framcore/aggregators/Aggregator.py
def transfer_unambigous_memberships(group_component: Component, member_components: Iterable[Component]) -> None:
    """
    Transfer unambiguous membership metadata from member components to a group component.

    Parameters
    ----------
    group_component : Component
        The component to which unambiguous membership metadata will be transferred.
    member_components : Iterable[Component]
        The components from which membership metadata is collected.

    Notes
    -----
    Only metadata keys with a single unique Member value among all member components are transferred.
    Existing metadata on the group component is not overwritten.

    """
    d = defaultdict(set)
    for member in member_components:
        for key in member.get_meta_keys():
            value = member.get_meta(key)
            if not isinstance(value, Member):
                continue
            d[key].add(value)
    for key, value_set in d.items():
        test_value = group_component.get_meta(key)
        if test_value is not None:
            # don't overwrite if already set
            continue
        if len(value_set) != 1:
            # ambiguous membership
            continue
        value = next(iter(value_set))
        group_component.add_meta(key, value)
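
A sketch (the component ids and the "price_area" metadata key are hypothetical; Member values come from the components' metadata):

members = [model.get_data()[i] for i in ("plant_a", "plant_b")]
transfer_unambigous_memberships(group_component, members)
# "price_area" is copied to group_component only if all members agree on a single Member value
value = group_component.get_meta("price_area")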

HydroAggregator

HydroAggregator

Bases: Aggregator

Aggregate HydroModules, based on their regulation factor, into two equivalent modules per area: one regulated and one unregulated.

Aggregation steps (self._aggregate):

  1. Group modules based on their power nodes (self._group_modules_by_power_node)
    • Modules with generators are grouped based on their power nodes. You can choose to only group modules for certain power nodes by giving self._power_node_members alone or together with self._metakey_power_node. NB! A watershed that crosses power nodes should not be aggregated by two different HydroAggregators, as the aggregator removes all connected modules from the model after the first aggregation.
    • Reservoirs are assigned to the power node which has the highest cumulative energy equivalent downstream of the reservoir. This is because JulES currently only supports one-to-one mapping of detailed and aggregated reservoirs.
    • Reservoirs without generators downstream are ignored in the aggregation.
  2. Group area modules into regulated and unregulated based on regulation factor (self._group_modules_by_regulation_factor)
    • Regulation factor = upstream reservoir capacity / yearly upstream inflow. Modules with generators that have regulation factor <= self._ror_threshold are grouped into unregulated run-of-river modules; the other modules with generators are grouped into regulated reservoir modules.
    • All reservoirs are assigned to the regulated group.
    • Generators without upstream inflows are ignored in the aggregation.
  3. Make aggregated hydro module for each group (self._aggregate_groups)
    • The resulting HydroModule has a generator with energy equivalent of 1 kWh/m3. The inflow, discharge capacity and reservoir capacity are calculated based on energy and transformed back to water using this energy equivalent.
    • Generation capacity (release_cap*energy_equivalent/agg_energy_equivalent, capacity of hydraulic couplings not double counted). The release capacity profile is ignored unless self._release_capacity_profile is given, in which case this profile is used for all aggregated modules.
    • Energy reservoir capacity (res_cap*energy_equivalent_downstream/agg_energy_equivalent)
    • Gross energy inflow (inflow_up*energy_equivalent/agg_energy_equivalent) - TODO: Add possibility to adjust inflow to closer represent net inflow
    • Inflow profiles weighted based on gross energy inflow (inflow_up_per_profile*energy_equivalent) - calc from core model using self._map_topology()
    • TODO: Other details like pumps and environmental constraints are currently ignored in the aggregation.
  3a. Aggregate results if all modules in group have results.
    • Production is the sum of production levels with weighted profiles
    • Reservoir filling is the sum of energy reservoir filling levels (filling*energy_equivalent_downstream/agg_energy_equivalent) with weighted profiles
    • TODO: Water values, spill, bypass and pumping results are currently ignored in the aggregation.
    • TODO: Add possibility to skip results aggregation.
  3b. Make new hydro module and delete original modules from model data.
  4. Add mapping from detailed to aggregated modules to self._aggregation_map.

Disaggregation steps (self._disaggregate):

  1. Restore original modules from self._original_data. NB! Changes to aggregated modules are lost, except for results (TODO).
  2. Move production and filling results from aggregated modules to detailed modules, weighted based on production capacity and reservoir capacity.
    • TODO: Water values, spill, bypass and pumping results are currently ignored in the disaggregation.
  3. Delete aggregated modules.

NB! A watershed that crosses power nodes should not be aggregated by two different HydroAggregators, as the aggregator removes all connected modules from the model after the first aggregation. Reservoirs are also assigned to the power node which has the highest cumulative energy equivalent, so this aggregator does not work well for reservoirs that are upstream of multiple power nodes.

See Aggregator for general design notes and rules to follow when using Aggregators.

Attributes:

_metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
_data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation.
_scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation.
_grouped_modules (dict[str, set[str]]): Mapping of aggregated modules to detailed modules (agg to detailed).
_grouped_reservoirs (dict[str, set[str]]): Mapping of aggregated reservoirs to detailed reservoirs (agg to detailed).
_ror_threshold (float): Regulation factor (upstream reservoir capacity / yearly upstream inflow) threshold for run-of-river classification. Default is 0.5.
_metakey_power_node (str | None): If given, check metadata of power nodes to decide if they should be grouped.
_power_node_members (list[str] | None): If given along with _metakey_power_node, group modules only for power nodes with these metadata values. If given without _metakey_power_node, only group power nodes in this list.
_release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules' release capacities.

Parent Attributes (see framcore.aggregators.Aggregator):

_is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
_original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
_aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).
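
A construction sketch (data_dim, scen_dim and the metadata key are assumptions; building the time indexes is not shown here):

agg = HydroAggregator(
    metakey_energy_eq_downstream="energy_eq_downstream",  # e.g. set via framcore.utils.set_global_energy_equivalent
    data_dim=data_dim,  # a SinglePeriodTimeIndex
    scen_dim=scen_dim,  # a FixedFrequencyTimeIndex
    ror_threshold=0.5,  # modules at or below this regulation factor become run-of-river
)
agg.aggregate(model)    # one regulated and one unregulated module per power node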
Source code in framcore/aggregators/HydroAggregator.py
class HydroAggregator(Aggregator):
    """
    Aggregate HydroModules into two equivalent modules based on the regulation factor, into one regulated and one unregulated module per area.

    Aggregation steps (self._aggregate):

    1. Group modules based on their power nodes (self._group_modules_by_power_node)
        - Modules with generators are grouped based on their power nodes. You can choose to only group modules for certain power nodes by giving
        self._power_node_members alone or together with self._metakey_power_node. NB! A watershed that crosses power nodes should not be aggregated by two
        different HydroAggregators, as the aggregator removes all connected modules from the model after the first aggregation.
        - Reservoirs are assigned to the power node which has the highest cumulative energy equivalent downstream of the reservoir. This is because JulES
        currently only supports one-to-one mapping of detailed and aggregated reservoirs.
        - Reservoirs without generators downstream are ignored in the aggregation.
    2. Group area modules into regulated and unregulated based on regulation factor (self._group_modules_by_regulation_factor)
        - Regulation factor = upstream reservoir capacity / yearly upstream inflow. Modules with generators that have regulation factor <= self._ror_threshold
        are grouped into unregulated run-of-river modules, the other modules with generators are grouped into regulated reservoir modules.
        - All reservoirs are assigned to the regulated group.
        - Generators without upstream inflows are ignored in the aggregation.
    3. Make aggregated hydro module for each group (self._aggregate_groups)
        - The resulting HydroModule has a generator with energy equivalent of 1 kWh/m3. The inflow, discharge capacity and reservoir capacity
        are calculated based on energy and transformed back to water using this energy equivalent.
        - Generation capacity (release_cap*energy_equivalent/agg_energy_equivalent, capacity of hydraulic couplings not double counted). The release capacity
        profile is ignored unless self._release_capacity_profile is given, in which case this profile is used for all aggregated modules.
        - Energy reservoir capacity (res_cap*energy_equivalent_downstream/agg_energy_equivalent)
        - Gross energy inflow (inflow_up*energy_equivalent/agg_energy_equivalent) - TODO: Add possibility to adjust inflow to closer represent net inflow
        - Inflow profiles weighted based on gross energy inflow (inflow_up_per_profile*energy_equivalent) - calc from core model using self._map_topology()
        - TODO: Other details like pumps and environmental constraints are currently ignored in the aggregation.
    3a. Aggregate results if all modules in group have results.
        - Production is the sum of production levels with weighted profiles
        - Reservoir filling is the sum of energy reservoir filling levels (filling*energy_equivalent_downstream/agg_energy_equivalent) with weighted profiles
        - TODO: Water values, spill, bypass and pumping results are currently ignored in the aggregation.
        - TODO: Add possibility to skip results aggregation.
    3b. Make new hydro module and delete original modules from model data.
    4. Add mapping from detailed to aggregated modules to self._aggregation_map.


    Disaggregation steps (self._disaggregate):

    1. Restore original modules from self._original_data. NB! Changes to aggregated modules are lost except for results (TODO)
    2. Move production and filling results from aggregated modules to detailed modules, weighted based on production capacity and reservoir capacity.
        - TODO: Water values, spill, bypass and pumping results are currently ignored in the disaggregation.
    3. Delete aggregated modules.

    NB! A watershed that crosses power nodes should not be aggregated by two different HydroAggregators, as the aggregator removes all connected modules
    from the model after the first aggregation. Reservoirs are also assigned to the power node which has the highest cumulative energy equivalent, so
    this aggregator does not work well for reservoirs that are upstream of multiple power nodes.

    See Aggregator for general design notes and rules to follow when using Aggregators.

    Attributes:
        _metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
        _data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation.
        _scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation.
        _grouped_modules (dict[str, list[str]]): Mapping of aggregated modules to detailed modules (agg to detailed).
        _grouped_reservoirs (dict[str, list[str]]): Mapping of aggregated reservoirs to detailed reservoirs (agg to detailed).
        _ror_threshold (float): Regulation factor (upstream reservoir capacity / yearly upstream inflow) threshold for run-of-river classification.
            Default is 0.5.
        _metakey_power_node (str | None): If given, check the metadata of power nodes to determine whether they should be grouped.
        _power_node_members (list[str] | None): If given along with metakey_power_node, group modules only for power nodes with these metadata values.
            If given without metakey_power_node, only group power nodes in this list.
        _release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules' release capacities.

    Parent Attributes (see framcore.aggregators.Aggregator):

        _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
        _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
        _aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).

    """

    def __init__(
        self,
        metakey_energy_eq_downstream: str,
        data_dim: SinglePeriodTimeIndex,
        scen_dim: FixedFrequencyTimeIndex,
        ror_threshold: float = 0.5,
        metakey_power_node: str | None = None,
        power_node_members: list[str] | None = None,
        release_capacity_profile: TimeVector | None = None,
    ) -> None:
        """
        Initialize HydroAggregator.

        Args:
            metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
                Can be calculated with framcore.utils.set_global_energy_equivalent
            data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation.
            scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation.
            ror_threshold (float): Regulation factor (upstream reservoir capacity / yearly upstream inflow) threshold for run-of-river classification.
                Default is 0.5.
            metakey_power_node (str | None): If given, check the metadata of power nodes to determine whether they should be grouped.
            power_node_members (list[str] | None): If given along with metakey_power_node, group modules only for power nodes with these metadata values.
                If given without metakey_power_node, only group power nodes in this list.
            release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules' release capacities.

        """
        super().__init__()
        self._check_type(metakey_energy_eq_downstream, str)
        self._check_type(ror_threshold, float)
        self._check_type(data_dim, SinglePeriodTimeIndex)
        self._check_type(scen_dim, FixedFrequencyTimeIndex)
        self._check_type(metakey_power_node, (str, type(None)))
        self._check_type(power_node_members, (list, type(None)))
        if ror_threshold < 0:
            msg = f"ror_threshold must be non-negative, got {ror_threshold}."
            raise ValueError(msg)
        if metakey_power_node is not None and not power_node_members:
            raise ValueError("If metakey_power_node is given, power_node_members must also be given.")

        self._metakey_energy_eq_downstream = metakey_energy_eq_downstream
        self._ror_threshold = ror_threshold
        self._metakey_power_node = metakey_power_node
        self._power_node_members = power_node_members
        self._release_capacity_profile = release_capacity_profile

        self._data_dim = data_dim
        self._scen_dim = scen_dim

        self._grouped_modules: dict[str, list[str]] = defaultdict(list)  # agg to detailed
        self._grouped_reservoirs: dict[str, list[str]] = defaultdict(list)  # agg to detailed

    def _aggregate(self, model: Model) -> None:  # noqa: C901, PLR0915
        t0 = time()
        data = model.get_data()

        t = time()
        upstream_topology = self._map_upstream_topology(data)
        self.send_debug_event(f"_map_upstream_topology time: {round(time() - t, 3)} seconds")

        t = time()
        generator_module_groups, reservoir_module_groups = self._group_modules_by_power_node(model, upstream_topology)
        self.send_debug_event(f"_group_modules_by_power_node time: {round(time() - t, 3)} seconds")

        t = time()
        self._group_modules_by_regulation_factor(model, generator_module_groups, reservoir_module_groups, upstream_topology)
        self.send_debug_event(f"_group_modules_by_regulation_factor time: {round(time() - t, 3)} seconds")

        t = time()
        ignore_production_capacity_modules = self._ignore_production_capacity_modules(model)
        self.send_debug_event(f"_ignore_production_capacity_modules time: {round(time() - t, 3)} seconds")

        t = time()
        self._aggregate_groups(model, upstream_topology, ignore_production_capacity_modules)
        self.send_debug_event(f"_aggregate_groups time: {round(time() - t, 3)} seconds")

        # Add reservoir modules to aggregation map
        t = time()
        self._aggregation_map = {dd: set([a]) for a, d in self._grouped_reservoirs.items() for dd in d}
        self.send_debug_event(f"add reservoir modules to _aggregation_map time: {round(time() - t, 3)} seconds")

        # Add generator modules to aggregation map
        t = time()
        for a, d in self._grouped_modules.items():
            for dd in d:
                if dd not in self._aggregation_map:
                    self._aggregation_map[dd] = set([a])
                elif not (data[dd].get_reservoir() and data[a].get_reservoir()):  # reservoir modules can only be mapped to one aggregated reservoir module
                    self._aggregation_map[dd].add(a)
        self.send_debug_event(f"add generator modules to _aggregation_map time: {round(time() - t, 3)} seconds")

        # Delete detailed modules and add remaining modules to aggregation map
        t = time()
        upstream_topology_with_bypass_spill = self._map_upstream_topology(data, include_bypass_spill=True)
        aggregated_hydromodules = {module for modules in generator_module_groups.values() for module in modules}  # add generator modules
        for grouped_modules in generator_module_groups.values():  # add upstream modules
            for grouped_module in grouped_modules:
                upstream = upstream_topology_with_bypass_spill[grouped_module]
                aggregated_hydromodules.update(upstream)
        for downstream_module in upstream_topology_with_bypass_spill:  # add downstream modules
            for upstream in upstream_topology_with_bypass_spill[downstream_module]:
                if upstream in aggregated_hydromodules:
                    aggregated_hydromodules.add(downstream_module)
                    break
        other_modules = [key for key, component in data.items() if isinstance(component, HydroModule) and key not in aggregated_hydromodules]
        other_generator_modules = [m for m in other_modules if data[m].get_generator()]
        for m in other_modules:  # remove other modules that do not interact with generator modules
            interacts = False
            for upstream in upstream_topology_with_bypass_spill[m]:
                if upstream in other_generator_modules:
                    interacts = True
                    break
            for gm in other_generator_modules:
                if m in upstream_topology_with_bypass_spill[gm]:
                    interacts = True
                    break
            if not interacts:
                aggregated_hydromodules.add(m)
                message = f"Module {m} is not upstream or downstream of any generator module, adding to aggregation as it does not interact with power system."
                self.send_warning_event(message)

        for m_key in aggregated_hydromodules:
            if m_key not in self._grouped_modules:
                if not (m_key in self._aggregation_map or m_key in self._grouped_reservoirs):
                    self._aggregation_map[m_key] = set()
                del model.get_data()[m_key]
        self.send_debug_event(f"delete detailed modules time: {round(time() - t, 3)} seconds")

        self.send_debug_event(f"total _aggregate: {round(time() - t0, 3)} seconds")

    def _map_upstream_topology(  # noqa: C901
        self,
        data: dict[str, Component | TimeVector | Curve | Expr],
        include_bypass_spill: bool = False,
    ) -> dict[str, list[str]]:
        """Map HydroModules topology. Return dict[module, List[upstream modules + itself]]."""
        module_names = [key for key, component in data.items() if isinstance(component, HydroModule)]

        # Direct upstream mapping (including transport pumps)
        direct_upstream = {module_name: [] for module_name in module_names}
        for module_name in module_names:
            release_to = data[module_name].get_release_to()
            pump = data[module_name].get_pump()
            if pump and pump.get_from_module() == module_name:  # transport pump
                pump_to = pump.get_to_module()
                direct_upstream[pump_to].append(module_name)
            elif release_to:  # other
                try:
                    direct_upstream[release_to].append(module_name)
                except KeyError as e:
                    message = f"Reference to {release_to} does not exist in Model. Referenced by {module_name} Module."
                    raise KeyError(message) from e
            if include_bypass_spill:
                bypass = data[module_name].get_bypass()
                if bypass:
                    bypass_to = bypass.get_to_module()
                    if bypass_to:
                        try:
                            direct_upstream[bypass_to].append(module_name)
                        except KeyError as e:
                            message = f"Reference to {bypass_to} does not exist in Model. Referenced by {module_name} Module."
                            raise KeyError(message) from e
                spill_to = data[module_name].get_spill_to()
                if spill_to:
                    try:
                        direct_upstream[spill_to].append(module_name)
                    except KeyError as e:
                        message = f"Reference to {spill_to} does not exist in Model. Referenced by {module_name} Module."
                        raise KeyError(message) from e

        # Recursive upstream function
        def find_all_upstream(
            module_name: str,
            visited: set[str],
            data: dict[str, Component | TimeVector | Curve | Expr],
        ) -> set[str]:
            if module_name in visited:
                return set()  # Avoid circular dependencies
            visited.add(module_name)
            upstream_names = direct_upstream[module_name]
            all_upstream = set(upstream_names)
            for upstream in upstream_names:
                all_upstream.update(find_all_upstream(upstream, visited, data))
            all_upstream.add(module_name)  # include itself
            return all_upstream

        # Full upstream topology
        topology = {}
        for module_name in module_names:
            topology[module_name] = list(find_all_upstream(module_name, set(), data))

        return topology

    def _build_upstream_reservoir_and_inflow_exprs(
        self,
        data: dict[str, Component | TimeVector | Curve | Expr],
        upstream_topology: dict[str, list[str]],
    ) -> tuple[dict[str, Expr], dict[str, Expr]]:
        """Build upstream inflow and reservoir expressions for each generator module."""
        upstream_inflow_exprs = dict[str, Expr]()
        upstream_reservoir_exprs = dict[str, Expr]()
        generator_modules = [key for key, module in data.items() if isinstance(module, HydroModule) and module.get_generator()]
        for m in generator_modules:
            inflow_expr = 0
            reservoir_expr = 0
            for mm in upstream_topology[m]:
                inflow = data[mm].get_inflow()
                if inflow:
                    inflow_expr += inflow.get_level()
                reservoir = data[mm].get_reservoir()
                if reservoir:
                    reservoir_expr += reservoir.get_capacity().get_level()

            upstream_inflow_exprs[m] = inflow_expr
            upstream_reservoir_exprs[m] = reservoir_expr

        return upstream_inflow_exprs, upstream_reservoir_exprs

    def _group_modules_by_power_node(self, model: Model, upstream_topology: dict[str, list[str]]) -> tuple[dict[str, list[str]], dict[str, list[str]]]:  # noqa: C901
        """Group modules by power node. Return generator_module_groups, reservoir_module_groups."""
        data = model.get_data()
        generator_module_groups = defaultdict(list)  # power_node -> generator_modules
        reservoir_mapping = defaultdict(set)  # reservoir -> power_node(s)
        for key, component in data.items():
            if isinstance(component, HydroModule) and component.get_generator():
                power_node = component.get_generator().get_power_node()
                if self._metakey_power_node is None and self._power_node_members and power_node not in self._power_node_members:
                    continue
                if self._metakey_power_node is not None:  # only group modules for nodes in self._power_node_members
                    power_node_component = data[power_node]
                    node_meta = power_node_component.get_meta(self._metakey_power_node)
                    if node_meta is None:
                        message = f"Module {key} does not have metadata '{self._metakey_power_node}' for node mapping."
                        raise ValueError(message)
                    node_meta_value = node_meta.get_value()
                    if node_meta_value not in self._power_node_members:
                        continue

                generator_module_groups[power_node].append(key)

                for m in upstream_topology[key]:
                    if data[m].get_reservoir():
                        reservoir_mapping[m].add(power_node)

        # Group reservoirs to the power node with the highest cumulative energy equivalent downstream from the reservoir
        reservoir_module_groups: dict[str, list[str]] = defaultdict(list)
        for res_name in reservoir_mapping:
            power_nodes = reservoir_mapping[res_name]
            if len(power_nodes) > 1:
                highest_power_node = max(
                    power_nodes,
                    key=lambda pn: get_level_value(
                        get_hydro_downstream_energy_equivalent(data, res_name, pn),
                        db=model,
                        unit="kWh/m3",
                        data_dim=self._data_dim,
                        scen_dim=self._scen_dim,
                        is_max=False,
                    ),
                )
                reservoir_module_groups[highest_power_node].append(res_name)
            else:
                reservoir_module_groups[next(iter(power_nodes))].append(res_name)

        return generator_module_groups, reservoir_module_groups

    def _group_modules_by_regulation_factor(
        self,
        model: Model,
        generator_module_groups: dict[str, list[str]],
        reservoir_module_groups: dict[str, list[str]],
        upstream_topology: dict[str, list[str]],
    ) -> None:
        """
        Group modules into regulated and unregulated based on regulation factor and self._ror_threshold.

        Regulation factor = upstream reservoir capacity / yearly upstream inflow.
        Run-of-river = regulation factor <= self._ror_threshold.
        Regulated = regulation factor > self._ror_threshold.
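
        Example: an upstream reservoir capacity of 100 Mm3 with a yearly upstream inflow of 400 Mm3/year
        gives a regulation factor of 0.25, which is below the default threshold of 0.5, so the module is
        classified as run-of-river.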
        """
        data = model.get_data()
        upstream_inflow_exprs, upstream_reservoir_exprs = self._build_upstream_reservoir_and_inflow_exprs(data, upstream_topology)

        for area, member_modules in generator_module_groups.items():
            ror_name = area + "_hydro_RoR"
            reg_name = area + "_hydro_reservoir"

            ror_modules = []
            reservoir_modules = []

            for m_key in member_modules:
                if upstream_inflow_exprs[m_key] != 0:
                    upstream_inflow = get_level_value(
                        upstream_inflow_exprs[m_key],
                        db=model,
                        unit="Mm3/year",
                        data_dim=self._data_dim,
                        scen_dim=self._scen_dim,
                        is_max=False,
                    )
                else:
                    continue  # Skip generator modules with no upstream inflow
                if upstream_reservoir_exprs[m_key] != 0:
                    upstream_reservoir = get_level_value(
                        upstream_reservoir_exprs[m_key],
                        db=model,
                        unit="Mm3",
                        data_dim=self._data_dim,
                        scen_dim=self._scen_dim,
                        is_max=False,
                    )
                else:
                    upstream_reservoir = 0
                regulation_factor = upstream_reservoir / upstream_inflow if upstream_inflow > 0 else 0

                if regulation_factor <= self._ror_threshold:
                    ror_modules.append(m_key)
                else:
                    reservoir_modules.append(m_key)

            if len(ror_modules) > 0:  # only make run-of-river group if there are any modules
                self._grouped_modules[ror_name] = ror_modules

            if len(reservoir_modules) > 0:  # only make reservoir group if there are any modules
                self._grouped_modules[reg_name] = reservoir_modules

            if len(reservoir_module_groups[area]) > 0 and len(reservoir_modules) > 0:  # add reservoirs to reg group
                self._grouped_reservoirs[reg_name] = reservoir_module_groups[area]
            elif len(reservoir_module_groups[area]) > 0:  # add reservoirs to ror group if no reg group
                self._grouped_reservoirs[ror_name] = reservoir_module_groups[area]
                message = f"{area} has no modules over ror_threshold ({self._ror_threshold}), so all reservoirs are put in RoR module."
                self.send_warning_event(message)

    def _ignore_production_capacity_modules(
        self,
        model: Model,
    ) -> list[str]:
        """
        Return list of module names whose production capacity should be ignored in aggregation, because of hydraulically coupled reservoirs.

        Ignore the lowest production capacity among modules that release into the same hydraulically coupled module.
        """
        ignore_production_capacity_modules = []
        data = model.get_data()
        module_names = [key for key, component in data.items() if isinstance(component, HydroModule)]

        for m in module_names:
            if data[m].get_hydraulic_coupling() != 0:
                under_hydraulic = [
                    (
                        mm,
                        get_level_value(
                            data[mm].get_generator().get_energy_equivalent().get_level() * data[mm].get_release_capacity().get_level(),
                            model,
                            "MW",
                            self._data_dim,
                            self._scen_dim,
                            is_max=False,
                        ),
                    )
                    for mm in module_names
                    if data[mm].get_release_to() == m
                ]
                assert len(under_hydraulic) > 1
                ignore_production_capacity_modules.append(min(under_hydraulic, key=lambda x: x[1])[0])

        return ignore_production_capacity_modules

    def _aggregate_groups(  # noqa: C901, PLR0915
        self,
        model: Model,
        upstream_topology: dict[str, list[str]],
        ignore_capacity: list[str],
    ) -> None:
        """Aggregate each group of modules into one HydroModule."""
        data = model.get_data()
        for new_id, module_names in self._grouped_modules.items():
            num_reservoirs = 0
            if new_id in self._grouped_reservoirs:
                num_reservoirs = len(self._grouped_reservoirs[new_id])
            self.send_info_event(f"{new_id} from {len(module_names)} generator modules and {num_reservoirs} reservoirs.")

            # Generator and production
            generator_module_names = [m for m in module_names if data[m].get_generator()]
            productions = [data[m].get_generator().get_production() for m in generator_module_names]
            sum_production = _aggregate_result_volumes(model, productions, "MW", self._data_dim, self._scen_dim, new_id, generator_module_names)

            generator = HydroGenerator(
                power_node=data[generator_module_names[0]].get_generator().get_power_node(),
                energy_equivalent=Conversion(level=ConstantTimeVector(1.0, "kWh/m3", is_max_level=True)),
                production=sum_production,
            )
            energy_eq = generator.get_energy_equivalent().get_level()

            # Release capacity
            release_capacities = [data[m].get_release_capacity() for m in generator_module_names if m not in ignore_capacity]
            if self._release_capacity_profile:
                if any(rc.get_profile() is not None for rc in release_capacities):
                    message = f"Some release capacities in {new_id} have profiles, using provided profile for all."
                    self.send_warning_event(message)
                release_capacities = deepcopy(release_capacities)
                for rc in release_capacities:
                    rc.set_profile(self._release_capacity_profile)
            generator_energy_eqs = [data[m].get_generator().get_energy_equivalent() for m in generator_module_names if m not in ignore_capacity]
            release_capacity_levels = [rc.get_level() * ee.get_level() for rc, ee in zip(release_capacities, generator_energy_eqs, strict=True)]

            release_capacity_profile = None
            if any(rc.get_profile() for rc in release_capacities):
                one_profile_max = Expr(src=ConstantTimeVector(1.0, is_zero_one_profile=False), is_profile=True)
                weights = [get_level_value(rcl, model, "MW", self._data_dim, self._scen_dim, is_max=True) for rcl in release_capacity_levels]
                profiles = [rc.get_profile() if rc.get_profile() else one_profile_max for rc in release_capacities]
                release_capacity_profile = _aggregate_weighted_expressions(profiles, weights)
            release_capacity = MaxFlowVolume(level=sum(release_capacity_levels) / energy_eq, profile=release_capacity_profile)

            # Inflow level
            upstream_inflow_levels = defaultdict(list)
            for m in generator_module_names:
                for mm in upstream_topology[m]:
                    inflow = data[mm].get_inflow()
                    if inflow:
                        upstream_inflow_levels[m].append(inflow.get_level())
            inflow_level_energy = sum(
                sum(upstream_inflow_levels[m]) * data[m].get_generator().get_energy_equivalent().get_level()
                for m in generator_module_names
                if len(upstream_inflow_levels[m]) > 0
            )
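            # inflow_level_energy is in energy terms (water inflow times each module's kWh/m3 energy
            # equivalent); dividing by the aggregated 1 kWh/m3 equivalent maps it back to water.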
            inflow_level = inflow_level_energy / energy_eq

            # Inflow profile
            one_profile = Expr(src=ConstantTimeVector(1.0, is_zero_one_profile=False), is_profile=True)
            inflow_profile_to_energyinflow = defaultdict(list)
            inflow_level_to_value = dict()
            for m in generator_module_names:
                m_energy_eq = data[m].get_generator().get_energy_equivalent().get_level()
                m_energy_eq_value = get_level_value(
                    m_energy_eq,
                    db=model,
                    unit="kWh/m3",
                    data_dim=self._data_dim,
                    scen_dim=self._scen_dim,
                    is_max=False,
                )
                for upstream_module in upstream_topology[m]:
                    inflow = data[upstream_module].get_inflow()
                    if inflow:
                        if inflow not in inflow_level_to_value:
                            inflow_level_to_value[inflow] = get_level_value(
                                inflow.get_level(),
                                db=model,
                                unit="m3/s",
                                data_dim=self._data_dim,
                                scen_dim=self._scen_dim,
                                is_max=False,
                            )
                        upstream_energy_inflow = inflow_level_to_value[inflow] * m_energy_eq_value
                        upstream_profile = inflow.get_profile() if inflow.get_profile() else one_profile
                        inflow_profile_to_energyinflow[upstream_profile].append(upstream_energy_inflow)

            profile_weights = [sum(energyinflows) for energyinflows in inflow_profile_to_energyinflow.values()]
            inflow_profile = _aggregate_weighted_expressions(list(inflow_profile_to_energyinflow.keys()), profile_weights)
            inflow = AvgFlowVolume(level=inflow_level, profile=inflow_profile)

            # Reservoir capacity and filling
            if new_id in self._grouped_reservoirs and len(self._grouped_reservoirs[new_id]) > 0:
                reservoir_levels = [
                    data[m].get_reservoir().get_capacity().get_level() * data[m].get_meta(self._metakey_energy_eq_downstream).get_value()
                    for m in self._grouped_reservoirs[new_id]
                ]
                reservoir_level = sum(reservoir_levels) / energy_eq
                reservoir_capacity = StockVolume(level=reservoir_level)

                fillings = [data[m].get_reservoir().get_volume() for m in self._grouped_reservoirs[new_id]]
                energy_eq_downstreams = [data[m].get_meta(self._metakey_energy_eq_downstream).get_value() for m in self._grouped_reservoirs[new_id]]
                sum_filling = self._aggregate_fillings(fillings, energy_eq_downstreams, energy_eq, model, "GWh", new_id, self._grouped_reservoirs[new_id])
                reservoir = HydroReservoir(capacity=reservoir_capacity, volume=sum_filling)
            else:
                reservoir = None

            new_hydro = HydroModule(
                generator=generator,
                reservoir=reservoir,
                inflow=inflow,
                release_capacity=release_capacity,
            )
            new_hydro.add_meta(key=self._metakey_energy_eq_downstream, value=LevelExprMeta(energy_eq))

            data[new_id] = new_hydro

    def _aggregate_fillings(
        self,
        fillings: list[StockVolume],
        energy_eq_downstreams: list[Expr],
        energy_eq: Expr,
        model: Model,
        weight_unit: str,
        group_id: str,
        members: list[str],
    ) -> StockVolume | None:
        """Aggregate reservoir fillings if all fillings are not None."""
        sum_filling = None
        if all(filling.get_level() for filling in fillings):
            if any(not filling.get_profile() for filling in fillings):
                missing = [member for member, filling in zip(members, fillings, strict=False) if not filling.get_profile()]
                message = (
                    "Some reservoir fillings in grouped modules have no profile. Cannot aggregate profiles. "
                    f"Group: '{group_id}', missing profile for {missing}."
                )
                raise ValueError(message)
            level, profiles, weights = self._get_level_profiles_weights_fillings(model, fillings, energy_eq_downstreams, energy_eq, weight_unit)
            profile = _aggregate_weighted_expressions(profiles, weights)
            sum_filling = StockVolume(level=level, profile=profile)
        elif any(filling.get_level() for filling in fillings):
            missing = [member for member, filling in zip(members, fillings, strict=False) if not filling.get_level()]
            message = (
                "Some but not all grouped modules have reservoir filling defined, reservoir filling not aggregated. "
                f"Group: {group_id}, missing filling for {missing}."
            )
            self.send_warning_event(message)
        return sum_filling

    def _get_level_profiles_weights_fillings(
        self,
        model: Model,
        fillings: list[StockVolume],
        energy_eq_downstreams: list[Expr],
        energy_eq: Expr,
        weight_unit: str,
    ) -> tuple[Expr, list[Expr], list[float]]:
        """
        Get aggregated filling level, and profiles with weights from list of fillings.

        Two cases:
        1) All fillings are expressions from previous disaggregation. Can be aggregated more efficiently.
        2) Default case, where we weight fillings based on energy equivalent inflow.
        """
        levels = [filling.get_level() for filling in fillings]
        if all(self._is_disagg_filling_expr(level) for level in levels):
            return _get_level_profile_weights_from_disagg_levelprofiles(model, fillings, self._data_dim, self._scen_dim)
        levels_energy = [filling * ee for filling, ee in zip(levels, energy_eq_downstreams, strict=True)]
        level = sum(levels_energy) / energy_eq
        profiles = [filling.get_profile() for filling in fillings]
        weights = [get_level_value(level_energy, model, weight_unit, self._data_dim, self._scen_dim, False) for level_energy in levels_energy]
        return level, profiles, weights

    def _is_disagg_filling_expr(self, expr: Expr) -> bool:
        """Check if expr is ((weight * agg_level * energy_eq_downstream) / energy_eq_agg) which indicates it comes from disaggregation."""
        if expr.is_leaf():
            return False
        ops, args = expr.get_operations(expect_ops=True, copy_list=False)
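        # Expected arg layout for ops "**/": args[0] = weight (leaf, not a level),
        # args[1] = aggregated filling level (stock), args[2] = energy equivalent downstream (level),
        # args[3] = aggregated energy equivalent (level leaf).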
        if not (
            ops == "**/"
            and len(args) == 4  # noqa: E501
            and all([args[0].is_leaf(), args[3].is_leaf()])
            and not args[0].is_level()
            and not args[0].is_flow()
            and not args[0].is_stock()
            and args[1].is_stock()
            and args[2].is_level()
            and not args[2].is_flow()
            and not args[2].is_stock()
            and args[3].is_level()
            and not args[3].is_flow()
            and not args[3].is_stock()
        ):
            return False
        return args[2].is_leaf() or args[2].get_operations(expect_ops=True, copy_list=False)[0] == "+"

    def _disaggregate(  # noqa: C901
        self,
        model: Model,
        original_data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        """Disaggregate HydroAggregator."""
        new_data = model.get_data()

        deleted_group_names = self._get_deleted_group_modules(new_data)  # find agg groups that have been deleted
        agg_modules = {key: new_data.pop(key) for key in self._grouped_modules if key not in deleted_group_names}  # isolate agg modules out of new_data

        # Reinstate original detailed modules that are not fully deleted
        for detailed_key, agg_keys in self._aggregation_map.items():
            if agg_keys and all(key in deleted_group_names for key in agg_keys):
                continue
            new_data[detailed_key] = original_data[detailed_key]

        # Set production results in detailed modules
        for agg_key, detailed_keys in self._grouped_modules.items():
            if agg_key in deleted_group_names:
                continue

            agg_production_level = agg_modules[agg_key].get_generator().get_production().get_level()
            if agg_production_level is None:  # keep original production if agg has no production defined
                continue
            if len(detailed_keys) == 1:  # only one detailed module, set production directly
                new_data[next(iter(detailed_keys))].get_generator().get_production().set_level(agg_production_level)
                continue
            detailed_production_levels = [new_data[detailed_key].get_generator().get_production().get_level() for detailed_key in detailed_keys]
            if any(detailed_production_levels) and not all(
                detailed_production_levels,
            ):  # if some but not all detailed modules have production defined, skip setting production
                missing = [detailed_key for detailed_key, level in zip(detailed_keys, detailed_production_levels, strict=False) if not level]
                message = f"Some but not all grouped modules have production defined. Production not disaggregated for {agg_key}, missing for {missing}."
                self.send_warning_event(message)
                continue
            if _all_detailed_exprs_in_sum_expr(agg_production_level, detailed_production_levels):  # if agg production is sum of detailed levels, keep original
                continue
            production_weights = self._get_disaggregation_production_weights(model, detailed_keys)  # default method
            for detailed_key in detailed_keys:
                self._set_weighted_production(new_data[detailed_key], agg_modules[agg_key], production_weights[detailed_key])

        # Set filling results in detailed modules
        for agg_key, detailed_keys in self._grouped_reservoirs.items():
            if agg_key in deleted_group_names:
                continue

            agg_filling_level = agg_modules[agg_key].get_reservoir().get_volume().get_level()
            if agg_filling_level is None:  # keep original filling if agg has no filling defined
                continue
            if len(detailed_keys) == 1:  # only one detailed module, set filling directly
                new_data[next(iter(detailed_keys))].get_reservoir().get_volume().set_level(agg_filling_level)
                continue
            detailed_filling_levels = [new_data[detailed_key].get_reservoir().get_volume().get_level() for detailed_key in detailed_keys]
            if any(detailed_filling_levels) and not all(
                detailed_filling_levels,
            ):  # if some but not all detailed modules have filling defined, skip setting filling
                missing = [detailed_key for detailed_key, level in zip(detailed_keys, detailed_filling_levels, strict=False) if not level]
                message = f"Some but not all grouped modules have filling defined. Filling not disaggregated for {agg_key}, missing for {missing}."
                self.send_warning_event(message)
                continue
            detailed_energy_eq_downstreams = [new_data[detailed_key].get_meta(self._metakey_energy_eq_downstream).get_value() for detailed_key in detailed_keys]
            agg_energy_eq_downstream = agg_modules[agg_key].get_meta(self._metakey_energy_eq_downstream).get_value()
            agg_detailed_fillings = [
                detailed_filling * detailed_energy_eq
                for detailed_filling, detailed_energy_eq in zip(detailed_filling_levels, detailed_energy_eq_downstreams, strict=True)
                if detailed_filling and detailed_energy_eq
            ]
            if self._is_sum_filling_expr(
                agg_filling_level,
                agg_detailed_fillings,
                agg_energy_eq_downstream,
            ):  # if agg filling is sum of detailed levels, keep original
                continue
            reservoir_weights = self._get_disaggregation_filling_weights(model, detailed_keys)  # default method
            for detailed_key in detailed_keys:
                self._set_weighted_filling(new_data[detailed_key], agg_modules[agg_key], reservoir_weights[detailed_key])

        self._grouped_modules.clear()
        self._grouped_reservoirs.clear()

    def _get_deleted_group_modules(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> set[str]:
        deleted_group_names: set[str] = set()

        for group_name in self._grouped_modules:
            if group_name not in new_data:
                deleted_group_names.add(group_name)

        return deleted_group_names

    def _get_disaggregation_production_weights(
        self,
        model: Model,
        detailed_keys: list[str],
    ) -> dict[str, float]:
        """Get weights to disaggregate production based on production capacity."""
        # Calculate production capacity for each detailed module
        data = model.get_data()
        production_weights = dict()  # detailed_key -> production_weight
        production_weight_factors = dict()  # detailed_key -> production_weight_factor
        for det in detailed_keys:
            det_module = data[det]
            release_capacity_level = det_module.get_release_capacity().get_level()
            generator_energy_eq = det_module.get_generator().get_energy_equivalent().get_level()
            production_weight = get_level_value(
                release_capacity_level * generator_energy_eq,
                db=model,
                unit="kW",
                data_dim=self._data_dim,
                scen_dim=self._scen_dim,
                is_max=False,
            )
            production_weights[det] = production_weight

        # Calculate production weight for each detailed module
        for det in detailed_keys:
            production_weight_factors[det] = production_weights[det] / sum(production_weights.values())

        return production_weight_factors

    def _get_disaggregation_filling_weights(
        self,
        model: Model,
        detailed_keys: list[str],
    ) -> dict[str, float]:
        """Get weights to disaggregate filling based on reservoir capacity."""
        # Calculate reservoir capacity for each detailed module
        data = model.get_data()
        filling_weights = dict()  # detailed_key -> reservoir_weight
        filling_weight_factors = dict()  # detailed_key -> reservoir_weight_factor
        for det in detailed_keys:
            det_module = data[det]
            reservoir_capacity_level = det_module.get_reservoir().get_capacity().get_level()
            reservoir_energy_eq = det_module.get_meta(self._metakey_energy_eq_downstream).get_value()
            reservoir_weight = get_level_value(
                reservoir_capacity_level * reservoir_energy_eq,
                db=model,
                unit="GWh",
                data_dim=self._data_dim,
                scen_dim=self._scen_dim,
                is_max=False,
            )
            filling_weights[det] = reservoir_weight

        # Calculate reservoir weight for each detailed module
        for det in detailed_keys:
            filling_weight_factors[det] = filling_weights[det] / sum(filling_weights.values())

        return filling_weight_factors

    def _set_weighted_production(self, detailed_module: HydroModule, agg_module: HydroModule, production_weight: float) -> None:
        """Set production level and profile for detailed module based on aggregated module."""
        agg_production_level = agg_module.get_generator().get_production().get_level()
        agg_production_profile = agg_module.get_generator().get_production().get_profile()
        production_level = production_weight * agg_production_level
        detailed_module.get_generator().get_production().set_level(production_level)
        detailed_module.get_generator().get_production().set_profile(agg_production_profile)

    def _is_sum_filling_expr(self, agg_filling: Expr, agg_detailed_fillings: list[Expr], agg_energy_eq_downstream: Expr) -> bool:
        """Check if expr is (sum(filling * energy_eq_downstream)) / agg_energy_eq_downstream, indicating it comes from aggregation."""
        if agg_filling.is_leaf():
            return False
        ops, args = agg_filling.get_operations(expect_ops=True, copy_list=False)
        if not (ops == "/" and len(args) == 2 and args[1] == agg_energy_eq_downstream):
            return False
        ops_sum, args_sum = args[0].get_operations(expect_ops=True, copy_list=False)
        if "+" not in ops_sum:
            return False
        if len(args_sum) != len(agg_detailed_fillings):
            return False
        return all(arg in agg_detailed_fillings for arg in args_sum)

    def _set_weighted_filling(self, detailed_module: HydroModule, agg_module: HydroModule, filling_weight: float) -> None:
        """Set filling level and profile for detailed module based on aggregated module."""
        agg_filling_level = agg_module.get_reservoir().get_volume().get_level()
        agg_filling_profile = agg_module.get_reservoir().get_volume().get_profile()
        if agg_filling_level:  # keep original filling if agg has no filling defined
            agg_energy_eq = agg_module.get_meta(self._metakey_energy_eq_downstream).get_value()
            detailed_energy_eq = detailed_module.get_meta(self._metakey_energy_eq_downstream).get_value()

            filling_level = (filling_weight * agg_filling_level * agg_energy_eq) / detailed_energy_eq
            detailed_module.get_reservoir().get_volume().set_level(filling_level)
            detailed_module.get_reservoir().get_volume().set_profile(agg_filling_profile)
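
A minimal usage sketch (illustrative, not part of framcore): the model, data_dim and scen_dim variables are assumed to be built elsewhere; "energy_eq_downstream" is a hypothetical metadata key that must first be populated, for example with framcore.utils.set_global_energy_equivalent; and aggregate/disaggregate are assumed public entry points provided by the Aggregator base class around _aggregate/_disaggregate.

# Illustrative sketch only; names and entry points as assumed above.
from framcore.aggregators.HydroAggregator import HydroAggregator

aggregator = HydroAggregator(
    metakey_energy_eq_downstream="energy_eq_downstream",  # hypothetical metadata key
    data_dim=data_dim,  # SinglePeriodTimeIndex built elsewhere
    scen_dim=scen_dim,  # FixedFrequencyTimeIndex built elsewhere
    ror_threshold=0.5,  # regulation-factor cutoff for run-of-river classification
)
aggregator.aggregate(model)  # assumed public wrapper around _aggregate
# ... work with the aggregated model ...
aggregator.disaggregate(model)  # assumed public wrapper around _disaggregate
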
__init__(metakey_energy_eq_downstream: str, data_dim: SinglePeriodTimeIndex, scen_dim: FixedFrequencyTimeIndex, ror_threshold: float = 0.5, metakey_power_node: str | None = None, power_node_members: list[str] | None = None, release_capacity_profile: TimeVector | None = None) -> None

Initialize HydroAggregator.

Parameters:

Name Type Description Default
metakey_energy_eq_downstream str

Metadata key for energy equivalent downstream. Can be calculated with framcore.utils.set_global_energy_equivalent

required
data_dim SinglePeriodTimeIndex

Data dimension for eager evaluation.

required
scen_dim FixedFrequencyTimeIndex

Scenario dimension for eager evaluation.

required
ror_threshold float

Regulation factor (upstream reservoir capacity / yearly upstream inflow) threshold for run-of-river classification. Default is 0.5.

0.5
metakey_power_node str | None

If given, check the metadata of power nodes to determine whether they should be grouped.

None
power_node_members list[str] | None

If given along with metakey_power_node, group modules only for power nodes with these metadata values. If given without metakey_power_node, only group power nodes in this list.

None
release_capacity_profile TimeVector | None

If given, use this profile for all aggregated modules' release capacities.

None
Source code in framcore/aggregators/HydroAggregator.py
def __init__(
    self,
    metakey_energy_eq_downstream: str,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    ror_threshold: float = 0.5,
    metakey_power_node: str | None = None,
    power_node_members: list[str] | None = None,
    release_capacity_profile: TimeVector | None = None,
) -> None:
    """
    Initialize HydroAggregator.

    Args:
        metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
            Can be calculated with framcore.utils.set_global_energy_equivalent
        data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation.
        scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation.
        ror_threshold (float): Regulation factor (upstream reservoir capacity / yearly upstream inflow) threshold for run-of-river classification.
            Default is 0.5.
        metakey_power_node (str | None): If given, check the metadata of power nodes to determine whether they should be grouped.
        power_node_members (list[str] | None): If given along with metakey_power_node, group modules only for power nodes with these metadata values.
            If given without metakey_power_node, only group power nodes in this list.
        release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules' release capacities.

    """
    super().__init__()
    self._check_type(metakey_energy_eq_downstream, str)
    self._check_type(ror_threshold, float)
    self._check_type(data_dim, SinglePeriodTimeIndex)
    self._check_type(scen_dim, FixedFrequencyTimeIndex)
    self._check_type(metakey_power_node, (str, type(None)))
    self._check_type(power_node_members, (list, type(None)))
    if ror_threshold < 0:
        msg = f"ror_threshold must be non-negative, got {ror_threshold}."
        raise ValueError(msg)
    if metakey_power_node is not None and not power_node_members:
        raise ValueError("If metakey_power_node is given, power_node_members must also be given.")

    self._metakey_energy_eq_downstream = metakey_energy_eq_downstream
    self._ror_threshold = ror_threshold
    self._metakey_power_node = metakey_power_node
    self._power_node_members = power_node_members
    self._release_capacity_profile = release_capacity_profile

    self._data_dim = data_dim
    self._scen_dim = scen_dim

    self._grouped_modules: dict[str, list[str]] = defaultdict(list)  # agg to detailed
    self._grouped_reservoirs: dict[str, list[str]] = defaultdict(list)  # agg to detailed

NodeAggregator

NodeAggregator

Bases: Aggregator

Aggregate groups of Nodes for a commodity. Subclass of Aggregator.

Aggregation steps (self._aggregate):

  1. Map all Components to their Nodes of the correct commodity if they are referencing any. This is important to redirect all references to the new Nodes after aggregation.
  2. Create mapping of what members the new Nodes will be aggregated from. This step also does a lot of error handling and checks the validity of the metadata and groupings. Raises error if:
    • Nodes do not have any metadata for the meta key.
    • Nodes have the wrong metadata object type for the meta key (must be Member).
    • Exogenous Nodes are grouped together for aggregation with endogenous Nodes.
  3. Initialize new Node objects and set prices and exogenous status. Prices are calculated as a weighted average of all the member Node prices.
  4. Old Nodes are deleted from the Model data, after which the aggregated Node is added, and references in the rest of the system are updated to point to the new Node.
  5. Handling of transports: All Components which transport the same commodity as the aggregated Nodes are analysed. If the two Nodes they connect are now the same aggregated Node, the transport is 'internal', meaning it now operates within a single Node. If the transport Component is lossy, it is replaced by a Demand Component representing the commodity consumption caused by the loss. All internal transports are afterwards deleted.

Disaggregation steps (self._disaggregate):

  1. Collect the set of Node group keys which have been either removed from the Model data or changed to reference something other than Nodes.
  2. Validate that IDs of Nodes to be restored have not been used to reference something else in the meantime.
  3. Delete the aggregated Nodes and restore the old Nodes to the Model. Also copy shadow price results from the aggregated Nodes to the disaggregated. NB! This will overwrite any previous shadow prices of the original disaggregated Nodes.
  4. Restore the references in all objects to the disaggregated Nodes. A mapping created during aggregation is used for this.
  5. Validate that no restorable internal transports have a name conflict with existing objects in the Model. NB! An internal transport is not restorable if one or both of its referenced Nodes have been removed from the Model or now reference another object. See step 1.
  6. Restore all the restorable internal transports from the original data.
  7. Delete the aggregation-created Demand objects representing internal transports.

See Aggregator for general design notes and rules to follow when using Aggregators.
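
A minimal usage sketch (illustrative, not part of framcore): "price_zone" is a hypothetical metadata key whose Member metadata on each Node names its group; the model, data_dim and scen_dim variables are assumed to be built elsewhere; and aggregate is an assumed public entry point provided by the Aggregator base class around _aggregate.

# Illustrative sketch only; names and entry points as assumed above.
from framcore.aggregators.NodeAggregator import NodeAggregator

aggregator = NodeAggregator(
    commodity="Power",  # commodity of the Nodes to aggregate
    meta_key="price_zone",  # hypothetical Member metadata key on each Node
    data_dim=data_dim,  # SinglePeriodTimeIndex built elsewhere
    scen_dim=scen_dim,  # FixedFrequencyTimeIndex built elsewhere
    utilization_rate=0.5,  # assumed utilization on removed internal transports
)
aggregator.aggregate(model)  # assumed public wrapper around _aggregate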

Source code in framcore/aggregators/NodeAggregator.py
class NodeAggregator(Aggregator):
    """
    Aggregate groups of Nodes for a commodity. Subclass of Aggregator.

    Aggregation steps (self._aggregate):

    1. Map all Components to their Nodes of the correct commodity if they are referencing any. This is important to redirect all references to the
        new Nodes after aggregation.
    2. Create mapping of what members the new Nodes will be aggregated from. This step also does a lot of error handling and checks the validity of the
        metadata and groupings. Raises error if:
        - Nodes do not have any metadata for the meta key.
        - Nodes have the wrong metadata object type for the meta key (must be Member).
        - Exogenous Nodes are grouped together for aggregation with endogenous Nodes.
    3. Initialize new Node objects and set prices and exogenous status. Prices are calculated as a weighted average of all the member Node prices.
    4. Old Nodes are deleted from the Model data, after which the aggregated Node is added, and references in the rest of the system are updated to point to
        the new Node.
    5. Handling of transports: All Components which transport the same commodity as the aggregated Nodes are analysed. If the two Nodes they connect are now
        the same aggregated Node, the transport is 'internal', meaning it now operates within a single Node. If the transport Component is lossy, it is replaced
        by a Demand Component representing the commodity consumption caused by the loss. All internal transports are afterwards deleted.


    Disaggregation steps (self._disaggregate):

    1. Collect the set of Node group keys which have been either removed from the Model data or changed to reference something other than Nodes.
    2. Validate that IDs of Nodes to be restored have not been used to reference something else in the meantime.
    3. Delete the aggregated Nodes and restore the old Nodes to the Model. Also copy shadow price results from the aggregated Nodes to the disaggregated.
        NB! This will overwrite any previous shadow prices of the original disaggregated Nodes.
    4. Restore the references in all objects to the disaggregated Nodes. A mapping created during aggregation is used for this.
    5. Validate that no restorable internal transports have a name conflict with existing objects in the Model.
        NB! An internal transport is not restorable if one or both of its referenced Nodes have been removed from the Model or now reference another
        object. See step 1.
    6. Restore all the restorable internal transports from the original data.
    7. Delete the aggregation-created Demand objects representing internal transports.

    See Aggregator for general design notes and rules to follow when using Aggregators.

    """

    def __init__(
        self,
        commodity: str,
        meta_key: str,
        data_dim: SinglePeriodTimeIndex,
        scen_dim: FixedFrequencyTimeIndex,
        utilization_rate: float = 0.5,
    ) -> None:
        """
        Aggregate groups of nodes (defined by metadata key) for a commodity.

        Args:
            commodity (str): Commodity of the Nodes to be aggregated.
            meta_key (str): Metadata key used to group the Nodes into aggregates (see class docstring).
            data_dim (SinglePeriodTimeIndex): Data dimension for eager evalutation of prices.
            scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evalutation of prices.
            utilization_rate (float, optional): Assumed utilization rate on internal transports. Used to calculate new Demands
                after aggregation if the transport does not have a volume. Defaults to 0.5 (i.e. 50 percent utilization in each direction).

        """
        super().__init__()
        self._commodity = commodity
        self._meta_key = meta_key
        self._data_dim = data_dim
        self._scen_dim = scen_dim
        self._utilization_rate = utilization_rate

        # To remember all modifications in _aggregate so we can undo them in _disaggregate
        # Will be cleared in _init_aggregate, so that same memory can be re-used.
        self._grouped_nodes: dict[str, set[str]] = defaultdict(set)
        self._replaced_references: dict[str, set[tuple[str, str]]] = defaultdict(set)  # tracks every node reference that has been replaced
        self._internal_transports: set[str] = set()
        self._internal_transport_demands: set[str] = set()

        # To record error messages in _aggregate and _disaggregate
        # Will be cleared in _init_aggregate and _init_disaggregate,
        # so that same memory can be re-used.
        self._errors: set[str] = set()

    def _aggregate(self, model: Model) -> None:
        """Modify model, components and data."""
        t0 = time()
        # Will be modified by upcoming code by adding group_nodes
        # and deleting member_nodes and redundant transports.
        data = model.get_data()

        # Helper-dict to give simpler access to components in upcoming loops
        # The components are the same instances as in data, and upcoming code
        # will use this to modify components inplace, in self._replace_node.
        components: dict[str, Component] = {key: c for key, c in data.items() if isinstance(c, Component)}

        # This is just a helper-dict to give fast access
        component_to_nodes: dict[str, set[str]] = get_component_to_nodes(components)

        self._init_aggregate(components, data)
        self.send_debug_event(f"init time {round(time() - t0, 3)} seconds")

        # main logic
        t = time()
        for group_name, member_node_names in self._grouped_nodes.items():
            member_node_names: set[str]
            group_node = Node(commodity=self._commodity)
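            # The group node's price is set to a weighted average of the member Node prices
            # (step 3 in the class docstring).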
            self._set_group_price(model, group_node, member_node_names, "EUR/MWh")
            self._delete_members(data, member_node_names)

            assert group_name not in data, f"{group_name}"
            data[group_name] = group_node

            self._replace_node(group_name, member_node_names, components, component_to_nodes)
            components[group_name] = group_node
        self.send_debug_event(f"main logic time {round(time() - t, 3)} seconds")

        t = time()
        transports = get_transports_by_commodity(components, self._commodity)
        self._update_internal_transports(transports)
        self._delete_internal_transports(data)
        self._add_internal_transport_demands(model, components, transports)
        self.send_debug_event(f"handle internal transport losses time {round(time() - t, 3)} seconds")

        self.send_debug_event(f"total time {round(time() - t0, 3)} seconds")

    def _update_internal_transports(
        self,
        transports: dict[str, tuple[str, str]],
    ) -> None:
        for name, (from_node, to_node) in transports.items():
            if from_node == to_node:
                # if this fails, the transport was invalid before aggregation
                assert to_node in self._grouped_nodes

                # record the transport name (earlier to_node was added here by mistake)
                self._internal_transports.add(name)

    def _get_demand_member_meta_keys(self, components: dict[str, Component]) -> set[str]:
        """We find all direct_out demands via flows from get_supported_components and collect member meta keys from them."""
        out: set[str] = set()
        nodes_and_flows = get_supported_components(components, supported_types=(Node, Flow), forbidden_types=tuple())
        node_to_commodity = get_node_to_commodity(nodes_and_flows)
        for flow in nodes_and_flows.values():
            if not isinstance(flow, Flow):
                continue
            flow_infos = get_flow_infos(flow, node_to_commodity)
            if len(flow_infos) != 1:
                continue
            flow_info = flow_infos[0]
            if flow_info.category != "direct_out":
                continue
            if flow_info.commodity_out != self._commodity:
                continue
            demand = flow
            for key in demand.get_meta_keys():
                meta = demand.get_meta(key)
                if isinstance(meta, Member):
                    out.add(key)
        return out

    def _add_internal_transport_demands(
        self,
        model: Model,
        components: dict[str, Component],
        transports: dict[str, tuple[str, str]],
    ) -> None:
        """
        Add demand representing loss on internal transmission lines being removed by aggregation.

        This is done to avoid underestimation of aggregated demand.
        """
        data = model.get_data()

        demand_member_meta_keys = self._get_demand_member_meta_keys(components)

        # TODO: Document that we rely on Transmission and Demand APIs to get loss
        for key in self._internal_transports:
            transport = components[key]
            from_node, to_node = transports[key]
            assert from_node == to_node, (
                f"Transport {key} added to internal transport when it should not. Source node {from_node}, and destination node {to_node} are not the same."
            )
            node = from_node

            transport: Transmission

            if transport.get_loss():
                profile = None
                loss = transport.get_loss()
                if loss.get_level() is None:
                    continue
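                # Two cases below: (1) the transport has a known outgoing volume, so the
                # lost volume is volume * loss rate; (2) only a max capacity is known, so
                # the lost volume is estimated as capacity * utilization_rate * loss rate.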
                if transport.get_outgoing_volume().get_level():
                    level = transport.get_outgoing_volume().get_level() * loss.get_level()

                    # could multiply by loss profile here, but profile * profile is not yet supported so we wait.
                    profile = transport.get_outgoing_volume().get_profile()

                # otherwise, estimate volume from max capacity and the assumed utilization rate
                else:
                    level = transport.get_max_capacity().get_level() * self._utilization_rate * loss.get_level()
                    profile = loss.get_profile()

                internal_losses_demand = Demand(
                    node=node,
                    capacity=MaxFlowVolume(
                        level=level,
                        profile=profile,
                    ),
                )

                for meta_key in demand_member_meta_keys:  # transfer member metadata to internal loss Demand
                    internal_losses_demand.add_meta(meta_key, Member("InternalTransportLossFromNodeAggregator"))

                demand_key = key + "_InternalTransportLossDemand_" + node

                self._internal_transport_demands.add(demand_key)
                if demand_key in data:
                    msg = f"Could not use key {demand_key} for internal transport demand because it already exists in the Model."
                    raise KeyError(msg)
                data[demand_key] = internal_losses_demand

    def _delete_internal_transports(
        self,
        data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        for key in self._internal_transports:
            self._aggregation_map[key] = set()
            del data[key]

    def _delete_members(
        self,
        data: dict[str, Component | TimeVector | Curve | Expr],
        member_node_names: set[str],
    ) -> None:
        for member in member_node_names:
            del data[member]

    def _set_group_price(
        self,
        model: Model,
        group_node: Node,
        member_node_names: set[str],
        weight_unit: str,
    ) -> None:
        data = model.get_data()
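        # Equal weights: each member node contributes 1/N to the aggregated group price.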
        weights = [1.0 / len(member_node_names)] * len(member_node_names)
        prices = [data[key].get_price() for key in member_node_names]

        exogenous = [data[key].is_exogenous() for key in member_node_names]
        if all(exogenous):
            group_node.set_exogenous()
        elif any(exogenous):
            message = f"Only some member Nodes of group {group_node} are exogenous. This is ambiguous. Either all or none must be exogenous."
            raise ValueError(message)
        if all(prices):
            level, profile, intercept = _aggregate_costs(
                model=model,
                costs=prices,
                weights=weights,
                weight_unit=weight_unit,
                data_dim=self._data_dim,
                scen_dim=self._scen_dim,
            )
            group_node.get_price().set_level(level)
            group_node.get_price().set_profile(profile)
            group_node.get_price().set_intercept(intercept)
        elif any(prices):
            missing = [key for key in member_node_names if data[key].get_price() is None]
            self.send_warning_event(f"Only some member Nodes of group {group_node} have a Price, skipping price aggregation. Missing: {missing}")

    def _replace_node(
        self,
        group_name: str,
        member_node_names: set[str],
        components: dict[str, Component],
        component_to_nodes: dict[str, set[str]],
    ) -> None:
        for name, component in components.items():
            replace_keys = component_to_nodes[name]
            for key in member_node_names:
                if key in replace_keys:
                    component.replace_node(key, group_name)
                    self._replaced_references[name].add((key, group_name))

    def _init_aggregate(  # noqa: C901
        self,
        components: dict[str, Component],
        data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        self._grouped_nodes.clear()
        self._internal_transports.clear()
        self._internal_transport_demands.clear()
        self._errors.clear()

        self._aggregation_map = defaultdict(set)

        exogenous_groups = set()

        meta_key = self._meta_key

        for key, component in components.items():
            if not isinstance(component, Node):
                self._aggregation_map[key].add(key)
                continue

            node: Node = component

            commodity = node.get_commodity()

            if self._commodity != commodity:
                self._aggregation_map[key].add(key)
                continue

            meta: Meta | None = node.get_meta(meta_key)

            if meta is None:
                self._errors.add(f"Node {key} had no metadata behind key {meta_key}.")
                continue

            meta: Meta

            if not isinstance(meta, Member):
                got = type(meta).__name__
                message = f"Node {key} has metadata behind key {meta_key} with wrong type. Expected Member, got {got}."
                self._errors.add(message)
                continue

            meta: Member

            group_name: str = meta.get_value()

            if node.is_exogenous():
                # register groups with exogenous Nodes to validate later.
                exogenous_groups.add(group_name)

            if not self._errors:
                self._aggregation_map[key].add(group_name)
                self._grouped_nodes[group_name].add(key)

        grouped_nodes = self._grouped_nodes.copy()

        for group_name in exogenous_groups:  # Check exogenous groups.
            node_keys = grouped_nodes[group_name]
            if len(node_keys) > 1:  # allow unchanged or renamed exogenous Nodes.
                # We allow pure exogenous groups.
                exogenous = [components[node_key].is_exogenous() for node_key in node_keys]
                if (not all(exogenous)) and any(exogenous):
                    self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is ambiguous and therefore not allowed.")

        # remove single groups with unchanged names and check for duplicated names
        for group_name, node_keys in grouped_nodes.items():
            if len(node_keys) == 1 and group_name == next(iter(node_keys)):
                del self._grouped_nodes[group_name]
            try:  # If group name already exists for a node and the existing node is not aggregated to a new one.
                meta = data[group_name].get_meta(meta_key)
                if meta is None or meta.get_value() is None:
                    self._errors.add(
                        f"Metadata name for aggregated node ({group_name}) already exists in the model: {data[group_name]}",
                    )
            except KeyError:
                pass

        self._check_uniqueness()
        self._report_errors(self._errors)

    def _report_errors(self, errors: set[str]) -> None:
        if errors:
            n = len(errors)
            s = "s" if n > 1 else ""
            error_str = "\n".join(errors)
            message = f"Found {n} error{s}:\n{error_str}"
            raise RuntimeError(message)

    def _check_uniqueness(self) -> None:
        flipped = defaultdict(set)
        for group, members in self._grouped_nodes.items():
            for member in members:
                flipped[member].add(group)
        for k, v in flipped.items():
            if len(v) > 1:
                self._errors.add(f"Node {k} belongs to more than one group {v}")

    def _disaggregate(
        self,
        model: Model,
        original_data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        new_data = model.get_data()

        deleted_group_names: set[str] = self._init_disaggregate(new_data)

        self._validate_restore_nodes(new_data, deleted_group_names)
        self._restore_nodes(new_data, original_data, deleted_group_names)
        self._restore_references(new_data)

        restorable_transports = self._validate_restore_internal_transports(new_data, original_data, deleted_group_names)
        self._restore_internal_transports(new_data, original_data, restorable_transports)

        self._delete_internal_transport_demands(new_data)

    def _init_disaggregate(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> set[str]:
        self._errors.clear()
        deleted_group_names: set[str] = set()

        for group_name in self._grouped_nodes:
            if group_name not in new_data:
                deleted_group_names.add(group_name)
                continue

            group_node = new_data[group_name]

            if not (isinstance(group_node, Node) and group_node.get_commodity() == self._commodity):
                deleted_group_names.add(group_name)

        return deleted_group_names

    def _validate_restore_nodes(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        deleted_group_names: set[str],
    ) -> None:
        for group_name, member_node_names in self._grouped_nodes.items():
            if group_name in deleted_group_names:
                continue
            for key in member_node_names:
                if key in new_data:
                    obj = new_data[key]
                    if not (isinstance(obj, Node) and obj.get_commodity() == self._commodity):
                        typ = type(obj).__name__
                        message = f"Restoring node {key} from group node {group_name} failed because model already stores object of {typ} with that name."
                        self._errors.add(message)
        self._report_errors(self._errors)

    def _restore_nodes(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        original_data: dict[str, Component | TimeVector | Curve | Expr],
        deleted_group_names: set[str],
    ) -> None:
        for group_name, member_node_names in self._grouped_nodes.items():
            if group_name in deleted_group_names:
                continue

            group_node: Node = new_data.pop(group_name)

            group_price: Price | None = group_node.get_price()

            for key in member_node_names:
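                # Restore the original node, first copying the aggregated group price onto
                # it so price results from the aggregated run carry over to each member.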
                original_node: Node = original_data[key]
                if group_price is not None:
                    original_price = original_node.get_price()
                    original_price.copy_from(group_price)
                new_data[key] = original_node

    def _validate_restore_internal_transports(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        original_data: dict[str, Component | TimeVector | Curve | Expr],
        deleted_group_names: set[str],
    ) -> set[str]:
        nodes_not_added_back: set[str] = set()
        restorable_transports: set[str] = set()

        components = {k: v for k, v in original_data.items() if isinstance(v, Component)}
        transports = get_transports_by_commodity(components, self._commodity)

        for group_name, member_node_names in self._grouped_nodes.items():
            if group_name in deleted_group_names:
                nodes_not_added_back.update(member_node_names)

        for key in self._internal_transports:
            from_node, to_node = transports[key]

            if (from_node in nodes_not_added_back) and (to_node in nodes_not_added_back):
                continue

            restorable_transports.add(key)
            if key in new_data:
                obj = new_data[key]
                typ = type(obj).__name__
                message = f"Restoring deleted transport {key} failed because model already stores object of {typ} with that name."
                self._errors.add(message)

        self._report_errors(self._errors)

        return restorable_transports

    def _restore_internal_transports(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        original_data: dict[str, Component | TimeVector | Curve | Expr],
        restorable_transports: set[str],
    ) -> None:
        for key in self._internal_transports:
            if key not in restorable_transports:
                continue
            transport = original_data[key]
            new_data[key] = transport

    def _delete_internal_transport_demands(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> None:
        for key in self._internal_transport_demands:
            new_data.pop(key, None)

    def _restore_references(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> None:
        for component_name, replacements in self._replaced_references.items():
            # internal transports are handled by themselves.
            if component_name in new_data and component_name not in self._internal_transports and isinstance(new_data[component_name], Component):
                for replacement in replacements:
                    disaggregated, group_name = replacement
                    new_data[component_name].replace_node(old=group_name, new=disaggregated)  # set the disaggregated node back in the component.
__init__(commodity: str, meta_key: str, data_dim: SinglePeriodTimeIndex, scen_dim: FixedFrequencyTimeIndex, utilization_rate: float = 0.5) -> None

Aggregate groups of nodes (defined by metadata key) for a commodity.

Parameters:

Name Type Description Default
commodity str

Commodity of the Nodes to be aggregated.

required
meta_key str

Metadata key whose Member values define the node groups.

required
data_dim SinglePeriodTimeIndex

Data dimension for eager evaluation of prices.

required
scen_dim FixedFrequencyTimeIndex

Scenario dimension for eager evaluation of prices.

required
utilization_rate float

Assumed utilization rate on internal transports. Used to calculate new Demands after aggregation if the transport does not have a volume. Defaults to 0.5 (i.e. 50 percent utilization in each direction).

0.5
Source code in framcore/aggregators/NodeAggregator.py
def __init__(
    self,
    commodity: str,
    meta_key: str,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    utilization_rate: float = 0.5,
) -> None:
    """
    Aggregate groups of nodes (defined by metadata key) for a commodity.

    Args:
        commodity (str): Commodity of the Nodes to be aggregated.
        meta_key (str): Metadata key whose Member values define the node groups.
        data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation of prices.
        scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation of prices.
        utilization_rate (float, optional): Assumed utilization rate on internal transports. Used to calculate new Demands after aggregation
                                                      if the transport does not have a volume.
                                                      Defaults to 0.5 (i.e. 50 percent utilization in each direction).

    """
    super().__init__()
    self._commodity = commodity
    self._meta_key = meta_key
    self._data_dim = data_dim
    self._scen_dim = scen_dim
    self._utilization_rate = utilization_rate

    # To remember all modifications in _aggregate so we can undo them in _disaggregate
    # Will be cleared in _init_aggregate, so that same memory can be re-used.
    self._grouped_nodes: dict[str, set[str]] = defaultdict(set)
    self._replaced_references: dict[str, set[tuple[str, str]]] = defaultdict(set)  # tracks all node references that have been replaced
    self._internal_transports: set[str] = set()
    self._internal_transport_demands: set[str] = set()

    # To record error messages in _aggregate and _disaggregate
    # Will be cleared in _init_aggregate and _init_disaggregate,
    # so that same memory can be re-used.
    self._errors: set[str] = set()
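
A minimal usage sketch. This assumes NodeAggregator is importable from framcore.aggregators and that the public Aggregator base class exposes aggregate(model) and disaggregate(model) wrappers around the private hooks above; the metadata key and time-index variables are illustrative placeholders:

from framcore.aggregators import NodeAggregator

# model is an existing framcore Model whose power Nodes carry Member metadata
# under the key "price_zone"; data_dim and scen_dim are a SinglePeriodTimeIndex
# and a FixedFrequencyTimeIndex built elsewhere.
aggregator = NodeAggregator(
    commodity="Power",
    meta_key="price_zone",   # Member metadata defining the node groups
    data_dim=data_dim,
    scen_dim=scen_dim,
    utilization_rate=0.5,    # assume 50 percent utilization on internal transports
)

aggregator.aggregate(model)     # member Nodes replaced by group Nodes; internal
                                # transports turned into loss Demands
# ... solve or analyse the aggregated model ...
aggregator.disaggregate(model)  # original Nodes and transports restored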

WindSolarAggregator

SolarAggregator

Bases: _WindSolarAggregator

Aggregate Solar components into groups based on their power nodes.

Aggregation steps (self._aggregate):

  1. Group components based on their power nodes (self._group_by_power_node):
  2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
    • Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
    • Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity supported).
    • TODO: Add support for additional weighting methods (e.g. production instead of capacity).
    • Production is aggregated as the sum of production levels with weighted profiles.
  2a. Make a new aggregated component and delete original components from model data.
  3. Add mapping from detailed to aggregated components to self._aggregation_map.

Disaggregation steps (self._disaggregate):

  1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results.
  2. Distribute production from aggregated components back to the original components:
    • Results are weighted based on the weighting method (currently only max_capacity supported).
  3. Delete aggregated components from the model.

See Aggregator for general design notes and rules to follow when using Aggregators.

Attributes:

Name Type Description
_data_dim SinglePeriodTimeIndex | None

Data dimension for eager evaluation.

_scen_dim FixedFrequencyTimeIndex | None

Scenario dimension for eager evaluation.

_grouped_components dict[str, set[str]]

Mapping of aggregated components to their detailed components (agg to detailed).

Parent Attributes (see framcore.aggregators.Aggregator):

_is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
_original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
_aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).
Source code in framcore/aggregators/WindSolarAggregator.py
class SolarAggregator(_WindSolarAggregator):
    """
    Aggregate Solar components into groups based on their power nodes.

    Aggregation steps (self._aggregate):

    1. Group components based on their power nodes (self._group_by_power_node):
    2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
        - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
        - Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity supported).
        - TODO: Add support for additional weighting methods (e.g. production instead of capacity).
        - Production is aggregated as the sum of production levels with weighted profiles.
    2a. Make a new aggregated component and delete original components from model data.
    3. Add mapping from detailed to aggregated components to self._aggregation_map.


    Disaggregation steps (self._disaggregate):

    1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results.
    2. Distribute production from aggregated components back to the original components:
        - Results are weighted based on the weighting method (currently only max_capacity supported).
    3. Delete aggregated components from the model.


    See Aggregator for general design notes and rules to follow when using Aggregators.

    Attributes:
        _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
        _scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
        _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components (agg to detailed).


    Parent Attributes (see framcore.aggregators.Aggregator):

        _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
        _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
        _aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).

    """

    _component_type = Solar
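
The max_capacity weighting above can be illustrated with a plain-Python sketch (the numbers are made up and this is not the internal _aggregate_groups implementation):

# Capacity-weighted average of variable operation costs (voc) for three
# detailed Solar components being merged into one aggregated component.
capacities = [120.0, 60.0, 20.0]  # MW per detailed component
vocs = [2.0, 3.0, 5.0]            # EUR/MWh per detailed component

total_capacity = sum(capacities)  # 200.0 MW for the aggregated component
weights = [c / total_capacity for c in capacities]
aggregated_voc = sum(w * v for w, v in zip(weights, vocs))
assert abs(aggregated_voc - 2.6) < 1e-12  # EUR/MWh
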
WindAggregator

Bases: _WindSolarAggregator

Aggregate Wind components into groups based on their power nodes.

Aggregation steps (self._aggregate):

  1. Group components based on their power nodes (self._group_by_power_node):
  2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
    • Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
    • Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity supported).
    • TODO: Add support for additional weighting methods (e.g. production instead of capacity).
    • Production is aggregated as the sum of production levels with weighted profiles.
  2a. Make a new aggregated component and delete original components from model data.
  3. Add mapping from detailed to aggregated components to self._aggregation_map.

Disaggregation steps (self._disaggregate):

  1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results.
  2. Distribute production from aggregated components back to the original components:
    • Results are weighted based on the weighting method (currently only max_capacity supported).
  3. Delete aggregated components from the model.

See Aggregator for general design notes and rules to follow when using Aggregators.

Attributes:

Name Type Description
_data_dim SinglePeriodTimeIndex | None

Data dimension for eager evaluation.

_scen_dim FixedFrequencyTimeIndex | None

Scenario dimension for eager evaluation.

_grouped_components dict[str, set[str]]

Mapping of aggregated components to their detailed components (agg to detailed).

Parent Attributes (see framcore.aggregators.Aggregator):

_is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
_original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
_aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).
Source code in framcore/aggregators/WindSolarAggregator.py
class WindAggregator(_WindSolarAggregator):
    """
    Aggregate Wind components into groups based on their power nodes.

    Aggregation steps (self._aggregate):

    1. Group components based on their power nodes (self._group_by_power_node):
    2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
        - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
        - Variable operation costs (voc) are aggregated using weighted averages based on the weighting method (currently only max_capacity supported).
        - TODO: Add support for additional weighting methods (e.g. production instead of capacity).
        - Production is aggregated as the sum of production levels with weighted profiles.
    2a. Make a new aggregated component and delete original components from model data.
    3. Add mapping from detailed to aggregated components to self._aggregation_map.


    Disaggregation steps (self._disaggregate):

    1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results.
    2. Distribute production from aggregated components back to the original components:
        - Results are weighted based on the weighting method (currently only max_capacity supported).
    3. Delete aggregated components from the model.


    See Aggregator for general design notes and rules to follow when using Aggregators.

    Attributes:
        _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
        _scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
        _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components (agg to detailed).


    Parent Attributes (see framcore.aggregators.Aggregator):

        _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
        _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
        _aggregation_map (dict[str, set[str]] | None): Maps detailed components to their aggregated components (detailed to agg).

    """

    _component_type = Wind
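
A usage sketch, assuming both aggregators are importable from framcore.aggregators, that their constructors take the data_dim/scen_dim attributes documented above, and that the Aggregator base class exposes aggregate/disaggregate wrappers:

from framcore.aggregators import SolarAggregator, WindAggregator

wind = WindAggregator(data_dim=data_dim, scen_dim=scen_dim)
solar = SolarAggregator(data_dim=data_dim, scen_dim=scen_dim)

wind.aggregate(model)   # Wind components grouped per power node
solar.aggregate(model)  # Solar components grouped per power node
# ... solve ...
solar.disaggregate(model)  # undo in reverse order of aggregation
wind.disaggregate(model)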

attributes

AvgFlowVolume

Bases: FlowVolume

Concrete class representing an average flow volume attribute, indicating a flow variable with average values.

Subclass of FlowVolume < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class AvgFlowVolume(FlowVolume):
    """
    Concrete class representing an average flow volume attribute, indicating a flow variable with average values.

    Subclass of FlowVolume < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False

Coefficient

Bases: LevelProfile

Abstract class representing a coefficient attribute, used as a base class for various coefficient types.

Subclass of LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Coefficient(LevelProfile):
    """
    Abstract class representing a coefficient attribute, used as a base class for various coefficient types.

    Subclass of LevelProfile. See LevelProfile for details.
    """

    pass

Conversion

Bases: ArrowCoefficient

Concrete class representing a conversion coefficient attribute, used for conversion factors in the model.

Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Conversion(ArrowCoefficient):
    """
    Concrete class representing a conversion coefficient attribute, used for conversion factors in the model.

    Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False

Cost

Bases: ObjectiveCoefficient

Concrete class representing a cost attribute, indicating cost coefficients in the objective function.

Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Cost(ObjectiveCoefficient):
    """
    Concrete class representing a cost attribute, indicating cost coefficients in the objective function.

    Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_COST = True

Efficiency

Bases: ArrowCoefficient

Concrete class representing an efficiency coefficient attribute, indicating a unitless coefficient.

Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Efficiency(ArrowCoefficient):
    """
    Concrete class representing an efficiency coefficient attribute, indicating a unitless coefficient.

    Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True

Elasticity

Bases: Coefficient

Concrete class representing an elasticity coefficient attribute, indicating a unitless coefficient.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Elasticity(Coefficient):  # TODO: How does this work?
    """
    Concrete class representing an elasticity coefficient attribute, indicating a unitless coefficient.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True

FlowVolume

Bases: LevelProfile

Abstract class representing a flow volume attribute, indicating that the attribute is a flow variable.

Subclass of LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class FlowVolume(LevelProfile):
    """
    Abstract class representing a flow volume attribute, indicating that the attribute is a flow variable.

    Subclass of LevelProfile. See LevelProfile for details.
    """

    _IS_FLOW = True

Hours

Bases: Coefficient

Concrete class representing an hours coefficient attribute, indicating a time-related coefficient.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Hours(Coefficient):  # TODO: How does this work?
    """
    Concrete class representing an hours coefficient attribute, indicating a time-related coefficient.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
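
These subclasses are mostly empty: the _IS_ABSTRACT, _IS_FLOW, _IS_COST and _IS_UNITLESS class flags give each attribute its meaning. A small sketch, assuming the classes are importable from framcore.attributes:

from framcore.attributes import AvgFlowVolume, Cost, Efficiency, FlowVolume

volume = AvgFlowVolume()               # no-arg construction, as used by HydroBypass below
assert isinstance(volume, FlowVolume)  # concrete subclass of the abstract FlowVolume

assert Cost._IS_COST                   # marks cost coefficients in the objective
assert Efficiency._IS_UNITLESS         # unitless coefficient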

HydroBypass

Bases: Base

HydroBypass represents a controlled water way from a HydroModule. Used to bypass main release of the HydroModule.

Source code in framcore/attributes/hydro/HydroBypass.py
class HydroBypass(Base):
    """HydroBypass represents a controlled water way from a HydroModule. Used to bypass main release of the HydroModule."""

    def __init__(
        self,
        to_module: str | None,
        capacity: FlowVolume | None = None,
    ) -> None:
        """
        Initialize object.

        Args:
            to_module (str | None): Name of the HydroModule the water is released to.
            capacity (FlowVolume | None, optional): Restrictions on the volume of water which can pass through the bypass at a given moment. Defaults to None.

        """
        super().__init__()

        self._check_type(to_module, (str, type(None)))
        self._check_type(capacity, (FlowVolume, type(None)))

        self._to_module = to_module
        self._capacity = capacity
        self._volume = AvgFlowVolume()

    def get_to_module(self) -> str | None:
        """Get the name of the module to which the bypass leads."""
        return self._to_module

    def set_to_module(self, to_module: str) -> None:
        """Set the name of the module to which the bypass leads."""
        self._check_type(to_module, str)
        self._to_module = to_module

    def get_capacity(self) -> FlowVolume | None:
        """Get the capacity of the bypass."""
        return self._capacity

    def get_volume(self) -> AvgFlowVolume:
        """Get the volume of the bypass."""
        return self._volume

    def _get_fingerprint(self) -> Fingerprint:
        return self.get_fingerprint_default(refs={"to_module": self._to_module})
__init__(to_module: str | None, capacity: FlowVolume | None = None) -> None

Initialize object.

Parameters:

Name Type Description Default
to_module str | None

Name of the HydroModule the water is released to.

required
capacity FlowVolume | None

Restrictions on the volume of water which can pass through the bypass at a given moment. Defaults to None.

None
Source code in framcore/attributes/hydro/HydroBypass.py
def __init__(
    self,
    to_module: str | None,
    capacity: FlowVolume | None = None,
) -> None:
    """
    Initialize object.

    Args:
        to_module (str | None): Name of the HydroModule the water is released to.
        capacity (FlowVolume | None, optional): Restrictions on the volume of water which can pass through the bypass at a given moment. Defaults to None.

    """
    super().__init__()

    self._check_type(to_module, (str, type(None)))
    self._check_type(capacity, (FlowVolume, type(None)))

    self._to_module = to_module
    self._capacity = capacity
    self._volume = AvgFlowVolume()
get_capacity() -> FlowVolume | None

Get the capacity of the bypass.

Source code in framcore/attributes/hydro/HydroBypass.py
def get_capacity(self) -> FlowVolume | None:
    """Get the capacity of the bypass."""
    return self._capacity
get_to_module() -> str | None

Get the name of the module to which the bypass leads.

Source code in framcore/attributes/hydro/HydroBypass.py
def get_to_module(self) -> str | None:
    """Get the name of the module to which the bypass leads."""
    return self._to_module
get_volume() -> AvgFlowVolume

Get the volume of the bypass.

Source code in framcore/attributes/hydro/HydroBypass.py
def get_volume(self) -> AvgFlowVolume:
    """Get the volume of the bypass."""
    return self._volume
set_to_module(to_module: str) -> None

Set the name of the module to which the bypass leads.

Source code in framcore/attributes/hydro/HydroBypass.py
def set_to_module(self, to_module: str) -> None:
    """Set the name of the module to which the bypass leads."""
    self._check_type(to_module, str)
    self._to_module = to_module
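
A construction sketch (the module name is illustrative, and the no-argument MaxFlowVolume capacity is an assumption based on the FlowVolume hierarchy above):

from framcore.attributes import HydroBypass, MaxFlowVolume

bypass = HydroBypass(to_module="DownstreamModule", capacity=MaxFlowVolume())
bypass.get_to_module()  # "DownstreamModule"
bypass.get_volume()     # AvgFlowVolume result attribute, filled in by solvers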

HydroGenerator

Bases: Base

Produces power from the main release of a HydroModule.

Produces to a power node, and can have variable costs associated with operation. Other attributes are energy equivalent, PQ curve, nominal head and tailwater elevation.

Source code in framcore/attributes/hydro/HydroGenerator.py
class HydroGenerator(Base):
    """
    Produces power from the main release of a HydroModule.

    Produces to a power node, and can have variable costs associated with operation. Other attributes are energy equivalent, PQ curve, nominal head
    and tailwater elevation.

    """

    def __init__(
        self,
        power_node: str,
        energy_equivalent: Conversion,  # energy equivalent
        pq_curve: Expr | str | Curve | None = None,
        nominal_head: Expr | str | TimeVector | None = None,
        tailwater_elevation: Expr | str | TimeVector | None = None,
        voc: Cost | None = None,
        production: AvgFlowVolume | None = None,
    ) -> None:
        """
        Initialize a HydroGenerator with parameters.

        Args:
            power_node (str): Node to supply power to.
            energy_equivalent (Conversion): Conversion factor of power produced to water released.
            pq_curve (Expr | str | Curve | None, optional): Expression or curve describing the relationship between produced power and water released. Defaults to None.
            nominal_head (Expr | str | TimeVector | None, optional): Vertical distance between upstream and downstream water level. Defaults to None.
            tailwater_elevation (Expr | str | TimeVector | None, optional): Elevation at the surface where the water exits the turbine. Defaults to None.
            voc (Cost | None, optional): Variable operational costs. Defaults to None.
            production (AvgFlowVolume | None, optional): Result of power volume produced. Defaults to None.

        """
        super().__init__()

        self._check_type(power_node, str)
        self._check_type(energy_equivalent, Conversion)
        self._check_type(pq_curve, (Expr, str, Curve, type(None)))
        self._check_type(nominal_head, (Expr, str, TimeVector, type(None)))
        self._check_type(tailwater_elevation, (Expr, str, TimeVector, type(None)))
        self._check_type(voc, (Cost, type(None)))

        self._power_node = power_node
        self._energy_eq = energy_equivalent
        self._pq_curve = ensure_expr(pq_curve)
        self._nominal_head = ensure_expr(nominal_head, is_level=True)
        self._tailwater_elevation = ensure_expr(tailwater_elevation, is_level=True)
        self._voc = voc

        if production is None:
            production = AvgFlowVolume()
        self._production: AvgFlowVolume = production

    def get_power_node(self) -> str:
        """Get the power node of the hydro generator."""
        return self._power_node

    def set_power_node(self, power_node: str) -> None:
        """Set the power node of the pump unit."""
        self._check_type(power_node, str)
        self._power_node = power_node

    def get_energy_equivalent(self) -> Conversion:
        """Get the energy equivalent of the hydro generator."""
        return self._energy_eq

    def get_pq_curve(self) -> Expr | None:
        """Get the PQ curve of the hydro generator."""
        return self._pq_curve

    def get_nominal_head(self) -> Expr | None:
        """Get the nominal head of the hydro generator."""
        return self._nominal_head

    def get_tailwater_elevation(self) -> Expr | None:
        """Get the tailwater elevation of the hydro generator."""
        return self._tailwater_elevation

    def get_voc(self) -> Cost | None:
        """Get the variable operation and maintenance cost of the hydro generator."""
        return self._voc

    def set_voc(self, voc: Cost) -> None:
        """Set the variable operation and maintenance cost of the hydro generator."""
        self._check_type(voc, Cost)
        self._voc = voc

    def get_production(self) -> AvgFlowVolume:
        """Get the generation of the hydro generator."""
        return self._production

    def _get_fingerprint(self) -> Fingerprint:
        return self.get_fingerprint_default(refs={"power_node": self._power_node})
__init__(power_node: str, energy_equivalent: Conversion, pq_curve: Expr | str | Curve | None = None, nominal_head: Expr | str | TimeVector | None = None, tailwater_elevation: Expr | str | TimeVector | None = None, voc: Cost | None = None, production: AvgFlowVolume | None = None) -> None

Initialize a HydroGenerator with parameters.

Parameters:

Name Type Description Default
power_node str

Node to supply power to.

required
energy_equivalent Conversion

Conversion factor of power produced to water released.

required
pq_curve Expr | str | Curve | None

Expression or curve describing the relationship between produced power and water released. Defaults to None.

None
nominal_head Expr | str | TimeVector | None

Vertical distance between upstream and downstream water level. Defaults to None.

None
tailwater_elevation Expr | str | TimeVector | None

Elevation at the surface where the water exits the turbine. Defaults to None.

None
voc Cost | None

Variable operational costs. Defaults to None.

None
production AvgFlowVolume | None

Result of power volume produced. Defaults to None.

None
Source code in framcore/attributes/hydro/HydroGenerator.py
def __init__(
    self,
    power_node: str,
    energy_equivalent: Conversion,  # energy equivalent
    pq_curve: Expr | str | Curve | None = None,
    nominal_head: Expr | str | TimeVector | None = None,
    tailwater_elevation: Expr | str | TimeVector | None = None,
    voc: Cost | None = None,
    production: AvgFlowVolume | None = None,
) -> None:
    """
    Initialize a HydroGenerator with parameters.

    Args:
        power_node (str): Node to supply power to.
        energy_equivalent (Conversion): Conversion factor of power produced to water released.
        pq_curve (Expr | str | Curve | None, optional): Expression or curve describing the relationship between produced power and water released. Defaults to None.
        nominal_head (Expr | str | TimeVector | None, optional): Vertical distance between upstream and downstream water level. Defaults to None.
        tailwater_elevation (Expr | str | TimeVector | None, optional): Elevation at the surface where the water exits the turbine. Defaults to None.
        voc (Cost | None, optional): Variable operational costs. Defaults to None.
        production (AvgFlowVolume | None, optional): Result of power volume produced. Defaults to None.

    """
    super().__init__()

    self._check_type(power_node, str)
    self._check_type(energy_equivalent, Conversion)
    self._check_type(pq_curve, (Expr, str, Curve, type(None)))
    self._check_type(nominal_head, (Expr, str, TimeVector, type(None)))
    self._check_type(tailwater_elevation, (Expr, str, TimeVector, type(None)))
    self._check_type(voc, (Cost, type(None)))

    self._power_node = power_node
    self._energy_eq = energy_equivalent
    self._pq_curve = ensure_expr(pq_curve)
    self._nominal_head = ensure_expr(nominal_head, is_level=True)
    self._tailwater_elevation = ensure_expr(tailwater_elevation, is_level=True)
    self._voc = voc

    if production is None:
        production = AvgFlowVolume()
    self._production: AvgFlowVolume = production
get_energy_equivalent() -> Conversion

Get the energy equivalent of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_energy_equivalent(self) -> Conversion:
    """Get the energy equivalent of the hydro generator."""
    return self._energy_eq
get_nominal_head() -> Expr | None

Get the nominal head of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_nominal_head(self) -> Expr | None:
    """Get the nominal head of the hydro generator."""
    return self._nominal_head
get_power_node() -> str

Get the power node of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_power_node(self) -> str:
    """Get the power node of the hydro generator."""
    return self._power_node
get_pq_curve() -> Expr | None

Get the PQ curve of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_pq_curve(self) -> Expr | None:
    """Get the PQ curve of the hydro generator."""
    return self._pq_curve
get_production() -> AvgFlowVolume

Get the generation of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_production(self) -> AvgFlowVolume:
    """Get the generation of the hydro generator."""
    return self._production
get_tailwater_elevation() -> Expr | None

Get the tailwater elevation of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_tailwater_elevation(self) -> Expr | None:
    """Get the tailwater elevation of the hydro generator."""
    return self._tailwater_elevation
get_voc() -> Cost | None

Get the variable operation and maintenance cost of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_voc(self) -> Cost | None:
    """Get the variable operation and maintenance cost of the hydro generator."""
    return self._voc
set_power_node(power_node: str) -> None

Set the power node of the pump unit.

Source code in framcore/attributes/hydro/HydroGenerator.py
65
66
67
68
def set_power_node(self, power_node: str) -> None:
    """Set the power node of the pump unit."""
    self._check_type(power_node, str)
    self._power_node = power_node
set_voc(voc: Cost) -> None

Set the variable operation and maintenance cost of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def set_voc(self, voc: Cost) -> None:
    """Set the variable operation and maintenance cost of the hydro generator."""
    self._check_type(voc, Cost)
    self._voc = voc
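
A construction sketch (the node name is illustrative and the no-argument Conversion construction is an assumption; see __init__ above for the exact type checks):

from framcore.attributes import Conversion, HydroGenerator

generator = HydroGenerator(
    power_node="NO3",                # node the produced power is delivered to
    energy_equivalent=Conversion(),  # conversion factor, power per water released
)
generator.get_production()  # AvgFlowVolume result, populated after a model run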

HydroPump

Bases: Base

Represent a pump associated with a HydroModule.

The HydroPump can consume power from a power Node to move water upstream between two HydroModules. It has a max power capacity, and mean energy equivalent and water capacity. It can also describe the relationship between head and flow (Q), with min and max head and flow.

Results for water and power consumption are stored as AvgFlowVolume attributes.
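
A construction sketch (names are illustrative and the no-argument attribute constructors are assumptions; see __init__ below for the exact checks):

from framcore.attributes import Conversion, HydroPump, MaxFlowVolume

pump = HydroPump(
    power_node="NO2",              # node the pump draws power from
    from_module="LowerReservoir",  # water is moved from here ...
    to_module="UpperReservoir",    # ... up to here (must differ from from_module)
    water_capacity=MaxFlowVolume(),
    energy_equivalent=Conversion(),
)
pump.get_power_consumption()  # AvgFlowVolume result attribute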

Source code in framcore/attributes/hydro/HydroPump.py
class HydroPump(Base):
    """
    Represent a pump associated with a HydroModule.

    The HydroPump can consume power from a power Node to move water upstream between two HydroModules. It has a max power capacity, and mean energy
    equivalent and water capacity. It can also describe the relationship between head and flow (Q), with min and max head and flow.

    Results for water and power consumption are stored as AvgFlowVolume attributes.

    """

    def __init__(
        self,
        power_node: str,
        from_module: str,
        to_module: str,
        water_capacity: FlowVolume,
        energy_equivalent: Conversion,
        power_capacity: FlowVolume | None = None,
        head_min: Expr | str | TimeVector | None = None,
        head_max: Expr | str | TimeVector | None = None,
        q_min: Expr | str | TimeVector | None = None,
        q_max: Expr | str | TimeVector | None = None,
    ) -> None:
        """
        Initialize a HydroPump object with parameters.

        Args:
            power_node (str): Node to take power from when operating.
            from_module (str): Source HydroModule to move water from.
            to_module (str): Destination HydroModule to move water to.
            water_capacity (FlowVolume): Max pumped water volume given the mean energy equivalent and power capacity.
            energy_equivalent (Conversion): Mean conversion factor between power consumed and volume of water moved.
            power_capacity (FlowVolume | None, optional): Max power consumed. Defaults to None.
            head_min (Expr | str | TimeVector | None, optional): Minimum elevation difference between upstream and downstream water level. Defaults to None.
            head_max (Expr | str | TimeVector | None, optional): Maximum elevation difference between upstream and downstream water level. Defaults to None.
            q_min (Expr | str | TimeVector | None, optional): Maximum water flow at head_min. Defaults to None.
            q_max (Expr | str | TimeVector | None, optional): Maximum water flow at head_max. Defaults to None.

        """
        super().__init__()
        self._check_type(power_node, str)
        self._check_modules(from_module, to_module)  # checks types and that they are not the same.
        self._check_type(water_capacity, FlowVolume)
        self._check_type(power_capacity, (FlowVolume, type(None)))
        self._check_type(energy_equivalent, Conversion)
        self._check_type(head_min, (Expr, str, TimeVector, type(None)))
        self._check_type(head_max, (Expr, str, TimeVector, type(None)))
        self._check_type(q_min, (Expr, str, TimeVector, type(None)))
        self._check_type(q_max, (Expr, str, TimeVector, type(None)))

        self._power_node = power_node
        self._from_module = from_module
        self._to_module = to_module
        self._water_capacity = water_capacity
        self._energy_eq = energy_equivalent
        self._power_capacity = power_capacity

        self._hmin = ensure_expr(head_min, is_level=True)
        self._hmax = ensure_expr(head_max, is_level=True)
        self._qmin = ensure_expr(q_min, is_flow=True, is_level=True)
        self._qmax = ensure_expr(q_max, is_flow=True, is_level=True)

        self._water_consumption = AvgFlowVolume()
        self._power_consumption = AvgFlowVolume()

    def get_water_capacity(self) -> FlowVolume:
        """Get the capacity of the pump unit."""
        return self._water_capacity

    def get_power_capacity(self) -> FlowVolume:
        """Get the capacity of the pump unit."""
        return self._power_capacity

    def get_power_node(self) -> str:
        """Get the power node of the pump unit."""
        return self._power_node

    def set_power_node(self, power_node: str) -> None:
        """Set the power node of the pump unit."""
        self._check_type(power_node, str)
        self._power_node = power_node

    def get_from_module(self) -> str:
        """Get the module from which the pump unit is pumping."""
        return self._from_module

    def get_to_module(self) -> str:
        """Get the module to which the pump unit is pumping."""
        return self._to_module

    # TODO: should be split in two? Keep in mind we check that the to and from modules are not the same. So if we split this user might run into issues if
    # trying to first set from_module to to_module then change to_module.
    def set_modules(self, from_module: str, to_module: str) -> None:
        """Set the modules for the pump unit."""
        self._check_modules(from_module, to_module)
        self._from_module = from_module
        self._to_module = to_module

    def get_water_consumption(self) -> FlowVolume:
        """Get the water consumption of the pump unit."""
        return self._water_consumption

    def get_power_consumption(self) -> FlowVolume:
        """Get the power consumption of the pump unit."""
        return self._power_consumption

    def _check_modules(self, from_module: str, to_module: str) -> None:
        self._check_type(from_module, str)
        self._check_type(to_module, str)
        if from_module == to_module:
            message = f"{self} cannot pump to and from the same module. Got {from_module} for both from_module and to_module."
            raise ValueError(message)

    def _check_base_module_name(self, base_name: str) -> None:
        if base_name not in (self._from_module, self._to_module):
            message = (
                f"Module {base_name} has not been coupled correctly to its pump {self}. Pump is coupled to modules {self._from_module} and {self._to_module}"
            )
            raise RuntimeError(message)

    # other parameters
    def get_energy_equivalent(self) -> Conversion:
        """Get the energy equivalent of hydro pump."""
        return self._energy_eq

    def set_energy_eq(self, energy_eq: Conversion) -> None:
        """Set the energy equivalent."""
        self._check_type(energy_eq, Conversion)
        self._energy_eq = energy_eq

    def get_head_min(self) -> Expr:
        """Get min fall height of hydro pump."""
        return self._hmin

    def set_head_min(self, head_min: Expr | str | None) -> None:
        """Set min fall height."""
        self._hmin = ensure_expr(head_min)

    def get_head_max(self) -> Expr:
        """Get max fall height of hydro pump."""
        return self._hmax

    def set_head_max(self, hmax: Expr | str | None) -> None:
        """Set max fall height."""
        self._hmax = ensure_expr(hmax)

    def get_q_min(self) -> Expr:
        """Get Q min of hydro pump."""
        return self._qmin

    def set_qmin(self, q_min: Expr | str | None) -> None:
        """Set Q min."""
        self._qmin = ensure_expr(q_min)

    def get_q_max(self) -> Expr:
        """Get Q max of hydro pump."""
        return self._qmax

    def set_qmax(self, q_max: Expr | str | None) -> None:
        """Set Q max."""
        self._qmax = ensure_expr(q_max)

    def _get_fingerprint(self) -> Fingerprint:
        return self.get_fingerprint_default(
            refs={
                "power_node": self._power_node,
                "from_module": self._from_module,
                "to_module": self._to_module,
            },
        )
__init__(power_node: str, from_module: str, to_module: str, water_capacity: FlowVolume, energy_equivalent: Conversion, power_capacity: FlowVolume | None = None, head_min: Expr | str | TimeVector | None = None, head_max: Expr | str | TimeVector | None = None, q_min: Expr | str | TimeVector | None = None, q_max: Expr | str | TimeVector | None = None) -> None

Initialize a HydroPump object's parameters.

Parameters:

Name Type Description Default
power_node str

Node to take power from when operating.

required
from_module str

Source HydroModule to move water from.

required
to_module str

Destination HydroModule to move water to.

required
water_capacity FlowVolume

Max pumped water volume given the mean energy equivalent and power capacity.

required
energy_equivalent Conversion

Mean conversion factor between power consumed and volume of water moved.

required
power_capacity FlowVolume | None

Max power consumed. Defaults to None.

None
head_min Expr | str | TimeVector | None

Minimum elevation difference between upstream and downstream water level. Defaults to None.

None
head_max Expr | str | TimeVector | None

Maximum elevation difference between upstream and downstream water level. Defaults to None.

None
q_min Expr | str | TimeVector | None

Maximum water flow at head_min. Defaults to None.

None
q_max Expr | str | TimeVector | None

Maximum water flow at head_max. Defaults to None.

None
Source code in framcore/attributes/hydro/HydroPump.py
def __init__(
    self,
    power_node: str,
    from_module: str,
    to_module: str,
    water_capacity: FlowVolume,
    energy_equivalent: Conversion,
    power_capacity: FlowVolume | None = None,
    head_min: Expr | str | TimeVector | None = None,
    head_max: Expr | str | TimeVector | None = None,
    q_min: Expr | str | TimeVector | None = None,
    q_max: Expr | str | TimeVector | None = None,
) -> None:
    """
    Initialize a HydroPump object's parameters.

    Args:
        power_node (str): Node to take power from when operating.
        from_module (str): Source HydroModule to move water from.
        to_module (str): Destination HydroModule to move water to.
        water_capacity (FlowVolume): Max pumped water volume given the mean energy equivalent and power capacity.
        energy_equivalent (Conversion): Mean conversion factor between power consumed and volume of water moved.
        power_capacity (FlowVolume | None, optional): Max power consumed. Defaults to None.
        head_min (Expr | str | TimeVector | None, optional): Minimum elevation difference between upstream and downstream water level. Defaults to None.
        head_max (Expr | str | TimeVector | None, optional): Maximum elevation difference between upstream and downstream water level. Defaults to None.
        q_min (Expr | str | TimeVector | None, optional): Maximum water flow at head_min. Defaults to None.
        q_max (Expr | str | TimeVector | None, optional): Maximum water flow at head_max. Defaults to None.

    """
    super().__init__()
    self._check_type(power_node, str)
    self._check_modules(from_module, to_module)  # checks types and that they are not the same.
    self._check_type(water_capacity, FlowVolume)
    self._check_type(power_capacity, (FlowVolume, type(None)))
    self._check_type(energy_equivalent, Conversion)
    self._check_type(head_min, (Expr, str, TimeVector, type(None)))
    self._check_type(head_max, (Expr, str, TimeVector, type(None)))
    self._check_type(q_min, (Expr, str, TimeVector, type(None)))
    self._check_type(q_max, (Expr, str, TimeVector, type(None)))

    self._power_node = power_node
    self._from_module = from_module
    self._to_module = to_module
    self._water_capacity = water_capacity
    self._energy_eq = energy_equivalent
    self._power_capacity = power_capacity

    self._head_min = ensure_expr(head_min, is_level=True)
    self._hmax = ensure_expr(head_max, is_level=True)
    self._q_min = ensure_expr(q_min, is_flow=True, is_level=True)
    self._q_max = ensure_expr(q_max, is_flow=True, is_level=True)

    self._water_consumption = AvgFlowVolume()
    self._power_consumption = AvgFlowVolume()
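
For orientation, a minimal construction sketch. The import paths and the value/unit keyword arguments of AvgFlowVolume and Conversion are assumptions (modelled on the LevelProfile initializer documented later on this page); all names and numbers are hypothetical:

from framcore.attributes import AvgFlowVolume, Conversion, HydroPump  # assumed import paths

pump = HydroPump(
    power_node="power_node_no1",                             # hypothetical power node
    from_module="module_lower",                              # hypothetical source HydroModule
    to_module="module_upper",                                # hypothetical destination HydroModule
    water_capacity=AvgFlowVolume(value=60.0, unit="m3/s"),   # assumed value/unit constructor
    energy_equivalent=Conversion(value=1.1, unit="kWh/m3"),  # assumed value/unit constructor
)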
get_energy_equivalent() -> Conversion

Get the energy equivalent of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_energy_equivalent(self) -> Conversion:
    """Get the energy equivalent of hydro pump."""
    return self._energy_eq
get_from_module() -> str

Get the module from which the pump unit is pumping.

Source code in framcore/attributes/hydro/HydroPump.py
def get_from_module(self) -> str:
    """Get the module from which the pump unit is pumping."""
    return self._from_module
get_head_max() -> Expr

Get max fall height of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_head_max(self) -> Expr:
    """Get max fall height of hydro pump."""
    return self._hmax
get_head_min() -> Expr

Get min fall height of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_head_min(self) -> Expr:
    """Get min fall height of hydro pump."""
    return self._head_min
get_power_capacity() -> FlowVolume

Get the power capacity of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_power_capacity(self) -> FlowVolume:
    """Get the capacity of the pump unit."""
    return self._power_capacity
get_power_consumption() -> FlowVolume

Get the power consumption of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_power_consumption(self) -> FlowVolume:
    """Get the power consumption of the pump unit."""
    return self._power_consumption
get_power_node() -> str

Get the power node of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_power_node(self) -> str:
    """Get the power node of the pump unit."""
    return self._power_node
get_q_max() -> Expr

Get Q max of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_q_max(self) -> Expr:
    """Get Q max of hydro pump."""
    return self._q_max
get_q_min() -> Expr

Get Q min of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_q_min(self) -> Expr:
    """Get Q min of hydro pump."""
    return self._q_min
get_to_module() -> str

Get the module to which the pump unit is pumping.

Source code in framcore/attributes/hydro/HydroPump.py
def get_to_module(self) -> str:
    """Get the module to which the pump unit is pumping."""
    return self._to_module
get_water_capacity() -> FlowVolume

Get the water capacity of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_water_capacity(self) -> FlowVolume:
    """Get the capacity of the pump unit."""
    return self._water_capacity
get_water_consumption() -> FlowVolume

Get the water consumption of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_water_consumption(self) -> FlowVolume:
    """Get the water consumption of the pump unit."""
    return self._water_consumption
set_energy_eq(energy_eq: Conversion) -> None

Set the energy equivalent.

Source code in framcore/attributes/hydro/HydroPump.py
def set_energy_eq(self, energy_eq: Conversion) -> None:
    """Set the energy equivalent."""
    self._check_type(energy_eq, Conversion)
    self._energy_eq = energy_eq
set_head_max(hmax: Expr | str | None) -> None

Set max fall height.

Source code in framcore/attributes/hydro/HydroPump.py
def set_head_max(self, hmax: Expr | str | None) -> None:
    """Set max fall height."""
    self._hmax = ensure_expr(hmax)
set_head_min(head_min: Expr | str | None) -> None

Set min fall height.

Source code in framcore/attributes/hydro/HydroPump.py
def set_head_min(self, head_min: Expr | str | None) -> None:
    """Set min fall height."""
    self._head_min = ensure_expr(head_min)
set_modules(from_module: str, to_module: str) -> None

Set the modules for the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def set_modules(self, from_module: str, to_module: str) -> None:
    """Set the modules for the pump unit."""
    self._check_modules(from_module, to_module)
    self._from_module = from_module
    self._to_module = to_module
set_power_node(power_node: str) -> None

Set the power node of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def set_power_node(self, power_node: str) -> None:
    """Set the power node of the pump unit."""
    self._check_type(power_node, str)
    self._power_node = power_node
set_qmax(q_max: Expr | str | None) -> None

Set Q max.

Source code in framcore/attributes/hydro/HydroPump.py
def set_qmax(self, q_max: Expr | str | None) -> None:
    """Set Q max."""
    self._q_max = ensure_expr(q_max)
set_qmin(q_min: Expr | str | None) -> None

Set Q min.

Source code in framcore/attributes/hydro/HydroPump.py
def set_qmin(self, q_min: Expr | str | None) -> None:
    """Set Q min."""
    self._q_min = ensure_expr(q_min)
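
The head and flow setters accept an Expr, a string, or None, and pass the input through ensure_expr. A hedged sketch, where the string is a hypothetical identifier of a time series known to the model:

pump.set_head_min("pump_head_min_series")  # hypothetical reference; wrapped into an Expr by ensure_expr
pump.set_qmax(None)                        # assuming ensure_expr(None) returns None, this clears Q max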

HydroReservoir

Bases: Storage

Represent a hydro reservoir of a HydroModule.

Source code in framcore/attributes/hydro/HydroReservoir.py
class HydroReservoir(Storage):
    """Represent a hydro reservoir of a HydroModule."""

    def __init__(
        self,
        capacity: StockVolume,
        reservoir_curve: ReservoirCurve | None = None,
        volume: StockVolume | None = None,
    ) -> None:
        """
        Initialize a HydroReservoir instance.

        Args:
            capacity (StockVolume): The maximum storage capacity of the reservoir.
            reservoir_curve (ReservoirCurve, optional): The curve describing the relationship between water level elevation and volume.
            volume (StockVolume, optional): Volume of water in the reservoir.

        """
        super().__init__(
            capacity=capacity,
            reservoir_curve=reservoir_curve,
            volume=volume,
        )
__init__(capacity: StockVolume, reservoir_curve: ReservoirCurve | None = None, volume: StockVolume | None = None) -> None

Initialize a HydroReservoir instance.

Parameters:

Name Type Description Default
capacity StockVolume

The maximum storage capacity of the reservoir.

required
reservoir_curve ReservoirCurve

The curve describing the relationship between water level elevation and volume.

None
volume StockVolume

Volume of water in the reservoir.

None
Source code in framcore/attributes/hydro/HydroReservoir.py
def __init__(
    self,
    capacity: StockVolume,
    reservoir_curve: ReservoirCurve | None = None,
    volume: StockVolume | None = None,
) -> None:
    """
    Initialize a HydroReservoir instance.

    Args:
        capacity (StockVolume): The maximum storage capacity of the reservoir.
        reservoir_curve (ReservoirCurve, optional): The curve describing the relationship between water level elevation and volume.
        volume (StockVolume, optional): Volume of water in the reservoir.

    """
    super().__init__(
        capacity=capacity,
        reservoir_curve=reservoir_curve,
        volume=volume,
    )
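
A minimal construction sketch, assuming StockVolume accepts value and unit keyword arguments like the other attribute classes on this page; import paths, numbers, and unit are hypothetical:

from framcore.attributes import HydroReservoir, StockVolume  # assumed import paths

reservoir = HydroReservoir(
    capacity=StockVolume(value=205.0, unit="Mm3"),  # assumed value/unit constructor
)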

LevelProfile

Bases: Base, ABC

Attributes representing timeseries data for Components. Mostly as Level * Profile, where both Level and Profile are Expr (expressions).

Level and Profile represent two distinct dimensions of time. This is because we want to simulate future system states with historical weather patterns. Therefore, Level represents the system state at a given time (data_dim), while Profile represents the scenario dimension (scen_dim). A Level would for example represent the installed capacity of solar plants towards 2030, while the Profile would represent the historical variation between 1991-2020.

Level and Profile can have two main formats: A maximum Level with a Profile that varies between 0-1, and an average Level with a Profile with a mean of 1 (the latter can have a ReferencePeriod). The max format is, for example, used for capacities, while the mean format can be used for prices and flows. The system needs to be able to convert between the two formats. This is especially important for aggregations (for example weighted averages) where all the TimeVectors need to be on the same format for a correct result. One simple example of conversion is pairing a max Level of 100 MW with a mean_one Profile [0, 1, 2]. Asking for this on the max format will return the series 100 * [0, 0.5, 1] MW, while on the avg format it will return 50 * [0, 1, 2] MW.

Queries to LevelProfile need to provide a database, the desired target TimeIndex for both dimensions, the target unit and the desired format. At the moment we support these queries for LevelProfile:
- self.get_data_value(db, scen_dim, data_dim, unit, is_max_level)
- self.get_scenario_vector(db, scen_dim, data_dim, unit, is_float32)

In addition, we have the possibility to shift, scale, and change the intercept of the LevelProfiles. Then we get the full representation: Scale * (Level + Level_shift) * Profile + Intercept.
- Level_shift adds a constant value to Level, has the same Profile as Level.
- Scale multiplies (Level + Level_shift) by a constant value.
- Intercept adds a constant value to LevelProfile, ignoring Level and Profile. This is the only way of supporting a timeseries that crosses zero in our system. This functionality is under development and has not been properly tested.

LevelProfiles also have additional properties that describe their behaviour. These can be used for initialization, validation, and to simplify queries. The properties are:
- is_stock: True if attribute is a stock variable. Level Expr should also have is_stock=True. See Expr for details.
- is_flow: True if attribute is a flow variable. Level Expr should also have is_flow=True. See Expr for details.
- is_not_negative: True if attribute is not allowed to have negative values. Level Expr should also have only non-negative values.
- is_max_and_zero_one: Preferred format of Level and Profile. Used for initialization and queries.
- is_ingoing: True if attribute is ingoing, False if outgoing, None if neither.
- is_cost: True if attribute is objective function cost coefficient. Else None.
- is_unitless: True if attribute is known to be unitless. False if known to have a unit that is not None. Else None.
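
To make the format conversion concrete, here is a plain NumPy sketch of the docstring's own 100 MW example; it uses no framcore types:

import numpy as np

profile_mean_one = np.array([0.0, 1.0, 2.0])  # mean-one profile from the example above
level_max = 100.0                             # max level in MW

series_max_format = level_max * (profile_mean_one / profile_mean_one.max())  # 100 * [0, 0.5, 1]
level_avg = series_max_format.mean()                                         # 50.0
series_avg_format = level_avg * profile_mean_one                             # 50 * [0, 1, 2]

assert (series_max_format == series_avg_format).all()  # both formats describe the same series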

Source code in framcore/attributes/level_profile_attributes.py
class LevelProfile(Base, ABC):
    """
    Attributes representing timeseries data for Components. Mostly as Level * Profile, where both Level and Profile are Expr (expressions).

    Level and Profile represent two distinct dimensions of time. This is because we want to simulate future system states with historical weather patterns.
    Therefore, Level represents the system state at a given time (data_dim), while Profile represents the scenario dimension (scen_dim).
    A Level would for example represent the installed capacity of solar plants towards 2030,
    while the Profile would represent the historical variation between 1991-2020.

    Level and Profile can have two main formats: A maximum Level with a Profile that varies between 0-1,
    and an average Level with a Profile with a mean of 1 (the latter can have a ReferencePeriod).
    The max format is, for example, used for capacities, while the mean format can be used for prices and flows.
    The system needs to be able to convert between the two formats. This is especially important for aggregations
    (for example weighted averages) where all the TimeVectors need to be on the same format for a correct result.
    One simple example of conversion is pairing a max Level of 100 MW with a mean_one Profile [0, 1, 2].
    Asking for this on the max format will return the series 100*[0, 0.5, 1] MW, while on the avg format it will return 50*[0, 1, 2] MW.

    Queries to LevelProfile need to provide a database, the desired target TimeIndex for both dimensions, the target unit and the desired format.
    At the moment we support these queries for LevelProfile:
    - self.get_data_value(db, scen_dim, data_dim, unit, is_max_level)
    - self.get_scenario_vector(db, scen_dim, data_dim, unit, is_float32)

    In addition, we have the possibility to shift, scale, and change the intercept of the LevelProfiles.
    Then we get the full representation: Scale * (Level + Level_shift) * Profile + Intercept.
    - Level_shift adds a constant value to Level, has the same Profile as Level.
    - Scale multiplies (Level + Level_shift) by a constant value.
    - Intercept adds a constant value to LevelProfile, ignoring Level and Profile. **This is the only way of supporting a timeseries that crosses zero
        in our system. This functionality is under development and has not been properly tested.**

    LevelProfiles also have additional properties that describe their behaviour. These can be used for initialization, validation,
    and to simplify queries. The properties are:
    - is_stock: True if attribute is a stock variable. Level Expr should also have is_stock=True. See Expr for details.
    - is_flow: True if attribute is a flow variable. Level Expr should also have is_flow=True. See Expr for details.
    - is_not_negative: True if attribute is not allowed to have negative values. Level Expr should also have only non-negative values.
    - is_max_and_zero_one: Preferred format of Level and Profile. Used for initialization and queries.
    - is_ingoing: True if attribute is ingoing, False if outgoing, None if neither.
    - is_cost: True if attribute is objective function cost coefficient. Else None.
    - is_unitless: True if attribute is known to be unitless. False if known to have a unit that is not None. Else None.

    """

    # must be overwritten by subclass when otherwise
    # don't change the defaults
    _IS_ABSTRACT: bool = True
    _IS_STOCK: bool = False
    _IS_FLOW: bool = False
    _IS_NOT_NEGATIVE: bool = True
    _IS_MAX_AND_ZERO_ONE: bool = False

    # must be set by subclass when applicable
    _IS_INGOING: bool | None = None
    _IS_COST: bool | None = None
    _IS_UNITLESS: bool | None = None

    def __init__(
        self,
        level: Expr | TimeVector | str | None = None,
        profile: Expr | TimeVector | str | None = None,
        value: float | int | None = None,  # To support Price(value=20, unit="EUR/MWh")
        unit: str | None = None,
        level_shift: Expr | None = None,
        intercept: Expr | None = None,
        scale: Expr | None = None,
    ) -> None:
        """
        Initialize LevelProfile.

        See the LevelProfile class docstring for details. A complete LevelProfile is represented as:
        Scale * (Level + Level_shift) * Profile + Intercept. Normally only Level and Profile are used.

        Either give level and profile, or value and unit.

        Args:
            level (Expr | TimeVector | str | None, optional): Level Expr. Defaults to None.
            profile (Expr | TimeVector | str | None, optional): Profile Expr. Defaults to None.
            value (float | int | None, optional): A constant value to initialize Level. Defaults to None.
            unit (str | None, optional): Unit of the constant value to initialize Level. Defaults to None.
            level_shift (Expr | None, optional): Level_shift Expr. Defaults to None.
            intercept (Expr | None, optional): Intercept Expr. Defaults to None.
            scale (Expr | None, optional): Scale Expr. Defaults to None.

        """
        self._assert_invariants()

        self._check_type(value, (float, int, type(None)))
        self._check_type(unit, (str, type(None)))
        self._check_type(level, (Expr, TimeVector, str, type(None)))
        self._check_type(profile, (Expr, TimeVector, str, type(None)))
        self._check_type(level_shift, (Expr, type(None)))
        self._check_type(intercept, (Expr, type(None)))
        self._check_type(scale, (Expr, type(None)))
        level = self._ensure_level_expr(level, value, unit)
        profile = self._ensure_profile_expr(profile)
        self._ensure_compatible_level_profile_combo(level, profile)
        self._ensure_compatible_level_profile_combo(level_shift, profile)
        self._level: Expr | None = level
        self._profile: Expr | None = profile
        self._level_shift: Expr | None = level_shift
        self._intercept: Expr | None = intercept
        self._scale: Expr | None = scale
        # TODO: Validate that profiles are equal in level and level_shift.
        # TODO: Validate that level_shift, scale and intercept only consist of Exprs with ConstantTimeVectors
        # TODO: Validate that level_shift, level_scale and intercept have correct Expr properties

    def _assert_invariants(self) -> None:
        abstract = self._IS_ABSTRACT
        max_level_profile = self._IS_MAX_AND_ZERO_ONE
        stock = self._IS_STOCK
        flow = self._IS_FLOW
        unitless = self._IS_UNITLESS
        ingoing = self._IS_INGOING
        cost = self._IS_COST
        not_negative = self._IS_NOT_NEGATIVE

        assert not abstract, "Abstract types should only be used for type hints and checks."
        assert isinstance(max_level_profile, bool)
        assert isinstance(stock, bool)
        assert isinstance(flow, bool)
        assert isinstance(not_negative, bool)
        assert isinstance(ingoing, bool | type(None))
        assert isinstance(unitless, bool | type(None))
        assert isinstance(cost, bool | type(None))
        assert not (flow and stock)
        if flow or stock:
            assert not unitless, "flow and stock must have unit that is not None."
            assert not_negative, "flow and stock cannot have negative values."
        if ingoing is True:
            assert cost is None, "cost must be None when ingoing is True."
        if cost is True:
            assert ingoing is None, "ingoing must be None when cost is True."

        parent = super()
        if isinstance(parent, LevelProfile) and not parent._IS_ABSTRACT:  # noqa: SLF001
            self._assert_same_behaviour(parent)

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in expressions to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_level())
        add_loaders_if(loaders, self.get_profile())

    def clear(self) -> None:
        """
        Set all internal fields to None.

        You may want to use this to make an exogenous flow use capacities instead of volume.
        """
        self._level = None
        self._profile = None
        self._level_shift = None
        self._intercept = None
        self._scale = None

    def is_stock(self) -> bool:
        """
        Return True if attribute is a stock variable.

        Return False if attribute is not a stock variable.
        """
        return self._IS_STOCK

    def is_flow(self) -> bool:
        """
        Return True if attribute is a flow variable.

        Return False if attribute is not a flow variable.
        """
        return self._IS_FLOW

    def is_not_negative(self) -> bool:
        """
        Return True if attribute is not allowed to have negative values.

        Return False if attribute can have both positive and negative values.
        """
        return self._IS_NOT_NEGATIVE

    def is_max_and_zero_one(self) -> bool:
        """
        When True level should be max (not average) and corresponding profile should be zero_one (not mean_one).

        When False level should be average (not max) and corresponding profile should be mean_one (not zero_one).
        """
        return self._IS_MAX_AND_ZERO_ONE

    def is_ingoing(self) -> bool | None:
        """
        Return True if attribute is ingoing.

        Return False if attribute is outgoing.

        Return None if not applicable.
        """
        return self._IS_INGOING

    def is_cost(self) -> bool | None:
        """
        Return True if attribute is objective function cost coefficient.

        Return False if attribute is objective function revenue coefficient.

        Return None if not applicable.
        """
        return self._IS_COST

    def is_unitless(self) -> bool | None:
        """
        Return True if attribute is known to be unitless.

        Return False if attribute is known to have a unit that is not None.

        Return None if not applicable.
        """
        return self._IS_UNITLESS

    def has_level(self) -> bool:
        """Return True if get_level will return value not None."""
        return (self._level is not None) or (self._level_shift is not None)

    def has_profile(self) -> bool:
        """Return True if get_profile will return value not None."""
        return self._profile is not None

    def has_intercept(self) -> bool:
        """Return True if get_intercept will return value not None."""
        return self._intercept is not None

    def copy_from(self, other: LevelProfile) -> None:
        """Copy fields from other."""
        self._check_type(other, LevelProfile)
        self._assert_same_behaviour(other)
        self._level = other._level
        self._profile = other._profile
        self._level_shift = other._level_shift
        self._intercept = other._intercept
        self._scale = other._scale

    def get_level(self) -> Expr | None:
        """Get level part of (level * profile + intercept)."""
        level = self._level

        if level is None:
            return None

        if level.is_leaf():
            level = Expr(
                src=level.get_src(),
                operations=level.get_operations(expect_ops=False, copy_list=True),
                is_stock=level.is_stock(),
                is_flow=level.is_flow(),
                is_level=True,
                is_profile=False,
                profile=self._profile,
            )

        if self._level_shift is not None:
            level += self._level_shift

        if self._scale is not None:
            level *= self._scale

        return level

    def set_level(self, level: Expr | TimeVector | str | None) -> None:
        """Set level part of (scale * (level + level_shift) * profile + intercept)."""
        self._check_type(level, (Expr, TimeVector, str, type(None)))
        level = self._ensure_level_expr(level)
        self._ensure_compatible_level_profile_combo(level, self._profile)
        self._level = level

    def get_profile(self) -> Expr | None:
        """Get profile part of (level * profile + intercept)."""
        return self._profile

    def set_profile(self, profile: Expr | TimeVector | str | None) -> None:
        """Set profile part of (scale * (level + level_shift) * profile + intercept)."""
        self._check_type(profile, (Expr, TimeVector, str, type(None)))
        profile = self._ensure_profile_expr(profile)
        self._ensure_compatible_level_profile_combo(self._level, profile)
        self._profile = profile

    def get_intercept(self) -> Expr | None:
        """Get intercept part of (level * profile + intercept)."""
        intercept = self._intercept
        if intercept is not None and self._scale is not None:
            intercept *= self._scale
        return intercept

    def set_intercept(self, value: Expr | None) -> None:
        """Set intercept part of (level * profile + intercept)."""
        self._check_type(value, (Expr, type(None)))
        if value is not None:
            self._check_level_expr(value)
        self._intercept = value

    def get_level_unit_set(
        self,
        db: QueryDB | Model,
    ) -> set[str]:
        """
        Return set with all units behind level expression.

        Useful for discovering valid unit input to get_level_value.
        """
        if not self.has_level():
            return set()
        return get_units_from_expr(db, self.get_level())

    def get_profile_timeindex_set(
        self,
        db: QueryDB | Model,
    ) -> set[TimeIndex]:
        """
        Return set with all TimeIndex behind profile expression.

        Can be used to run optimized queries, i.e. not asking for
        finer time resolutions than necessary.
        """
        if not self.has_profile():
            return set()
        return get_timeindexes_from_expr(db, self.get_profile())

    def get_scenario_vector(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_float32: bool = True,
    ) -> NDArray:
        """
        Evaluate LevelProfile over the periods in scenario dimension, and at the level period of the data dimension.

        Underlying profiles are evaluated over the scenario dimension,
        and levels are evaluated to scalars over level_period in the data dimension.

        Args:
            db (QueryDB | Model): The database or model instance used to fetch the required data.
            scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
            level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
            unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
                the expression should be unitless.
            is_float32 (bool, optional): Whether to return the vector as a NumPy array with `float32`
                precision. Defaults to True.

        """
        return self._get_scenario_vector(db, scenario_horizon, level_period, unit, is_float32)

    def get_data_value(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_max_level: bool | None = None,
    ) -> float:
        """
        Evaluate LevelProfile to a scalar at the level period of the data dimension, and as an average over the scenario horizon.

        Args:
            db (QueryDB | Model): The database or model instance used to fetch the required data.
            scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
            level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
            unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
                the expression should be unitless.
            is_max_level (bool | None, optional): Whether to evaluate the expression as a maximum level (with a zero_one profile)
                or as an average level (with a mean_one profile). If None, the default format of the attribute is used.

        """
        return self._get_data_value(db, scenario_horizon, level_period, unit, is_max_level)

    def shift_intercept(self, value: float, unit: str | None) -> None:
        """Modify the intercept part of (level * profile + intercept) of an attribute by adding a constant value."""
        expr = ensure_expr(
            ConstantTimeVector(self._ensure_float(value), unit=unit, is_max_level=False),
            is_level=True,
            is_profile=False,
            is_stock=self._IS_STOCK,
            is_flow=self._IS_FLOW,
            profile=None,
        )
        if self._intercept is None:
            self._intercept = expr
        else:
            self._intercept += expr

    def shift_level(
        self,
        value: float | int,
        unit: str | None = None,
        reference_period: ReferencePeriod | None = None,
        is_max_level: bool | None = None,
        use_profile: bool = True,  # TODO: Remove. Should always use profile. If has profile validate that it is equal to the profile of Level.
    ) -> None:
        """Modify the level_shift part of (scale * (level + level_shift) * profile + intercept) of an attribute by adding a constant value."""
        # TODO: Not allowed to shift if there is intercept?
        self._check_type(value, (float, int))
        self._check_type(unit, (str, type(None)))
        self._check_type(reference_period, (ReferencePeriod, type(None)))
        self._check_type(is_max_level, (bool, type(None)))
        self._check_type(use_profile, bool)

        if is_max_level is None:
            is_max_level = self._IS_MAX_AND_ZERO_ONE

        expr = ensure_expr(
            ConstantTimeVector(
                self._ensure_float(value),
                unit=unit,
                is_max_level=is_max_level,
                reference_period=reference_period,
            ),
            is_level=True,
            is_profile=False,
            is_stock=self._IS_STOCK,
            is_flow=self._IS_FLOW,
            profile=self._profile if use_profile else None,
        )
        if self._level_shift is None:
            self._level_shift = expr
        else:
            self._level_shift += expr

    def scale(self, value: float | int) -> None:
        """Modify the scale part of (scale * (level + level_shift) * profile + intercept) of an attribute by multiplying with a constant value."""
        # TODO: Not allowed to scale if there is intercept?
        expr = ensure_expr(
            ConstantTimeVector(self._ensure_float(value), unit=None, is_max_level=False),
            is_level=True,
            is_profile=False,
            profile=None,
        )
        if self._scale is None:
            self._scale = expr
        else:
            self._scale *= expr

    def _ensure_level_expr(
        self,
        level: Expr | str | TimeVector | None,
        value: float | int | None = None,
        unit: str | None = None,
        reference_period: ReferencePeriod | None = None,
    ) -> Expr | None:
        if value is not None:
            level = ConstantTimeVector(
                scalar=float(value),
                unit=unit,
                is_max_level=self._IS_MAX_AND_ZERO_ONE,
                is_zero_one_profile=None,
                reference_period=reference_period,
            )
        if level is None:
            return None

        if isinstance(level, Expr):
            self._check_level_expr(level)
            return level

        return Expr(
            src=level,
            is_flow=self._IS_FLOW,
            is_stock=self._IS_STOCK,
            is_level=True,
            is_profile=False,
            profile=None,
        )

    def _ensure_compatible_level_profile_combo(self, level: Expr | None, profile: Expr | None) -> None:
        """Check that all profiles in leaf levels (in level) also exist in profile."""
        if level is None or profile is None:
            return

        leaf_level_profiles = get_profile_exprs_from_leaf_levels(level)
        leaf_profile_profiles = get_leaf_profiles(profile)

        for p in leaf_level_profiles:
            if p not in leaf_profile_profiles:
                message = (
                    f"Incompatible level/profile combination because all profiles in leaf levels (in level) does not exist in profile. "
                    f"Profile expression {p} found in level {level} but not in profile."
                )
                raise ValueError(message)

    def _check_level_expr(self, expr: Expr) -> None:
        msg = f"{self} requires {expr} to be "
        if expr.is_stock() != self._IS_STOCK:
            raise ValueError(msg + f"is_stock={self._IS_STOCK}")
        if expr.is_flow() != self._IS_FLOW:
            raise ValueError(msg + f"is_flow={self._IS_FLOW}")
        if expr.is_level() is False:
            raise ValueError(msg + "is_level=True")
        if expr.is_profile() is True:
            raise ValueError(msg + "is_profile=False")

    def _check_profile_expr(self, expr: Expr) -> None:
        msg = f"{self} requires {expr} to be "
        if expr.is_stock() is True:
            raise ValueError(msg + "is_stock=False")
        if expr.is_flow() is True:
            raise ValueError(msg + "is_flow=False")
        if expr.is_level() is True:
            raise ValueError(msg + "is_level=False")
        if expr.is_profile() is False:
            raise ValueError(msg + "is_profile=True")

    def _ensure_profile_expr(
        self,
        value: Expr | str | TimeVector | None,
    ) -> Expr | None:
        if value is None:
            return None

        if isinstance(value, Expr):
            self._check_profile_expr(value)
            return value

        return Expr(
            src=value,
            is_flow=False,
            is_stock=False,
            is_level=False,
            is_profile=True,
            profile=None,
        )

    def _get_data_value(
        self,
        db: QueryDB,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_max_level: bool | None,
    ) -> float:
        # NB! don't type check db, as this is done in get_level_value and get_profile_vector
        self._check_type(scenario_horizon, FixedFrequencyTimeIndex)
        self._check_type(level_period, SinglePeriodTimeIndex)
        self._check_type(unit, (str, type(None)))
        self._check_type(is_max_level, (bool, type(None)))

        level_expr = self.get_level()

        if is_max_level is None:
            is_max_level = self._IS_MAX_AND_ZERO_ONE

        self._check_type(level_expr, (Expr, type(None)))
        if not isinstance(level_expr, Expr):
            raise ValueError("Attribute level Expr is None. Have you called Solver.solve yet?")

        level_value = get_level_value(
            expr=level_expr,
            db=db,
            scen_dim=scenario_horizon,
            data_dim=level_period,
            unit=unit,
            is_max=is_max_level,
        )

        intercept = None
        if self._intercept is not None:
            intercept = _get_constant_from_expr(
                self._intercept,
                db,
                unit=unit,
                data_dim=level_period,
                scen_dim=scenario_horizon,
                is_max=is_max_level,
            )

        if intercept is None:
            return level_value

        return level_value + intercept

    def _get_scenario_vector(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_float32: bool = True,
    ) -> NDArray:
        """Return vector with values along the given scenario horizon using level over level_period."""
        # NB! don't type check db, as this is done in get_level_value and get_profile_vector
        self._check_type(scenario_horizon, FixedFrequencyTimeIndex)
        self._check_type(level_period, SinglePeriodTimeIndex)
        self._check_type(unit, (str, type(None)))
        self._check_type(is_float32, bool)

        level_expr = self.get_level()

        self._check_type(level_expr, (Expr, type(None)))
        if not isinstance(level_expr, Expr):
            raise ValueError("Attribute level Expr is None. Have you called Solver.solve yet?")

        level_value = get_level_value(
            expr=level_expr,
            db=db,
            scen_dim=scenario_horizon,
            data_dim=level_period,
            unit=unit,
            is_max=self._IS_MAX_AND_ZERO_ONE,
        )

        profile_expr = self.get_profile()

        if profile_expr is None:
            profile_vector = np.ones(
                scenario_horizon.get_num_periods(),
                dtype=np.float32 if is_float32 else np.float64,
            )
        else:
            profile_vector = get_profile_vector(
                expr=profile_expr,
                db=db,
                scen_dim=scenario_horizon,
                data_dim=level_period,
                is_zero_one=self._IS_MAX_AND_ZERO_ONE,
                is_float32=is_float32,
            )

        intercept = None
        if self._intercept is not None:
            intercept = _get_constant_from_expr(
                self._intercept,
                db,
                unit=unit,
                data_dim=level_period,
                scen_dim=scenario_horizon,
                is_max=self._IS_MAX_AND_ZERO_ONE,
            )

        if intercept is None:
            return level_value * profile_vector

        return level_value * profile_vector + intercept

    def _has_same_behaviour(self, other: LevelProfile) -> bool:
        return all(
            (
                self._IS_FLOW == other._IS_FLOW,
                self._IS_STOCK == other._IS_STOCK,
                self._IS_NOT_NEGATIVE == other._IS_NOT_NEGATIVE,
                self._IS_MAX_AND_ZERO_ONE == other._IS_MAX_AND_ZERO_ONE,
                self._IS_INGOING == other._IS_INGOING,
                self._IS_COST == other._IS_COST,
                self._IS_UNITLESS == other._IS_UNITLESS,
            ),
        )

    def _assert_same_behaviour(self, other: LevelProfile) -> None:
        if not self._has_same_behaviour(other):
            message = f"Not same behaviour for {self} and {other}"
            raise ValueError(message)

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Return True if other is equal to self."""
        if not isinstance(other, LevelProfile):
            return False
        if not self._has_same_behaviour(other):
            return False
        return all(
            (
                self._level == other._level,
                self._profile == other._profile,
                self._level_shift == other._level_shift,
                self._intercept == other._intercept,
                self._scale == other._scale,
            ),
        )

    def __hash__(self) -> int:
        """Compute hash of self."""
        return hash(
            (
                type(self).__name__,
                self._level,
                self._profile,
                self._level_shift,
                self._intercept,
                self._scale,
            ),
        )
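
As a plain numeric sketch (again with no framcore types) of how shift_level, scale, and shift_intercept compose according to the documented full representation Scale * (Level + Level_shift) * Profile + Intercept:

import numpy as np

level, level_shift, scale, intercept = 100.0, 10.0, 2.0, 5.0
profile = np.array([0.0, 0.5, 1.0])

values = scale * (level + level_shift) * profile + intercept
print(values)  # [  5. 115. 225.]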
__eq__(other) -> bool

Return True if other is equal to self.

Source code in framcore/attributes/level_profile_attributes.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Return True if other is equal to self."""
    if not isinstance(other, LevelProfile):
        return False
    if not self._has_same_behaviour(other):
        return False
    return all(
        (
            self._level == other._level,
            self._profile == other._profile,
            self._level_shift == other._level_shift,
            self._intercept == other._intercept,
            self._scale == other._scale,
        ),
    )
__hash__() -> int

Compute hash of self.

Source code in framcore/attributes/level_profile_attributes.py
def __hash__(self) -> int:
    """Compute hash of self."""
    return hash(
        (
            type(self).__name__,
            self._level,
            self._profile,
            self._level_shift,
            self._intercept,
            self._scale,
        ),
    )
__init__(level: Expr | TimeVector | str | None = None, profile: Expr | TimeVector | str | None = None, value: float | int | None = None, unit: str | None = None, level_shift: Expr | None = None, intercept: Expr | None = None, scale: Expr | None = None) -> None

Initialize LevelProfile.

See the LevelProfile class docstring for details. A complete LevelProfile is represented as: Scale * (Level + Level_shift) * Profile + Intercept. Normally only Level and Profile are used.

Either give level and profile, or value and unit.

Parameters:

Name Type Description Default
level Expr | TimeVector | str | None

Level Expr. Defaults to None.

None
profile Expr | TimeVector | str | None

Profile Expr. Defaults to None.

None
value float | int | None

A constant value to initialize Level. Defaults to None.

None
unit str | None

Unit of the constant value to initialize Level. Defaults to None.

None
level_shift Expr | None

Level_shift Expr. Defaults to None.

None
intercept Expr | None

Intercept Expr. Defaults to None.

None
scale Expr | None

Scale Expr. Defaults to None.

None
Source code in framcore/attributes/level_profile_attributes.py
def __init__(
    self,
    level: Expr | TimeVector | str | None = None,
    profile: Expr | TimeVector | str | None = None,
    value: float | int | None = None,  # To support Price(value=20, unit="EUR/MWh")
    unit: str | None = None,
    level_shift: Expr | None = None,
    intercept: Expr | None = None,
    scale: Expr | None = None,
) -> None:
    """
    Initialize LevelProfile.

    See the LevelProfile class docstring for details. A complete LevelProfile is represented as:
    Scale * (Level + Level_shift) * Profile + Intercept. Normally only Level and Profile are used.

    Either give level and profile, or value and unit.

    Args:
        level (Expr | TimeVector | str | None, optional): Level Expr. Defaults to None.
        profile (Expr | TimeVector | str | None, optional): Profile Expr. Defaults to None.
        value (float | int | None, optional): A constant value to initialize Level. Defaults to None.
        unit (str | None, optional): Unit of the constant value to initialize Level. Defaults to None.
        level_shift (Expr | None, optional): Level_shift Expr. Defaults to None.
        intercept (Expr | None, optional): Intercept Expr. Defaults to None.
        scale (Expr | None, optional): Scale Expr. Defaults to None.

    """
    self._assert_invariants()

    self._check_type(value, (float, int, type(None)))
    self._check_type(unit, (str, type(None)))
    self._check_type(level, (Expr, TimeVector, str, type(None)))
    self._check_type(profile, (Expr, TimeVector, str, type(None)))
    self._check_type(level_shift, (Expr, type(None)))
    self._check_type(intercept, (Expr, type(None)))
    self._check_type(scale, (Expr, type(None)))
    level = self._ensure_level_expr(level, value, unit)
    profile = self._ensure_profile_expr(profile)
    self._ensure_compatible_level_profile_combo(level, profile)
    self._ensure_compatible_level_profile_combo(level_shift, profile)
    self._level: Expr | None = level
    self._profile: Expr | None = profile
    self._level_shift: Expr | None = level_shift
    self._intercept: Expr | None = intercept
    self._scale: Expr | None = scale
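
The value/unit shortcut exists to support constructions like the one named in the inline comment above. Assuming Price is a concrete LevelProfile subclass exported from framcore.attributes:

from framcore.attributes import Price  # assumed import path

price = Price(value=20, unit="EUR/MWh")  # constant level; wrapped internally via _ensure_level_expr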
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in expressions to loaders.

Source code in framcore/attributes/level_profile_attributes.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in expressions to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_level())
    add_loaders_if(loaders, self.get_profile())
clear() -> None

Set all internal fields to None.

You may want to use this to make an exogenous flow use capacities instead of volume.

Source code in framcore/attributes/level_profile_attributes.py
def clear(self) -> None:
    """
    Set all internal fields to None.

    You may want to use this to make an exogenous flow use capacities instead of volume.
    """
    self._level = None
    self._profile = None
    self._level_shift = None
    self._intercept = None
    self._scale = None
copy_from(other: LevelProfile) -> None

Copy fields from other.

Source code in framcore/attributes/level_profile_attributes.py
def copy_from(self, other: LevelProfile) -> None:
    """Copy fields from other."""
    self._check_type(other, LevelProfile)
    self._assert_same_behaviour(other)
    self._level = other._level
    self._profile = other._profile
    self._level_shift = other._level_shift
    self._intercept = other._intercept
    self._scale = other._scale
get_data_value(db: QueryDB | Model, scenario_horizon: FixedFrequencyTimeIndex, level_period: SinglePeriodTimeIndex, unit: str | None, is_max_level: bool | None = None) -> float

Evaluate LevelProfile to a scalar at the level period of the data dimension, and as an average over the scenario horizon.

Parameters:

Name Type Description Default
db QueryDB | Model

The database or model instance used to fetch the required data.

required
scenario_horizon FixedFrequencyTimeIndex

TimeIndex of the scenario dimension to evaluate profiles.

required
level_period SinglePeriodTimeIndex

TimeIndex of the data dimension to evaluate levels.

required
unit str | None

The unit to convert the resulting values into (e.g., MW, GWh). If None, the expression should be unitless.

required
is_max_level bool | None

Whether to evaluate the expression as a maximum level (with a zero_one profile) or as an average level (with a mean_one profile). If None, the default format of the attribute is used.

None
Source code in framcore/attributes/level_profile_attributes.py
def get_data_value(
    self,
    db: QueryDB | Model,
    scenario_horizon: FixedFrequencyTimeIndex,
    level_period: SinglePeriodTimeIndex,
    unit: str | None,
    is_max_level: bool | None = None,
) -> float:
    """
    Evaluate LevelProfile to a scalar at the level period of the data dimension, and as an average over the scenario horizon.

    Args:
        db (QueryDB | Model): The database or model instance used to fetch the required data.
        scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
        level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
        unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
            the expression should be unitless.
        is_max_level (bool | None, optional): Whether to evaluate the expression as a maximum level (with a zero_one profile)
            or as an average level (with a mean_one profile). If None, the default format of the attribute is used.

    """
    return self._get_data_value(db, scenario_horizon, level_period, unit, is_max_level)
get_intercept() -> Expr | None

Get intercept part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def get_intercept(self) -> Expr | None:
    """Get intercept part of (level * profile + intercept)."""
    intercept = self._intercept
    if intercept is not None and self._scale is not None:
        intercept *= self._scale
    return intercept
get_level() -> Expr | None

Get level part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def get_level(self) -> Expr | None:
    """Get level part of (level * profile + intercept)."""
    level = self._level

    if level is None:
        return None

    if level.is_leaf():
        level = Expr(
            src=level.get_src(),
            operations=level.get_operations(expect_ops=False, copy_list=True),
            is_stock=level.is_stock(),
            is_flow=level.is_flow(),
            is_level=True,
            is_profile=False,
            profile=self._profile,
        )

    if self._level_shift is not None:
        level += self._level_shift

    if self._scale is not None:
        level *= self._scale

    return level
get_level_unit_set(db: QueryDB | Model) -> set[str]

Return set with all units behind level expression.

Useful for discovering valid unit input to get_level_value.

Source code in framcore/attributes/level_profile_attributes.py
def get_level_unit_set(
    self,
    db: QueryDB | Model,
) -> set[str]:
    """
    Return set with all units behind level expression.

    Useful for discovering valid unit input to get_level_value.
    """
    if not self.has_level():
        return set()
    return get_units_from_expr(db, self.get_level())
get_profile() -> Expr | None

Get profile part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def get_profile(self) -> Expr | None:
    """Get profile part of (level * profile + intercept)."""
    return self._profile
get_profile_timeindex_set(db: QueryDB | Model) -> set[TimeIndex]

Return set with all TimeIndex behind profile expression.

Can be used to run optimized queries, i.e. not asking for finer time resolutions than necessary.

Source code in framcore/attributes/level_profile_attributes.py
def get_profile_timeindex_set(
    self,
    db: QueryDB | Model,
) -> set[TimeIndex]:
    """
    Return set with all TimeIndex behind profile expression.

    Can be used to run optimized queries, i.e. not asking for
    finer time resolutions than necessary.
    """
    if not self.has_profile():
        return set()
    return get_timeindexes_from_expr(db, self.get_profile())
get_scenario_vector(db: QueryDB | Model, scenario_horizon: FixedFrequencyTimeIndex, level_period: SinglePeriodTimeIndex, unit: str | None, is_float32: bool = True) -> NDArray

Evaluate LevelProfile over the periods in scenario dimension, and at the level period of the data dimension.

Underlying profiles are evaluated over the scenario dimension, and levels are evaluated to scalars over level_period in the data dimension.

Parameters:

Name Type Description Default
db QueryDB | Model

The database or model instance used to fetch the required data.

required
scenario_horizon FixedFrequencyTimeIndex

TimeIndex of the scenario dimension to evaluate profiles.

required
level_period SinglePeriodTimeIndex

TimeIndex of the data dimension to evaluate levels.

required
unit str | None

The unit to convert the resulting values into (e.g., MW, GWh). If None, the expression should be unitless.

required
is_float32 bool

Whether to return the vector as a NumPy array with float32 precision. Defaults to True.

True
Source code in framcore/attributes/level_profile_attributes.py
def get_scenario_vector(
    self,
    db: QueryDB | Model,
    scenario_horizon: FixedFrequencyTimeIndex,
    level_period: SinglePeriodTimeIndex,
    unit: str | None,
    is_float32: bool = True,
) -> NDArray:
    """
    Evaluate LevelProfile over the periods in the scenario dimension, and at the level period of the data dimension.

    Underlying profiles are evaluated over the scenario dimension,
    and levels are evaluated to scalars over level_period in the data dimension.

    Args:
        db (QueryDB | Model): The database or model instance used to fetch the required data.
        scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
        level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
        unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
            the expression should be unitless.
        is_float32 (bool, optional): Whether to return the vector as a NumPy array with `float32`
            precision. Defaults to True.

    """
    return self._get_scenario_vector(db, scenario_horizon, level_period, unit, is_float32)
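
A minimal usage sketch, assuming db, a scenario horizon and a level period are already constructed elsewhere (their constructors are not part of this page), and that "MW" is a valid unit for attr:

import numpy as np

values = attr.get_scenario_vector(db, scenario_horizon=horizon, level_period=period, unit="MW")
assert values.dtype == np.float32  # expected, since is_float32 defaults to True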
has_intercept() -> bool

Return True if get_intercept will return a value that is not None.

Source code in framcore/attributes/level_profile_attributes.py
def has_intercept(self) -> bool:
    """Return True if get_intercept will return value not None."""
    return self._intercept is not None
has_level() -> bool

Return True if get_level will return a value that is not None.

Source code in framcore/attributes/level_profile_attributes.py
def has_level(self) -> bool:
    """Return True if get_level will return value not None."""
    return (self._level is not None) or (self._level_shift is not None)
has_profile() -> bool

Return True if get_profile will return a value that is not None.

Source code in framcore/attributes/level_profile_attributes.py
def has_profile(self) -> bool:
    """Return True if get_profile will return value not None."""
    return self._profile is not None
is_cost() -> bool | None

Return True if attribute is objective function cost coefficient.

Return False if attribute is objective function revenue coefficient.

Return None if not applicable.

Source code in framcore/attributes/level_profile_attributes.py
def is_cost(self) -> bool | None:
    """
    Return True if attribute is objective function cost coefficient.

    Return False if attribute is objective function revenue coefficient.

    Return None if not applicable.
    """
    return self._IS_COST
is_flow() -> bool

Return True if attribute is a flow variable.

Return False if attribute is not a flow variable.

Source code in framcore/attributes/level_profile_attributes.py
def is_flow(self) -> bool:
    """
    Return True if attribute is a flow variable.

    Return False if attribute is not a flow variable.
    """
    return self._IS_FLOW
is_ingoing() -> bool | None

Return True if attribute is ingoing.

Return False if attribute is outgoing.

Return None if not applicable.

Source code in framcore/attributes/level_profile_attributes.py
def is_ingoing(self) -> bool | None:
    """
    Return True if attribute is ingoing.

    Return False if attribute is outgoing.

    Return None if not applicable.
    """
    return self._IS_INGOING
is_max_and_zero_one() -> bool

When True, the level should be max (not average) and the corresponding profile should be zero_one (not mean_one).

When False, the level should be average (not max) and the corresponding profile should be mean_one (not zero_one).

Source code in framcore/attributes/level_profile_attributes.py
def is_max_and_zero_one(self) -> bool:
    """
    When True, the level should be max (not average) and the corresponding profile should be zero_one (not mean_one).

    When False, the level should be average (not max) and the corresponding profile should be mean_one (not zero_one).
    """
    return self._IS_MAX_AND_ZERO_ONE
is_not_negative() -> bool

Return True if attribute is not allowed to have negative values.

Return False if attribute can have both positive and negative values.

Source code in framcore/attributes/level_profile_attributes.py
def is_not_negative(self) -> bool:
    """
    Return True if attribute is not allowed to have negative values.

    Return False if attribute can have both positive and negative values.
    """
    return self._IS_NOT_NEGATIVE
is_stock() -> bool

Return True if attribute is a stock variable.

Return False if attribute is not a stock variable.

Source code in framcore/attributes/level_profile_attributes.py
def is_stock(self) -> bool:
    """
    Return True if attribute is a stock variable.

    Return False if attribute is not a stock variable.
    """
    return self._IS_STOCK
is_unitless() -> bool | None

Return True if attribute is known to be unitless.

Return False if attribute is known to have a unit that is not None.

Return None if not applicable.

Source code in framcore/attributes/level_profile_attributes.py
def is_unitless(self) -> bool | None:
    """
    Return True if attribute is known to be unitless.

    Return False if attribute is known to have a unit that is not None.

    Return None if not applicable.
    """
    return self._IS_UNITLESS
scale(value: float | int) -> None

Modify the scale part of (scale * (level + level_shift) * profile + intercept) of an attribute by multiplying it by a constant value.

Source code in framcore/attributes/level_profile_attributes.py
def scale(self, value: float | int) -> None:
    """Modify the scale part of (scale * (level + level_shift) * profile + intercept) of an attribute by multiplying with a constant value."""
    # TODO: Not allowed to scale if there is intercept?
    expr = ensure_expr(
        ConstantTimeVector(self._ensure_float(value), unit=None, is_max_level=False),
        is_level=True,
        is_profile=False,
        profile=None,
    )
    if self._scale is None:
        self._scale = expr
    else:
        self._scale *= expr
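
Because the else-branch above multiplies into the stored scale (self._scale *= expr), repeated calls compose multiplicatively; a minimal sketch with an existing LevelProfile attribute attr:

attr.scale(0.5)  # scale part becomes 0.5
attr.scale(2.0)  # scale part becomes 0.5 * 2.0 = 1.0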
set_intercept(value: Expr | None) -> None

Set intercept part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def set_intercept(self, value: Expr | None) -> None:
    """Set intercept part of (level * profile + intercept)."""
    self._check_type(value, (Expr, type(None)))
    if value is not None:
        self._check_level_expr(value)
    self._intercept = value
set_level(level: Expr | TimeVector | str | None) -> None

Set level part of (scale * (level + level_shift) * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def set_level(self, level: Expr | TimeVector | str | None) -> None:
    """Set level part of (scale * (level + level_shift) * profile + intercept)."""
    self._check_type(level, (Expr, TimeVector, str, type(None)))
    level = self._ensure_level_expr(level)
    self._ensure_compatible_level_profile_combo(level, self._profile)
    self._level = level
set_profile(profile: Expr | TimeVector | str | None) -> None

Set profile part of (scale * (level + level_shift) * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def set_profile(self, profile: Expr | TimeVector | str | None) -> None:
    """Set profile part of (scale * (level + level_shift) * profile + intercept)."""
    self._check_type(profile, (Expr, TimeVector, str, type(None)))
    profile = self._ensure_profile_expr(profile)
    self._ensure_compatible_level_profile_combo(self._level, profile)
    self._profile = profile
shift_intercept(value: float, unit: str | None) -> None

Modify the intercept part of (level * profile + intercept) of an attribute by adding a constant value.

Source code in framcore/attributes/level_profile_attributes.py
def shift_intercept(self, value: float, unit: str | None) -> None:
    """Modify the intercept part of (level * profile + intercept) of an attribute by adding a constant value."""
    expr = ensure_expr(
        ConstantTimeVector(self._ensure_float(value), unit=unit, is_max_level=False),
        is_level=True,
        is_profile=False,
        is_stock=self._IS_STOCK,
        is_flow=self._IS_FLOW,
        profile=None,
    )
    if self._intercept is None:
        self._intercept = expr
    else:
        self._intercept += expr
shift_level(value: float | int, unit: str | None = None, reference_period: ReferencePeriod | None = None, is_max_level: bool | None = None, use_profile: bool = True) -> None

Modify the level_shift part of (scale * (level + level_shift) * profile + intercept) of an attribute by adding a constant value.

Source code in framcore/attributes/level_profile_attributes.py
def shift_level(
    self,
    value: float | int,
    unit: str | None = None,
    reference_period: ReferencePeriod | None = None,
    is_max_level: bool | None = None,
    use_profile: bool = True,  # TODO: Remove. Should always use profile. If has profile validate that it is equal to the profile of Level.
) -> None:
    """Modify the level_shift part of (scale * (level + level_shift) * profile + intercept) of an attribute by adding a constant value."""
    # TODO: Not allowed to shift if there is intercept?
    self._check_type(value, (float, int))
    self._check_type(unit, (str, type(None)))
    self._check_type(reference_period, (ReferencePeriod, type(None)))
    self._check_type(is_max_level, (bool, type(None)))
    self._check_type(use_profile, bool)

    if is_max_level is None:
        is_max_level = self._IS_MAX_AND_ZERO_ONE

    expr = ensure_expr(
        ConstantTimeVector(
            self._ensure_float(value),
            unit=unit,
            is_max_level=is_max_level,
            reference_period=reference_period,
        ),
        is_level=True,
        is_profile=False,
        is_stock=self._IS_STOCK,
        is_flow=self._IS_FLOW,
        profile=self._profile if use_profile else None,
    )
    if self._level_shift is None:
        self._level_shift = expr
    else:
        self._level_shift += expr
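
Since repeated calls accumulate via self._level_shift += expr, shifts add up; a minimal sketch, assuming attr is an attribute whose level carries an MW unit:

attr.shift_level(100.0, unit="MW")  # add 100 MW to the level part
attr.shift_level(-20.0, unit="MW")  # net shift is now +80 MW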

Loss

Bases: ArrowCoefficient

Concrete class representing a loss coefficient attribute, indicating a unitless coefficient.

Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Loss(ArrowCoefficient):  # TODO: Make a loss for storage that is percentage per time
    """
    Concrete class representing a loss coefficient attribute, indicating a unitless coefficient.

    Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True

MaxFlowVolume

Bases: FlowVolume

Concrete class representing a maximum flow volume attribute, indicating a flow variable with maximum values.

Subclass of FlowVolume < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class MaxFlowVolume(FlowVolume):
    """
    Concrete class representing a maximum flow volume attribute, indicating a flow variable with maximum values.

    Subclass of FlowVolume < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_MAX_AND_ZERO_ONE = True

ObjectiveCoefficient

Bases: Coefficient

Abstract class representing an objective coefficient attribute, indicating cost or revenue coefficients in the objective function.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ObjectiveCoefficient(Coefficient):
    """
    Abstract class representing an objective coefficient attribute, indicating cost or revenue coefficients in the objective function.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_UNITLESS = False
    _IS_NOT_NEGATIVE = False

Price

Bases: ShadowPrice

Concrete class representing a price attribute, indicating the price of a commodity at a specific node.

Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Price(ShadowPrice):
    """
    Concrete class representing a price attribute, indicating the price of a commodity at a specific node.

    Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False

Proportion

Bases: Coefficient

Concrete class representing a proportion coefficient attribute, indicating a unitless coefficient between 0 and 1.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Proportion(Coefficient):
    """
    Concrete class representing a proportion coefficient attribute, indicating a unitless coefficient between 0 and 1.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True

ReservePrice

Bases: ObjectiveCoefficient

Concrete class representing a reserve price attribute, indicating revenue coefficients in the objective function.

Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ReservePrice(ObjectiveCoefficient):
    """
    Concrete class representing a reserve price attribute, indicating revenue coefficients in the objective function.

    Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_COST = False

ShadowPrice

Bases: Coefficient

Abstract class representing a shadow price attribute, indicating that the attribute has a unit and might be negative.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ShadowPrice(Coefficient):
    """
    Abstract class representing a shadow price attribute, indicating that the attribute has a unit and might be negative.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_UNITLESS = False
    _IS_NOT_NEGATIVE = False

StockVolume

Bases: LevelProfile

Concrete class representing a stock volume attribute, indicating a stock variable with maximum values.

Subclass of LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class StockVolume(LevelProfile):
    """
    Concrete class representing a stock volume attribute, indicating a stock variable with maximum values.

    Subclass of LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_STOCK = True
    _IS_MAX_AND_ZERO_ONE = True

WaterValue

Bases: ShadowPrice

Concrete class representing a water value attribute, indicating the value of water in the system.

Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class WaterValue(ShadowPrice):
    """
    Concrete class representing a water value attribute, indicating the value of water in the system.

    Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False

Arrow

Arrow

Bases: Base

Arrow class is used by Flows to represent contribution of its commodity to Nodes.

The Arrow has a direction to determine input or output (is_ingoing), and parameters for the contribution of the Flow to the Node. The main parameters are conversion, efficiency and loss, which together form the coefficient = conversion * (1 / efficiency) * (1 - loss). Arrow has its own implementation of get_scenario_vector and get_data_value to calculate the coefficient shown above.
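
A worked example of the coefficient formula (the numbers are illustrative only):

conversion = 1.0  # unit conversion between the flow and the node commodity
efficiency = 0.4  # 40 % efficiency, so 1 / efficiency = 2.5
loss = 0.05       # 5 % loss, so 1 - loss = 0.95
coefficient = conversion * (1 / efficiency) * (1 - loss)  # 1.0 * 2.5 * 0.95 = 2.375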

Source code in framcore/attributes/Arrow.py
class Arrow(Base):
    """
    Arrow class is used by Flows to represent contribution of its commodity to Nodes.

    The Arrow has a direction to determine input or output (is_ingoing), and parameters for the contribution of the Flow to the Node.
    The main parameters are conversion, efficiency and loss, which together form the coefficient = conversion * (1 / efficiency) * (1 - loss).
    Arrow has its own implementation of get_scenario_vector and get_data_value to calculate the coefficient shown above.
    """

    def __init__(
        self,
        node: str,
        is_ingoing: bool,
        conversion: Conversion | None = None,
        efficiency: Efficiency | None = None,
        loss: Loss | None = None,
    ) -> None:
        """Initialize the Arrow class."""
        self._check_type(node, str)
        self._check_type(is_ingoing, bool)
        self._check_type(conversion, (Conversion, type(None)))
        self._check_type(efficiency, (Efficiency, type(None)))
        self._check_type(loss, (Loss, type(None)))
        self._node = node
        self._is_ingoing = is_ingoing
        self._conversion = conversion
        self._efficiency = efficiency
        self._loss = loss

    def get_node(self) -> str:
        """Get the node the arrow is pointing to."""
        return self._node

    def set_node(self, node: str) -> None:
        """Set the node the arrow is pointing to."""
        self._check_type(node, str)
        self._node = node

    def is_ingoing(self) -> bool:
        """
        Return True if arrow is ingoing.

        Ingoing means the flow variable supplies to the node.
        Outgoing means the flow variable takes out of the node.
        """
        return self._is_ingoing

    def get_conversion(self) -> Conversion | None:
        """Get the conversion."""
        return self._conversion

    def set_conversion(self, value: Conversion | None) -> None:
        """Set the conversion."""
        self._check_type(value, (Conversion, type(None)))
        self._conversion = value

    def get_efficiency(self) -> Efficiency | None:
        """Get the efficiency."""
        return self._efficiency

    def set_efficiency(self, value: Efficiency | None) -> None:
        """Set the efficiency."""
        self._check_type(value, (Efficiency, type(None)))
        self._efficiency = value

    def get_loss(self) -> Loss | None:
        """Get the loss."""
        return self._loss

    def set_loss(self, value: Loss | None) -> None:
        """Set the loss."""
        self._check_type(value, (Loss, type(None)))
        self._loss = value

    def has_profile(self) -> bool:
        """Return True if any of conversion, efficiency or loss has profile."""
        if self._conversion is not None and self._conversion.has_profile():
            return True
        if self._efficiency is not None and self._efficiency.has_profile():
            return True
        return bool(self._loss is not None and self._loss.has_profile())

    def get_conversion_unit_set(
        self,
        db: QueryDB | Model,
    ) -> set[str]:
        """Get set of units behind conversion level expr (if any)."""
        if self._conversion is None:
            return set()
        return self._conversion.get_level_unit_set(db)

    def get_profile_timeindex_set(
        self,
        db: QueryDB | Model,
    ) -> set[TimeIndex]:
        """
        Get set of timeindexes behind profile.

        Can be used to run optimized queries, i.e. not asking for
        finer time resolutions than necessary.
        """
        if not self.has_profile():
            return set()
        s = set()
        if self._conversion is not None:
            s.update(self._conversion.get_profile_timeindex_set(db))
        if self._loss is not None:
            s.update(self._loss.get_profile_timeindex_set(db))
        if self._efficiency is not None:
            s.update(self._efficiency.get_profile_timeindex_set(db))
        return s

    def get_scenario_vector(  # noqa: C901, PLR0915
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_float32: bool = True,
    ) -> NDArray:
        """Return vector with values along the given scenario horizon using level over level_period."""
        conversion_vector = None
        efficiency_vector = None
        loss_vector = None
        conversion_value = None
        efficiency_value = None
        loss_value = None

        if self._conversion is not None:
            if self._conversion.has_profile():
                conversion_vector = self._conversion.get_scenario_vector(
                    db=db,
                    scenario_horizon=scenario_horizon,
                    level_period=level_period,
                    unit=unit,
                    is_float32=is_float32,
                )
            elif self._conversion.has_level():
                conversion_value = self._conversion.get_data_value(
                    db=db,
                    scenario_horizon=scenario_horizon,
                    level_period=level_period,
                    unit=unit,
                )
                conversion_value = float(conversion_value)

        if self._efficiency is not None:
            if self._efficiency.has_profile():
                efficiency_vector = self._efficiency.get_scenario_vector(
                    db=db,
                    scenario_horizon=scenario_horizon,
                    level_period=level_period,
                    unit=None,
                    is_float32=is_float32,
                )
            elif self._efficiency.has_level():
                efficiency_value = self._efficiency.get_data_value(
                    db=db,
                    scenario_horizon=scenario_horizon,
                    level_period=level_period,
                    unit=None,
                )
                efficiency_value = float(efficiency_value)

        if self._loss is not None:
            if self._loss.has_profile():
                loss_vector = self._loss.get_scenario_vector(
                    db=db,
                    scenario_horizon=scenario_horizon,
                    level_period=level_period,
                    unit=None,
                    is_float32=is_float32,
                )
            elif self._loss.has_level():
                loss_value = self._loss.get_data_value(
                    db=db,
                    scenario_horizon=scenario_horizon,
                    level_period=level_period,
                    unit=None,
                )
                loss_value = float(loss_value)

        if conversion_value is not None:
            assert conversion_value >= 0, f"Arrow with invalid conversion ({conversion_value}): {self}"
            out = conversion_value
        else:
            out = 1.0

        if efficiency_value is not None:
            assert efficiency_value > 0, f"Arrow with invalid efficiency ({efficiency_value}): {self}"
            out = out / efficiency_value

        if loss_value is not None:
            assert 0 <= loss_value < 1, f"Arrow with invalid loss ({loss_value}): {self}"
            out = out - out * loss_value

        if conversion_vector is not None:
            np.multiply(conversion_vector, out, out=conversion_vector)
            out = conversion_vector

        if efficiency_vector is not None:
            if isinstance(out, float):
                np.divide(out, efficiency_vector, out=efficiency_vector)
                out = efficiency_vector
            else:
                np.divide(out, efficiency_vector, out=out)

        if loss_vector is not None:
            if isinstance(out, float):
                np.multiply(out, loss_vector, out=loss_vector)
                np.subtract(out, loss_vector, out=loss_vector)
                out = loss_vector
            else:
                np.multiply(out, loss_vector, out=loss_vector)
                np.subtract(out, loss_vector, out=out)

        if isinstance(out, float):
            num_periods = scenario_horizon.get_num_periods()
            vector = np.ones(num_periods, dtype=np.float32 if is_float32 else np.float64)
            vector.fill(out)
            return vector

        return out

    def get_data_value(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_max_level: bool = False,
    ) -> float:
        """Return float for level_period."""
        conversion_value = None
        efficiency_value = None
        loss_value = None

        if self._conversion is not None and self._conversion.has_level():
            conversion_value = self._conversion.get_data_value(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=unit,
                is_max_level=is_max_level,
            )
            conversion_value = float(conversion_value)

        if self._efficiency is not None and self._efficiency.has_level():
            efficiency_value = self._efficiency.get_data_value(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=None,
                is_max_level=is_max_level,
            )
            efficiency_value = float(efficiency_value)

        if self._loss is not None and self._loss.has_level():
            loss_value = self._loss.get_data_value(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=None,
                is_max_level=is_max_level,
            )
            loss_value = float(loss_value)

        if conversion_value is not None:
            assert conversion_value >= 0, f"Arrow with invalid conversion ({conversion_value}): {self}"
            out = conversion_value
        else:
            out = 1.0

        if efficiency_value is not None:
            assert efficiency_value > 0, f"Arrow with invalid efficiency ({efficiency_value}): {self}"
            out = out / efficiency_value

        if loss_value is not None:
            assert 0 <= loss_value < 1, f"Arrow with invalid loss ({loss_value}): {self}"
            out = out - out * loss_value

        return out

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in attributes to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_conversion())
        add_loaders_if(loaders, self.get_loss())
        add_loaders_if(loaders, self.get_efficiency())
__init__(node: str, is_ingoing: bool, conversion: Conversion | None = None, efficiency: Efficiency | None = None, loss: Loss | None = None) -> None

Initialize the Arrow class.

Source code in framcore/attributes/Arrow.py
def __init__(
    self,
    node: str,
    is_ingoing: bool,
    conversion: Conversion | None = None,
    efficiency: Efficiency | None = None,
    loss: Loss | None = None,
) -> None:
    """Initialize the Arrow class."""
    self._check_type(node, str)
    self._check_type(is_ingoing, bool)
    self._check_type(conversion, (Conversion, type(None)))
    self._check_type(efficiency, (Efficiency, type(None)))
    self._check_type(loss, (Loss, type(None)))
    self._node = node
    self._is_ingoing = is_ingoing
    self._conversion = conversion
    self._efficiency = efficiency
    self._loss = loss
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/Arrow.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in attributes to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_conversion())
    add_loaders_if(loaders, self.get_loss())
    add_loaders_if(loaders, self.get_efficiency())
get_conversion() -> Conversion | None

Get the conversion.

Source code in framcore/attributes/Arrow.py
def get_conversion(self) -> Conversion | None:
    """Get the conversion."""
    return self._conversion
get_conversion_unit_set(db: QueryDB | Model) -> set[str]

Get set of units behind conversion level expr (if any).

Source code in framcore/attributes/Arrow.py
def get_conversion_unit_set(
    self,
    db: QueryDB | Model,
) -> set[str]:
    """Get set of units behind conversion level expr (if any)."""
    if self._conversion is None:
        return set()
    return self._conversion.get_level_unit_set(db)
get_data_value(db: QueryDB | Model, scenario_horizon: FixedFrequencyTimeIndex, level_period: SinglePeriodTimeIndex, unit: str | None, is_max_level: bool = False) -> float

Return float for level_period.

Source code in framcore/attributes/Arrow.py
def get_data_value(
    self,
    db: QueryDB | Model,
    scenario_horizon: FixedFrequencyTimeIndex,
    level_period: SinglePeriodTimeIndex,
    unit: str | None,
    is_max_level: bool = False,
) -> float:
    """Return float for level_period."""
    conversion_value = None
    efficiency_value = None
    loss_value = None

    if self._conversion is not None and self._conversion.has_level():
        conversion_value = self._conversion.get_data_value(
            db=db,
            scenario_horizon=scenario_horizon,
            level_period=level_period,
            unit=unit,
            is_max_level=is_max_level,
        )
        conversion_value = float(conversion_value)

    if self._efficiency is not None and self._efficiency.has_level():
        efficiency_value = self._efficiency.get_data_value(
            db=db,
            scenario_horizon=scenario_horizon,
            level_period=level_period,
            unit=None,
            is_max_level=is_max_level,
        )
        efficiency_value = float(efficiency_value)

    if self._loss is not None and self._loss.has_level():
        loss_value = self._loss.get_data_value(
            db=db,
            scenario_horizon=scenario_horizon,
            level_period=level_period,
            unit=None,
            is_max_level=is_max_level,
        )
        loss_value = float(loss_value)

    if conversion_value is not None:
        assert conversion_value >= 0, f"Arrow with invalid conversion ({conversion_value}): {self}"
        out = conversion_value
    else:
        out = 1.0

    if efficiency_value is not None:
        assert efficiency_value > 0, f"Arrow with invalid efficiency ({efficiency_value}): {self}"
        out = out / efficiency_value

    if loss_value is not None:
        assert 0 <= loss_value < 1, f"Arrow with invalid loss ({loss_value}): {self}"
        out = out - out * loss_value

    return out
get_efficiency() -> Efficiency | None

Get the efficiency.

Source code in framcore/attributes/Arrow.py
def get_efficiency(self) -> Efficiency | None:
    """Get the efficiency."""
    return self._efficiency
get_loss() -> Loss | None

Get the loss.

Source code in framcore/attributes/Arrow.py
def get_loss(self) -> Loss | None:
    """Get the loss."""
    return self._loss
get_node() -> str

Get the node the arrow is pointing to.

Source code in framcore/attributes/Arrow.py
def get_node(self) -> str:
    """Get the node the arrow is pointing to."""
    return self._node
get_profile_timeindex_set(db: QueryDB | Model) -> set[TimeIndex]

Get set of timeindexes behind profile.

Can be used to run optimized queries, i.e. not asking for finer time resolutions than necessary.

Source code in framcore/attributes/Arrow.py
def get_profile_timeindex_set(
    self,
    db: QueryDB | Model,
) -> set[TimeIndex]:
    """
    Get set of timeindexes behind profile.

    Can be used to run optimized queries, i.e. not asking for
    finer time resolutions than necessary.
    """
    if not self.has_profile():
        return set()
    s = set()
    if self._conversion is not None:
        s.update(self._conversion.get_profile_timeindex_set(db))
    if self._loss is not None:
        s.update(self._loss.get_profile_timeindex_set(db))
    if self._efficiency is not None:
        s.update(self._efficiency.get_profile_timeindex_set(db))
    return s
get_scenario_vector(db: QueryDB | Model, scenario_horizon: FixedFrequencyTimeIndex, level_period: SinglePeriodTimeIndex, unit: str | None, is_float32: bool = True) -> NDArray

Return vector with values along the given scenario horizon using level over level_period.

Source code in framcore/attributes/Arrow.py
def get_scenario_vector(  # noqa: C901, PLR0915
    self,
    db: QueryDB | Model,
    scenario_horizon: FixedFrequencyTimeIndex,
    level_period: SinglePeriodTimeIndex,
    unit: str | None,
    is_float32: bool = True,
) -> NDArray:
    """Return vector with values along the given scenario horizon using level over level_period."""
    conversion_vector = None
    efficiency_vector = None
    loss_vector = None
    conversion_value = None
    efficiency_value = None
    loss_value = None

    if self._conversion is not None:
        if self._conversion.has_profile():
            conversion_vector = self._conversion.get_scenario_vector(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=unit,
                is_float32=is_float32,
            )
        elif self._conversion.has_level():
            conversion_value = self._conversion.get_data_value(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=unit,
            )
            conversion_value = float(conversion_value)

    if self._efficiency is not None:
        if self._efficiency.has_profile():
            efficiency_vector = self._efficiency.get_scenario_vector(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=None,
                is_float32=is_float32,
            )
        elif self._efficiency.has_level():
            efficiency_value = self._efficiency.get_data_value(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=None,
            )
            efficiency_value = float(efficiency_value)

    if self._loss is not None:
        if self._loss.has_profile():
            loss_vector = self._loss.get_scenario_vector(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=None,
                is_float32=is_float32,
            )
        elif self._loss.has_level():
            loss_value = self._loss.get_data_value(
                db=db,
                scenario_horizon=scenario_horizon,
                level_period=level_period,
                unit=None,
            )
            loss_value = float(loss_value)

    if conversion_value is not None:
        assert conversion_value >= 0, f"Arrow with invalid conversion ({conversion_value}): {self}"
        out = conversion_value
    else:
        out = 1.0

    if efficiency_value is not None:
        assert efficiency_value > 0, f"Arrow with invalid efficiency ({efficiency_value}): {self}"
        out = out / efficiency_value

    if loss_value is not None:
        assert 0 <= loss_value < 1, f"Arrow with invalid loss ({loss_value}): {self}"
        out = out - out * loss_value

    if conversion_vector is not None:
        np.multiply(conversion_vector, out, out=conversion_vector)
        out = conversion_vector

    if efficiency_vector is not None:
        if isinstance(out, float):
            np.divide(out, efficiency_vector, out=efficiency_vector)
            out = efficiency_vector
        else:
            np.divide(out, efficiency_vector, out=out)

    if loss_vector is not None:
        if isinstance(out, float):
            np.multiply(out, loss_vector, out=loss_vector)
            np.subtract(out, loss_vector, out=loss_vector)
            out = loss_vector
        else:
            np.multiply(out, loss_vector, out=loss_vector)
            np.subtract(out, loss_vector, out=out)

    if isinstance(out, float):
        num_periods = scenario_horizon.get_num_periods()
        vector = np.ones(num_periods, dtype=np.float32 if is_float32 else np.float64)
        vector.fill(out)
        return vector

    return out
has_profile() -> bool

Return True if any of conversion, efficiency or loss has profile.

Source code in framcore/attributes/Arrow.py
def has_profile(self) -> bool:
    """Return True if any of conversion, efficiency or loss has profile."""
    if self._conversion is not None and self._conversion.has_profile():
        return True
    if self._efficiency is not None and self._efficiency.has_profile():
        return True
    return bool(self._loss is not None and self._loss.has_profile())
is_ingoing() -> bool

Return True if arrow is ingoing.

Ingoing means the flow variable supplies to the node. Outgoing means the flow variable takes out of the node.

Source code in framcore/attributes/Arrow.py
def is_ingoing(self) -> bool:
    """
    Return True if arrow is ingoing.

    Ingoing means the flow variable supplies to the node.
    Outgoing means the flow variable takes out of the node.
    """
    return self._is_ingoing
set_conversion(value: Conversion | None) -> None

Set the conversion.

Source code in framcore/attributes/Arrow.py
def set_conversion(self, value: Conversion | None) -> None:
    """Set the conversion."""
    self._check_type(value, (Conversion, type(None)))
    self._conversion = value
set_efficiency(value: Efficiency | None) -> None

Set the efficiency.

Source code in framcore/attributes/Arrow.py
def set_efficiency(self, value: Efficiency | None) -> None:
    """Set the efficiency."""
    self._check_type(value, (Efficiency, type(None)))
    self._efficiency = value
set_loss(value: Loss | None) -> None

Set the loss.

Source code in framcore/attributes/Arrow.py
def set_loss(self, value: Loss | None) -> None:
    """Set the loss."""
    self._check_type(value, (Loss, type(None)))
    self._loss = value
set_node(node: str) -> None

Set the node the arrow is pointing to.

Source code in framcore/attributes/Arrow.py
def set_node(self, node: str) -> None:
    """Set the node the arrow is pointing to."""
    self._check_type(node, str)
    self._node = node

ElasticDemand

ElasticDemand attribute class.

ElasticDemand

Bases: Base

ElasticDemand class representing the price elasticity of a demand Component.

Source code in framcore/attributes/ElasticDemand.py
class ElasticDemand(Base):
    """ElasticDemand class representing the price elasticity of a demand Component."""

    def __init__(
        self,
        price_elasticity: Elasticity,
        min_price: Price,
        normal_price: Price,
        max_price: Price,
    ) -> None:
        """
        Initialize the ElasticDemand class.

        Args:
            price_elasticity (Elasticity): The price elasticity factor of the demand consumer.
            min_price (Price): Lower limit for price elasticity.
            normal_price (Price): Price for which the demand is inelastic. If it deviates from this price, the consumer will adjust
                                  its consumption according to the _price_elasticity factor.
            max_price (Price): Upper limit for price elasticity / reservation price level.

        """
        self._check_type(price_elasticity, Elasticity)
        self._check_type(min_price, Price)
        self._check_type(normal_price, Price)
        self._check_type(max_price, Price)

        self._price_elasticity = price_elasticity
        self._min_price = min_price
        self._normal_price = normal_price
        self._max_price = max_price

    def get_price_elasticity(self) -> Elasticity:
        """Get the price elasticity."""
        return self._price_elasticity

    def set_price_elasticity(self, elasticity: Elasticity) -> None:
        """Set the price elasticity."""
        self._check_type(elasticity, Elasticity)
        self._price_elasticity = elasticity

    def get_min_price(self) -> Price:
        """Get the minimum price."""
        return self._min_price

    def set_min_price(self, min_price: Price) -> None:
        """Set the minimum price."""
        self._check_type(min_price, Price)
        self._min_price = min_price

    def get_normal_price(self) -> Price:
        """Get the normal price."""
        return self._normal_price

    def set_normal_price(self, normal_price: Price) -> None:
        """Set the normal price."""
        self._check_type(normal_price, Price)
        self._normal_price = normal_price

    def get_max_price(self) -> Price:
        """Get the maximum price."""
        return self._max_price

    def set_max_price(self, max_price: Price) -> None:
        """Set the maximum price."""
        self._check_type(max_price, Price)
        self._max_price = max_price

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in attributes to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self._normal_price)
        add_loaders_if(loaders, self._price_elasticity)
        add_loaders_if(loaders, self._max_price)
        add_loaders_if(loaders, self._min_price)
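
A minimal sketch, assuming elasticity, low, normal and high are pre-built Elasticity and Price attributes (their constructors are documented elsewhere):

demand = ElasticDemand(
    price_elasticity=elasticity,
    min_price=low,
    normal_price=normal,
    max_price=high,
)
demand.set_max_price(high)  # setters type-check their arguments
assert demand.get_normal_price() is normal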
__init__(price_elasticity: Elasticity, min_price: Price, normal_price: Price, max_price: Price) -> None

Initialize the ElasticDemand class.

Parameters:

Name Type Description Default
price_elasticity Elasticity

The price elasticity factor of the demand consumer.

required
min_price Price

Lower limit for price elasticity.

required
normal_price Price

Price for which the demand is inelastic. If it deviates from this price, the consumer will adjust its consumption according to the _price_elasticity factor.

required
max_price Price

Upper limit for price elasticity / reservation price level.

required
Source code in framcore/attributes/ElasticDemand.py
def __init__(
    self,
    price_elasticity: Elasticity,
    min_price: Price,
    normal_price: Price,
    max_price: Price,
) -> None:
    """
    Initialize the ElasticDemand class.

    Args:
        price_elasticity (Elasticity): The price elasticity factor of the demand consumer.
        min_price (Price): Lower limit for price elasticity.
        normal_price (Price): Price for which the demand is inelastic. If it deviates from this price, the consumer will adjust
                              its consumption according to the _price_elasticity factor.
        max_price (Price): Upper limit for price elasticity / reservation price level.

    """
    self._check_type(price_elasticity, Elasticity)
    self._check_type(min_price, Price)
    self._check_type(normal_price, Price)
    self._check_type(max_price, Price)

    self._price_elasticity = price_elasticity
    self._min_price = min_price
    self._normal_price = normal_price
    self._max_price = max_price
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/ElasticDemand.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in attributes to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self._normal_price)
    add_loaders_if(loaders, self._price_elasticity)
    add_loaders_if(loaders, self._max_price)
    add_loaders_if(loaders, self._min_price)
get_max_price() -> Price

Get the maximum price.

Source code in framcore/attributes/ElasticDemand.py
def get_max_price(self) -> Price:
    """Get the maximum price."""
    return self._max_price
get_min_price() -> Price

Get the minimum price.

Source code in framcore/attributes/ElasticDemand.py
def get_min_price(self) -> Price:
    """Get the minimum price."""
    return self._min_price
get_normal_price() -> Price

Get the normal price.

Source code in framcore/attributes/ElasticDemand.py
def get_normal_price(self) -> Price:
    """Get the normal price."""
    return self._normal_price
get_price_elasticity() -> Elasticity

Get the price elasticity.

Source code in framcore/attributes/ElasticDemand.py
def get_price_elasticity(self) -> Elasticity:
    """Get the price elasticity."""
    return self._price_elasticity
set_max_price(max_price: Price) -> None

Set the maximum price.

Source code in framcore/attributes/ElasticDemand.py
def set_max_price(self, max_price: Price) -> None:
    """Set the maximum price."""
    self._check_type(max_price, Price)
    self._max_price = max_price
set_min_price(min_price: Price) -> None

Set the minimum price.

Source code in framcore/attributes/ElasticDemand.py
def set_min_price(self, min_price: Price) -> None:
    """Set the minimum price."""
    self._check_type(min_price, Price)
    self._min_price = min_price
set_normal_price(normal_price: Price) -> None

Set the normal price.

Source code in framcore/attributes/ElasticDemand.py
def set_normal_price(self, normal_price: Price) -> None:
    """Set the normal price."""
    self._check_type(normal_price, Price)
    self._normal_price = normal_price
set_price_elasticity(elasticity: Elasticity) -> None

Set the price elasticity.

Source code in framcore/attributes/ElasticDemand.py
def set_price_elasticity(self, elasticity: Elasticity) -> None:
    """Set the price elasticity."""
    self._check_type(elasticity, Elasticity)
    self._price_elasticity = elasticity

ReservoirCurve

ReservoirCurve

Bases: Base

Water level elevation to water volume characteristics for HydroStorage.

Source code in framcore/attributes/ReservoirCurve.py
class ReservoirCurve(Base):
    """Water level elevation to water volume characteristics for HydroStorage."""

    # TODO: Implement and comment, also too generic name

    def __init__(self, value: str | None) -> None:
        """Initialize a ReservoirCurve instance."""
        self._check_type(value, (str, type(None)))
        self._value = value

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in attributes to loaders."""
        return
__init__(value: str | None) -> None

Initialize a ReservoirCurve instance.

Source code in framcore/attributes/ReservoirCurve.py
def __init__(self, value: str | None) -> None:
    """Initialize a ReservoirCurve instance."""
    self._check_type(value, (str, type(None)))
    self._value = value
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/ReservoirCurve.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in attributes to loaders."""
    return

SoftBound

SoftBound

Represents a soft bound attribute. A penalty is applied if the bound is violated.

Source code in framcore/attributes/SoftBound.py
class SoftBound:
    """Represents a soft bound attribute. Penalty applied if the bound is violated."""

    # TODO: Implement and comment

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in attributes to loaders."""
        return
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/SoftBound.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in attributes to loaders."""
    return

StartUpCost

StartUpCost

Bases: Base

Represents the costs associated with starting up the operation of a Component.

Source code in framcore/attributes/StartUpCost.py
class StartUpCost(Base):
    """Represent the costs associated with starting up the operation of a Component."""

    # TODO: Complete description

    def __init__(
        self,
        startup_cost: Cost,
        min_stable_load: Proportion,
        start_hours: Hours,
        part_load_efficiency: Efficiency,
    ) -> None:
        """
        Initialize the StartUpCost class.

        Args:
            startup_cost (Cost): _description_
            min_stable_load (Proportion): _description_
            start_hours (Hours): _description_
            part_load_efficiency (Efficiency): _description_

        """
        self._check_type(startup_cost, Cost)
        self._check_type(min_stable_load, Proportion)
        self._check_type(start_hours, Hours)
        self._check_type(part_load_efficiency, Efficiency)

        self._startup_cost = startup_cost
        self._min_stable_load = min_stable_load
        self._start_hours = start_hours
        self._part_load_efficiency = part_load_efficiency

    def get_startupcost(self) -> Cost:
        """Get the startup cost."""
        return self._startup_cost

    def set_startupcost(self, startupcost: Cost) -> None:
        """Set the startup cost."""
        self._check_type(startupcost, Cost)
        self._startup_cost = startupcost

    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint of the startup cost."""
        return self.get_fingerprint_default()

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Get all loaders stored in attributes."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_startupcost())
        add_loaders_if(loaders, self._start_hours)
        add_loaders_if(loaders, self._min_stable_load)
        add_loaders_if(loaders, self._part_load_efficiency)
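
A minimal sketch, assuming cost, stable_load, hours and part_eff are pre-built Cost, Proportion, Hours and Efficiency attributes:

startup = StartUpCost(
    startup_cost=cost,
    min_stable_load=stable_load,
    start_hours=hours,
    part_load_efficiency=part_eff,
)
startup.set_startupcost(cost)  # type-checked, like the constructor arguments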
__init__(startup_cost: Cost, min_stable_load: Proportion, start_hours: Hours, part_load_efficiency: Efficiency) -> None

Initialize the StartUpCost class.

Parameters:

Name Type Description Default
startup_cost Cost

description

required
min_stable_load Proportion

description

required
start_hours Hours

description

required
part_load_efficiency Efficiency

description

required
Source code in framcore/attributes/StartUpCost.py
def __init__(
    self,
    startup_cost: Cost,
    min_stable_load: Proportion,
    start_hours: Hours,
    part_load_efficiency: Efficiency,
) -> None:
    """
    Initialize the StartUpCost class.

    Args:
        startup_cost (Cost): _description_
        min_stable_load (Proportion): _description_
        start_hours (Hours): _description_
        part_load_efficiency (Efficiency): _description_

    """
    self._check_type(startup_cost, Cost)
    self._check_type(min_stable_load, Proportion)
    self._check_type(start_hours, Hours)
    self._check_type(part_load_efficiency, Efficiency)

    self._startup_cost = startup_cost
    self._min_stable_load = min_stable_load
    self._start_hours = start_hours
    self._part_load_efficiency = part_load_efficiency
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/StartUpCost.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Get all loaders stored in attributes."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_startupcost())
    add_loaders_if(loaders, self._start_hours)
    add_loaders_if(loaders, self._min_stable_load)
    add_loaders_if(loaders, self._part_load_efficiency)
get_fingerprint() -> Fingerprint

Get the fingerprint of the startup cost.

Source code in framcore/attributes/StartUpCost.py
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint of the startup cost."""
    return self.get_fingerprint_default()
get_startupcost() -> Cost

Get the startup cost.

Source code in framcore/attributes/StartUpCost.py
def get_startupcost(self) -> Cost:
    """Get the startup cost."""
    return self._startup_cost
set_startupcost(startupcost: Cost) -> None

Set the startup cost.

Source code in framcore/attributes/StartUpCost.py
def set_startupcost(self, startupcost: Cost) -> None:
    """Set the startup cost."""
    self._check_type(startupcost, Cost)
    self._startup_cost = startupcost

Storage

Storage

Bases: Base

Represents all types of storage this system supports.

Subclasses are supposed to restrict which attributes are used, not add more.

Source code in framcore/attributes/Storage.py
class Storage(Base):
    """
    Represents all types of storage this system supports.

    Subclasses are supposed to restrict which attributes are used, not add more.
    """

    def __init__(
        self,
        capacity: StockVolume,
        volume: StockVolume | None = None,
        loss: Loss | None = None,  # TODO: Should be loss percentage per time.
        reservoir_curve: ReservoirCurve | None = None,
        max_soft_bound: SoftBound | None = None,
        min_soft_bound: SoftBound | None = None,
        target_bound: TargetBound | None = None,
        initial_storage_percentage: float | None = None,
    ) -> None:
        """
        Create new storage.

        Args:
            capacity (StockVolume): Storage capacity.
            volume (StockVolume | None, optional): Storage filling (actual/result). Defaults to None.
            loss (Loss | None, optional): Loss percentage per time. Defaults to None.
            reservoir_curve (ReservoirCurve | None, optional): Water level elevation to water volume for HydroStorage. Defaults to None.
            max_soft_bound (SoftBound | None, optional): Upper soft boundary that is penalized if broken. Defaults to None.
            min_soft_bound (SoftBound | None, optional): Lower soft boundary that is penalized if broken. Defaults to None.
            target_bound (TargetBound | None, optional): Target filling; deviation from it can be penalized. Defaults to None.
            initial_storage_percentage (float | None, optional): Initial storage filling percentage at start of simulation. Defaults to None.

        """
        super().__init__()

        self._check_type(capacity, StockVolume)
        self._check_type(volume, (StockVolume, type(None)))
        self._check_type(loss, (Loss, type(None)))
        self._check_type(reservoir_curve, (ReservoirCurve, type(None)))
        self._check_type(max_soft_bound, (SoftBound, type(None)))
        self._check_type(min_soft_bound, (SoftBound, type(None)))
        self._check_type(target_bound, (TargetBound, type(None)))
        self._check_type(initial_storage_percentage, (float, type(None)))

        if initial_storage_percentage is not None:
            self._check_float(initial_storage_percentage, lower_bound=0.0, upper_bound=1.0)

        self._capacity = capacity

        self._loss = loss
        self._reservoir_curve = reservoir_curve
        self._max_soft_bound = max_soft_bound
        self._min_soft_bound = min_soft_bound
        self._target_bound = target_bound
        self._initial_storage_percentage = initial_storage_percentage

        self._cost_terms: dict[str, ObjectiveCoefficient] = dict()

        if volume is None:
            volume = StockVolume()
        self._volume = volume

    def get_capacity(self) -> StockVolume:
        """Get the capacity."""
        return self._capacity

    def get_volume(self) -> StockVolume:
        """Get the volume."""
        return self._volume

    def add_cost_term(self, key: str, cost_term: ObjectiveCoefficient) -> None:
        """Add a cost term."""
        self._check_type(key, str)
        self._check_type(cost_term, ObjectiveCoefficient)
        self._cost_terms[key] = cost_term

    def get_cost_terms(self) -> dict[str, ObjectiveCoefficient]:
        """Get the cost terms."""
        return self._cost_terms

    def get_loss(self) -> Loss | None:
        """Get the loss."""
        return self._loss

    def set_loss(self, value: Loss | None) -> None:
        """Set the loss."""
        self._check_type(value, (Loss, type(None)))
        self._loss = value

    def get_reservoir_curve(self) -> ReservoirCurve | None:
        """Get the reservoir curve."""
        return self._reservoir_curve

    def set_reservoir_curve(self, value: ReservoirCurve | None) -> None:
        """Set the reservoir curve."""
        self._check_type(value, (ReservoirCurve, type(None)))
        self._reservoir_curve = value

    def get_max_soft_bound(self) -> SoftBound | None:
        """Get the max soft bound."""
        return self._max_soft_bound

    def set_max_soft_bound(self, value: SoftBound | None) -> None:
        """Set the max soft bound."""
        self._check_type(value, (SoftBound, type(None)))
        self._max_soft_bound = value

    def get_min_soft_bound(self) -> SoftBound | None:
        """Get the min soft bound."""
        return self._min_soft_bound

    def set_min_soft_bound(self, value: SoftBound | None) -> None:
        """Set the min soft bound."""
        self._check_type(value, (SoftBound, type(None)))
        self._min_soft_bound = value

    def get_target_bound(self) -> TargetBound | None:
        """Get the target bound."""
        return self._target_bound

    def set_target_bound(self, value: TargetBound | None) -> None:
        """Set the target bound."""
        self._check_type(value, (TargetBound, type(None)))
        self._target_bound = value

    def get_initial_storage_percentage(self) -> float | None:
        """Get the initial storage percentage (float in [0, 1])."""
        return self._initial_storage_percentage

    def set_initial_storage_percentage(self, value: float) -> None:
        """Set the initial storage percentage (float in [0, 1])."""
        self._check_float(value, lower_bound=0.0, upper_bound=1.0)
        self._initial_storage_percentage = value

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in attributes to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_capacity())
        add_loaders_if(loaders, self.get_loss())
        add_loaders_if(loaders, self.get_volume())
        add_loaders_if(loaders, self.get_max_soft_bound())
        add_loaders_if(loaders, self.get_min_soft_bound())
        add_loaders_if(loaders, self.get_reservoir_curve())
        add_loaders_if(loaders, self.get_target_bound())

        for cost in self.get_cost_terms().values():
            add_loaders_if(loaders, cost)
__init__(capacity: StockVolume, volume: StockVolume | None = None, loss: Loss | None = None, reservoir_curve: ReservoirCurve | None = None, max_soft_bound: SoftBound | None = None, min_soft_bound: SoftBound | None = None, target_bound: TargetBound | None = None, initial_storage_percentage: float | None = None) -> None

Create new storage.

Parameters:

Name Type Description Default
capacity StockVolume

Storage capacity.

required
volume StockVolume | None

Storage filling (actual/result). Defaults to None.

None
loss Loss | None

Loss percentage per time. Defaults to None.

None
reservoir_curve ReservoirCurve | None

Water level elevation to water volume for HydroStorage. Defaults to None.

None
max_soft_bound SoftBound | None

Upper soft boundary that is penalized if broken. Defaults to None.

None
min_soft_bound SoftBound | None

Lower soft boundary that is penalized if broken. Defaults to None.

None
target_bound TargetBound | None

Target filling; deviation from it can be penalized. Defaults to None.

None
initial_storage_percentage float | None

Initial storage filling percentage at start of simulation. Defaults to None.

None
Source code in framcore/attributes/Storage.py
def __init__(
    self,
    capacity: StockVolume,
    volume: StockVolume | None = None,
    loss: Loss | None = None,  # TODO: Should be loss percentage per time.
    reservoir_curve: ReservoirCurve | None = None,
    max_soft_bound: SoftBound | None = None,
    min_soft_bound: SoftBound | None = None,
    target_bound: TargetBound | None = None,
    initial_storage_percentage: float | None = None,
) -> None:
    """
    Create new storage.

    Args:
        capacity (StockVolume): Storage capacity.
        volume (StockVolume | None, optional): Storage filling (actual/result). Defaults to None.
        loss (Loss | None, optional): Loss percentage per time. Defaults to None.
        reservoir_curve (ReservoirCurve | None, optional): Water level elevation to water volume for HydroStorage. Defaults to None.
        max_soft_bound (SoftBound | None, optional): Upper soft boundary that is penalized if broken. Defaults to None.
        min_soft_bound (SoftBound | None, optional): Lower soft boundary that is penalized if broken. Defaults to None.
        target_bound (TargetBound | None, optional): Target filling; deviation from it can be penalized. Defaults to None.
        initial_storage_percentage (float | None, optional): Initial storage filling percentage at start of simulation. Defaults to None.

    """
    super().__init__()

    self._check_type(capacity, StockVolume)
    self._check_type(volume, (StockVolume, type(None)))
    self._check_type(loss, (Loss, type(None)))
    self._check_type(reservoir_curve, (ReservoirCurve, type(None)))
    self._check_type(max_soft_bound, (SoftBound, type(None)))
    self._check_type(min_soft_bound, (SoftBound, type(None)))
    self._check_type(target_bound, (TargetBound, type(None)))
    self._check_type(initial_storage_percentage, (float, type(None)))

    if initial_storage_percentage is not None:
        self._check_float(initial_storage_percentage, lower_bound=0.0, upper_bound=1.0)

    self._capacity = capacity

    self._loss = loss
    self._reservoir_curve = reservoir_curve
    self._max_soft_bound = max_soft_bound
    self._min_soft_bound = min_soft_bound
    self._target_bound = target_bound
    self._initial_storage_percentage = initial_storage_percentage

    self._cost_terms: dict[str, ObjectiveCoefficient] = dict()

    if volume is None:
        volume = StockVolume()
    self._volume = volume
add_cost_term(key: str, cost_term: ObjectiveCoefficient) -> None

Add a cost term.

Source code in framcore/attributes/Storage.py
def add_cost_term(self, key: str, cost_term: ObjectiveCoefficient) -> None:
    """Add a cost term."""
    self._check_type(key, str)
    self._check_type(cost_term, ObjectiveCoefficient)
    self._cost_terms[key] = cost_term
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/Storage.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in attributes to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_capacity())
    add_loaders_if(loaders, self.get_loss())
    add_loaders_if(loaders, self.get_volume())
    add_loaders_if(loaders, self.get_max_soft_bound())
    add_loaders_if(loaders, self.get_min_soft_bound())
    add_loaders_if(loaders, self.get_reservoir_curve())
    add_loaders_if(loaders, self.get_target_bound())

    for cost in self.get_cost_terms().values():
        add_loaders_if(loaders, cost)
get_capacity() -> StockVolume

Get the capacity.

Source code in framcore/attributes/Storage.py
def get_capacity(self) -> StockVolume:
    """Get the capacity."""
    return self._capacity
get_cost_terms() -> dict[str, ObjectiveCoefficient]

Get the cost terms.

Source code in framcore/attributes/Storage.py
def get_cost_terms(self) -> dict[str, ObjectiveCoefficient]:
    """Get the cost terms."""
    return self._cost_terms
get_initial_storage_percentage() -> float | None

Get the initial storage percentage (float in [0, 1]).

Source code in framcore/attributes/Storage.py
def get_initial_storage_percentage(self) -> float | None:
    """Get the initial storage percentage (float in [0, 1])."""
    return self._initial_storage_percentage
get_loss() -> Loss | None

Get the loss.

Source code in framcore/attributes/Storage.py
def get_loss(self) -> Loss | None:
    """Get the loss."""
    return self._loss
get_max_soft_bound() -> SoftBound | None

Get the max soft bound.

Source code in framcore/attributes/Storage.py
def get_max_soft_bound(self) -> SoftBound | None:
    """Get the max soft bound."""
    return self._max_soft_bound
get_min_soft_bound() -> SoftBound | None

Get the min soft bound.

Source code in framcore/attributes/Storage.py
def get_min_soft_bound(self) -> SoftBound | None:
    """Get the min soft bound."""
    return self._min_soft_bound
get_reservoir_curve() -> ReservoirCurve | None

Get the reservoir curve.

Source code in framcore/attributes/Storage.py
def get_reservoir_curve(self) -> ReservoirCurve | None:
    """Get the reservoir curve."""
    return self._reservoir_curve
get_target_bound() -> TargetBound | None

Get the target bound.

Source code in framcore/attributes/Storage.py
def get_target_bound(self) -> TargetBound | None:
    """Get the target bound."""
    return self._target_bound
get_volume() -> StockVolume

Get the volume.

Source code in framcore/attributes/Storage.py
def get_volume(self) -> StockVolume:
    """Get the volume."""
    return self._volume
set_initial_storage_percentage(value: float) -> None

Set the initial storage percentage (float in [0, 1]).

Source code in framcore/attributes/Storage.py
def set_initial_storage_percentage(self, value: float) -> None:
    """Set the initial storage percentage (float in [0, 1])."""
    self._check_float(value, lower_bound=0.0, upper_bound=1.0)
    self._initial_storage_percentage = value
set_loss(value: Loss | None) -> None

Set the loss.

Source code in framcore/attributes/Storage.py
def set_loss(self, value: Loss | None) -> None:
    """Set the loss."""
    self._check_type(value, (Loss, type(None)))
    self._loss = value
set_max_soft_bound(value: SoftBound | None) -> None

Set the max soft bound.

Source code in framcore/attributes/Storage.py
def set_max_soft_bound(self, value: SoftBound | None) -> None:
    """Set the max soft bound."""
    self._check_type(value, (SoftBound, type(None)))
    self._max_soft_bound = value
set_min_soft_bound(value: SoftBound | None) -> None

Set the min soft bound.

Source code in framcore/attributes/Storage.py
def set_min_soft_bound(self, value: SoftBound | None) -> None:
    """Set the min soft bound."""
    self._check_type(value, (SoftBound, type(None)))
    self._min_soft_bound = value
set_reservoir_curve(value: ReservoirCurve | None) -> None

Set the reservoir curve.

Source code in framcore/attributes/Storage.py
def set_reservoir_curve(self, value: ReservoirCurve | None) -> None:
    """Set the reservoir curve."""
    self._check_type(value, (ReservoirCurve, type(None)))
    self._reservoir_curve = value
set_target_bound(value: TargetBound | None) -> None

Set the target bound.

Source code in framcore/attributes/Storage.py
def set_target_bound(self, value: TargetBound | None) -> None:
    """Set the target bound."""
    self._check_type(value, (TargetBound, type(None)))
    self._target_bound = value
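
A minimal usage sketch (not from the source) tying the Storage API together; the StockVolume and ObjectiveCoefficient instances are assumed to be constructed as documented elsewhere in this reference, and the cost-term key is hypothetical:

storage = Storage(capacity=capacity)             # capacity: StockVolume (assumed)
storage.set_initial_storage_percentage(0.6)      # validated to lie in [0, 1]
storage.add_cost_term("spill_penalty", penalty)  # penalty: ObjectiveCoefficient (assumed)
storage.get_volume()                             # a default StockVolume is created when volume is None
loaders: set[Loader] = set()
storage.add_loaders(loaders)                     # gathers loaders from all attributes and cost terms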

TargetBound

TargetBound

Target boundary attribute. Deviation from the target can be penalized.

Source code in framcore/attributes/TargetBound.py
class TargetBound:
    """Target boundary attribute. Can be penalized if deviation from target."""

    # TODO: Implement and comment

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in attributes to loaders."""
        return
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in attributes to loaders.

Source code in framcore/attributes/TargetBound.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in attributes to loaders."""
    return

hydro

HydroBypass
HydroBypass

Bases: Base

HydroBypass represents a controlled waterway from a HydroModule, used to bypass the main release of the HydroModule.

Source code in framcore/attributes/hydro/HydroBypass.py
class HydroBypass(Base):
    """HydroBypass represents a controlled water way from a HydroModule. Used to bypass main release of the HydroModule."""

    def __init__(
        self,
        to_module: str | None,
        capacity: FlowVolume | None = None,
    ) -> None:
        """
        Initialize object.

        Args:
            to_module (str | None): Name of the HydroModule the water is released to.
            capacity (FlowVolume | None, optional): Restrictions on the volume of water which can pass through the bypass at a given moment. Defaults to None.

        """
        super().__init__()

        self._check_type(to_module, (str, type(None)))
        self._check_type(capacity, (FlowVolume, type(None)))

        self._to_module = to_module
        self._capacity = capacity
        self._volume = AvgFlowVolume()

    def get_to_module(self) -> str | None:
        """Get the name of the module to which the bypass leads."""
        return self._to_module

    def set_to_module(self, to_module: str) -> None:
        """Set the name of the module to which the bypass leads."""
        self._check_type(to_module, str)
        self._to_module = to_module

    def get_capacity(self) -> FlowVolume | None:
        """Get the capacity of the bypass."""
        return self._capacity

    def get_volume(self) -> AvgFlowVolume:
        """Get the volume of the bypass."""
        return self._volume

    def _get_fingerprint(self) -> Fingerprint:
        return self.get_fingerprint_default(refs={"to_module": self._to_module})
__init__(to_module: str | None, capacity: FlowVolume | None = None) -> None

Initialize object.

Parameters:

Name Type Description Default
to_module str | None

Name of the HydroModule the water is released to.

required
capacity FlowVolume | None

Restrictions on the volume of water which can pass through the bypass at a given moment. Defaults to None.

None
Source code in framcore/attributes/hydro/HydroBypass.py
def __init__(
    self,
    to_module: str | None,
    capacity: FlowVolume | None = None,
) -> None:
    """
    Initialize object.

    Args:
        to_module (str | None): Name of the HydroModule the water is released to.
        capacity (FlowVolume | None, optional): Restrictions on the volume of water which can pass through the bypass at a given moment. Defaults to None.

    """
    super().__init__()

    self._check_type(to_module, (str, type(None)))
    self._check_type(capacity, (FlowVolume, type(None)))

    self._to_module = to_module
    self._capacity = capacity
    self._volume = AvgFlowVolume()
get_capacity() -> FlowVolume | None

Get the capacity of the bypass.

Source code in framcore/attributes/hydro/HydroBypass.py
def get_capacity(self) -> FlowVolume | None:
    """Get the capacity of the bypass."""
    return self._capacity
get_to_module() -> str | None

Get the name of the module to which the bypass leads.

Source code in framcore/attributes/hydro/HydroBypass.py
def get_to_module(self) -> str | None:
    """Get the name of the module to which the bypass leads."""
    return self._to_module
get_volume() -> AvgFlowVolume

Get the volume of the bypass.

Source code in framcore/attributes/hydro/HydroBypass.py
def get_volume(self) -> AvgFlowVolume:
    """Get the volume of the bypass."""
    return self._volume
set_to_module(to_module: str) -> None

Set the name of the module to which the bypass leads.

Source code in framcore/attributes/hydro/HydroBypass.py
def set_to_module(self, to_module: str) -> None:
    """Set the name of the module to which the bypass leads."""
    self._check_type(to_module, str)
    self._to_module = to_module
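
A short sketch (not from the source) of typical HydroBypass use; the capacity instance and module name are assumptions:

bypass = HydroBypass(to_module="downstream_module", capacity=capacity)  # capacity: FlowVolume (assumed)
bypass.get_to_module()  # "downstream_module"
bypass.get_volume()     # AvgFlowVolume result attribute, created empty in __init__
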
HydroGenerator
HydroGenerator

Bases: Base

Produces power from the main release of a HydroModule.

Produces to a power node, and can have variable costs associated with operation. Other attributes are energy equivalent, PQ curve, nominal head and tailwater elevation.

Source code in framcore/attributes/hydro/HydroGenerator.py
class HydroGenerator(Base):
    """
    Produces power from the main release of a HydroModule.

    Produces to a power node, and can have variable costs associated with operation. Other attributes are energy equivalent, PQ curve, nominal head
    and tailwater elevation.

    """

    def __init__(
        self,
        power_node: str,
        energy_equivalent: Conversion,  # energy equivalent
        pq_curve: Expr | str | Curve | None = None,
        nominal_head: Expr | str | TimeVector | None = None,
        tailwater_elevation: Expr | str | TimeVector | None = None,
        voc: Cost | None = None,
        production: AvgFlowVolume | None = None,
    ) -> None:
        """
        Initialize a HydroGenerator with parameters.

        Args:
            power_node (str): Node to supply power to.
            energy_equivalent (Conversion): Conversion factor of power produced to water released.
            pq_curve (Expr | str | Curve | None, optional): Expression or curve describing the relationship between produced power and water released. Defaults to None.
            nominal_head (Expr | str | TimeVector | None, optional): Vertical distance between upstream and downstream water level. Defaults to None.
            tailwater_elevation (Expr | str | TimeVector | None, optional): Elevation at the surface where the water exits the turbine. Defaults to None.
            voc (Cost | None, optional): Variable operational costs. Defaults to None.
            production (AvgFlowVolume | None, optional): Result of power volume produced. Defaults to None.

        """
        super().__init__()

        self._check_type(power_node, str)
        self._check_type(energy_equivalent, Conversion)
        self._check_type(pq_curve, (Expr, str, Curve, type(None)))
        self._check_type(nominal_head, (Expr, str, TimeVector, type(None)))
        self._check_type(tailwater_elevation, (Expr, str, TimeVector, type(None)))
        self._check_type(voc, (Cost, type(None)))

        self._power_node = power_node
        self._energy_eq = energy_equivalent
        self._pq_curve = ensure_expr(pq_curve)
        self._nominal_head = ensure_expr(nominal_head, is_level=True)
        self._tailwater_elevation = ensure_expr(tailwater_elevation, is_level=True)
        self._voc = voc

        if production is None:
            production = AvgFlowVolume()
        self._production: AvgFlowVolume = production

    def get_power_node(self) -> str:
        """Get the power node of the hydro generator."""
        return self._power_node

    def set_power_node(self, power_node: str) -> None:
        """Set the power node of the pump unit."""
        self._check_type(power_node, str)
        self._power_node = power_node

    def get_energy_equivalent(self) -> Conversion:
        """Get the energy equivalent of the hydro generator."""
        return self._energy_eq

    def get_pq_curve(self) -> Expr | None:
        """Get the PQ curve of the hydro generator."""
        return self._pq_curve

    def get_nominal_head(self) -> Expr | None:
        """Get the nominal head of the hydro generator."""
        return self._nominal_head

    def get_tailwater_elevation(self) -> Expr | None:
        """Get the tailwater elevation of the hydro generator."""
        return self._tailwater_elevation

    def get_voc(self) -> Cost | None:
        """Get the variable operation and maintenance cost of the hydro generator."""
        return self._voc

    def set_voc(self, voc: Cost) -> None:
        """Set the variable operation and maintenance cost of the hydro generator."""
        self._check_type(voc, Cost)
        self._voc = voc

    def get_production(self) -> AvgFlowVolume:
        """Get the generation of the hydro generator."""
        return self._production

    def _get_fingerprint(self) -> Fingerprint:
        return self.get_fingerprint_default(refs={"power_node": self._power_node})
__init__(power_node: str, energy_equivalent: Conversion, pq_curve: Expr | str | Curve | None = None, nominal_head: Expr | str | TimeVector | None = None, tailwater_elevation: Expr | str | TimeVector | None = None, voc: Cost | None = None, production: AvgFlowVolume | None = None) -> None

Initialize a HydroGenerator with parameters.

Parameters:

Name Type Description Default
power_node str

Node to supply power to.

required
energy_equivalent Conversion

Conversion factor of power produced to water released.

required
pq_curve Expr | str | Curve | None

Expression or curve describing the relationship between produced power and water released. Defaults to None.

None
nominal_head Expr | str | TimeVector | None

Vertical distance between upstream and downstream water level. Defaults to None.

None
tailwater_elevation Expr | str | TimeVector | None

Elevation at the surface where the water exits the turbine. Defaults to None.

None
voc Cost | None

Variable operational costs. Defaults to None.

None
production AvgFlowVolume | None

Result of power volume produced. Defaults to None.

None
Source code in framcore/attributes/hydro/HydroGenerator.py
def __init__(
    self,
    power_node: str,
    energy_equivalent: Conversion,  # energy equivalent
    pq_curve: Expr | str | Curve | None = None,
    nominal_head: Expr | str | TimeVector | None = None,
    tailwater_elevation: Expr | str | TimeVector | None = None,
    voc: Cost | None = None,
    production: AvgFlowVolume | None = None,
) -> None:
    """
    Initialize a HydroGenerator with parameters.

    Args:
        power_node (str): Node to supply power to.
        energy_equivalent (Conversion): Conversion factor of power produced to water released.
        pq_curve (Expr | str | Curve | None, optional): Expression or curve describing the relationship between produced power and water released. Defaults to None.
        nominal_head (Expr | str | TimeVector | None, optional): Vertical distance between upstream and downstream water level. Defaults to None.
        tailwater_elevation (Expr | str | TimeVector | None, optional): Elevation at the surface where the water exits the turbine. Defaults to None.
        voc (Cost | None, optional): Variable operational costs. Defaults to None.
        production (AvgFlowVolume | None, optional): Result of power volume produced. Defaults to None.

    """
    super().__init__()

    self._check_type(power_node, str)
    self._check_type(energy_equivalent, Conversion)
    self._check_type(pq_curve, (Expr, str, Curve, type(None)))
    self._check_type(nominal_head, (Expr, str, TimeVector, type(None)))
    self._check_type(tailwater_elevation, (Expr, str, TimeVector, type(None)))
    self._check_type(voc, (Cost, type(None)))

    self._power_node = power_node
    self._energy_eq = energy_equivalent
    self._pq_curve = ensure_expr(pq_curve)
    self._nominal_head = ensure_expr(nominal_head, is_level=True)
    self._tailwater_elevation = ensure_expr(tailwater_elevation, is_level=True)
    self._voc = voc

    if production is None:
        production = AvgFlowVolume()
    self._production: AvgFlowVolume = production
get_energy_equivalent() -> Conversion

Get the energy equivalent of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_energy_equivalent(self) -> Conversion:
    """Get the energy equivalent of the hydro generator."""
    return self._energy_eq
get_nominal_head() -> Expr | None

Get the nominal head of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_nominal_head(self) -> Expr | None:
    """Get the nominal head of the hydro generator."""
    return self._nominal_head
get_power_node() -> str

Get the power node of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_power_node(self) -> str:
    """Get the power node of the hydro generator."""
    return self._power_node
get_pq_curve() -> Expr | None

Get the PQ curve of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_pq_curve(self) -> Expr | None:
    """Get the PQ curve of the hydro generator."""
    return self._pq_curve
get_production() -> AvgFlowVolume

Get the generation of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_production(self) -> AvgFlowVolume:
    """Get the generation of the hydro generator."""
    return self._production
get_tailwater_elevation() -> Expr | None

Get the tailwater elevation of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_tailwater_elevation(self) -> Expr | None:
    """Get the tailwater elevation of the hydro generator."""
    return self._tailwater_elevation
get_voc() -> Cost | None

Get the variable operation and maintenance cost of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def get_voc(self) -> Cost | None:
    """Get the variable operation and maintenance cost of the hydro generator."""
    return self._voc
set_power_node(power_node: str) -> None

Set the power node of the pump unit.

Source code in framcore/attributes/hydro/HydroGenerator.py
def set_power_node(self, power_node: str) -> None:
    """Set the power node of the pump unit."""
    self._check_type(power_node, str)
    self._power_node = power_node
set_voc(voc: Cost) -> None

Set the variable operation and maintenance cost of the hydro generator.

Source code in framcore/attributes/hydro/HydroGenerator.py
def set_voc(self, voc: Cost) -> None:
    """Set the variable operation and maintenance cost of the hydro generator."""
    self._check_type(voc, Cost)
    self._voc = voc
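
A short sketch (not from the source) showing how the Expr-or-raw inputs are normalized; the node name, Conversion instance, and time-vector id are assumptions:

gen = HydroGenerator(
    power_node="node_no1",           # hypothetical node name
    energy_equivalent=energy_eq,     # Conversion (assumed constructed elsewhere)
    nominal_head="nominal_head_id",  # a str is passed through ensure_expr(..., is_level=True)
)
gen.get_nominal_head()  # returns an Expr regardless of which input form was given
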
HydroPump
HydroPump

Bases: Base

Represent a pump associated with a HydroModule.

The HydroPump can consume power from a power Node to move water upstream between two HydroModules. It has a max power capacity, a mean energy equivalent, and a water capacity. It can also describe the relationship between head and flow (Q), with min and max head and flow.

Results for water and power consumption are stored as AvgFlowVolume attributes.

Source code in framcore/attributes/hydro/HydroPump.py
class HydroPump(Base):
    """
    Represent a pump associated with a HydroModule.

    The HydroPump can consume power from a power Node to move water upstream between two HydroModules. It has a max power capacity, a mean energy
    equivalent, and a water capacity. It can also describe the relationship between head and flow (Q), with min and max head and flow.

    Results for water and power consumption are stored as AvgFlowVolume attributes.

    """

    def __init__(
        self,
        power_node: str,
        from_module: str,
        to_module: str,
        water_capacity: FlowVolume,
        energy_equivalent: Conversion,
        power_capacity: FlowVolume | None = None,
        head_min: Expr | str | TimeVector | None = None,
        head_max: Expr | str | TimeVector | None = None,
        q_min: Expr | str | TimeVector | None = None,
        q_max: Expr | str | TimeVector | None = None,
    ) -> None:
        """
        Initialize a HydroPump object parameters.

        Args:
            power_node (str): Node to take power from when operating.
            from_module (str): Source HydroModule to move water from.
            to_module (str): Destination HydroModule to move water to.
            water_capacity (FlowVolume): Max pumped water volume given the mean energy equivalent and power capacity.
            energy_equivalent (Conversion): Mean conversion factor between power consumed and volume of water moved.
            power_capacity (FlowVolume | None, optional): Max power consumed. Defaults to None.
            head_min (Expr | str | TimeVector | None, optional): Minimum elevation difference between upstream and downstream water level. Defaults to None.
            head_max (Expr | str | TimeVector | None, optional): Maximum elevation difference between upstream and downstream water level. Defaults to None.
            q_min (Expr | str | TimeVector | None, optional): Maximum water flow at head_min. Defaults to None.
            q_max (Expr | str | TimeVector | None, optional): Maximum water flow at head_max. Defaults to None.

        """
        super().__init__()
        self._check_type(power_node, str)
        self._check_modules(from_module, to_module)  # checks types and that they are not the same.
        self._check_type(water_capacity, FlowVolume)
        self._check_type(power_capacity, (FlowVolume, type(None)))
        self._check_type(energy_equivalent, Conversion)
        self._check_type(head_min, (Expr, str, TimeVector, type(None)))
        self._check_type(head_max, (Expr, str, TimeVector, type(None)))
        self._check_type(q_min, (Expr, str, TimeVector, type(None)))
        self._check_type(q_max, (Expr, str, TimeVector, type(None)))

        self._power_node = power_node
        self._from_module = from_module
        self._to_module = to_module
        self._water_capacity = water_capacity
        self._energy_eq = energy_equivalent
        self._power_capacity = power_capacity

        self._hmin = ensure_expr(head_min, is_level=True)
        self._hmax = ensure_expr(head_max, is_level=True)
        self._qmin = ensure_expr(q_min, is_flow=True, is_level=True)
        self._qmax = ensure_expr(q_max, is_flow=True, is_level=True)

        self._water_consumption = AvgFlowVolume()
        self._power_consumption = AvgFlowVolume()

    def get_water_capacity(self) -> FlowVolume:
        """Get the water capacity of the pump unit."""
        return self._water_capacity

    def get_power_capacity(self) -> FlowVolume | None:
        """Get the power capacity of the pump unit."""
        return self._power_capacity

    def get_power_node(self) -> str:
        """Get the power node of the pump unit."""
        return self._power_node

    def set_power_node(self, power_node: str) -> None:
        """Set the power node of the pump unit."""
        self._check_type(power_node, str)
        self._power_node = power_node

    def get_from_module(self) -> str:
        """Get the module from which the pump unit is pumping."""
        return self._from_module

    def get_to_module(self) -> str:
        """Get the module to which the pump unit is pumping."""
        return self._to_module

    # TODO: Should this be split in two? Keep in mind we check that the to and from modules are not the same, so with
    # separate setters a user could run into issues when first setting from_module to to_module and then changing to_module.
    def set_modules(self, from_module: str, to_module: str) -> None:
        """Set the modules for the pump unit."""
        self._check_modules(from_module, to_module)
        self._from_module = from_module
        self._to_module = to_module

    def get_water_consumption(self) -> FlowVolume:
        """Get the water consumption of the pump unit."""
        return self._water_consumption

    def get_power_consumption(self) -> FlowVolume:
        """Get the power consumption of the pump unit."""
        return self._power_consumption

    def _check_modules(self, from_module: str, to_module: str) -> None:
        self._check_type(from_module, str)
        self._check_type(to_module, str)
        if from_module == to_module:
            message = f"{self} cannot pump to and from the same module. Got {from_module} for both from_module and to_module."
            raise ValueError(message)

    def _check_base_module_name(self, base_name: str) -> None:
        if base_name not in (self._from_module, self._to_module):
            message = (
                f"Module {base_name} has not been coupled correctly to its pump {self}. Pump is coupled to modules {self._from_module} and {self._to_module}"
            )
            raise RuntimeError(message)

    # other parameters
    def get_energy_equivalent(self) -> Conversion:
        """Get the energy equivalent of hydro pump."""
        return self._energy_eq

    def set_energy_eq(self, energy_eq: Conversion) -> None:
        """Set the energy equivalent."""
        self._check_type(energy_eq, Conversion)
        self._energy_eq = energy_eq

    def get_head_min(self) -> Expr:
        """Get min fall height of hydro pump."""
        return self._hmin

    def set_head_min(self, head_min: Expr | str | None) -> None:
        """Set min fall height."""
        self._hmin = ensure_expr(head_min)

    def get_head_max(self) -> Expr:
        """Get max fall height of hydro pump."""
        return self._hmax

    def set_head_max(self, hmax: Expr | str | None) -> None:
        """Set max fall height."""
        self._hmax = ensure_expr(hmax)

    def get_q_min(self) -> Expr:
        """Get Q min of hydro pump."""
        return self._qmin

    def set_qmin(self, q_min: Expr | str | None) -> None:
        """Set Q min."""
        self._qmin = ensure_expr(q_min)

    def get_q_max(self) -> Expr:
        """Get Q max of hydro pump."""
        return self._qmax

    def set_qmax(self, q_max: Expr | str | None) -> None:
        """Set Q max."""
        self._qmax = ensure_expr(q_max)

    def _get_fingerprint(self) -> Fingerprint:
        return self.get_fingerprint_default(
            refs={
                "power_node": self._power_node,
                "from_module": self._from_module,
                "to_module": self._to_module,
            },
        )
__init__(power_node: str, from_module: str, to_module: str, water_capacity: FlowVolume, energy_equivalent: Conversion, power_capacity: FlowVolume | None = None, head_min: Expr | str | TimeVector | None = None, head_max: Expr | str | TimeVector | None = None, q_min: Expr | str | TimeVector | None = None, q_max: Expr | str | TimeVector | None = None) -> None

Initialize a HydroPump object parameters.

Parameters:

Name Type Description Default
power_node str

Node to take power from when operating.

required
from_module str

Source HydroModule to move water from.

required
to_module str

Destination HydroModule to move water to.

required
water_capacity FlowVolume

Max pumped water volume given the mean energy equivalent and power capacity.

required
energy_equivalent Conversion

Mean conversion factor between power consumed and volume of water moved.

required
power_capacity FlowVolume | None

Max power consumed. Defaults to None.

None
head_min Expr | str | TimeVector | None

Minimum elevation difference between upstream and downstream water level. Defaults to None.

None
head_max Expr | str | TimeVector | None

Maximum elevation difference between upstream and downstream water level. Defaults to None.

None
q_min Expr | str | TimeVector | None

Maximum water flow at head_min. Defaults to None.

None
q_max Expr | str | TimeVector | None

Maximum water flow at head_max. Defaults to None.

None
Source code in framcore/attributes/hydro/HydroPump.py
def __init__(
    self,
    power_node: str,
    from_module: str,
    to_module: str,
    water_capacity: FlowVolume,
    energy_equivalent: Conversion,
    power_capacity: FlowVolume | None = None,
    head_min: Expr | str | TimeVector | None = None,
    head_max: Expr | str | TimeVector | None = None,
    q_min: Expr | str | TimeVector | None = None,
    q_max: Expr | str | TimeVector | None = None,
) -> None:
    """
    Initialize a HydroPump object parameters.

    Args:
        power_node (str): Node to take power from when operating.
        from_module (str): Source HydroModule to move water from.
        to_module (str): Destination HydroModule to move water to.
        water_capacity (FlowVolume): Max pumped water volume given the mean energy equivalent and power capacity.
        energy_equivalent (Conversion): Mean conversion factor between power consumed and volume of water moved.
        power_capacity (FlowVolume | None, optional): Max power consumed. Defaults to None.
        head_min (Expr | str | TimeVector | None, optional): Minimum elevation difference between upstream and downstream water level. Defaults to None.
        head_max (Expr | str | TimeVector | None, optional): Maximum elevation difference between upstream and downstream water level. Defaults to None.
        q_min (Expr | str | TimeVector | None, optional): Maximum water flow at head_min. Defaults to None.
        q_max (Expr | str | TimeVector | None, optional): Maximum water flow at head_max. Defaults to None.

    """
    super().__init__()
    self._check_type(power_node, str)
    self._check_modules(from_module, to_module)  # checks types and that they are not the same.
    self._check_type(water_capacity, FlowVolume)
    self._check_type(power_capacity, (FlowVolume, type(None)))
    self._check_type(energy_equivalent, Conversion)
    self._check_type(head_min, (Expr, str, TimeVector, type(None)))
    self._check_type(head_max, (Expr, str, TimeVector, type(None)))
    self._check_type(q_min, (Expr, str, TimeVector, type(None)))
    self._check_type(q_max, (Expr, str, TimeVector, type(None)))

    self._power_node = power_node
    self._from_module = from_module
    self._to_module = to_module
    self._water_capacity = water_capacity
    self._energy_eq = energy_equivalent
    self._power_capacity = power_capacity

    self._hmin = ensure_expr(head_min, is_level=True)
    self._hmax = ensure_expr(head_max, is_level=True)
    self._qmin = ensure_expr(q_min, is_flow=True, is_level=True)
    self._qmax = ensure_expr(q_max, is_flow=True, is_level=True)

    self._water_consumption = AvgFlowVolume()
    self._power_consumption = AvgFlowVolume()
get_energy_equivalent() -> Conversion

Get the energy equivalent of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_energy_equivalent(self) -> Conversion:
    """Get the energy equivalent of hydro pump."""
    return self._energy_eq
get_from_module() -> str

Get the module from which the pump unit is pumping.

Source code in framcore/attributes/hydro/HydroPump.py
def get_from_module(self) -> str:
    """Get the module from which the pump unit is pumping."""
    return self._from_module
get_head_max() -> Expr

Get max fall height of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_head_max(self) -> Expr:
    """Get max fall height of hydro pump."""
    return self._hmax
get_head_min() -> Expr

Get min fall height of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_head_min(self) -> Expr:
    """Get min fall height of hydro pump."""
    return self._hmin
get_power_capacity() -> FlowVolume | None

Get the power capacity of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_power_capacity(self) -> FlowVolume | None:
    """Get the power capacity of the pump unit."""
    return self._power_capacity
get_power_consumption() -> FlowVolume

Get the power consumption of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_power_consumption(self) -> FlowVolume:
    """Get the power consumption of the pump unit."""
    return self._power_consumption
get_power_node() -> str

Get the power node of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_power_node(self) -> str:
    """Get the power node of the pump unit."""
    return self._power_node
get_q_max() -> Expr

Get Q max of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_q_max(self) -> Expr:
    """Get Q max of hydro pump."""
    return self._qmax
get_q_min() -> Expr

Get Q min of hydro pump.

Source code in framcore/attributes/hydro/HydroPump.py
def get_q_min(self) -> Expr:
    """Get Q min of hydro pump."""
    return self._qmin
get_to_module() -> str

Get the module to which the pump unit is pumping.

Source code in framcore/attributes/hydro/HydroPump.py
def get_to_module(self) -> str:
    """Get the module to which the pump unit is pumping."""
    return self._to_module
get_water_capacity() -> FlowVolume

Get the water capacity of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_water_capacity(self) -> FlowVolume:
    """Get the capacity of the pump unit."""
    return self._water_capacity
get_water_consumption() -> FlowVolume

Get the water consumption of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def get_water_consumption(self) -> FlowVolume:
    """Get the water consumption of the pump unit."""
    return self._water_consumption
set_energy_eq(energy_eq: Conversion) -> None

Set the energy equivalent.

Source code in framcore/attributes/hydro/HydroPump.py
def set_energy_eq(self, energy_eq: Conversion) -> None:
    """Set the energy equivalent."""
    self._check_type(energy_eq, Conversion)
    self._energy_eq = energy_eq
set_head_max(hmax: Expr | str | None) -> None

Set max fall height.

Source code in framcore/attributes/hydro/HydroPump.py
def set_head_max(self, hmax: Expr | str | None) -> None:
    """Set max fall height."""
    self._hmax = ensure_expr(hmax)
set_head_min(head_min: Expr | str | None) -> None

Set min fall height.

Source code in framcore/attributes/hydro/HydroPump.py
def set_head_min(self, head_min: Expr | str | None) -> None:
    """Set min fall height."""
    self._hmin = ensure_expr(head_min)
set_modules(from_module: str, to_module: str) -> None

Set the modules for the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def set_modules(self, from_module: str, to_module: str) -> None:
    """Set the modules for the pump unit."""
    self._check_modules(from_module, to_module)
    self._from_module = from_module
    self._to_module = to_module
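
The combined setter matters because _check_modules rejects equal names, so swapping the pumping direction must happen in one call. A sketch (not from the source; module names and the commented-out method are hypothetical):

pump.set_modules("res_lower", "res_upper")  # OK: both names change together and stay different
# With separate setters, a swap would pass through an invalid intermediate state:
# pump.set_from_module("res_upper")  # hypothetical method; from_module == to_module here -> ValueError
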
set_power_node(power_node: str) -> None

Set the power node of the pump unit.

Source code in framcore/attributes/hydro/HydroPump.py
def set_power_node(self, power_node: str) -> None:
    """Set the power node of the pump unit."""
    self._check_type(power_node, str)
    self._power_node = power_node
set_qmax(q_max: Expr | str | None) -> None

Set Q max.

Source code in framcore/attributes/hydro/HydroPump.py
def set_qmax(self, q_max: Expr | str | None) -> None:
    """Set Q max."""
    self._qmax = ensure_expr(q_max)
set_qmin(q_min: Expr | str | None) -> None

Set Q min.

Source code in framcore/attributes/hydro/HydroPump.py
def set_qmin(self, q_min: Expr | str | None) -> None:
    """Set Q min."""
    self._qmin = ensure_expr(q_min)
HydroReservoir
HydroReservoir

Bases: Storage

Represent a hydro reservoir of a HydroModule.

Source code in framcore/attributes/hydro/HydroReservoir.py
class HydroReservoir(Storage):
    """Represent a hydro reservoir of a HydroModule."""

    def __init__(
        self,
        capacity: StockVolume,
        reservoir_curve: ReservoirCurve | None = None,
        volume: StockVolume | None = None,
    ) -> None:
        """
        Initialize a HydroReservoir instance.

        Args:
            capacity (StockVolume): The maximum storage capacity of the reservoir.
            reservoir_curve (ReservoirCurve, optional): The curve describing water level elevation to volume characteristics.
            volume (StockVolume, optional): Volume of water in the reservoir.

        """
        super().__init__(
            capacity=capacity,
            reservoir_curve=reservoir_curve,
            volume=volume,
        )
__init__(capacity: StockVolume, reservoir_curve: ReservoirCurve | None = None, volume: StockVolume | None = None) -> None

Initialize a HydroReservoir instance.

Parameters:

Name Type Description Default
capacity StockVolume

The maximum storage capacity of the reservoir.

required
reservoir_curve ReservoirCurve | None

The curve describing water level elevation to volume characteristics.

None
volume StockVolume

Volume of water in the reservoir.

None
Source code in framcore/attributes/hydro/HydroReservoir.py
def __init__(
    self,
    capacity: StockVolume,
    reservoir_curve: ReservoirCurve | None = None,
    volume: StockVolume | None = None,
) -> None:
    """
    Initialize a HydroReservoir instance.

    Args:
        capacity (StockVolume): The maximum storage capacity of the reservoir.
        reservoir_curve (ReservoirCurve, optional): The curve describing water level elevation to volume characteristics.
        volume (StockVolume, optional): Volume of water in the reservoir.

    """
    super().__init__(
        capacity=capacity,
        reservoir_curve=reservoir_curve,
        volume=volume,
    )
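
A one-line sketch (not from the source): HydroReservoir forwards only the reservoir-relevant attributes to Storage, so the remaining Storage attributes keep their defaults:

reservoir = HydroReservoir(capacity=capacity)  # capacity: StockVolume (assumed)
reservoir.get_reservoir_curve()                # None until set via set_reservoir_curve
reservoir.get_volume()                         # default StockVolume created by Storage.__init__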

level_profile_attributes

ArrowCoefficient

Bases: Coefficient

Abstract class representing an arrow coefficient attribute, used for efficiency, loss, and conversion coefficients.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ArrowCoefficient(Coefficient):
    """
    Abstract class representing an arrow coefficient attribute, used for efficiency, loss, and conversion coefficients.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    pass
AvgFlowVolume

Bases: FlowVolume

Concrete class representing an average flow volume attribute, indicating a flow variable with average values.

Subclass of FlowVolume < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class AvgFlowVolume(FlowVolume):
    """
    Concrete class representing an average flow volume attribute, indicating a flow variable with average values.

    Subclass of FlowVolume < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
Coefficient

Bases: LevelProfile

Abstract class representing a coefficient attribute, used as a base class for various coefficient types.

Subclass of LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Coefficient(LevelProfile):
    """
    Abstract class representing a coefficient attribute, used as a base class for various coefficient types.

    Subclass of LevelProfile. See LevelProfile for details.
    """

    pass
Conversion

Bases: ArrowCoefficient

Concrete class representing a conversion coefficient attribute, used for conversion factors in the model.

Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Conversion(ArrowCoefficient):
    """
    Concrete class representing a conversion coefficient attribute, used for conversion factors in the model.

    Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
Cost

Bases: ObjectiveCoefficient

Concrete class representing a cost attribute, indicating cost coefficients in the objective function.

Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Cost(ObjectiveCoefficient):
    """
    Concrete class representing a cost attribute, indicating cost coefficients in the objective function.

    Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_COST = True
Efficiency

Bases: ArrowCoefficient

Concrete class representing an efficiency coefficient attribute, indicating a unitless coefficient.

Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Efficiency(ArrowCoefficient):
    """
    Concrete class representing an efficiency coefficient attribute, indicating a unitless coefficient.

    Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True
Elasticity

Bases: Coefficient

Concrete class representing an elasticity coefficient attribute, indicating a unitless coefficient.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Elasticity(Coefficient):  # TODO: How does this work?
    """
    Concrete class representing an elasticity coefficient attribute, indicating a unitless coefficient.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True
FlowVolume

Bases: LevelProfile

Abstract class representing a flow volume attribute, indicating that the attribute is a flow variable.

Subclass of LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class FlowVolume(LevelProfile):
    """
    Abstract class representing a flow volume attribute, indicating that the attribute is a flow variable.

    Subclass of LevelProfile. See LevelProfile for details.
    """

    _IS_FLOW = True
Hours

Bases: Coefficient

Concrete class representing an hours coefficient attribute, indicating a time-related coefficient.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Hours(Coefficient):  # TODO: How does this work?
    """
    Concrete class representing an hours coefficient attribute, indicating a time-related coefficient.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
LevelProfile

Bases: Base, ABC

Attributes representing timeseries data for Components. Mostly as Level * Profile, where both Level and Profile are Expr (expressions).

Level and Profile represent two distinct dimensions of time. This is because we want to simulate future system states with historical weather patterns. Therefore, Level represents the system state at a given time (data_dim), while Profile represents the scenario dimension (scen_dim). A Level would for example represent the installed capacity of solar plants towards 2030, while the Profile would represent the historical variation between 1991-2020.

Level and Profile can have two main formats: A maximum Level with a Profile that varies between 0-1, and an average Level with a Profile with a mean of 1 (the latter can have a ReferencePeriod). The max format is, for example, used for capacities, while the mean format can be used for prices and flows. The system needs to be able to convert between the two formats. This is especially important for aggregations (for example weighted averages) where all the TimeVectors need to be on the same format for a correct result. One simple example of conversion is pairing a max Level of 100 MW with a mean_one Profile [0, 1, 2]. Asking for this on the max format will return the series 100[0, 0.5, 1] MW, while on the avg format it will return 50[0, 1, 2] MW.
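
A worked sketch (plain Python, not the framcore API) of the conversion described above:

level_max = 100.0                   # max-format Level, e.g. installed capacity in MW
profile_mean_one = [0.0, 1.0, 2.0]  # mean-one Profile
# Max format: rescale the profile to [0, 1] by its maximum.
profile_zero_one = [p / max(profile_mean_one) for p in profile_mean_one]  # [0, 0.5, 1]
series_max = [level_max * p for p in profile_zero_one]                    # [0, 50, 100] MW
# Avg format: the average Level is the mean of the series.
level_avg = sum(series_max) / len(series_max)                             # 50 MW
series_avg = [level_avg * p for p in profile_mean_one]                    # [0, 50, 100] MW
assert series_max == series_avg  # both formats describe the same underlying series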

Queries to LevelProfile need to provide a database, the desired target TimeIndex for both dimensions, the target unit and the desired format. At the moment we support these queries for LevelProfile:
- self.get_data_value(db, scen_dim, data_dim, unit, is_max_level)
- self.get_scenario_vector(db, scen_dim, data_dim, unit, is_float32)

In addition, we have the possibility to shift, scale, and change the intercept of the LevelProfiles. Then we get the full representation: Scale * (Level + Level_shift) * Profile + Intercept (see the sketch below).
- Level_shift adds a constant value to Level and has the same Profile as Level.
- Scale multiplies (Level + Level_shift) by a constant value.
- Intercept adds a constant value to the LevelProfile, ignoring Level and Profile. This is the only way of supporting a timeseries that crosses zero in our system.
This functionality is under development and has not been properly tested.
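
A minimal sketch (plain Python, not the framcore API) evaluating that full representation elementwise over a profile:

def full_value(scale: float, level: float, level_shift: float, profile: list[float], intercept: float) -> list[float]:
    # Scale * (Level + Level_shift) * Profile + Intercept
    return [scale * (level + level_shift) * p + intercept for p in profile]

full_value(2.0, 100.0, 10.0, [0.0, 1.0, 2.0], -5.0)  # [-5.0, 215.0, 435.0]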

LevelProfiles also have additional properties that describes their behaviour. These can be used for initialization, validation, and to simplify queries. The properties are: - is_stock: True if attribute is a stock variable. Level Expr should also have is_stock=True. See Expr for details. - is_flow: True if attribute is a flow variable. Level Expr should also have is_flow=True. See Expr for details. - is_not_negative: True if attribute is not allowed to have negative values. Level Expr should also have only non-negative values. - is_max_and_zero_one: Preferred format of Level and Profile. Used for initialization and queries. - is_ingoing: True if attribute is ingoing, False if outgoing, None if neither. - is_cost: True if attribute is objective function cost coefficient. Else None. - is_unitless: True if attribute is known to be unitless. False if known to have a unit that is not None. Else None.
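A minimal numeric sketch of the format conversion described above, using plain NumPy (not part of framcore); the point is that the max and avg formats encode the same underlying series:

import numpy as np

# Max format: 100 MW Level paired with a zero_one Profile.
max_level = 100.0
mean_one_profile = np.array([0.0, 1.0, 2.0])  # mean is 1
zero_one_profile = mean_one_profile / mean_one_profile.max()  # [0, 0.5, 1]

# Avg format: 50 MW Level paired with the mean_one Profile.
avg_level = max_level * zero_one_profile.mean()  # 50.0

# Both formats reproduce the series [0, 50, 100] MW.
assert (max_level * zero_one_profile == avg_level * mean_one_profile).all()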

Source code in framcore/attributes/level_profile_attributes.py
class LevelProfile(Base, ABC):
    """
    Attributes representing timeseries data for Components. Mostly as Level * Profile, where both Level and Profile are Expr (expressions).

    Level and Profile represent two distinct dimensions of time. This is because we want to simulate future system states with historical weather patterns.
    Therefore, Level represents the system state at a given time (data_dim), while Profile represents the scenario dimension (scen_dim).
    A Level would for example represent the installed capacity of solar plants towards 2030,
    while the Profile would represent the historical variation between 1991-2020.

    Level and Profile can have two main formats: A maximum Level with a Profile that varies between 0-1,
    and an average Level with a Profile with a mean of 1 (the latter can have a ReferencePeriod).
    The max format is, for example, used for capacities, while the mean format can be used for prices and flows.
    The system needs to be able to convert between the two formats. This is especially important for aggregations
    (for example weighted averages) where all the TimeVectors need to be on the same format for a correct result.
    One simple example of conversion is pairing a max Level of 100 MW with a mean_one Profile [0, 1, 2].
    Asking for this on the max format will return the series 100*[0, 0.5, 1] MW, while on the avg format it will return 50*[0, 1, 2] MW.

    Queries to LevelProfile need to provide a database, the desired target TimeIndex for both dimensions, the target unit and the desired format.
    At the moment we support these queries for LevelProfile:
    - self.get_data_value(db, scen_dim, data_dim, unit, is_max_level)
    - self.get_scenario_vector(db, scen_dim, data_dim, unit, is_float32)

    In addition, we have the possibility to shift, scale, and change the intercept of the LevelProfiles.
    Then we get the full representation: Scale * (Level + Level_shift) * Profile + Intercept.
    - Level_shift adds a constant value to Level, has the same Profile as Level.
    - Scale multiplies (Level + Level_shift) by a constant value.
    - Intercept adds a constant value to LevelProfile, ignoring Level and Profile. **This is the only way of supporting a timeseries that crosses zero
        in our system. This functionality is under development and has not been properly tested.**

    LevelProfiles also have additional properties that describe their behaviour. These can be used for initialization, validation,
    and to simplify queries. The properties are:
    - is_stock: True if attribute is a stock variable. Level Expr should also have is_stock=True. See Expr for details.
    - is_flow: True if attribute is a flow variable. Level Expr should also have is_flow=True. See Expr for details.
    - is_not_negative: True if attribute is not allowed to have negative values. Level Expr should also have only non-negative values.
    - is_max_and_zero_one: Preferred format of Level and Profile. Used for initialization and queries.
    - is_ingoing: True if attribute is ingoing, False if outgoing, None if neither.
    - is_cost: True if attribute is objective function cost coefficient. Else None.
    - is_unitless: True if attribute is known to be unitless. False if known to have a unit that is not None. Else None.

    """

    # must be overwritten by subclass when otherwise
    # don't change the defaults
    _IS_ABSTRACT: bool = True
    _IS_STOCK: bool = False
    _IS_FLOW: bool = False
    _IS_NOT_NEGATIVE: bool = True
    _IS_MAX_AND_ZERO_ONE: bool = False

    # must be set by subclass when applicable
    _IS_INGOING: bool | None = None
    _IS_COST: bool | None = None
    _IS_UNITLESS: bool | None = None

    def __init__(
        self,
        level: Expr | TimeVector | str | None = None,
        profile: Expr | TimeVector | str | None = None,
        value: float | int | None = None,  # To support Price(value=20, unit="EUR/MWh")
        unit: str | None = None,
        level_shift: Expr | None = None,
        intercept: Expr | None = None,
        scale: Expr | None = None,
    ) -> None:
        """
        Initialize LevelProfile.

        See the LevelProfile class docstring for details. A complete LevelProfile is represented as:
        Scale * (Level + Level_shift) * Profile + Intercept. Normally only Level and Profile are used.

        Either give level and profile, or value and unit.

        Args:
            level (Expr | TimeVector | str | None, optional): Level Expr. Defaults to None.
            profile (Expr | TimeVector | str | None, optional): Profile Expr. Defaults to None.
            value (float | int | None, optional): A constant value to initialize Level. Defaults to None.
            unit (str | None, optional): Unit of the constant value to initialize Level. Defaults to None.
            level_shift (Expr | None, optional): Level_shift Expr. Defaults to None.
            intercept (Expr | None, optional): Intercept Expr. Defaults to None.
            scale (Expr | None, optional): Scale Expr. Defaults to None.

        """
        self._assert_invariants()

        self._check_type(value, (float, int, type(None)))
        self._check_type(unit, (str, type(None)))
        self._check_type(level, (Expr, TimeVector, str, type(None)))
        self._check_type(profile, (Expr, TimeVector, str, type(None)))
        self._check_type(level_shift, (Expr, type(None)))
        self._check_type(intercept, (Expr, type(None)))
        self._check_type(scale, (Expr, type(None)))
        level = self._ensure_level_expr(level, value, unit)
        profile = self._ensure_profile_expr(profile)
        self._ensure_compatible_level_profile_combo(level, profile)
        self._ensure_compatible_level_profile_combo(level_shift, profile)
        self._level: Expr | None = level
        self._profile: Expr | None = profile
        self._level_shift: Expr | None = level_shift
        self._intercept: Expr | None = intercept
        self._scale: Expr | None = scale
        # TODO: Validate that profiles are equal in level and level_shift.
        # TODO: Validate that level_shift, scale and intercept only consist of Exprs with ConstantTimeVectors
        # TODO: Validate that level_shift, level_scale and intercept have correct Expr properties

    def _assert_invariants(self) -> None:
        abstract = self._IS_ABSTRACT
        max_level_profile = self._IS_MAX_AND_ZERO_ONE
        stock = self._IS_STOCK
        flow = self._IS_FLOW
        unitless = self._IS_UNITLESS
        ingoing = self._IS_INGOING
        cost = self._IS_COST
        not_negative = self._IS_NOT_NEGATIVE

        assert not abstract, "Abstract types should only be used for type hints and checks."
        assert isinstance(max_level_profile, bool)
        assert isinstance(stock, bool)
        assert isinstance(flow, bool)
        assert isinstance(not_negative, bool)
        assert isinstance(ingoing, bool | type(None))
        assert isinstance(unitless, bool | type(None))
        assert isinstance(cost, bool | type(None))
        assert not (flow and stock)
        if flow or stock:
            assert not unitless, "flow and stock must have unit that is not None."
            assert not_negative, "flow and stock cannot have negative values."
        if ingoing is True:
            assert cost is None, "cost must be None when ingoing is True."
        if cost is True:
            assert ingoing is None, "ingoing must be None when cost is True."

        parent = super()
        if isinstance(parent, LevelProfile) and not parent._IS_ABSTRACT:  # noqa: SLF001
            self._assert_same_behaviour(parent)

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in expressions to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_level())
        add_loaders_if(loaders, self.get_profile())

    def clear(self) -> None:
        """
        Set all internal fields to None.

        You may want to use this to get exogenous flow to use capacities instead of volume.
        """
        self._level = None
        self._profile = None
        self._level_shift = None
        self._intercept = None
        self._scale = None

    def is_stock(self) -> bool:
        """
        Return True if attribute is a stock variable.

        Return False if attribute is not a stock variable.
        """
        return self._IS_STOCK

    def is_flow(self) -> bool:
        """
        Return True if attribute is a flow variable.

        Return False if attribute is not a flow variable.
        """
        return self._IS_FLOW

    def is_not_negative(self) -> bool:
        """
        Return True if attribute is not allowed to have negative values.

        Return False if attribute can have both positive and negative values.
        """
        return self._IS_NOT_NEGATIVE

    def is_max_and_zero_one(self) -> bool:
        """
        When True level should be max (not average) and corresponding profile should be zero_one (not mean_one).

        When False level should be average (not max) and corresponding profile should be mean_one (not zero_one).
        """
        return self._IS_MAX_AND_ZERO_ONE

    def is_ingoing(self) -> bool | None:
        """
        Return True if attribute is ingoing.

        Return False if attribute is outgoing.

        Return None if not applicable.
        """
        return self._IS_INGOING

    def is_cost(self) -> bool | None:
        """
        Return True if attribute is objective function cost coefficient.

        Return False if attribute is objective function revenue coefficient.

        Return None if not applicable.
        """
        return self._IS_COST

    def is_unitless(self) -> bool | None:
        """
        Return True if attribute is known to be unitless.

        Return False if attribute is known to have a unit that is not None.

        Return None if not applicable.
        """
        return self._IS_UNITLESS

    def has_level(self) -> bool:
        """Return True if get_level will return a value that is not None."""
        return (self._level is not None) or (self._level_shift is not None)

    def has_profile(self) -> bool:
        """Return True if get_profile will return a value that is not None."""
        return self._profile is not None

    def has_intercept(self) -> bool:
        """Return True if get_intercept will return a value that is not None."""
        return self._intercept is not None

    def copy_from(self, other: LevelProfile) -> None:
        """Copy fields from other."""
        self._check_type(other, LevelProfile)
        self._assert_same_behaviour(other)
        self._level = other._level
        self._profile = other._profile
        self._level_shift = other._level_shift
        self._intercept = other._intercept
        self._scale = other._scale

    def get_level(self) -> Expr | None:
        """Get level part of (level * profile + intercept)."""
        level = self._level

        if level is None:
            return None

        if level.is_leaf():
            level = Expr(
                src=level.get_src(),
                operations=level.get_operations(expect_ops=False, copy_list=True),
                is_stock=level.is_stock(),
                is_flow=level.is_flow(),
                is_level=True,
                is_profile=False,
                profile=self._profile,
            )

        if self._level_shift is not None:
            level += self._level_shift

        if self._scale is not None:
            level *= self._scale

        return level

    def set_level(self, level: Expr | TimeVector | str | None) -> None:
        """Set level part of (scale * (level + level_shift) * profile + intercept)."""
        self._check_type(level, (Expr, TimeVector, str, type(None)))
        level = self._ensure_level_expr(level)
        self._ensure_compatible_level_profile_combo(level, self._profile)
        self._level = level

    def get_profile(self) -> Expr | None:
        """Get profile part of (level * profile + intercept)."""
        return self._profile

    def set_profile(self, profile: Expr | TimeVector | str | None) -> None:
        """Set profile part of (scale * (level + level_shift) * profile + intercept)."""
        self._check_type(profile, (Expr, TimeVector, str, type(None)))
        profile = self._ensure_profile_expr(profile)
        self._ensure_compatible_level_profile_combo(self._level, profile)
        self._profile = profile

    def get_intercept(self) -> Expr | None:
        """Get intercept part of (level * profile + intercept)."""
        intercept = self._intercept
        if self._scale is not None:
            intercept *= self._scale
        return intercept

    def set_intercept(self, value: Expr | None) -> None:
        """Set intercept part of (level * profile + intercept)."""
        self._check_type(value, (Expr, type(None)))
        if value is not None:
            self._check_level_expr(value)
        self._intercept = value

    def get_level_unit_set(
        self,
        db: QueryDB | Model,
    ) -> set[str]:
        """
        Return set with all units behind level expression.

        Useful for discovering valid unit input to get_level_value.
        """
        if not self.has_level():
            return set()
        return get_units_from_expr(db, self.get_level())

    def get_profile_timeindex_set(
        self,
        db: QueryDB | Model,
    ) -> set[TimeIndex]:
        """
        Return set with all TimeIndex behind profile expression.

        Can be used to run optimized queries, i.e. not asking for
        finer time resolutions than necessary.
        """
        if not self.has_profile():
            return set()
        return get_timeindexes_from_expr(db, self.get_profile())

    def get_scenario_vector(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_float32: bool = True,
    ) -> NDArray:
        """
        Evaluate LevelProfile over the periods in scenario dimension, and at the level period of the data dimension.

        Underlying profiles are evaluated over the scenario dimension,
        and levels are evaluated to scalars over level_period in the data dimension.

        Args:
            db (QueryDB | Model): The database or model instance used to fetch the required data.
            scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
            level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
            unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
                the expression should be unitless.
            is_float32 (bool, optional): Whether to return the vector as a NumPy array with `float32`
                precision. Defaults to True.

        """
        return self._get_scenario_vector(db, scenario_horizon, level_period, unit, is_float32)

    def get_data_value(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_max_level: bool | None = None,
    ) -> float:
        """
        Evaluate LevelProfile to a scalar at the level period of the data dimension, and as an average over the scenario horizon.

        Args:
            db (QueryDB | Model): The database or model instance used to fetch the required data.
            scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
            level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
            unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
                the expression should be unitless.
            is_max_level (bool | None, optional): Whether to evaluate the expression as a maximum level (with a zero_one profile)
                or as an average level (with a mean_one profile). If None, the default format of the attribute is used.

        """
        return self._get_data_value(db, scenario_horizon, level_period, unit, is_max_level)

    def shift_intercept(self, value: float, unit: str | None) -> None:
        """Modify the intercept part of (level * profile + intercept) of an attribute by adding a constant value."""
        expr = ensure_expr(
            ConstantTimeVector(self._ensure_float(value), unit=unit, is_max_level=False),
            is_level=True,
            is_profile=False,
            is_stock=self._IS_STOCK,
            is_flow=self._IS_FLOW,
            profile=None,
        )
        if self._intercept is None:
            self._intercept = expr
        else:
            self._intercept += expr

    def shift_level(
        self,
        value: float | int,
        unit: str | None = None,
        reference_period: ReferencePeriod | None = None,
        is_max_level: bool | None = None,
        use_profile: bool = True,  # TODO: Remove. Should always use profile. If has profile validate that it is equal to the profile of Level.
    ) -> None:
        """Modify the level_shift part of (scale * (level + level_shift) * profile + intercept) of an attribute by adding a constant value."""
        # TODO: Not allowed to shift if there is intercept?
        self._check_type(value, (float, int))
        self._check_type(unit, (str, type(None)))
        self._check_type(reference_period, (ReferencePeriod, type(None)))
        self._check_type(is_max_level, (bool, type(None)))
        self._check_type(use_profile, bool)

        if is_max_level is None:
            is_max_level = self._IS_MAX_AND_ZERO_ONE

        expr = ensure_expr(
            ConstantTimeVector(
                self._ensure_float(value),
                unit=unit,
                is_max_level=is_max_level,
                reference_period=reference_period,
            ),
            is_level=True,
            is_profile=False,
            is_stock=self._IS_STOCK,
            is_flow=self._IS_FLOW,
            profile=self._profile if use_profile else None,
        )
        if self._level_shift is None:
            self._level_shift = expr
        else:
            self._level_shift += expr

    def scale(self, value: float | int) -> None:
        """Modify the scale part of (scale * (level + level_shift) * profile + intercept) of an attribute by multiplying with a constant value."""
        # TODO: Not allowed to scale if there is intercept?
        expr = ensure_expr(
            ConstantTimeVector(self._ensure_float(value), unit=None, is_max_level=False),
            is_level=True,
            is_profile=False,
            profile=None,
        )
        if self._scale is None:
            self._scale = expr
        else:
            self._scale *= expr

    def _ensure_level_expr(
        self,
        level: Expr | str | TimeVector | None,
        value: float | int | None = None,
        unit: str | None = None,
        reference_period: ReferencePeriod | None = None,
    ) -> Expr | None:
        if value is not None:
            level = ConstantTimeVector(
                scalar=float(value),
                unit=unit,
                is_max_level=self._IS_MAX_AND_ZERO_ONE,
                is_zero_one_profile=None,
                reference_period=reference_period,
            )
        if level is None:
            return None

        if isinstance(level, Expr):
            self._check_level_expr(level)
            return level

        return Expr(
            src=level,
            is_flow=self._IS_FLOW,
            is_stock=self._IS_STOCK,
            is_level=True,
            is_profile=False,
            profile=None,
        )

    def _ensure_compatible_level_profile_combo(self, level: Expr | None, profile: Expr | None) -> None:
        """Check that all profiles in leaf levels (in level) also exist in profile."""
        if level is None or profile is None:
            return

        leaf_level_profiles = get_profile_exprs_from_leaf_levels(level)
        leaf_profile_profiles = get_leaf_profiles(profile)

        for p in leaf_level_profiles:
            if p not in leaf_profile_profiles:
                message = (
                    f"Incompatible level/profile combination: not all profiles in leaf levels (in level) exist in profile. "
                    f"Profile expression {p} found in level {level} but not in profile."
                )
                raise ValueError(message)

    def _check_level_expr(self, expr: Expr) -> None:
        msg = f"{self} requires {expr} to be "
        if expr.is_stock() != self._IS_STOCK:
            raise ValueError(msg + f"is_stock={self._IS_STOCK}")
        if expr.is_flow() != self._IS_FLOW:
            raise ValueError(msg + f"is_flow={self._IS_STOCK}")
        if expr.is_level() is False:
            raise ValueError(msg + "is_level=True")
        if expr.is_profile() is True:
            raise ValueError(msg + "is_profile=False")

    def _check_profile_expr(self, expr: Expr) -> None:
        msg = f"{self} requires {expr} to be "
        if expr.is_stock() is True:
            raise ValueError(msg + "is_stock=False")
        if expr.is_flow() is True:
            raise ValueError(msg + "is_flow=False")
        if expr.is_level() is True:
            raise ValueError(msg + "is_level=False")
        if expr.is_profile() is False:
            raise ValueError(msg + "is_profile=True")

    def _ensure_profile_expr(
        self,
        value: Expr | str | TimeVector | None,
    ) -> Expr | None:
        if value is None:
            return None

        if isinstance(value, Expr):
            self._check_profile_expr(value)
            return value

        return Expr(
            src=value,
            is_flow=False,
            is_stock=False,
            is_level=False,
            is_profile=True,
            profile=None,
        )

    def _get_data_value(
        self,
        db: QueryDB,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_max_level: bool | None,
    ) -> float:
        # NB! don't type check db, as this is done in get_level_value and get_profile_vector
        self._check_type(scenario_horizon, FixedFrequencyTimeIndex)
        self._check_type(level_period, SinglePeriodTimeIndex)
        self._check_type(unit, (str, type(None)))
        self._check_type(is_max_level, (bool, type(None)))

        level_expr = self.get_level()

        if is_max_level is None:
            is_max_level = self._IS_MAX_AND_ZERO_ONE

        self._check_type(level_expr, (Expr, type(None)))
        if not isinstance(level_expr, Expr):
            raise ValueError("Attribute level Expr is None. Have you called Solver.solve yet?")

        level_value = get_level_value(
            expr=level_expr,
            db=db,
            scen_dim=scenario_horizon,
            data_dim=level_period,
            unit=unit,
            is_max=is_max_level,
        )

        intercept = None
        if self._intercept is not None:
            intercept = _get_constant_from_expr(
                self._intercept,
                db,
                unit=unit,
                data_dim=level_period,
                scen_dim=scenario_horizon,
                is_max=is_max_level,
            )

        if intercept is None:
            return level_value

        return level_value + intercept

    def _get_scenario_vector(
        self,
        db: QueryDB | Model,
        scenario_horizon: FixedFrequencyTimeIndex,
        level_period: SinglePeriodTimeIndex,
        unit: str | None,
        is_float32: bool = True,
    ) -> NDArray:
        """Return vector with values along the given scenario horizon using level over level_period."""
        # NB! don't type check db, as this is done in get_level_value and get_profile_vector
        self._check_type(scenario_horizon, FixedFrequencyTimeIndex)
        self._check_type(level_period, SinglePeriodTimeIndex)
        self._check_type(unit, (str, type(None)))
        self._check_type(is_float32, bool)

        level_expr = self.get_level()

        self._check_type(level_expr, (Expr, type(None)))
        if not isinstance(level_expr, Expr):
            raise ValueError("Attribute level Expr is None. Have you called Solver.solve yet?")

        level_value = get_level_value(
            expr=level_expr,
            db=db,
            scen_dim=scenario_horizon,
            data_dim=level_period,
            unit=unit,
            is_max=self._IS_MAX_AND_ZERO_ONE,
        )

        profile_expr = self.get_profile()

        if profile_expr is None:
            profile_vector = np.ones(
                scenario_horizon.get_num_periods(),
                dtype=np.float32 if is_float32 else np.float64,
            )
        else:
            profile_vector = get_profile_vector(
                expr=profile_expr,
                db=db,
                scen_dim=scenario_horizon,
                data_dim=level_period,
                is_zero_one=self._IS_MAX_AND_ZERO_ONE,
                is_float32=is_float32,
            )

        intercept = None
        if self._intercept is not None:
            intercept = _get_constant_from_expr(
                self._intercept,
                db,
                unit=unit,
                data_dim=level_period,
                scen_dim=scenario_horizon,
                is_max=self._IS_MAX_AND_ZERO_ONE,
            )

        if intercept is None:
            return level_value * profile_vector

        return level_value * profile_vector + intercept

    def _has_same_behaviour(self, other: LevelProfile) -> bool:
        return all(
            (
                self._IS_FLOW == other._IS_FLOW,
                self._IS_STOCK == other._IS_STOCK,
                self._IS_NOT_NEGATIVE == other._IS_NOT_NEGATIVE,
                self._IS_MAX_AND_ZERO_ONE == other._IS_MAX_AND_ZERO_ONE,
                self._IS_INGOING == other._IS_INGOING,
                self._IS_COST == other._IS_COST,
                self._IS_UNITLESS == other._IS_UNITLESS,
            ),
        )

    def _assert_same_behaviour(self, other: LevelProfile) -> None:
        if not self._has_same_behaviour(other):
            message = f"Not same behaviour for {self} and {other}"
            raise ValueError(message)

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Return True if other is equal to self."""
        if not isinstance(other, LevelProfile):
            return False
        if not self._has_same_behaviour(other):
            return False
        return all(
            (
                self._level == other._level,
                self._profile == other._profile,
                self._level_shift == other._level_shift,
                self._intercept == other._intercept,
                self._scale == other._scale,
            ),
        )

    def __hash__(self) -> int:
        """Compute hash of self."""
        return hash(
            (
                type(self).__name__,
                self._level,
                self._profile,
                self._level_shift,
                self._intercept,
                self._scale,
            ),
        )
__eq__(other) -> bool

Return True if other is equal to self.

Source code in framcore/attributes/level_profile_attributes.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Return True if other is equal to self."""
    if not isinstance(other, LevelProfile):
        return False
    if not self._has_same_behaviour(other):
        return False
    return all(
        (
            self._level == other._level,
            self._profile == other._profile,
            self._level_shift == other._level_shift,
            self._intercept == other._intercept,
            self._scale == other._scale,
        ),
    )
__hash__() -> int

Compute hash of self.

Source code in framcore/attributes/level_profile_attributes.py
def __hash__(self) -> int:
    """Compute hash of self."""
    return hash(
        (
            type(self).__name__,
            self._level,
            self._profile,
            self._level_shift,
            self._intercept,
            self._scale,
        ),
    )
__init__(level: Expr | TimeVector | str | None = None, profile: Expr | TimeVector | str | None = None, value: float | int | None = None, unit: str | None = None, level_shift: Expr | None = None, intercept: Expr | None = None, scale: Expr | None = None) -> None

Initialize LevelProfile.

See the LevelProfile class docstring for details. A complete LevelProfile is represented as: Scale * (Level + Level_shift) * Profile + Intercept. Normally only Level and Profile are used.

Either give level and profile, or value and unit.

Parameters:

Name Type Description Default
level Expr | TimeVector | str | None

Level Expr. Defaults to None.

None
profile Expr | TimeVector | str | None

Profile Expr. Defaults to None.

None
value float | int | None

A constant value to initialize Level. Defaults to None.

None
unit str | None

Unit of the constant value to initialize Level. Defaults to None.

None
level_shift Expr | None

Level_shift Expr. Defaults to None.

None
intercept Expr | None

Intercept Expr. Defaults to None.

None
scale Expr | None

Scale Expr. Defaults to None.

None
Source code in framcore/attributes/level_profile_attributes.py
def __init__(
    self,
    level: Expr | TimeVector | str | None = None,
    profile: Expr | TimeVector | str | None = None,
    value: float | int | None = None,  # To support Price(value=20, unit="EUR/MWh")
    unit: str | None = None,
    level_shift: Expr | None = None,
    intercept: Expr | None = None,
    scale: Expr | None = None,
) -> None:
    """
    Initialize LevelProfile.

    See the LevelProfile class docstring for details. A complete LevelProfile is represented as:
    Scale * (Level + Level_shift) * Profile + Intercept. Normally only Level and Profile are used.

    Either give level and profile, or value and unit.

    Args:
        level (Expr | TimeVector | str | None, optional): Level Expr. Defaults to None.
        profile (Expr | TimeVector | str | None, optional): Profile Expr. Defaults to None.
        value (float | int | None, optional): A constant value to initialize Level. Defaults to None.
        unit (str | None, optional): Unit of the constant value to initialize Level. Defaults to None.
        level_shift (Expr | None, optional): Level_shift Expr. Defaults to None.
        intercept (Expr | None, optional): Intercept Expr. Defaults to None.
        scale (Expr | None, optional): Scale Expr. Defaults to None.

    """
    self._assert_invariants()

    self._check_type(value, (float, int, type(None)))
    self._check_type(unit, (str, type(None)))
    self._check_type(level, (Expr, TimeVector, str, type(None)))
    self._check_type(profile, (Expr, TimeVector, str, type(None)))
    self._check_type(level_shift, (Expr, type(None)))
    self._check_type(intercept, (Expr, type(None)))
    self._check_type(scale, (Expr, type(None)))
    level = self._ensure_level_expr(level, value, unit)
    profile = self._ensure_profile_expr(profile)
    self._ensure_compatible_level_profile_combo(level, profile)
    self._ensure_compatible_level_profile_combo(level_shift, profile)
    self._level: Expr | None = level
    self._profile: Expr | None = profile
    self._level_shift: Expr | None = level_shift
    self._intercept: Expr | None = intercept
    self._scale: Expr | None = scale
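A hedged construction sketch. The import path framcore.attributes is an assumption inferred from the source file path above, and the string arguments are hypothetical database keys for TimeVectors:

from framcore.attributes import MaxFlowVolume, Price  # assumed import path

# Constant-value initialization, as in the inline comment in __init__.
price = Price(value=20, unit="EUR/MWh")

# Level/profile initialization from hypothetical database keys.
capacity = MaxFlowVolume(level="wind_capacity_2030", profile="wind_profile_1991_2020")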
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in expressions to loaders.

Source code in framcore/attributes/level_profile_attributes.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in expressions to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_level())
    add_loaders_if(loaders, self.get_profile())
clear() -> None

Set all internal fields to None.

You may want to use this to get exogenous flow to use capacities instead of volume.

Source code in framcore/attributes/level_profile_attributes.py
def clear(self) -> None:
    """
    Set all internal fields to None.

    You may want to use this to get exogenous flow to use capacities instead of volume.
    """
    self._level = None
    self._profile = None
    self._level_shift = None
    self._intercept = None
    self._scale = None
copy_from(other: LevelProfile) -> None

Copy fields from other.

Source code in framcore/attributes/level_profile_attributes.py
def copy_from(self, other: LevelProfile) -> None:
    """Copy fields from other."""
    self._check_type(other, LevelProfile)
    self._assert_same_behaviour(other)
    self._level = other._level
    self._profile = other._profile
    self._level_shift = other._level_shift
    self._intercept = other._intercept
    self._scale = other._scale
get_data_value(db: QueryDB | Model, scenario_horizon: FixedFrequencyTimeIndex, level_period: SinglePeriodTimeIndex, unit: str | None, is_max_level: bool | None = None) -> float

Evaluate LevelProfile to a scalar at the level period of the data dimension, and as an average over the scenario horizon.

Parameters:

Name Type Description Default
db QueryDB | Model

The database or model instance used to fetch the required data.

required
scenario_horizon FixedFrequencyTimeIndex

TimeIndex of the scenario dimension to evaluate profiles.

required
level_period SinglePeriodTimeIndex

TimeIndex of the data dimension to evaluate levels.

required
unit str | None

The unit to convert the resulting values into (e.g., MW, GWh). If None, the expression should be unitless.

required
is_max_level bool | None

Whether to evaluate the expression as a maximum level (with a zero_one profile) or as an average level (with a mean_one profile). If None, the default format of the attribute is used.

None
Source code in framcore/attributes/level_profile_attributes.py
def get_data_value(
    self,
    db: QueryDB | Model,
    scenario_horizon: FixedFrequencyTimeIndex,
    level_period: SinglePeriodTimeIndex,
    unit: str | None,
    is_max_level: bool | None = None,
) -> float:
    """
    Evaluate LevelProfile to a scalar at the level period of the data dimension, and as an average over the scenario horizon.

    Args:
        db (QueryDB | Model): The database or model instance used to fetch the required data.
        scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
        level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
        unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
            the expression should be unitless.
        is_max_level (bool | None, optional): Whether to evaluate the expression as a maximum level (with a zero_one profile)
            or as an average level (with a mean_one profile). If None, the default format of the attribute is used.

    """
    return self._get_data_value(db, scenario_horizon, level_period, unit, is_max_level)
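A usage sketch, not runnable as-is: db (a QueryDB or Model), scen_horizon (a FixedFrequencyTimeIndex) and period_2030 (a SinglePeriodTimeIndex) are assumed to be constructed elsewhere, and capacity is the attribute from the construction sketch above:

# Scalar average MW at the 2030 level period, averaged over the scenario horizon.
avg_mw = capacity.get_data_value(db, scen_horizon, period_2030, unit="MW", is_max_level=False)

# The same attribute queried on the max format instead.
max_mw = capacity.get_data_value(db, scen_horizon, period_2030, unit="MW", is_max_level=True)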
get_intercept() -> Expr | None

Get intercept part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def get_intercept(self) -> Expr | None:
    """Get intercept part of (level * profile + intercept)."""
    intercept = self._intercept
    if self._scale is not None:
        intercept *= self._scale
    return intercept
get_level() -> Expr | None

Get level part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def get_level(self) -> Expr | None:
    """Get level part of (level * profile + intercept)."""
    level = self._level

    if level is None:
        return None

    if level.is_leaf():
        level = Expr(
            src=level.get_src(),
            operations=level.get_operations(expect_ops=False, copy_list=True),
            is_stock=level.is_stock(),
            is_flow=level.is_flow(),
            is_level=True,
            is_profile=False,
            profile=self._profile,
        )

    if self._level_shift is not None:
        level += self._level_shift

    if self._scale is not None:
        level *= self._scale

    return level
get_level_unit_set(db: QueryDB | Model) -> set[str]

Return set with all units behind level expression.

Useful for discovering valid unit input to get_level_value.

Source code in framcore/attributes/level_profile_attributes.py
def get_level_unit_set(
    self,
    db: QueryDB | Model,
) -> set[str]:
    """
    Return set with all units behind level expression.

    Useful for discovering valid unit input to get_level_value.
    """
    if not self.has_level():
        return set()
    return get_units_from_expr(db, self.get_level())
get_profile() -> Expr | None

Get profile part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def get_profile(self) -> Expr | None:
    """Get profile part of (level * profile + intercept)."""
    return self._profile
get_profile_timeindex_set(db: QueryDB | Model) -> set[TimeIndex]

Return set with all TimeIndex behind profile expression.

Can be used to run optimized queries, i.e. not asking for finer time resolutions than necessary.

Source code in framcore/attributes/level_profile_attributes.py
def get_profile_timeindex_set(
    self,
    db: QueryDB | Model,
) -> set[TimeIndex]:
    """
    Return set with all TimeIndex behind profile expression.

    Can be used to run optimized queries, i.e. not asking for
    finer time resolutions than necessary.
    """
    if not self.has_profile():
        return set()
    return get_timeindexes_from_expr(db, self.get_profile())
get_scenario_vector(db: QueryDB | Model, scenario_horizon: FixedFrequencyTimeIndex, level_period: SinglePeriodTimeIndex, unit: str | None, is_float32: bool = True) -> NDArray

Evaluate LevelProfile over the periods in scenario dimension, and at the level period of the data dimension.

Underlying profiles are evaluated over the scenario dimension, and levels are evaluated to scalars over level_period in the data dimension.

Parameters:

Name Type Description Default
db QueryDB | Model

The database or model instance used to fetch the required data.

required
scenario_horizon FixedFrequencyTimeIndex

TimeIndex of the scenario dimension to evaluate profiles.

required
level_period SinglePeriodTimeIndex

TimeIndex of the data dimension to evaluate levels.

required
unit str | None

The unit to convert the resulting values into (e.g., MW, GWh). If None, the expression should be unitless.

required
is_float32 bool

Whether to return the vector as a NumPy array with float32 precision. Defaults to True.

True
Source code in framcore/attributes/level_profile_attributes.py
def get_scenario_vector(
    self,
    db: QueryDB | Model,
    scenario_horizon: FixedFrequencyTimeIndex,
    level_period: SinglePeriodTimeIndex,
    unit: str | None,
    is_float32: bool = True,
) -> NDArray:
    """
    Evaluate LevelProfile over the periods in scenario dimension, and at the level period of the data dimension.

    Underlying profiles are evaluated over the scenario dimension,
    and levels are evaluated to scalars over level_period in the data dimension.

    Args:
        db (QueryDB | Model): The database or model instance used to fetch the required data.
        scenario_horizon (FixedFrequencyTimeIndex): TimeIndex of the scenario dimension to evaluate profiles.
        level_period (SinglePeriodTimeIndex): TimeIndex of the data dimension to evaluate levels.
        unit (str | None): The unit to convert the resulting values into (e.g., MW, GWh). If None,
            the expression should be unitless.
        is_float32 (bool, optional): Whether to return the vector as a NumPy array with `float32`
            precision. Defaults to True.

    """
    return self._get_scenario_vector(db, scenario_horizon, level_period, unit, is_float32)
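A companion sketch under the same assumptions as the get_data_value example above; the result holds one value per period in the scenario horizon:

# float32 vector of level * profile (+ intercept) along the scenario horizon.
vector = capacity.get_scenario_vector(db, scen_horizon, period_2030, unit="MW", is_float32=True)
assert len(vector) == scen_horizon.get_num_periods()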
has_intercept() -> bool

Return True if get_intercept will return a value that is not None.

Source code in framcore/attributes/level_profile_attributes.py
def has_intercept(self) -> bool:
    """Return True if get_intercept will return a value that is not None."""
    return self._intercept is not None
has_level() -> bool

Return True if get_level will return a value that is not None.

Source code in framcore/attributes/level_profile_attributes.py
def has_level(self) -> bool:
    """Return True if get_level will return a value that is not None."""
    return (self._level is not None) or (self._level_shift is not None)
has_profile() -> bool

Return True if get_profile will return a value that is not None.

Source code in framcore/attributes/level_profile_attributes.py
def has_profile(self) -> bool:
    """Return True if get_profile will return a value that is not None."""
    return self._profile is not None
is_cost() -> bool | None

Return True if attribute is objective function cost coefficient.

Return False if attribute is objective function revenue coefficient.

Return None if not applicable.

Source code in framcore/attributes/level_profile_attributes.py
def is_cost(self) -> bool | None:
    """
    Return True if attribute is objective function cost coefficient.

    Return False if attribute is objective function revenue coefficient.

    Return None if not applicable.
    """
    return self._IS_COST
is_flow() -> bool

Return True if attribute is a flow variable.

Return False if attribute is not a flow variable.

Source code in framcore/attributes/level_profile_attributes.py
def is_flow(self) -> bool:
    """
    Return True if attribute is a flow variable.

    Return False if attribute is not a flow variable.
    """
    return self._IS_FLOW
is_ingoing() -> bool | None

Return True if attribute is ingoing.

Return False if attribute is outgoing.

Return None if not applicable.

Source code in framcore/attributes/level_profile_attributes.py
def is_ingoing(self) -> bool | None:
    """
    Return True if attribute is ingoing.

    Return False if attribute is outgoing.

    Return None if not applicable.
    """
    return self._IS_INGOING
is_max_and_zero_one() -> bool

When True level should be max (not average) and corresponding profile should be zero_one (not mean_one).

When False level should be average (not max) and corresponding profile should be mean_one (not zero_one).

Source code in framcore/attributes/level_profile_attributes.py
def is_max_and_zero_one(self) -> bool:
    """
    When True level should be max (not average) and corresponding profile should be zero_one (not mean_one).

    When False level should be average (not max) and corresponding profile should be mean_one (not zero_one).
    """
    return self._IS_MAX_AND_ZERO_ONE
is_not_negative() -> bool

Return True if attribute is not allowed to have negative values.

Return False if attribute can have both positive and negative values.

Source code in framcore/attributes/level_profile_attributes.py
def is_not_negative(self) -> bool:
    """
    Return True if attribute is not allowed to have negative values.

    Return False if attribute can have both positive and negative values.
    """
    return self._IS_NOT_NEGATIVE
is_stock() -> bool

Return True if attribute is a stock variable.

Return False if attribute is not a stock variable.

Source code in framcore/attributes/level_profile_attributes.py
def is_stock(self) -> bool:
    """
    Return True if attribute is a stock variable.

    Return False if attribute is not a stock variable.
    """
    return self._IS_STOCK
is_unitless() -> bool | None

Return True if attribute is known to be unitless.

Return False if attribute is known to have a unit that is not None.

Return None if not applicable.

Source code in framcore/attributes/level_profile_attributes.py
def is_unitless(self) -> bool | None:
    """
    Return True if attribute is known to be unitless.

    Return False if attribute is known to have a unit that is not None.

    Return None if not applicable.
    """
    return self._IS_UNITLESS
scale(value: float | int) -> None

Modify the scale part of (scale * (level + level_shift) * profile + intercept) of an attribute by multiplying with a constant value.

Source code in framcore/attributes/level_profile_attributes.py
def scale(self, value: float | int) -> None:
    """Modify the scale part of (scale * (level + level_shift) * profile + intercept) of an attribute by multiplying with a constant value."""
    # TODO: Not allowed to scale if there is intercept?
    expr = ensure_expr(
        ConstantTimeVector(self._ensure_float(value), unit=None, is_max_level=False),
        is_level=True,
        is_profile=False,
        profile=None,
    )
    if self._scale is None:
        self._scale = expr
    else:
        self._scale *= expr
set_intercept(value: Expr | None) -> None

Set intercept part of (level * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def set_intercept(self, value: Expr | None) -> None:
    """Set intercept part of (level * profile + intercept)."""
    self._check_type(value, (Expr, type(None)))
    if value is not None:
        self._check_level_expr(value)
    self._intercept = value
set_level(level: Expr | TimeVector | str | None) -> None

Set level part of (scale * (level + level_shift) * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def set_level(self, level: Expr | TimeVector | str | None) -> None:
    """Set level part of (scale * (level + level_shift) * profile + intercept)."""
    self._check_type(level, (Expr, TimeVector, str, type(None)))
    level = self._ensure_level_expr(level)
    self._ensure_compatible_level_profile_combo(level, self._profile)
    self._level = level
set_profile(profile: Expr | TimeVector | str | None) -> None

Set profile part of (scale * (level + level_shift) * profile + intercept).

Source code in framcore/attributes/level_profile_attributes.py
def set_profile(self, profile: Expr | TimeVector | str | None) -> None:
    """Set profile part of (scale * (level + level_shift) * profile + intercept)."""
    self._check_type(profile, (Expr, TimeVector, str, type(None)))
    profile = self._ensure_profile_expr(profile)
    self._ensure_compatible_level_profile_combo(self._level, profile)
    self._profile = profile
shift_intercept(value: float, unit: str | None) -> None

Modify the intercept part of (level * profile + intercept) of an attribute by adding a constant value.

Source code in framcore/attributes/level_profile_attributes.py
def shift_intercept(self, value: float, unit: str | None) -> None:
    """Modify the intercept part of (level * profile + intercept) of an attribute by adding a constant value."""
    expr = ensure_expr(
        ConstantTimeVector(self._ensure_float(value), unit=unit, is_max_level=False),
        is_level=True,
        is_profile=False,
        is_stock=self._IS_STOCK,
        is_flow=self._IS_FLOW,
        profile=None,
    )
    if self._intercept is None:
        self._intercept = expr
    else:
        self._intercept += expr
shift_level(value: float | int, unit: str | None = None, reference_period: ReferencePeriod | None = None, is_max_level: bool | None = None, use_profile: bool = True) -> None

Modify the level_shift part of (scale * (level + level_shift) * profile + intercept) of an attribute by adding a constant value.

Source code in framcore/attributes/level_profile_attributes.py
def shift_level(
    self,
    value: float | int,
    unit: str | None = None,
    reference_period: ReferencePeriod | None = None,
    is_max_level: bool | None = None,
    use_profile: bool = True,  # TODO: Remove. Should always use profile. If has profile validate that it is equal to the profile of Level.
) -> None:
    """Modify the level_shift part of (scale * (level + level_shift) * profile + intercept) of an attribute by adding a constant value."""
    # TODO: Not allowed to shift if there is intercept?
    self._check_type(value, (float, int))
    self._check_type(unit, (str, type(None)))
    self._check_type(reference_period, (ReferencePeriod, type(None)))
    self._check_type(is_max_level, (bool, type(None)))
    self._check_type(use_profile, bool)

    if is_max_level is None:
        is_max_level = self._IS_MAX_AND_ZERO_ONE

    expr = ensure_expr(
        ConstantTimeVector(
            self._ensure_float(value),
            unit=unit,
            is_max_level=is_max_level,
            reference_period=reference_period,
        ),
        is_level=True,
        is_profile=False,
        is_stock=self._IS_STOCK,
        is_flow=self._IS_FLOW,
        profile=self._profile if use_profile else None,
    )
    if self._level_shift is None:
        self._level_shift = expr
    else:
        self._level_shift += expr
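A plain-arithmetic check (NumPy only, no framcore objects) of the full representation that shift_level, scale, and shift_intercept accumulate:

import numpy as np

level, level_shift, scale_factor, intercept = 100.0, 10.0, 2.0, -5.0
profile = np.array([0.0, 0.5, 1.0])

# Scale * (Level + Level_shift) * Profile + Intercept
print(scale_factor * (level + level_shift) * profile + intercept)  # [ -5. 105. 215.]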
Loss

Bases: ArrowCoefficient

Concrete class representing a loss coefficient attribute, indicating a unitless coefficient.

Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Loss(ArrowCoefficient):  # TODO: Make a loss for storage that is percentage per time
    """
    Concrete class representing a loss coefficient attribute, indicating a unitless coefficient.

    Subclass of ArrowCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True
MaxFlowVolume

Bases: FlowVolume

Concrete class representing a maximum flow volume attribute, indicating a flow variable with maximum values.

Subclass of FlowVolume < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class MaxFlowVolume(FlowVolume):
    """
    Concrete class representing a maximum flow volume attribute, indicating a flow variable with maximum values.

    Subclass of FlowVolume < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_MAX_AND_ZERO_ONE = True
ObjectiveCoefficient

Bases: Coefficient

Abstract class representing an objective coefficient attribute, indicating cost or revenue coefficients in the objective function.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ObjectiveCoefficient(Coefficient):
    """
    Abstract class representing an objective coefficient attribute, indicating cost or revenue coefficients in the objective function.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_UNITLESS = False
    _IS_NOT_NEGATIVE = False
Price

Bases: ShadowPrice

Concrete class representing a price attribute, indicating the price of a commodity at a specific node.

Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Price(ShadowPrice):
    """
    Concrete class representing a price attribute, indicating the price of a commodity at a specific node.

    Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
Proportion

Bases: Coefficient

Concrete class representing a proportion coefficient attribute, indicating a unitless coefficient between 0 and 1.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class Proportion(Coefficient):
    """
    Concrete class representing a proportion coefficient attribute, indicating a unitless coefficient between 0 and 1.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_UNITLESS = True
ReservePrice

Bases: ObjectiveCoefficient

Concrete class representing a reserve price attribute, indicating revenue coefficients in the objective function.

Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ReservePrice(ObjectiveCoefficient):
    """
    Concrete class representing a reserve price attribute, indicating revenue coefficients in the objective function.

    Subclass of ObjectiveCoefficient < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_COST = False
ShadowPrice

Bases: Coefficient

Abstract class representing a shadow price attribute, indicating that the attribute has a unit and might be negative.

Subclass of Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class ShadowPrice(Coefficient):
    """
    Abstract class representing a shadow price attribute, indicating that the attribute has a unit and might be negative.

    Subclass of Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_UNITLESS = False
    _IS_NOT_NEGATIVE = False
StockVolume

Bases: LevelProfile

Concrete class representing a stock volume attribute, indicating a stock variable with maximum values.

Subclass of LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class StockVolume(LevelProfile):
    """
    Concrete class representing a stock volume attribute, indicating a stock variable with maximum values.

    Subclass of LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False
    _IS_STOCK = True
    _IS_MAX_AND_ZERO_ONE = True
WaterValue

Bases: ShadowPrice

Concrete class representing a water value attribute, indicating the value of water in the system.

Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.

Source code in framcore/attributes/level_profile_attributes.py
class WaterValue(ShadowPrice):
    """
    Concrete class representing a water value attribute, indicating the value of water in the system.

    Subclass of ShadowPrice < Coefficient < LevelProfile. See LevelProfile for details.
    """

    _IS_ABSTRACT = False

components

Solar

Bases: _WindSolar

Solar power component.

Has attributes for power node, capacity, variable operation cost, and production.

Compatible with WindSolarAggregator.

Source code in framcore/components/wind_solar.py
class Solar(_WindSolar):
    """
    Solar power component.

    Has attributes for power node, capacity, variable operation cost, and production.

    Compatible with WindSolarAggregator.
    """

    pass

Wind

Bases: _WindSolar

Wind power component.

Has attributes for power node, capacity, variable operation cost, and production.

Compatible with WindSolarAggregator.

Source code in framcore/components/wind_solar.py
class Wind(_WindSolar):
    """
    Wind power component.

    Has attributes for power node, capacity, variable operation cost, and production.

    Compatible with WindSolarAggregator.
    """

    pass

Component

Component

Bases: Base, ABC

Components describe the main elements in the energy system. Can have additional Attributes and Metadata.

We have high-level and low-level Components. High-level Components, such as a HydroModule, can be decomposed into low-level Components like Flows and Nodes. The high-level description lets analysts work with recognizable domain objects, while the low-level descriptions enable generic algorithms that minimize code duplication and simplify data manipulation.

Some energy market models like JulES, SpineOpt and PyPSA also have a generic description of the system, so this two-tier design makes it easier to adapt the dataset to their required formats.

The method Component.get_simpler_components() is used to decompose high-level Components into low-level Components. This can also be used together with the utility function get_supported_components() to transform a set of Components into a set that only contains supported Component types.

Result attributes are initialized in the high-level Components. When they are transferred to low-level Components, and the results are set by a model like JulES, the results will also appear in the high-level Components.

Nodes, Flows and Arrows are the main building blocks in FRAM's low-level representation of energy systems. A Node represents a point where a commodity can be traded, stored or passed through. Movement between Nodes is represented by Flows and Arrows. A Flow represents a commodity flow and can have Arrows that each describe the contribution of the Flow to a Node. Each Arrow has a direction to determine input or output, and parameters for the contribution of the Flow to the Node (conversion, efficiency and loss).

Source code in framcore/components/Component.py
class Component(Base, ABC):
    """
    Components describe the main elements in the energy system. Can have additional Attributes and Metadata.

    We have high-level and low-level Components. High-level Components, such as a HydroModule,
    can be decomposed into low-level Components like Flows and Nodes. The high-level description lets
    analysts work with recognizable domain objects, while the low-level descriptions enable generic algorithms
    that minimize code duplication and simplify data manipulation.

    Some energy market models like JulES, SpineOpt and PyPSA also have a generic description of the system,
    so this two-tier design makes it easier to adapt the dataset to their required formats.

    The method Component.get_simpler_components() is used to decompose high-level Components into low-level
    Components. This can also be used together with the utility function get_supported_components() to transform
    a set of Components into a set that only contains supported Component types.

    Result attributes are initialized in the high-level Components. When they are transferred to low-level Components,
    and the results are set by a model like JulES, the results will also appear in the high-level Components.

    Nodes, Flows and Arrows are the main building blocks in FRAM's low-level representation of energy systems.
    A Node represents a point where a commodity can be traded, stored or passed through.
    Movement between Nodes is represented by Flows and Arrows. A Flow represents a commodity flow
    and can have Arrows that each describe the contribution of the Flow to a Node.
    Each Arrow has a direction to determine input or output, and parameters for the contribution of the
    Flow to the Node (conversion, efficiency and loss).
    """

    def __init__(self) -> None:
        """Set mandatory private variables."""
        self._parent: Component | None = None
        self._meta: dict[str, Meta] = dict()

    def add_meta(self, key: str, value: Meta) -> None:
        """Add metadata to component. Overwrite if already exist."""
        self._check_type(key, str)
        self._check_type(value, Meta)
        self._meta[key] = value

    def get_meta(self, key: str) -> Meta | None:
        """Get metadata from component or return None if not exist."""
        self._check_type(key, str)
        return self._meta.get(key, None)

    def get_meta_keys(self) -> Iterable[str]:
        """Get iterable with all metakeys in component."""
        return self._meta.keys()

    def get_simpler_components(
        self,
        base_name: str,
    ) -> dict[str, Component]:
        """
        Return representation of self as dict of named simpler components.

        The base_name should be unique within a model instance, and should
        be used to prefix the names of all simpler components.

        Insert self as parent in each child.

        Transfer metadata to each child.
        """
        self._check_type(base_name, str)
        components = self._get_simpler_components(base_name)
        assert base_name not in components, f"base_name: {base_name} should not be in \ncomponent: {self}"
        components: dict[str, Component]
        self._check_type(components, dict)
        for name, c in components.items():
            self._check_type(name, str)
            self._check_type(c, Component)
            self._check_component_not_self(c)
            c: Component
            c._parent = self  # noqa: SLF001
        for key in self.get_meta_keys():
            value = self.get_meta(key)
            for c in components.values():
                c.add_meta(key, value)
        return components

    def get_parent(self) -> Component | None:
        """Return parent if any, else None."""
        self._check_type(self._parent, (Component, type(None)))
        self._check_component_not_self(self._parent)
        return self._parent

    def get_parents(self) -> list[Component]:
        """Return list of all parents, including self."""
        child = self
        parent = child.get_parent()
        parents = [child]
        while parent is not None:
            child = parent
            parent = child.get_parent()
            parents.append(child)
        self._check_unique_parents(parents)
        return parents

    def get_top_parent(self) -> Component:
        """Return topmost parent. (May be object self)."""
        parents = self.get_parents()
        return parents[-1]

    def replace_node(self, old: str, new: str) -> None:
        """Replace old Node with new. Not error if no match."""
        self._check_type(old, str)
        self._check_type(new, str)
        self._replace_node(old, new)

    def _check_component_not_self(self, other: Component | None) -> None:
        if not isinstance(other, Component):
            return
        if self != other:
            return
        message = f"Expected other component than {self}."
        raise TypeError(message)

    def _check_unique_parents(self, parents: list[Component]) -> None:
        if len(parents) > len(set(parents)):
            message = f"Parents for {self} are not unique."
            raise TypeError(message)

    @abstractmethod
    def _replace_node(self, old: str, new: str) -> None:
        pass

    @abstractmethod
    def _get_simpler_components(self, base_name: str) -> dict[str, Component]:
        pass
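
Since only _replace_node and _get_simpler_components are abstract, a minimal concrete Component is short. The subclass below is a hypothetical sketch, assuming Component is importable from framcore.components:

from framcore.components import Component  # assumed import path


class SingleNodeComponent(Component):  # hypothetical example class
    """Toy component tied to one node name."""

    def __init__(self, node: str) -> None:
        super().__init__()  # sets _parent and _meta
        self._node = node

    def _replace_node(self, old: str, new: str) -> None:
        if self._node == old:
            self._node = new

    def _get_simpler_components(self, base_name: str) -> dict[str, Component]:
        return {}  # already as simple as possible


c = SingleNodeComponent("NO1_power")
c.replace_node("NO1_power", "NO2_power")  # type-checked by Component
assert c.get_top_parent() is c  # no parent has been set
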
__init__() -> None

Set mandatory private variables.

Source code in framcore/components/Component.py
def __init__(self) -> None:
    """Set mandatory private variables."""
    self._parent: Component | None = None
    self._meta: dict[str, Meta] = dict()
add_meta(key: str, value: Meta) -> None

Add metadata to component. Overwrite if the key already exists.

Source code in framcore/components/Component.py
def add_meta(self, key: str, value: Meta) -> None:
    """Add metadata to component. Overwrite if already exist."""
    self._check_type(key, str)
    self._check_type(value, Meta)
    self._meta[key] = value
get_meta(key: str) -> Meta | None

Get metadata from component or return None if it does not exist.

Source code in framcore/components/Component.py
def get_meta(self, key: str) -> Meta | None:
    """Get metadata from component or return None if not exist."""
    self._check_type(key, str)
    return self._meta.get(key, None)
get_meta_keys() -> Iterable[str]

Get iterable with all metakeys in component.

Source code in framcore/components/Component.py
def get_meta_keys(self) -> Iterable[str]:
    """Get iterable with all metakeys in component."""
    return self._meta.keys()
get_parent() -> Component | None

Return parent if any, else None.

Source code in framcore/components/Component.py
def get_parent(self) -> Component | None:
    """Return parent if any, else None."""
    self._check_type(self._parent, (Component, type(None)))
    self._check_component_not_self(self._parent)
    return self._parent
get_parents() -> list[Component]

Return list of all parents, including self.

Source code in framcore/components/Component.py
def get_parents(self) -> list[Component]:
    """Return list of all parents, including self."""
    child = self
    parent = child.get_parent()
    parents = [child]
    while parent is not None:
        child = parent
        parent = child.get_parent()
        parents.append(child)
    self._check_unique_parents(parents)
    return parents
get_simpler_components(base_name: str) -> dict[str, Component]

Return representation of self as dict of named simpler components.

The base_name should be unique within a model instance, and should be used to prefix the names of all simpler components.

Insert self as parent in each child.

Transfer metadata to each child.

Source code in framcore/components/Component.py
def get_simpler_components(
    self,
    base_name: str,
) -> dict[str, Component]:
    """
    Return representation of self as dict of named simpler components.

    The base_name should be unique within a model instance, and should
    be used to prefix the names of all simpler components.

    Insert self as parent in each child.

    Transfer metadata to each child.
    """
    self._check_type(base_name, str)
    components = self._get_simpler_components(base_name)
    assert base_name not in components, f"base_name: {base_name} should not be in \ncomponent: {self}"
    components: dict[str, Component]
    self._check_type(components, dict)
    for name, c in components.items():
        self._check_type(name, str)
        self._check_type(c, Component)
        self._check_component_not_self(c)
        c: Component
        c._parent = self  # noqa: SLF001
    for key in self.get_meta_keys():
        value = self.get_meta(key)
        for c in components.values():
            c.add_meta(key, value)
    return components
get_top_parent() -> Component

Return topmost parent. (May be object self).

Source code in framcore/components/Component.py
def get_top_parent(self) -> Component:
    """Return topmost parent. (May be object self)."""
    parents = self.get_parents()
    return parents[-1]
replace_node(old: str, new: str) -> None

Replace old Node with new. No error if there is no match.

Source code in framcore/components/Component.py
def replace_node(self, old: str, new: str) -> None:
    """Replace old Node with new. Not error if no match."""
    self._check_type(old, str)
    self._check_type(new, str)
    self._replace_node(old, new)

Demand

Demand class.

Demand

Bases: Component

Demand class representing a simple demand with possible reserve price. Subclass of Component.

Source code in framcore/components/Demand.py
class Demand(Component):
    """Demand class representing a simple demand with possible reserve price. Subclass of Component."""

    def __init__(
        self,
        node: str,
        capacity: FlowVolume | None = None,
        reserve_price: ReservePrice | None = None,
        elastic_demand: ElasticDemand | None = None,
        temperature_profile: Expr | str | TimeVector | None = None,
        consumption: AvgFlowVolume | None = None,
    ) -> None:
        """
        Initialize the Demand class.

        Args:
            node (str): Node which this Demand consumes power on.
            capacity (FlowVolume | None, optional): Maximum consumption capacity. Defaults to None.
            reserve_price (ReservePrice | None, optional): Price in node at which the Demand will stop consumption. Defaults to None.
            elastic_demand (ElasticDemand | None, optional): Describe changes in consumption based on commodity price in node. Defaults to None.
            temperature_profile (Expr | str | TimeVector | None, optional): Describe changes in consumption based on temperatures. Defaults to None.
            consumption (AvgFlowVolume | None, optional): Actual calculated consumption. Defaults to None.

        Raises:
            ValueError: When both reserve_price and elastic_demand are passed as arguments. This is ambiguous.

        """
        super().__init__()
        self._check_type(node, str)
        self._check_type(capacity, (FlowVolume, type(None)))
        self._check_type(reserve_price, (ReservePrice, type(None)))
        self._check_type(elastic_demand, (ElasticDemand, type(None)))
        self._check_type(consumption, (AvgFlowVolume, type(None)))

        if reserve_price is not None and elastic_demand is not None:
            message = "Cannot have 'reserve_price' and 'elastic_demand' at the same time."
            raise ValueError(message)

        self._node = node
        self._capacity = capacity
        self._reserve_price = reserve_price
        self._elastic_demand = elastic_demand
        self._temperature_profile = ensure_expr(temperature_profile, is_profile=True)

        if consumption is None:
            consumption = AvgFlowVolume()

        self._consumption: AvgFlowVolume = consumption

    def get_capacity(self) -> FlowVolume:
        """Get the capacity of the demand component."""
        return self._capacity

    def get_consumption(self) -> AvgFlowVolume:
        """Get the consumption of the demand component."""
        return self._consumption

    def get_node(self) -> str:
        """Get the node of the demand component."""
        return self._node

    def set_node(self, node: str) -> None:
        """Set the node of the demand component."""
        self._check_type(node, str)
        self._node = node

    def get_reserve_price(self) -> ReservePrice | None:
        """Get the reserve price level of the demand component."""
        return self._reserve_price

    def set_reserve_price(self, reserve_price: ReservePrice | None) -> None:
        """Set the reserve price level of the demand component."""
        self._check_type(reserve_price, (ReservePrice, type(None)))
        if self._elastic_demand is not None and reserve_price is not None:
            message = "Cannot set reserve_price when elastic_demand is not None."
            raise ValueError(message)
        self._reserve_price = reserve_price

    def get_elastic_demand(self) -> ElasticDemand | None:
        """Get the elastic demand of the demand component."""
        return self._elastic_demand

    def set_elastic_demand(self, elastic_demand: ElasticDemand | None) -> None:
        """Set the elastic demand of the demand component."""
        self._check_type(elastic_demand, (ElasticDemand, type(None)))
        if self._reserve_price is not None and elastic_demand is not None:
            message = "Cannot set elastic_demand when reserve_price is not None."
            raise ValueError(message)
        self._elastic_demand = elastic_demand

    def get_temperature_profile(self) -> Expr | None:
        """Get the temperature profile of the demand component."""
        return self._temperature_profile

    def set_temperature_profile(self, temperature_profile: Expr | str | TimeVector | None) -> None:
        """Set the temperature profile of the demand component."""
        self._check_type(temperature_profile, (Expr, str, TimeVector, type(None)))
        self._temperature_profile = ensure_expr(temperature_profile, is_profile=True)

    """Implementation of Component interface"""

    def _replace_node(self, old: str, new: str) -> None:
        if old == self._node:
            self._node = new
        else:
            message = f"{old} not found in {self}. Expected existing node {self._node}."
            raise ValueError(message)

    def _get_simpler_components(self, base_name: str) -> dict[str, Component]:
        return {base_name + "_Flow": self._create_flow()}

    def _create_flow(self) -> Flow:
        is_exogenous = self._elastic_demand is None and self._reserve_price is None

        flow = Flow(
            main_node=self._node,
            max_capacity=self._capacity,
            min_capacity=self._capacity if is_exogenous else None,
            volume=self._consumption,
            arrow_volumes=None,
            is_exogenous=is_exogenous,
        )

        power_arrow = Arrow(self._node, False, conversion=Conversion(value=1))
        flow.add_arrow(power_arrow)

        if self._reserve_price is not None:
            flow.add_cost_term("reserve_price", self._reserve_price)

        # TODO: Implement correctly when Curve is ready. For now, model as inelastic consumer w. reserve_price
        elif self._elastic_demand is not None:
            price = self._elastic_demand.get_max_price()
            reserve_price = ReservePrice(level=price.get_level(), profile=price.get_profile())
            flow.add_cost_term("reserve_price", cost_term=reserve_price)

        return flow
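
A hedged usage sketch of the decomposition above: an inelastic Demand yields a single exogenous Flow, keyed by base_name + "_Flow" (the import path is an assumption):

from framcore.components import Demand  # assumed import path

demand = Demand(node="NO1_power")  # no reserve_price or elastic_demand
simple = demand.get_simpler_components("my_demand")
flow = simple["my_demand_Flow"]
assert flow.is_exogenous()  # inelastic demand becomes a fixed flow
assert flow.get_main_node() == "NO1_power"
assert flow.get_parent() is demand  # parent inserted by get_simpler_components
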
__init__(node: str, capacity: FlowVolume | None = None, reserve_price: ReservePrice | None = None, elastic_demand: ElasticDemand | None = None, temperature_profile: Expr | str | TimeVector | None = None, consumption: AvgFlowVolume | None = None) -> None

Initialize the Demand class.

Parameters:

Name Type Description Default
node str

Node which this Demand consumes power on.

required
capacity FlowVolume | None

Maximum consumption capacity. Defaults to None.

None
reserve_price ReservePrice | None

Price in node at which the Demand will stop consumption. Defaults to None.

None
elastic_demand ElasticDemand | None

Describe changes in consumption based on commodity price in node. Defaults to None.

None
temperature_profile Expr | str | TimeVector | None

Describe changes in consumption based on temperatures. Defaults to None.

None
consumption AvgFlowVolume | None

Actual calculated consumption. Defaults to None.

None

Raises:

Type Description
ValueError

When both reserve_price and elastic_demand are passed as arguments. This is ambiguous.

Source code in framcore/components/Demand.py
def __init__(
    self,
    node: str,
    capacity: FlowVolume | None = None,
    reserve_price: ReservePrice | None = None,
    elastic_demand: ElasticDemand | None = None,
    temperature_profile: Expr | str | TimeVector | None = None,
    consumption: AvgFlowVolume | None = None,
) -> None:
    """
    Initialize the Demand class.

    Args:
        node (str): Node which this Demand consumes power on.
        capacity (FlowVolume | None, optional): Maximum consumption capacity. Defaults to None.
        reserve_price (ReservePrice | None, optional): Price in node at which the Demand will stop consumption. Defaults to None.
        elastic_demand (ElasticDemand | None, optional): Describe changes in consumption based on commodity price in node. Defaults to None.
        temperature_profile (Expr | str | TimeVector | None, optional): Describe changes in consumption based on temperatures. Defaults to None.
        consumption (AvgFlowVolume | None, optional): Actual calculated consumption. Defaults to None.

    Raises:
        ValueError: When both reserve_price and elastic_demand are passed as arguments. This is ambiguous.

    """
    super().__init__()
    self._check_type(node, str)
    self._check_type(capacity, (FlowVolume, type(None)))
    self._check_type(reserve_price, (ReservePrice, type(None)))
    self._check_type(elastic_demand, (ElasticDemand, type(None)))
    self._check_type(consumption, (AvgFlowVolume, type(None)))

    if reserve_price is not None and elastic_demand is not None:
        message = "Cannot have 'reserve_price' and 'elastic_demand' at the same time."
        raise ValueError(message)

    self._node = node
    self._capacity = capacity
    self._reserve_price = reserve_price
    self._elastic_demand = elastic_demand
    self._temperature_profile = ensure_expr(temperature_profile, is_profile=True)

    if consumption is None:
        consumption = AvgFlowVolume()

    self._consumption: AvgFlowVolume = consumption
get_capacity() -> FlowVolume

Get the capacity of the demand component.

Source code in framcore/components/Demand.py
def get_capacity(self) -> FlowVolume:
    """Get the capacity of the demand component."""
    return self._capacity
get_consumption() -> AvgFlowVolume

Get the consumption of the demand component.

Source code in framcore/components/Demand.py
def get_consumption(self) -> AvgFlowVolume:
    """Get the consumption of the demand component."""
    return self._consumption
get_elastic_demand() -> ElasticDemand | None

Get the elastic demand of the demand component.

Source code in framcore/components/Demand.py
def get_elastic_demand(self) -> ElasticDemand | None:
    """Get the elastic demand of the demand component."""
    return self._elastic_demand
get_node() -> str

Get the node of the demand component.

Source code in framcore/components/Demand.py
def get_node(self) -> str:
    """Get the node of the demand component."""
    return self._node
get_reserve_price() -> ReservePrice | None

Get the reserve price level of the demand component.

Source code in framcore/components/Demand.py
def get_reserve_price(self) -> ReservePrice | None:
    """Get the reserve price level of the demand component."""
    return self._reserve_price
get_temperature_profile() -> Expr | None

Get the temperature profile of the demand component.

Source code in framcore/components/Demand.py
def get_temperature_profile(self) -> Expr | None:
    """Get the temperature profile of the demand component."""
    return self._temperature_profile
set_elastic_demand(elastic_demand: ElasticDemand | None) -> None

Set the elastic demand of the demand component.

Source code in framcore/components/Demand.py
def set_elastic_demand(self, elastic_demand: ElasticDemand | None) -> None:
    """Set the elastic demand of the demand component."""
    self._check_type(elastic_demand, (ElasticDemand, type(None)))
    if self._reserve_price is not None and elastic_demand is not None:
        message = "Cannot set elastic_demand when reserve_price is not None."
        raise ValueError(message)
    self._elastic_demand = elastic_demand
set_node(node: str) -> None

Set the node of the demand component.

Source code in framcore/components/Demand.py
def set_node(self, node: str) -> None:
    """Set the node of the demand component."""
    self._check_type(node, str)
    self._node = node
set_reserve_price(reserve_price: ReservePrice | None) -> None

Set the reserve price level of the demand component.

Source code in framcore/components/Demand.py
def set_reserve_price(self, reserve_price: ReservePrice | None) -> None:
    """Set the reserve price level of the demand component."""
    self._check_type(reserve_price, (ReservePrice, type(None)))
    if self._elastic_demand is not None and reserve_price is not None:
        message = "Cannot set reserve_price when elastic_demand is not None."
        raise ValueError(message)
    self._reserve_price = reserve_price
set_temperature_profile(temperature_profile: Expr | str | TimeVector | None) -> None

Set the temperature profile of the demand component.

Source code in framcore/components/Demand.py
def set_temperature_profile(self, temperature_profile: Expr | str | TimeVector | None) -> None:
    """Set the temperature profile of the demand component."""
    self._check_type(temperature_profile, (Expr, str, TimeVector, type(None)))
    self._temperature_profile = ensure_expr(temperature_profile, is_profile=True)

Flow

Flow

Bases: Component

Represents a commodity flow in or out of one or more nodes. Can have Attributes and Metadata.

Main attributes are arrows, main_node, max_capacity, min_capacity, startupcost, and whether it is exogenous.

An Arrow describes the contribution of a Flow to a Node. It has a direction to determine input or output, and parameters for the contribution of the Flow to the Node (conversion, efficiency, loss). Nodes, Flows and Arrows are the main building blocks in FRAM's low-level representation of energy systems.

Source code in framcore/components/Flow.py
class Flow(Component):
    """
    Represents a commodity flow in or out of one or more nodes. Can have Attributes and Metadata.

    Main attributes are arrows, main_node, max_capacity, min_capacity, startupcost, and whether it is exogenous.

    An Arrow describes the contribution of a Flow to a Node. It has a direction to determine input or output,
    and parameters for the contribution of the Flow to the Node (conversion, efficiency, loss).
    Nodes, Flows and Arrows are the main building blocks in FRAM's low-level representation of energy systems.
    """

    def __init__(
        self,
        main_node: str,
        max_capacity: FlowVolume | None = None,
        min_capacity: FlowVolume | None = None,
        startupcost: StartUpCost | None = None,
        volume: AvgFlowVolume | None = None,
        arrow_volumes: dict[Arrow, AvgFlowVolume] | None = None,
        is_exogenous: bool = False,
    ) -> None:
        """
        Initialize Flow with main node, capacity, and startup cost.

        Args:
            main_node (str): Node which the Flow is primarily associated with.
            max_capacity (FlowVolume | None, optional): Maximum capacity of the Flow. Defaults to None.
            min_capacity (FlowVolume | None, optional): Minimum capacity of the Flow. Defaults to None.
            startupcost (StartUpCost | None, optional): Costs associated with starting up this Flow. Defaults to None.
            volume (AvgFlowVolume | None, optional): The actual volume carried by this Flow at a given moment. Defaults to None.
            arrow_volumes (dict[Arrow, AvgFlowVolume] | None, optional): Possibility to store a version of volume for each Arrow. Can account for conversion,
            efficiency and loss to represent the result for different commodities and units. Defaults to None.
            is_exogenous (bool, optional): Flag denoting if a Solver should calculate the volumes associated with this flow or use its predefined volume.
                                           Defaults to False.

        """
        super().__init__()
        self._check_type(main_node, str)
        self._check_type(max_capacity, (FlowVolume, type(None)))
        self._check_type(min_capacity, (FlowVolume, type(None)))
        self._check_type(startupcost, (StartUpCost, type(None)))
        self._check_type(volume, (FlowVolume, type(None)))
        self._check_type(arrow_volumes, (dict, type(None)))
        self._main_node: str = main_node
        self._max_capacity = max_capacity
        self._min_capacity = min_capacity
        self._startupcost = startupcost
        self._arrows: set[Arrow] = set()
        self._cost_terms: dict[str, ObjectiveCoefficient] = dict()
        self._is_exogenous: bool = is_exogenous

        if not volume:
            volume = AvgFlowVolume()
        self._volume: AvgFlowVolume = volume

        if arrow_volumes is None:
            arrow_volumes = dict()
        self._arrow_volumes: dict[Arrow, AvgFlowVolume] = arrow_volumes

    def is_exogenous(self) -> bool:
        """Return True if Flow is exogenous."""
        return self._is_exogenous

    def set_exogenous(self) -> None:
        """
        Treat flow as fixed variable.

        Use volume if it exists.
        If no volume, then try to use
        min_capacity and max_capacity, which must
        be equal. Error if this fails.
        """
        self._is_exogenous = True

    def set_endogenous(self) -> None:
        """
        Treat flow as decision variable.

        Volume should be updated with results after a solve.
        """
        self._is_exogenous = False

    def get_main_node(self) -> str:
        """Get the main node of the flow."""
        return self._main_node

    def get_volume(self) -> AvgFlowVolume:
        """Get the volume of the flow."""
        return self._volume

    def get_arrow_volumes(self) -> dict[Arrow, AvgFlowVolume]:
        """Get dict of volume converted to volume at node pointed to by Arrow."""
        return self._arrow_volumes

    def get_max_capacity(self) -> FlowVolume | None:
        """Get the maximum capacity of the flow."""
        return self._max_capacity

    def set_max_capacity(self, capacity: FlowVolume | None) -> None:
        """Set the maximum capacity of the flow."""
        self._check_type(capacity, (FlowVolume, type(None)))
        self._max_capacity = capacity

    def get_min_capacity(self) -> FlowVolume | None:
        """Get the minimum capacity of the flow."""
        return self._min_capacity

    def set_min_capacity(self, capacity: FlowVolume | None) -> None:
        """Set the minimum capacity of the flow."""
        self._check_type(capacity, (FlowVolume, type(None)))
        self._min_capacity = capacity

    def get_startupcost(self) -> StartUpCost | None:
        """Get the startup cost of the flow."""
        self._check_type(self._startupcost, (StartUpCost, type(None)))
        return self._startupcost

    def set_startupcost(self, startupcost: StartUpCost | None) -> None:
        """Set the startup cost of the flow."""
        self._check_type(startupcost, (StartUpCost, type(None)))
        self._startupcost = startupcost

    def get_arrows(self) -> set[Arrow]:
        """Get the arrows of the flow."""
        return self._arrows

    def add_arrow(self, arrow: Arrow) -> None:
        """Add an arrow to the flow."""
        self._check_type(arrow, Arrow)
        self._arrows.add(arrow)

    def add_cost_term(self, key: str, cost_term: ObjectiveCoefficient) -> None:
        """Add a cost term to the flow."""
        self._check_type(key, str)
        self._check_type(cost_term, ObjectiveCoefficient)
        self._cost_terms[key] = cost_term

    def get_cost_terms(self) -> dict[str, ObjectiveCoefficient]:
        """Get the cost terms of the flow."""
        return self._cost_terms

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add loaders stored in attributes to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_volume())
        add_loaders_if(loaders, self.get_max_capacity())
        add_loaders_if(loaders, self.get_min_capacity())

        for cost in self.get_cost_terms().values():
            add_loaders_if(loaders, cost)

        for arrow in self.get_arrows():
            add_loaders_if(loaders, arrow)

        for volume in self.get_arrow_volumes().values():
            add_loaders_if(loaders, volume)

    def _replace_node(self, old: str, new: str) -> None:
        # Component.replace_node does input type check
        if old == self._main_node:
            self._main_node = new
        for a in self._arrows:
            a: Arrow
            if a.get_node() == old:
                a.set_node(new)
                return

    def _get_simpler_components(self, _: str) -> dict[str, Component]:
        return dict()

    def _get_fingerprint(self) -> Fingerprint:
        refs = {}
        refs["_main_node"] = self._main_node

        return self.get_fingerprint_default()
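
A hedged sketch of wiring a Flow between two nodes with Arrows, following the same pattern Demand and HydroModule use internally (import paths are assumptions):

from framcore.components import Arrow, Flow  # assumed import paths
from framcore.attributes import Conversion  # assumed import path

line = Flow(main_node="NO1_power")
# Outgoing arrow from the main node, ingoing arrow into the other node.
line.add_arrow(Arrow("NO1_power", False, conversion=Conversion(value=1)))
line.add_arrow(Arrow("NO2_power", True, conversion=Conversion(value=0.98)))
assert len(line.get_arrows()) == 2
assert not line.is_exogenous()  # endogenous by default
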
__init__(main_node: str, max_capacity: FlowVolume | None = None, min_capacity: FlowVolume | None = None, startupcost: StartUpCost | None = None, volume: AvgFlowVolume | None = None, arrow_volumes: dict[Arrow, AvgFlowVolume] | None = None, is_exogenous: bool = False) -> None

Initialize Flow with main node, capacity, and startup cost.

Parameters:

Name Type Description Default
main_node str

Node which the Flow is primarily associated with.

required
max_capacity FlowVolume | None

Maximum capacity of the Flow. Defaults to None.

None
min_capacity FlowVolume | None

Minimum capacity of the Flow. Defaults to None.

None
startupcost StartUpCost | None

Costs associated with starting up this Flow. Defaults to None.

None
volume AvgFlowVolume | None

The actual volume carried by this Flow at a given moment. Defaults to None.

None
arrow_volumes dict[Arrow, AvgFlowVolume] | None

Possibility to store a version of the volume for each Arrow. Can account for conversion, efficiency and loss to represent the result for different commodities and units.

None
is_exogenous bool

Flag denoting if a Solver should calculate the volumes associated with this flow or use its predefined volume. Defaults to False.

False
Source code in framcore/components/Flow.py
def __init__(
    self,
    main_node: str,
    max_capacity: FlowVolume | None = None,
    min_capacity: FlowVolume | None = None,
    startupcost: StartUpCost | None = None,
    volume: AvgFlowVolume | None = None,
    arrow_volumes: dict[Arrow, AvgFlowVolume] | None = None,
    is_exogenous: bool = False,
) -> None:
    """
    Initialize Flow with main node, capacity, and startup cost.

    Args:
        main_node (str): Node which the Flow is primarily associated with.
        max_capacity (FlowVolume | None, optional): Maximum capacity of the Flow. Defaults to None.
        min_capacity (FlowVolume | None, optional): Minimum capacity of the Flow. Defaults to None.
        startupcost (StartUpCost | None, optional): Costs associated with starting up this Flow. Defaults to None.
        volume (AvgFlowVolume | None, optional): The actual volume carried by this Flow at a given moment. Defaults to None.
        arrow_volumes (dict[Arrow, AvgFlowVolume] | None, optional): Possibility to store a version of volume for each Arrow. Can account for conversion,
        efficiency and loss to represent the result for different commodities and units. Defaults to None.
        is_exogenous (bool, optional): Flag denoting if a Solver should calculate the volumes associated with this flow or use its predefined volume.
                                       Defaults to False.

    """
    super().__init__()
    self._check_type(main_node, str)
    self._check_type(max_capacity, (FlowVolume, type(None)))
    self._check_type(min_capacity, (FlowVolume, type(None)))
    self._check_type(startupcost, (StartUpCost, type(None)))
    self._check_type(volume, (FlowVolume, type(None)))
    self._check_type(arrow_volumes, (dict, type(None)))
    self._main_node: str = main_node
    self._max_capacity = max_capacity
    self._min_capacity = min_capacity
    self._startupcost = startupcost
    self._arrows: set[Arrow] = set()
    self._cost_terms: dict[str, ObjectiveCoefficient] = dict()
    self._is_exogenous: bool = is_exogenous

    if not volume:
        volume = AvgFlowVolume()
    self._volume: AvgFlowVolume = volume

    if arrow_volumes is None:
        arrow_volumes = dict()
    self._arrow_volumes: dict[Arrow, AvgFlowVolume] = arrow_volumes
add_arrow(arrow: Arrow) -> None

Add an arrow to the flow.

Source code in framcore/components/Flow.py
def add_arrow(self, arrow: Arrow) -> None:
    """Add an arrow to the flow."""
    self._check_type(arrow, Arrow)
    self._arrows.add(arrow)
add_cost_term(key: str, cost_term: ObjectiveCoefficient) -> None

Add a cost term to the flow.

Source code in framcore/components/Flow.py
def add_cost_term(self, key: str, cost_term: ObjectiveCoefficient) -> None:
    """Add a cost term to the flow."""
    self._check_type(key, str)
    self._check_type(cost_term, ObjectiveCoefficient)
    self._cost_terms[key] = cost_term
add_loaders(loaders: set[Loader]) -> None

Add loaders stored in attributes to loaders.

Source code in framcore/components/Flow.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add loaders stored in attributes to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_volume())
    add_loaders_if(loaders, self.get_max_capacity())
    add_loaders_if(loaders, self.get_min_capacity())

    for cost in self.get_cost_terms().values():
        add_loaders_if(loaders, cost)

    for arrow in self.get_arrows():
        add_loaders_if(loaders, arrow)

    for volume in self.get_arrow_volumes().values():
        add_loaders_if(loaders, volume)
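
A short usage sketch, assuming some_flow is an existing Flow instance and Loader is importable:

loaders: set[Loader] = set()
some_flow.add_loaders(loaders)
# add_loaders_if silently skips attributes that carry no Loader, so the
# set ends up with only the data sources that actually need loading.
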
get_arrow_volumes() -> dict[Arrow, AvgFlowVolume]

Get dict of volume converted to volume at node pointed to by Arrow.

Source code in framcore/components/Flow.py
def get_arrow_volumes(self) -> dict[Arrow, AvgFlowVolume]:
    """Get dict of volume converted to volume at node pointed to by Arrow."""
    return self._arrow_volumes
get_arrows() -> set[Arrow]

Get the arrows of the flow.

Source code in framcore/components/Flow.py
def get_arrows(self) -> set[Arrow]:
    """Get the arrows of the flow."""
    return self._arrows
get_cost_terms() -> dict[str, ObjectiveCoefficient]

Get the cost terms of the flow.

Source code in framcore/components/Flow.py
def get_cost_terms(self) -> dict[str, ObjectiveCoefficient]:
    """Get the cost terms of the flow."""
    return self._cost_terms
get_main_node() -> str

Get the main node of the flow.

Source code in framcore/components/Flow.py
def get_main_node(self) -> str:
    """Get the main node of the flow."""
    return self._main_node
get_max_capacity() -> FlowVolume | None

Get the maximum capacity of the flow.

Source code in framcore/components/Flow.py
def get_max_capacity(self) -> FlowVolume | None:
    """Get the maximum capacity of the flow."""
    return self._max_capacity
get_min_capacity() -> FlowVolume | None

Get the minimum capacity of the flow.

Source code in framcore/components/Flow.py
def get_min_capacity(self) -> FlowVolume | None:
    """Get the minimum capacity of the flow."""
    return self._min_capacity
get_startupcost() -> StartUpCost | None

Get the startup cost of the flow.

Source code in framcore/components/Flow.py
def get_startupcost(self) -> StartUpCost | None:
    """Get the startup cost of the flow."""
    self._check_type(self._startupcost, (StartUpCost, type(None)))
    return self._startupcost
get_volume() -> AvgFlowVolume

Get the volume of the flow.

Source code in framcore/components/Flow.py
def get_volume(self) -> AvgFlowVolume:
    """Get the volume of the flow."""
    return self._volume
is_exogenous() -> bool

Return True if Flow is exogenous.

Source code in framcore/components/Flow.py
def is_exogenous(self) -> bool:
    """Return True if Flow is exogenous."""
    return self._is_exogenous
set_endogenous() -> None

Treat flow as decision variable.

Volume should be updated with results after a solve.

Source code in framcore/components/Flow.py
def set_endogenous(self) -> None:
    """
    Treat flow as decision variable.

    Volume should be updated with results after a solve.
    """
    self._is_exogenous = False
set_exogenous() -> None

Treat flow as fixed variable.

Use volume if it exists. If no volume, then try to use min_capacity and max_capacity, which must be equal. Error if this fails.

Source code in framcore/components/Flow.py
def set_exogenous(self) -> None:
    """
    Treat flow as fixed variable.

    Use volume if it exists.
    If no volume, then try to use
    min_capacity and max_capacity, which must
    be equal. Error if this fails.
    """
    self._is_exogenous = True
set_max_capacity(capacity: FlowVolume | None) -> None

Set the maximum capacity of the flow.

Source code in framcore/components/Flow.py
def set_max_capacity(self, capacity: FlowVolume | None) -> None:
    """Set the maximum capacity of the flow."""
    self._check_type(capacity, (FlowVolume, type(None)))
    self._max_capacity = capacity
set_min_capacity(capacity: FlowVolume | None) -> None

Set the minimum capacity of the flow.

Source code in framcore/components/Flow.py
def set_min_capacity(self, capacity: FlowVolume | None) -> None:
    """Set the minimum capacity of the flow."""
    self._check_type(capacity, (FlowVolume, type(None)))
    self._min_capacity = capacity
set_startupcost(startupcost: StartUpCost | None) -> None

Set the startup cost of the flow.

Source code in framcore/components/Flow.py
def set_startupcost(self, startupcost: StartUpCost | None) -> None:
    """Set the startup cost of the flow."""
    self._check_type(startupcost, (StartUpCost, type(None)))
    self._startupcost = startupcost

HydroModule

HydroModule

Bases: Component

A HydroModule represents a physical element in a river system, with its topology and other attributes.

The HydroModule can contain a HydroReservoir, HydroGenerator, HydroPump, HydroBypass and local inflow, as well as the topological attributes release_to and spill_to:

  • HydroGenerator uses the release pathway of the HydroModule to generate power, while HydroPump has its own waterway that consumes power. Both HydroGenerator and HydroPump connect to power nodes.
  • HydroBypass also has attributes that define the topology of the river system.
  • HydroReservoir represents the water storage of the HydroModule.
  • The hydraulic_coupling attribute is used to identify which HydroModules have hydraulically coupled reservoirs.

Results for the release volume, spill volume and the water value are stored directly in the HydroModule, while the production, pumping, reservoir volume and bypass volume are stored in the attributes.

HydroModule is compatible with HydroAggregator for aggregation of multiple HydroModules into one.
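
A hedged sketch of a two-module river chain built with the constructor shown below (the import path and module names are illustrative):

from framcore.components import HydroModule  # assumed import path

lower = HydroModule()  # bottom of the chain; releases leave the system
upper = HydroModule(release_to="lower", spill_to="lower")
# Registered in a model under the names "upper" and "lower",
# upper.get_simpler_components("upper") creates "upper_node",
# "upper_release_flow" and "upper_spill_flow", with the release flow
# pointing an ingoing Arrow at "lower_node".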

Source code in framcore/components/HydroModule.py
class HydroModule(Component):
    """
    A HydroModule represents a physical element in a river system, with its topology and other attributes.

    The HydroModule can contain a HydroReservoir, HydroGenerator, HydroPump, HydroBypass and local inflow, as well as the topological attributes release_to
    and spill_to:

    - HydroGenerator uses the release pathway of the HydroModule to generate power, while HydroPump has its own waterway that consumes
      power. Both HydroGenerator and HydroPump connect to power nodes.
    - HydroBypass also has attributes that define the topology of the river system.
    - HydroReservoir represents the water storage of the HydroModule.
    - The hydraulic_coupling attribute is used to identify which HydroModules have hydraulically coupled reservoirs.


    Results for the release volume, spill volume and the water value are stored directly in the HydroModule, while the production, pumping,
    reservoir volume and bypass volume are stored in the attributes.

    HydroModule is compatible with HydroAggregator for aggregation of multiple HydroModules into one.

    """

    # We add this to module name to get corresponding node name
    _NODE_NAME_POSTFIX = "_node"

    def __init__(
        self,
        release_to: str | None = None,  # Must be reference to another HydroModule
        release_capacity: FlowVolume | None = None,
        generator: HydroGenerator | None = None,  # attribute
        pump: HydroPump | None = None,
        inflow: AvgFlowVolume | None = None,
        reservoir: HydroReservoir | None = None,  # attribute
        hydraulic_coupling: int = 0,
        bypass: HydroBypass | None = None,  # attribute
        spill_to: str | None = None,  # Must be reference to another HydroModule
        commodity: str = "Hydro",
        water_value: WaterValue | None = None,
        release_volume: AvgFlowVolume | None = None,
        spill_volume: AvgFlowVolume | None = None,
    ) -> None:
        """
        Initialize the HydroModule with its parameters.

        Args:
            release_to (str | None, optional): Reference to another HydroModule which receives the water releases through the main release. Defaults to None.
            release_capacity (FlowVolume | None): Amount of water which can be released via main release at a given moment. Defaults to None.
            generator (HydroGenerator | None, optional): Represents generation of electricity from the movement of water through the Modules main release
                                                        pathway. Defaults to None.
            pump (HydroPump | None): Pump associated with this Module. Can move water to another using power. Defaults to None.
            inflow (AvgFlowVolume | None, optional): The local inflow of the HydroModule. Defaults to None.
            reservoir (HydroReservoir | None, optional): The Modules water storage. Defaults to None.
            hydraulic_coupling (int): Number other than 0 if the HydroModule's reservoir is hydraulically coupled to another reservoir. Defaults to 0.
                                                        TODO: Replace with HydraulicCoupling class
            bypass (HydroBypass | None, optional): Bypass water way. Defaults to None.
            spill_to (str | None): Reference to another Module receiving this one's spill volume. Defaults to None.
            commodity (str, optional): Commodity of the hydro node. Defaults to "Hydro".
            water_value (WaterValue | None, optional): Water value of the reservoir in currency per water volume. Defaults to None.
                                                        TODO: Allow water values with multiple dimensions?
            release_volume (AvgFlowVolume | None, optional): Volume of water released via main waterway. Defaults to None.
            spill_volume (AvgFlowVolume | None, optional): Volume of water spilled. Defaults to None.

        """
        super().__init__()
        self._check_type(release_to, (str, type(None)))
        self._check_type(release_capacity, (FlowVolume, type(None)))
        self._check_type(generator, (HydroGenerator, type(None)))
        self._check_type(pump, (HydroPump, type(None)))
        self._check_type(inflow, (AvgFlowVolume, type(None)))
        self._check_type(reservoir, (HydroReservoir, type(None)))
        self._check_type(hydraulic_coupling, int)
        self._check_type(bypass, (HydroBypass, type(None)))
        self._check_type(spill_to, (str, type(None)))
        self._check_type(commodity, str)
        self._check_type(water_value, (WaterValue, type(None)))
        self._check_type(release_volume, (AvgFlowVolume, type(None)))
        self._check_type(spill_volume, (AvgFlowVolume, type(None)))

        self._release_to = release_to
        self._release_capacity = release_capacity
        self._generator = generator
        self._pump = pump
        self._inflow = inflow
        self._reservoir = reservoir
        self._hydraulic_coupling = hydraulic_coupling
        self._bypass = bypass
        self._spill_to = spill_to
        self._commodity = commodity

        if not water_value:
            water_value = WaterValue()

        if not release_volume:
            release_volume = AvgFlowVolume()

        if not spill_volume:
            spill_volume = AvgFlowVolume()

        self._water_value: WaterValue = water_value
        self._release_volume: AvgFlowVolume = release_volume
        self._spill_volume: AvgFlowVolume = spill_volume

    def get_release_capacity(self) -> FlowVolume | None:
        """Get the capacity of the thermal unit."""
        return self._release_capacity

    def get_hydraulic_coupling(self) -> int:
        """Get the Modules hydraulic code."""
        return self._hydraulic_coupling

    def get_reservoir(self) -> HydroReservoir | None:
        """Get the reservoir of the hydro module."""
        return self._reservoir

    def set_reservoir(self, reservoir: HydroReservoir | None) -> None:
        """Set the reservoir of the hydro module."""
        self._check_type(reservoir, (HydroReservoir, type(None)))
        self._reservoir = reservoir

    def get_pump(self) -> HydroPump | None:
        """Get the pump of the hydro module."""
        return self._pump

    def set_pump(self, pump: HydroPump | None) -> None:
        """Set the pump of the hydro module."""
        self._check_type(pump, (HydroPump, type(None)))
        self._pump = pump

    def get_generator(self) -> HydroGenerator | None:
        """Get the generator of the hydro module."""
        return self._generator

    def set_generator(self, generator: HydroGenerator | None) -> None:
        """Set the generator of the hydro module."""
        self._check_type(generator, (HydroGenerator, type(None)))
        self._generator = generator

    def get_bypass(self) -> HydroBypass | None:
        """Get the bypass of the hydro module."""
        return self._bypass

    def set_bypass(self, bypass: HydroBypass | None) -> None:
        """Set the bypass of the hydro module."""
        self._check_type(bypass, (HydroBypass, type(None)))
        self._bypass = bypass

    def get_inflow(self) -> AvgFlowVolume | None:
        """Get the inflow of the hydro module."""
        return self._inflow

    def set_inflow(self, inflow: AvgFlowVolume | None) -> None:
        """Set the inflow of the hydro module."""
        self._check_type(inflow, (AvgFlowVolume, type(None)))
        self._inflow = inflow

    def get_release_to(self) -> str | None:
        """Get the release_to module of the hydro module."""
        return self._release_to

    def set_release_to(self, release_to: str | None) -> None:
        """Set the release_to module of the hydro module."""
        self._check_type(release_to, (str, type(None)))
        self._release_to = release_to

    def get_spill_to(self) -> str | None:
        """Get the spill_to module of the hydro module."""
        return self._spill_to

    def get_water_value(self) -> WaterValue:
        """Get water value at the hydro node."""
        return self._water_value

    def get_release_volume(self) -> FlowVolume:
        """Get the release_volume volume of the thermal unit."""
        return self._release_volume

    def get_spill_volume(self) -> FlowVolume:
        """Get the spill_volume volume of the thermal unit."""
        return self._spill_volume

    """Implementation of Component interface"""

    def _replace_node(self, old: str, new: str) -> None:
        if self._pump and old == self._pump.get_power_node():
            self._pump.set_power_node(new)
        if self._generator and old == self._generator.get_power_node():
            self._generator.set_power_node(new)

    def _get_simpler_components(self, module_name: str) -> dict[str, Component]:
        out: dict[str, Component] = {}

        node_name = module_name + self._NODE_NAME_POSTFIX

        out[node_name] = self._create_hydro_node()
        out[module_name + "_release_flow"] = self._create_release_flow(node_name)
        out[module_name + "_spill_flow"] = self._create_spill_flow(node_name)

        if self._inflow is not None:
            out[module_name + "_inflow_flow"] = self._create_inflow_flow(node_name)

        if self._bypass is not None:
            out[module_name + "_bypass_flow"] = self._create_bypass_flow(node_name)

        if self._pump is not None:
            out[module_name + "_pump_flow"] = self._create_pump_flow(node_name)

        return out

    def _create_hydro_node(self) -> Node:
        return Node(
            commodity=self._commodity,
            price=self._water_value,
            storage=self._reservoir,
        )

    def _create_release_flow(self, node_name: str) -> Flow:
        # TODO: pq_curve, nominal_head, tailwater_elevation
        flow = Flow(
            main_node=node_name,
            max_capacity=self._release_capacity,
            volume=self._release_volume,
            startupcost=None,
            arrow_volumes=None,
            is_exogenous=False,
        )

        arrow_volumes = flow.get_arrow_volumes()

        outgoing_arrow = Arrow(
            node=node_name,
            is_ingoing=False,
            conversion=Conversion(value=1),
        )

        flow.add_arrow(outgoing_arrow)

        if self._release_to:
            flow.add_arrow(
                Arrow(
                    node=self._release_to + self._NODE_NAME_POSTFIX,
                    is_ingoing=True,
                    conversion=Conversion(value=1),
                ),
            )

        if self._generator:
            production_arrow = Arrow(
                node=self._generator.get_power_node(),
                is_ingoing=True,
                conversion=self._generator.get_energy_equivalent(),
            )
            flow.add_arrow(production_arrow)
            arrow_volumes[production_arrow] = self._generator.get_production()

            if self._generator.get_voc() is not None:
                flow.add_cost_term("VOC", self._generator.get_voc())

        return flow

    def _create_spill_flow(self, node_name: str) -> Flow:
        flow = Flow(
            main_node=node_name,
            max_capacity=None,
            volume=self._spill_volume,
        )

        flow.add_arrow(
            Arrow(
                node=node_name,
                is_ingoing=False,
                conversion=Conversion(value=1),
            ),
        )

        if self._spill_to is not None:
            flow.add_arrow(
                Arrow(
                    node=self._spill_to + self._NODE_NAME_POSTFIX,
                    is_ingoing=True,
                    conversion=Conversion(value=1),
                ),
            )

        return flow

    def _create_bypass_flow(self, node_name: str) -> Flow:
        flow = Flow(
            main_node=node_name,
            max_capacity=self._bypass.get_capacity(),
            volume=self._bypass.get_volume(),
            is_exogenous=False,
        )

        flow.add_arrow(
            Arrow(
                node=node_name,
                is_ingoing=False,
                conversion=Conversion(value=1),
            ),
        )

        if self._bypass.get_to_module() is not None:
            flow.add_arrow(
                Arrow(
                    node=self._bypass.get_to_module() + self._NODE_NAME_POSTFIX,
                    is_ingoing=True,
                    conversion=Conversion(value=1),
                ),
            )

        return flow

    def _create_inflow_flow(self, node_name: str) -> Flow:
        flow = Flow(
            main_node=node_name,
            max_capacity=None,
            volume=self._inflow,
            is_exogenous=True,
        )

        flow.add_arrow(
            Arrow(
                node=node_name,
                is_ingoing=True,
                conversion=Conversion(value=1),
            ),
        )

        return flow

    def _create_pump_flow(self, node_name: str) -> Flow:
        # TODO: add rest of attributes

        arrow_volumes: dict[Arrow, FlowVolume] = dict()

        flow = Flow(
            main_node=node_name,
            max_capacity=self._pump.get_water_capacity(),
            volume=self._pump.get_water_consumption(),
            arrow_volumes=arrow_volumes,
            is_exogenous=False,
        )

        flow.add_arrow(
            Arrow(
                node=self._pump.get_to_module() + self._NODE_NAME_POSTFIX,
                is_ingoing=True,
                conversion=Conversion(value=1),
            ),
        )

        flow.add_arrow(
            Arrow(
                node=self._pump.get_from_module() + self._NODE_NAME_POSTFIX,
                is_ingoing=False,
                conversion=Conversion(value=1),
            ),
        )

        pump_arrow = Arrow(
            node=self._pump.get_power_node(),
            is_ingoing=False,
            conversion=self._pump.get_energy_equivalent(),
        )
        flow.add_arrow(pump_arrow)
        arrow_volumes[pump_arrow] = self._pump.get_power_consumption()

        return flow
__init__(release_to: str | None = None, release_capacity: FlowVolume | None = None, generator: HydroGenerator | None = None, pump: HydroPump | None = None, inflow: AvgFlowVolume | None = None, reservoir: HydroReservoir | None = None, hydraulic_coupling: int = 0, bypass: HydroBypass | None = None, spill_to: str | None = None, commodity: str = 'Hydro', water_value: WaterValue | None = None, release_volume: AvgFlowVolume | None = None, spill_volume: AvgFlowVolume | None = None) -> None

Initialize the HydroModule with its parameters.

Parameters:

Name Type Description Default
release_to str | None

Reference to another HydroModule which receives the water releases through the main release. Defaults to None.

None
release_capacity FlowVolume | None

Amount of water which can be released via main release at a given moment. Defaults to None.

None
generator HydroGenerator | None

Represents generation of electricity from the movement of water through the Module's main release pathway. Defaults to None.

None
pump HydroPump | None

Pump associated with this Module. Can move water to another Module using power. Defaults to None.

None
inflow AvgFlowVolume | None

The local inflow of the HydroModule. Defaults to None.

None
reservoir HydroReservoir | None

The Module's water storage. Defaults to None.

None
hydraulic_coupling int

Number other than 0 if the HydroModule's reservoir is hydraulically coupled to another reservoir. Defaults to 0. TODO: Replace with HydraulicCoupling class

0
bypass HydroBypass | None

Bypass waterway. Defaults to None.

None
spill_to str | None

Reference to another Module receiving this one's spill volume. Defaults to None.

None
commodity str

Commodity of the hydro node. Defaults to "Hydro".

'Hydro'
water_value WaterValue | None

Water value of the reservoir in currency per water volume. Defaults to None. TODO: Allow water values with multiple dimensions?

None
release_volume AvgFlowVolume | None

Volume of water released via main waterway. Defaults to None.

None
spill_volume AvgFlowVolume | None

Volume of water spilled. Defaults to None.

None
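
A minimal usage sketch, for orientation only: the import path is inferred from the source file location shown below, and every optional attribute (reservoir, generator, pump, bypass) is left at its None default.

from framcore.components.HydroModule import HydroModule  # import path assumed from the source file location

# Two modules in a cascade: "upper" releases and spills into "lower".
# release_to/spill_to are plain string references to the other module's name.
upper = HydroModule(release_to="lower", spill_to="lower", commodity="Hydro")
lower = HydroModule()  # terminal module; water value and volumes default to WaterValue()/AvgFlowVolume()

The string names must match whatever identifiers the surrounding model uses for these modules.
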
Source code in framcore/components/HydroModule.py
def __init__(
    self,
    release_to: str | None = None,  # Must be reference to another HydroModule
    release_capacity: FlowVolume | None = None,
    generator: HydroGenerator | None = None,  # attribute
    pump: HydroPump | None = None,
    inflow: AvgFlowVolume | None = None,
    reservoir: HydroReservoir | None = None,  # attribute
    hydraulic_coupling: int = 0,
    bypass: HydroBypass | None = None,  # attribute
    spill_to: str | None = None,  # Must be reference to another HydroModule
    commodity: str = "Hydro",
    water_value: WaterValue | None = None,
    release_volume: AvgFlowVolume | None = None,
    spill_volume: AvgFlowVolume | None = None,
) -> None:
    """
    Initialize the HydroModule with its parameters.

    Args:
        release_to (str | None, optional): Reference to another HydroModule which receives the water releases through the main release. Defaults to None.
        release_capacity (FlowVolume | None): Amount of water which can be released via main release at a given moment. Defaults to None.
        generator (HydroGenerator | None, optional): Represents generation of electricity from the movement of water through the Module's main release
                                                    pathway. Defaults to None.
        pump (HydroPump | None): Pump associated with this Module. Can move water to another Module using power. Defaults to None.
        inflow (AvgFlowVolume | None, optional): The local inflow of the HydroModule. Defaults to None.
        reservoir (HydroReservoir | None, optional): The Module's water storage. Defaults to None.
        hydraulic_coupling (int): Number other than 0 if the HydroModule's reservoir is hydraulically coupled to another reservoir. Defaults to 0.
                                                    TODO: Replace with HydraulicCoupling class
        bypass (HydroBypass | None, optional): Bypass waterway. Defaults to None.
        spill_to (str | None): Reference to another Module receiving this one's spill volume. Defaults to None.
        commodity (str, optional): Commodity of the hydro node. Defaults to "Hydro".
        water_value (WaterValue | None, optional): Water value of the reservoir in currency per water volume. Defaults to None.
                                                    TODO: Allow water values with multiple dimensions?
        release_volume (AvgFlowVolume | None, optional): Volume of water released via main waterway. Defaults to None.
        spill_volume (AvgFlowVolume | None, optional): Volume of water spilled. Defaults to None.

    """
    super().__init__()
    self._check_type(release_to, (str, type(None)))
    self._check_type(release_capacity, (FlowVolume, type(None)))
    self._check_type(generator, (HydroGenerator, type(None)))
    self._check_type(pump, (HydroPump, type(None)))
    self._check_type(inflow, (AvgFlowVolume, type(None)))
    self._check_type(reservoir, (HydroReservoir, type(None)))
    self._check_type(hydraulic_coupling, int)
    self._check_type(bypass, (HydroBypass, type(None)))
    self._check_type(spill_to, (str, type(None)))
    self._check_type(commodity, str)
    self._check_type(water_value, (WaterValue, type(None)))
    self._check_type(release_volume, (AvgFlowVolume, type(None)))
    self._check_type(spill_volume, (AvgFlowVolume, type(None)))

    self._release_to = release_to
    self._release_capacity = release_capacity
    self._generator = generator
    self._pump = pump
    self._inflow = inflow
    self._reservoir = reservoir
    self._hydraulic_coupling = hydraulic_coupling
    self._bypass = bypass
    self._spill_to = spill_to
    self._commodity = commodity

    if not water_value:
        water_value = WaterValue()

    if not release_volume:
        release_volume = AvgFlowVolume()

    if not spill_volume:
        spill_volume = AvgFlowVolume()

    self._water_value: WaterValue = water_value
    self._release_volume: AvgFlowVolume = release_volume
    self._spill_volume: AvgFlowVolume = spill_volume
get_bypass() -> HydroBypass | None

Get the bypass of the hydro module.

Source code in framcore/components/HydroModule.py
def get_bypass(self) -> HydroBypass | None:
    """Get the bypass of the hydro module."""
    return self._bypass
get_generator() -> HydroGenerator | None

Get the generator of the hydro module.

Source code in framcore/components/HydroModule.py
def get_generator(self) -> HydroGenerator | None:
    """Get the generator of the hydro module."""
    return self._generator
get_hydraulic_coupling() -> int

Get the Module's hydraulic coupling code.

Source code in framcore/components/HydroModule.py
def get_hydraulic_coupling(self) -> int:
    """Get the Modules hydraulic code."""
    return self._hydraulic_coupling
get_inflow() -> AvgFlowVolume | None

Get the inflow of the hydro module.

Source code in framcore/components/HydroModule.py
def get_inflow(self) -> AvgFlowVolume | None:
    """Get the inflow of the hydro module."""
    return self._inflow
get_pump() -> HydroPump | None

Get the pump of the hydro module.

Source code in framcore/components/HydroModule.py
def get_pump(self) -> HydroPump | None:
    """Get the pump of the hydro module."""
    return self._pump
get_release_capacity() -> FlowVolume | None

Get the release capacity of the hydro module.

Source code in framcore/components/HydroModule.py
def get_release_capacity(self) -> FlowVolume | None:
    """Get the capacity of the thermal unit."""
    return self._release_capacity
get_release_to() -> str | None

Get the release_to module of the hydro module.

Source code in framcore/components/HydroModule.py
def get_release_to(self) -> str | None:
    """Get the release_to module of the hydro module."""
    return self._release_to
get_release_volume() -> FlowVolume

Get the release volume of the hydro module.

Source code in framcore/components/HydroModule.py
def get_release_volume(self) -> FlowVolume:
    """Get the release_volume volume of the thermal unit."""
    return self._release_volume
get_reservoir() -> HydroReservoir | None

Get the reservoir of the hydro module.

Source code in framcore/components/HydroModule.py
def get_reservoir(self) -> HydroReservoir | None:
    """Get the reservoir of the hydro module."""
    return self._reservoir
get_spill_to() -> str | None

Get the spill_to module of the hydro module.

Source code in framcore/components/HydroModule.py
def get_spill_to(self) -> str | None:
    """Get the spill_to module of the hydro module."""
    return self._spill_to
get_spill_volume() -> FlowVolume

Get the spill volume of the hydro module.

Source code in framcore/components/HydroModule.py
def get_spill_volume(self) -> FlowVolume:
    """Get the spill_volume volume of the thermal unit."""
    return self._spill_volume
get_water_value() -> WaterValue

Get water value at the hydro node.

Source code in framcore/components/HydroModule.py
def get_water_value(self) -> WaterValue:
    """Get water value at the hydro node."""
    return self._water_value
set_bypass(bypass: HydroBypass | None) -> None

Set the bypass of the hydro module.

Source code in framcore/components/HydroModule.py
def set_bypass(self, bypass: HydroBypass | None) -> None:
    """Set the bypass of the hydro module."""
    self._check_type(bypass, (HydroBypass, type(None)))
    self._bypass = bypass
set_generator(generator: HydroGenerator | None) -> None

Set the generator of the hydro module.

Source code in framcore/components/HydroModule.py
def set_generator(self, generator: HydroGenerator | None) -> None:
    """Set the generator of the hydro module."""
    self._check_type(generator, (HydroGenerator, type(None)))
    self._generator = generator
set_inflow(inflow: AvgFlowVolume | None) -> None

Set the inflow of the hydro module.

Source code in framcore/components/HydroModule.py
def set_inflow(self, inflow: AvgFlowVolume | None) -> None:
    """Set the inflow of the hydro module."""
    self._check_type(inflow, (AvgFlowVolume, type(None)))
    self._inflow = inflow
set_pump(pump: HydroPump | None) -> None

Set the pump of the hydro module.

Source code in framcore/components/HydroModule.py
def set_pump(self, pump: HydroPump | None) -> None:
    """Set the pump of the hydro module."""
    self._check_type(pump, (HydroPump, type(None)))
    self._pump = pump
set_release_to(release_to: str | None) -> None

Set the release_to module of the hydro module.

Source code in framcore/components/HydroModule.py
def set_release_to(self, release_to: str | None) -> None:
    """Set the release_to module of the hydro module."""
    self._check_type(release_to, (str, type(None)))
    self._release_to = release_to
set_reservoir(reservoir: HydroReservoir | None) -> None

Set the reservoir of the hydro module.

Source code in framcore/components/HydroModule.py
def set_reservoir(self, reservoir: HydroReservoir | None) -> None:
    """Set the reservoir of the hydro module."""
    self._check_type(reservoir, (HydroReservoir, type(None)))
    self._reservoir = reservoir

Node

Node

Bases: Component

Represents a point in the energy system where a commodity can possibly be traded, stored or pass through.

A node is characterized by the commodity it handles, its price, and optionally storage capabilities. If the node is exogenous, the commodity can be bought and sold at a fixed price determined by the user. If the node is endogenous, the price is determined by the market dynamics at the Node.

Nodes, Flows and Arrows are the main building blocks in FRAM's low-level representation of energy systems. Movement between Nodes is represented by Flows and Arrows. Flows represent a commodity flow, and can have Arrows that each describe contribution of the Flow into a Node. The Arrows have direction to determine input or output, and parameters for the contribution of the Flow to the Node (conversion, efficiency and loss).
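
A minimal sketch of the endogenous/exogenous switch described above (only the documented signature is used; the import path is assumed from the source file location):

from framcore.components.Node import Node  # assumed import path

gas = Node(commodity="Gas")   # endogenous by default: price determined by market dynamics
gas.is_exogenous()            # False
gas.set_exogenous()           # Solvers should now use the pre-set price
gas.is_exogenous()            # True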

Source code in framcore/components/Node.py
class Node(Component):
    """
    Represents a point in the energy system where a commodity can possibly be traded, stored or pass through.

    A node is characterized by the commodity it handles, its price, and optionally storage capabilities. If the
    node is exogenous, the commodity can be bought and sold at a fixed price determined by the user.
    If the node is endogenous, the price is determined by the market dynamics at the Node.

    Nodes, Flows and Arrows are the main building blocks in FRAM's low-level representation of energy systems.
    Movement between Nodes is represented by Flows and Arrows. Flows represent a commodity flow,
    and can have Arrows that each describe contribution of the Flow into a Node.
    The Arrows have direction to determine input or output,
    and parameters for the contribution of the Flow to the Node (conversion, efficiency and loss).

    """

    def __init__(
        self,
        commodity: str,
        is_exogenous: bool = False,  # TODO
        price: ShadowPrice | None = None,
        storage: Storage | None = None,
    ) -> None:
        """
        Initialize the Node class.

        Args:
            commodity (str): Commodity at the Node. Power/electricity, gas, heat, etc.
            is_exogenous (bool, optional): Flag used to signal Solvers whether they should simulate the node endogenously or use the pre-set price.
                                           Defaults to False.
            price (ShadowPrice | None): Actual, calculated price of Commodity in this Node for each moment of simulation. Defaults to None.
            storage (Storage | None, optional): The amount of the Commodity stored on this Node. Defaults to None.

        """
        super().__init__()
        self._check_type(commodity, str)
        self._check_type(is_exogenous, bool)
        self._check_type(price, (ShadowPrice, type(None)))
        self._check_type(storage, (Storage, type(None)))

        self._commodity = commodity
        self._is_exogenous = is_exogenous

        self._storage = storage

        if price is None:
            price = Price()

        self._price: Price = price

    def set_exogenous(self) -> None:
        """Set the Node to be exogenous."""
        self._check_type(self._is_exogenous, bool)
        self._is_exogenous = True

    def set_endogenous(self) -> None:
        """Set the Node to be endogenous."""
        self._check_type(self._is_exogenous, bool)
        self._is_exogenous = False

    def is_exogenous(self) -> bool:
        """Return True if Node is exogenous (i.e. has fixed prices determined outside the model) else False."""
        return self._is_exogenous

    def get_price(self) -> ShadowPrice:
        """Return price."""
        return self._price

    def get_storage(self) -> Storage | None:
        """Get Storage if any."""
        return self._storage

    def get_commodity(self) -> str:
        """Return commodity."""
        return self._commodity

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add loaders stored in attributes to loaders."""
        from framcore.utils import add_loaders_if

        add_loaders_if(loaders, self.get_price())
        add_loaders_if(loaders, self.get_storage())

    def _replace_node(self, old: str, new: str) -> None:
        return None

    def _get_simpler_components(self, _: str) -> dict[str, Component]:
        return dict()
__init__(commodity: str, is_exogenous: bool = False, price: ShadowPrice | None = None, storage: Storage | None = None) -> None

Initialize the Node class.

Parameters:

Name Type Description Default
commodity str

Commodity at the Node. Power/electricity, gas, heat, etc.

required
is_exogenous bool

Flag used to signal Solvers whether they should simulate the node endogenously or use the pre-set price. Defaults to False.

False
price ShadowPrice | None

Actual, calculated price of Commodity in this Node for each moment of simulation. Defaults to None.

None
storage Storage | None

The amount of the Commodity stored on this Node. Defaults to None.

None
Source code in framcore/components/Node.py
def __init__(
    self,
    commodity: str,
    is_exogenous: bool = False,  # TODO
    price: ShadowPrice | None = None,
    storage: Storage | None = None,
) -> None:
    """
    Initialize the Node class.

    Args:
        commodity (str): Commodity at the Node. Power/electricity, gas, heat, etc.
        is_exogenous (bool, optional): Flag used to signal Solvers whether they should simulate the node endogenously or use the pre-set price.
                                       Defaults to False.
        price (ShadowPrice | None): Actual, calculated price of Commodity in this Node for each moment of simulation. Defaults to None.
        storage (Storage | None, optional): The amount of the Commodity stored on this Node. Defaults to None.

    """
    super().__init__()
    self._check_type(commodity, str)
    self._check_type(is_exogenous, bool)
    self._check_type(price, (ShadowPrice, type(None)))
    self._check_type(storage, (Storage, type(None)))

    self._commodity = commodity
    self._is_exogenous = is_exogenous

    self._storage = storage

    if price is None:
        price = Price()

    self._price: Price = price
add_loaders(loaders: set[Loader]) -> None

Add loaders stored in attributes to loaders.

Source code in framcore/components/Node.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add loaders stored in attributes to loaders."""
    from framcore.utils import add_loaders_if

    add_loaders_if(loaders, self.get_price())
    add_loaders_if(loaders, self.get_storage())
get_commodity() -> str

Return commodity.

Source code in framcore/components/Node.py
def get_commodity(self) -> str:
    """Return commodity."""
    return self._commodity
get_price() -> ShadowPrice

Return price.

Source code in framcore/components/Node.py
def get_price(self) -> ShadowPrice:
    """Return price."""
    return self._price
get_storage() -> Storage | None

Get Storage if any.

Source code in framcore/components/Node.py
def get_storage(self) -> Storage | None:
    """Get Storage if any."""
    return self._storage
is_exogenous() -> bool

Return True if Node is exogenous (i.e. has fixed prices determined outside the model) else False.

Source code in framcore/components/Node.py
def is_exogenous(self) -> bool:
    """Return True if Node is exogenous (i.e. has fixed prices determined outside the model) else False."""
    return self._is_exogenous
set_endogenous() -> None

Set the Node to be endogenous.

Source code in framcore/components/Node.py
def set_endogenous(self) -> None:
    """Set the Node to be endogenous."""
    self._check_type(self._is_exogenous, bool)
    self._is_exogenous = False
set_exogenous() -> None

Set the Node to be exogenous.

Source code in framcore/components/Node.py
def set_exogenous(self) -> None:
    """Set the Node to be exogenous."""
    self._check_type(self._is_exogenous, bool)
    self._is_exogenous = True

Thermal

Thermal

Bases: _PowerPlant

Represents a thermal power plant, subclassing PowerPlant.

This class models a thermal power plant with attributes inherited from PowerPlant. Additionally, it includes specific attributes such as:

  • fuel node
  • efficiency
  • emission node
  • emission coefficient
  • startup costs

This class is compatible with ThermalAggregator.
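
A hedged construction sketch: the Efficiency and FlowVolume constructors are not documented in this reference, so the no-argument calls below are placeholders rather than confirmed signatures, and the node names are illustrative.

from framcore.components.Thermal import Thermal  # assumed import path
# Efficiency and FlowVolume are assumed importable from framcore's attribute modules.

plant = Thermal(
    power_node="power",         # node receiving the generated power
    fuel_node="gas",            # node supplying the fuel
    efficiency=Efficiency(),    # placeholder construction
    max_capacity=FlowVolume(),  # placeholder construction
    emission_node="co2",        # optional; adds an emission Arrow to the Flow
)

When emission_node is given, the generated Flow carries an extra Arrow whose conversion is the emission coefficient and which reuses the plant's efficiency, mirroring the fuel Arrow.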

Source code in framcore/components/Thermal.py
class Thermal(_PowerPlant):
    """
    Represents a thermal power plant, subclassing PowerPlant.

    This class models a thermal power plant with attributes inherited from PowerPlant.
    Additionally, it includes specific attributes such as:

    - fuel node
    - efficiency
    - emission node
    - emission coefficient
    - startup costs


    This class is compatible with ThermalAggregator.
    """

    def __init__(
        self,
        power_node: str,
        fuel_node: str,
        efficiency: Efficiency,
        max_capacity: FlowVolume,
        emission_node: str | None = None,
        emission_coefficient: Conversion | None = None,
        startupcost: StartUpCost | None = None,
        min_capacity: FlowVolume | None = None,
        voc: Cost | None = None,
        production: AvgFlowVolume | None = None,
        fuel_demand: AvgFlowVolume | None = None,
        emission_demand: AvgFlowVolume | None = None,
    ) -> None:
        """
        Initialize a Thermal power plant instance.

        Args:
            power_node (str): The power node of the plant.
            fuel_node (str): The fuel node of the plant.
            efficiency (Efficiency): Efficiency of the plant.
            emission_node (str | None, optional): Emission node.
            emission_coefficient (Conversion | None, optional): Emission coefficient.
            startupcost (StartUpCost | None, optional): Cost associated with starting up the Plant.
            max_capacity (FlowVolume): Maximum production capacity.
            min_capacity (FlowVolume | None, optional): Minimum production capacity.
            voc (Cost | None, optional): Variable operating cost.
            production (AvgFlowVolume | None, optional): Production volume.
            fuel_demand (AvgFlowVolume | None, optional): Fuel demand.
            emission_demand (AvgFlowVolume | None, optional): Emission demand.

        """
        super().__init__(
            power_node=power_node,
            max_capacity=max_capacity,
            min_capacity=min_capacity,
            voc=voc,
            production=production,
        )

        self._check_type(fuel_node, str)
        self._check_type(emission_node, (str, type(None)))
        self._check_type(emission_coefficient, (Conversion, type(None)))
        self._check_type(startupcost, (StartUpCost, type(None)))
        self._check_type(production, (AvgFlowVolume, type(None)))
        self._check_type(fuel_demand, (AvgFlowVolume, type(None)))
        self._check_type(emission_demand, (AvgFlowVolume, type(None)))

        self._fuel_node = fuel_node
        self._efficiency = efficiency
        self._emission_node = emission_node
        self._emission_coefficient = emission_coefficient
        self._startupcost = startupcost

        if production is None:
            production = AvgFlowVolume()

        if fuel_demand is None:
            fuel_demand = AvgFlowVolume()

        if emission_demand is None and emission_node is not None:
            emission_demand = AvgFlowVolume()

        self._production = production
        self._fuel_demand = fuel_demand
        self._emission_demand = emission_demand

    def get_fuel_node(self) -> str:
        """Get the fuel node of the thermal unit."""
        return self._fuel_node

    def set_fuel_node(self, fuel_node: str) -> None:
        """Set the fuel node of the thermal unit."""
        self._check_type(fuel_node, str)
        self._fuel_node = fuel_node

    def get_emission_node(self) -> str | None:
        """Get the emission node of the thermal unit."""
        return self._emission_node

    def set_emission_node(self, emission_node: str | None) -> None:
        """Set the emission node of the thermal unit."""
        self._check_type(emission_node, (str, type(None)))
        self._emission_node = emission_node

    def get_emission_coefficient(self) -> Conversion | None:
        """Get the emission coefficient of the thermal unit."""
        return self._emission_coefficient

    def set_emission_coefficient(self, emission_coefficient: Conversion | None) -> None:
        """Set the emission coefficient of the thermal unit."""
        self._check_type(emission_coefficient, (Conversion, type(None)))
        self._emission_coefficient = emission_coefficient

    def get_fuel_demand(self) -> AvgFlowVolume:
        """Get the fuel demand of the thermal unit."""
        return self._fuel_demand

    def get_emission_demand(self) -> AvgFlowVolume | None:
        """Get the emission demand of the thermal unit."""
        return self._emission_demand

    def set_emission_demand(self, value: AvgFlowVolume | None) -> None:
        """Set the emission demand of the thermal unit."""
        self._check_type(value, (AvgFlowVolume, type(None)))
        self._emission_demand = value

    def get_efficiency(self) -> Efficiency:
        """Get the efficiency of the thermal unit."""
        return self._efficiency

    def get_startupcost(self) -> StartUpCost | None:
        """Get the startup cost of the thermal unit."""
        return self._startupcost

    def set_startupcost(self, startupcost: StartUpCost | None) -> None:
        """Set the startup cost of the thermal unit."""
        self._check_type(startupcost, (StartUpCost, type(None)))
        self._startupcost = startupcost

    """Implementation of Component interface"""

    def _get_simpler_components(self, base_name: str) -> dict[str, Component]:
        return {base_name + "_Flow": self._create_flow()}

    def _replace_node(self, old: str, new: str) -> None:
        existing_nodes = [self._power_node, self._fuel_node]
        existing_nodes = existing_nodes if self._emission_node is None else [*existing_nodes, self._emission_node]
        if old not in existing_nodes:
            message = f"{old} not found in {self}. Expected one of the existing nodes {existing_nodes}."
            raise ValueError(message)

        if self._power_node == old:
            self._power_node = new
        if self._fuel_node == old:
            self._fuel_node = new
        if (self._emission_node is not None) and (old == self._emission_node):
            self._emission_node = new

    def _create_flow(self) -> Flow:
        arrow_volumes: dict[Arrow, AvgFlowVolume] = dict()

        is_exogenous = self._max_capacity == self._min_capacity

        flow = Flow(
            main_node=self._power_node,
            max_capacity=self._max_capacity,
            min_capacity=self._min_capacity,
            startupcost=self._startupcost,
            volume=self._production,
            arrow_volumes=arrow_volumes,
            is_exogenous=is_exogenous,
        )

        power_arrow = Arrow(
            node=self._power_node,
            is_ingoing=True,
            conversion=Conversion(value=1),
        )
        flow.add_arrow(power_arrow)

        fuel_arrow = Arrow(
            node=self._fuel_node,
            is_ingoing=False,
            efficiency=self._efficiency,
        )
        flow.add_arrow(fuel_arrow)
        arrow_volumes[fuel_arrow] = self._fuel_demand

        if self._emission_node is not None:
            if self._emission_demand is None:
                self._emission_demand = AvgFlowVolume()
            emission_arrow = Arrow(
                node=self._emission_node,
                is_ingoing=False,
                conversion=self._emission_coefficient,
                efficiency=self._efficiency,
            )
            flow.add_arrow(emission_arrow)
            arrow_volumes[emission_arrow] = self._emission_demand

        if self._voc:
            flow.add_cost_term("VOC", self._voc)

        return flow
__init__(power_node: str, fuel_node: str, efficiency: Efficiency, max_capacity: FlowVolume, emission_node: str | None = None, emission_coefficient: Conversion | None = None, startupcost: StartUpCost | None = None, min_capacity: FlowVolume | None = None, voc: Cost | None = None, production: AvgFlowVolume | None = None, fuel_demand: AvgFlowVolume | None = None, emission_demand: AvgFlowVolume | None = None) -> None

Initialize a Thermal power plant instance.

Parameters:

Name Type Description Default
power_node str

The power node of the plant.

required
fuel_node str

The fuel node of the plant.

required
efficiency Efficiency

Efficiency of the plant.

required
emission_node str | None

Emission node.

None
emission_coefficient Conversion | None

Emission coefficient.

None
startupcost StartUpCost | None

Cost associated with starting up the Plant.

None
max_capacity FlowVolume

Maximum production capacity.

required
min_capacity FlowVolume | None

Minimum production capacity.

None
voc Cost | None

Variable operating cost.

None
production AvgFlowVolume | None

Production volume.

None
fuel_demand AvgFlowVolume | None

Fuel demand.

None
emission_demand AvgFlowVolume | None

Emission demand.

None
Source code in framcore/components/Thermal.py
def __init__(
    self,
    power_node: str,
    fuel_node: str,
    efficiency: Efficiency,
    max_capacity: FlowVolume,
    emission_node: str | None = None,
    emission_coefficient: Conversion | None = None,
    startupcost: StartUpCost | None = None,
    min_capacity: FlowVolume | None = None,
    voc: Cost | None = None,
    production: AvgFlowVolume | None = None,
    fuel_demand: AvgFlowVolume | None = None,
    emission_demand: AvgFlowVolume | None = None,
) -> None:
    """
    Initialize a Thermal power plant instance.

    Args:
        power_node (str): The power node of the plant.
        fuel_node (str): The fuel node of the plant.
        efficiency (Efficiency): Efficiency of the plant.
        emission_node (str | None, optional): Emission node.
        emission_coefficient (Conversion | None, optional): Emission coefficient.
        startupcost (StartUpCost | None, optional): Cost associated with starting up the Plant.
        max_capacity (FlowVolume): Maximum production capacity.
        min_capacity (FlowVolume | None, optional): Minimum production capacity.
        voc (Cost | None, optional): Variable operating cost.
        production (AvgFlowVolume | None, optional): Production volume.
        fuel_demand (AvgFlowVolume | None, optional): Fuel demand.
        emission_demand (AvgFlowVolume | None, optional): Emission demand.

    """
    super().__init__(
        power_node=power_node,
        max_capacity=max_capacity,
        min_capacity=min_capacity,
        voc=voc,
        production=production,
    )

    self._check_type(fuel_node, str)
    self._check_type(emission_node, (str, type(None)))
    self._check_type(emission_coefficient, (Conversion, type(None)))
    self._check_type(startupcost, (StartUpCost, type(None)))
    self._check_type(production, (AvgFlowVolume, type(None)))
    self._check_type(fuel_demand, (AvgFlowVolume, type(None)))
    self._check_type(emission_demand, (AvgFlowVolume, type(None)))

    self._fuel_node = fuel_node
    self._efficiency = efficiency
    self._emission_node = emission_node
    self._emission_coefficient = emission_coefficient
    self._startupcost = startupcost

    if production is None:
        production = AvgFlowVolume()

    if fuel_demand is None:
        fuel_demand = AvgFlowVolume()

    if emission_demand is None and emission_node is not None:
        emission_demand = AvgFlowVolume()

    self._production = production
    self._fuel_demand = fuel_demand
    self._emission_demand = emission_demand
get_efficiency() -> Efficiency

Get the efficiency of the thermal unit.

Source code in framcore/components/Thermal.py
def get_efficiency(self) -> Efficiency:
    """Get the efficiency of the thermal unit."""
    return self._efficiency
get_emission_coefficient() -> Conversion | None

Get the emission coefficient of the thermal unit.

Source code in framcore/components/Thermal.py
def get_emission_coefficient(self) -> Conversion | None:
    """Get the emission coefficient of the thermal unit."""
    return self._emission_coefficient
get_emission_demand() -> AvgFlowVolume | None

Get the emission demand of the thermal unit.

Source code in framcore/components/Thermal.py
def get_emission_demand(self) -> AvgFlowVolume | None:
    """Get the emission demand of the thermal unit."""
    return self._emission_demand
get_emission_node() -> str | None

Get the emission node of the thermal unit.

Source code in framcore/components/Thermal.py
def get_emission_node(self) -> str | None:
    """Get the emission node of the thermal unit."""
    return self._emission_node
get_fuel_demand() -> AvgFlowVolume

Get the fuel demand of the thermal unit.

Source code in framcore/components/Thermal.py
def get_fuel_demand(self) -> AvgFlowVolume:
    """Get the fuel demand of the thermal unit."""
    return self._fuel_demand
get_fuel_node() -> str

Get the fuel node of the thermal unit.

Source code in framcore/components/Thermal.py
def get_fuel_node(self) -> str:
    """Get the fuel node of the thermal unit."""
    return self._fuel_node
get_startupcost() -> StartUpCost | None

Get the startup cost of the thermal unit.

Source code in framcore/components/Thermal.py
def get_startupcost(self) -> StartUpCost | None:
    """Get the startup cost of the thermal unit."""
    return self._startupcost
set_emission_coefficient(emission_coefficient: Conversion | None) -> None

Set the emission coefficient of the thermal unit.

Source code in framcore/components/Thermal.py
def set_emission_coefficient(self, emission_coefficient: Conversion | None) -> None:
    """Set the emission coefficient of the thermal unit."""
    self._check_type(emission_coefficient, (Conversion, type(None)))
    self._emission_coefficient = emission_coefficient
set_emission_demand(value: AvgFlowVolume | None) -> None

Set the emission demand of the thermal unit.

Source code in framcore/components/Thermal.py
def set_emission_demand(self, value: AvgFlowVolume | None) -> None:
    """Set the emission demand of the thermal unit."""
    self._check_type(value, (AvgFlowVolume, type(None)))
    self._emission_demand = value
set_emission_node(emission_node: str | None) -> None

Set the emission node of the thermal unit.

Source code in framcore/components/Thermal.py
def set_emission_node(self, emission_node: str | None) -> None:
    """Set the emission node of the thermal unit."""
    self._check_type(emission_node, (str, type(None)))
    self._emission_node = emission_node
set_fuel_node(fuel_node: str) -> None

Set the fuel node of the thermal unit.

Source code in framcore/components/Thermal.py
def set_fuel_node(self, fuel_node: str) -> None:
    """Set the fuel node of the thermal unit."""
    self._check_type(fuel_node, str)
    self._fuel_node = fuel_node
set_startupcost(startupcost: StartUpCost | None) -> None

Set the startup cost of the thermal unit.

Source code in framcore/components/Thermal.py
def set_startupcost(self, startupcost: StartUpCost | None) -> None:
    """Set the startup cost of the thermal unit."""
    self._check_type(startupcost, (StartUpCost, type(None)))
    self._startupcost = startupcost

Transmission

Contains the class describing transmission of the Power commodity between nodes.

Transmission

Bases: Component

Transmission component representing a one-directional transmission line. Subclass of Component.

An object of this class represents one transmission line where power flows in one direction (the other direction is represented by another Transmission object). However, the actual measured power being sent can be higher than the amount being received because of loss. One Transmission object therefore represents the viewpoints of both the sender and the receiver of power on this specific line.
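
Since each instance is one-directional, a bidirectional interconnector is modelled as two objects. A hedged sketch (import path assumed from the source file location; FlowVolume() stands in for an undocumented constructor):

from framcore.components.Transmission import Transmission  # assumed import path

# One Transmission per direction; loss and tariff stay at their None defaults.
a_to_b = Transmission(from_node="node_a", to_node="node_b", max_capacity=FlowVolume())
b_to_a = Transmission(from_node="node_b", to_node="node_a", max_capacity=FlowVolume())

Internally each object becomes one Flow with an outgoing Arrow at from_node and an ingoing Arrow at to_node that carries the loss, which is why the outgoing volume is measured before losses and the ingoing volume after.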

Source code in framcore/components/Transmission.py
class Transmission(Component):
    """
    Transmission component representing a one-directional transmission line. Subclass of Component.

    An object of this class represents one transmission line where power flows in one direction (the other direction is
    represented by another Transmission object). However, the actual measured power being sent can be higher than the
    amount being received because of loss. One Transmission object therefore represents the viewpoints of both the
    sender and the receiver of power on this specific line.

    """

    def __init__(
        self,
        from_node: str,
        to_node: str,
        max_capacity: FlowVolume = None,
        min_capacity: FlowVolume | None = None,
        loss: Loss | None = None,
        tariff: Cost | None = None,
        ramp_up: Proportion | None = None,
        ramp_down: Proportion | None = None,
        ingoing_volume: AvgFlowVolume | None = None,
        outgoing_volume: AvgFlowVolume | None = None,
    ) -> None:
        """
        Initialize an object of the Transmission class. Perform type checks and convert arguments to expressions.

        Args:
            from_node (str): Node which power is transported from.
            to_node (str): Destination Node.
            max_capacity (FlowVolume, optional): Maximum transmission capacity. Defaults to None.
            min_capacity (FlowVolume | None, optional): Minimum transmission capacity. Defaults to None.
            loss (Loss | None, optional): Amount of power lost while transmitting. Defaults to None.
            tariff (Cost | None, optional): Costs associated with operating this transmission line. Defaults to None.
            ramp_up (Proportion | None, optional): Max upwards change in transmission per time. Defaults to None.
            ramp_down (Proportion | None, optional): Max downwards change in transmission per time. Defaults to None.
            ingoing_volume (AvgFlowVolume | None, optional): Volume of power received by to_node. Defaults to None.
            outgoing_volume (AvgFlowVolume | None, optional): Volume of power sent by from_node. Defaults to None.

        """
        super().__init__()

        self._check_type(from_node, str)
        self._check_type(to_node, str)
        self._check_type(max_capacity, FlowVolume)
        self._check_type(min_capacity, (FlowVolume, type(None)))
        self._check_type(loss, (Loss, type(None)))
        self._check_type(tariff, (Cost, type(None)))
        self._check_type(ramp_up, (Proportion, type(None)))
        self._check_type(ramp_down, (Proportion, type(None)))
        self._check_type(ingoing_volume, (AvgFlowVolume, type(None)))
        self._check_type(outgoing_volume, (AvgFlowVolume, type(None)))

        self._from_node = from_node
        self._to_node = to_node
        self._max_capacity = max_capacity
        self._min_capacity = min_capacity
        self._loss = loss
        self._tariff = tariff
        self._ramp_up = ramp_up
        self._ramp_down = ramp_down

        if outgoing_volume is None:
            outgoing_volume = AvgFlowVolume()

        if ingoing_volume is None:
            ingoing_volume = AvgFlowVolume()

        self._outgoing_volume: AvgFlowVolume = outgoing_volume
        self._ingoing_volume: AvgFlowVolume = ingoing_volume

    def get_from_node(self) -> str:
        """Get the from node of the transmission line."""
        return self._from_node

    def set_from_node(self, node: str) -> None:
        """Set the from node of the transmission line."""
        self._check_type(node, str)
        self._from_node = node

    def get_to_node(self) -> str:
        """Get the to node of the transmission line."""
        return self._to_node

    def set_to_node(self, node: str) -> None:
        """Set the to node of the transmission line."""
        self._check_type(node, str)
        self._to_node = node

    def get_max_capacity(self) -> FlowVolume:
        """Get the maximum capacity (before losses) of the transmission line."""
        return self._max_capacity

    def get_min_capacity(self) -> FlowVolume:
        """Get the minimum capacity (before losses) of the transmission line."""
        return self._min_capacity

    def set_min_capacity(self, value: FlowVolume | None) -> None:
        """Set the minimum capacity (before losses) of the transmission line."""
        self._check_type(value, (FlowVolume, type(None)))
        self._min_capacity = value

    def get_outgoing_volume(self) -> AvgFlowVolume:
        """Get the outgoing (before losses) flow volume of the transmission line."""
        return self._outgoing_volume

    def get_ingoing_volume(self) -> AvgFlowVolume:
        """Get the ingoing (after losses) flow volume of the transmission line."""
        return self._ingoing_volume

    def get_loss(self) -> Loss | None:
        """Get the loss of the transmission line."""
        return self._loss

    def set_loss(self, loss: Loss | None) -> None:
        """Set the loss of the transmission line."""
        self._check_type(loss, (Loss, type(None)))
        self._loss = loss

    def get_tariff(self) -> Cost | None:
        """Get the tariff of the transmission line."""
        return self._tariff

    def set_tariff(self, tariff: Cost | None) -> None:
        """Set the tariff of the transmission line."""
        self._check_type(tariff, (Cost, type(None)))
        self._tariff = tariff

    def get_ramp_up(self) -> Proportion | None:
        """Get the ramp up profile level of the transmission line."""
        return self._ramp_up

    def set_ramp_up(self, value: Proportion | None) -> None:
        """Set the ramp up of the transmission line."""
        self._check_type(value, (Proportion, type(None)))
        self._ramp_up = value

    def get_ramp_down(self) -> Proportion | None:
        """Get the ramp down of the transmission line."""
        return self._ramp_down

    def set_ramp_down(self, value: Proportion | None) -> None:
        """Set the ramp down of the transmission line."""
        self._check_type(value, (Proportion, type(None)))
        self._ramp_down = value

    """Implementation of Component interface"""

    def _get_simpler_components(self, base_name: str) -> dict[str, Component]:
        return {base_name + "_Flow": self._create_flow()}

    def _replace_node(self, old: str, new: str) -> None:
        if old == self._from_node:
            self._from_node = new
        if old == self._to_node:
            self._to_node = new

    def _create_flow(self) -> Flow:
        arrow_volumes: dict[Arrow, FlowVolume] = dict()

        flow = Flow(
            main_node=self._from_node,
            max_capacity=self._max_capacity,
            volume=self._outgoing_volume,
            arrow_volumes=arrow_volumes,
            # ramp_up=self._ramp_up,    # TODO
            # ramp_down=self._ramp_down,  # TODO
        )

        outgoing_arrow = Arrow(
            node=self._from_node,
            is_ingoing=False,
            conversion=Conversion(value=1),
        )
        flow.add_arrow(outgoing_arrow)
        arrow_volumes[outgoing_arrow] = self._outgoing_volume

        # TODO: Extend Loss to support more features, such as quadratic losses? Needs loss param in Arrow to do this

        ingoing_arrow = Arrow(
            node=self._to_node,
            is_ingoing=True,
            conversion=Conversion(value=1),
            loss=self._loss,
        )
        flow.add_arrow(ingoing_arrow)
        arrow_volumes[ingoing_arrow] = self._ingoing_volume

        if self._tariff is not None:
            flow.add_cost_term("tariff", self._tariff)

        return flow
__init__(from_node: str, to_node: str, max_capacity: FlowVolume = None, min_capacity: FlowVolume | None = None, loss: Loss | None = None, tariff: Cost | None = None, ramp_up: Proportion | None = None, ramp_down: Proportion | None = None, ingoing_volume: AvgFlowVolume | None = None, outgoing_volume: AvgFlowVolume | None = None) -> None

Initialize an object of the Transmission class. Perform type checks and convert arguments to expressions.

Parameters:

Name Type Description Default
from_node str

Node which power is transported from.

required
to_node str

Destination Node.

required
max_capacity FlowVolume

Maximum transmission capacity. Defaults to None.

None
min_capacity FlowVolume | None

Minimum transmission capacity. Defaults to None.

None
loss Loss | None

Amount of power lost while transmitting. Defaults to None.

None
tariff Cost | None

Costs associated with operating this transmission line. Defaults to None.

None
ramp_up Proportion | None

Max upwards change in transmission per time. Defaults to None.

None
ramp_down Proportion | None

Max downwards change in transmission per time. Defaults to None.

None
ingoing_volume AvgFlowVolume | None

Volume of power received by to_node. Defaults to None.

None
outgoing_volume AvgFlowVolume | None

Volume of power sent by from_node. Defaults to None.

None
Source code in framcore/components/Transmission.py
def __init__(
    self,
    from_node: str,
    to_node: str,
    max_capacity: FlowVolume = None,
    min_capacity: FlowVolume | None = None,
    loss: Loss | None = None,
    tariff: Cost | None = None,
    ramp_up: Proportion | None = None,
    ramp_down: Proportion | None = None,
    ingoing_volume: AvgFlowVolume | None = None,
    outgoing_volume: AvgFlowVolume | None = None,
) -> None:
    """
    Initialize an object of the Transmission class. Perform type checks and convert arguments to expressions.

    Args:
        from_node (str): Node which power is transported from.
        to_node (str): Destination Node.
        max_capacity (FlowVolume, optional): Maximum transmission capacity. Defaults to None.
        min_capacity (FlowVolume | None, optional): Minimum transmission capacity. Defaults to None.
        loss (Loss | None, optional): Amount of power lost while transmitting. Defaults to None.
        tariff (Cost | None, optional): Costs associated with operating this transmission line. Defaults to None.
        ramp_up (Proportion | None, optional): Max upwards change in transmission per time. Defaults to None.
        ramp_down (Proportion | None, optional): Max downwards change in transmission per time. Defaults to None.
        ingoing_volume (AvgFlowVolume | None, optional): Volume of power received by to_node. Defaults to None.
        outgoing_volume (AvgFlowVolume | None, optional): Volume of power sent by from_node. Defaults to None.

    """
    super().__init__()

    self._check_type(from_node, str)
    self._check_type(to_node, str)
    self._check_type(max_capacity, FlowVolume)
    self._check_type(min_capacity, (FlowVolume, type(None)))
    self._check_type(loss, (Loss, type(None)))
    self._check_type(tariff, (Cost, type(None)))
    self._check_type(ramp_up, (Proportion, type(None)))
    self._check_type(ramp_down, (Proportion, type(None)))
    self._check_type(ingoing_volume, (AvgFlowVolume, type(None)))
    self._check_type(outgoing_volume, (AvgFlowVolume, type(None)))

    self._from_node = from_node
    self._to_node = to_node
    self._max_capacity = max_capacity
    self._min_capacity = min_capacity
    self._loss = loss
    self._tariff = tariff
    self._ramp_up = ramp_up
    self._ramp_down = ramp_down

    if outgoing_volume is None:
        outgoing_volume = AvgFlowVolume()

    if ingoing_volume is None:
        ingoing_volume = AvgFlowVolume()

    self._outgoing_volume: AvgFlowVolume = outgoing_volume
    self._ingoing_volume: AvgFlowVolume = ingoing_volume
get_from_node() -> str

Get the from node of the transmission line.

Source code in framcore/components/Transmission.py
def get_from_node(self) -> str:
    """Get the from node of the transmission line."""
    return self._from_node
get_ingoing_volume() -> AvgFlowVolume

Get the ingoing (after losses) flow volume of the transmission line.

Source code in framcore/components/Transmission.py
def get_ingoing_volume(self) -> AvgFlowVolume:
    """Get the ingoing (after losses) flow volume of the transmission line."""
    return self._ingoing_volume
get_loss() -> Loss | None

Get the loss of the transmission line.

Source code in framcore/components/Transmission.py
def get_loss(self) -> Loss | None:
    """Get the loss of the transmission line."""
    return self._loss
get_max_capacity() -> FlowVolume

Get the maximum capacity (before losses) of the transmission line.

Source code in framcore/components/Transmission.py
def get_max_capacity(self) -> FlowVolume:
    """Get the maximum capacity (before losses) of the transmission line."""
    return self._max_capacity
get_min_capacity() -> FlowVolume

Get the minimum capacity (before losses) of the transmission line.

Source code in framcore/components/Transmission.py
def get_min_capacity(self) -> FlowVolume:
    """Get the minimum capacity (before losses) of the transmission line."""
    return self._min_capacity
get_outgoing_volume() -> AvgFlowVolume

Get the outgoing (before losses) flow volume of the transmission line.

Source code in framcore/components/Transmission.py
def get_outgoing_volume(self) -> AvgFlowVolume:
    """Get the outgoing (before losses) flow volume of the transmission line."""
    return self._outgoing_volume
get_ramp_down() -> Proportion | None

Get the ramp down of the transmission line.

Source code in framcore/components/Transmission.py
def get_ramp_down(self) -> Proportion | None:
    """Get the ramp down of the transmission line."""
    return self._ramp_down
get_ramp_up() -> Proportion | None

Get the ramp up profile level of the transmission line.

Source code in framcore/components/Transmission.py
def get_ramp_up(self) -> Proportion | None:
    """Get the ramp up profile level of the transmission line."""
    return self._ramp_up
get_tariff() -> Cost | None

Get the tariff of the transmission line.

Source code in framcore/components/Transmission.py
def get_tariff(self) -> Cost | None:
    """Get the tariff of the transmission line."""
    return self._tariff
get_to_node() -> str

Get the to node of the transmission line.

Source code in framcore/components/Transmission.py
def get_to_node(self) -> str:
    """Get the to node of the transmission line."""
    return self._to_node
set_from_node(node: str) -> None

Set the from node of the transmission line.

Source code in framcore/components/Transmission.py
def set_from_node(self, node: str) -> None:
    """Set the from node of the transmission line."""
    self._check_type(node, str)
    self._from_node = node
set_loss(loss: Loss | None) -> None

Set the loss of the transmission line.

Source code in framcore/components/Transmission.py
def set_loss(self, loss: Loss | None) -> None:
    """Set the loss of the transmission line."""
    self._check_type(loss, (Loss, type(None)))
    self._loss = loss
set_min_capacity(value: FlowVolume | None) -> None

Set the minimum capacity (before losses) of the transmission line.

Source code in framcore/components/Transmission.py
def set_min_capacity(self, value: FlowVolume | None) -> None:
    """Set the minimum capacity (before losses) of the transmission line."""
    self._check_type(value, (FlowVolume, type(None)))
    self._min_capacity = value
set_ramp_down(value: Proportion | None) -> None

Set the ramp down of the transmission line.

Source code in framcore/components/Transmission.py
def set_ramp_down(self, value: Proportion | None) -> None:
    """Set the ramp down of the transmission line."""
    self._check_type(value, (Proportion, type(None)))
    self._ramp_down = value
set_ramp_up(value: Proportion | None) -> None

Set the ramp up of the transmission line.

Source code in framcore/components/Transmission.py
def set_ramp_up(self, value: Proportion | None) -> None:
    """Set the ramp up of the transmission line."""
    self._check_type(value, (Proportion, type(None)))
    self._ramp_up = value
set_tariff(tariff: Cost | None) -> None

Set the tariff of the transmission line.

Source code in framcore/components/Transmission.py
def set_tariff(self, tariff: Cost | None) -> None:
    """Set the tariff of the transmission line."""
    self._check_type(tariff, (Cost, type(None)))
    self._tariff = tariff
set_to_node(node: str) -> None

Set the to node of the transmission line.

Source code in framcore/components/Transmission.py
def set_to_node(self, node: str) -> None:
    """Set the to node of the transmission line."""
    self._check_type(node, str)
    self._to_node = node

wind_solar

Solar

Bases: _WindSolar

Solar power component.

Has attributes for power node, capacity, variable operation cost, and production.

Compatible with WindSolarAggregator.

Source code in framcore/components/wind_solar.py
class Solar(_WindSolar):
    """
    Solar power component.

    Has attributes for power node, capacity, variable operation cost, and production.

    Compatible with WindSolarAggregator.
    """

    pass
Wind

Bases: _WindSolar

Wind power component.

Has attributes for power node, capacity, variable operation cost, and production.

Compatible with WindSolarAggregator.

Source code in framcore/components/wind_solar.py
class Wind(_WindSolar):
    """
    Wind power component.

    Has attributes for power node, capacity, variable operation cost, and production.

    Compatible with WindSolarAggregator.
    """

    pass

curves

Curve

Curve interface.

Curve

Bases: Base, ABC

Curve interface class.

Source code in framcore/curves/Curve.py
class Curve(Base, ABC):
    """Curve interface class."""

    @abstractmethod
    def get_unique_name(self) -> str | None:
        """Return unique name of curve."""
        pass

    @abstractmethod
    def get_x_axis(self, is_float32: bool) -> NDArray:
        """
        Get array of x axis values.

        Args:
            is_float32 (bool): Flag for converting the array of values to numpy float32.

        Returns:
            NDArray: Numpy array of values.

        """
        pass

    @abstractmethod
    def get_y_axis(self, is_float32: bool) -> NDArray:
        """
        Get array of y axis values.

        Args:
            is_float32 (bool): Flag for converting the array of values to numpy float32.

        Returns:
            NDArray: Numpy array of values.

        """
        pass
get_unique_name() -> str | None abstractmethod

Return unique name of curve.

Source code in framcore/curves/Curve.py
@abstractmethod
def get_unique_name(self) -> str | None:
    """Return unique name of curve."""
    pass
get_x_axis(is_float32: bool) -> NDArray abstractmethod

Get array of x axis values.

Parameters:

Name Type Description Default
is_float32 bool

Flag for converting the array of values to numpy float32.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of values.

Source code in framcore/curves/Curve.py
@abstractmethod
def get_x_axis(self, is_float32: bool) -> NDArray:
    """
    Get array of x axis values.

    Args:
        is_float32 (bool): Flag for converting the array of values to numpy float32.

    Returns:
        NDArray: Numpy array of values.

    """
    pass
get_y_axis(is_float32: bool) -> NDArray abstractmethod

Get array of y axis values.

Parameters:

Name Type Description Default
is_float32 bool

Flag for converting the array of values to numpy float32.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of values.

Source code in framcore/curves/Curve.py
@abstractmethod
def get_y_axis(self, is_float32: bool) -> NDArray:
    """
    Get array of y axis values.

    Args:
        is_float32 (bool): Flag for converting the array of values to numpy float32.

    Returns:
        NDArray: Numpy array of values.

    """
    pass
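
The interface above is all a concrete curve needs to implement. As an illustration, a minimal in-memory subclass could look like the sketch below; the class name ConstantCurve and the import path are our own assumptions inferred from the source layout, not part of framcore.

import numpy as np
from numpy.typing import NDArray

from framcore.curves import Curve  # assumed import path, per framcore/curves/Curve.py


class ConstantCurve(Curve):
    """Hypothetical in-memory Curve, mainly useful for tests."""

    def __init__(self, name: str, x: list[float], y: list[float]) -> None:
        self._name = name
        self._x = np.asarray(x, dtype=np.float64)
        self._y = np.asarray(y, dtype=np.float64)

    def get_unique_name(self) -> str | None:
        return self._name

    def get_x_axis(self, is_float32: bool) -> NDArray:
        # Convert on request, mirroring the is_float32 contract above.
        return self._x.astype(np.float32) if is_float32 else self._x

    def get_y_axis(self, is_float32: bool) -> NDArray:
        return self._y.astype(np.float32) if is_float32 else self._y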

LoadedCurve

LoadedCurve

Bases: Curve

Represents a curve loaded from a CurveLoader.

Methods

get_unique_name()
    Returns the unique name of the curve.
get_x_axis(is_float32)
    Returns the x-axis data.
get_y_axis(is_float32)
    Returns the y-axis data.
get_x_unit()
    Returns the unit for the x-axis.
get_y_unit()
    Returns the unit for the y-axis.
get_loader()
    Returns the loader instance.
get_fingerprint()
    Returns the fingerprint of the curve.

Source code in framcore/curves/LoadedCurve.py
class LoadedCurve(Curve):
    """
    Represents a curve loaded from a CurveLoader.

    Methods
    -------
    get_unique_name()
        Returns the unique name of the curve.
    get_x_axis(is_float32)
        Returns the x-axis data.
    get_y_axis(is_float32)
        Returns the y-axis data.
    get_x_unit()
        Returns the unit for the x-axis.
    get_y_unit()
        Returns the unit for the y-axis.
    get_loader()
        Returns the loader instance.
    get_fingerprint()
        Returns the fingerprint of the curve.

    """

    def __init__(self, curve_id: str, loader: CurveLoader) -> None:
        """
        Initialize a LoadedCurve instance.

        Parameters
        ----------
        curve_id : str
            Identifier for the curve.
        loader : CurveLoader
            Loader instance used to retrieve curve data.

        """
        self._curve_id = curve_id
        self._loader = loader

        # TODO: get from loader
        self._reference_period = None

    def __repr__(self) -> str:
        """Return a string representation of the LoadedCurve instance."""
        return f"{type(self).__name__}(curve_id={self._curve_id},loader={self._loader},x_unit={self.get_x_unit()}),y_unit={self.get_y_unit()}),"

    def get_unique_name(self) -> str:
        """
        Return the unique name of the curve.

        Returns
        -------
        str
            The unique name for the curve.

        """
        return self._curve_id

    def get_x_axis(self, is_float32: bool) -> NDArray:
        """
        Get x axis values of the curve as a numpy array.

        Args:
            is_float32 (bool): Flag for converting the array of values to numpy float32.

        Returns:
            NDArray: Numpy array of x axis values.

        """
        x_axis = self._loader.get_x_axis(self._curve_id)
        if is_float32:
            x_axis = x_axis.astype(np.float32)
        return x_axis

    def get_y_axis(self, is_float32: bool) -> NDArray:
        """
        Get y axis values of the curve as a numpy array.

        Args:
            is_float32 (bool): Flag for converting the array of values to numpy float32.

        Returns:
            NDArray: Numpy array of y axis values.

        """
        y_axis = self._loader.get_y_axis(self._curve_id)
        if is_float32:
            y_axis = y_axis.astype(np.float32)
        return y_axis

    def get_x_unit(self) -> str:
        """
        Return the unit for the x-axis.

        Returns
        -------
        str
            The unit for the x-axis.

        """
        return self._loader.get_x_unit(self._curve_id)

    def get_y_unit(self) -> str:
        """
        Return the unit for the y-axis.

        Returns
        -------
        str
            The unit for the y-axis.

        """
        return self._loader.get_y_unit(self._curve_id)

    def get_loader(self) -> CurveLoader:
        """
        Return the loader instance used to retrieve curve data.

        Returns
        -------
        CurveLoader
            The loader instance associated with this curve.

        """
        return self._loader

    def get_fingerprint(self) -> Fingerprint:
        """
        Return the fingerprint of the curve.

        The method is not implemented yet.
        """
        raise NotImplementedError("Not implemented yet.")
__init__(curve_id: str, loader: CurveLoader) -> None

Initialize a LoadedCurve instance.

Parameters

curve_id : str
    Identifier for the curve.
loader : CurveLoader
    Loader instance used to retrieve curve data.

Source code in framcore/curves/LoadedCurve.py
def __init__(self, curve_id: str, loader: CurveLoader) -> None:
    """
    Initialize a LoadedCurve instance.

    Parameters
    ----------
    curve_id : str
        Identifier for the curve.
    loader : CurveLoader
        Loader instance used to retrieve curve data.

    """
    self._curve_id = curve_id
    self._loader = loader

    # TODO: get from loader
    self._reference_period = None
__repr__() -> str

Return a string representation of the LoadedCurve instance.

Source code in framcore/curves/LoadedCurve.py
def __repr__(self) -> str:
    """Return a string representation of the LoadedCurve instance."""
    return f"{type(self).__name__}(curve_id={self._curve_id},loader={self._loader},x_unit={self.get_x_unit()}),y_unit={self.get_y_unit()}),"
get_fingerprint() -> Fingerprint

Return the fingerprint of the curve.

The method is not implemented yet.

Source code in framcore/curves/LoadedCurve.py
def get_fingerprint(self) -> Fingerprint:
    """
    Return the fingerprint of the curve.

    The method is not implemented yet.
    """
    raise NotImplementedError("Not implemented yet.")
get_loader() -> CurveLoader

Return the loader instance used to retrieve curve data.

Returns

CurveLoader
    The loader instance associated with this curve.

Source code in framcore/curves/LoadedCurve.py
def get_loader(self) -> CurveLoader:
    """
    Return the loader instance used to retrieve curve data.

    Returns
    -------
    CurveLoader
        The loader instance associated with this curve.

    """
    return self._loader
get_unique_name() -> str

Return the unique name of the curve.

Returns

str
    The unique name for the curve.

Source code in framcore/curves/LoadedCurve.py
def get_unique_name(self) -> str:
    """
    Return the unique name of the curve.

    Returns
    -------
    str
        The unique name for the curve.

    """
    return self._curve_id
get_x_axis(is_float32: bool) -> NDArray

Get x axis values of the curve as a numpy array.

Parameters:

Name Type Description Default
is_float32 bool

Flag for converting the array of values to numpy float32.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of x axis values.

Source code in framcore/curves/LoadedCurve.py
def get_x_axis(self, is_float32: bool) -> NDArray:
    """
    Get x axis values of the curve as a numpy array.

    Args:
        is_float32 (bool): Flag for converting the array of values to numpy float32.

    Returns:
        NDArray: Numpy array of x axis values.

    """
    x_axis = self._loader.get_x_axis(self._curve_id)
    if is_float32:
        x_axis = x_axis.astype(np.float32)
    return x_axis
get_x_unit() -> str

Return the unit for the x-axis.

Returns

str
    The unit for the x-axis.

Source code in framcore/curves/LoadedCurve.py
def get_x_unit(self) -> str:
    """
    Return the unit for the x-axis.

    Returns
    -------
    str
        The unit for the x-axis.

    """
    return self._loader.get_x_unit(self._curve_id)
get_y_axis(is_float32: bool) -> NDArray

Get y axis values of the curve as a numpy array.

Parameters:

Name Type Description Default
is_float32 bool

Flag for converting the array of values to numpy float32.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of y axis values.

Source code in framcore/curves/LoadedCurve.py
def get_y_axis(self, is_float32: bool) -> NDArray:
    """
    Get y axis values of the curve as a numpy array.

    Args:
        is_float32 (bool): Flag for converting the array of values to numpy float32.

    Returns:
        NDArray: Numpy array of y axis values.

    """
    y_axis = self._loader.get_y_axis(self._curve_id)
    if is_float32:
        y_axis = y_axis.astype(np.float32)
    return y_axis
get_y_unit() -> str

Return the unit for the y-axis.

Returns

str
    The unit for the y-axis.

Source code in framcore/curves/LoadedCurve.py
def get_y_unit(self) -> str:
    """
    Return the unit for the y-axis.

    Returns
    -------
    str
        The unit for the y-axis.

    """
    return self._loader.get_y_unit(self._curve_id)
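
To show how the pieces above fit together, here is a sketch pairing LoadedCurve with a stub loader. The stub is our own illustration, not part of framcore: it implements only the four loader methods that LoadedCurve actually calls above, and a real CurveLoader may require more.

import numpy as np

from framcore.curves import LoadedCurve  # assumed import path


class _StubCurveLoader:
    """Stands in for a real CurveLoader; implements only what LoadedCurve uses."""

    def get_x_axis(self, curve_id: str) -> np.ndarray:
        return np.array([0.0, 1.0, 2.0])

    def get_y_axis(self, curve_id: str) -> np.ndarray:
        return np.array([10.0, 20.0, 15.0])

    def get_x_unit(self, curve_id: str) -> str:
        return "h"

    def get_y_unit(self, curve_id: str) -> str:
        return "MW"


curve = LoadedCurve(curve_id="demand_curve", loader=_StubCurveLoader())
print(curve.get_unique_name())                  # demand_curve
print(curve.get_y_axis(is_float32=True).dtype)  # float32, converted on request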

events

events

Event system.

All code in the core use these functions to communicate events.

Calling systems (e.g. workflow codes) can get events by hooking into SEND_EVENT_CHANNEL.

get_event_handler() -> object | None

Get event handler if any.

Source code in framcore/events/events.py
def get_event_handler() -> object | None:
    """Get event handler if any."""
    return _EVENT_HANDLER
send_debug_event(sender: object, message: str) -> None

Use this to send debug event.

Source code in framcore/events/events.py
def send_debug_event(sender: object, message: str) -> None:
    """Use this to send debug event."""
    send_event(sender, "debug", message=message)
send_error_event(sender: object, message: str, exception_type_name: str, traceback: str) -> None

Use this to send error event.

Source code in framcore/events/events.py
def send_error_event(sender: object, message: str, exception_type_name: str, traceback: str) -> None:
    """Use this to send error event."""
    send_event(sender, "error", message=message, exception_type_name=exception_type_name, traceback=traceback)
send_event(sender: object, event_type: str, **kwargs: dict[str, object]) -> None

All events in core should use this.

Source code in framcore/events/events.py
def send_event(sender: object, event_type: str, **kwargs: dict[str, object]) -> None:
    """All events in core should use this."""
    if _EVENT_HANDLER is None:
        print(event_type, kwargs)
    else:
        _EVENT_HANDLER.handle_event(sender, event_type, **kwargs)
send_info_event(sender: object, message: str) -> None

Use this to send info event.

Source code in framcore/events/events.py
def send_info_event(sender: object, message: str) -> None:
    """Use this to send info event."""
    send_event(sender, "info", message=message)
send_warning_event(sender: object, message: str) -> None

Use this to send warning event.

Source code in framcore/events/events.py
def send_warning_event(sender: object, message: str) -> None:
    """Use this to send warning event."""
    send_event(sender, "warning", message=message)
set_event_handler(handler: object | None) -> None

Set event handler if any.

Source code in framcore/events/events.py
def set_event_handler(handler: object | None) -> None:
    """Set event handler if any."""
    if handler is not None and (not hasattr(handler, "handle_event") or not callable(handler.handle_event)):
        message = "Given handler does not implement handle_event."
        raise ValueError(message)
    global _EVENT_HANDLER  # noqa: PLW0603 # TODO: unsafe?
    _EVENT_HANDLER = handler
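
Any object exposing a callable handle_event passes the check in set_event_handler, so hooking into the event system can be sketched as below. The handler class is hypothetical and the import path is assumed from framcore/events/events.py.

from framcore.events import send_info_event, set_event_handler  # assumed import path


class PrintHandler:
    """Hypothetical handler; only a callable handle_event is required."""

    def handle_event(self, sender: object, event_type: str, **kwargs: object) -> None:
        print(f"[{event_type}] {type(sender).__name__}: {kwargs.get('message')}")


set_event_handler(PrintHandler())
send_info_event(sender=object(), message="model loaded")  # routed to PrintHandler
set_event_handler(None)  # back to the default plain-print behaviour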

expressions

ensure_expr(value: Expr | str | Curve | TimeVector | None, is_flow: bool = False, is_stock: bool = False, is_level: bool = False, is_profile: bool = False, profile: Expr | None = None) -> Expr | None

Ensure that the value is an expression of the expected type or create one if possible.

Parameters:

Name Type Description Default
value Expr | str | Curve | TimeVector | None

The value to check.

required
is_flow bool

Whether the Expr is a flow. Cannot be True if is_stock is True.

False
is_stock bool

Whether the Expr is a stock. Cannot be True if is_flow is True.

False
is_level bool

Whether the Expr represents a level. Cannot be True if is_profile is True.

False
is_profile bool

Whether the Expr represents a profile. Cannot be True if is_level is True.

False
profile Expr | None

If the Expr is a level, this should be its profile.

None

Returns:

Name Type Description
value Expr | None

The value as an expression of the expected type, or None.

Source code in framcore/expressions/Expr.py
def ensure_expr(
    value: Expr | str | Curve | TimeVector | None,  # technically anything that can be converted to float. Typehint for this?
    is_flow: bool = False,
    is_stock: bool = False,
    is_level: bool = False,
    is_profile: bool = False,
    profile: Expr | None = None,
) -> Expr | None:
    """
    Ensure that the value is an expression of the expected type or create one if possible.

    Args:
        value (Expr | str | Curve | TimeVector | None): The value to check.
        is_flow (bool): Whether the Expr is a flow. Cannot be True if is_stock is True.
        is_stock (bool): Whether the Expr is a stock. Cannot be True if is_flow is True.
        is_level (bool): Whether the Expr represents a level. Cannot be True if is_profile is True.
        is_profile (bool): Whether the Expr represents a profile. Cannot be True if is_level is True.
        profile (Expr | None): If the Expr is a level, this should be its profile.

    Returns:
        value (Expr | None): The value as an expression of the expected type, or None.

    """
    if not isinstance(value, (str, Expr, Curve, TimeVector)) and value is not None:
        msg = f"Expected value to be of type Expr, str, Curve, TimeVector or None. Got {type(value).__name__}."
        raise TypeError(msg)

    if value is None:
        return None

    if isinstance(value, Expr):
        # Check wether given Expr matches expected flow, stock, profile and level status.
        if value.is_flow() != is_flow or value.is_stock() != is_stock or value.is_level() != is_level or value.is_profile() != is_profile:
            message = (
                "Given Expr has a mismatch between expected and actual flow/stock or level/profile status:\nExpected: "
                f"is_flow - {is_flow}, is_stock - {is_stock}, is_level - {is_level}, is_profile - {is_profile}\n"
                f"Actual: is_flow - {value.is_flow()}, is_stock - {value.is_stock()}, "
                f"is_level - {value.is_level()}, is_profile - {value.is_profile()}"
            )
            raise ValueError(message)
        return value

    return Expr(
        src=value,
        is_flow=is_flow,
        is_stock=is_stock,
        is_level=is_level,
        is_profile=is_profile,
        profile=profile,
    )
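
A short usage sketch (import path assumed; the reference strings are placeholders): strings, Curves and TimeVectors are wrapped in a new Expr carrying the requested flags, existing Exprs are validated against those flags, and None passes through.

from framcore.expressions import Expr, ensure_expr  # assumed import path

profile = ensure_expr("wind_profile", is_profile=True)                # str -> profile Expr
level = ensure_expr("wind_capacity", is_level=True, profile=profile)  # str -> level Expr

assert isinstance(level, Expr) and level.is_level()
assert ensure_expr(None) is None  # None passes through unchanged

# An existing Expr whose flags disagree with the arguments raises ValueError:
# ensure_expr(profile, is_level=True)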

get_leaf_profiles(expr: Expr) -> list[Expr]

Get all leaf profile expressions from an Expr object.

Parameters:

Name Type Description Default
expr Expr

The starting Expr object.

required

Returns:

Type Description
list[Expr]

list[Expr]: A list of leaf profile expressions.

Source code in framcore/expressions/Expr.py
def get_leaf_profiles(expr: Expr) -> list[Expr]:
    """
    Get all leaf profile expressions from an Expr object.

    Args:
        expr (Expr): The starting Expr object.

    Returns:
        list[Expr]: A list of leaf profile expressions.

    """
    leaf_profiles = []

    def _traverse(expr: Expr) -> None:
        if expr.is_leaf():
            if expr.is_profile():
                leaf_profiles.append(expr)
            return

        # Recursively traverse the arguments of the expression
        _, args = expr.get_operations(expect_ops=False, copy_list=False)
        for arg in args:
            _traverse(arg)

    _traverse(expr)
    return leaf_profiles
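
For instance, summing two profile leaves and traversing the result; a sketch assuming the arithmetic operators implied by Expr's internal _create_op_expr, with an assumed import path.

from framcore.expressions import Expr, get_leaf_profiles  # assumed import path

p1 = Expr(src="solar_profile", is_profile=True)
p2 = Expr(src="wind_profile", is_profile=True)

combined = p1 + p2  # profile + profile is a supported operation
assert {e.get_src() for e in get_leaf_profiles(combined)} == {"solar_profile", "wind_profile"}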

get_level_value(expr: Expr, db: QueryDB | Model, unit: str | None, data_dim: SinglePeriodTimeIndex, scen_dim: FixedFrequencyTimeIndex, is_max: bool) -> float

Evaluate Expr representing a (possibly aggregated) level.

The following will be handled automatically for you:

- fetching from different data objects (from db)
- conversion to the requested unit
- query at the requested TimeIndex for the data and scenario dimensions, with the requested reference period
- conversion to the requested level type (is_max or is_avg)

Supports all expressions. Will evaluate level Exprs at data_dim (with reference period of scen_dim), and profile Exprs as an average over scen_dim (both as constants). Has optimized fastpath methods for sums, products and aggregations. The rest uses a fallback method with SymPy.

Source code in framcore/expressions/queries.py
def get_level_value(
    expr: Expr,
    db: QueryDB | Model,
    unit: str | None,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    is_max: bool,
) -> float:
    """
    Evaluate Expr representing a (possibly aggregated) level.

    The following will be automatically handled for you:
    - fetching from different data objects (from db)
    - conversion to requested unit
    - query at requested TimeIndex for data and scenario dimension, and with requested reference period
    - conversion to requested level type (is_max or is_avg)

    Supports all expressions. Will evaluate level Exprs at data_dim (with reference period of scen_dim),
    and profile Exprs as an average over scen_dim (both as constants). Has optimized fastpath methods for sums, products and aggregations.
    The rest uses a fallback method with SymPy.

    """
    check_type(expr, Expr)  # check expr here since _get_level_value is not recursively called.
    check_type(unit, (str, type(None)))
    check_type(data_dim, SinglePeriodTimeIndex)
    check_type(scen_dim, FixedFrequencyTimeIndex)
    check_type(is_max, bool)
    db = _load_model_and_create_model_db(db)

    return _get_level_value(expr, db, unit, data_dim, scen_dim, is_max)

get_profile_exprs_from_leaf_levels(expr: Expr) -> list[Expr]

Get all profile expressions from leaf-level Expr objects that are marked as levels.

Parameters:

Name Type Description Default
expr Expr

The starting Expr object.

required

Returns:

Type Description
list[Expr]

list[Expr]: A list of profile expressions from leaf-level Expr objects.

Source code in framcore/expressions/Expr.py
def get_profile_exprs_from_leaf_levels(expr: Expr) -> list[Expr]:
    """
    Get all profile expressions from leaf-level Expr objects that are marked as levels.

    Args:
        expr (Expr): The starting Expr object.

    Returns:
        list[Expr]: A list of profile expressions from leaf-level Expr objects.

    """
    profile_exprs = []

    def _traverse(expr: Expr) -> None:
        if expr.is_leaf():
            if expr.is_level() and expr.get_profile() is not None:
                profile_exprs.append(expr.get_profile())
            return

        # Recursively traverse the arguments of the expression
        _, args = expr.get_operations(expect_ops=False, copy_list=False)
        for arg in args:
            _traverse(arg)

    _traverse(expr)
    return profile_exprs
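
In contrast to get_leaf_profiles above, this collects the profiles attached to leaf levels. A minimal sketch (import path assumed, reference strings are placeholders):

from framcore.expressions import Expr, get_profile_exprs_from_leaf_levels  # assumed import path

profile = Expr(src="wind_profile", is_profile=True)
level = Expr(src="wind_capacity", is_level=True, profile=profile)

# The level is a leaf with an attached profile, so that profile is collected.
assert get_profile_exprs_from_leaf_levels(level) == [profile]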

get_profile_vector(expr: Expr, db: QueryDB | Model, data_dim: SinglePeriodTimeIndex, scen_dim: FixedFrequencyTimeIndex, is_zero_one: bool, is_float32: bool = True) -> NDArray

Evaluate expr representing a (possibly aggregated) profile.

expr = sum(weight[i] * profile[i]) where

- weight[i] >= 0, is unitless, and will be evaluated as a constant
- profile[i] is a unitless profile expr, of either the "zero_one" or the "mean_one" profile type

"zero_one" and "mean_one" profiles must be converted to the same standard to be added correctly.

The query parameters data_dim and scen_dim are used to evaluate the values at the requested TimeIndex for the data and scenario dimensions, with the requested reference period.

weight[i] will be evaluated as level Exprs at data_dim (with reference period of scen_dim), and profile Exprs as an average over scen_dim (both as constants).

profile[i] will be evaluated as profile vectors over scen_dim.

The query parameter is_zero_one tells which profile type the output vector should be converted to.
Source code in framcore/expressions/queries.py
def get_profile_vector(
    expr: Expr,
    db: QueryDB | Model,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    is_zero_one: bool,
    is_float32: bool = True,
) -> NDArray:
    """
    Evaluate expr representing a (possibly aggregated) profile.

    expr = sum(weight[i] * profile[i]) where

        weight[i] >= 0 and is unitless, and will be evaluated as a constant
        profile[i] is a unitless profile expr

        profile[i] is either "zero_one" or "mean_one" type of profile

        "zero_one" and "mean_one" profile type must be converted to the
        same standard to be added correctly.

        The query parameters data_dim and scen_dim are used to evaluate the values
        at the requested TimeIndex for the data and scenario dimensions, with the requested reference period.

        weight[i] will be evaluated as level Exprs at data_dim (with reference period of scen_dim),
        and profile Exprs as an average over scen_dim (both as constants)

        profile[i] will be evaluated as profile vectors over scen_dim

        The query parameter is_zero_one tells which profile type the output
        vector should be converted to.
    """
    # Argument expr checked in _get_profile_vector since it can be recursively called.
    check_type(data_dim, SinglePeriodTimeIndex)
    check_type(scen_dim, FixedFrequencyTimeIndex)
    check_type(is_zero_one, bool)
    check_type(is_float32, bool)
    db = _load_model_and_create_model_db(db)

    return _get_profile_vector(expr, db, data_dim, scen_dim, is_zero_one, is_float32)

get_timeindexes_from_expr(db: QueryDB | Model, expr: Expr) -> set[TimeIndex]

Find all timeindexes behind an expression.

Useful for optimized queries (not asking for more data than necessary).

Source code in framcore/expressions/queries.py
def get_timeindexes_from_expr(db: QueryDB | Model, expr: Expr) -> set[TimeIndex]:
    """
    Find all timeindexes behind an expression.

    Useful for optimized queries (not asking for more data than necessary).
    """
    db = _load_model_and_create_model_db(db)

    timeindexes: set[TimeIndex] = set()

    _recursively_update_timeindexes(timeindexes, db, expr)

    return timeindexes

get_unit_conversion_factor(from_unit: str | None, to_unit: str | None) -> float

Get the conversion factor from one unit to another.

Source code in framcore/expressions/units.py
def get_unit_conversion_factor(from_unit: str | None, to_unit: str | None) -> float:  # noqa C901
    """Get the conversion factor from one unit to another."""
    if from_unit == to_unit:
        return 1.0

    if from_unit is None or to_unit is None:
        return _get_unit_conversion_factor_with_none(from_unit, to_unit)

    fastpath = _fastpath_get_unit_conversion_factor(from_unit, to_unit)

    if _DEBUG is False and fastpath is not None:
        return fastpath

    if fastpath is None:
        has_multiplier = False
        with contextlib.suppress(Exception):
            ix = from_unit.index("*")
            multiplier = float(from_unit[:ix])
            base_from_unit = from_unit[ix + 1 :].strip()
            has_multiplier = True

        if has_multiplier:
            fastpath = _fastpath_get_unit_conversion_factor(base_from_unit, to_unit)
            fastpath = fastpath if fastpath is None else fastpath * multiplier
            if _DEBUG is False and fastpath is not None:
                return fastpath

    if _COLLECT_FASTPATH_DATA and fastpath is None:
        if has_multiplier:
            _OBSERVED_UNIT_CONVERSIONS.add((base_from_unit, to_unit))
        else:
            _OBSERVED_UNIT_CONVERSIONS.add((from_unit, to_unit))

    fallback = _fallback_get_unit_conversion_factor(from_unit, to_unit)

    if _DEBUG and fastpath is not None and fallback != fastpath:
        message = f"Different results!\nfrom_unit {from_unit} to_unit {to_unit}\nfastpath {fastpath} fallback {fallback}"
        raise RuntimeError(message)

    if _unit_has_no_floats(from_unit) and _unit_has_no_floats(to_unit):
        _FASTPATH_CONVERSION_FACTORS[(from_unit, to_unit)] = fallback

    return fallback

get_units_from_expr(db: QueryDB | Model, expr: Expr) -> set[str]

Find all units behind an expression. Useful for queries involving conversion factors.

Source code in framcore/expressions/queries.py
def get_units_from_expr(db: QueryDB | Model, expr: Expr) -> set[str]:
    """Find all units behind an expression. Useful for queries involving conversion factors."""
    db = _load_model_and_create_model_db(db)

    units: set[str] = set()

    _recursively_update_units(units, db, expr)

    return units

is_convertable(unit_from: str, unit_to: str) -> bool

Return True if from_unit can be converted to to_unit else False.

Source code in framcore/expressions/units.py
def is_convertable(unit_from: str, unit_to: str) -> bool:
    """Return True if from_unit can be converted to to_unit else False."""
    with contextlib.suppress(Exception):
        get_unit_conversion_factor(unit_from, unit_to)
        return True
    return False
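
A short sketch of the two helpers (import path assumed; the unit strings are examples we assume the SymPy fallback understands, not a documented list):

from framcore.expressions import get_unit_conversion_factor, is_convertable  # assumed import path

assert get_unit_conversion_factor("MW", "MW") == 1.0  # identical units short-circuit to 1.0
print(get_unit_conversion_factor("GWh", "MWh"))       # expected: 1000.0

print(is_convertable("MW", "EUR"))  # False: the conversion raises, so this returns False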

validate_unit_conversion_fastpaths() -> bool

Run-Time validation of fastpaths.

Source code in framcore/expressions/units.py
def validate_unit_conversion_fastpaths() -> bool:
    """Run-Time validation of fastpaths."""
    errors = []
    for (from_unit, to_unit), result in _FASTPATH_CONVERSION_FACTORS.items():
        sympy_result = None
        with contextlib.suppress(Exception):
            sympy_result = _fallback_get_unit_conversion_factor(from_unit, to_unit)
        if result != sympy_result:
            message = f"'{from_unit}' to '{to_unit}' failed. Fastpath: {result}, SymPy: {sympy_result}"
            errors.append(message)
    for from_unit, to_unit in _FASTPATH_INCOMPATIBLE_CONVERSIONS:
        with contextlib.suppress(Exception):
            sympy_result = _fallback_get_unit_conversion_factor(from_unit, to_unit)
            message = f"'{from_unit}' to '{to_unit}'. Fastpath claim incompatible units, but SymPy fallback returned {sympy_result}"
            errors.append(message)
    if errors:
        message = "\n".join(errors)
        raise RuntimeError(message)

Expr

Expr

Bases: Base

Mathematical expression with TimeVectors and Curves to represent Levels and Profiles in LevelProfiles.

The simplest Expr is a single TimeVector, while a more complicated expression could be a weighted average of several TimeVectors or Expressions. Expr can also have string references to Expr, TimeVector or Curve in a database (often Model).

Expr are classified as Stock, Flow or None of them. See https://en.wikipedia.org/wiki/Stock_and_flow. In FRAM we only support Flow data as a rate of change. So, for example, a production timeseries has to be in MW, and not in MWh. Converting between the two versions of Flow would add another level of complexity both in Expr and in TimeVector operations.

Expr are also classified as Level, Profile or none of them. This classification, together with Stock or Flow, is used to check whether built Expr are legal operations.

- Expr that are Level can contain their connected Profile Expr. This is used in the queries to evaluate Levels according to their ReferencePeriod, and to convert between Level formats (max level or average level, see LevelProfile for more details).

Calculations using Expr are evaluated lazily, reducing unnecessary numerical operations during data manipulation. Computations involving values and units occur only when the Expr is queried.

We only support calculations using +, -, *, and / in Expr, and we have no plans to change this. Expanding beyond these would turn Expr into a complex programming language rather than keeping it as a simple and efficient system for common time-series calculations. More advanced operations are still possible through eager evaluation, so this is not a limitation. It simply distributes responsibilities across system components in a way that is practical from a maintenance perspective.

We use SymPy to support unit conversions. Already computed unit conversion factors are cached to minimize redundant calculations.

At the moment we support these queries for Expr (see Aggregators for more about how they are used):

- get_level_value(expr, db, unit, data_dim, scen_dim, is_max)
    - Supports all expressions. Will evaluate level Exprs at data_dim (with reference period of scen_dim), and profile Exprs as an average over scen_dim (both as constants).
    - Has optimized fastpath methods for sums, products and aggregations. The rest uses a fallback method with SymPy.
- get_profile_vector(expr, db, data_dim, scen_dim, is_zero_one, is_float32)
    - Supports expr = sum(weight[i] * profile[i]) where weight[i] is a unitless constant Expr with value >= 0, and profile[i] is a unitless profile Expr.

Source code in framcore/expressions/Expr.py
class Expr(Base):
    """
    Mathematical expression with TimeVectors and Curves to represent Levels and Profiles in LevelProfiles.

    The simplest Expr is a single TimeVector, while a more complicated expression could be a weighted average of several TimeVectors or Expressions.
    Expr can also have string references to Expr, TimeVector or Curve in a database (often Model).

    Expr are classified as Stock, Flow or None of them. See https://en.wikipedia.org/wiki/Stock_and_flow. In FRAM we only support Flow data as a rate of change.
    So, for example, a production timeseries has to be in MW, and not in MWh. Converting between the two versions of Flow would add another
    level of complexity both in Expr and in TimeVector operations.

    Expr are also classified as Level, Profile or none of them. This classification, together with Stock or Flow,
    is used to check if the built Expr are legal operations.
    - Expr that are Level can contain their connected Profile Expr. This is used in the queries to evaluate Levels according to their ReferencePeriod, and
        convert between Level formats (max level or average level, see LevelProfile for more details).

    Calculations using Expr are evaluated lazily, reducing unnecessary numerical operations during data manipulation.
    Computations involving values and units occur only when the Expr is queried.

    We only support calculations using +, -, *, and / in Expr, and we have no plans to change this.
    Expanding beyond these would turn Expr into a complex programming language rather than keeping it as a simple
    and efficient system for common time-series calculations. More advanced operations are still possible through eager evaluation, so this is not a limitation.
    It simply distributes responsibilities across system components in a way that is practical from a maintenance perspective.

    We use SymPy to support unit conversions. Already computed unit conversion factors are cached to minimize redundant calculations.

    At the moment we support these queries for Expr (see Aggregators for more about how they are used):
    - get_level_value(expr, db, unit, data_dim, scen_dim, is_max)
        - Supports all expressions. Will evaluate level Exprs at data_dim (with reference period of scen_dim),
            and profile Exprs as an average over scen_dim (both as constants).
        - Has optimized fastpath methods for sums, products and aggregations. The rest uses a fallback method with SymPy.
    - get_profile_vector(expr, db, data_dim, scen_dim, is_zero_one, is_float32)
        - Supports expr = sum(weight[i] * profile[i]) where weight[i] is a unitless constant Expr with value >= 0, and profile[i] is a unitless profile Expr.

    """

    def __init__(
        self,
        src: str | Curve | TimeVector | None = None,
        is_stock: bool = False,
        is_flow: bool = False,
        is_profile: bool = False,
        is_level: bool = False,
        profile: Expr | None = None,
        operations: tuple[str, list[Expr]] | None = None,
    ) -> None:
        """
        Create new (immutable) Expression.

        Args:
            src (str | Curve | TimeVector | None, optional): Source of the values to be used in the Expression. Either a Curve or TimeVector object,
              or a reference to one of them. Defaults to None.
            is_stock (bool, optional): Flag to signify if the Expr represents a stock type variable. Defaults to False.
            is_flow (bool, optional): Flag to signify if the Expr represents a flow type variable. Defaults to False.
            is_profile (bool, optional): Flag to signify if the Expr represents a profile. Defaults to False.
            is_level (bool, optional): Flag to signify if the Expr represents a level. Defaults to False.
            profile (Expr | None, optional): Expr that are Level can contain its connected Profile Expr. This is used in the queries to evaluate
                Levels according to their ReferencePeriod, and convert between Level formats (max level or average level, see LevelProfile for more details).
            operations (tuple[str, list[Expr]] | None, optional): Operations to apply to the expression. Defaults to None.

        """
        if is_level and is_profile:
            message = "Expr cannot be both level and a profile. Set either is_level or is_profile True or both False."
            raise ValueError(message)

        if is_flow and is_stock:
            message = "Expr cannot be both flow and stock. Set either is_flow or is_stock True or both False."
            raise ValueError(message)

        if is_profile and (is_flow or is_stock):
            message = "Expr cannot be both a profile and a flow/stock. Profiles must be coefficients."
            raise ValueError(message)

        self._src: str | Curve | TimeVector | None = src
        self._is_stock = is_stock
        self._is_flow = is_flow
        self._is_profile = is_profile
        self._is_level = is_level
        self._profile = profile

        # have to come after setting fields
        # because fields are used to create
        # error messages e.g. in __repr__

        self._check_type(src, (str, Curve, TimeVector, type(None)))
        self._check_type(is_stock, (bool, type(None)))
        self._check_type(is_flow, (bool, type(None)))
        self._check_type(is_level, (bool, type(None)))
        self._check_type(is_profile, (bool, type(None)))
        self._check_type(profile, (Expr, type(None)))

        self._check_operations(operations)
        if operations is None:
            operations = "", []
        self._operations: tuple[str, list[Expr]] = operations

    def _check_operations(self, operations: tuple[str, list[Expr]] | None, expect_ops: bool = False) -> None:
        if operations is None:
            return
        self._check_type(operations, tuple)
        if len(operations) != 2:  # noqa: PLR2004
            message = f"Expected len(operations) == 2. Got: {operations}"
            raise ValueError(message)
        ops, args = operations
        self._check_type(ops, str)
        self._check_type(args, list)
        if ops == "":
            if expect_ops:
                message = f"Expected ops, but got {operations}"
                raise ValueError(message)
            if len(args) > 0:
                message = f"Expected ops to have length. Got {operations}"
                raise ValueError(message)
            return
        if len(ops) != len(args) - 1:
            message = f"Expected len(ops) == len(args) - 1. Got {operations}"
            raise ValueError(message)
        for op in ops:
            if op not in "+-/*":
                message = f"Expected all op in ops in +-*/. Got {operations}"
                raise ValueError(message)
        for ex in args:
            self._check_type(ex, Expr)

    def get_fingerprint(self) -> Fingerprint:
        """Return fingerprint."""
        fingerprint = Fingerprint(self)
        fingerprint.add("is_stock", self._is_stock)
        fingerprint.add("is_flow", self._is_flow)
        fingerprint.add("is_profile", self._is_profile)
        fingerprint.add("is_level", self._is_level)
        fingerprint.add("profile", self._profile)
        if self._src:
            fingerprint.add("src", self._src.get_fingerprint() if isinstance(self._src, TimeVector) else FingerprintRef(self._src))
        fingerprint.add("operations", self._operations)
        return fingerprint

    def is_leaf(self) -> bool:
        """Return True if self is not an operation expression."""
        return self._src is not None

    def get_src(self) -> str | Curve | TimeVector | None:
        """Return str, Curve or TimeVector (either reference to Curve/TimeVector or Curve/TimeVector itself) or None if self is an operation expression."""
        return self._src

    def get_operations(self, expect_ops: bool, copy_list: bool) -> tuple[str, list[Expr]]:
        """Return ops, args. Users of this (low level) API must supply expect_ops and copy_list args."""
        self._check_type(copy_list, bool)
        self._verify_operations(expect_ops)
        if copy_list:
            ops, args = self._operations
            return ops, copy(args)
        return self._operations

    def _verify_operations(self, expect_ops: bool = False) -> None:
        self._check_operations(self._operations, expect_ops)
        ops = self._operations[0]

        if not ops:
            return

        has_add = "+" in ops
        has_sub = "-" in ops
        has_mul = "*" in ops
        has_div = "/" in ops

        if (has_add or has_sub) and (has_mul or has_div):
            message = f"Found +- in same operation level as */ in operations {self._operations} "
            raise ValueError(message)

        if has_div:
            seen_div = False
            for op in ops:
                if op == "/":
                    seen_div = True
                    continue
                if seen_div and op != "/":
                    message = f"Found +-* after / in operations {self._operations}"
                    raise ValueError(message)

    def is_flow(self) -> bool:
        """Return True if flow. Cannot be stock and flow."""
        return self._is_flow

    def is_stock(self) -> bool:
        """Return True if stock. Cannot be stock and flow."""
        return self._is_stock

    def is_level(self) -> bool:
        """Return True if level. Cannot be level and profile."""
        return self._is_level

    def is_profile(self) -> bool:
        """Return True if profile. Cannot be level and profile."""
        return self._is_profile

    def get_profile(self) -> Expr | None:
        """Return Expr representing profile. Implies self.is_level() is True."""
        return self._profile

    def set_profile(self, profile: Expr | None) -> None:
        """Set the profile of the Expr. Implies self.is_level() is True."""
        if not self.is_level():
            raise ValueError("Cannot set profile on Expr that is not a level.")
        self._profile = profile

    def _analyze_op(self, op: str, other: Expr) -> tuple[bool, bool, bool, bool, Expr | None]:
        flow = (True, False)
        stock = (False, True)
        level = (True, False)
        profile = (False, True)
        none = (False, False)

        supported_cases = {
            # all op supported for none
            ("+", none, none, none, none): (none, none, None),
            ("-", none, none, none, none): (none, none, None),
            ("*", none, none, none, none): (none, none, None),
            ("/", none, none, none, none): (none, none, None),
            # + flow level
            ("+", flow, level, flow, level): (flow, level, None),
            # * flow level
            ("*", flow, level, none, none): (flow, level, self.get_profile()),
            ("*", none, none, flow, level): (flow, level, other.get_profile()),
            # / flow level
            ("/", flow, level, none, none): (flow, level, self.get_profile()),
            ("/", flow, level, flow, level): (none, none, None),
            # + stock level
            ("+", stock, level, stock, level): (stock, level, None),
            # * stock level
            ("*", stock, level, none, level): (stock, level, None),
            ("*", none, level, stock, level): (stock, level, None),
            ("*", stock, level, none, none): (stock, level, self.get_profile()),
            ("*", none, none, stock, level): (stock, level, other.get_profile()),
            # / stock level
            ("/", stock, level, none, level): (stock, level, None),
            ("/", stock, level, none, none): (stock, level, self.get_profile()),
            ("/", stock, level, stock, level): (none, none, None),
            # level * level ok if one is flow (i.e. price * volume) or none (co2_eff / eff)
            ("*", flow, level, none, level): (flow, level, None),
            ("*", none, level, flow, level): (flow, level, None),
            ("/", flow, level, none, level): (flow, level, None),
            ("/", none, level, none, level): (none, level, None),
            ("*", none, level, none, level): (none, level, None),
            # profile
            ("+", none, profile, none, profile): (none, profile, None),
            ("-", none, profile, none, profile): (none, profile, None),
            ("/", none, profile, none, none): (none, profile, None),
            ("*", none, profile, none, none): (none, profile, None),
            ("*", none, none, none, profile): (none, profile, None),
            ("/", none, none, none, profile): (none, profile, None),
            # level
            ("+", none, level, none, level): (none, level, None),
            ("-", none, level, none, level): (none, level, None),
            ("/", none, level, none, none): (none, level, self.get_profile()),
            ("*", none, level, none, none): (none, level, self.get_profile()),
            ("*", none, none, none, level): (none, level, other.get_profile()),
            ("/", none, none, none, level): (none, level, other.get_profile()),
        }

        case = (
            op,
            (self.is_flow(), self.is_stock()),
            (self.is_level(), self.is_profile()),
            (other.is_flow(), other.is_stock()),
            (other.is_level(), other.is_profile()),
        )

        if case not in supported_cases:
            printable_case = {
                "op": case[0],
                "self_is_flow": case[1][0],
                "self_is_stock": case[1][1],
                "self_is_level": case[2][0],
                "self_is_profile": case[2][1],
                "other_is_flow": case[3][0],
                "other_is_stock": case[3][1],
                "other_is_level": case[4][0],
                "other_is_profile": case[4][1],
            }
            message = f"Unsupported case:\n{printable_case}\nexpression:\n{self} {op} {other}."
            raise ValueError(message)

        ((is_flow, is_stock), (is_level, is_profile), profile) = supported_cases[case]

        return is_stock, is_flow, is_level, is_profile, profile

    @staticmethod
    def _is_number(src: str) -> bool:
        try:
            float(src)
            return True
        except ValueError:
            return False

    def _create_op_expr(  # noqa: C901
        self,
        op: str,
        other: Expr | int | float,
        is_rhs: bool,
    ) -> Expr:
        if isinstance(other, Expr):
            is_stock, is_flow, is_level, is_profile, profile = self._analyze_op(op, other)

            x, y = (other, self) if is_rhs else (self, other)

            xisconst = isinstance(x.get_src(), ConstantTimeVector)
            yisconst = isinstance(y.get_src(), ConstantTimeVector)
            if xisconst and yisconst:
                xtv = x.get_src()
                ytv = y.get_src()
                is_combinable_tv = (
                    xtv.get_unit() == ytv.get_unit()
                    and xtv.is_max_level() == ytv.is_max_level()
                    and xtv.is_zero_one_profile() == ytv.is_zero_one_profile()
                    and xtv.get_reference_period() == ytv.get_reference_period()
                )
                if is_combinable_tv:
                    is_combinable_expr = (
                        x.is_level() == y.is_level()
                        and x.is_profile() == y.is_profile()
                        and x.is_flow() == y.is_flow()
                        and x.is_stock() == y.is_stock()
                        and x.get_profile() == y.get_profile()
                    )
                    if is_combinable_expr:
                        xscalar = xtv.get_vector(is_float32=True)[0]
                        yscalar = ytv.get_vector(is_float32=True)[0]
                        if op == "+":
                            scalar = xscalar + yscalar
                        elif op == "-":
                            scalar = xscalar - yscalar
                        elif op == "*":
                            scalar = xscalar * yscalar
                        elif op == "/":
                            scalar = xscalar / yscalar
                        return Expr(
                            src=ConstantTimeVector(
                                scalar=scalar,
                                unit=xtv.get_unit(),
                                is_max_level=xtv.is_max_level(),
                                is_zero_one_profile=xtv.is_zero_one_profile(),
                                reference_period=xtv.get_reference_period(),
                            ),
                            is_stock=x.is_stock(),
                            is_flow=x.is_flow(),
                            is_profile=x.is_profile(),
                            is_level=x.is_level(),
                            profile=x.get_profile(),
                            operations=None,
                        )

            ops, args = x.get_operations(expect_ops=False, copy_list=True)

            if not ops:
                ops = op
                args = [x, y]
            else:
                last_op = ops[-1]
                if last_op == op or (op in "+-" and last_op in "+-") or (last_op == "*" and op == "/"):
                    ops = f"{ops}{op}"
                    args.append(y)
                else:
                    ops = op
                    args = [x, y]

            return Expr(
                src=None,
                is_flow=is_flow,
                is_stock=is_stock,
                is_level=is_level,
                is_profile=is_profile,
                profile=profile,
                operations=(ops, args),
            )

        if self._is_number(other):
            if op in "*/":
                other_expr = Expr(src=ConstantTimeVector(float(other), is_max_level=False))
                return self._create_op_expr(op=op, other=other_expr, is_rhs=is_rhs)

            if op in "+" and other == 0:  # Comes from sum(expr_list). See sum() noqa
                return self  # TODO: Also accept 0 - Expr and -Expr?

            message = f"Only support multiplication and division with numbers, got {op} and {other}."
            raise ValueError(message)

        message = f"Only support Expr, int, float. Got unsupported type {type(other).__name__}."
        raise TypeError(message)

    def __add__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("+", other, is_rhs=False)

    def __sub__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("-", other, is_rhs=False)

    def __mul__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("*", other, is_rhs=False)

    def __truediv__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("/", other, is_rhs=False)

    def __radd__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("+", other, is_rhs=True)

    def __rsub__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("-", other, is_rhs=True)

    def __rmul__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("*", other, is_rhs=True)

    def __rtruediv__(self, other: object) -> Expr:  # noqa: D105
        return self._create_op_expr("/", other, is_rhs=True)

    def __repr__(self) -> str:
        """Represent Expr as str."""
        if self._src is not None:
            return f"Expr({self._src})"
        ops, args = self.get_operations(expect_ops=True, copy_list=False)
        out = f"{args[0]}"
        for op, arg in zip(ops, args[1:], strict=True):
            out = f"{out} {op} {arg}"
        return f"Expr({out})"

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if self and other are equal."""
        if not isinstance(other, type(self)):
            return False
        return (
            self._is_flow == other._is_flow
            and self._is_level == other._is_level
            and self._src == other._src
            and self._is_stock == other._is_stock
            and self._is_profile == other._is_profile
            and self._profile == other._profile
            and self._operations[0] == other._operations[0]
            and len(self._operations[1]) == len(other._operations[1])
            and all([self._operations[1][i] == other._operations[1][i] for i in range(len(self._operations[1]))])  # noqa: SLF001
        )

    def __hash__(self) -> int:
        """Compute hash value.."""
        return hash(
            (
                self._is_flow,
                self._is_stock,
                self._is_level,
                self._is_profile,
                self._src,
                self._profile,
                self._operations[0],
                tuple(self._operations[1]),
            ),
        )

    def add_loaders(self, loaders: set[Loader]) -> None:
        """Add all loaders stored in TimeVector or Curve within Expr to loaders."""
        if self.is_leaf():
            src = self.get_src()
            if isinstance(src, TimeVector | LoadedCurve):
                loader = src.get_loader()
                if loader is not None:
                    loaders.add(loader)
            return
        __, args = self.get_operations(expect_ops=True, copy_list=False)
        for arg in args:
            arg.add_loaders(loaders)
__eq__(other) -> bool

Check if self and other are equal.

Source code in framcore/expressions/Expr.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if self and other are equal."""
    if not isinstance(other, type(self)):
        return False
    return (
        self._is_flow == other._is_flow
        and self._is_level == other._is_level
        and self._src == other._src
        and self._is_stock == other._is_stock
        and self._is_profile == other._is_profile
        and self._profile == other._profile
        and self._operations[0] == other._operations[0]
        and len(self._operations[1]) == len(other._operations[1])
        and all([self._operations[1][i] == other._operations[1][i] for i in range(len(self._operations[1]))])  # noqa: SLF001
    )
__hash__() -> int

Compute hash value.

Source code in framcore/expressions/Expr.py
def __hash__(self) -> int:
    """Compute hash value.."""
    return hash(
        (
            self._is_flow,
            self._is_stock,
            self._is_level,
            self._is_profile,
            self._src,
            self._profile,
            self._operations[0],
            tuple(self._operations[1]),
        ),
    )
__init__(src: str | Curve | TimeVector | None = None, is_stock: bool = False, is_flow: bool = False, is_profile: bool = False, is_level: bool = False, profile: Expr | None = None, operations: tuple[str, list[Expr]] | None = None) -> None

Create new (immutable) Expression.

Parameters:

Name Type Description Default
src str | Curve | TimeVector | None

Source of the values to be used in the Expression. Either a Curve or TimeVector object, or a reference to one of them. Defaults to None.

None
is_stock bool

Flag to signify if the Expr represents a stock type variable. Defaults to False.

False
is_flow bool

Flag to signify if the Expr represents a flow type variable. Defaults to False.

False
is_profile bool

Flag to signify if the Expr represents a profile. Defaults to False.

False
is_level bool

Flag to signify if the Expr represents a level. Defaults to False.

False
profile Expr | None

An Expr that is a level can contain its connected profile Expr. This is used in the queries to evaluate levels according to their ReferencePeriod, and to convert between level formats (max level or average level, see LevelProfile for more details).

None
operations tuple[str, list[Expr]] | None

Operations to apply to the expression. Defaults to None.

None
Source code in framcore/expressions/Expr.py
def __init__(
    self,
    src: str | Curve | TimeVector | None = None,
    is_stock: bool = False,
    is_flow: bool = False,
    is_profile: bool = False,
    is_level: bool = False,
    profile: Expr | None = None,
    operations: tuple[str, list[Expr]] | None = None,
) -> None:
    """
    Create new (immutable) Expression.

    Args:
        src (str | Curve | TimeVector | None, optional): Source of the values to be used in the Expression. Either a Curve or TimeVector object,
          or a reference to one of them. Defaults to None.
        is_stock (bool, optional): Flag to signify if the Expr represents a stock type variable. Defaults to False.
        is_flow (bool, optional): Flag to signify if the Expr represents a flow type variable. Defaults to False.
        is_profile (bool, optional): Flag to signify if the Expr represents a profile. Defaults to False.
        is_level (bool, optional): Flag to signify if the Expr represents a level. Defaults to False.
        profile (Expr | None, optional): An Expr that is a level can contain its connected profile Expr. This is used in the queries to evaluate
            levels according to their ReferencePeriod, and to convert between level formats (max level or average level, see LevelProfile for more details).
        operations (tuple[str, list[Expr]] | None, optional): Operations to apply to the expression. Defaults to None.

    """
    if is_level and is_profile:
        message = "Expr cannot be both level and a profile. Set either is_level or is_profile True or both False."
        raise ValueError(message)

    if is_flow and is_stock:
        message = "Expr cannot be both flow and stock. Set either is_flow or is_stock True or both False."
        raise ValueError(message)

    if is_profile and (is_flow or is_stock):
        message = "Expr cannot be both a profile and a flow/stock. Profiles must be coefficients."
        raise ValueError(message)

    self._src: str | Curve | TimeVector | None = src
    self._is_stock = is_stock
    self._is_flow = is_flow
    self._is_profile = is_profile
    self._is_level = is_level
    self._profile = profile

    # have to come after setting fields
    # because fields are used to create
    # error messages e.g. in __repr__

    self._check_type(src, (str, Curve, TimeVector, type(None)))
    self._check_type(is_stock, (bool, type(None)))
    self._check_type(is_flow, (bool, type(None)))
    self._check_type(is_level, (bool, type(None)))
    self._check_type(is_profile, (bool, type(None)))
    self._check_type(profile, (Expr, type(None)))

    self._check_operations(operations)
    if operations is None:
        operations = "", []
    self._operations: tuple[str, list[Expr]] = operations
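A minimal construction sketch (the import paths and the ConstantTimeVector constructor follow their usage elsewhere in this reference, but are assumptions; "wind_profile" is a hypothetical reference key):

from framcore.expressions import Expr
from framcore.timevectors import ConstantTimeVector

# A profile Expr referring to a hypothetical time vector in the database.
profile = Expr(src="wind_profile", is_profile=True)

# A flow level with a constant value, linked to its profile.
level = Expr(
    src=ConstantTimeVector(100.0, unit="MW", is_max_level=True),
    is_flow=True,
    is_level=True,
    profile=profile,
)

assert level.is_leaf()
assert level.get_profile() is profile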
__repr__() -> str

Represent Expr as str.

Source code in framcore/expressions/Expr.py
def __repr__(self) -> str:
    """Represent Expr as str."""
    if self._src is not None:
        return f"Expr({self._src})"
    ops, args = self.get_operations(expect_ops=True, copy_list=False)
    out = f"{args[0]}"
    for op, arg in zip(ops, args[1:], strict=True):
        out = f"{out} {op} {arg}"
    return f"Expr({out})"
add_loaders(loaders: set[Loader]) -> None

Add all loaders stored in TimeVector or Curve within Expr to loaders.

Source code in framcore/expressions/Expr.py
def add_loaders(self, loaders: set[Loader]) -> None:
    """Add all loaders stored in TimeVector or Curve within Expr to loaders."""
    if self.is_leaf():
        src = self.get_src()
        if isinstance(src, TimeVector | LoadedCurve):
            loader = src.get_loader()
            if loader is not None:
                loaders.add(loader)
        return
    __, args = self.get_operations(expect_ops=True, copy_list=False)
    for arg in args:
        arg.add_loaders(loaders)
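A hedged usage sketch (my_expr and the loader handling are placeholders):

loaders = set()               # will be filled with Loader objects
my_expr.add_loaders(loaders)  # walks leaves and operation args alike
for loader in loaders:
    ...  # e.g. warm up file-backed sources before running queries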
get_fingerprint() -> Fingerprint

Return fingerprint.

Source code in framcore/expressions/Expr.py
def get_fingerprint(self) -> Fingerprint:
    """Return fingerprint."""
    fingerprint = Fingerprint(self)
    fingerprint.add("is_stock", self._is_stock)
    fingerprint.add("is_flow", self._is_flow)
    fingerprint.add("is_profile", self._is_profile)
    fingerprint.add("is_level", self._is_level)
    fingerprint.add("profile", self._profile)
    if self._src:
        fingerprint.add("src", self._src.get_fingerprint() if isinstance(self._src, TimeVector) else FingerprintRef(self._src))
    fingerprint.add("operations", self._operations)
    return fingerprint
get_operations(expect_ops: bool, copy_list: bool) -> tuple[str, list[Expr]]

Return ops, args. Users of this (low level) API must supply expect_ops and copy_list args.

Source code in framcore/expressions/Expr.py
def get_operations(self, expect_ops: bool, copy_list: bool) -> tuple[str, list[Expr]]:
    """Return ops, args. Users of this (low level) API must supply expect_ops and copy_list args."""
    self._check_type(copy_list, bool)
    self._verify_operations(expect_ops)
    if copy_list:
        ops, args = self._operations
        return ops, copy(args)
    return self._operations
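The operator methods of Expr flatten compatible chains into a single (ops, args) pair, which get_operations exposes. A small sketch (a, b and c stand for compatible leaf Exprs, e.g. three profiles):

e = a + b - c
ops, args = e.get_operations(expect_ops=True, copy_list=True)
# ops == "+-" and args == [a, b, c]: one node holds the whole chain,
# because _create_op_expr appends to the last operation when kinds match.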
get_profile() -> Expr | None

Return Expr representing profile. Implies self.is_level() is True.

Source code in framcore/expressions/Expr.py
def get_profile(self) -> Expr | None:
    """Return Expr representing profile. Implies self.is_level() is True."""
    return self._profile
get_src() -> str | Curve | TimeVector | None

Return str, Curve or TimeVector (either reference to Curve/TimeVector or Curve/TimeVector itself) or None if self is an operation expression.

Source code in framcore/expressions/Expr.py
def get_src(self) -> str | Curve | TimeVector | None:
    """Return str, Curve or TimeVector (either reference to Curve/TimeVector or Curve/TimeVector itself) or None if self is an operation expression."""
    return self._src
is_flow() -> bool

Return True if flow. Cannot be stock and flow.

Source code in framcore/expressions/Expr.py
def is_flow(self) -> bool:
    """Return True if flow. Cannot be stock and flow."""
    return self._is_flow
is_leaf() -> bool

Return True if self is not an operation expression.

Source code in framcore/expressions/Expr.py
def is_leaf(self) -> bool:
    """Return True if self is not an operation expression."""
    return self._src is not None
is_level() -> bool

Return True if level. Cannot be level and profile.

Source code in framcore/expressions/Expr.py
def is_level(self) -> bool:
    """Return True if level. Cannot be level and profile."""
    return self._is_level
is_profile() -> bool

Return True if profile. Cannot be level and profile.

Source code in framcore/expressions/Expr.py
def is_profile(self) -> bool:
    """Return True if profile. Cannot be level and profile."""
    return self._is_profile
is_stock() -> bool

Return True if stock. Cannot be stock and flow.

Source code in framcore/expressions/Expr.py
def is_stock(self) -> bool:
    """Return True if stock. Cannot be stock and flow."""
    return self._is_stock
set_profile(profile: Expr | None) -> None

Set the profile of the Expr. Implies self.is_level() is True.

Source code in framcore/expressions/Expr.py
def set_profile(self, profile: Expr | None) -> None:
    """Set the profile of the Expr. Implies self.is_level() is True."""
    if not self.is_level():
        raise ValueError("Cannot set profile on Expr that is not a level.")
    self._profile = profile
ensure_expr(value: Expr | str | Curve | TimeVector | None, is_flow: bool = False, is_stock: bool = False, is_level: bool = False, is_profile: bool = False, profile: Expr | None = None) -> Expr | None

Ensure that the value is an expression of the expected type or create one if possible.

Parameters:

Name Type Description Default
value Expr | str | Curve | TimeVector | None

The value to check.

required
is_flow bool

If the Expr is a flow. Cannot be True if is_stock is True.

False
is_stock bool

If the Expr is a stock. Cannot be True if is_flow is True.

False
is_level bool

Whether the Expr represents a level. Cannot be True if is_profile is True.

False
is_profile bool

Whether the Expr represents a profile. Cannot be True if is_level is True.

False
profile Expr | None

If the Expr is a level, this should be its profile.

None

Returns:

Name Type Description
value Expr | None

The value as an expression of the expected type or None.

Source code in framcore/expressions/Expr.py
def ensure_expr(
    value: Expr | str | Curve | TimeVector | None,  # technically anything that can be converted to float. Typehint for this?
    is_flow: bool = False,
    is_stock: bool = False,
    is_level: bool = False,
    is_profile: bool = False,
    profile: Expr | None = None,
) -> Expr | None:
    """
    Ensure that the value is an expression of the expected type or create one if possible.

    Args:
        value (Expr | str | Curve | TimeVector | None): The value to check.
        is_flow (bool): If the Expr is a flow. Cannot be True if is_stock is True.
        is_stock (bool): If the Expr is a stock. Cannot be True if is_flow is True.
        is_level (bool): Whether the Expr represents a level. Cannot be True if is_profile is True.
        is_profile (bool): Whether the Expr represents a profile. Cannot be True if is_level is True.
        profile (Expr | None): If the Expr is a level, this should be its profile.

    Returns:
        value (Expr | None): The value as an expression of the expected type, or None.

    """
    if not isinstance(value, (str, Expr, Curve, TimeVector)) and value is not None:
        msg = f"Expected value to be of type Expr, str, Curve, TimeVector or None. Got {type(value).__name__}."
        raise TypeError(msg)

    if value is None:
        return None

    if isinstance(value, Expr):
        # Check wether given Expr matches expected flow, stock, profile and level status.
        if value.is_flow() != is_flow or value.is_stock() != is_stock or value.is_level() != is_level or value.is_profile() != is_profile:
            message = (
                "Given Expr has a mismatch between expected and actual flow/stock or level/profile status:\nExpected: "
                f"is_flow - {is_flow}, is_stock - {is_stock}, is_level - {is_level}, is_profile - {is_profile}\n"
                f"Actual: is_flow - {value.is_flow()}, is_stock - {value.is_stock()}, "
                f"is_level - {value.is_level()}, is_profile - {value.is_profile()}"
            )
            raise ValueError(message)
        return value

    return Expr(
        src=value,
        is_flow=is_flow,
        is_stock=is_stock,
        is_level=is_level,
        is_profile=is_profile,
        profile=profile,
    )
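Illustrative calls ("thermal_capacity" is a hypothetical reference key):

cap = ensure_expr("thermal_capacity", is_flow=True, is_level=True)  # str -> new Expr with the given flags
same = ensure_expr(cap, is_flow=True, is_level=True)                # Expr with matching flags is returned as-is
assert same is cap
assert ensure_expr(None) is None                                    # None passes through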
get_leaf_profiles(expr: Expr) -> list[Expr]

Get all leaf profile expressions from an Expr object.

Parameters:

Name Type Description Default
expr Expr

The starting Expr object.

required

Returns:

Type Description
list[Expr]

list[Expr]: A list of leaf profile expressions.

Source code in framcore/expressions/Expr.py
def get_leaf_profiles(expr: Expr) -> list[Expr]:
    """
    Get all leaf profile expressions from an Expr object.

    Args:
        expr (Expr): The starting Expr object.

    Returns:
        list[Expr]: A list of leaf profile expressions.

    """
    leaf_profiles = []

    def _traverse(expr: Expr) -> None:
        if expr.is_leaf():
            if expr.is_profile():
                leaf_profiles.append(expr)
            return

        # Recursively traverse the arguments of the expression
        _, args = expr.get_operations(expect_ops=False, copy_list=False)
        for arg in args:
            _traverse(arg)

    _traverse(expr)
    return leaf_profiles
get_profile_exprs_from_leaf_levels(expr: Expr) -> list[Expr]

Get all profile expressions from leaf-level Expr objects that are marked as levels.

Parameters:

Name Type Description Default
expr Expr

The starting Expr object.

required

Returns:

Type Description
list[Expr]

list[Expr]: A list of profile expressions from leaf-level Expr objects.

Source code in framcore/expressions/Expr.py
def get_profile_exprs_from_leaf_levels(expr: Expr) -> list[Expr]:
    """
    Get all profile expressions from leaf-level Expr objects that are marked as levels.

    Args:
        expr (Expr): The starting Expr object.

    Returns:
        list[Expr]: A list of profile expressions from leaf-level Expr objects.

    """
    profile_exprs = []

    def _traverse(expr: Expr) -> None:
        if expr.is_leaf():
            if expr.is_level() and expr.get_profile() is not None:
                profile_exprs.append(expr.get_profile())
            return

        # Recursively traverse the arguments of the expression
        _, args = expr.get_operations(expect_ops=False, copy_list=False)
        for arg in args:
            _traverse(arg)

    _traverse(expr)
    return profile_exprs
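A sketch contrasting the two traversal helpers (level_a and level_b are hypothetical leaf levels carrying profiles p_a and p_b):

lvl_expr = level_a + level_b
get_profile_exprs_from_leaf_levels(lvl_expr)  # [p_a, p_b] - profiles attached to leaf levels

prof_expr = 0.4 * p_a + 0.6 * p_b
get_leaf_profiles(prof_expr)                  # [p_a, p_b] - profile Exprs that are leaves themselves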

queries

get_level_value(expr: Expr, db: QueryDB | Model, unit: str | None, data_dim: SinglePeriodTimeIndex, scen_dim: FixedFrequencyTimeIndex, is_max: bool) -> float

Evaluate Expr representing a (possibly aggregated) level.

The following will be handled automatically for you:

- fetching from different data objects (from db)
- conversion to the requested unit
- query at the requested TimeIndex for the data and scenario dimensions, and with the requested reference period
- conversion to the requested level type (is_max or is_avg)

Supports all expressions. Will evaluate level Exprs at data_dim (with the reference period of scen_dim), and profile Exprs as an average over scen_dim (both as constants). Has optimized fastpath methods for sums, products and aggregations. The rest uses a SymPy-based fallback method.

Source code in framcore/expressions/queries.py
def get_level_value(
    expr: Expr,
    db: QueryDB | Model,
    unit: str | None,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    is_max: bool,
) -> float:
    """
    Evaluate Expr representing a (possibly aggregated) level.

    The following will be handled automatically for you:
    - fetching from different data objects (from db)
    - conversion to requested unit
    - query at requested TimeIndex for data and scenario dimension, and with requested reference period
    - conversion to requested level type (is_max or is_avg)

    Supports all expressions. Will evaluate level Exprs at data_dim (with reference period of scen_dim),
    and profile Exprs as an average over scen_dim (both as constants). Has optimized fastpath methods for sums, products and aggregations.
    The rest uses a fallback method with SymPy.

    """
    check_type(expr, Expr)  # check expr here since _get_level_value is not recursively called.
    check_type(unit, (str, type(None)))
    check_type(data_dim, SinglePeriodTimeIndex)
    check_type(scen_dim, FixedFrequencyTimeIndex)
    check_type(is_max, bool)
    db = _load_model_and_create_model_db(db)

    return _get_level_value(expr, db, unit, data_dim, scen_dim, is_max)
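A hedged usage sketch (model, the time indexes and demand_expr are placeholders, not part of this API):

value = get_level_value(
    expr=demand_expr,         # a level Expr, possibly an aggregation
    db=model,                 # Model or QueryDB
    unit="GWh",               # convert the result to GWh (assumed unit string)
    data_dim=data_period,     # SinglePeriodTimeIndex
    scen_dim=scenario_index,  # FixedFrequencyTimeIndex
    is_max=False,             # request the average level rather than the max level
)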
get_profile_vector(expr: Expr, db: QueryDB | Model, data_dim: SinglePeriodTimeIndex, scen_dim: FixedFrequencyTimeIndex, is_zero_one: bool, is_float32: bool = True) -> NDArray

Evaluate expr representing a (possibly aggregated) profile.

expr = sum(weight[i] * profile[i]) where

weight[i] >= 0, is unitless, and will be evaluated as a constant
profile[i] is a unitless profile expr

profile[i] is either a "zero_one" or a "mean_one" type of profile

"zero_one" and "mean_one" profiles must be converted to the same standard before they can be added correctly.

The query parameters data_dim and scen_dim specify the requested TimeIndex for the data and scenario dimensions, and the requested reference period.

weight[i] will be evaluated as a constant: level Exprs at data_dim (with the reference period of scen_dim), and profile Exprs as an average over scen_dim.

profile[i] will be evaluated as a profile vector over scen_dim.

The query parameter is_zero_one specifies which profile type the output vector should be converted to.
Source code in framcore/expressions/queries.py
def get_profile_vector(
    expr: Expr,
    db: QueryDB | Model,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    is_zero_one: bool,
    is_float32: bool = True,
) -> NDArray:
    """
    Evaluate expr representing a (possibly aggregated) profile.

    expr = sum(weight[i] * profile[i]) where

        weight[i] >= 0, is unitless, and will be evaluated as a constant
        profile[i] is a unitless profile expr

        profile[i] is either a "zero_one" or a "mean_one" type of profile

        "zero_one" and "mean_one" profiles must be converted to the
        same standard before they can be added correctly.

        The query parameters data_dim and scen_dim specify the requested
        TimeIndex for the data and scenario dimensions, and the requested
        reference period.

        weight[i] will be evaluated as a constant: level Exprs at data_dim
        (with the reference period of scen_dim), and profile Exprs as an
        average over scen_dim.

        profile[i] will be evaluated as a profile vector over scen_dim.

        The query parameter is_zero_one specifies which profile type the
        output vector should be converted to.
    """
    # Argument expr checked in _get_profile_vector since it can be recursively called.
    check_type(data_dim, SinglePeriodTimeIndex)
    check_type(scen_dim, FixedFrequencyTimeIndex)
    check_type(is_zero_one, bool)
    check_type(is_float32, bool)
    db = _load_model_and_create_model_db(db)

    return _get_profile_vector(expr, db, data_dim, scen_dim, is_zero_one, is_float32)
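Mirroring the sketch for get_level_value above (weights and profiles are hypothetical):

vec = get_profile_vector(
    expr=0.4 * p_a + 0.6 * p_b,  # sum of weight[i] * profile[i] terms
    db=model,
    data_dim=data_period,
    scen_dim=scenario_index,
    is_zero_one=False,           # return a "mean_one" vector
    is_float32=True,
)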
get_timeindexes_from_expr(db: QueryDB | Model, expr: Expr) -> set[TimeIndex]

Find all timeindexes behind an expression.

Useful for optimized queries (not asking for more data than necessary).

Source code in framcore/expressions/queries.py
def get_timeindexes_from_expr(db: QueryDB | Model, expr: Expr) -> set[TimeIndex]:
    """
    Find all timeindexes behind an expression.

    Useful for optimized queries (not asking for more data than necessary).
    """
    db = _load_model_and_create_model_db(db)

    timeindexes: set[TimeIndex] = set()

    _recursively_update_timeindexes(timeindexes, db, expr)

    return timeindexes
get_units_from_expr(db: QueryDB | Model, expr: Expr) -> set[str]

Find all units behind an expression. Useful for queries involving conversion factors.

Source code in framcore/expressions/queries.py
def get_units_from_expr(db: QueryDB | Model, expr: Expr) -> set[str]:
    """Find all units behind an expression. Useful for queries involving conversion factors."""
    db = _load_model_and_create_model_db(db)

    units: set[str] = set()

    _recursively_update_units(units, db, expr)

    return units
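Both introspection helpers follow the same pattern; a sketch reusing the placeholders from above:

timeindexes = get_timeindexes_from_expr(model, demand_expr)  # ask for no finer resolution than needed
units = get_units_from_expr(model, demand_expr)              # e.g. to pre-compute conversion factors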

units

Define units used in the system, their handling and conversion rules.

We use SymPy to support unit conversions. Previously computed conversion factors are cached to avoid redundant calculations.

get_unit_conversion_factor(from_unit: str | None, to_unit: str | None) -> float

Get the conversion factor from one unit to another.

Source code in framcore/expressions/units.py
def get_unit_conversion_factor(from_unit: str | None, to_unit: str | None) -> float:  # noqa: C901
    """Get the conversion factor from one unit to another."""
    if from_unit == to_unit:
        return 1.0

    if from_unit is None or to_unit is None:
        return _get_unit_conversion_factor_with_none(from_unit, to_unit)

    fastpath = _fastpath_get_unit_conversion_factor(from_unit, to_unit)

    if _DEBUG is False and fastpath is not None:
        return fastpath

    if fastpath is None:
        has_multiplier = False
        with contextlib.suppress(Exception):
            ix = from_unit.index("*")
            multiplier = float(from_unit[:ix])
            base_from_unit = from_unit[ix + 1 :].strip()
            has_multiplier = True

        if has_multiplier:
            fastpath = _fastpath_get_unit_conversion_factor(base_from_unit, to_unit)
            fastpath = fastpath if fastpath is None else fastpath * multiplier
            if _DEBUG is False and fastpath is not None:
                return fastpath

    if _COLLECT_FASTPATH_DATA and fastpath is None:
        if has_multiplier:
            _OBSERVED_UNIT_CONVERSIONS.add((base_from_unit, to_unit))
        else:
            _OBSERVED_UNIT_CONVERSIONS.add((from_unit, to_unit))

    fallback = _fallback_get_unit_conversion_factor(from_unit, to_unit)

    if _DEBUG and fastpath is not None and fallback != fastpath:
        message = f"Different results!\nfrom_unit {from_unit} to_unit {to_unit}\nfastpath {fastpath} fallback {fallback}"
        raise RuntimeError(message)

    if _unit_has_no_floats(from_unit) and _unit_has_no_floats(to_unit):
        _FASTPATH_CONVERSION_FACTORS[(from_unit, to_unit)] = fallback

    return fallback
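Illustrative conversions (the unit strings are assumptions; what is accepted depends on the unit definitions behind the SymPy fallback):

get_unit_conversion_factor("MW", "MW")       # 1.0 - equal units short-circuit
get_unit_conversion_factor("GWh", "MWh")     # 1000.0
get_unit_conversion_factor("3.6*MJ", "kWh")  # 1.0 - the "3.6*" multiplier prefix is parsed off and applied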
is_convertable(unit_from: str, unit_to: str) -> bool

Return True if unit_from can be converted to unit_to, else False.

Source code in framcore/expressions/units.py
def is_convertable(unit_from: str, unit_to: str) -> bool:
    """Return True if from_unit can be converted to to_unit else False."""
    with contextlib.suppress(Exception):
        get_unit_conversion_factor(unit_from, unit_to)
        return True
    return False
validate_unit_conversion_fastpaths() -> bool

Run-time validation of fastpaths.

Source code in framcore/expressions/units.py
def validate_unit_conversion_fastpaths() -> bool:
    """Run-Time validation of fastpaths."""
    errors = []
    for (from_unit, to_unit), result in _FASTPATH_CONVERSION_FACTORS.items():
        sympy_result = None
        with contextlib.suppress(Exception):
            sympy_result = _fallback_get_unit_conversion_factor(from_unit, to_unit)
        if result != sympy_result:
            message = f"'{from_unit}' to '{to_unit}' failed. Fastpath: {result}, SymPy: {sympy_result}"
            errors.append(message)
    for from_unit, to_unit in _FASTPATH_INCOMPATIBLE_CONVERSIONS:
        with contextlib.suppress(Exception):
            sympy_result = _fallback_get_unit_conversion_factor(from_unit, to_unit)
            message = f"'{from_unit}' to '{to_unit}'. Fastpath claim incompatible units, but SymPy fallback returned {sympy_result}"
            errors.append(message)
    if errors:
        message = "\n".join(errors)
        raise RuntimeError(message)

fingerprints

Fingerprint

Fingerprint of various data structures.

Source code in framcore/fingerprints/fingerprint.py
class Fingerprint:
    """Fingerprint of various data structures."""

    def __init__(self, source: object = None) -> None:
        """
        Initialize a Fingerprint instance.

        Args:
            source (object, optional): The source object to fingerprint. Defaults to None.

        """
        self._nested = {}
        self._hash = None
        self._source = source

    def add(self, key: str, value: object) -> None:
        """
        Add a value to the fingerprint under the specified key.

        Args:
            key (str): The key to associate with the value.
            value: The value to add, which can be a Fingerprint, FingerprintRef, or other supported types.

        Returns:
            None

        """
        assert key not in self._nested

        if isinstance(value, Fingerprint | FingerprintRef):
            self._nested[key] = value
        elif hasattr(value, "get_fingerprint"):
            self.add(key, value.get_fingerprint())
        elif isinstance(value, list | tuple | set):
            self.add(key, self._fingerprint_from_list(value))
        elif isinstance(value, dict):
            self.add(key, self._fingerprint_from_dict(value))
        else:
            self._nested[key] = _custom_hash(value)

        self._hash = None

    def _fingerprint_from_list(self, items: list | tuple | set) -> Fingerprint:
        fingerprint = Fingerprint()
        for index, value in enumerate(items):
            fingerprint.add(f"{index}", value)
        return fingerprint

    def _fingerprint_from_dict(self, a_dict: dict) -> Fingerprint:
        fingerprint = Fingerprint()
        for key, value in a_dict.items():
            fingerprint.add(f"{key}", value)
        return fingerprint

    def add_ref(self, prop: str, ref_key: str) -> None:
        """
        Add a FingerprintRef to the fingerprint under the specified property key.

        Args:
            prop (str): The property key to associate with the reference.
            ref_key (str): The key referencing another fingerprint.

        Returns:
            None

        """
        self.add(prop, FingerprintRef(ref_key))

    def get_parts(self) -> dict:
        """
        Return the dictionary of parts contained in the fingerprint.

        Returns:
            dict: A dictionary mapping keys to their associated fingerprint parts.

        """
        return {k: v for k, v in self._nested.items()}

    def update_ref(self, ref_key: str, fingerprint: Fingerprint) -> None:
        """
        Update the reference at the given key with a new Fingerprint.

        Args:
            ref_key (str): The key referencing the FingerprintRef to update.
            fingerprint (Fingerprint): The new Fingerprint to set at the reference.

        Returns:
            None

        """
        assert ref_key in self._nested
        assert isinstance(self._nested[ref_key], FingerprintRef)

        self._nested[ref_key] = fingerprint
        self._hash = None

    def get_hash(self) -> str:
        """
        Return the hash value of the fingerprint.

        Returns:
            str: The computed hash value representing the fingerprint.

        """
        self._resolve_total_hash()
        return self._hash

    def _contains_refs(self) -> bool:
        return any(isinstance(v, FingerprintRef) for v in self._nested.values())

    def _contains_key(self, key: str) -> bool:
        return key in self._nested

    def _resolve_total_hash(self) -> None:
        parts = []
        for k, v in self._nested.items():
            if isinstance(v, Fingerprint):
                parts.append((k, v.get_hash()))
            elif isinstance(v, FingerprintRef):
                parts.append((k, f"#ref:{v.get_key()}"))
            else:
                parts.append((k, v))

        self._hash = _custom_hash(sorted(parts))

    def diff(self, other: Fingerprint | None) -> FingerprintDiff:
        """Return differences between this and other fingerprint."""
        diff = FingerprintDiff()

        if other is None:
            for parent_key, parent_value in self.get_parts().items():
                if isinstance(parent_value, Fingerprint):
                    diff.add_diff(parent_key, FingerprintDiffType.NEW, parent_value._source)  # noqa: SLF001
                    diff.update(parent_value.diff(None))
            return diff

        if self.get_hash() == other.get_hash():
            return diff

        self_parts: dict[str, Fingerprint] = {
            key: value for key, value in self.get_parts().items() if isinstance(value, Fingerprint)
        }
        other_parts: dict[str, Fingerprint] = {
            key: value for key, value in other.get_parts().items() if isinstance(value, Fingerprint)
        }

        # Check for new or modified keys
        for key, value in self_parts.items():
            if key not in other_parts:
                diff.add_diff(key, FingerprintDiffType.NEW, value._source)  # noqa: SLF001
                diff.update(value.diff(None))
            elif value.get_hash() != other_parts[key].get_hash():
                diff.add_diff(key, FingerprintDiffType.MODIFIED, value._source)  # noqa: SLF001
                diff.update(value.diff(other_parts[key]))

        # Check for deleted keys
        for key in other_parts.keys() - self_parts.keys():
            other_value = other_parts[key]
            diff.add_diff(key, FingerprintDiffType.DELETED, other_value._source)  # noqa: SLF001
            source = self  # TODO: Is this correct?
            diff.update(Fingerprint(source).diff(other_value))

        return diff

    def __eq__(self, other: Fingerprint) -> bool:
        """
        Determine if two Fingerprint instances are equal based on their hash values.

        Args:
            other (Fingerprint): The other Fingerprint instance to compare.

        Returns:
            bool: True if the hash values are equal, False otherwise.

        """
        return self.get_hash() == other.get_hash()
__eq__(other: Fingerprint) -> bool

Determine if two Fingerprint instances are equal based on their hash values.

Parameters:

Name Type Description Default
other Fingerprint

The other Fingerprint instance to compare.

required

Returns:

Name Type Description
bool bool

True if the hash values are equal, False otherwise.

Source code in framcore/fingerprints/fingerprint.py
def __eq__(self, other: Fingerprint) -> bool:
    """
    Determine if two Fingerprint instances are equal based on their hash values.

    Args:
        other (Fingerprint): The other Fingerprint instance to compare.

    Returns:
        bool: True if the hash values are equal, False otherwise.

    """
    return self.get_hash() == other.get_hash()
__init__(source: object = None) -> None

Initialize a Fingerprint instance.

Parameters:

Name Type Description Default
source object

The source object to fingerprint. Defaults to None.

None
Source code in framcore/fingerprints/fingerprint.py
def __init__(self, source: object = None) -> None:
    """
    Initialize a Fingerprint instance.

    Args:
        source (object, optional): The source object to fingerprint. Defaults to None.

    """
    self._nested = {}
    self._hash = None
    self._source = source
add(key: str, value: object) -> None

Add a value to the fingerprint under the specified key.

Parameters:

Name Type Description Default
key str

The key to associate with the value.

required
value object

The value to add, which can be a Fingerprint, FingerprintRef, or other supported types.

required

Returns:

Type Description
None

None

Source code in framcore/fingerprints/fingerprint.py
def add(self, key: str, value: object) -> None:
    """
    Add a value to the fingerprint under the specified key.

    Args:
        key (str): The key to associate with the value.
        value: The value to add, which can be a Fingerprint, FingerprintRef, or other supported types.

    Returns:
        None

    """
    assert key not in self._nested

    if isinstance(value, Fingerprint | FingerprintRef):
        self._nested[key] = value
    elif hasattr(value, "get_fingerprint"):
        self.add(key, value.get_fingerprint())
    elif isinstance(value, list | tuple | set):
        self.add(key, self._fingerprint_from_list(value))
    elif isinstance(value, dict):
        self.add(key, self._fingerprint_from_dict(value))
    else:
        self._nested[key] = _custom_hash(value)

    self._hash = None
add_ref(prop: str, ref_key: str) -> None

Add a FingerprintRef to the fingerprint under the specified property key.

Parameters:

Name Type Description Default
prop str

The property key to associate with the reference.

required
ref_key str

The key referencing another fingerprint.

required

Returns:

Type Description
None

None

Source code in framcore/fingerprints/fingerprint.py
def add_ref(self, prop: str, ref_key: str) -> None:
    """
    Add a FingerprintRef to the fingerprint under the specified property key.

    Args:
        prop (str): The property key to associate with the reference.
        ref_key (str): The key referencing another fingerprint.

    Returns:
        None

    """
    self.add(prop, FingerprintRef(ref_key))
diff(other: Fingerprint | None) -> FingerprintDiff

Return differences between this and other fingerprint.

Source code in framcore/fingerprints/fingerprint.py
def diff(self, other: Fingerprint | None) -> FingerprintDiff:
    """Return differences between this and other fingerprint."""
    diff = FingerprintDiff()

    if other is None:
        for parent_key, parent_value in self.get_parts().items():
            if isinstance(parent_value, Fingerprint):
                diff.add_diff(parent_key, FingerprintDiffType.NEW, parent_value._source)  # noqa: SLF001
                diff.update(parent_value.diff(None))
        return diff

    if self.get_hash() == other.get_hash():
        return diff

    self_parts: dict[str, Fingerprint] = {
        key: value for key, value in self.get_parts().items() if isinstance(value, Fingerprint)
    }
    other_parts: dict[str, Fingerprint] = {
        key: value for key, value in other.get_parts().items() if isinstance(value, Fingerprint)
    }

    # Check for new or modified keys
    for key, value in self_parts.items():
        if key not in other_parts:
            diff.add_diff(key, FingerprintDiffType.NEW, value._source)  # noqa: SLF001
            diff.update(value.diff(None))
        elif value.get_hash() != other_parts[key].get_hash():
            diff.add_diff(key, FingerprintDiffType.MODIFIED, value._source)  # noqa: SLF001
            diff.update(value.diff(other_parts[key]))

    # Check for deleted keys
    for key in other_parts.keys() - self_parts.keys():
        other_value = other_parts[key]
        diff.add_diff(key, FingerprintDiffType.DELETED, other_value._source)  # noqa: SLF001
        source = self  # TODO: Is this correct?
        diff.update(Fingerprint(source).diff(other_value))

    return diff
get_hash() -> str

Return the hash value of the fingerprint.

Returns:

Name Type Description
str str

The computed hash value representing the fingerprint.

Source code in framcore/fingerprints/fingerprint.py
def get_hash(self) -> str:
    """
    Return the hash value of the fingerprint.

    Returns:
        str: The computed hash value representing the fingerprint.

    """
    self._resolve_total_hash()
    return self._hash
get_parts() -> dict

Return the dictionary of parts contained in the fingerprint.

Returns:

Name Type Description
dict dict

A dictionary mapping keys to their associated fingerprint parts.

Source code in framcore/fingerprints/fingerprint.py
def get_parts(self) -> dict:
    """
    Return the dictionary of parts contained in the fingerprint.

    Returns:
        dict: A dictionary mapping keys to their associated fingerprint parts.

    """
    return {k: v for k, v in self._nested.items()}
update_ref(ref_key: str, fingerprint: Fingerprint) -> None

Update the reference at the given key with a new Fingerprint.

Parameters:

Name Type Description Default
ref_key str

The key referencing the FingerprintRef to update.

required
fingerprint Fingerprint

The new Fingerprint to set at the reference.

required

Returns:

Type Description
None

None

Source code in framcore/fingerprints/fingerprint.py
def update_ref(self, ref_key: str, fingerprint: Fingerprint) -> None:
    """
    Update the reference at the given key with a new Fingerprint.

    Args:
        ref_key (str): The key referencing the FingerprintRef to update.
        fingerprint (Fingerprint): The new Fingerprint to set at the reference.

    Returns:
        None

    """
    assert ref_key in self._nested
    assert isinstance(self._nested[ref_key], FingerprintRef)

    self._nested[ref_key] = fingerprint
    self._hash = None
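A small sketch of the fingerprint workflow (keys, values and other_fingerprint are illustrative):

fp = Fingerprint()
fp.add("params", {"capacity": 100.0, "unit": "MW"})  # dicts, lists and sets become nested Fingerprints
fp.add_ref("profile", "wind_profile")                # unresolved reference, hashed as "#ref:wind_profile"
fp.update_ref("profile", other_fingerprint)          # resolve the reference later
fp.get_hash()                                        # total hash; __eq__ and diff() build on it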

FingerprintDiff

Differences between two fingerprints.

Source code in framcore/fingerprints/fingerprint.py
class FingerprintDiff:
    """Differences between two fingerprints."""

    def __init__(self) -> None:
        """Initialize an empty FingerprintDiff."""
        self._diffs: dict[str, tuple] = {}

    def add_diff(
        self,
        key: str,
        diff_type: FingerprintDiffType,
        obj: object,
    ) -> None:
        """
        Add a difference entry for a fingerprint.

        Args:
            key (str): The key identifying the fingerprint part.
            diff_type (FingerprintDiffType): The type of difference (NEW, MODIFIED, DELETED).
            obj: The object associated with the difference.

        """
        from framcore.components.Component import Component
        from framcore.curves.Curve import Curve
        from framcore.timevectors.TimeVector import TimeVector

        # Do we need this check, given that everything that is not a Fingerprint is filtered out before add_diff() is called?
        if isinstance(obj, TimeVector | Curve | Component):
            if key in self._diffs:
                message = f"duplicate entry: {key} ({obj})"
                print(message)

            self._diffs[key] = (obj, diff_type)

    def get_diffs(self) -> dict[str, tuple]:
        """
        Return the dictionary of differences.

        Returns:
            dict[str, tuple]: The differences stored in the FingerprintDiff.

        """
        return self._diffs

    def is_changed(self) -> bool:
        """Return True if there are any differences."""
        return bool(self._diffs)

    def update(self, other: FingerprintDiff) -> None:
        """
        Update this FingerprintDiff with differences from another FingerprintDiff.

        Args:
            other (FingerprintDiff): Another FingerprintDiff whose differences will be added.

        """
        self._diffs.update(other.get_diffs())
__init__() -> None

Initialize an empty FingerprintDiff.

Source code in framcore/fingerprints/fingerprint.py
def __init__(self) -> None:
    """Initialize an empty FingerprintDiff."""
    self._diffs: dict[str, tuple] = {}
add_diff(key: str, diff_type: FingerprintDiffType, obj: object) -> None

Add a difference entry for a fingerprint.

Parameters:

Name Type Description Default
key str

The key identifying the fingerprint part.

required
diff_type FingerprintDiffType

The type of difference (NEW, MODIFIED, DELETED).

required
obj object

The object associated with the difference.

required
Source code in framcore/fingerprints/fingerprint.py
def add_diff(
    self,
    key: str,
    diff_type: FingerprintDiffType,
    obj: object,
) -> None:
    """
    Add a difference entry for a fingerprint.

    Args:
        key (str): The key identifying the fingerprint part.
        diff_type (FingerprintDiffType): The type of difference (NEW, MODIFIED, DELETED).
        obj: The object associated with the difference.

    """
    from framcore.components.Component import Component
    from framcore.curves.Curve import Curve
    from framcore.timevectors.TimeVector import TimeVector

    # Do we need this check, given that everything that is not a Fingerprint is filtered out before add_diff() is called?
    if isinstance(obj, TimeVector | Curve | Component):
        if key in self._diffs:
            message = f"duplicate entry: {key} ({obj})"
            print(message)

        self._diffs[key] = (obj, diff_type)
get_diffs() -> dict[str, tuple]

Return the dictionary of differences.

Returns:

Type Description
dict[str, tuple]

dict[str, tuple]: The differences stored in the FingerprintDiff.

Source code in framcore/fingerprints/fingerprint.py
def get_diffs(self) -> dict[str, tuple]:
    """
    Return the dictionary of differences.

    Returns:
        dict[str, tuple]: The differences stored in the FingerprintDiff.

    """
    return self._diffs
is_changed() -> bool

Return True if there are any differences.

Source code in framcore/fingerprints/fingerprint.py
def is_changed(self) -> bool:
    """Return True if there are any differences."""
    return bool(self._diffs)
update(other: FingerprintDiff) -> None

Update this FingerprintDiff with differences from another FingerprintDiff.

Parameters:

Name Type Description Default
other FingerprintDiff

Another FingerprintDiff whose differences will be added.

required
Source code in framcore/fingerprints/fingerprint.py
def update(self, other: FingerprintDiff) -> None:
    """
    Update this FingerprintDiff with differences from another FingerprintDiff.

    Args:
        other (FingerprintDiff): Another FingerprintDiff whose differences will be added.

    """
    self._diffs.update(other.get_diffs())

FingerprintDiffType

Bases: Enum

Type of difference between two fingerprints.

Source code in framcore/fingerprints/fingerprint.py
class FingerprintDiffType(Enum):
    """Type of difference between two fingerprints."""

    NEW = "new"
    MODIFIED = "modified"
    DELETED = "deleted"

FingerprintRef

Refers to another fingerprint.

Source code in framcore/fingerprints/fingerprint.py
class FingerprintRef:
    """Refers to another fingerprint."""

    def __init__(self, key: str) -> None:
        """
        Initialize a FingerprintRef with the given key.

        Args:
            key (str): The key referencing another fingerprint.

        """
        self._key = key

    def get_key(self) -> str:
        """
        Return the key referencing another fingerprint.

        Returns:
            str: The key referencing another fingerprint.

        """
        return self._key
__init__(key: str) -> None

Initialize a FingerprintRef with the given key.

Parameters:

Name Type Description Default
key str

The key referencing another fingerprint.

required
Source code in framcore/fingerprints/fingerprint.py
def __init__(self, key: str) -> None:
    """
    Initialize a FingerprintRef with the given key.

    Args:
        key (str): The key referencing another fingerprint.

    """
    self._key = key
get_key() -> str

Return the key referencing another fingerprint.

Returns:

Name Type Description
str str

The key referencing another fingerprint.

Source code in framcore/fingerprints/fingerprint.py
def get_key(self) -> str:
    """
    Return the key referencing another fingerprint.

    Returns:
        str: The key referencing another fingerprint.

    """
    return self._key

            if key not in other_parts:
                diff.add_diff(key, FingerprintDiffType.NEW, value._source)  # noqa: SLF001
                diff.update(value.diff(None))
            elif value.get_hash() != other_parts[key].get_hash():
                diff.add_diff(key, FingerprintDiffType.MODIFIED, value._source)  # noqa: SLF001
                diff.update(value.diff(other_parts[key]))

        # Check for deleted keys
        for key in other_parts.keys() - self_parts.keys():
            other_value = other_parts[key]
            diff.add_diff(key, FingerprintDiffType.DELETED, other_value._source)  # noqa: SLF001
            source = self  # TODO: Is this correct?
            diff.update(Fingerprint(source).diff(other_value))

        return diff

    def __eq__(self, other: Fingerprint) -> bool:
        """
        Determine if two Fingerprint instances are equal based on their hash values.

        Args:
            other (Fingerprint): The other Fingerprint instance to compare.

        Returns:
            bool: True if the hash values are equal, False otherwise.

        """
        return self.get_hash() == other.get_hash()
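
A minimal usage sketch of Fingerprint, assuming it is imported from framcore.fingerprints.fingerprint (import path inferred from the source location above; keys and values are hypothetical):

from framcore.fingerprints.fingerprint import Fingerprint  # import path assumed

old = Fingerprint()
old.add("capacity", 100.0)        # plain values are stored via _custom_hash
old.add("tags", ["wind", "NO1"])  # lists/tuples/sets/dicts become nested Fingerprints

new = Fingerprint()
new.add("capacity", 120.0)
new.add("tags", ["wind", "NO1"])

print(old == new)  # False: __eq__ compares resolved hashes, and "capacity" differs

Note that diff() only records parts whose source is a Component, Curve or TimeVector (see FingerprintDiff.add_diff below), so fingerprints of plain values compare by hash but yield empty diffs.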
__eq__(other: Fingerprint) -> bool

Determine if two Fingerprint instances are equal based on their hash values.

Parameters:

Name Type Description Default
other Fingerprint

The other Fingerprint instance to compare.

required

Returns:

Name Type Description
bool bool

True if the hash values are equal, False otherwise.

Source code in framcore/fingerprints/fingerprint.py
def __eq__(self, other: Fingerprint) -> bool:
    """
    Determine if two Fingerprint instances are equal based on their hash values.

    Args:
        other (Fingerprint): The other Fingerprint instance to compare.

    Returns:
        bool: True if the hash values are equal, False otherwise.

    """
    return self.get_hash() == other.get_hash()
__init__(source: object = None) -> None

Initialize a Fingerprint instance.

Parameters:

Name Type Description Default
source object

The source object to fingerprint. Defaults to None.

None
Source code in framcore/fingerprints/fingerprint.py
def __init__(self, source: object = None) -> None:
    """
    Initialize a Fingerprint instance.

    Args:
        source (object, optional): The source object to fingerprint. Defaults to None.

    """
    self._nested = {}
    self._hash = None
    self._source = source
add(key: str, value: object) -> None

Add a value to the fingerprint under the specified key.

Parameters:

Name Type Description Default
key str

The key to associate with the value.

required
value object

The value to add, which can be a Fingerprint, FingerprintRef, or other supported types.

required

Returns:

Type Description
None

None

Source code in framcore/fingerprints/fingerprint.py
def add(self, key: str, value: object) -> None:
    """
    Add a value to the fingerprint under the specified key.

    Args:
        key (str): The key to associate with the value.
        value: The value to add, which can be a Fingerprint, FingerprintRef, or other supported types.

    Returns:
        None

    """
    assert key not in self._nested

    if isinstance(value, Fingerprint | FingerprintRef):
        self._nested[key] = value
    elif hasattr(value, "get_fingerprint"):
        self.add(key, value.get_fingerprint())
    elif isinstance(value, list | tuple | set):
        self.add(key, self._fingerprint_from_list(value))
    elif isinstance(value, dict):
        self.add(key, self._fingerprint_from_dict(value))
    else:
        self._nested[key] = _custom_hash(value)

    self._hash = None
add_ref(prop: str, ref_key: str) -> None

Add a FingerprintRef to the fingerprint under the specified property key.

Parameters:

Name Type Description Default
prop str

The property key to associate with the reference.

required
ref_key str

The key referencing another fingerprint.

required

Returns:

Type Description
None

None

Source code in framcore/fingerprints/fingerprint.py
def add_ref(self, prop: str, ref_key: str) -> None:
    """
    Add a FingerprintRef to the fingerprint under the specified property key.

    Args:
        prop (str): The property key to associate with the reference.
        ref_key (str): The key referencing another fingerprint.

    Returns:
        None

    """
    self.add(prop, FingerprintRef(ref_key))
diff(other: Fingerprint | None) -> FingerprintDiff

Return differences between this and other fingerprint.

Source code in framcore/fingerprints/fingerprint.py
def diff(self, other: Fingerprint | None) -> FingerprintDiff:
    """Return differences between this and other fingerprint."""
    diff = FingerprintDiff()

    if other is None:
        for parent_key, parent_value in self.get_parts().items():
            if isinstance(parent_value, Fingerprint):
                diff.add_diff(parent_key, FingerprintDiffType.NEW, parent_value._source)  # noqa: SLF001
                diff.update(parent_value.diff(None))
        return diff

    if self.get_hash() == other.get_hash():
        return diff

    self_parts: dict[str, Fingerprint] = {
        key: value for key, value in self.get_parts().items() if isinstance(value, Fingerprint)
    }
    other_parts: dict[str, Fingerprint] = {
        key: value for key, value in other.get_parts().items() if isinstance(value, Fingerprint)
    }

    # Check for new or modified keys
    for key, value in self_parts.items():
        if key not in other_parts:
            diff.add_diff(key, FingerprintDiffType.NEW, value._source)  # noqa: SLF001
            diff.update(value.diff(None))
        elif value.get_hash() != other_parts[key].get_hash():
            diff.add_diff(key, FingerprintDiffType.MODIFIED, value._source)  # noqa: SLF001
            diff.update(value.diff(other_parts[key]))

    # Check for deleted keys
    for key in other_parts.keys() - self_parts.keys():
        other_value = other_parts[key]
        diff.add_diff(key, FingerprintDiffType.DELETED, other_value._source)  # noqa: SLF001
        source = self  # TODO: Is this correct?
        diff.update(Fingerprint(source).diff(other_value))

    return diff
get_hash() -> str

Return the hash value of the fingerprint.

Returns:

Name Type Description
str str

The computed hash value representing the fingerprint.

Source code in framcore/fingerprints/fingerprint.py
def get_hash(self) -> str:
    """
    Return the hash value of the fingerprint.

    Returns:
        str: The computed hash value representing the fingerprint.

    """
    self._resolve_total_hash()
    return self._hash
get_parts() -> dict

Return the dictionary of parts contained in the fingerprint.

Returns:

Name Type Description
dict dict

A dictionary mapping keys to their associated fingerprint parts.

Source code in framcore/fingerprints/fingerprint.py
def get_parts(self) -> dict:
    """
    Return the dictionary of parts contained in the fingerprint.

    Returns:
        dict: A dictionary mapping keys to their associated fingerprint parts.

    """
    return {k: v for k, v in self._nested.items()}
update_ref(ref_key: str, fingerprint: Fingerprint) -> None

Update the reference at the given key with a new Fingerprint.

Parameters:

Name Type Description Default
ref_key str

The key referencing the FingerprintRef to update.

required
fingerprint Fingerprint

The new Fingerprint to set at the reference.

required

Returns:

Type Description
None

None

Source code in framcore/fingerprints/fingerprint.py
def update_ref(self, ref_key: str, fingerprint: Fingerprint) -> None:
    """
    Update the reference at the given key with a new Fingerprint.

    Args:
        ref_key (str): The key referencing the FingerprintRef to update.
        fingerprint (Fingerprint): The new Fingerprint to set at the reference.

    Returns:
        None

    """
    assert ref_key in self._nested
    assert isinstance(self._nested[ref_key], FingerprintRef)

    self._nested[ref_key] = fingerprint
    self._hash = None
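
A sketch of the two-step reference workflow (keys and values are hypothetical): add_ref registers a placeholder that hashes as "#ref:<key>", and update_ref later swaps in the resolved Fingerprint and invalidates the cached hash:

fp = Fingerprint()
fp.add_ref("profile", "wind_profile_NO1")  # stored as FingerprintRef("wind_profile_NO1")

resolved = Fingerprint()
resolved.add("values", [0.2, 0.9, 0.5])

fp.update_ref("profile", resolved)  # replaces the ref; get_hash() now reflects the resolved part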
FingerprintDiff

Differences between two fingerprints.

Source code in framcore/fingerprints/fingerprint.py
class FingerprintDiff:
    """Differences between two fingerprints."""

    def __init__(self) -> None:
        """Initialize an empty FingerprintDiff."""
        self._diffs: dict[str, tuple] = {}

    def add_diff(
        self,
        key: str,
        diff_type: FingerprintDiffType,
        obj: object,
    ) -> None:
        """
        Add a difference entry for a fingerprint.

        Args:
            key (str): The key identifying the fingerprint part.
            diff_type (FingerprintDiffType): The type of difference (NEW, MODIFIED, DELETED).
            obj: The object associated with the difference.

        """
        from framcore.components.Component import Component
        from framcore.curves.Curve import Curve
        from framcore.timevectors.TimeVector import TimeVector

        # Do we need this check, since we filter out everything that is not a Fingerprint before calling add_diff()?
        if isinstance(obj, TimeVector | Curve | Component):
            if key in self._diffs:
                message = f"duplicate entry: {key} ({obj})"
                print(message)

            self._diffs[key] = (obj, diff_type)

    def get_diffs(self) -> dict[str, tuple]:
        """
        Return the dictionary of differences.

        Returns:
            dict[str, tuple]: The differences stored in the FingerprintDiff.

        """
        return self._diffs

    def is_changed(self) -> bool:
        """Return True if there are any differences."""
        return bool(self._diffs)

    def update(self, other: FingerprintDiff) -> None:
        """
        Update this FingerprintDiff with differences from another FingerprintDiff.

        Args:
            other (FingerprintDiff): Another FingerprintDiff whose differences will be added.

        """
        self._diffs.update(other.get_diffs())
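
A sketch of consuming a diff, assuming new_fp and old_fp are Fingerprints whose nested parts stem from Components, Curves or TimeVectors (add_diff ignores other source types):

diff = new_fp.diff(old_fp)
if diff.is_changed():
    for key, (obj, diff_type) in diff.get_diffs().items():
        print(f"{key}: {diff_type.value} ({type(obj).__name__})")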
__init__() -> None

Initialize an empty FingerprintDiff.

Source code in framcore/fingerprints/fingerprint.py
def __init__(self) -> None:
    """Initialize an empty FingerprintDiff."""
    self._diffs: dict[str, tuple] = {}
add_diff(key: str, diff_type: FingerprintDiffType, obj: object) -> None

Add a difference entry for a fingerprint.

Parameters:

Name Type Description Default
key str

The key identifying the fingerprint part.

required
diff_type FingerprintDiffType

The type of difference (NEW, MODIFIED, DELETED).

required
obj object

The object associated with the difference.

required
Source code in framcore/fingerprints/fingerprint.py
def add_diff(
    self,
    key: str,
    diff_type: FingerprintDiffType,
    obj: object,
) -> None:
    """
    Add a difference entry for a fingerprint.

    Args:
        key (str): The key identifying the fingerprint part.
        diff_type (FingerprintDiffType): The type of difference (NEW, MODIFIED, DELETED).
        obj: The object associated with the difference.

    """
    from framcore.components.Component import Component
    from framcore.curves.Curve import Curve
    from framcore.timevectors.TimeVector import TimeVector

    # Do we need this check, since we filter out everything that is not a Fingerprint before calling add_diff()?
    if isinstance(obj, TimeVector | Curve | Component):
        if key in self._diffs:
            message = f"duplicate entry: {key} ({obj})"
            print(message)

        self._diffs[key] = (obj, diff_type)
get_diffs() -> dict[str, tuple]

Return the dictionary of differences.

Returns:

Type Description
dict[str, tuple]

dict[str, tuple]: The differences stored in the FingerprintDiff.

Source code in framcore/fingerprints/fingerprint.py
def get_diffs(self) -> dict[str, tuple]:
    """
    Return the dictionary of differences.

    Returns:
        dict[str, tuple]: The differences stored in the FingerprintDiff.

    """
    return self._diffs
is_changed() -> bool

Return True if there are any differences.

Source code in framcore/fingerprints/fingerprint.py
def is_changed(self) -> bool:
    """Return True if there are any differences."""
    return bool(self._diffs)
update(other: FingerprintDiff) -> None

Update this FingerprintDiff with differences from another FingerprintDiff.

Parameters:

Name Type Description Default
other FingerprintDiff

Another FingerprintDiff whose differences will be added.

required
Source code in framcore/fingerprints/fingerprint.py
def update(self, other: FingerprintDiff) -> None:
    """
    Update this FingerprintDiff with differences from another FingerprintDiff.

    Args:
        other (FingerprintDiff): Another FingerprintDiff whose differences will be added.

    """
    self._diffs.update(other.get_diffs())
FingerprintDiffType

Bases: Enum

Type of difference between two fingerprints.

Source code in framcore/fingerprints/fingerprint.py
class FingerprintDiffType(Enum):
    """Type of difference between two fingerprints."""

    NEW = "new"
    MODIFIED = "modified"
    DELETED = "deleted"
FingerprintRef

Refers to another fingerprint.

Source code in framcore/fingerprints/fingerprint.py
class FingerprintRef:
    """Refers to another fingerprint."""

    def __init__(self, key: str) -> None:
        """
        Initialize a FingerprintRef with the given key.

        Args:
            key (str): The key referencing another fingerprint.

        """
        self._key = key

    def get_key(self) -> str:
        """
        Return the key referencing another fingerprint.

        Returns:
            str: The key referencing another fingerprint.

        """
        return self._key
__init__(key: str) -> None

Initialize a FingerprintRef with the given key.

Parameters:

Name Type Description Default
key str

The key referencing another fingerprint.

required
Source code in framcore/fingerprints/fingerprint.py
def __init__(self, key: str) -> None:
    """
    Initialize a FingerprintRef with the given key.

    Args:
        key (str): The key referencing another fingerprint.

    """
    self._key = key
get_key() -> str

Return the key referencing another fingerprint.

Returns:

Name Type Description
str str

The key referencing another fingerprint.

Source code in framcore/fingerprints/fingerprint.py
def get_key(self) -> str:
    """
    Return the key referencing another fingerprint.

    Returns:
        str: The key referencing another fingerprint.

    """
    return self._key

juliamodels

JuliaModel

Manage Julia environment and usage of juliacall for Solvers implemented in the Julia language.

JuliaModel

Bases: Base

Class for running Julia code with juliacall.

Source code in framcore/juliamodels/JuliaModel.py
class JuliaModel(Base):
    """Class for running julia code with juliacall."""

    ENV_NAME: str = "julia_env"  # Used to let each model define their own project/environment to avoid overwriting.
    _jl = None

    def __init__(
        self,
        env_path: Path | str | None = None,
        depot_path: Path | str | None = None,
        julia_path: Path | str | None = None,
        dependencies: list[str | tuple[str, str | None]] | None = None,
        skip_install_dependencies: bool = False,
        force_julia_install: bool = True,
    ) -> None:
        """
        Initialize management of the Julia model, environment and dependencies.

        The three parameters env_path, depot_path and julia_path set environment variables for the locations of your
        Julia environment, packages and language.

        - If the user has not specified locations, the default is to use the current python/conda environment.
        - If a system installation of Python is used, the default is set to the current user location.

        Args:
            env_path (Path | str | None, optional): Path to location of the Julia environment. If it doesn't exist it
                                                    will be created. Defaults to None.
            depot_path (Path | str | None, optional): Path to location where JuliaCall should install package
                                                      dependencies. Defaults to None.
            julia_path (Path | str | None, optional): Path to Julia language location. Will be installed here if it
                                                      doesn't exist. Defaults to None.
            dependencies (list[str] | None, optional): List of dependencies of the model. The strings in the list can
                                                       be either urls or Julia package names. Defaults to None.
            skip_install_dependencies (bool, optional): Skip installation of dependencies. Defaults to False.
            force_julia_install (bool): Force new Julia install.

        """
        self._check_type(env_path, (Path, str, type(None)))
        self._check_type(depot_path, (Path, str, type(None)))
        self._check_type(julia_path, (Path, str, type(None)))
        self._check_type(dependencies, (list, str, type(None)))
        self._check_type(skip_install_dependencies, bool)

        self._env_path = env_path
        self._depot_path = depot_path
        self._julia_path = julia_path
        self._dependencies = dependencies if dependencies else []
        self._skip_install_dependencies = skip_install_dependencies
        self._force_julia_install = force_julia_install

        self._jlpkg = None
        self._initialize_julia()

    def _initialize_julia(self) -> None:
        """Initialize Julia language, package depot, and environment with JuliaCall."""
        if self._jl is not None:
            return

        # figure out what kind of environment we are in
        prefix = sys.prefix if sys.prefix != sys.base_prefix else os.getenv("CONDA_PREFIX")
        # we have python system installation
        project = Path("~/.julia").expanduser() if prefix is None else prefix

        self._env_path = str(Path(project) / "julia_envs" / self.ENV_NAME) if not self._env_path else str(self._env_path)
        self._depot_path = str(Path(project) / "julia_pkgs") if not self._depot_path else str(self._depot_path)

        os.environ["PYTHON_JULIAPKG_PROJECT"] = self._env_path
        os.environ["JULIA_DEPOT_PATH"] = self._depot_path
        if self._julia_path:  # If Julia path is not set, let JuliaCall handle defaults.
            os.environ["PYTHON_JULIAPKG_EXE"] = str(self._julia_path)

        if self._force_julia_install:
            path = os.environ.get("PATH", "")
            cleaned = os.pathsep.join(p for p in path.split(os.pathsep) if "julia" not in p.lower())
            os.environ["PATH"] = cleaned

        juliacall = importlib.import_module("juliacall")
        JuliaModel._jl = juliacall.Main
        self._jlpkg = juliacall.Pkg

        self._jlpkg.activate(str(self._env_path))

        if not self._skip_install_dependencies:
            self._install_dependencies()

        # Log the Julia sysimage path for debugging.
        try:
            path_sysimage = self._jl.seval("unsafe_string(Base.JLOptions().image_file)")
            message = f"path_sysimage: {path_sysimage}"
            self.send_debug_event(message)
        except Exception:
            pass

    def _install_dependencies(self) -> None:
        """Install dependencies."""
        # if (Path(self._env_path) / Path("Manifest.toml")).exists():
        #    print("Manifest found, assuming environment is already initialized.")
        #    return

        url_tuples = [p for p in self._dependencies if isinstance(p, tuple) and _is_url(p[0])]
        urls = [p for p in self._dependencies if isinstance(p, str) and _is_url(p)]
        dev_paths = [p for p in self._dependencies if isinstance(p, str) and Path(p).exists()]
        pkg_names = [p for p in self._dependencies if isinstance(p, str) and not _is_url(p) and not Path(p).exists()]

        unknowns = [p for p in self._dependencies if not (p in url_tuples or p in urls or p in pkg_names or p in dev_paths)]

        if unknowns:
            messages = []
            for p in unknowns:
                messages.append(
                    (
                        f"Unsupported julia package definition: '{p}' of type '{type(p)}' is not supported. "
                        "Must be defined as either str or tuple[str, str | None]"
                    ),
                )
            message = "\n".join(messages)
            raise ValueError(message)

        self._jl.seval("using Pkg")

        pkg_spec_vector = self._jl.seval("x = Pkg.PackageSpec[]")

        for url, rev in url_tuples:
            self._jl.seval(f'push!(x, Pkg.PackageSpec(url="{url}", rev="{rev}"))')

        for url in urls:
            self._jl.seval(f'push!(x, Pkg.PackageSpec(url="{url}"))')

        for pkg_name in pkg_names:
            self._jl.seval(f'push!(x, Pkg.PackageSpec(name="{pkg_name}"))')

        self._jlpkg.add(pkg_spec_vector)

        for dev_path in dev_paths:
            self._jl.seval(f'Pkg.develop(path="{dev_path}")')

    def _run(self, julia_code: str) -> None:
        """Run a string of julia code wich is supposed to start running the Julia Model in the given environment."""
        self._jl.seval(julia_code)
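
A minimal sketch of a model built on JuliaModel; the class name, dependency and Julia code are hypothetical placeholders, and the import path is inferred from the source location above:

from framcore.juliamodels.JuliaModel import JuliaModel  # import path assumed

class MySolverModel(JuliaModel):
    ENV_NAME = "my_solver_env"  # own Julia project, so models do not overwrite each other's environments

    def solve(self) -> None:
        self._run('println("solving...")')  # evaluates Julia code in the activated environment

model = MySolverModel(dependencies=["JSON"])  # "JSON" is a placeholder package name
model.solve()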
__init__(env_path: Path | str | None = None, depot_path: Path | str | None = None, julia_path: Path | str | None = None, dependencies: list[str | tuple[str, str | None]] | None = None, skip_install_dependencies: bool = False, force_julia_install: bool = True) -> None

Initialize management of the Julia model, environment and dependencies.

The three parameters env_path, depot_path and julia_path set environment variables for the locations of your Julia environment, packages and language.

  • If the user has not specified locations, the default is to use the current python/conda environment.
  • If a system installation of Python is used, the default is set to the current user location.

Parameters:

Name Type Description Default
env_path Path | str | None

Path to location of the Julia environment. If it doesn't exist it will be created. Defaults to None.

None
depot_path Path | str | None

Path to location where JuliaCall should install package dependencies. Defaults to None.

None
julia_path Path | str | None

Path to Julia language location. Will be installed here if it doesn't exist. Defaults to None.

None
dependencies list[str] | None

List of dependencies of the model. The strings in the list can be either urls or Julia package names. Defaults to None.

None
skip_install_dependencies bool

Skip installation of dependencies. Defaults to False.

False
force_julia_install bool

Force new Julia install.

True
Source code in framcore/juliamodels/JuliaModel.py
def __init__(
    self,
    env_path: Path | str | None = None,
    depot_path: Path | str | None = None,
    julia_path: Path | str | None = None,
    dependencies: list[str | tuple[str, str | None]] | None = None,
    skip_install_dependencies: bool = False,
    force_julia_install: bool = True,
) -> None:
    """
    Initialize management of the Julia model, environment and dependencies.

    The three parameters env_path, depot_path and julia_path set environment variables for the locations of your
    Julia environment, packages and language.

    - If the user has not specified locations, the default is to use the current python/conda environment.
    - If a system installation of Python is used, the default is set to the current user location.

    Args:
        env_path (Path | str | None, optional): Path to location of the Julia environment. If it doesn't exist it
                                                will be created. Defaults to None.
        depot_path (Path | str | None, optional): Path to location where JuliaCall should install package
                                                  dependencies. Defaults to None.
        julia_path (Path | str | None, optional): Path to Julia language location. Will be installed here if it
                                                  doesn't exist. Defaults to None.
        dependencies (list[str] | None, optional): List of dependencies of the model. The strings in the list can
                                                   be either urls or Julia package names. Defaults to None.
        skip_install_dependencies (bool, optional): Skip installation of dependencies. Defaults to False.
        force_julia_install (bool): Force new Julia install.

    """
    self._check_type(env_path, (Path, str, type(None)))
    self._check_type(depot_path, (Path, str, type(None)))
    self._check_type(julia_path, (Path, str, type(None)))
    self._check_type(dependencies, (list, str, type(None)))
    self._check_type(skip_install_dependencies, bool)

    self._env_path = env_path
    self._depot_path = depot_path
    self._julia_path = julia_path
    self._dependencies = dependencies if dependencies else []
    self._skip_install_dependencies = skip_install_dependencies
    self._force_julia_install = force_julia_install

    self._jlpkg = None
    self._initialize_julia()

loaders

CurveLoader

Bases: Loader, ABC

Loader API for retrieving curve data from some source.

Source code in framcore/loaders/loaders.py
class CurveLoader(Loader, ABC):
    """Loader API for retrieving curve data from some source."""

    @abstractmethod
    def get_y_axis(self, curve_id: str) -> NDArray:
        """
        Return the values of a Curve's y axis in the Loader source.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            NDArray: Numpy array of all values in the y axis.

        """
        pass

    @abstractmethod
    def get_x_axis(self, curve_id: str) -> NDArray:
        """
        Return the values of a Curve's x axis in the Loader source.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            NDArray: Numpy array of all values in the x axis.

        """
        pass

    @abstractmethod
    def get_x_unit(self, curve_id: str) -> str:
        """
        Return the unit of the x axis of a specific curve.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            str: Unit of the curve's x axis.

        """
        pass

    @abstractmethod
    def get_y_unit(self, curve_id: str) -> str:
        """
        Return the unit of the y axis of a specific curve.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            str: Unit of the curve's y axis.

        """
        pass
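
A usage sketch, assuming loader is a concrete CurveLoader implementation and "demand_curve" is a hypothetical ID present in its source:

x = loader.get_x_axis("demand_curve")  # numpy array of x values
y = loader.get_y_axis("demand_curve")  # numpy array of y values
print(loader.get_x_unit("demand_curve"), loader.get_y_unit("demand_curve"))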
get_x_axis(curve_id: str) -> NDArray abstractmethod

Return the values of a Curve's x axis in the Loader source.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of all values in the x axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_x_axis(self, curve_id: str) -> NDArray:
    """
    Return the values of a Curve's x axis in the Loader source.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        NDArray: Numpy array of all values in the x axis.

    """
    pass
get_x_unit(curve_id: str) -> str abstractmethod

Return the unit of the x axis of a specific curve.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
str str

Unit of the curve's x axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_x_unit(self, curve_id: str) -> str:
    """
    Return the unit of the x axis of a specific curve.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        str: Unit of the curve's x axis.

    """
    pass
get_y_axis(curve_id: str) -> NDArray abstractmethod

Return the values of a Curve's y axis in the Loader source.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of all values in the y axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_y_axis(self, curve_id: str) -> NDArray:
    """
    Return the values of a Curve's y axis in the Loader source.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        NDArray: Numpy array of all values in the y axis.

    """
    pass
get_y_unit(curve_id: str) -> str abstractmethod

Return the unit of the y axis of a specific curve.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
str str

Unit of the curve's y axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_y_unit(self, curve_id: str) -> str:
    """
    Return the unit of the y axis of a specific curve.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        str: Unit of the curve's y axis.

    """
    pass

FileLoader

Bases: Loader, ABC

Define common functionality and API for Loaders connected to a file as source.

Source code in framcore/loaders/loaders.py
class FileLoader(Loader, ABC):
    """Define common functionality and API for Loaders connected to a file as source."""

    _SUPPORTED_SUFFIXES: ClassVar[list[str]] = []

    def __init__(self, source: Path | str, relative_loc: Path | str | None = None) -> None:
        """
        Check validity of input parameters.

        Args:
            source (Path | str): Full file path or the absolute part of a file path.
            relative_loc (Optional[Union[Path, str]], optional): The relative part of a file path. Defaults to None.

        """
        super().__init__()
        self._source = source
        self._relative_loc = relative_loc

        self._check_type(source, (Path, str))
        if self._relative_loc is not None:
            self._check_type(self._relative_loc, (Path, str))
        self._check_path_exists(self.get_source())
        self._check_path_supported(self.get_source())

    def __repr__(self) -> str:
        """Overwrite __repr__ to get better info."""
        return f"{type(self).__name__}(source={self._source}, relative_loc={self._relative_loc})"

    def get_source(self) -> Path:
        """Combine absolute and relative file path (if relative is defined) to get full source."""
        if self._relative_loc is None:
            return Path(self._source)
        return Path(self._source) / self._relative_loc

    def set_source(self, new_source: Path, relative_loc: Path | str | None = None) -> None:
        """
        Set absolute and relative parts of filepath.

        Args:
            new_source (Path): New absolute part.
            relative_loc (Optional[Union[Path, str]], optional): New relative part. Defaults to None.

        """
        self._source = new_source
        self._relative_loc = relative_loc

    @classmethod
    def get_supported_suffixes(cls) -> list[str]:
        """
        Return list of supported file types.

        Returns:
            list: List of filetypes.

        """
        return cls._SUPPORTED_SUFFIXES

    def _check_path_exists(self, path: Path) -> None:
        """
        Check if a file path exists.

        Args:
            path (Path): Path to check.

        Raises:
            FileNotFoundError

        """
        if not path.exists():
            msg = f"""File {path} does not exist. Could not create {type(self)}."""
            raise FileNotFoundError(msg)

    def _check_path_supported(self, path: Path) -> None:
        """
        Check if a file is supported/readable by this FileLoader instance.

        Args:
            path (Path): Path to check.

        Raises:
            ValueError: If the file type is not defined as supported.

        """
        filetype = path.suffix
        if filetype not in self._SUPPORTED_SUFFIXES:
            msg = f"File type of {path}, {filetype} is not supported by {type(self)}. Supported filetypes: {self._SUPPORTED_SUFFIXES}"
            raise ValueError(msg)
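
A minimal sketch of a concrete FileLoader; the class, suffixes, IDs and file names are hypothetical. Note that the abstract methods inherited from Loader (clear_cache, get_metadata, _get_ids) must also be implemented:

from framcore.loaders.loaders import FileLoader  # import path assumed

class YamlLoader(FileLoader):
    _SUPPORTED_SUFFIXES = [".yaml", ".yml"]

    def clear_cache(self) -> None:
        self._content_ids = None  # drop cached IDs

    def get_metadata(self, content_id: str) -> object:
        return None  # no metadata in this toy example

    def _get_ids(self) -> list[str]:
        return ["demand", "supply"]  # would normally be parsed from the file

loader = YamlLoader("data", relative_loc="prices.yaml")  # raises FileNotFoundError if data/prices.yaml is missing
print(loader.get_source())  # data/prices.yaml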
__init__(source: Path | str, relative_loc: Path | str | None = None) -> None

Check validity of input parameters.

Parameters:

Name Type Description Default
source Path | str

Full file path or the absolute part of a file path.

required
relative_loc Optional[Union[Path, str]]

The relative part of a file path. Defaults to None.

None
Source code in framcore/loaders/loaders.py
def __init__(self, source: Path | str, relative_loc: Path | str | None = None) -> None:
    """
    Check validity of input parameters.

    Args:
        source (Path | str): Full file path or the absolute part of a file path.
        relative_loc (Optional[Union[Path, str]], optional): The relative part of a file path. Defaults to None.

    """
    super().__init__()
    self._source = source
    self._relative_loc = relative_loc

    self._check_type(source, (Path, str))
    if self._relative_loc is not None:
        self._check_type(self._relative_loc, (Path, str))
    self._check_path_exists(self.get_source())
    self._check_path_supported(self.get_source())
__repr__() -> str

Overwrite __repr__ to get better info.

Source code in framcore/loaders/loaders.py
def __repr__(self) -> str:
    """Overwrite __repr__ to get better info."""
    return f"{type(self).__name__}(source={self._source}, relative_loc={self._relative_loc})"
get_source() -> Path

Combine absolute and relative file path (if relative is defined) to get full source.

Source code in framcore/loaders/loaders.py
def get_source(self) -> Path:
    """Combine absolute and relative file path (if relative is defined) to get full source."""
    if self._relative_loc is None:
        return Path(self._source)
    return Path(self._source) / self._relative_loc
get_supported_suffixes() -> list[str] classmethod

Return list of supported file types.

Returns:

Name Type Description
list list[str]

List of filetypes.

Source code in framcore/loaders/loaders.py
@classmethod
def get_supported_suffixes(cls) -> list[str]:
    """
    Return list of supported file types.

    Returns:
        list: List of filetypes.

    """
    return cls._SUPPORTED_SUFFIXES
set_source(new_source: Path, relative_loc: Path | str | None = None) -> None

Set absolute and relative parts of filepath.

Parameters:

Name Type Description Default
new_source Path

New absolute part.

required
relative_loc Optional[Union[Path, str]]

New relative part. Defaults to None.

None
Source code in framcore/loaders/loaders.py
def set_source(self, new_source: Path, relative_loc: Path | str | None = None) -> None:
    """
    Set absolute and relative parts of filepath.

    Args:
        new_source (Path): New absolute part.
        relative_loc (Optional[Union[Path, str]], optional): New relative part. Defaults to None.

    """
    self._source = new_source
    self._relative_loc = relative_loc

Loader

Bases: Base, ABC

Base Loader class defining common API and functionality for all Loaders.

Source code in framcore/loaders/loaders.py
class Loader(Base, ABC):
    """Base Loader class defining common API and functionality for all Loaders."""

    def __init__(self) -> None:
        """Set up cache of ids contained in the source of the Loader."""
        self._content_ids: list[str] | None = None

    def __repr__(self) -> str:
        """
        Overwrite string representation.

        Returns:
            str: Object represented as string.

        """
        return f"{type(self).__name__}({vars(self)})"

    def __getstate__(self) -> dict:
        """
        Return current object state, clearing any cached data.

        Returns:
            dict: The object's state dictionary.

        """
        self.clear_cache()
        return self.__dict__

    @abstractmethod
    def clear_cache(self) -> None:
        """Clear cached data from the loader."""
        pass

    def __deepcopy__(self, memo: dict) -> Loader:
        """
        Overwrite deepcopy.

        This is done to enable sharing of loaders. Since a loader is connected to one source, caching can thus be shared
        between Models.

        Args:
            memo (dict): Required argument.

        Returns:
            Loader: Returns itself.

        """
        return self

    @abstractmethod
    def get_source(self) -> object:
        """
        Return Loader source.

        Returns:
            object: Whatever the Loader interacts with to retrieve data.

        """
        pass

    @abstractmethod
    def set_source(self, new_source: object) -> None:
        """
        Set the Loader source.

        Args:
            new_source (object): Whatever the Loader should interact with to retrieve data.

        """
        pass

    @abstractmethod
    def get_metadata(self, content_id: str) -> object:
        """
        Get metadata from the Loader source.

        The metadata could describe the behavior of the data in the source.

        Args:
            content_id (str): Id of some content.

        Returns:
            object: Metadata in some format only the specific Loader knows.

        """
        pass

    @abstractmethod
    def _get_ids(self) -> list[str]:
        """
        Return a list of names which can be used to access specific data structures within the source.

        Most likely the names of all time vectors or curves in the Loader's source.

        Returns:
            list[str]

        """
        pass

    def get_ids(self) -> list[str]:
        """
        Handle caching of ids existing in the loader's source.

        Returns:
            list[str]: List containing ids in Loader source.

        """
        if self._content_ids is None:
            self._content_ids = self._get_ids()
            seen = set()
            duplicates = []
            for content_id in self._content_ids:
                if content_id in seen:
                    duplicates.append(content_id)
                else:
                    seen.add(content_id)
            if duplicates:
                msg = f"Duplicate ID's found in {self.get_source()}: {duplicates}"
                raise ValueError(msg)

        return self._content_ids

    def _id_exsists(self, content_id: str) -> None:
        """
        Check if a given id exists in source.

        Args:
            content_id (str): Id of some content.

        Raises:
            KeyError: If content id does not exist.

        """
        existing_ids = self.get_ids()
        if content_id not in existing_ids:
            # __repr__ should be overwritten in subclasses to produce enough info in error message.
            msg = f"Could not find ID {content_id} in {self}. Existing IDs: {existing_ids}"
            raise KeyError(msg)
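
get_ids reads the source once, validates that the IDs are unique, and then serves the cached list; a sketch, assuming loader is any concrete Loader:

ids = loader.get_ids()        # first call: runs _get_ids() and raises ValueError on duplicates
ids_again = loader.get_ids()  # second call: served from cache, the source is not re-read
assert ids is ids_again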
__deepcopy__(memo: dict) -> Loader

Overwrite deepcopy.

This is done to enable sharing of loaders. Since a loader is connected to one source, caching can thus be shared between Models.

Parameters:

Name Type Description Default
memo dict

Required argument.

required

Returns:

Name Type Description
Loader Loader

Returns itself.

Source code in framcore/loaders/loaders.py
def __deepcopy__(self, memo: dict) -> Loader:
    """
    Overwrite deepcopy.

    This is done to enable sharing of loaders. Since a loader is connected to one source, caching can thus be shared
    between Models.

    Args:
        memo (dict): Required argument.

    Returns:
        Loader: Returns itself.

    """
    return self
__getstate__() -> dict

Return current object state, clearing any cached data.

Returns:

Name Type Description
dict dict

The object's state dictionary.

Source code in framcore/loaders/loaders.py
def __getstate__(self) -> dict:
    """
    Return current object state, clearing any cached data.

    Returns:
        dict: The object's state dictionary.

    """
    self.clear_cache()
    return self.__dict__
__init__() -> None

Set up cache of ids contained in the source of the Loader.

Source code in framcore/loaders/loaders.py
def __init__(self) -> None:
    """Set up cache of ids contained in the source of the Loader."""
    self._content_ids: list[str] | None = None
__repr__() -> str

Overwrite string representation.

Returns:

Name Type Description
str str

Object represented as string.

Source code in framcore/loaders/loaders.py
def __repr__(self) -> str:
    """
    Overwrite string representation.

    Returns:
        str: Object represented as string.

    """
    return f"{type(self).__name__}({vars(self)})"
clear_cache() -> None abstractmethod

Clear cached data from the loader.

Source code in framcore/loaders/loaders.py
@abstractmethod
def clear_cache(self) -> None:
    """Clear cached data from the loader."""
    pass
get_ids() -> list[str]

Handle caching of ids existing in the loader's source.

Returns:

Type Description
list[str]

list[str]: List containing ids in Loader source.

Source code in framcore/loaders/loaders.py
def get_ids(self) -> list[str]:
    """
    Handle caching of ids existing in the loader's source.

    Returns:
        list[str]: List containing ids in Loader source.

    """
    if self._content_ids is None:
        self._content_ids = self._get_ids()
        seen = set()
        duplicates = []
        for content_id in self._content_ids:
            if content_id in seen:
                duplicates.append(content_id)
            else:
                seen.add(content_id)
        if duplicates:
            msg = f"Duplicate ID's found in {self.get_source()}: {duplicates}"
            raise ValueError(msg)

    return self._content_ids
get_metadata(content_id: str) -> object abstractmethod

Get metadata from the Loader source.

The metadata could describe the behavior of the data in the source.

Parameters:

Name Type Description Default
content_id str

Id of some content.

required

Returns:

Name Type Description
object object

Metadata in some format only the specific Loader knows.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_metadata(self, content_id: str) -> object:
    """
    Get metadata from the Loader source.

    The metadata could describe the behavior of the data in the source.

    Args:
        content_id (str): Id of some content.

    Returns:
        object: Metadata in some format only the specific Loader knows.

    """
    pass
get_source() -> object abstractmethod

Return Loader source.

Returns:

Name Type Description
object object

Whatever the Loader interacts with to retrieve data.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_source(self) -> object:
    """
    Return Loader source.

    Returns:
        object: Whatever the Loader interacts with to retrieve data.

    """
    pass
set_source(new_source: object) -> None abstractmethod

Set the Loader source.

Parameters:

Name Type Description Default
new_source object

Whatever the Loader should interact with to retrieve data.

required
Source code in framcore/loaders/loaders.py
@abstractmethod
def set_source(self, new_source: object) -> None:
    """
    Set the Loader source.

    Args:
        new_source (object): Whatever the Loader should interact with to retrieve data.

    """
    pass

TimeVectorLoader

Bases: Loader, ABC

Loader API for retrieving time vector data from some source.

Source code in framcore/loaders/loaders.py
class TimeVectorLoader(Loader, ABC):
    """Loader API for retrieving time vector data from some source."""

    @abstractmethod
    def get_values(self, vector_id: str) -> NDArray:
        """
        Return the values of a time vector in the Loader source.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            NDArray: Numpy array of all values.

        """
        pass

    @abstractmethod
    def get_index(self, vector_id: str) -> TimeIndex:
        """
        Return the index of a time vector in the Loader source.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            TimeIndex: TimeIndex object.

        """
        pass

    @abstractmethod
    def get_unit(self, vector_id: str) -> str:
        """
        Return unit of the values within a time vector in the loader source.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            str: String with unit.

        """
        pass

    @abstractmethod
    def is_max_level(self, vector_id: str) -> bool | None:
        """
        Check if the given TimeVector is a level representing max Volume/Capacity/Price.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            True - vector is a level representing max Volume/Capacity.
            False - vector is a level representing average Volume/Capacity over a given reference period.
            None - vector is not a level.

        """
        pass

    @abstractmethod
    def is_zero_one_profile(self, vector_id: str) -> bool | None:
        """
        Check if the given TimeVector is a profile with values between zero and one.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            True - vector is a profile with values between zero and one.
            False - vector is a profile where the mean value is 1 given a reference period.
            None - vector is not a profile.

        """
        pass

    @abstractmethod
    def get_reference_period(self, vector_id: str) -> ReferencePeriod | None:
        """
        Get the reference period of a given vector.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            ReferencePeriod - if the vector is a mean one profile or average level, a reference period must exist.
            None - No reference period if vector is max level, zero one profile or not a level or profile.

        """
        pass

    def get_fingerprint(self, vector_id: str) -> Fingerprint:
        """Return Loader Fingerprint for given vector id."""
        f = Fingerprint(self)
        f.add("unit", self.get_unit(vector_id))
        f.add("index", self.get_index(vector_id))
        f.add("values", self.get_values(vector_id))
        return f
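
A usage sketch, assuming tv_loader is a concrete TimeVectorLoader and "hydro_inflow" is a hypothetical vector ID in its source:

vid = "hydro_inflow"
values = tv_loader.get_values(vid)  # numpy array of values
index = tv_loader.get_index(vid)    # TimeIndex describing the time axis
unit = tv_loader.get_unit(vid)

if tv_loader.is_zero_one_profile(vid) is False:
    ref = tv_loader.get_reference_period(vid)  # mean-one profiles must have a reference period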
get_fingerprint(vector_id: str) -> Fingerprint

Return Loader Fingerprint for given vector id.

Source code in framcore/loaders/loaders.py
def get_fingerprint(self, vector_id: str) -> Fingerprint:
    """Return Loader Fingerprint for given vector id."""
    f = Fingerprint(self)
    f.add("unit", self.get_unit(vector_id))
    f.add("index", self.get_index(vector_id))
    f.add("values", self.get_values(vector_id))
    return f
get_index(vector_id: str) -> TimeIndex abstractmethod

Return the index of a time vector in the Loader source.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Name Type Description
TimeIndex TimeIndex

TimeIndex object.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_index(self, vector_id: str) -> TimeIndex:
    """
    Return the index of a time vector in the Loader source.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        TimeIndex: TimeIndex object.

    """
    pass
get_reference_period(vector_id: str) -> ReferencePeriod | None abstractmethod

Get the reference period of a given vector.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Type Description
ReferencePeriod | None

ReferencePeriod - if the vector is a mean one profile or average level, a reference period must exist.

ReferencePeriod | None

None - No reference period if vector is max level, zero one profile or not a level or profile.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_reference_period(self, vector_id: str) -> ReferencePeriod | None:
    """
    Get the reference period of a given vector.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        ReferencePeriod - if the vector is a mean one profile or average level, a reference period must exist.
        None - No reference period if vector is max level, zero one profile or not a level or profile.

    """
    pass
get_unit(vector_id: str) -> str abstractmethod

Return unit of the values within a time vector in the loader source.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Name Type Description
str str

String with unit.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_unit(self, vector_id: str) -> str:
    """
    Return unit of the values within a time vector in the loader source.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        str: String with unit.

    """
    pass
get_values(vector_id: str) -> NDArray abstractmethod

Return the values of a time vector in the Loader source.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of all values.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_values(self, vector_id: str) -> NDArray:
    """
    Return the values of a time vector in the Loader source.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        NDArray: Numpy array of all values.

    """
    pass
is_max_level(vector_id: str) -> bool | None abstractmethod

Check if the given TimeVector is a level representing max Volume/Capacity/Price.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Type Description
bool | None

True - vector is a level representing max Volume/Capacity.

bool | None

False - vector is a level representing average Volume/Capacity over a given reference period.

bool | None

None - vector is not a level.

Source code in framcore/loaders/loaders.py
@abstractmethod
def is_max_level(self, vector_id: str) -> bool | None:
    """
    Check if the given TimeVector is a level representing max Volume/Capacity/Price.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        True - vector is a level representing max Volume/Capacity.
        False - vector is a level representing average Volume/Capacity over a given reference period.
        None - vector is not a level.

    """
    pass
is_zero_one_profile(vector_id: str) -> bool | None abstractmethod

Check if the given TimeVector is a profile with values between zero and one.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Type Description
bool | None

True - vector is a profile with values between zero and one.

bool | None

False - vector is a profile where the mean value is 1 given a reference period.

bool | None

None - vector is not a profile.

Source code in framcore/loaders/loaders.py
@abstractmethod
def is_zero_one_profile(self, vector_id: str) -> bool | None:
    """
    Check if the given TimeVector is a profile with values between zero and one.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        True - vector is a profile with values between zero and one.
        False - vector is a profile where the mean value is 1 given a reference period.
        None - vector is not a profile.

    """
    pass

loaders

Classes defining APIs for Loaders.

CurveLoader

Bases: Loader, ABC

Loader API for retrieving curve data from some source.

Source code in framcore/loaders/loaders.py
class CurveLoader(Loader, ABC):
    """Loader API for retrieving curve data from some source."""

    @abstractmethod
    def get_y_axis(self, curve_id: str) -> NDArray:
        """
        Return the values of a Curve's y axis in the Loader source.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            NDArray: Numpy array of all values in the y axis.

        """
        pass

    @abstractmethod
    def get_x_axis(self, curve_id: str) -> NDArray:
        """
        Return the values of a Curve's x axis in the Loader source.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            NDArray: Numpy array of all values in the x axis.

        """
        pass

    @abstractmethod
    def get_x_unit(self, curve_id: str) -> str:
        """
        Return the unit of the x axis of a specific curve.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            str: Unit of the curve's x axis.

        """
        pass

    @abstractmethod
    def get_y_unit(self, curve_id: str) -> str:
        """
        Return the unit of the y axis of a specific curve.

        Args:
            curve_id (str): ID of the curve.

        Returns:
            str: Unit of the curve's y axis.

        """
        pass
get_x_axis(curve_id: str) -> NDArray abstractmethod

Return the values of a Curve's x axis in the Loader source.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of all values in the x axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_x_axis(self, curve_id: str) -> NDArray:
    """
    Return the values of a Curve's x axis in the Loader source.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        NDArray: Numpy array of all values in the x axis.

    """
    pass
get_x_unit(curve_id: str) -> str abstractmethod

Return the unit of the x axis of a specific curve.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
str str

Unit of the curve's x axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_x_unit(self, curve_id: str) -> str:
    """
    Return the unit of the x axis of a specific curve.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        str: Unit of the curve's x axis.

    """
    pass
get_y_axis(curve_id: str) -> NDArray abstractmethod

Return the values of a Curve's y axis in the Loader source.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of all values in the y axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_y_axis(self, curve_id: str) -> NDArray:
    """
    Return the values of a Curve's y axis in the Loader source.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        NDArray: Numpy array of all values in the y axis.

    """
    pass
get_y_unit(curve_id: str) -> str abstractmethod

Return the unit of the y axis of a specific curve.

Parameters:

Name Type Description Default
curve_id str

ID of the curve.

required

Returns:

Name Type Description
str str

Unit of the curve's y axis.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_y_unit(self, curve_id: str) -> str:
    """
    Return the unit of the y axis of a specific curve.

    Args:
        curve_id (str): ID of the curve.

    Returns:
        str: Unit of the curve's y axis.

    """
    pass
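
A concrete CurveLoader only has to supply the four axis accessors on top of the Loader basics. Below is a minimal, hypothetical sketch backed by an in-memory dict; the DictCurveLoader name, the dict layout and the import path framcore.loaders.loaders are assumptions for illustration, not part of the package.

import numpy as np
from numpy.typing import NDArray

from framcore.loaders.loaders import CurveLoader  # assumed import path


class DictCurveLoader(CurveLoader):
    """Hypothetical CurveLoader reading curves from an in-memory dict."""

    def __init__(self, curves: dict[str, dict]) -> None:
        # curves: {curve_id: {"x": list, "y": list, "x_unit": str, "y_unit": str}}
        super().__init__()
        self._curves = curves

    # Loader API
    def clear_cache(self) -> None:
        self._content_ids = None

    def get_source(self) -> object:
        return self._curves

    def set_source(self, new_source: object) -> None:
        self._curves = new_source

    def get_metadata(self, content_id: str) -> object:
        return None

    def _get_ids(self) -> list[str]:
        return list(self._curves)

    # CurveLoader API
    def get_x_axis(self, curve_id: str) -> NDArray:
        return np.asarray(self._curves[curve_id]["x"])

    def get_y_axis(self, curve_id: str) -> NDArray:
        return np.asarray(self._curves[curve_id]["y"])

    def get_x_unit(self, curve_id: str) -> str:
        return self._curves[curve_id]["x_unit"]

    def get_y_unit(self, curve_id: str) -> str:
        return self._curves[curve_id]["y_unit"]
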
FileLoader

Bases: Loader, ABC

Define common functionality and API for Loaders connected to a file as source.

Source code in framcore/loaders/loaders.py
class FileLoader(Loader, ABC):
    """Define common functionality and API for Loaders connected to a file as source."""

    _SUPPORTED_SUFFIXES: ClassVar[list[str]] = []

    def __init__(self, source: Path | str, relative_loc: Path | str | None = None) -> None:
        """
        Check validity of input parameters.

        Args:
            source (Path | str): Full file path or the absolute part of a file path
            relative_loc (Optional[Union[Path, str]], optional): The relative part of a file path. Defaults to None.

        """
        super().__init__()
        self._source = source
        self._relative_loc = relative_loc

        self._check_type(source, (Path, str))
        if self._relative_loc is not None:
            self._check_type(self._relative_loc, (Path, str))
        self._check_path_exists(self.get_source())
        self._check_path_supported(self.get_source())

    def __repr__(self) -> str:
        """Overwrite __repr__ to get better info."""
        return f"{type(self).__name__}(source={self._source}, relative_loc={self._relative_loc})"

    def get_source(self) -> Path:
        """Combine absolute and relative file path (if relative is defined) to get full source."""
        if self._relative_loc is None:
            return Path(self._source)
        return Path(self._source) / self._relative_loc

    def set_source(self, new_source: Path, relative_loc: Path | str | None = None) -> None:
        """
        Set absolute and relative parts of filepath.

        Args:
            new_source (Path): New absolute part.
            relative_loc (Optional[Union[Path, str]], optional): New relative part. Defaults to None.

        """
        self._source = new_source
        self._relative_loc = relative_loc

    @classmethod
    def get_supported_suffixes(cls) -> list[str]:
        """
        Return list of supported file types.

        Returns:
            list: List of filetypes.

        """
        return cls._SUPPORTED_SUFFIXES

    def _check_path_exists(self, path: Path) -> None:
        """
        Check if a file path exists.

        Args:
            path (Path): Path to check.

        Raises:
            FileNotFoundError: If the path does not exist.

        """
        if not path.exists():
            msg = f"""File {path} does not exist. Could not create {type(self)}."""
            raise FileNotFoundError(msg)

    def _check_path_supported(self, path: Path) -> None:
        """
        Check if a file is supported/readable by this FileLoader instance.

        Args:
            path (Path): Path to check.

        Raises:
            ValueError: If the file type is not defined as supported.

        """
        filetype = path.suffix
        if filetype not in self._SUPPORTED_SUFFIXES:
            msg = f"File type of {path}, {filetype} is not supported by {type(self)}. Supported filetypes: {self._SUPPORTED_SUFFIXES}"
            raise ValueError(msg)
__init__(source: Path | str, relative_loc: Path | str | None = None) -> None

Check validity of input parameters.

Parameters:

Name Type Description Default
source Path | str

Full file path or the absolute part of a file path

required
relative_loc Optional[Union[Path, str]]

The relative part of a file path. Defaults to None.

None
Source code in framcore/loaders/loaders.py
def __init__(self, source: Path | str, relative_loc: Path | str | None = None) -> None:
    """
    Check validity of input parameters.

    Args:
        source (Path | str): Full file path or the absolute part of a file path
        relative_loc (Optional[Union[Path, str]], optional): The relative part of a file path. Defaults to None.

    """
    super().__init__()
    self._source = source
    self._relative_loc = relative_loc

    self._check_type(source, (Path, str))
    if self._relative_loc is not None:
        self._check_type(self._relative_loc, (Path, str))
    self._check_path_exists(self.get_source())
    self._check_path_supported(self.get_source())
__repr__() -> str

Overwrite __repr__ to get better info.

Source code in framcore/loaders/loaders.py
def __repr__(self) -> str:
    """Overwrite __repr__ to get better info."""
    return f"{type(self).__name__}(source={self._source}, relative_loc={self._relative_loc})"
get_source() -> Path

Combine absolute and relative file path (if relative is defined) to get full source.

Source code in framcore/loaders/loaders.py
def get_source(self) -> Path:
    """Combine absolute and relative file path (if relative is defined) to get full source."""
    if self._relative_loc is None:
        return Path(self._source)
    return Path(self._source) / self._relative_loc
get_supported_suffixes() -> list[str] classmethod

Return list of supported file types.

Returns:

Name Type Description
list list[str]

List of filetypes.

Source code in framcore/loaders/loaders.py
@classmethod
def get_supported_suffixes(cls) -> list[str]:
    """
    Return list of supported file types.

    Returns:
        list: List of filetypes.

    """
    return cls._SUPPORTED_SUFFIXES
set_source(new_source: Path, relative_loc: Path | str | None = None) -> None

Set absolute and relative parts of filepath.

Parameters:

Name Type Description Default
new_source Path

New absolute part.

required
relative_loc Optional[Union[Path, str]]

New relative part. Defaults to None.

None
Source code in framcore/loaders/loaders.py
def set_source(self, new_source: Path, relative_loc: Path | str | None = None) -> None:
    """
    Set absolute and relative parts of filepath.

    Args:
        new_source (Path): New absolute part.
        relative_loc (Optional[Union[Path, str]], optional): New relative part. Defaults to None.

    """
    self._source = new_source
    self._relative_loc = relative_loc
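
A FileLoader subclass mainly declares its supported suffixes and the remaining Loader methods; path validation happens in __init__. A minimal, hypothetical sketch (the CsvLoader name, its id scheme and the import path framcore.loaders.loaders are assumptions):

from framcore.loaders.loaders import FileLoader  # assumed import path


class CsvLoader(FileLoader):
    """Hypothetical FileLoader accepting only .csv sources."""

    _SUPPORTED_SUFFIXES = [".csv"]

    def clear_cache(self) -> None:
        self._content_ids = None

    def get_metadata(self, content_id: str) -> object:
        return None

    def _get_ids(self) -> list[str]:
        # Treat the header row of the csv file as the list of ids.
        with self.get_source().open() as f:
            return f.readline().strip().split(",")


# Splitting source and relative_loc lets loaders share one base directory:
#   loader = CsvLoader(source="/data/scenario1", relative_loc="prices/spot.csv")
#   loader.get_source()  ->  Path("/data/scenario1/prices/spot.csv")
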
Loader

Bases: Base, ABC

Base Loader class defining common API and functionality for all Loaders.

Source code in framcore/loaders/loaders.py
class Loader(Base, ABC):
    """Base Loader class defining common API and functionality for all Loaders."""

    def __init__(self) -> None:
        """Set up cache of ids contained in the source of the Loader."""
        self._content_ids: list[str] | None = None

    def __repr__(self) -> str:
        """
        Overwrite string representation.

        Returns:
            str: Object represented as string.

        """
        return f"{type(self).__name__}({vars(self)})"

    def __getstate__(self) -> dict:
        """
        Return current object state, clearing any cached data.

        Returns:
            dict: The object's state dictionary.

        """
        self.clear_cache()
        return self.__dict__

    @abstractmethod
    def clear_cache(self) -> None:
        """Clear cached data from the loader."""
        pass

    def __deepcopy__(self, memo: dict) -> Loader:
        """
        Overwrite deepcopy.

        This is done to enable sharing of loaders. Since a loader is connected to one source, caching can thus be shared
        between Models.

        Args:
            memo (dict): Required argument.

        Returns:
            Loader: Returns itself.

        """
        return self

    @abstractmethod
    def get_source(self) -> object:
        """
        Return Loader source.

        Returns:
            object: Whatever the Loader interacts with to retrieve data.

        """
        pass

    @abstractmethod
    def set_source(self, new_source: object) -> None:
        """
        Set the Loader source.

        Args:
            new_source (object): Whatever the Loader should interact with to retrieve data.

        """
        pass

    @abstractmethod
    def get_metadata(self, content_id: str) -> object:
        """
        Get metadata from the Loader source.

        The metadata could describe behavior of the data in source.

        Args:
            content_id (str): Id of some content.

        Returns:
            object: Metadata in some format only the specific Loader knows.

        """
        pass

    @abstractmethod
    def _get_ids(self) -> list[str]:
        """
        Return list of names which can be used to access specific data structures within source.

        Most likely the names of all time vectors or curves in the Loader's source.

        Returns:
            list[str]

        """
        pass

    def get_ids(self) -> list[str]:
        """
        Handle caching of ids existing in the Loader's source.

        Returns:
            list[str]: List containing ids in Loader source.

        """
        if self._content_ids is None:
            self._content_ids = self._get_ids()
            seen = set()
            duplicates = []
            for content_id in self._content_ids:
                if content_id in seen:
                    duplicates.append(content_id)
                else:
                    seen.add(content_id)
            if duplicates:
                msg = f"Duplicate ID's found in {self.get_source()}: {duplicates}"
                raise ValueError(msg)

        return self._content_ids

    def _id_exsists(self, content_id: str) -> None:
        """
        Check if a given id exists in source.

        Args:
            content_id (str): Id of some content.

        Raises:
            KeyError: If content id does not exist.

        """
        existing_ids = self.get_ids()
        if content_id not in existing_ids:
            # __repr__ should be overwritten in subclasses to produce enough info in error message.
            msg = f"Could not find ID {content_id} in {self}. Existing IDs: {existing_ids}"
            raise KeyError(msg)
__deepcopy__(memo: dict) -> Loader

Overwrite deepcopy.

This is done to enable sharing of loaders. Since a loader is connected to one source, caching can thus be shared between Models.

Parameters:

Name Type Description Default
memo dict

Required argument.

required

Returns:

Name Type Description
Loader Loader

Returns itself.

Source code in framcore/loaders/loaders.py
def __deepcopy__(self, memo: dict) -> Loader:
    """
    Overwrite deepcopy.

    This is done to enable sharing of loaders. Since a loader is connected to one source, caching can thus be shared
    between Models.

    Args:
        memo (dict): Required argument.

    Returns:
        Loader: Returns itself.

    """
    return self
__getstate__() -> dict

Return current object state, clearing any cached data.

Returns:

Name Type Description
dict dict

The object's state dictionary.

Source code in framcore/loaders/loaders.py
def __getstate__(self) -> dict:
    """
    Return current object state, clearing any cached data.

    Returns:
        dict: The object's state dictionary.

    """
    self.clear_cache()
    return self.__dict__
__init__() -> None

Set up cache of ids contained in the source of the Loader.

Source code in framcore/loaders/loaders.py
def __init__(self) -> None:
    """Set up cache of ids contained in the source of the Loader."""
    self._content_ids: list[str] | None = None
__repr__() -> str

Overwrite string representation.

Returns:

Name Type Description
str str

Object represented as string.

Source code in framcore/loaders/loaders.py
def __repr__(self) -> str:
    """
    Overwrite string representation.

    Returns:
        str: Object represented as string.

    """
    return f"{type(self).__name__}({vars(self)})"
clear_cache() -> None abstractmethod

Clear cached data from the loader.

Source code in framcore/loaders/loaders.py
@abstractmethod
def clear_cache(self) -> None:
    """Clear cached data from the loader."""
    pass
get_ids() -> list[str]

Handle caching of ids existing in the Loader's source.

Returns:

Type Description
list[str]

list[str]: List containing ids in Loader source.

Source code in framcore/loaders/loaders.py
def get_ids(self) -> list[str]:
    """
    Handle caching of ids existing in the Loader's source.

    Returns:
        list[str]: List containing ids in Loader source.

    """
    if self._content_ids is None:
        self._content_ids = self._get_ids()
        seen = set()
        duplicates = []
        for content_id in self._content_ids:
            if content_id in seen:
                duplicates.append(content_id)
            else:
                seen.add(content_id)
        if duplicates:
            msg = f"Duplicate ID's found in {self.get_source()}: {duplicates}"
            raise ValueError(msg)

    return self._content_ids
get_metadata(content_id: str) -> object abstractmethod

Get metadata from the Loader source.

The metadata could describe behavior of the data in source.

Parameters:

Name Type Description Default
content_id str

Id of some content.

required

Returns:

Name Type Description
object object

Metadata in some format only the specific Loader knows.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_metadata(self, content_id: str) -> object:
    """
    Get metadata from the Loader source.

    The metadata could describe behavior of the data in source.

    Args:
        content_id (str): Id of some content.

    Returns:
        object: Metadata in some format only the specific Loader knows.

    """
    pass
get_source() -> object abstractmethod

Return Loader source.

Returns:

Name Type Description
object object

Whatever the Loader interacts with to retrieve data.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_source(self) -> object:
    """
    Return Loader source.

    Returns:
        object: Whatever the Loader interacts with to retrieve data.

    """
    pass
set_source(new_source: object) -> None abstractmethod

Set the Loader source.

Parameters:

Name Type Description Default
new_source object

Whatever the Loader should interact with to retrieve data.

required
Source code in framcore/loaders/loaders.py
@abstractmethod
def set_source(self, new_source: object) -> None:
    """
    Set the Loader source.

    Args:
        new_source (object): Whatever the Loader should interact with to retrieve data.

    """
    pass
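
The id cache in get_ids is computed once per Loader instance and validated for duplicates. A toy, hypothetical subclass showing both behaviours (all names below are invented; the import path framcore.loaders.loaders is an assumption):

from framcore.loaders.loaders import Loader  # assumed import path


class ListLoader(Loader):
    """Toy Loader whose 'source' is just a list of ids."""

    def __init__(self, ids: list[str]) -> None:
        super().__init__()
        self._ids = ids

    def clear_cache(self) -> None:
        self._content_ids = None

    def get_source(self) -> object:
        return self._ids

    def set_source(self, new_source: object) -> None:
        self._ids = new_source

    def get_metadata(self, content_id: str) -> object:
        return None

    def _get_ids(self) -> list[str]:
        return self._ids


loader = ListLoader(["a", "b"])
assert loader.get_ids() == ["a", "b"]   # computed on first call, then cached

# ListLoader(["a", "a"]).get_ids()      # would raise ValueError: duplicate ids
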
TimeVectorLoader

Bases: Loader, ABC

Loader API for retrieving time vector data from some source.

Source code in framcore/loaders/loaders.py
class TimeVectorLoader(Loader, ABC):
    """Loader API for retrieving time vector data from some source."""

    @abstractmethod
    def get_values(self, vector_id: str) -> NDArray:
        """
        Return the values of a time vector in the Loader source.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            NDArray: Numpy array of all values.

        """
        pass

    @abstractmethod
    def get_index(self, vector_id: str) -> TimeIndex:
        """
        Return the index of a time vector in the Loader source.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            TimeIndex: TimeIndex object.

        """
        pass

    @abstractmethod
    def get_unit(self, vector_id: str) -> str:
        """
        Return unit of the values within a time vector in the loader source.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            str: String with unit.

        """
        pass

    @abstractmethod
    def is_max_level(self, vector_id: str) -> bool | None:
        """
        Check if the given TimeVector is a level representing max Volume/Capacity/Price.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            True - vector is a level representing max Volume/Capacity.
            False - vector is a level representing average Volume/Capacity over a given reference period.
            None - vector is not a level.

        """
        pass

    @abstractmethod
    def is_zero_one_profile(self, vector_id: str) -> bool | None:
        """
        Check if the given TimeVector is a profile with values between zero and one.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            True - vector is a profile with values between zero and one.
            False - vector is a profile where the mean value is 1 given a reference period.
            None - vector is not a profile.

        """
        pass

    @abstractmethod
    def get_reference_period(self, vector_id: str) -> ReferencePeriod | None:
        """
        Get the reference period of a given vector.

        Args:
            vector_id (str): ID of the vector.

        Returns:
            ReferencePeriod - if the vector is a mean one profile or average level, a reference period must exist.
            None - No reference period if vector is max level, zero one profile or not a level or profile.

        """
        pass

    def get_fingerprint(self, vector_id: str) -> Fingerprint:
        """Return Loader Fingerprint for given vector id."""
        f = Fingerprint(self)
        f.add("unit", self.get_unit(vector_id))
        f.add("index", self.get_index(vector_id))
        f.add("values", self.get_values(vector_id))
        return f
get_fingerprint(vector_id: str) -> Fingerprint

Return Loader Fingerprint for given vector id.

Source code in framcore/loaders/loaders.py
def get_fingerprint(self, vector_id: str) -> Fingerprint:
    """Return Loader Fingerprint for given vector id."""
    f = Fingerprint(self)
    f.add("unit", self.get_unit(vector_id))
    f.add("index", self.get_index(vector_id))
    f.add("values", self.get_values(vector_id))
    return f
get_index(vector_id: str) -> TimeIndex abstractmethod

Return the index of a time vector in the Loader source.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Name Type Description
TimeIndex TimeIndex

TimeIndex object.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_index(self, vector_id: str) -> TimeIndex:
    """
    Return the index of a time vector in the Loader source.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        TimeIndex: TimeIndex object.

    """
    pass
get_reference_period(vector_id: str) -> ReferencePeriod | None abstractmethod

Get the reference period of a given vector.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Type Description
ReferencePeriod | None

ReferencePeriod - if the vector is a mean one profile or average level, a reference period must exist.

ReferencePeriod | None

None - No reference period if vector is max level, zero one profile or not a level or profile.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_reference_period(self, vector_id: str) -> ReferencePeriod | None:
    """
    Get the reference period of a given vector.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        ReferencePeriod - if the vector is a mean one profile or average level, a reference period must exist.
        None - No reference period if vector is max level, zero one profile or not a level or profile.

    """
    pass
get_unit(vector_id: str) -> str abstractmethod

Return unit of the values within a time vector in the loader source.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Name Type Description
str str

String with unit.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_unit(self, vector_id: str) -> str:
    """
    Return unit of the values within a time vector in the loader source.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        str: String with unit.

    """
    pass
get_values(vector_id: str) -> NDArray abstractmethod

Return the values of a time vector in the Loader source.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Name Type Description
NDArray NDArray

Numpy array of all values.

Source code in framcore/loaders/loaders.py
@abstractmethod
def get_values(self, vector_id: str) -> NDArray:
    """
    Return the values of a time vector in the Loader source.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        NDArray: Numpy array of all values.

    """
    pass
is_max_level(vector_id: str) -> bool | None abstractmethod

Check if the given TimeVector is a level representing max Volume/Capacity/Price.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Type Description
bool | None

True - vector is a level representing max Volume/Capacity.

bool | None

False - vector is a level representing average Volume/Capacity over a given reference period.

bool | None

None - vector is not a level.

Source code in framcore/loaders/loaders.py
@abstractmethod
def is_max_level(self, vector_id: str) -> bool | None:
    """
    Check if the given TimeVector is a level representing max Volume/Capacity/Price.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        True - vector is a level representing max Volume/Capacity.
        False - vector is a level representing average Volume/Capacity over a given reference period.
        None - vector is not a level.

    """
    pass
is_zero_one_profile(vector_id: str) -> bool | None abstractmethod

Check if the given TimeVector is a profile with values between zero and one.

Parameters:

Name Type Description Default
vector_id str

ID of the vector.

required

Returns:

Type Description
bool | None

True - vector is a profile with values between zero and one.

bool | None

False - vector is a profile where the mean value is 1 given a reference period.

bool | None

None - vector is not a profile.

Source code in framcore/loaders/loaders.py
@abstractmethod
def is_zero_one_profile(self, vector_id: str) -> bool | None:
    """
    Check if the given TimeVector is a profile with values between zero and one.

    Args:
        vector_id (str): ID of the vector.

    Returns:
        True - vector is a profile with values between zero and one.
        False - vector is a profile where the mean value is 1 given a reference period.
        None - vector is not a profile.

    """
    pass
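
The three classification methods are designed to be read together: for any vector, exactly one of a small set of mutually exclusive cases applies, and get_reference_period must agree with it. Summarised from the docstrings above:

# Classification contract for a time vector, per the docstrings above:
#
#   is_max_level  is_zero_one_profile  get_reference_period  meaning
#   True          None                 None                  max level
#   False         None                 ReferencePeriod       average level
#   None          True                 None                  zero-one profile
#   None          False                ReferencePeriod       mean-one profile
#   None          None                 None                  neither level nor profile
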

metadata

Div

Div

Bases: Meta

Div class is made for loss-less aggregation of metadata. Subclass of Meta.

Its combine method is made to keep all unique metadata, so that nothing is thrown away during aggregation.

Source code in framcore/metadata/Div.py
class Div(Meta):
    """
    Div class is made for loss-less aggregation of metadata. Subclass of Meta.

    Its combine method is made to keep all unique metadata,
    so that nothing is thrown away during aggregation.
    """

    def __init__(self, value: Meta | set[Meta] | None = None) -> None:
        """Create Div metadata."""
        self._check_type_meta(value, with_none=True)

        self._value: set[Meta] = set()

        if isinstance(value, set):
            self._value.update(value)

        elif isinstance(value, Meta):
            self._value.add(value)

    def _check_type_meta(self, value: Meta | set[Meta], with_none: bool) -> None:
        if with_none:
            self._check_type(value, (Meta, set, type(None)))
        else:
            self._check_type(value, (Meta, set))
        if isinstance(value, set):
            for x in value:
                self._check_type(x, Meta)

    def get_value(self) -> set[Meta]:
        """Return str value."""
        return self._value

    def set_value(self, value: Meta | set[Meta]) -> None:
        """Set str value. TypeError if not str."""
        self._check_type_meta(value, with_none=False)
        if isinstance(value, set):
            self._value.update(value)

        elif isinstance(value, Meta):
            self._value.add(value)

    def combine(self, other: Meta | set[Meta]) -> Div:
        """Just consume other and return self."""
        self._check_type_meta(other, with_none=True)
        if isinstance(other, Div):
            for x in other.get_value():
                self.combine(x)
        else:
            self.set_value(other)
        return self

    def get_fingerprint(self) -> Fingerprint:
        """
        Generate and return a Fingerprint representing the current set of Meta values.

        Returns
        -------
        Fingerprint
            A fingerprint object based on the hashes of the contained Meta values.

        """
        fingerprint = Fingerprint()
        hash_list = [value.get_fingerprint().get_hash() for value in self._value]
        fingerprint.add("_value", _custom_hash(hash_list))
        return fingerprint
__init__(value: Meta | set[Meta] | None = None) -> None

Create Div metadata.

Source code in framcore/metadata/Div.py
def __init__(self, value: Meta | set[Meta] | None = None) -> None:
    """Create Div metadata."""
    self._check_type_meta(value, with_none=True)

    self._value: set[Meta] = set()

    if isinstance(value, set):
        self._value.update(value)

    elif isinstance(value, Meta):
        self._value.add(value)
combine(other: Meta | set[Meta]) -> Div

Just consume other and return self.

Source code in framcore/metadata/Div.py
def combine(self, other: Meta | set[Meta]) -> Div:
    """Just consume other and return self."""
    self._check_type_meta(other, with_none=True)
    if isinstance(other, Div):
        for x in other.get_value():
            self.combine(x)
    else:
        self.set_value(other)
    return self
get_fingerprint() -> Fingerprint

Generate and return a Fingerprint representing the current set of Meta values.

Returns

Fingerprint: A fingerprint object based on the hashes of the contained Meta values.

Source code in framcore/metadata/Div.py
def get_fingerprint(self) -> Fingerprint:
    """
    Generate and return a Fingerprint representing the current set of Meta values.

    Returns
    -------
    Fingerprint
        A fingerprint object based on the hashes of the contained Meta values.

    """
    fingerprint = Fingerprint()
    hash_list = [value.get_fingerprint().get_hash() for value in self._value]
    fingerprint.add("_value", _custom_hash(hash_list))
    return fingerprint
get_value() -> set[Meta]

Return the set of Meta values.

Source code in framcore/metadata/Div.py
def get_value(self) -> set[Meta]:
    """Return str value."""
    return self._value
set_value(value: Meta | set[Meta]) -> None

Add Meta value(s). TypeError if value is not Meta or a set of Meta.

Source code in framcore/metadata/Div.py
def set_value(self, value: Meta | set[Meta]) -> None:
    """Set str value. TypeError if not str."""
    self._check_type_meta(value, with_none=False)
    if isinstance(value, set):
        self._value.update(value)

    elif isinstance(value, Meta):
        self._value.add(value)
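
A short runnable illustration of the loss-less behaviour (assuming Div and Member are importable as shown; the exact import path is an assumption):

from framcore.metadata import Div, Member  # assumed import path

d = Div(Member("hydro"))
d.combine(Member("wind"))            # different value: kept
d.combine(Member("hydro"))           # already present: set semantics, no duplicate
d.combine(Div({Member("solar")}))    # nested Divs are flattened into this one

assert d.get_value() == {Member("hydro"), Member("wind"), Member("solar")}
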

ExprMeta

ExprMeta

Bases: Meta

ExprMeta represents an Expr. Subclass of Meta.

When used, all components must have an ExprMeta.

Source code in framcore/metadata/ExprMeta.py
class ExprMeta(Meta):
    """
    ExprMeta represents an Expr. Subclass of Meta.

    When used, all components must have an ExprMeta.
    """

    def __init__(self, value: Expr) -> None:
        """Create new ExprMeta with float value."""
        self._value = value
        self._check_type(value, Expr)

    def __repr__(self) -> str:
        """Overwrite __repr__ for better string representation."""
        if not hasattr(self, "_value"):
            return f"{type(self).__name__}(uninitialized)"
        return f"{type(self).__name__}(expr={self._value})"

    def __eq__(self, other: object) -> bool:
        """Check equality based on expr."""
        if not isinstance(other, ExprMeta):
            return False
        return self._value == other._value

    def __hash__(self) -> int:
        """Compute the hash of the ExprMeta."""
        return hash(self._value)

    def get_value(self) -> Expr:
        """Return expr."""
        return self._value

    def set_value(self, value: Expr) -> None:
        """Set expr value. TypeError if not expr."""
        self._check_type(value, Expr)
        self._value = value

    def combine(self, other: Meta) -> Expr | Div:
        """Sum Expr."""
        if isinstance(other, ExprMeta):
            return self._value + other.get_value()

        div = Div(self)
        div.set_value(other)
        return div

    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint of the ScalarMeta."""
        return self.get_fingerprint_default()
__eq__(other: object) -> bool

Check equality based on expr.

Source code in framcore/metadata/ExprMeta.py
def __eq__(self, other: object) -> bool:
    """Check equality based on expr."""
    if not isinstance(other, ExprMeta):
        return False
    return self._value == other._value
__hash__() -> int

Compute the hash of the ExprMeta.

Source code in framcore/metadata/ExprMeta.py
def __hash__(self) -> int:
    """Compute the hash of the ExprMeta."""
    return hash(self._value)
__init__(value: Expr) -> None

Create new ExprMeta with Expr value.

Source code in framcore/metadata/ExprMeta.py
def __init__(self, value: Expr) -> None:
    """Create new ExprMeta with float value."""
    self._value = value
    self._check_type(value, Expr)
__repr__() -> str

Overwrite __repr__ for better string representation.

Source code in framcore/metadata/ExprMeta.py
def __repr__(self) -> str:
    """Overwrite __repr__ for better string representation."""
    if not hasattr(self, "_value"):
        return f"{type(self).__name__}(uninitialized)"
    return f"{type(self).__name__}(expr={self._value})"
combine(other: Meta) -> Expr | Div

Sum Expr.

Source code in framcore/metadata/ExprMeta.py
def combine(self, other: Meta) -> Expr | Div:
    """Sum Expr."""
    if isinstance(other, ExprMeta):
        return self._value + other.get_value()

    div = Div(self)
    div.set_value(other)
    return div
get_fingerprint() -> Fingerprint

Get the fingerprint of the ExprMeta.

Source code in framcore/metadata/ExprMeta.py
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint of the ScalarMeta."""
    return self.get_fingerprint_default()
get_value() -> Expr

Return expr.

Source code in framcore/metadata/ExprMeta.py
def get_value(self) -> Expr:
    """Return expr."""
    return self._value
set_value(value: Expr) -> None

Set expr value. TypeError if not expr.

Source code in framcore/metadata/ExprMeta.py
def set_value(self, value: Expr) -> None:
    """Set expr value. TypeError if not expr."""
    self._check_type(value, Expr)
    self._value = value

LevelExprMeta

LevelExprMeta

Bases: ExprMeta

LevelExprMeta represents an Expr. Subclass of ExprMeta.

When used, all components must have an ExprMeta.

Source code in framcore/metadata/LevelExprMeta.py
class LevelExprMeta(ExprMeta):
    """
    LevelExprMeta represents an Expr. Subclass of ExprMeta.

    When used, all components must have an ExprMeta.
    """

    def __init__(self, value: Expr | TimeVector) -> None:
        """
        Create new LevelExprMeta with Expr value.

        Args:
            value (Expr | TimeVector): Accepts Expr with is_level=True or TimeVector with is_max_level=True/False.

        Raises:
            TypeError: If value is not Expr or TimeVector.
            ValueError: If value is non-level Expr or TimeVector.

        """
        self._check_type(value, (Expr, TimeVector))

        if isinstance(value, TimeVector) and value.is_max_level() is None:
            raise ValueError("Parameter 'value' (TimeVector) must be a level (is_max_level must be True or False).")

        self._value = ensure_expr(value, is_level=True)
__init__(value: Expr | TimeVector) -> None

Create new LevelExprMeta with Expr value.

Parameters:

Name Type Description Default
value Expr | TimeVector

Accepts Expr with is_level=True or TimeVector with is_max_level=True/False.

required

Raises:

Type Description
TypeError

If value is not Expr or TimeVector.

ValueError

If value is non-level Expr or TimeVector.

Source code in framcore/metadata/LevelExprMeta.py
def __init__(self, value: Expr | TimeVector) -> None:
    """
    Create new LevelExprMeta with Expr value.

    Args:
        value (Expr | TimeVector): Accepts Expr with is_level=True or TimeVector with is_max_level=True/False.

    Raises:
        TypeError: If value is not Expr or TimeVector.
        ValueError: If value is non-level Expr or TimeVector.

    """
    self._check_type(value, (Expr, TimeVector))

    if isinstance(value, TimeVector) and value.is_max_level() is None:
        raise ValueError("Parameter 'value' (TimeVector) must be a level (is_max_level must be True or False).")

    self._value = ensure_expr(value, is_level=True)

Member

Member

Bases: Meta

Member represents membership in a category or group using a str. Subclass of Meta.

Should not have missing values.

When used, all components must have a membership.

Source code in framcore/metadata/Member.py
class Member(Meta):
    """
    Member represents membership in a category or group using a str. Subclass of Meta.

    Should not have missing values.

    When used, all components must have a membership.
    """

    def __init__(self, value: str) -> None:
        """Create new member with str value."""
        self._value = value
        self._check_type(value, str)

    def __repr__(self) -> str:
        """Overwrite __repr__ for better string representation."""
        return f"{type(self).__name__}(value={self._value})"

    def __eq__(self, other: object) -> bool:
        """Check equality based on value."""
        if not isinstance(other, Member):
            return False
        return self._value == other._value

    def __hash__(self) -> int:
        """Overwrite __hash__ since its added to sets."""
        return hash(self.__repr__())

    def get_value(self) -> str:
        """Return str value."""
        return self._value

    def set_value(self, value: str) -> None:
        """Set str value. TypeError if not str."""
        self._check_type(value, str)
        self._value = value

    def combine(self, other: Meta) -> Member | Div:
        """Return self if other == self else return Div containing both."""
        if self == other:
            return self
        d = Div(self)
        d.set_value(other)
        return d

    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint of the Member."""
        return self.get_fingerprint_default()
__eq__(other: object) -> bool

Check equality based on value.

Source code in framcore/metadata/Member.py
def __eq__(self, other: object) -> bool:
    """Check equality based on value."""
    if not isinstance(other, Member):
        return False
    return self._value == other._value
__hash__() -> int

Overwrite __hash__ since it's added to sets.

Source code in framcore/metadata/Member.py
def __hash__(self) -> int:
    """Overwrite __hash__ since its added to sets."""
    return hash(self.__repr__())
__init__(value: str) -> None

Create new member with str value.

Source code in framcore/metadata/Member.py
def __init__(self, value: str) -> None:
    """Create new member with str value."""
    self._value = value
    self._check_type(value, str)
__repr__() -> str

Overwrite __repr__ for better string representation.

Source code in framcore/metadata/Member.py
def __repr__(self) -> str:
    """Overwrite __repr__ for better string representation."""
    return f"{type(self).__name__}(value={self._value})"
combine(other: Meta) -> Member | Div

Return self if other == self else return Div containing both.

Source code in framcore/metadata/Member.py
def combine(self, other: Meta) -> Member | Div:
    """Return self if other == self else return Div containing both."""
    if self == other:
        return self
    d = Div(self)
    d.set_value(other)
    return d
get_fingerprint() -> Fingerprint

Get the fingerprint of the Member.

Source code in framcore/metadata/Member.py
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint of the Member."""
    return self.get_fingerprint_default()
get_value() -> str

Return str value.

Source code in framcore/metadata/Member.py
def get_value(self) -> str:
    """Return str value."""
    return self._value
set_value(value: str) -> None

Set str value. TypeError if not str.

Source code in framcore/metadata/Member.py
def set_value(self, value: str) -> None:
    """Set str value. TypeError if not str."""
    self._check_type(value, str)
    self._value = value
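
Member.combine returns self when the memberships agree and otherwise escalates to a Div holding both, so conflicting group labels survive aggregation. A runnable sketch (import path assumed):

from framcore.metadata import Div, Member  # assumed import path

a = Member("NO1")
assert a.combine(Member("NO1")) is a              # same membership: unchanged

mixed = Member("NO1").combine(Member("NO2"))      # different membership
assert isinstance(mixed, Div)
assert mixed.get_value() == {Member("NO1"), Member("NO2")}
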

Meta

Meta

Bases: Base, ABC

Metadata-interface class for components.

The interface is there to support validation and aggregation.

- Some types of metadata should not have any missing values
- Different types of metadata should be aggregated differently (e.g. ignore, sum, mean, keep all in list, etc.)

Source code in framcore/metadata/Meta.py
class Meta(Base, ABC):
    """
    Metadata-interface class for components.

    The interface is there to support validation and aggregation.
    - Some types of metadata should not have any missing values
    - Different types of metadata should be aggregated differently (e.g. ignore, sum, mean, keep all in list, etc.)
    """

    @abstractmethod
    def get_value(self) -> Any:  # noqa: ANN401
        """Return metadata value."""
        pass

    @abstractmethod
    def set_value(self, value: Any) -> None:  # noqa: ANN401
        """
        Set metadata value.

        Error if incorrect type or value.

        Some Meta types may be immutable and thus error if
        set_value is called with any value.
        """
        pass

    @abstractmethod
    def combine(self, other: Meta) -> Meta | None:
        """How should this metadata type be aggregated?."""
        pass

    @abstractmethod
    def get_fingerprint(self) -> Fingerprint:
        """Return fingerprint."""
        pass
combine(other: Meta) -> Meta | None abstractmethod

How should this metadata type be aggregated?

Source code in framcore/metadata/Meta.py
@abstractmethod
def combine(self, other: Meta) -> Meta | None:
    """How should this metadata type be aggregated?."""
    pass
get_fingerprint() -> Fingerprint abstractmethod

Return fingerprint.

Source code in framcore/metadata/Meta.py
@abstractmethod
def get_fingerprint(self) -> Fingerprint:
    """Return fingerprint."""
    pass
get_value() -> Any abstractmethod

Return metadata value.

Source code in framcore/metadata/Meta.py
@abstractmethod
def get_value(self) -> Any:  # noqa: ANN401
    """Return metadata value."""
    pass
set_value(value: Any) -> None abstractmethod

Set metadata value.

Error if incorrect type or value.

Some Meta types may be immutable and thus error if set_value is called with any value.

Source code in framcore/metadata/Meta.py
@abstractmethod
def set_value(self, value: Any) -> None:  # noqa: ANN401
    """
    Set metadata value.

    Error if incorrect type or value.

    Some Meta types may be immutable and thus error if
    set_value is called with any value.
    """
    pass
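
Implementing the interface takes only the four methods. A hypothetical CountMeta that aggregates by summation (the class is invented for illustration; get_fingerprint_default is assumed to be provided by Meta, as Member and ExprMeta use it above):

from framcore.metadata import Meta  # assumed import path


class CountMeta(Meta):
    """Hypothetical metadata counting aggregated components."""

    def __init__(self, value: int) -> None:
        self._check_type(value, int)
        self._value = value

    def get_value(self) -> int:
        return self._value

    def set_value(self, value: int) -> None:
        self._check_type(value, int)
        self._value = value

    def combine(self, other: Meta) -> Meta:
        # Aggregate by summing counts; other strategies could ignore or use Div.
        if isinstance(other, CountMeta):
            return CountMeta(self._value + other.get_value())
        return None

    def get_fingerprint(self):
        return self.get_fingerprint_default()
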

populators

Populator

Populator API for creating a system of Components, TimeVectors and Curves (and Exprs) for a Model object.

Populator

Bases: Base, ABC

Populate a model with data from a data source.

Source code in framcore/populators/Populator.py
class Populator(Base, ABC):
    """Populate a model with data from a data source."""

    def __init__(self) -> None:
        """
        Set up ID and reference registration containers.

        These are used to check if IDs and references actually exist in the system.
        """
        super().__init__()

        self._registered_ids: dict[str, list[object]] = {}
        self._registered_refs: dict[str, set[str]] = {}

    def populate(self, model: Model) -> None:
        """
        Add data objects from a database to an input Model.

        These data objects shall be of class Component, TimeVector, and Curve.
        The method _populate should be overwritten in a subclass of Populator.
        In this way, it is used to create objects from any database.

        Args:
            model (Model): Model which will have the objects added to it.

        """
        self._check_type(model, Model)
        new_data = self._populate()

        # check that the new_data dict complies with the type hints of _populate?
        for existing_id in model.get_data():
            self._register_id(existing_id, model)
        errors = list(self._check_duplicate_ids())
        model.get_data().update(new_data)
        errors += list(self._check_references(model.get_data()))
        self._report_errors(errors)

    @abstractmethod
    def _populate(self) -> dict[str, Component | TimeVector | Curve | Expr]:
        """Create and return Components, TimeVectors and Curves. Possibly also Exprs."""
        pass

    def _check_duplicate_ids(self) -> set[str]:
        """
        Return error messages for ids registered with more than one source.

        Returns:
            set[str]: One error message per duplicated id, naming the id and its sources.

        """
        return {f"Duplicate ID found: '{duplicate_id}' in sources {sources}" for duplicate_id, sources in self._registered_ids.items() if len(sources) > 1}

    def _check_references(self, data: dict[str, Component | TimeVector | Curve | Expr]) -> set:
        errors = set()
        for ref, referencers in self._registered_refs.items():
            if ref not in data:
                msg = f"References to an invalid ID found. ID '{ref}' is not connected to any data."
                try:
                    sources = {source_id: data[source_id] for source_id in referencers}
                except KeyError:
                    errors.add(
                        msg + f" Sub Components referencing the faulty ID: {referencers}",
                    )
                else:
                    errors.add(
                        msg + f" Components referencing the faulty ID: {sources}",
                    )
        return errors

    def _report_errors(self, errors: list[str]) -> None:
        if errors:
            n = len(errors)
            s = "s" if n > 1 else ""
            error_str = "\n".join(errors)
            message = f"Found {n} error{s}:\n{error_str}"
            raise RuntimeError(message)

    def _register_id(self, new_id: str, source: object) -> None:
        """
        Register an id and its source.

        Args:
            new_id (str): New id to be registered.
            source (object): Source of the new id.

        """
        if new_id in self._registered_ids:
            self._registered_ids[new_id].append(source)
        else:
            self._registered_ids[new_id] = [source]

    def _register_references(self, component_id: str, references: set) -> None:
        for ref in references:
            if ref in self._registered_refs:
                self._registered_refs[ref].add(component_id)
            else:
                self._registered_refs[ref] = {component_id}
__init__() -> None

Set up ID and reference registration containers.

These are used to check if IDs and references actually exist in the system.

Source code in framcore/populators/Populator.py
def __init__(self) -> None:
    """
    Set up ID and reference registration containers.

    These are used to check if IDs and references actually exist in the system.
    """
    super().__init__()

    self._registered_ids: dict[str, list[object]] = {}
    self._registered_refs: dict[str, set[str]] = {}
populate(model: Model) -> None

Add data objects from a database to an input Model.

These data objects shall be of class Component, TimeVector, and Curve. The method _populate should be overwritten in a subclass of Populator. In this way, it is used to create objects from any database.

Parameters:

Name Type Description Default
model Model

Model which will have the objects added to it.

required
Source code in framcore/populators/Populator.py
def populate(self, model: Model) -> None:
    """
    Add data objects from a database to an input Model.

    These data objects shall be of class Component, TimeVector, and Curve.
    The method _populate should be overwritten in a subclass of Populator.
    In this way, it is used to create objects from any database.

    Args:
        model (Model): Model which will have the objects added to it.

    """
    self._check_type(model, Model)
    new_data = self._populate()

    # check that the new_data dict complies with the type hints of _populate?
    for existing_id in model.get_data():
        self._register_id(existing_id, model)
    errors = list(self._check_duplicate_ids())
    model.get_data().update(new_data)
    errors += list(self._check_references(model.get_data()))
    self._report_errors(errors)
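
A concrete Populator only has to implement _populate; the registration helpers let populate() validate ids and references afterwards. A minimal, hypothetical sketch (StaticPopulator and its data are invented; the import path is an assumption):

from framcore.populators.Populator import Populator  # assumed import path


class StaticPopulator(Populator):
    """Hypothetical Populator returning a fixed dict of objects."""

    def __init__(self, objects: dict) -> None:
        super().__init__()
        self._objects = objects

    def _populate(self) -> dict:
        # Register each id so populate() can detect duplicates.
        for object_id in self._objects:
            self._register_id(object_id, self)
        return self._objects


# populator = StaticPopulator({"demand_no1": some_component})
# populator.populate(model)  # RuntimeError if ids collide or references dangle
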

querydbs

CacheDB

CacheDB

Bases: QueryDB

Stores models and precomputed values.

Source code in framcore/querydbs/CacheDB.py
class CacheDB(QueryDB):
    """Stores models and precomputed values."""

    def __init__(self, model: Model, *models: tuple[Model]) -> None:
        """
        Initialize CacheDB with one or more Model instances.

        Args:
            model (Model): The primary Model instance.
            *models (tuple[Model]): Additional Model instances.

        """
        self._models: tuple[Model] = (model, *models)
        self._cache = dict()
        self._min_elapsed_seconds = 0.01

    def set_min_elapsed_seconds(self, value: float) -> None:
        """Values that takes below this threshold to compute, does not get cached."""
        self._check_type(value, float)
        self._check_float(value=value, lower_bound=0.0, upper_bound=None)
        self._min_elapsed_seconds = value

    def get_min_elapsed_seconds(self) -> float:
        """Values that takes below this threshold to compute, does not get cached."""
        return self._min_elapsed_seconds

    def _get(self, key: object) -> object:
        if key in self._cache:
            return self._cache[key]
        for m in self._models:
            data = m.get_data()
            if key in data:
                return data[key]
        message = f"Key '{key}' not found."
        raise KeyError(message)

    def _has_key(self, key: object) -> bool:
        return key in self._cache or any(key in m.get_data() for m in self._models)

    def _put(self, key: object, value: object, elapsed_seconds: float) -> None:
        if elapsed_seconds < self._min_elapsed_seconds:
            return
        self._cache[key] = value

    def _get_data(self) -> dict:
        return self._models[0].get_data()
__init__(model: Model, *models: tuple[Model]) -> None

Initialize CacheDB with one or more Model instances.

Parameters:

Name Type Description Default
model Model

The primary Model instance.

required
*models tuple[Model]

Additional Model instances.

()
Source code in framcore/querydbs/CacheDB.py
def __init__(self, model: Model, *models: tuple[Model]) -> None:
    """
    Initialize CacheDB with one or more Model instances.

    Args:
        model (Model): The primary Model instance.
        *models (tuple[Model]): Additional Model instances.

    """
    self._models: tuple[Model] = (model, *models)
    self._cache = dict()
    self._min_elapsed_seconds = 0.01
get_min_elapsed_seconds() -> float

Values that take less than this threshold to compute do not get cached.

Source code in framcore/querydbs/CacheDB.py
def get_min_elapsed_seconds(self) -> float:
    """Values that takes below this threshold to compute, does not get cached."""
    return self._min_elapsed_seconds
set_min_elapsed_seconds(value: float) -> None

Values that take less than this threshold to compute do not get cached.

Source code in framcore/querydbs/CacheDB.py
def set_min_elapsed_seconds(self, value: float) -> None:
    """Values that takes below this threshold to compute, does not get cached."""
    self._check_type(value, float)
    self._check_float(value=value, lower_bound=0.0, upper_bound=None)
    self._min_elapsed_seconds = value
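
A usage sketch, assuming Model can be constructed without arguments: only values whose reported compute time reaches the threshold get cached.

from framcore import Model
from framcore.querydbs import CacheDB

db = CacheDB(Model())
db.set_min_elapsed_seconds(0.1)             # cache only results that took >= 0.1 s to compute
db.put("cheap", 1.0, elapsed_seconds=0.01)  # below threshold: not cached
db.put("costly", 2.0, elapsed_seconds=0.5)  # cached
print(db.has_key("costly"))                 # True, served from the cache
print(db.has_key("cheap"))                  # False, unless a model holds that key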

ModelDB

ModelDB

Bases: QueryDB

A database-like interface for querying multiple Model instances.

Source code in framcore/querydbs/ModelDB.py
class ModelDB(QueryDB):
    """A database-like interface for querying multiple Model instances."""

    def __init__(self, model: Model, *models: tuple[Model]) -> None:
        """
        Initialize ModelDB with one or more Model instances.

        Args:
            model (Model): The primary Model instance.
            *models (tuple[Model]): Additional Model instances.

        """
        self._models: tuple[Model] = (model, *models)

    def _get(self, key: object) -> object:
        for m in self._models:
            data = m.get_data()
            if key in data:
                return data[key]
        message = f"Key '{key}' not found."
        raise KeyError(message)

    def _has_key(self, key: object) -> bool:
        return any(key in m.get_data() for m in self._models)

    def _put(self, key: object, value: object, elapsed_seconds: float) -> None:
        return None

    def _get_data(self) -> dict:
        return self._models[0].get_data()
__init__(model: Model, *models: tuple[Model]) -> None

Initialize ModelDB with one or more Model instances.

Parameters:

Name Type Description Default
model Model

The primary Model instance.

required
*models tuple[Model]

Additional Model instances.

()
Source code in framcore/querydbs/ModelDB.py
def __init__(self, model: Model, *models: tuple[Model]) -> None:
    """
    Initialize ModelDB with one or more Model instances.

    Args:
        model (Model): The primary Model instance.
        *models (tuple[Model]): Additional Model instances.

    """
    self._models: tuple[Model] = (model, *models)
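
A usage sketch (again assuming argument-free Model construction): lookups scan the models in the given order, so the first model shadows later ones on key conflicts, and put never stores anything.

from framcore import Model
from framcore.querydbs import ModelDB

base, scenario = Model(), Model()
db = ModelDB(scenario, base)  # scenario is checked first on every lookup
key = "demand_NO1"            # hypothetical id
if db.has_key(key):
    value = db.get(key)       # get raises KeyError if no model has the key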

QueryDB

QueryDB

Bases: Base, ABC

Abstract base class for database queries.

Provides an interface for getting, putting, and checking keys in a database. Subclasses must implement the _get, _put, and _has_key methods.

Source code in framcore/querydbs/QueryDB.py
class QueryDB(Base, ABC):
    """
    Abstract base class for database queries.

    Provides an interface for getting, putting, and checking keys in a database.
    Subclasses must implement the _get, _put, and _has_key methods.

    """

    def get(self, key: object) -> object:
        """Get value behind key from db."""
        return self._get(key)

    def put(self, key: object, value: object, elapsed_seconds: float) -> None:
        """Put value in db behind key (maybe, depending on implementation)."""
        self._put(key, value, elapsed_seconds)

    def has_key(self, key: str) -> bool:
        """Return True if db has value behind key."""
        return self._has_key(key)

    def get_data(self) -> dict:
        """Return output of get_data called on first underlying model."""
        return self._get_data()

    @abstractmethod
    def _get(self, key: object) -> object:
        pass

    @abstractmethod
    def _put(self, key: object, value: object, elapsed_seconds: float) -> None:
        pass

    @abstractmethod
    def _has_key(self, key: object) -> bool:
        pass

    @abstractmethod
    def _get_data(self) -> dict:
        pass
get(key: object) -> object

Get value behind key from db.

Source code in framcore/querydbs/QueryDB.py
def get(self, key: object) -> object:
    """Get value behind key from db."""
    return self._get(key)
get_data() -> dict

Return output of get_data called on first underlying model.

Source code in framcore/querydbs/QueryDB.py
def get_data(self) -> dict:
    """Return output of get_data called on first underlying model."""
    return self._get_data()
has_key(key: str) -> bool

Return True if db has value behind key.

Source code in framcore/querydbs/QueryDB.py
def has_key(self, key: str) -> bool:
    """Return True if db has value behind key."""
    return self._has_key(key)
put(key: object, value: object, elapsed_seconds: float) -> None

Put value in db behind key (maybe, depending on implementation).

Source code in framcore/querydbs/QueryDB.py
def put(self, key: object, value: object, elapsed_seconds: float) -> None:
    """Put value in db behind key (maybe, depending on implementation)."""
    self._put(key, value, elapsed_seconds)
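
The elapsed_seconds argument exists so that implementations like CacheDB can decide whether a value was expensive enough to cache. A get-or-compute helper on top of this interface could look like the following sketch (compute_value is a hypothetical zero-argument callable):

import time


def get_or_compute(db, key, compute_value):
    """Fetch key from a QueryDB, computing and offering it to the db on a miss."""
    if db.has_key(key):
        return db.get(key)
    start = time.perf_counter()
    value = compute_value()
    db.put(key, value, elapsed_seconds=time.perf_counter() - start)
    return value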

solvers

Solver

Solver

Bases: Base, ABC

Solver interface class.

In FRAM, energy market models are called Solvers. A Solver takes a populated Model and the configuration from a SolverConfig, and transfers these to the solver software. It then solves the energy market model and writes the results back to the Model.

Source code in framcore/solvers/Solver.py
class Solver(Base, ABC):
    """
    Solver interface class.

    In FRAM, energy market models are called Solvers. A Solver takes a populated Model and the configuration from a SolverConfig,
    and transfers these to the solver software. It then solves the energy market model and writes the results back to the Model.
    """

    _FILENAME_MODEL = "model.pickle"
    _FILENAME_SOLVER = "solver.pickle"

    def solve(self, model: Model) -> None:
        """
        Initiate the solve.

        It takes the populated Model and the configuration from the solver's SolverConfig, and transfers these to the solver software.
        It then solves the energy market model and writes the results back to the Model.

        At the end of the solve, the Model (now with results) and the Solver object (with configurations) are pickled to the solve folder.
        - model.pickle can be used to inspect results later.
        - solver.pickle allows reuse of the same solver configurations (with solve_folder set to None to avoid overwriting).
        TODO: Could also pickle the Model before solving, to have a record of the input model.

        """
        self._check_type(model, Model)

        config = self.get_config()

        folder = config.get_solve_folder()

        if folder is None:
            raise ValueError("A folder for the Solver has not been set yet. Use Solver.get_config().set_solve_folder(folder)")

        Path.mkdir(folder, parents=True, exist_ok=True)

        self._solve(folder, model)

        with Path.open(folder / self._FILENAME_MODEL, "wb") as f:
            pickle.dump(model, f)

        c = deepcopy(self)
        c.get_config().set_solve_folder(None)
        with Path.open(folder / self._FILENAME_SOLVER, "wb") as f:
            pickle.dump(c, f)

    @abstractmethod
    def get_config(self) -> SolverConfig:
        """Return the solver's config object."""
        pass

    @abstractmethod
    def _solve(self, folder: Path, model: Model) -> None:
        """Solve the model inplace. Write to folder. Must be implemented by specific solvers."""
        pass
get_config() -> SolverConfig abstractmethod

Return the solver's config object.

Source code in framcore/solvers/Solver.py
@abstractmethod
def get_config(self) -> SolverConfig:
    """Return the solver's config object."""
    pass
solve(model: Model) -> None

Initiate the solve.

It takes the populated Model and the configuration from the solver's SolverConfig, and transfers these to the solver software. It then solves the energy market model and writes the results back to the Model.

At the end of the solve, the Model (now with results) and the Solver object (with configurations) are pickled to the solve folder.

- model.pickle can be used to inspect results later.
- solver.pickle allows reuse of the same solver configurations (with solve_folder set to None to avoid overwriting).

TODO: Could also pickle the Model before solving, to have a record of the input model.

Source code in framcore/solvers/Solver.py
def solve(self, model: Model) -> None:
    """
    Initiate the solve.

    It takes the populated Model and the configuration from the solver's SolverConfig, and transfers these to the solver software.
    It then solves the energy market model and writes the results back to the Model.

    At the end of the solve, the Model (now with results) and the Solver object (with configurations) are pickled to the solve folder.
    - model.pickle can be used to inspect results later.
    - solver.pickle allows reuse of the same solver configurations (with solve_folder set to None to avoid overwriting).
    TODO: Could also pickle the Model before solving, to have a record of the input model.

    """
    self._check_type(model, Model)

    config = self.get_config()

    folder = config.get_solve_folder()

    if folder is None:
        raise ValueError("A folder for the Solver has not been set yet. Use Solver.get_config().set_solve_folder(folder)")

    Path.mkdir(folder, parents=True, exist_ok=True)

    self._solve(folder, model)

    with Path.open(folder / self._FILENAME_MODEL, "wb") as f:
        pickle.dump(model, f)

    c = deepcopy(self)
    c.get_config().set_solve_folder(None)
    with Path.open(folder / self._FILENAME_SOLVER, "wb") as f:
        pickle.dump(c, f)
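
A concrete solver only has to provide get_config and _solve. The sketch below shows the shape of such a subclass; the class names are hypothetical and the _solve body is left empty, since the real work (transferring the Model and config to the solver software and writing results back) is solver specific.

from pathlib import Path

from framcore import Model
from framcore.solvers import Solver, SolverConfig


class MySolverConfig(SolverConfig):  # hypothetical
    pass


class MySolver(Solver):  # hypothetical
    def __init__(self) -> None:
        self._config = MySolverConfig()

    def get_config(self) -> SolverConfig:
        return self._config

    def _solve(self, folder: Path, model: Model) -> None:
        # Transfer model and config to the solver software, solve,
        # and write results back into model in place.
        ...


solver = MySolver()
solver.get_config().set_solve_folder("solves/run1")
# solver.solve(model)  # writes model.pickle and solver.pickle to solves/run1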

SolverConfig

Definition of SolverConfig interface.

SolverConfig

Bases: Base, ABC

SolverConfig interface class.

Source code in framcore/solvers/SolverConfig.py
class SolverConfig(Base, ABC):
    """SolverConfig inteface class."""

    _SIMULATION_MODE_SERIAL = "serial"
    _SIMULATION_MODE_FORECAST = "forecast"

    _DIFF_POLICY_ERROR = "error"
    _DIFF_POLICY_IGNORE = "ignore"
    _DIFF_POLICY_BACKUP = "backup"

    def __init__(self) -> None:
        """Create internal variables with default values."""
        self._simulation_mode: str | None = None
        self._diff_policy: str = self._DIFF_POLICY_ERROR
        self._show_screen_output: bool = False
        self._currency: str | None = None
        self._num_cpu_cores: int = 1
        self._is_float32 = True
        self._first_weather_year: int | None = None
        self._num_weather_years: int | None = None
        self._first_simulation_year: int | None = None
        self._num_simulation_years: int | None = None
        self._data_period: TimeIndex | None = None
        self._commodity_unit_flow_default: str | None = None
        self._commodity_unit_stock_default: str | None = None
        self._commodity_unit_flows: dict[str, str] = {}
        self._commodity_unit_stocks: dict[str, str] = {}
        self._solve_folder: Path | None = None

    def set_solve_folder(self, folder: Path | str | None) -> None:
        """Set folder where solve related files will be written."""
        self._check_type(folder, (str, Path, type(None)))
        if isinstance(folder, str):
            folder = Path(folder)
        self._solve_folder = folder

    def get_solve_folder(self) -> Path | None:
        """Get folder where solve related files will be written."""
        return self._solve_folder

    def set_commodity_units(
        self,
        commodity: str,
        stock_unit: str,
        flow_unit: str | None = None,
        is_default: bool | None = None,
    ) -> None:
        """
        Set the stock and flow units for a commodity.

        Parameters
        ----------
        commodity : str
            The name of the commodity.
        stock_unit : str
            The unit for the commodity stock.
        flow_unit : str or None, optional
            The unit for the commodity flow, representing the rate of change of the stock unit over time.
        is_default : bool or None, optional
            If True, set these units as the default for all commodities.

        Raises
        ------
        ValueError
            If the flow unit is incompatible with the stock unit.

        """
        self._check_type(commodity, str)
        self._check_type(stock_unit, str)
        self._check_type(flow_unit, (str, type(None)))
        self._check_type(is_default, (bool, type(None)))
        if flow_unit:
            candidate = f"{stock_unit}/s"
            if not is_convertable(candidate, flow_unit):
                message = (
                    f"Incompatible units for commodity '{commodity}': stock_unit '{stock_unit}' flow_unit '{flow_unit}'"
                    "The flow_unit must represent the rate of change of the stock_unit over time."
                )
                raise ValueError(message)
        if is_default:
            self._warn_if_changed_defaults(stock_unit, flow_unit)
            self._commodity_unit_stock_default = stock_unit
            if flow_unit:
                self._commodity_unit_flow_default = flow_unit
        else:
            self._commodity_unit_stocks[commodity] = stock_unit
            self._commodity_unit_flows[commodity] = flow_unit

    def get_unit_stock(self, commodity: str) -> str:
        """
        Get the stock unit for a given commodity.

        Parameters
        ----------
        commodity : str
            The name of the commodity.

        Returns
        -------
        str
            The stock unit for the commodity.

        Raises
        ------
        ValueError
            If no stock unit is set for the commodity.

        """
        if commodity not in self._commodity_unit_stocks and not self._commodity_unit_stock_default:
            message = f"No stock unit set for '{commodity}'."
            raise ValueError(message)
        return self._commodity_unit_stocks.get(commodity, self._commodity_unit_stock_default)

    def get_unit_flow(self, commodity: str) -> str | None:
        """
        Get the flow unit for a given commodity.

        Parameters
        ----------
        commodity : str
            The name of the commodity.

        Returns
        -------
        str or None
            The flow unit for the commodity, or None if not set.

        """
        return self._commodity_unit_flows.get(commodity, self._commodity_unit_flow_default)

    def _warn_if_changed_defaults(self, stock_unit: str, flow_unit: str) -> None:
        if self._commodity_unit_flow_default and flow_unit != self._commodity_unit_flow_default:
            message = f"Replacing flow default from {self._commodity_unit_flow_default} to {flow_unit}. Usually default is only set once."
            self.send_warning_event(message)
        if self._commodity_unit_stock_default and stock_unit != self._commodity_unit_stock_default:
            message = f"Replacing stock default from {self._commodity_unit_stock_default} to {stock_unit}. Usually default is only set once."
            self.send_warning_event(message)

    def get_num_cpu_cores(self) -> int:
        """Return number of cpu cores the Solver can use."""
        return self._num_cpu_cores

    def set_num_cpu_cores(self, n: int) -> None:
        """Set number of cpu cores the Solver can use."""
        self._num_cpu_cores = n

    def set_currency(self, currency: str) -> None:
        """Set currency."""
        self._check_type(currency, str)
        self._currency = currency

    def get_currency(self) -> str | None:
        """Get currency."""
        return self._currency

    def set_screen_output_on(self) -> None:
        """Print output from Solver to stdout and logfile."""
        self._show_screen_output = True

    def set_screen_output_off(self) -> None:
        """Only print output from Solver to logfile."""
        self._show_screen_output = False

    def show_screen_output(self) -> bool:
        """Return True if screen output is set to be shown."""
        return self._show_screen_output

    def set_diff_policy_error(self) -> None:
        """Error if non-empty diff during solve."""
        self._diff_policy = self._DIFF_POLICY_ERROR

    def set_diff_policy_ignore(self) -> None:
        """Ignore if non-empty diff during solve."""
        self._diff_policy = self._DIFF_POLICY_IGNORE

    def set_diff_policy_backup(self) -> None:
        """Copy existing folder to folder/backup_[timestamp] folder if non-empty diff during solve."""
        self._diff_policy = self._DIFF_POLICY_BACKUP

    def is_diff_policy_error(self) -> bool:
        """Return True if error diff policy."""
        return self._diff_policy == self._DIFF_POLICY_ERROR

    def is_diff_policy_ignore(self) -> bool:
        """Return True if ignore diff policy."""
        return self._diff_policy == self._DIFF_POLICY_IGNORE

    def is_diff_policy_backup(self) -> bool:
        """Return True if backup diff policy."""
        return self._diff_policy == self._DIFF_POLICY_BACKUP

    def set_simulation_mode_serial(self) -> None:
        """Activate serial simulation mode."""
        self._simulation_mode = self._SIMULATION_MODE_SERIAL

    def is_simulation_mode_serial(self) -> bool:
        """Return True if serial simulation mode."""
        return self._simulation_mode == self._SIMULATION_MODE_SERIAL

    def set_data_period(self, period: TimeIndex) -> None:
        """Set period used in level value queries."""
        self._check_type(period, TimeIndex)
        self._data_period = period

    def get_data_period(self) -> TimeIndex | None:
        """Get period used in level value queries."""
        return self._data_period

    def set_simulation_years(self, first_year: int, num_years: int) -> None:
        """Set subset of scenario years. For serial simulation."""
        self._check_type(first_year, int)
        self._check_type(num_years, int)
        self._check_int(first_year, lower_bound=0, upper_bound=None)
        self._check_int(num_years, lower_bound=1, upper_bound=None)
        self._first_simulation_year = first_year
        self._num_simulation_years = num_years

    def get_simulation_years(self) -> tuple[int, int]:
        """
        Get simulation years (first_year, num_years).

        Return weather years as fallback if serial simulation.
        """
        if (self._first_simulation_year is None or self._num_simulation_years is None) and self.is_simulation_mode_serial():
            first_weather_year, num_weather_years = self.get_weather_years()
            if first_weather_year is not None and num_weather_years is not None:
                return first_weather_year, num_weather_years

        if self._first_simulation_year is None or self._num_simulation_years is None:
            message = "Simulation years not set."
            raise ValueError(message)
        return (self._first_simulation_year, self._num_simulation_years)

    def set_weather_years(self, first_year: int, num_years: int) -> None:
        """Set weather scenario period used in profiles."""
        self._check_type(first_year, int)
        self._check_type(num_years, int)
        self._check_int(first_year, lower_bound=0, upper_bound=None)
        self._check_int(num_years, lower_bound=1, upper_bound=None)
        self._first_weather_year = first_year
        self._num_weather_years = num_years

    def get_weather_years(self) -> tuple[int, int]:
        """Get weather scenario period (first_year, num_years) used in profiles."""
        if self._first_weather_year is None or self._num_weather_years is None:
            message = "Scenario years not set."
            raise ValueError(message)
        return (self._first_weather_year, self._num_weather_years)

    def use_float32(self) -> None:
        """Use single precision floating point numbers in data management."""
        self._is_float32 = True

    def use_float64(self) -> None:
        """Use double precision floating point numbers in data management."""
        self._is_float32 = False

    def is_float32(self) -> bool:
        """Return if single precision in data management, else double precision."""
        return self._is_float32
__init__() -> None

Create internal variables with default values.

Source code in framcore/solvers/SolverConfig.py
def __init__(self) -> None:
    """Create internal variables with default values."""
    self._simulation_mode: str | None = None
    self._diff_policy: str = self._DIFF_POLICY_ERROR
    self._show_screen_output: bool = False
    self._currency: str | None = None
    self._num_cpu_cores: int = 1
    self._is_float32 = True
    self._first_weather_year: int | None = None
    self._num_weather_years: int | None = None
    self._first_simulation_year: int | None = None
    self._num_simulation_years: int | None = None
    self._data_period: TimeIndex | None = None
    self._commodity_unit_flow_default: str | None = None
    self._commodity_unit_stock_default: str | None = None
    self._commodity_unit_flows: dict[str, str] = {}
    self._commodity_unit_stocks: dict[str, str] = {}
    self._solve_folder: Path | None = None
get_currency() -> str | None

Get currency.

Source code in framcore/solvers/SolverConfig.py
def get_currency(self) -> str | None:
    """Get currency."""
    return self._currency
get_data_period() -> TimeIndex | None

Get period used in level value queries.

Source code in framcore/solvers/SolverConfig.py
def get_data_period(self) -> TimeIndex | None:
    """Get period used in level value queries."""
    return self._data_period
get_num_cpu_cores() -> int

Return number of cpu cores the Solver can use.

Source code in framcore/solvers/SolverConfig.py
def get_num_cpu_cores(self) -> int:
    """Return number of cpu cores the Solver can use."""
    return self._num_cpu_cores
get_simulation_years() -> tuple[int, int]

Get simulation years (first_year, num_years).

Return weather years as fallback if serial simulation.

Source code in framcore/solvers/SolverConfig.py
def get_simulation_years(self) -> tuple[int, int]:
    """
    Get simulation years (first_year, num_years).

    Return weather years as fallback if serial simulation.
    """
    if (self._first_simulation_year is None or self._num_simulation_years is None) and self.is_simulation_mode_serial():
        first_weather_year, num_weather_years = self.get_weather_years()
        if first_weather_year is not None and num_weather_years is not None:
            return first_weather_year, num_weather_years

    if self._first_simulation_year is None or self._num_simulation_years is None:
        message = "Simulation years not set."
        raise ValueError(message)
    return (self._first_simulation_year, self._num_simulation_years)
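
In serial simulation mode the weather years double as a fallback, as the source above shows. A sketch (config being an instance of a concrete SolverConfig subclass):

config.set_simulation_mode_serial()
config.set_weather_years(1991, 30)
print(config.get_simulation_years())  # (1991, 30): falls back to the weather years
config.set_simulation_years(2000, 5)
print(config.get_simulation_years())  # (2000, 5)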
get_solve_folder() -> Path | None

Get folder where solve related files will be written.

Source code in framcore/solvers/SolverConfig.py
def get_solve_folder(self) -> Path | None:
    """Get folder where solve related files will be written."""
    return self._solve_folder
get_unit_flow(commodity: str) -> str | None

Get the flow unit for a given commodity.

Parameters

commodity : str The name of the commodity.

Returns

str or None The flow unit for the commodity, or None if not set.

Source code in framcore/solvers/SolverConfig.py
def get_unit_flow(self, commodity: str) -> str | None:
    """
    Get the flow unit for a given commodity.

    Parameters
    ----------
    commodity : str
        The name of the commodity.

    Returns
    -------
    str or None
        The flow unit for the commodity, or None if not set.

    """
    return self._commodity_unit_flows.get(commodity, self._commodity_unit_flow_default)
get_unit_stock(commodity: str) -> str

Get the stock unit for a given commodity.

Parameters

commodity : str The name of the commodity.

Returns

str The stock unit for the commodity.

Raises

ValueError If no stock unit is set for the commodity.

Source code in framcore/solvers/SolverConfig.py
def get_unit_stock(self, commodity: str) -> str:
    """
    Get the stock unit for a given commodity.

    Parameters
    ----------
    commodity : str
        The name of the commodity.

    Returns
    -------
    str
        The stock unit for the commodity.

    Raises
    ------
    ValueError
        If no stock unit is set for the commodity.

    """
    if commodity not in self._commodity_unit_stocks and not self._commodity_unit_stock_default:
        message = f"No stock unit set for '{commodity}'."
        raise ValueError(message)
    return self._commodity_unit_stocks.get(commodity, self._commodity_unit_stock_default)
get_weather_years() -> tuple[int, int]

Get weather scenario period (first_year, num_years) used in profiles.

Source code in framcore/solvers/SolverConfig.py
def get_weather_years(self) -> tuple[int, int]:
    """Get weather scenario period (first_year, num_years) used in profiles."""
    if self._first_weather_year is None or self._num_weather_years is None:
        message = "Scenario years not set."
        raise ValueError(message)
    return (self._first_weather_year, self._num_weather_years)
is_diff_policy_backup() -> bool

Return True if backup diff policy.

Source code in framcore/solvers/SolverConfig.py
def is_diff_policy_backup(self) -> bool:
    """Return True if backup diff policy."""
    return self._diff_policy == self._DIFF_POLICY_BACKUP
is_diff_policy_error() -> bool

Return True if error diff policy.

Source code in framcore/solvers/SolverConfig.py
def is_diff_policy_error(self) -> bool:
    """Return True if error diff policy."""
    return self._diff_policy == self._DIFF_POLICY_ERROR
is_diff_policy_ignore() -> bool

Return True if ignore diff policy.

Source code in framcore/solvers/SolverConfig.py
def is_diff_policy_ignore(self) -> bool:
    """Return True if ignore diff policy."""
    return self._diff_policy == self._DIFF_POLICY_IGNORE
is_float32() -> bool

Return if single precision in data management, else double precision.

Source code in framcore/solvers/SolverConfig.py
def is_float32(self) -> bool:
    """Return if single precision in data management, else double precision."""
    return self._is_float32
is_simulation_mode_serial() -> bool

Return True if serial simulation mode.

Source code in framcore/solvers/SolverConfig.py
def is_simulation_mode_serial(self) -> bool:
    """Return True if serial simulation mode."""
    return self._simulation_mode == self._SIMULATION_MODE_SERIAL
set_commodity_units(commodity: str, stock_unit: str, flow_unit: str | None = None, is_default: bool | None = None) -> None

Set the stock and flow units for a commodity.

Parameters

commodity : str The name of the commodity. stock_unit : str The unit for the commodity stock. flow_unit : str or None, optional The unit for the commodity flow, representing the rate of change of the stock unit over time. is_default : bool or None, optional If True, set these units as the default for all commodities.

Raises

ValueError If the flow unit is incompatible with the stock unit.

Source code in framcore/solvers/SolverConfig.py
def set_commodity_units(
    self,
    commodity: str,
    stock_unit: str,
    flow_unit: str | None = None,
    is_default: bool | None = None,
) -> None:
    """
    Set the stock and flow units for a commodity.

    Parameters
    ----------
    commodity : str
        The name of the commodity.
    stock_unit : str
        The unit for the commodity stock.
    flow_unit : str or None, optional
        The unit for the commodity flow, representing the rate of change of the stock unit over time.
    is_default : bool or None, optional
        If True, set these units as the default for all commodities.

    Raises
    ------
    ValueError
        If the flow unit is incompatible with the stock unit.

    """
    self._check_type(commodity, str)
    self._check_type(stock_unit, str)
    self._check_type(flow_unit, (str, type(None)))
    self._check_type(is_default, (bool, type(None)))
    if flow_unit:
        candidate = f"{stock_unit}/s"
        if not is_convertable(candidate, flow_unit):
            message = (
                f"Incompatible units for commodity '{commodity}': stock_unit '{stock_unit}' flow_unit '{flow_unit}'"
                "The flow_unit must represent the rate of change of the stock_unit over time."
            )
            raise ValueError(message)
    if is_default:
        self._warn_if_changed_defaults(stock_unit, flow_unit)
        self._commodity_unit_stock_default = stock_unit
        if flow_unit:
            self._commodity_unit_flow_default = flow_unit
    else:
        self._commodity_unit_stocks[commodity] = stock_unit
        self._commodity_unit_flows[commodity] = flow_unit
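
The flow unit is validated by checking that stock_unit per second is convertible to it, i.e. the flow must be the stock's rate of change. A sketch (the exact unit strings accepted depend on the unit registry behind is_convertable, so treat them as assumptions):

config.set_commodity_units("Power", stock_unit="GWh", flow_unit="MW")  # GWh/s is convertible to MW
config.set_commodity_units("Gas", stock_unit="Mm3", is_default=True)   # default for all commodities
print(config.get_unit_stock("Power"))  # "GWh" (per-commodity entry)
print(config.get_unit_stock("Heat"))   # "Mm3" (falls back to the default)
# config.set_commodity_units("Power", stock_unit="GWh", flow_unit="EUR")  # would raise ValueError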
set_currency(currency: str) -> None

Set currency.

Source code in framcore/solvers/SolverConfig.py
def set_currency(self, currency: str) -> None:
    """Set currency."""
    self._check_type(currency, str)
    self._currency = currency
set_data_period(period: TimeIndex) -> None

Set period used in level value queries.

Source code in framcore/solvers/SolverConfig.py
def set_data_period(self, period: TimeIndex) -> None:
    """Set period used in level value queries."""
    self._check_type(period, TimeIndex)
    self._data_period = period
set_diff_policy_backup() -> None

Copy existing folder to folder/backup_[timestamp] folder if non-empty diff during solve.

Source code in framcore/solvers/SolverConfig.py
def set_diff_policy_backup(self) -> None:
    """Copy existing folder to folder/backup_[timestamp] folder if non-empty diff during solve."""
    self._diff_policy = self._DIFF_POLICY_BACKUP
set_diff_policy_error() -> None

Error if non-empty diff during solve.

Source code in framcore/solvers/SolverConfig.py
180
181
182
def set_diff_policy_error(self) -> None:
    """Error if non-empty diff during solve."""
    self._diff_policy = self._DIFF_POLICY_ERROR
set_diff_policy_ignore() -> None

Ignore if non-empty diff during solve.

Source code in framcore/solvers/SolverConfig.py
184
185
186
def set_diff_policy_ignore(self) -> None:
    """Ignore if non-empty diff during solve."""
    self._diff_policy = self._DIFF_POLICY_IGNORE
set_num_cpu_cores(n: int) -> None

Set number of cpu cores the Solver can use.

Source code in framcore/solvers/SolverConfig.py
def set_num_cpu_cores(self, n: int) -> None:
    """Set number of cpu cores the Solver can use."""
    self._num_cpu_cores = n
set_screen_output_off() -> None

Only print output from Solver to logfile.

Source code in framcore/solvers/SolverConfig.py
def set_screen_output_off(self) -> None:
    """Only print output from Solver to logfile."""
    self._show_screen_output = False
set_screen_output_on() -> None

Print output from Solver to stdout and logfile.

Source code in framcore/solvers/SolverConfig.py
def set_screen_output_on(self) -> None:
    """Print output from Solver to stdout and logfile."""
    self._show_screen_output = True
set_simulation_mode_serial() -> None

Activate serial simulation mode.

Source code in framcore/solvers/SolverConfig.py
def set_simulation_mode_serial(self) -> None:
    """Activate serial simulation mode."""
    self._simulation_mode = self._SIMULATION_MODE_SERIAL
set_simulation_years(first_year: int, num_years: int) -> None

Set subset of scenario years. For serial simulation.

Source code in framcore/solvers/SolverConfig.py
def set_simulation_years(self, first_year: int, num_years: int) -> None:
    """Set subset of scenario years. For serial simulation."""
    self._check_type(first_year, int)
    self._check_type(num_years, int)
    self._check_int(first_year, lower_bound=0, upper_bound=None)
    self._check_int(num_years, lower_bound=1, upper_bound=None)
    self._first_simulation_year = first_year
    self._num_simulation_years = num_years
set_solve_folder(folder: Path | str | None) -> None

Set folder where solve related files will be written.

Source code in framcore/solvers/SolverConfig.py
def set_solve_folder(self, folder: Path | str | None) -> None:
    """Set folder where solve related files will be written."""
    self._check_type(folder, (str, Path, type(None)))
    if isinstance(folder, str):
        folder = Path(folder)
    self._solve_folder = folder
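
Strings are converted to Path, and None clears the folder (which is what solve() does on the pickled solver copy to avoid overwriting). For example:

config.set_solve_folder("solves/run1")
print(config.get_solve_folder())  # PosixPath('solves/run1') on POSIX systems
config.set_solve_folder(None)     # unset; Solver.solve() would now raise ValueError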
set_weather_years(first_year: int, num_years: int) -> None

Set weather scenario period used in profiles.

Source code in framcore/solvers/SolverConfig.py
def set_weather_years(self, first_year: int, num_years: int) -> None:
    """Set weather scenario period used in profiles."""
    self._check_type(first_year, int)
    self._check_type(num_years, int)
    self._check_int(first_year, lower_bound=0, upper_bound=None)
    self._check_int(num_years, lower_bound=1, upper_bound=None)
    self._first_weather_year = first_year
    self._num_weather_years = num_years
show_screen_output() -> bool

Return True if screen output is set to be shown.

Source code in framcore/solvers/SolverConfig.py
def show_screen_output(self) -> bool:
    """Return True if screen output is set to be shown."""
    return self._show_screen_output
use_float32() -> None

Use single precision floating point numbers in data management.

Source code in framcore/solvers/SolverConfig.py
def use_float32(self) -> None:
    """Use single precision floating point numbers in data management."""
    self._is_float32 = True
use_float64() -> None

Use double precision floating point numbers in data management.

Source code in framcore/solvers/SolverConfig.py
def use_float64(self) -> None:
    """Use double precision floating point numbers in data management."""
    self._is_float32 = False

timeindexes

FRAM time indexes package provides functionality for handling time-related data.

AverageYearRange

AverageYearRange

Bases: SinglePeriodTimeIndex

AverageYearRange represents a single period spanning a range of years. It does not extrapolate and represents full ISO calendar years.

Source code in framcore/timeindexes/AverageYearRange.py
class AverageYearRange(SinglePeriodTimeIndex):
    """AverageYearRange represents a period over a range of years. No extrapolation and represents full iso calendar years."""

    def __init__(self, start_year: int, num_years: int) -> None:
        """
        Initialize AverageYearRange with a year range. The index does not extrapolate and represents full ISO calendar years.

        Args:
            start_year (int): First year in the range.
            num_years (int): Number of years in the range.

        """
        start_time = datetime.fromisocalendar(start_year, 1, 1)
        end_time = datetime.fromisocalendar(start_year + num_years, 1, 1)
        period_duration = end_time - start_time
        super().__init__(
            start_time=start_time,
            period_duration=period_duration,
            is_52_week_years=False,
            extrapolate_first_point=False,
            extrapolate_last_point=False,
        )
__init__(start_year: int, num_years: int) -> None

Initialize AverageYearRange with a year range. The index does not extrapolate and represents full ISO calendar years.

Parameters:

Name Type Description Default
start_year int

First year in the range.

required
num_years int

Number of years in the range.

required
Source code in framcore/timeindexes/AverageYearRange.py
def __init__(self, start_year: int, num_years: int) -> None:
    """
    Initialize AverageYearRange with a year range. The index does not extrapolate and represents full ISO calendar years.

    Args:
        start_year (int): First year in the range.
        num_years (int): Number of years in the range.

    """
    start_time = datetime.fromisocalendar(start_year, 1, 1)
    end_time = datetime.fromisocalendar(start_year + num_years, 1, 1)
    period_duration = end_time - start_time
    super().__init__(
        start_time=start_time,
        period_duration=period_duration,
        is_52_week_years=False,
        extrapolate_first_point=False,
        extrapolate_last_point=False,
    )
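
The duration arithmetic can be reproduced directly with datetime, which also shows why the period is not simply num_years * 52 weeks: long ISO years contain 53 weeks.

from datetime import datetime

start = datetime.fromisocalendar(2020, 1, 1)  # 2019-12-30, ISO week 1 of 2020
end = datetime.fromisocalendar(2022, 1, 1)    # 2022-01-03, ISO week 1 of 2022
print(end - start)  # 735 days (105 weeks): 2020 has 53 ISO weeks, 2021 has 52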

ConstantTimeIndex

ConstantTimeIndex

Bases: SinglePeriodTimeIndex

A TimeIndex that is constant over time, for use in ConstantTimeVector.

Represents a period of 52 weeks starting from ISO calendar week 1 of 1985. Extrapolates both the first and last point.

Source code in framcore/timeindexes/ConstantTimeIndex.py
class ConstantTimeIndex(SinglePeriodTimeIndex):
    """
    A TimeIndex that is constant over time, for use in ConstantTimeVector.

    Represents a period of 52 weeks starting from ISO calendar week 1 of 1985. Extrapolates both the first and last point.

    """

    def __init__(self) -> None:
        """Initialize ConstantTimeIndex."""
        super().__init__(
            start_time=datetime.fromisocalendar(1985, 1, 1),
            period_duration=timedelta(weeks=52),
            is_52_week_years=True,
            extrapolate_first_point=True,
            extrapolate_last_point=True,
        )
__init__() -> None

Initialize ConstantTimeIndex.

Source code in framcore/timeindexes/ConstantTimeIndex.py
def __init__(self) -> None:
    """Initialize ConstantTimeIndex."""
    super().__init__(
        start_time=datetime.fromisocalendar(1985, 1, 1),
        period_duration=timedelta(weeks=52),
        is_52_week_years=True,
        extrapolate_first_point=True,
        extrapolate_last_point=True,
    )
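
Since the index is a single period with both extrapolation flags set, it should report itself as constant; a sketch, assuming SinglePeriodTimeIndex inherits is_constant from FixedFrequencyTimeIndex:

from framcore.timeindexes import ConstantTimeIndex

idx = ConstantTimeIndex()
print(idx.is_constant())  # True: one period, extrapolated in both directions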

DailyIndex

DailyIndex

Bases: ProfileTimeIndex

ProfileTimeIndex covering one or more whole years at daily resolution, using either 52-week years or full ISO calendar years.

No extrapolation (inherited from ProfileTimeIndex).

Source code in framcore/timeindexes/DailyIndex.py
class DailyIndex(ProfileTimeIndex):
    """
    ProfileTimeIndex covering one or more whole years at daily resolution, using either 52-week years or full ISO calendar years.

    No extrapolation (inherited from ProfileTimeIndex).
    """

    def __init__(
        self,
        start_year: int,
        num_years: int,
        is_52_week_years: bool = True,
    ) -> None:
        """
        Initialize DailyIndex over a number of years, using either 52-week years or full ISO calendar years.

        Args:
            start_year (int): First year in the index.
            num_years (int): Number of years in the index.
            is_52_week_years (bool, optional): Whether to use 52-week years. If False, full ISO calendar years are used. Defaults to True.

        """
        super().__init__(
            start_year=start_year,
            num_years=num_years,
            period_duration=timedelta(days=1),
            is_52_week_years=is_52_week_years,
        )
__init__(start_year: int, num_years: int, is_52_week_years: bool = True) -> None

Initialize DailyIndex over a number of years, using either 52-week years or full ISO calendar years.

Parameters:

Name Type Description Default
start_year int

First year in the index.

required
num_years int

Number of years in the index.

required
is_52_week_years bool

Whether to use 52-week years. If False, full ISO calendar years are used. Defaults to True.

True
Source code in framcore/timeindexes/DailyIndex.py
def __init__(
    self,
    start_year: int,
    num_years: int,
    is_52_week_years: bool = True,
) -> None:
    """
    Initialize DailyIndex over a number of years, using either 52-week years or full ISO calendar years.

    Args:
        start_year (int): First year in the index.
        num_years (int): Number of years in the index.
        is_52_week_years (bool, optional): Whether to use 52-week years. If False, full ISO calendar years are used. Defaults to True.

    """
    super().__init__(
        start_year=start_year,
        num_years=num_years,
        period_duration=timedelta(days=1),
        is_52_week_years=is_52_week_years,
    )
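
A sketch of the two year conventions, assuming get_num_periods is available via the FixedFrequencyTimeIndex base: a 52-week year always has 364 daily periods, while full ISO calendar years follow the calendar.

from framcore.timeindexes import DailyIndex

idx = DailyIndex(start_year=2021, num_years=1)  # 52-week year
print(idx.get_num_periods())  # 364 = 52 * 7
iso = DailyIndex(start_year=2020, num_years=1, is_52_week_years=False)
print(iso.get_num_periods())  # 371 = 53 * 7, since 2020 is a long ISO year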

FixedFrequencyTimeIndex

FixedFrequencyTimeIndex

Bases: TimeIndex

TimeIndex with fixed frequency.
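
For example, one 52-week year of hourly values can be described as follows (import path assumed from the file layout):

from datetime import datetime, timedelta

from framcore.timeindexes import FixedFrequencyTimeIndex

idx = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2021, 1, 1),
    period_duration=timedelta(hours=1),
    num_periods=52 * 168,  # one 52-week year of hourly periods
    is_52_week_years=True,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
print(idx.is_one_year())     # True: 52 * 168 hours is exactly one 52-week year
print(idx.is_whole_years())  # True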

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
class FixedFrequencyTimeIndex(TimeIndex):
    """TimeIndex with fixed frequency."""

    def __init__(
        self,
        start_time: datetime,
        period_duration: timedelta,
        num_periods: int,
        is_52_week_years: bool,
        extrapolate_first_point: bool,
        extrapolate_last_point: bool,
    ) -> None:
        """
        Initialize a FixedFrequencyTimeIndex.

        Args:
            start_time (datetime): The starting datetime of the time index.
            period_duration (timedelta): The duration of each period.
            num_periods (int): The number of periods in the time index. Must be greater than 0.
            is_52_week_years (bool): Whether to use 52-week years.
            extrapolate_first_point (bool): Whether to allow extrapolation of the first point.
            extrapolate_last_point (bool): Whether to allow extrapolation of the last point.

        """
        if num_periods <= 0:
            msg = f"num_periods must be a positive integer. Got {num_periods}."
            raise ValueError(msg)
        if period_duration < timedelta(seconds=1):
            msg = f"period_duration must be at least one second. Got {period_duration}."
            raise ValueError(msg)
        if not period_duration.total_seconds().is_integer():
            msg = f"period_duration must be a whole number of seconds, got {period_duration.total_seconds()} s"
            raise ValueError(msg)
        if is_52_week_years and start_time.isocalendar().week == 53:  # noqa: PLR2004
            raise ValueError("Week of start_time must not be 53 when is_52_week_years is True.")
        self._check_type(num_periods, int)
        self._start_time = start_time
        self._period_duration = period_duration
        self._num_periods = num_periods
        self._is_52_week_years = is_52_week_years
        self._extrapolate_first_point = extrapolate_first_point
        self._extrapolate_last_point = extrapolate_last_point

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if equal to other."""
        if not isinstance(other, FixedFrequencyTimeIndex):
            return False
        return (
            self._start_time == other._start_time
            and self._period_duration == other._period_duration
            and self._num_periods == other._num_periods
            and self._is_52_week_years == other._is_52_week_years
            and self._extrapolate_first_point == other._extrapolate_first_point
            and self._extrapolate_last_point == other._extrapolate_last_point
        )

    def __hash__(self) -> int:
        """Return the hash value for the FixedFrequencyTimeIndex."""
        return hash(
            (
                self._start_time,
                self._period_duration,
                self._num_periods,
                self._is_52_week_years,
                self._extrapolate_first_point,
                self._extrapolate_last_point,
            ),
        )

    def __repr__(self) -> str:
        """Return a string representation of the FixedFrequencyTimeIndex."""
        return (
            f"{type(self).__name__}("
            f"start_time={self._start_time}, "
            f"period_duration={self._period_duration}, "
            f"num_periods={self._num_periods}, "
            f"is_52_week_years={self._is_52_week_years}, "
            f"extrapolate_first_point={self._extrapolate_first_point}, "
            f"extrapolate_last_point={self._extrapolate_last_point})"
        )

    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint."""
        return self.get_fingerprint_default()

    def get_timezone(self) -> tzinfo | None:
        """Get the timezone."""
        return self._start_time.tzinfo

    def get_start_time(self) -> datetime:
        """Get the start time."""
        return self._start_time

    def get_period_duration(self) -> timedelta:
        """Get the period duration."""
        return self._period_duration

    def get_num_periods(self) -> int:
        """Get the number of points."""
        return self._num_periods

    def is_constant(self) -> bool:
        """
        Return True if the time index is constant (single period and both extrapolation flags are True).

        Returns
        -------
        bool
            True if the time index is constant, False otherwise.

        """
        return self._num_periods == 1 and self._extrapolate_first_point == self._extrapolate_last_point is True

    def is_whole_years(self) -> bool:
        """
        Return True if index covers one or more full years.

        The start_time must be the first week and weekday of a year. For real ISO time,
        the stop_time must also be the first week and weekday of a year. For 52-week years,
        the total duration must be an integer number of 52-week years.
        """
        start_time = self.get_start_time()
        start_year, start_week, start_weekday = start_time.isocalendar()
        if not start_week == start_weekday == 1:
            return False

        if not self.is_52_week_years():
            period_duration = self.get_period_duration()
            num_periods = self.get_num_periods()
            stop_time = start_time + num_periods * period_duration
            stop_year, stop_week, stop_weekday = stop_time.isocalendar()
            if stop_year < start_year:
                msg = f"Stop year must be after start year. Current stop year: {stop_year} and start year: {start_year}"
                raise ValueError(msg)
            return stop_week == stop_weekday == 1

        period_duration = self.get_period_duration()
        num_periods = self.get_num_periods()
        seconds_52_week_year = 52 * 168 * 3600
        num_years = (period_duration * num_periods).total_seconds() / seconds_52_week_year
        return num_years.is_integer()

    def get_reference_period(self) -> ReferencePeriod | None:
        """Get the reference period (only if is_whole_years() is True)."""
        if self.is_whole_years():
            start_year = self.get_start_time().isocalendar().year
            if self._is_52_week_years:
                num_years = (self.get_num_periods() * self.get_period_duration()) // timedelta(weeks=52)
            else:
                stop_year = self.get_stop_time().isocalendar().year
                num_years = stop_year - start_year
            return ReferencePeriod(start_year=start_year, num_years=num_years)
        return None

    def is_52_week_years(self) -> bool:
        """Return True if 52-week years and False if real ISO time."""
        return self._is_52_week_years

    def is_one_year(self) -> bool:
        """Return True if exactly one whole year."""
        start_time = self.get_start_time()
        start_year, start_week, start_weekday = start_time.isocalendar()
        if not start_week == start_weekday == 1:
            return False

        if not self.is_52_week_years():
            period_duration = self.get_period_duration()
            num_periods = self.get_num_periods()
            stop_time = start_time + num_periods * period_duration
            stop_year, stop_week, stop_weekday = stop_time.isocalendar()
            if not stop_week == stop_weekday == 1:
                return False
            return start_year + 1 == stop_year

        period_duration = self.get_period_duration()
        num_periods = self.get_num_periods()
        seconds_52_week_year = 52 * 168 * 3600
        num_years = (period_duration * num_periods).total_seconds() / seconds_52_week_year
        return num_years == 1.0

    def extrapolate_first_point(self) -> bool:
        """Return True if first value can be extrapolated backwards to fill missing values."""
        return self._extrapolate_first_point

    def extrapolate_last_point(self) -> bool:
        """Return True if last value can be extrapolated forward to fill missing values."""
        return self._extrapolate_last_point

    def get_period_average(self, vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float:
        """Get the average over the period from the vector."""
        self._check_type(vector, np.ndarray)
        self._check_type(start_time, datetime)
        self._check_type(duration, timedelta)
        self._check_type(is_52_week_years, bool)

        if vector.shape != (self.get_num_periods(),):
            msg = f"Vector shape {vector.shape} does not match number of periods {self.get_num_periods()} of timeindex ({self})."
            raise ValueError(msg)
        target_timeindex = FixedFrequencyTimeIndex(
            start_time=start_time,
            period_duration=duration,
            num_periods=1,
            is_52_week_years=is_52_week_years,
            extrapolate_first_point=self.extrapolate_first_point(),
            extrapolate_last_point=self.extrapolate_last_point(),
        )
        target_vector = np.zeros(1, dtype=vector.dtype)
        self.write_into_fixed_frequency(
            target_vector=target_vector,
            target_timeindex=target_timeindex,
            input_vector=vector,
        )
        return target_vector[0]

    def write_into_fixed_frequency(
        self,
        target_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
        input_vector: NDArray,
    ) -> None:
        """
        Write the given input_vector into the target_vector according to the target_timeindex, applying necessary transformations.

        Parameters
        ----------
        target_vector : NDArray
            The array where the input_vector will be written to, modified in place.
        target_timeindex : FixedFrequencyTimeIndex
            The time index defining the fixed frequency structure for writing the input_vector into the target_vector.
        input_vector : NDArray
            The array containing the data to be written into the target_vector.

        Notes
        -----
        - If the object is constant (as determined by `self.is_constant()`), the input_vector is expected to have a single value,
          which will be used to fill the entire target_vector.
        - Otherwise, the method delegates the operation to `_write_into_fixed_frequency_recursive` for handling more complex cases.

        """
        if self.is_constant():
            assert input_vector.size == 1
            target_vector.fill(input_vector[0])
        else:
            self._write_into_fixed_frequency_recursive(target_vector, target_timeindex, input_vector)

    def _write_into_fixed_frequency_recursive(  # noqa: C901
        self,
        target_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
        input_vector: NDArray,
        _depth: int = 0,  # only for recursion depth tracking
    ) -> None:
        """
        Recursively write the input_vector into the target_vector according to the target_timeindex, applying necessary transformations.

        Parameters
        ----------
        target_vector : NDArray
            The array where the input_vector will be written to, modified in place.
        target_timeindex : FixedFrequencyTimeIndex
            The time index defining the fixed frequency structure for writing the input_vector into the target_vector.
        input_vector : NDArray
            The array containing the data to be written into the target_vector.

        """
        if _depth > 100:  # noqa: PLR2004
            raise RecursionError("Maximum recursion depth (100) exceeded in _write_into_fixed_frequency_recursive.")

        if self == target_timeindex:
            np.copyto(target_vector, input_vector)
            return

        transformed_timeindex = None

        # Check differences between self and target_timeindex and apply transformations recursively
        if not target_timeindex._is_compatible_resolution(self):
            transformed_timeindex, transformed_vector = self._transform_to_compatible_resolution(input_vector, target_timeindex)

        elif target_timeindex.is_52_week_years() and not self.is_52_week_years():
            transformed_timeindex, transformed_vector = self._convert_to_52_week_years(input_vector=input_vector)

        elif not target_timeindex.is_52_week_years() and self.is_52_week_years():
            transformed_timeindex, transformed_vector = self._convert_to_iso_time(input_vector=input_vector)

        elif not self._is_same_period(target_timeindex):
            if self.is_one_year():
                transformed_timeindex, transformed_vector = self._repeat_oneyear(input_vector, target_timeindex)
            else:
                transformed_timeindex, transformed_vector = self._adjust_period(input_vector, target_timeindex)

        elif not self.is_same_resolution(target_timeindex):
            if target_timeindex.get_period_duration() < self._period_duration:
                v_ops.disaggregate(
                    input_vector=input_vector,
                    output_vector=target_vector,
                    is_disaggfunc_repeat=True,
                )
            else:
                v_ops.aggregate(
                    input_vector=input_vector,
                    output_vector=target_vector,
                    is_aggfunc_sum=False,
                )

        # Recursively write the transformed vector into the target vector
        if transformed_timeindex is not None:
            transformed_timeindex._write_into_fixed_frequency_recursive(  # noqa: SLF001
                target_vector=target_vector,
                target_timeindex=target_timeindex,
                input_vector=transformed_vector,
                _depth=_depth + 1,
            )

    def _convert_to_iso_time(self, input_vector: NDArray) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Convert the input vector to ISO time format.

        Parameters
        ----------
        input_vector : NDArray
            The input vector to be transformed into ISO time format.

        Returns
        -------
        tuple[FixedFrequencyTimeIndex, NDArray]
            A tuple containing the transformed FixedFrequencyTimeIndex and the transformed input vector.

        """
        transformed_vector = v_ops.convert_to_isotime(input_vector=input_vector, startdate=self._start_time, period_duration=self._period_duration)

        transformed_timeindex = self.copy_with(
            start_time=self._start_time,
            num_periods=transformed_vector.size,
            is_52_week_years=False,
        )

        return transformed_timeindex, transformed_vector

    def _convert_to_52_week_years(self, input_vector: NDArray) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Convert the input vector to a 52-week year format.

        This method adjusts the start time of the source index (if needed) and transforms the input vector to match the 52-week year format.

        Parameters
        ----------
        input_vector : NDArray
            The input vector to be transformed.

        Returns
        -------
        tuple[FixedFrequencyTimeIndex, NDArray]
            A tuple containing the transformed FixedFrequencyTimeIndex and the transformed input vector.

        """
        adjusted_start_time, transformed_vector = v_ops.convert_to_modeltime(
            input_vector=input_vector,
            startdate=self._start_time,
            period_duration=self._period_duration,
        )
        transformed_timeindex = self.copy_with(
            start_time=adjusted_start_time,
            num_periods=transformed_vector.size,
            is_52_week_years=True,
        )

        return transformed_timeindex, transformed_vector

    def _is_compatible_resolution(self, other: FixedFrequencyTimeIndex) -> bool:
        """Check if the period duration and start time are compatible with another FixedFrequencyTimeIndex."""
        return self._is_compatible_period(other) and self._is_compatible_starttime(other)

    def _is_compatible_period(self, other: FixedFrequencyTimeIndex) -> bool:
        modulus = self._period_duration.total_seconds() % other.get_period_duration().total_seconds()
        return modulus == 0

    def _is_compatible_starttime(self, other: FixedFrequencyTimeIndex) -> bool:
        delta = abs(self._start_time - other.get_start_time()).total_seconds()
        modulus = delta % other._period_duration.total_seconds()
        return modulus == 0

    def _transform_to_compatible_resolution(
        self,
        input_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
    ) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Transform the input vector and source time index to match the target time index resolution.

        Parameters
        ----------
        input_vector : NDArray
            The input vector to be transformed.
        target_timeindex : FixedFrequencyTimeIndex
            The target time index to match the resolution of.

        Returns
        -------
        tuple[FixedFrequencyTimeIndex, NDArray]
            A tuple containing the transformed FixedFrequencyTimeIndex and the transformed input vector.

        """
        new_period_duration = timedelta(
            seconds=math.gcd(
                int(self._period_duration.total_seconds()),
                int(target_timeindex.get_period_duration().total_seconds()),
                int((self._start_time - target_timeindex.get_start_time()).total_seconds()),
            ),
        )

        transformed_timeindex = self.copy_with(
            period_duration=new_period_duration,
            num_periods=int(self._period_duration.total_seconds() // new_period_duration.total_seconds()) * self._num_periods,
        )

        transformed_vector = np.zeros(transformed_timeindex.get_num_periods(), dtype=input_vector.dtype)
        v_ops.disaggregate(
            input_vector=input_vector,
            output_vector=transformed_vector,
            is_disaggfunc_repeat=True,
        )

        return transformed_timeindex, transformed_vector

    def _is_same_period(self, other: FixedFrequencyTimeIndex) -> bool:
        """Check if the start and stop times are the same."""
        return self._start_time == other.get_start_time() and self.get_stop_time() == other.get_stop_time()

    def is_same_resolution(self, other: FixedFrequencyTimeIndex) -> bool:
        """Check if the period duration is the same."""
        return self._period_duration == other.get_period_duration()

    def get_stop_time(self) -> datetime:
        """Get the stop time of the TimeIndex."""
        if not self._is_52_week_years:
            return self._start_time + self._period_duration * self._num_periods

        return v_ops.calculate_52_week_years_stop_time(
            start_time=self._start_time,
            period_duration=self._period_duration,
            num_periods=self._num_periods,
        )

    def slice(
        self,
        input_vector: NDArray,
        start_year: int,
        num_years: int,
        target_start_year: int,
        target_num_years: int,
    ) -> NDArray:
        """Periodize the input vector to match the target timeindex."""
        if self._is_52_week_years:
            return v_ops.periodize_modeltime(input_vector, start_year, num_years, target_start_year, target_num_years)
        return v_ops.periodize_isotime(input_vector, start_year, num_years, target_start_year, target_num_years)

    def _slice_start(self, input_vector: NDArray, target_index: FixedFrequencyTimeIndex) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Slice the input vector to match the target time index.

        This method handles slicing the input vector to fit the target time index,
        ensuring that the start time aligns correctly.
        """
        num_periods_to_slice = self._periods_between(
            self._start_time,
            target_index.get_start_time(),
            self._period_duration,
            self._is_52_week_years,
        )
        transformed_timeindex = self.copy_with(
            start_time=target_index.get_start_time(),
            num_periods=self._num_periods - num_periods_to_slice,
        )
        transformed_vector = input_vector[num_periods_to_slice:]

        return transformed_timeindex, transformed_vector

    def _slice_end(self, input_vector: NDArray, target_index: FixedFrequencyTimeIndex) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Slice the input vector to match the target time index.

        This method handles slicing the input vector to fit the target time index,
        ensuring that the stop time aligns correctly.
        """
        num_periods_to_slice = self._periods_between(
            self.get_stop_time(),
            target_index.get_stop_time(),
            self._period_duration,
            self._is_52_week_years,
        )
        transformed_timeindex = self.copy_with(num_periods=self._num_periods - num_periods_to_slice)
        transformed_vector = input_vector[:-num_periods_to_slice]

        return transformed_timeindex, transformed_vector

    def total_duration(self) -> timedelta:
        """Get the duration of the TimeIndex."""
        return self._period_duration * self._num_periods

    def _extend_start(
        self,
        input_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
    ) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Extend the start of the input vector to match the target time index.

        This method handles extrapolation of the first point if allowed.
        """
        if not self._extrapolate_first_point:
            raise ValueError("Cannot extend start without extrapolation.")

        num_periods_to_extend = self._periods_between(
            self._start_time,
            target_timeindex.get_start_time(),
            self._period_duration,
            self._is_52_week_years,
        )
        extended_vector = np.concatenate((np.full(num_periods_to_extend, input_vector[0]), input_vector))

        transformed_timeindex = self.copy_with(
            start_time=target_timeindex.get_start_time(),
            num_periods=self._num_periods + num_periods_to_extend,
        )

        return transformed_timeindex, extended_vector

    def _extend_end(
        self,
        input_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
    ) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        if not self._extrapolate_last_point:
            raise ValueError("Cannot extend end without extrapolation.")

        num_periods_to_extend = self._periods_between(
            self.get_stop_time(),
            target_timeindex.get_stop_time(),
            self._period_duration,
            self._is_52_week_years,
        )
        extended_vector = np.concatenate((input_vector, np.full(num_periods_to_extend, input_vector[-1])))
        target_timeindex = self.copy_with(num_periods=self._num_periods + num_periods_to_extend)

        return target_timeindex, extended_vector

    def _repeat_oneyear(self, input_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        """
        Repeat the one-year time index.

        This method creates a new time vector by repeating the input vector over the time period defined by the target time index.

        Parameters
        ----------
        input_vector : NDArray
            The input vector to be repeated.
        target_timeindex : FixedFrequencyTimeIndex
            The target time index defining the start and duration of the target period.

        Returns
        -------
        tuple[FixedFrequencyTimeIndex, NDArray]
            A tuple containing the new FixedFrequencyTimeIndex and the transformed input vector.

        """
        if self.is_52_week_years():
            transformed_vector = self._repeat_one_year_modeltime(
                input_vector=input_vector,
                target_timeindex=target_timeindex,
            )
        else:
            transformed_vector = self._repeat_one_year_isotime(
                input_vector=input_vector,
                target_timeindex=target_timeindex,
            )
        transformed_timeindex = self.copy_with(
            start_time=target_timeindex.get_start_time(),
            num_periods=transformed_vector.size,
        )

        return transformed_timeindex, transformed_vector

    def _repeat_one_year_isotime(self, input_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex) -> NDArray:
        """
        Repeat the one-year ISO time index.

        This method creates a new time vector by repeating the input vector over the time period defined by the target time index.

        Parameters
        ----------
        input_vector : NDArray
            The input vector to be repeated.
        target_timeindex : FixedFrequencyTimeIndex
            The target time index defining the start and stop times for the repetition.

        Returns
        -------
        NDArray
            The repeated vector that matches the target time index.

        """
        return v_ops.repeat_oneyear_isotime(
            input_vector=input_vector,
            input_start_date=self._start_time,
            period_duration=self.get_period_duration(),
            output_start_date=target_timeindex.get_start_time(),
            output_end_date=target_timeindex.get_stop_time(),
        )

    def _repeat_one_year_modeltime(self, input_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex) -> NDArray:
        """
        Repeat the one-year model time index.

        This method creates a new time vector by repeating the input vector over the time period defined by the target time index.

        Parameters
        ----------
        input_vector : NDArray
            The input vector to be repeated.
        target_timeindex : FixedFrequencyTimeIndex
            The target time index defining the start and stop times for the repetition.

        Returns
        -------
        NDArray
            The repeated vector that matches the target time index.

        """
        return v_ops.repeat_oneyear_modeltime(
            input_vector=input_vector,
            input_start_date=self._start_time,
            period_duration=self.get_period_duration(),
            output_start_date=target_timeindex.get_start_time(),
            output_end_date=target_timeindex.get_stop_time(),
        )

    def _adjust_period(self, input_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex) -> tuple[FixedFrequencyTimeIndex, NDArray]:
        if target_timeindex.get_start_time() < self._start_time:
            if self._extrapolate_first_point:
                return self._extend_start(input_vector, target_timeindex)
            msg = (
                "Cannot write into fixed frequency: incompatible time indices. "
                "Start time of the target index is before the start time of the source index "
                "and extrapolate_first_point is False.\n"
                f"Input timeindex: {self}\n"
                f"Target timeindex: {target_timeindex}"
            )
            raise ValueError(msg)
        if target_timeindex.get_stop_time() > self.get_stop_time():
            if self._extrapolate_last_point:
                return self._extend_end(input_vector, target_timeindex)
            msg = (
                "Cannot write into fixed frequency: incompatible time indices. "
                "'stop_time' of the target index is after the 'stop_time' of the source index "
                "and 'extrapolate_last_point' is False.\n"
                f"Input timeindex: {self}\n"
                f"Target timeindex: {target_timeindex}"
            )
            raise ValueError(msg)
        if target_timeindex.get_start_time() > self.get_start_time():
            return self._slice_start(input_vector, target_timeindex)

        if target_timeindex.get_stop_time() < self.get_stop_time():
            return self._slice_end(input_vector, target_timeindex)
        return target_timeindex, input_vector

    def _periods_between(self, first_time: datetime, second_time: datetime, period_duration: timedelta, is_52_week_years: bool) -> int:
        """
        Calculate the number of periods between two times.

        Parameters
        ----------
        first_time : datetime
            The first time point.
        second_time : datetime
            The second time point.
        period_duration : timedelta
            The duration of each period.
        is_52_week_years : bool
            Whether to use 52-week years.

        Returns
        -------
        int
            The number of periods between the two times.

        """
        start = min(first_time, second_time)
        end = max(first_time, second_time)
        total_period = end - start

        if is_52_week_years:
            weeks_53 = v_ops._find_all_week_53_periods(start, end)  # noqa: SLF001
            total_period -= timedelta(weeks=len(weeks_53))

        return abs(total_period) // period_duration

    def copy_with(
        self,
        start_time: datetime | None = None,
        period_duration: timedelta | None = None,
        num_periods: int | None = None,
        is_52_week_years: bool | None = None,
        extrapolate_first_point: bool | None = None,
        extrapolate_last_point: bool | None = None,
    ) -> FixedFrequencyTimeIndex:
        """
        Create a copy of the FixedFrequencyTimeIndex with the same attributes, allowing specific fields to be overridden.

        Parameters
        ----------
        start_time : datetime, optional
            Override for the start time.
        period_duration : timedelta, optional
            Override for the period duration.
        num_periods : int, optional
            Override for the number of periods.
        is_52_week_years : bool, optional
            Override for 52-week years flag.
        extrapolate_first_point : bool, optional
            Override for extrapolate first point flag.
        extrapolate_last_point : bool, optional
            Override for extrapolate last point flag.

        Returns
        -------
        FixedFrequencyTimeIndex
            A new instance with the updated attributes.

        """
        return FixedFrequencyTimeIndex(
            start_time=start_time if start_time is not None else self._start_time,
            period_duration=period_duration if period_duration is not None else self._period_duration,
            num_periods=num_periods if num_periods is not None else self._num_periods,
            is_52_week_years=is_52_week_years if is_52_week_years is not None else self._is_52_week_years,
            extrapolate_first_point=extrapolate_first_point if extrapolate_first_point is not None else self._extrapolate_first_point,
            extrapolate_last_point=extrapolate_last_point if extrapolate_last_point is not None else self._extrapolate_last_point,
        )

    def copy_as_reference_period(self, reference_period: ReferencePeriod) -> FixedFrequencyTimeIndex:
        """
        Create a copy of the FixedFrequencyTimeIndex with one period matching the given reference period.

        Parameters
        ----------
        reference_period : ReferencePeriod
            The reference period to match for the output.

        Returns
        -------
        FixedFrequencyTimeIndex
            A new instance with the updated attributes.

        """
        if reference_period is None:
            raise ValueError("Cannot copy as reference period when provided reference_period is None.")

        start_year = reference_period.get_start_year()
        num_years = reference_period.get_num_years()
        start_time = datetime.fromisocalendar(start_year, 1, 1)

        if self.is_52_week_years():
            period_duration = timedelta(weeks=52 * num_years)
        else:
            stop_time = datetime.fromisocalendar(start_year + num_years, 1, 1)
            period_duration = stop_time - start_time
        return self.copy_with(
            start_time=start_time,
            num_periods=1,
            period_duration=period_duration,
        )

    def get_datetime_list(self) -> list[datetime]:
        """
        Return list of datetime including stop time.

        Note: When `is_52_week_years` is True, the returned list will skip any datetimes that fall in week 53.
        """
        start_time = self.get_start_time()
        num_periods = self.get_num_periods()
        period_duration = self.get_period_duration()

        if not self._is_52_week_years:
            return [start_time + i * period_duration for i in range(num_periods + 1)]

        datetime_list = []
        i = 0
        count = 0
        while count <= num_periods:
            current = start_time + i * period_duration
            if current.isocalendar().week != 53:  # noqa: PLR2004
                datetime_list.append(current)
                count += 1
            i += 1

        return datetime_list
__eq__(other) -> bool

Check if equal to other.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if equal to other."""
    if not isinstance(other, FixedFrequencyTimeIndex):
        return False
    return (
        self._start_time == other._start_time
        and self._period_duration == other._period_duration
        and self._num_periods == other._num_periods
        and self._is_52_week_years == other._is_52_week_years
        and self._extrapolate_first_point == other._extrapolate_first_point
        and self._extrapolate_last_point == other._extrapolate_last_point
    )
__hash__() -> int

Return the hash value for the FixedFrequencyTimeIndex.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def __hash__(self) -> int:
    """Return the hash value for the FixedFrequencyTimeIndex."""
    return hash(
        (
            self._start_time,
            self._period_duration,
            self._num_periods,
            self._is_52_week_years,
            self._extrapolate_first_point,
            self._extrapolate_last_point,
        ),
    )
__init__(start_time: datetime, period_duration: timedelta, num_periods: int, is_52_week_years: bool, extrapolate_first_point: bool, extrapolate_last_point: bool) -> None

Initialize a FixedFrequencyTimeIndex.

Parameters:

Name Type Description Default
start_time datetime

The starting datetime of the time index.

required
period_duration timedelta

The duration of each period.

required
num_periods int

The number of periods in the time index. Must be greater than 0.

required
is_52_week_years bool

Whether to use 52-week years.

required
extrapolate_first_point bool

Whether to allow extrapolation of the first point.

required
extrapolate_last_point bool

Whether to allow extrapolation of the last point.

required
Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def __init__(
    self,
    start_time: datetime,
    period_duration: timedelta,
    num_periods: int,
    is_52_week_years: bool,
    extrapolate_first_point: bool,
    extrapolate_last_point: bool,
) -> None:
    """
    Initialize a FixedFrequencyTimeIndex.

    Args:
        start_time (datetime): The starting datetime of the time index.
        period_duration (timedelta): The duration of each period.
        num_periods (int): The number of periods in the time index. Must be greater than 0.
        is_52_week_years (bool): Whether to use 52-week years.
        extrapolate_first_point (bool): Whether to allow extrapolation of the first point.
        extrapolate_last_point (bool): Whether to allow extrapolation of the last point.

    """
    if num_periods <= 0:
        msg = f"num_periods must be a positive integer. Got {num_periods}."
        raise ValueError(msg)
    if period_duration < timedelta(seconds=1):
        msg = f"period_duration must be at least one second. Got {period_duration}."
        raise ValueError(msg)
    if not period_duration.total_seconds().is_integer():
        msg = f"period_duration must be a whole number of seconds, got {period_duration.total_seconds()} s"
        raise ValueError(msg)
    if is_52_week_years and start_time.isocalendar().week == 53:  # noqa: PLR2004
        raise ValueError("Week of start_time must not be 53 when is_52_week_years is True.")
    self._check_type(num_periods, int)
    self._start_time = start_time
    self._period_duration = period_duration
    self._num_periods = num_periods
    self._is_52_week_years = is_52_week_years
    self._extrapolate_first_point = extrapolate_first_point
    self._extrapolate_last_point = extrapolate_last_point
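
Example: a minimal construction sketch. The import path is an assumption based on the source file location and may differ in your installation.

from datetime import datetime, timedelta

from framcore.timeindexes import FixedFrequencyTimeIndex  # assumed import path

# One week of daily periods starting on Monday of ISO week 1, 2025.
index = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),  # 2024-12-30
    period_duration=timedelta(days=1),
    num_periods=7,
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
print(index.get_stop_time())  # 2025-01-06 00:00:00
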
__repr__() -> str

Return a string representation of the FixedFrequencyTimeIndex.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def __repr__(self) -> str:
    """Return a string representation of the FixedFrequencyTimeIndex."""
    return (
        f"{type(self).__name__}("
        f"start_time={self._start_time}, "
        f"period_duration={self._period_duration}, "
        f"num_periods={self._num_periods}, "
        f"is_52_week_years={self._is_52_week_years}, "
        f"extrapolate_first_point={self._extrapolate_first_point}, "
        f"extrapolate_last_point={self._extrapolate_last_point})"
    )
copy_as_reference_period(reference_period: ReferencePeriod) -> FixedFrequencyTimeIndex

Create a copy of the FixedFrequencyTimeIndex with one period matching the given reference period.

Parameters

reference_period : ReferencePeriod
    The reference period to match for the output.

Returns

FixedFrequencyTimeIndex
    A new instance with the updated attributes.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def copy_as_reference_period(self, reference_period: ReferencePeriod) -> FixedFrequencyTimeIndex:
    """
    Create a copy of the FixedFrequencyTimeIndex with one period matching the given reference period.

    Parameters
    ----------
    reference_period : ReferencePeriod
        The reference period to match for the output.

    Returns
    -------
    FixedFrequencyTimeIndex
        A new instance with the updated attributes.

    """
    if reference_period is None:
        raise ValueError("Cannot copy as reference period when provided reference_period is None.")

    start_year = reference_period.get_start_year()
    num_years = reference_period.get_num_years()
    start_time = datetime.fromisocalendar(start_year, 1, 1)

    if self.is_52_week_years():
        period_duration = timedelta(weeks=52 * num_years)
    else:
        stop_time = datetime.fromisocalendar(start_year + num_years, 1, 1)
        period_duration = stop_time - start_time
    return self.copy_with(
        start_time=start_time,
        num_periods=1,
        period_duration=period_duration,
    )
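
Example: a sketch continuing the construction example under __init__, collapsing the index to a single period covering ISO years 2030-2031. The ReferencePeriod constructor signature is taken from its use in get_reference_period; its import path is not shown in this reference.

ref = ReferencePeriod(start_year=2030, num_years=2)
yearly = index.copy_as_reference_period(ref)
# yearly.get_num_periods() == 1
# yearly.get_start_time() == datetime.fromisocalendar(2030, 1, 1)
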
copy_with(start_time: datetime | None = None, period_duration: timedelta | None = None, num_periods: int | None = None, is_52_week_years: bool | None = None, extrapolate_first_point: bool | None = None, extrapolate_last_point: bool | None = None) -> FixedFrequencyTimeIndex

Create a copy of the FixedFrequencyTimeIndex with the same attributes, allowing specific fields to be overridden.

Parameters

start_time : datetime, optional
    Override for the start time.
period_duration : timedelta, optional
    Override for the period duration.
num_periods : int, optional
    Override for the number of periods.
is_52_week_years : bool, optional
    Override for 52-week years flag.
extrapolate_first_point : bool, optional
    Override for extrapolate first point flag.
extrapolate_last_point : bool, optional
    Override for extrapolate last point flag.

Returns

FixedFrequencyTimeIndex
    A new instance with the updated attributes.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def copy_with(
    self,
    start_time: datetime | None = None,
    period_duration: timedelta | None = None,
    num_periods: int | None = None,
    is_52_week_years: bool | None = None,
    extrapolate_first_point: bool | None = None,
    extrapolate_last_point: bool | None = None,
) -> FixedFrequencyTimeIndex:
    """
    Create a copy of the FixedFrequencyTimeIndex with the same attributes, allowing specific fields to be overridden.

    Parameters
    ----------
    start_time : datetime, optional
        Override for the start time.
    period_duration : timedelta, optional
        Override for the period duration.
    num_periods : int, optional
        Override for the number of periods.
    is_52_week_years : bool, optional
        Override for 52-week years flag.
    extrapolate_first_point : bool, optional
        Override for extrapolate first point flag.
    extrapolate_last_point : bool, optional
        Override for extrapolate last point flag.

    Returns
    -------
    FixedFrequencyTimeIndex
        A new instance with the updated attributes.

    """
    return FixedFrequencyTimeIndex(
        start_time=start_time if start_time is not None else self._start_time,
        period_duration=period_duration if period_duration is not None else self._period_duration,
        num_periods=num_periods if num_periods is not None else self._num_periods,
        is_52_week_years=is_52_week_years if is_52_week_years is not None else self._is_52_week_years,
        extrapolate_first_point=extrapolate_first_point if extrapolate_first_point is not None else self._extrapolate_first_point,
        extrapolate_last_point=extrapolate_last_point if extrapolate_last_point is not None else self._extrapolate_last_point,
    )
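
Example: a sketch continuing the construction example under __init__, doubling the resolution while keeping the other attributes.

finer = index.copy_with(
    period_duration=timedelta(hours=12),
    num_periods=index.get_num_periods() * 2,
)
# finer spans the same week as index, but as 14 half-day periods.
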
extrapolate_first_point() -> bool

Return True if first value can be extrapolated backwards to fill missing values.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def extrapolate_first_point(self) -> bool:
    """Return True if first value can be extrapolated backwards to fill missing values."""
    return self._extrapolate_first_point
extrapolate_last_point() -> bool

Return True if last value can be extrapolated forward to fill missing values.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def extrapolate_last_point(self) -> bool:
    """Return True if last value can be extrapolated forward to fill missing values."""
    return self._extrapolate_last_point
get_datetime_list() -> list[datetime]

Return list of datetime including stop time.

Note: When is_52_week_years is True, the returned list will skip any datetimes that fall in week 53.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_datetime_list(self) -> list[datetime]:
    """
    Return list of datetime including stop time.

    Note: When `is_52_week_years` is True, the returned list will skip any datetimes that fall in week 53.
    """
    start_time = self.get_start_time()
    num_periods = self.get_num_periods()
    period_duration = self.get_period_duration()

    if not self._is_52_week_years:
        return [start_time + i * period_duration for i in range(num_periods + 1)]

    datetime_list = []
    i = 0
    count = 0
    while count <= num_periods:
        current = start_time + i * period_duration
        if current.isocalendar().week != 53:  # noqa: PLR2004
            datetime_list.append(current)
            count += 1
        i += 1

    return datetime_list
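
Example: a small sketch reusing the imports from the construction example under __init__. The returned list has num_periods + 1 entries.

daily = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),
    period_duration=timedelta(days=1),
    num_periods=2,
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
daily.get_datetime_list()
# [datetime(2024, 12, 30), datetime(2024, 12, 31), datetime(2025, 1, 1)]
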
get_fingerprint() -> Fingerprint

Get the fingerprint.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint."""
    return self.get_fingerprint_default()
get_num_periods() -> int

Get the number of periods.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_num_periods(self) -> int:
    """Get the number of points."""
    return self._num_periods
get_period_average(vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float

Get the average over the period from the vector.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_period_average(self, vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float:
    """Get the average over the period from the vector."""
    self._check_type(vector, np.ndarray)
    self._check_type(start_time, datetime)
    self._check_type(duration, timedelta)
    self._check_type(is_52_week_years, bool)

    if vector.shape != (self.get_num_periods(),):
        msg = f"Vector shape {vector.shape} does not match number of periods {self.get_num_periods()} of timeindex ({self})."
        raise ValueError(msg)
    target_timeindex = FixedFrequencyTimeIndex(
        start_time=start_time,
        period_duration=duration,
        num_periods=1,
        is_52_week_years=is_52_week_years,
        extrapolate_first_point=self.extrapolate_first_point(),
        extrapolate_last_point=self.extrapolate_last_point(),
    )
    target_vector = np.zeros(1, dtype=vector.dtype)
    self.write_into_fixed_frequency(
        target_vector=target_vector,
        target_timeindex=target_timeindex,
        input_vector=vector,
    )
    return target_vector[0]
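
Example: a sketch averaging the first six hours of a 24-hour vector, assuming the internal aggregation averages rather than sums (consistent with the method name and is_aggfunc_sum=False in the recursive writer).

import numpy as np

hourly_day = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),
    period_duration=timedelta(hours=1),
    num_periods=24,
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
values = np.arange(24, dtype=float)
avg = hourly_day.get_period_average(
    vector=values,
    start_time=hourly_day.get_start_time(),
    duration=timedelta(hours=6),
    is_52_week_years=False,
)
# avg == 2.5, the mean of values[0:6]
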
get_period_duration() -> timedelta

Get the period duration.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_period_duration(self) -> timedelta:
    """Get the period duration."""
    return self._period_duration
get_reference_period() -> ReferencePeriod | None

Get the reference period (only if is_whole_years() is True).

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_reference_period(self) -> ReferencePeriod | None:
    """Get the reference period (only if is_whole_years() is True)."""
    if self.is_whole_years():
        start_year = self.get_start_time().isocalendar().year
        if self._is_52_week_years:
            num_years = (self.get_num_periods() * self.get_period_duration()) // timedelta(weeks=52)
        else:
            stop_year = self.get_stop_time().isocalendar().year
            num_years = stop_year - start_year
        return ReferencePeriod(start_year=start_year, num_years=num_years)
    return None
get_start_time() -> datetime

Get the start time.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_start_time(self) -> datetime:
    """Get the start time."""
    return self._start_time
get_stop_time() -> datetime

Get the stop time of the TimeIndex.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_stop_time(self) -> datetime:
    """Get the stop time of the TimeIndex."""
    if not self._is_52_week_years:
        return self._start_time + self._period_duration * self._num_periods

    return v_ops.calculate_52_week_years_stop_time(
        start_time=self._start_time,
        period_duration=self._period_duration,
        num_periods=self._num_periods,
    )
get_timezone() -> tzinfo | None

Get the timezone.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def get_timezone(self) -> tzinfo | None:
    """Get the timezone."""
    return self._start_time.tzinfo
is_52_week_years() -> bool

Return True if 52-week years and False if real ISO time.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def is_52_week_years(self) -> bool:
    """Return True if 52-week years and False if real ISO time."""
    return self._is_52_week_years
is_constant() -> bool

Return True if the time index is constant (single period and both extrapolation flags are True).

Returns

bool
    True if the time index is constant, False otherwise.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def is_constant(self) -> bool:
    """
    Return True if the time index is constant (single period and both extrapolation flags are True).

    Returns
    -------
    bool
        True if the time index is constant, False otherwise.

    """
    return self._num_periods == 1 and self._extrapolate_first_point == self._extrapolate_last_point is True
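
Example: a sketch of a constant index. A single period with both extrapolation flags set represents one value that is valid for all time.

constant = FixedFrequencyTimeIndex(
    start_time=datetime(2025, 1, 1),
    period_duration=timedelta(days=1),
    num_periods=1,
    is_52_week_years=False,
    extrapolate_first_point=True,
    extrapolate_last_point=True,
)
constant.is_constant()  # True
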
is_one_year() -> bool

Return True if exactly one whole year.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def is_one_year(self) -> bool:
    """Return True if exactly one whole year."""
    start_time = self.get_start_time()
    start_year, start_week, start_weekday = start_time.isocalendar()
    if not start_week == start_weekday == 1:
        return False

    if not self.is_52_week_years():
        period_duration = self.get_period_duration()
        num_periods = self.get_num_periods()
        stop_time = start_time + num_periods * period_duration
        stop_year, stop_week, stop_weekday = stop_time.isocalendar()
        if not stop_week == stop_weekday == 1:
            return False
        return start_year + 1 == stop_year

    period_duration = self.get_period_duration()
    num_periods = self.get_num_periods()
    seconds_52_week_year = 52 * 168 * 3600
    num_years = (period_duration * num_periods).total_seconds() / seconds_52_week_year
    return num_years == 1.0
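
Example: a sketch using real ISO time. 52 weekly periods starting on Monday of ISO week 1, 2025 end exactly on Monday of ISO week 1, 2026, since 2025 has 52 ISO weeks.

weekly_year = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),
    period_duration=timedelta(weeks=1),
    num_periods=52,
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
weekly_year.is_one_year()  # True
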
is_same_resolution(other: FixedFrequencyTimeIndex) -> bool

Check if the period duration is the same.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def is_same_resolution(self, other: FixedFrequencyTimeIndex) -> bool:
    """Check if the period duration is the same."""
    return self._period_duration == other.get_period_duration()
is_whole_years() -> bool

Return True if index covers one or more full years.

The start_time must be the first week and weekday of a year. For real ISO time, the stop_time must also be the first week and weekday of a year. For 52-week years, the total duration must be an integer number of 52-week years.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def is_whole_years(self) -> bool:
    """
    Return True if index covers one or more full years.

    The start_time must be the first week and weekday of a year. For real ISO time,
    the stop_time must also be the first week and weekday of a year. For 52-week years,
    the total duration must be an integer number of 52-week years.
    """
    start_time = self.get_start_time()
    start_year, start_week, start_weekday = start_time.isocalendar()
    if not start_week == start_weekday == 1:
        return False

    if not self.is_52_week_years():
        period_duration = self.get_period_duration()
        num_periods = self.get_num_periods()
        stop_time = start_time + num_periods * period_duration
        stop_year, stop_week, stop_weekday = stop_time.isocalendar()
        if stop_year < start_year:
            msg = f"Stop year must be after start year. Current stop year: {stop_year} and start year: {start_year}"
            raise ValueError(msg)
        return stop_week == stop_weekday == 1

    period_duration = self.get_period_duration()
    num_periods = self.get_num_periods()
    seconds_52_week_year = 52 * 168 * 3600
    num_years = (period_duration * num_periods).total_seconds() / seconds_52_week_year
    return num_years.is_integer()
slice(input_vector: NDArray, start_year: int, num_years: int, target_start_year: int, target_num_years: int) -> NDArray

Periodize the input vector to match the target timeindex.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def slice(
    self,
    input_vector: NDArray,
    start_year: int,
    num_years: int,
    target_start_year: int,
    target_num_years: int,
) -> NDArray:
    """Periodize the input vector to match the target timeindex."""
    if self._is_52_week_years:
        return v_ops.periodize_modeltime(input_vector, start_year, num_years, target_start_year, target_num_years)
    return v_ops.periodize_isotime(input_vector, start_year, num_years, target_start_year, target_num_years)
total_duration() -> timedelta

Get the duration of the TimeIndex.

Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def total_duration(self) -> timedelta:
    """Get the duration of the TimeIndex."""
    return self._period_duration * self._num_periods
write_into_fixed_frequency(target_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex, input_vector: NDArray) -> None

Write the given input_vector into the target_vector according to the target_timeindex, applying necessary transformations.

Parameters

target_vector : NDArray
    The array where the input_vector will be written to, modified in place.
target_timeindex : FixedFrequencyTimeIndex
    The time index defining the fixed frequency structure for writing the input_vector into the target_vector.
input_vector : NDArray
    The array containing the data to be written into the target_vector.

Notes
  • If the object is constant (as determined by self.is_constant()), the input_vector is expected to have a single value, which will be used to fill the entire target_vector.
  • Otherwise, the method delegates the operation to _write_into_fixed_frequency_recursive for handling more complex cases.
Source code in framcore/timeindexes/FixedFrequencyTimeIndex.py
def write_into_fixed_frequency(
    self,
    target_vector: NDArray,
    target_timeindex: FixedFrequencyTimeIndex,
    input_vector: NDArray,
) -> None:
    """
    Write the given input_vector into the target_vector according to the target_timeindex, applying necessary transformations.

    Parameters
    ----------
    target_vector : NDArray
        The array where the input_vector will be written to, modified in place.
    target_timeindex : FixedFrequencyTimeIndex
        The time index defining the fixed frequency structure for writing the input_vector into the target_vector.
    input_vector : NDArray
        The array containing the data to be written into the target_vector.

    Notes
    -----
    - If the object is constant (as determined by `self.is_constant()`), the input_vector is expected to have a single value,
      which will be used to fill the entire target_vector.
    - Otherwise, the method delegates the operation to `_write_into_fixed_frequency_recursive` for handling more complex cases.

    """
    if self.is_constant():
        assert input_vector.size == 1
        target_vector.fill(input_vector[0])
    else:
        self._write_into_fixed_frequency_recursive(target_vector, target_timeindex, input_vector)
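
Example: a sketch disaggregating a two-day vector to half-day resolution. The expected output assumes v_ops.disaggregate repeats each input value across the finer periods when is_disaggfunc_repeat=True.

source = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),
    period_duration=timedelta(days=1),
    num_periods=2,
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
target = source.copy_with(period_duration=timedelta(hours=12), num_periods=4)
out = np.zeros(4)
source.write_into_fixed_frequency(
    target_vector=out,
    target_timeindex=target,
    input_vector=np.array([1.0, 2.0]),
)
# out == [1.0, 1.0, 2.0, 2.0]
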

HourlyIndex

HourlyIndex

Bases: ProfileTimeIndex

ProfileTimeIndex with one or more whole years with hourly resolution. Either years with 52 weeks or full iso calendar years.

No extrapolation inherited from ProfileTimeIndex.

Source code in framcore/timeindexes/HourlyIndex.py
class HourlyIndex(ProfileTimeIndex):
    """
    ProfileTimeIndex with one or more whole years with hourly resolution. Either years with 52 weeks or full iso calendar years.

    No extrapolation inherited from ProfileTimeIndex.
    """

    def __init__(
        self,
        start_year: int,
        num_years: int,
        is_52_week_years: bool = True,
    ) -> None:
        """
        Initialize HourlyIndex over a number of years. Either years with 52 weeks or full iso calendar years.

        Args:
            start_year (int): First year in the index.
            num_years (int): Number of years in the index.
            is_52_week_years (bool, optional): Whether to use 52-week years. If False, full iso calendar years are used. Defaults to True.

        """
        super().__init__(
            start_year=start_year,
            num_years=num_years,
            period_duration=timedelta(hours=1),
            is_52_week_years=is_52_week_years,
        )
__init__(start_year: int, num_years: int, is_52_week_years: bool = True) -> None

Initialize HourlyIndex over a number of years. Either years with 52 weeks or full iso calendar years.

Parameters:

Name Type Description Default
start_year int

First year in the index.

required
num_years int

Number of years in the index.

required
is_52_week_years bool

Whether to use 52-week years. If False, full iso calendar years are used. Defaults to True.

True
Source code in framcore/timeindexes/HourlyIndex.py
def __init__(
    self,
    start_year: int,
    num_years: int,
    is_52_week_years: bool = True,
) -> None:
    """
    Initialize HourlyIndex over a number of years. Either years with 52 weeks or full iso calendar years.

    Args:
        start_year (int): First year in the index.
        num_years (int): Number of years in the index.
        is_52_week_years (bool, optional): Whether to use 52-week years. If False, full iso calendar years are used. Defaults to True.

    """
    super().__init__(
        start_year=start_year,
        num_years=num_years,
        period_duration=timedelta(hours=1),
        is_52_week_years=is_52_week_years,
    )
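
Example: a minimal usage sketch. The import path is assumed from the source file location, and the period count assumes ProfileTimeIndex sizes each 52-week year as 52 * 7 * 24 hourly periods.

from framcore.timeindexes import HourlyIndex  # assumed import path

hourly = HourlyIndex(start_year=2025, num_years=1)
hourly.get_num_periods()  # expected 8736 with the default 52-week years
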

IsoCalendarDay

IsoCalendarDay

Bases: SinglePeriodTimeIndex

Represents a single ISO calendar day using year, week, and day values.

Inherits from SinglePeriodTimeIndex and provides a time index for one day, constructed from datetime.fromisocalendar(year, week, day).

Source code in framcore/timeindexes/IsoCalendarDay.py
class IsoCalendarDay(SinglePeriodTimeIndex):
    """
    Represents a single ISO calendar day using year, week, and day values.

    Inherits from SinglePeriodTimeIndex and provides a time index for one day,
    constructed from datetime.fromisocalendar(year, week, day).

    """

    def __init__(self, year: int, week: int, day: int) -> None:
        """
        IsoCalendarDay represent a day from datetime.fromisocalendar(year, week, day).

        No extrapolation and is_52_week_years=False. Useful for testing.

        Args:
            year (int): The ISO year.
            week (int): The ISO week number (1-53).
            day (int): The ISO weekday (1=Monday, 7=Sunday).

        """
        super().__init__(
            start_time=datetime.fromisocalendar(year, week, day),
            period_duration=timedelta(days=1),
            is_52_week_years=False,
            extrapolate_first_point=False,
            extrapolate_last_point=False,
        )
__init__(year: int, week: int, day: int) -> None

IsoCalendarDay represent a day from datetime.fromisocalendar(year, week, day).

No extrapolation and is_52_week_years=False. Useful for testing.

Parameters:

Name Type Description Default
year int

The ISO year.

required
week int

The ISO week number (1-53).

required
day int

The ISO weekday (1=Monday, 7=Sunday).

required
Source code in framcore/timeindexes/IsoCalendarDay.py
def __init__(self, year: int, week: int, day: int) -> None:
    """
    IsoCalendarDay represent a day from datetime.fromisocalendar(year, week, day).

    No extrapolation and is_52_week_years=False. Useful for testing.

    Args:
        year (int): The ISO year.
        week (int): The ISO week number (1-53).
        day (int): The ISO weekday (1=Monday, 7=Sunday).

    """
    super().__init__(
        start_time=datetime.fromisocalendar(year, week, day),
        period_duration=timedelta(days=1),
        is_52_week_years=False,
        extrapolate_first_point=False,
        extrapolate_last_point=False,
    )
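
Example: a minimal sketch. The import path is assumed from the source file location, and the getters are assumed to be inherited from FixedFrequencyTimeIndex via SinglePeriodTimeIndex.

from framcore.timeindexes import IsoCalendarDay  # assumed import path

day = IsoCalendarDay(2025, 1, 1)
day.get_start_time()  # datetime(2024, 12, 30), Monday of ISO week 1, 2025
day.get_stop_time()   # datetime(2024, 12, 31)
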

ListTimeIndex

ListTimeIndex

Bases: TimeIndex

ListTimeIndex class for TimeIndexes with a list of timestamps. Subclass of TimeIndex.

This TimeIndex is defined by a list of timestamps, with possible irregular intervals. The last timestamp is not necessarily the end of the time vector, and the first timestamp is not necessarily the start of the time vector if extrapolation is enabled.

ListTimeIndex is not recommended for large time vectors, as it is less efficient.

Source code in framcore/timeindexes/ListTimeIndex.py
class ListTimeIndex(TimeIndex):
    """
    ListTimeIndex class for TimeIndexes with a list of timestamps. Subclass of TimeIndex.

    This TimeIndex is defined by a list of timestamps, with possible irregular intervals. The last timestamp is not
    necessarily the end of the time vector, and the first timestamp is not necessarily the start of the time vector
    if extrapolation is enabled.

    ListTimeIndex is not recommended for large time vectors, as it is less efficient.
    """

    def __init__(
        self,
        datetime_list: list[datetime],
        is_52_week_years: bool,
        extrapolate_first_point: bool,
        extrapolate_last_point: bool,
    ) -> None:
        """
        Initialize the ListTimeIndex class.

        Args:
            datetime_list (list[datetime]): List of datetime objects defining the time index. Must be ordered and contain more than one element.
            is_52_week_years (bool): Whether to use 52-week years. If False, full iso calendar years are used.
            extrapolate_first_point (bool): Whether to extrapolate the first point.
            extrapolate_last_point (bool): Whether to extrapolate the last point.

        Raises:
            ValueError: If datetime_list has less than two elements or is not ordered.

        """
        dts = datetime_list
        if len(dts) <= 1:
            msg = f"datetime_list must contain more than one element. Got {datetime_list}"
            raise ValueError(msg)
        if not all(dts[i] < dts[i + 1] for i in range(len(dts) - 1)):
            msg = f"All elements of datetime_list must be smaller/lower than the succeeding element. Dates must be ordered. Got {datetime_list}."
            raise ValueError(msg)
        if len(set(dt.tzinfo for dt in dts if dt is not None)) > 1:
            msg = f"Datetime objects in datetime_list have differing time zone information: {set(dt.tzinfo for dt in dts if dt is not None)}"
            raise ValueError(msg)
        if is_52_week_years and any(dts[i].isocalendar().week == 53 for i in range(len(dts))):  # noqa: PLR2004
            msg = "When is_52_week_years is True, datetime_list should not contain week 53 datetimes."
            raise ValueError(msg)

        self._datetime_list = datetime_list
        self._is_52_week_years = is_52_week_years
        self._extrapolate_first_point = extrapolate_first_point
        self._extrapolate_last_point = extrapolate_last_point

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if two ListTimeIndexes are equal."""
        if not isinstance(other, type(self)):
            return False
        return (
            self._datetime_list == other._datetime_list
            and self._extrapolate_first_point == other._extrapolate_first_point
            and self._extrapolate_last_point == other._extrapolate_last_point
        )

    def __hash__(self) -> int:
        """Return the hash of the ListTimeIndex."""
        return hash(
            (
                tuple(self._datetime_list),
                self._extrapolate_first_point,
                self._extrapolate_last_point,
            ),
        )

    def __repr__(self) -> str:
        """Return the string representation of the ListTimeIndex."""
        return (
            "ListTimeIndex("
            f"datetimelist={self._datetime_list}, "
            f"extrapolate_first_point={self._extrapolate_first_point}, "
            f"extrapolate_last_point={self._extrapolate_last_point})"
        )

    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint of the ListTimeIndex."""
        fingerprint = Fingerprint()
        fingerprint.add("datetime_list", self._datetime_list)
        fingerprint.add("is_52_week_years", self._is_52_week_years)
        fingerprint.add("extrapolate_first_point", self._extrapolate_first_point)
        fingerprint.add("extrapolate_last_point", self._extrapolate_last_point)
        return fingerprint

    def get_datetime_list(self) -> list[datetime]:
        """Get a list of all periods (num_periods + 1 datetimes)."""
        return self._datetime_list.copy()

    def get_timezone(self) -> tzinfo | None:
        """Get the timezone of the TimeIndex."""
        return self._datetime_list[0].tzinfo

    def get_num_periods(self) -> int:
        """Get the number of periods in the TimeIndex."""
        return len(self._datetime_list) - 1

    def is_52_week_years(self) -> bool:
        """Check if the TimeIndex is based on 52-week years."""
        return self._is_52_week_years

    def is_one_year(self) -> bool:
        """Return True if exactly one whole year."""
        if self._extrapolate_first_point or self._extrapolate_last_point:
            return False
        start_time = self._datetime_list[0]
        stop_time = self._datetime_list[-1]
        start_year, start_week, start_weekday = start_time.isocalendar()

        if not start_weekday == start_week == 1:
            return False

        if self._is_52_week_years:
            expected_stop_time = start_time + timedelta(weeks=52)
            if expected_stop_time.isocalendar().week == 53:  # noqa: PLR2004
                expected_stop_time += timedelta(weeks=1)
            return stop_time == expected_stop_time

        stop_year, stop_week, stop_weekday = stop_time.isocalendar()
        return (start_year + 1 == stop_year) and (stop_weekday == stop_week == 1)

    def is_whole_years(self) -> bool:
        """Return True if index covers one or more full years."""
        start_time = self._datetime_list[0]
        _, start_week, start_weekday = start_time.isocalendar()

        if not start_week == start_weekday == 1:
            return False

        stop_time = self._datetime_list[-1]

        if not self.is_52_week_years():
            _, stop_week, stop_weekday = stop_time.isocalendar()
            return stop_week == stop_weekday == 1

        total_period = self.total_duration()
        total_seconds = int(total_period.total_seconds())
        seconds_per_year = 52 * 7 * 24 * 3600

        return total_seconds % seconds_per_year == 0

    def extrapolate_first_point(self) -> bool:
        """Check if the TimeIndex should extrapolate the first point."""
        return self._extrapolate_first_point

    def extrapolate_last_point(self) -> bool:
        """Check if the TimeIndex should extrapolate the last point."""
        return self._extrapolate_last_point

    def get_period_average(self, vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float:
        """Get the average over the period from the vector."""
        self._check_type(vector, np.ndarray)
        self._check_type(start_time, datetime)
        self._check_type(duration, timedelta)
        self._check_type(is_52_week_years, bool)

        if vector.shape != (self.get_num_periods(),):
            msg = f"Vector shape {vector.shape} does not match number of periods {self.get_num_periods()} of timeindex ({self})."
            raise ValueError(msg)

        if not self.extrapolate_first_point():
            if start_time < self._datetime_list[0]:
                msg = f"start_time {start_time} is before start of timeindex {self._datetime_list[0]}, and extrapolate_first_point is False."
                raise ValueError(msg)
            if (start_time + duration) < self._datetime_list[0]:
                msg = f"End time {start_time + duration} is before start of timeindex {self._datetime_list[0]}, and extrapolate_first_point is False."
                raise ValueError(msg)

        if not self.extrapolate_last_point():
            if (start_time + duration) > self._datetime_list[-1]:
                msg = f"End time {start_time + duration} is after end of timeindex {self._datetime_list[-1]}, and extrapolate_last_point is False."
                raise ValueError(msg)
            if start_time > self._datetime_list[-1]:
                msg = f"start_time {start_time} is after end of timeindex {self._datetime_list[-1]}, and extrapolate_last_point is False."
                raise ValueError(msg)

        target_timeindex = FixedFrequencyTimeIndex(
            start_time=start_time,
            period_duration=duration,
            num_periods=1,
            is_52_week_years=is_52_week_years,
            extrapolate_first_point=self.extrapolate_first_point(),
            extrapolate_last_point=self.extrapolate_last_point(),
        )

        target_vector = np.zeros(1, dtype=vector.dtype)
        self.write_into_fixed_frequency(
            target_vector=target_vector,
            target_timeindex=target_timeindex,
            input_vector=vector,
        )
        return target_vector[0]

    def write_into_fixed_frequency(
        self,
        target_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
        input_vector: NDArray,
    ) -> None:
        """Write the input vector into the target vector using the target FixedFrequencyTimeIndex."""
        self._check_type(target_vector, np.ndarray)
        self._check_type(target_timeindex, FixedFrequencyTimeIndex)
        self._check_type(input_vector, np.ndarray)

        dts: list[datetime] = self._datetime_list

        durations = set(self._microseconds(period_duration(dts[i], dts[i + 1], self._is_52_week_years)) for i in range(len(dts) - 1))
        smallest_common_period_duration = functools.reduce(math.gcd, durations)

        num_periods_ff = self._microseconds(self.total_duration()) // smallest_common_period_duration
        input_vector_ff = np.zeros(num_periods_ff, dtype=target_vector.dtype)

        i_start_ff = 0
        for i in range(len(dts) - 1):
            num_periods = self._microseconds(period_duration(dts[i], dts[i + 1], self._is_52_week_years)) // smallest_common_period_duration
            i_stop_ff = i_start_ff + num_periods
            input_vector_ff[i_start_ff:i_stop_ff] = input_vector[i]
            i_start_ff = i_stop_ff

        input_timeindex_ff = FixedFrequencyTimeIndex(
            start_time=dts[0],
            num_periods=num_periods_ff,
            period_duration=timedelta(microseconds=smallest_common_period_duration),
            is_52_week_years=self.is_52_week_years(),
            extrapolate_first_point=self.extrapolate_first_point(),
            extrapolate_last_point=self.extrapolate_last_point(),
        )

        input_timeindex_ff.write_into_fixed_frequency(
            target_vector=target_vector,
            target_timeindex=target_timeindex,
            input_vector=input_vector_ff,
        )

    def total_duration(self) -> timedelta:
        """
        Return the total duration covered by the time index.

        Returns
        -------
        timedelta
            The duration from the first to the last datetime in the index, skipping all week-53 periods if the index uses the 52-week time format.

        """
        start_time = self._datetime_list[0]
        end_time = self._datetime_list[-1]
        return period_duration(start_time, end_time, self.is_52_week_years())

    def _microseconds(self, duration: timedelta) -> int:
        return int(duration.total_seconds() * 1e6)

    def is_constant(self) -> bool:
        """
        Return True if the time index is constant (single period and both extrapolation flags are True).

        Returns
        -------
        bool
            True if the time index is constant, False otherwise.

        """
        return self.get_num_periods() == 1 and self.extrapolate_first_point() == self.extrapolate_last_point() is True
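
For illustration, a minimal usage sketch (not part of framcore; it assumes ListTimeIndex and FixedFrequencyTimeIndex are importable from framcore.timeindexes):

from datetime import datetime, timedelta

import numpy as np

from framcore.timeindexes import FixedFrequencyTimeIndex, ListTimeIndex  # assumed import path

# Three uneven periods in iso year 2025: week 1, weeks 2-3, week 4.
index = ListTimeIndex(
    datetime_list=[datetime.fromisocalendar(2025, w, 1) for w in (1, 2, 4, 5)],
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
values = np.array([1.0, 2.0, 3.0])

# Resample to weekly resolution over the same four weeks.
target_timeindex = FixedFrequencyTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),
    period_duration=timedelta(weeks=1),
    num_periods=4,
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
target = np.zeros(4)
index.write_into_fixed_frequency(target, target_timeindex, values)
# Expected: [1.0, 2.0, 2.0, 3.0] -- the two-week period is spread over both of its weeks.
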
__eq__(other) -> bool

Check if two ListTimeIndexes are equal.

Source code in framcore/timeindexes/ListTimeIndex.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if two ListTimeIndexes are equal."""
    if not isinstance(other, type(self)):
        return False
    return (
        self._datetime_list == other._datetime_list
        and self._extrapolate_first_point == other._extrapolate_first_point
        and self._extrapolate_last_point == other._extrapolate_last_point
    )
__hash__() -> int

Return the hash of the ListTimeIndex.

Source code in framcore/timeindexes/ListTimeIndex.py
def __hash__(self) -> int:
    """Return the hash of the ListTimeIndex."""
    return hash(
        (
            tuple(self._datetime_list),
            self._extrapolate_first_point,
            self._extrapolate_last_point,
        ),
    )
__init__(datetime_list: list[datetime], is_52_week_years: bool, extrapolate_first_point: bool, extrapolate_last_point: bool) -> None

Initialize the ListTimeIndex class.

Parameters:

Name Type Description Default
datetime_list list[datetime]

List of datetime objects defining the time index. Must be ordered and contain more than one element.

required
is_52_week_years bool

Whether to use 52-week years. If False, full iso calendar years are used.

required
extrapolate_first_point bool

Whether to extrapolate the first point.

required
extrapolate_last_point bool

Whether to extrapolate the last point.

required

Raises:

Type Description
ValueError

If datetime_list has fewer than two elements or is not ordered.

Source code in framcore/timeindexes/ListTimeIndex.py
def __init__(
    self,
    datetime_list: list[datetime],
    is_52_week_years: bool,
    extrapolate_first_point: bool,
    extrapolate_last_point: bool,
) -> None:
    """
    Initialize the ListTimeIndex class.

    Args:
        datetime_list (list[datetime]): List of datetime objects defining the time index. Must be ordered and contain more than one element.
        is_52_week_years (bool): Whether to use 52-week years. If False, full iso calendar years are used.
        extrapolate_first_point (bool): Whether to extrapolate the first point.
        extrapolate_last_point (bool): Whether to extrapolate the last point.

    Raises:
        ValueError: If datetime_list has fewer than two elements or is not ordered.

    """
    dts = datetime_list
    if len(dts) <= 1:
        msg = f"datetime_list must contain more than one element. Got {datetime_list}"
        raise ValueError(msg)
    if not all(dts[i] < dts[i + 1] for i in range(len(dts) - 1)):
        msg = f"All elements of datetime_list must be smaller/lower than the succeeding element. Dates must be ordered. Got {datetime_list}."
        raise ValueError(msg)
    if len(set(dt.tzinfo for dt in dts if dt is not None)) > 1:
        msg = f"Datetime objects in datetime_list have differing time zone information: {set(dt.tzinfo for dt in dts if dt is not None)}"
        raise ValueError(msg)
    if is_52_week_years and any(dts[i].isocalendar().week == 53 for i in range(len(dts))):  # noqa: PLR2004
        msg = "When is_52_week_years is True, datetime_list should not contain week 53 datetimes."
        raise ValueError(msg)

    self._datetime_list = datetime_list
    self._is_52_week_years = is_52_week_years
    self._extrapolate_first_point = extrapolate_first_point
    self._extrapolate_last_point = extrapolate_last_point
__repr__() -> str

Return the string representation of the ListTimeIndex.

Source code in framcore/timeindexes/ListTimeIndex.py
def __repr__(self) -> str:
    """Return the string representation of the ListTimeIndex."""
    return (
        "ListTimeIndex("
        f"datetimelist={self._datetime_list}, "
        f"extrapolate_first_point={self._extrapolate_first_point}, "
        f"extrapolate_last_point={self._extrapolate_last_point})"
    )
extrapolate_first_point() -> bool

Check if the TimeIndex should extrapolate the first point.

Source code in framcore/timeindexes/ListTimeIndex.py
def extrapolate_first_point(self) -> bool:
    """Check if the TimeIndex should extrapolate the first point."""
    return self._extrapolate_first_point
extrapolate_last_point() -> bool

Check if the TimeIndex should extrapolate the last point.

Source code in framcore/timeindexes/ListTimeIndex.py
def extrapolate_last_point(self) -> bool:
    """Check if the TimeIndex should extrapolate the last point."""
    return self._extrapolate_last_point
get_datetime_list() -> list[datetime]

Get a list of all period boundaries (num_periods + 1 datetimes).

Source code in framcore/timeindexes/ListTimeIndex.py
def get_datetime_list(self) -> list[datetime]:
    """Get a list of all periods (num_periods + 1 datetimes)."""
    return self._datetime_list.copy()
get_fingerprint() -> Fingerprint

Get the fingerprint of the ListTimeIndex.

Source code in framcore/timeindexes/ListTimeIndex.py
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint of the ListTimeIndex."""
    fingerprint = Fingerprint()
    fingerprint.add("datetime_list", self._datetime_list)
    fingerprint.add("is_52_week_years", self._is_52_week_years)
    fingerprint.add("extrapolate_first_point", self._extrapolate_first_point)
    fingerprint.add("extrapolate_last_point", self._extrapolate_last_point)
    return fingerprint
get_num_periods() -> int

Get the number of periods in the TimeIndex.

Source code in framcore/timeindexes/ListTimeIndex.py
def get_num_periods(self) -> int:
    """Get the number of periods in the TimeIndex."""
    return len(self._datetime_list) - 1
get_period_average(vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float

Get the average over the period from the vector.

Source code in framcore/timeindexes/ListTimeIndex.py
def get_period_average(self, vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float:
    """Get the average over the period from the vector."""
    self._check_type(vector, np.ndarray)
    self._check_type(start_time, datetime)
    self._check_type(duration, timedelta)
    self._check_type(is_52_week_years, bool)

    if vector.shape != (self.get_num_periods(),):
        msg = f"Vector shape {vector.shape} does not match number of periods {self.get_num_periods()} of timeindex ({self})."
        raise ValueError(msg)

    if not self.extrapolate_first_point():
        if start_time < self._datetime_list[0]:
            msg = f"start_time {start_time} is before start of timeindex {self._datetime_list[0]}, and extrapolate_first_point is False."
            raise ValueError(msg)
        if (start_time + duration) < self._datetime_list[0]:
            msg = f"End time {start_time + duration} is before start of timeindex {self._datetime_list[0]}, and extrapolate_first_point is False."
            raise ValueError(msg)

    if not self.extrapolate_last_point():
        if (start_time + duration) > self._datetime_list[-1]:
            msg = f"End time {start_time + duration} is after end of timeindex {self._datetime_list[-1]}, and extrapolate_last_point is False."
            raise ValueError(msg)
        if start_time > self._datetime_list[-1]:
            msg = f"start_time {start_time} is after end of timeindex {self._datetime_list[-1]}, and extrapolate_last_point is False."
            raise ValueError(msg)

    target_timeindex = FixedFrequencyTimeIndex(
        start_time=start_time,
        period_duration=duration,
        num_periods=1,
        is_52_week_years=is_52_week_years,
        extrapolate_first_point=self.extrapolate_first_point(),
        extrapolate_last_point=self.extrapolate_last_point(),
    )

    target_vector = np.zeros(1, dtype=vector.dtype)
    self.write_into_fixed_frequency(
        target_vector=target_vector,
        target_timeindex=target_timeindex,
        input_vector=vector,
    )
    return target_vector[0]
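
A hedged usage sketch (assuming ListTimeIndex is importable from framcore.timeindexes):

from datetime import datetime, timedelta

import numpy as np

from framcore.timeindexes import ListTimeIndex  # assumed import path

index = ListTimeIndex(
    datetime_list=[datetime.fromisocalendar(2025, w, 1) for w in (1, 2, 4)],
    is_52_week_years=False,
    extrapolate_first_point=False,
    extrapolate_last_point=False,
)
# Two periods: week 1 (value 1.0) and weeks 2-3 (value 4.0).
avg = index.get_period_average(
    vector=np.array([1.0, 4.0]),
    start_time=datetime.fromisocalendar(2025, 1, 1),
    duration=timedelta(weeks=3),
    is_52_week_years=False,
)
# Expected duration-weighted average over the three weeks: (1.0 + 4.0 + 4.0) / 3 = 3.0.
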
get_timezone() -> tzinfo | None

Get the timezone of the TimeIndex.

Source code in framcore/timeindexes/ListTimeIndex.py
def get_timezone(self) -> tzinfo | None:
    """Get the timezone of the TimeIndex."""
    return self._datetime_list[0].tzinfo
is_52_week_years() -> bool

Check if the TimeIndex is based on 52-week years.

Source code in framcore/timeindexes/ListTimeIndex.py
def is_52_week_years(self) -> bool:
    """Check if the TimeIndex is based on 52-week years."""
    return self._is_52_week_years
is_constant() -> bool

Return True if the time index is constant (single period and both extrapolation flags are True).

Returns

bool True if the time index is constant, False otherwise.

Source code in framcore/timeindexes/ListTimeIndex.py
def is_constant(self) -> bool:
    """
    Return True if the time index is constant (single period and both extrapolation flags are True).

    Returns
    -------
    bool
        True if the time index is constant, False otherwise.

    """
    return self.get_num_periods() == 1 and self.extrapolate_first_point() == self.extrapolate_last_point() is True
is_one_year() -> bool

Return True if the index covers exactly one whole year.

Source code in framcore/timeindexes/ListTimeIndex.py
def is_one_year(self) -> bool:
    """Return True if exactly one whole year."""
    if self._extrapolate_first_point or self._extrapolate_last_point:
        return False
    start_time = self._datetime_list[0]
    stop_time = self._datetime_list[-1]
    start_year, start_week, start_weekday = start_time.isocalendar()

    if not start_weekday == start_week == 1:
        return False

    if self._is_52_week_years:
        expected_stop_time = start_time + timedelta(weeks=52)
        if expected_stop_time.isocalendar().week == 53:  # noqa: PLR2004
            expected_stop_time += timedelta(weeks=1)
        return stop_time == expected_stop_time

    stop_year, stop_week, stop_weekday = stop_time.isocalendar()
    return (start_year + 1 == stop_year) and (stop_weekday == stop_week == 1)
is_whole_years() -> bool

Return True if index covers one or more full years.

Source code in framcore/timeindexes/ListTimeIndex.py
def is_whole_years(self) -> bool:
    """Return True if index covers one or more full years."""
    start_time = self._datetime_list[0]
    _, start_week, start_weekday = start_time.isocalendar()

    if not start_week == start_weekday == 1:
        return False

    stop_time = self._datetime_list[-1]

    if not self.is_52_week_years():
        _, stop_week, stop_weekday = stop_time.isocalendar()
        return stop_week == stop_weekday == 1

    total_period = self.total_duration()
    total_seconds = int(total_period.total_seconds())
    seconds_per_year = 52 * 7 * 24 * 3600

    return total_seconds % seconds_per_year == 0
total_duration() -> timedelta

Return the total duration covered by the time index.

Returns

timedelta The duration from the first to the last datetime in the index, skipping all week-53 periods if the index uses the 52-week time format.

Source code in framcore/timeindexes/ListTimeIndex.py
def total_duration(self) -> timedelta:
    """
    Return the total duration covered by the time index.

    Returns
    -------
    timedelta
        The duration from the first to the last datetime in the index, skipping all week-53 periods if the index uses the 52-week time format.

    """
    start_time = self._datetime_list[0]
    end_time = self._datetime_list[-1]
    return period_duration(start_time, end_time, self.is_52_week_years())
write_into_fixed_frequency(target_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex, input_vector: NDArray) -> None

Write the input vector into the target vector using the target FixedFrequencyTimeIndex.

Source code in framcore/timeindexes/ListTimeIndex.py
def write_into_fixed_frequency(
    self,
    target_vector: NDArray,
    target_timeindex: FixedFrequencyTimeIndex,
    input_vector: NDArray,
) -> None:
    """Write the input vector into the target vector using the target FixedFrequencyTimeIndex."""
    self._check_type(target_vector, np.ndarray)
    self._check_type(target_timeindex, FixedFrequencyTimeIndex)
    self._check_type(input_vector, np.ndarray)

    dts: list[datetime] = self._datetime_list

    durations = set(self._microseconds(period_duration(dts[i], dts[i + 1], self._is_52_week_years)) for i in range(len(dts) - 1))
    smallest_common_period_duration = functools.reduce(math.gcd, durations)

    num_periods_ff = self._microseconds(self.total_duration()) // smallest_common_period_duration
    input_vector_ff = np.zeros(num_periods_ff, dtype=target_vector.dtype)

    i_start_ff = 0
    for i in range(len(dts) - 1):
        num_periods = self._microseconds(period_duration(dts[i], dts[i + 1], self._is_52_week_years)) // smallest_common_period_duration
        i_stop_ff = i_start_ff + num_periods
        input_vector_ff[i_start_ff:i_stop_ff] = input_vector[i]
        i_start_ff = i_stop_ff

    input_timeindex_ff = FixedFrequencyTimeIndex(
        start_time=dts[0],
        num_periods=num_periods_ff,
        period_duration=timedelta(microseconds=smallest_common_period_duration),
        is_52_week_years=self.is_52_week_years(),
        extrapolate_first_point=self.extrapolate_first_point(),
        extrapolate_last_point=self.extrapolate_last_point(),
    )

    input_timeindex_ff.write_into_fixed_frequency(
        target_vector=target_vector,
        target_timeindex=target_timeindex,
        input_vector=input_vector_ff,
    )
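
The common resolution used internally is the greatest common divisor of the period durations, measured in microseconds. A standalone illustration of that step (stdlib only):

import functools
import math
from datetime import timedelta

durations = [timedelta(days=2), timedelta(days=3)]  # hypothetical uneven periods
micros = [int(d.total_seconds() * 1e6) for d in durations]
common = functools.reduce(math.gcd, micros)
print(timedelta(microseconds=common))  # 1 day -- the fixed frequency used internally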

ModelYear

ModelYear

Bases: SinglePeriodTimeIndex

ModelYear represents a period of 52 weeks starting from the iso calendar week 1 of a specified year. No extrapolation.

Source code in framcore/timeindexes/ModelYear.py
class ModelYear(SinglePeriodTimeIndex):
    """ModelYear represent a period of 52 weeks starting from the iso calendar week 1 of a specified year. No extrapolation."""

    def __init__(self, year: int) -> None:
        """
        Initialize ModelYear to a period of 52 weeks starting from the iso calendar week 1 of the specified year. No extrapolation.

        Args:
            year (int): Year to represent.

        """
        super().__init__(
            start_time=datetime.fromisocalendar(year, 1, 1),
            period_duration=timedelta(weeks=52),
            is_52_week_years=True,
            extrapolate_first_point=False,
            extrapolate_last_point=False,
        )
__init__(year: int) -> None

Initialize ModelYear to a period of 52 weeks starting from the iso calendar week 1 of the specified year. No extrapolation.

Parameters:

Name Type Description Default
year int

Year to represent.

required
Source code in framcore/timeindexes/ModelYear.py
def __init__(self, year: int) -> None:
    """
    Initialize ModelYear to a period of 52 weeks starting from the iso calendar week 1 of the specified year. No extrapolation.

    Args:
        year (int): Year to represent.

    """
    super().__init__(
        start_time=datetime.fromisocalendar(year, 1, 1),
        period_duration=timedelta(weeks=52),
        is_52_week_years=True,
        extrapolate_first_point=False,
        extrapolate_last_point=False,
    )
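
A small usage sketch (assuming ModelYear is importable from framcore.timeindexes):

from framcore.timeindexes import ModelYear  # assumed import path

year = ModelYear(2025)
assert year.get_num_periods() == 1
assert year.is_52_week_years()
assert not year.extrapolate_first_point() and not year.extrapolate_last_point()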

ModelYears

ModelYears

Bases: ListTimeIndex

ModelYears represents a collection of years as a ListTimeIndex. Extrapolation is enabled and full iso calendar is used.

Source code in framcore/timeindexes/ModelYears.py
class ModelYears(ListTimeIndex):
    """ModelYears represents a collection of years as a ListTimeIndex. Extrapolation is enabled and full iso calendar is used."""

    def __init__(self, years: list[int]) -> None:
        """
        Initialize ModelYears with a list of years.

        Args:
            years (list[int]): List of years to represent.

        """
        if not years:
            raise ValueError("At least one year must be provided.")

        datetime_list = [datetime.fromisocalendar(year, 1, 1) for year in years]
        datetime_list.append(datetime.fromisocalendar(years[-1] + 1, 1, 1))
        super().__init__(
            datetime_list=datetime_list,
            is_52_week_years=False,
            extrapolate_first_point=True,
            extrapolate_last_point=True,
        )
__init__(years: list[int]) -> None

Initialize ModelYears with a list of years.

Parameters:

Name Type Description Default
years list[int]

List of years to represent.

required
Source code in framcore/timeindexes/ModelYears.py
def __init__(self, years: list[int]) -> None:
    """
    Initialize ModelYears with a list of years.

    Args:
        years (list[int]): List of years to represent.

    """
    if not years:
        raise ValueError("At least one year must be provided.")

    datetime_list = [datetime.fromisocalendar(year, 1, 1) for year in years]
    datetime_list.append(datetime.fromisocalendar(years[-1] + 1, 1, 1))
    super().__init__(
        datetime_list=datetime_list,
        is_52_week_years=False,
        extrapolate_first_point=True,
        extrapolate_last_point=True,
    )
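
A small usage sketch (assuming ModelYears is importable from framcore.timeindexes). Note that one extra boundary is appended after the last year:

from framcore.timeindexes import ModelYears  # assumed import path

years = ModelYears([2025, 2030])
# Boundaries: week 1 of 2025, 2030 and 2031 -> two periods of unequal length.
assert years.get_num_periods() == 2
assert years.extrapolate_first_point() and years.extrapolate_last_point()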

OneYearProfileTimeIndex

OneYearProfileTimeIndex

Bases: ProfileTimeIndex

ProfileTimeIndex with fixed frequency over one year of either 52 or 53 weeks. No extrapolation (inherited from ProfileTimeIndex).

Attributes:

Name Type Description
period_duration timedelta

Duration of each period.

is_52_week_years bool

Whether to use 52-week years.

Source code in framcore/timeindexes/OneYearProfileTimeIndex.py
class OneYearProfileTimeIndex(ProfileTimeIndex):
    """
    ProfileTimeIndex with fixed frequency over one year of either 52 or 53 weeks. No extrapolation (inherited from ProfileTimeIndex).

    Attributes:
        period_duration (timedelta): Duration of each period.
        is_52_week_years (bool): Whether to use 52-week years.

    """

    def __init__(self, period_duration: timedelta, is_52_week_years: bool) -> None:
        """
        Initialize a ProfileTimeIndex with a fixed frequency over one year.

        If is_52_week_years is True, the period_duration must divide evenly into 52 weeks. If False, it must divide evenly into 53 weeks.
        We use 1982 for 52-week years and 1981 for 53-week years.

        Args:
            period_duration (timedelta): Duration of each period.
            is_52_week_years (bool): Whether to use 52-week years.

        """
        year = 1982 if is_52_week_years else 1981
        super().__init__(year, 1, period_duration, is_52_week_years)
__init__(period_duration: timedelta, is_52_week_years: bool) -> None

Initialize a ProfileTimeIndex with a fixed frequency over one year.

If is_52_week_years is True, the period_duration must divide evenly into 52 weeks. If False, it must divide evenly into 53 weeks. We use 1982 for 52-week years and 1981 for 53-week years.

Parameters:

Name Type Description Default
period_duration timedelta

Duration of each period.

required
is_52_week_years bool

Whether to use 52-week years.

required
Source code in framcore/timeindexes/OneYearProfileTimeIndex.py
def __init__(self, period_duration: timedelta, is_52_week_years: bool) -> None:
    """
    Initialize a ProfileTimeIndex with a fixed frequency over one year.

    If is_52_week_years is True, the period_duration must divide evenly into 52 weeks. If False, it must divide evenly into 53 weeks.
    We use 1982 for 52-week years and 1981 for 53-week years.

    Args:
        period_duration (timedelta): Duration of each period.
        is_52_week_years (bool): Whether to use 52-week years.

    """
    year = 1982 if is_52_week_years else 1981
    super().__init__(year, 1, period_duration, is_52_week_years)
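
A small usage sketch (assuming OneYearProfileTimeIndex is importable from framcore.timeindexes):

from datetime import timedelta

from framcore.timeindexes import OneYearProfileTimeIndex  # assumed import path

hourly = OneYearProfileTimeIndex(period_duration=timedelta(hours=1), is_52_week_years=True)
assert hourly.get_num_periods() == 52 * 7 * 24  # 8736 hourly periods in a 52-week year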

ProfileTimeIndex

ProfileTimeIndex

Bases: FixedFrequencyTimeIndex

ProfileTimeIndex represents one or more whole years with a fixed time resolution. No extrapolation.

Source code in framcore/timeindexes/ProfileTimeIndex.py
class ProfileTimeIndex(FixedFrequencyTimeIndex):
    """ProfileTimeIndex represent one or more whole years with fixed time resolution standard. No extrapolation."""

    def __init__(
        self,
        start_year: int,
        num_years: int,
        period_duration: timedelta,
        is_52_week_years: bool,
    ) -> None:
        """
        Initialize the ProfileTimeIndex. No extrapolation.

        Args:
            start_year (int): First year in the index.
            num_years (int): Number of years in the index.
            period_duration (timedelta): Duration of each period in the index.
            is_52_week_years (bool): Whether to use 52-week years. If False, full iso calendar years are used.

        """
        start_time = datetime.fromisocalendar(start_year, 1, 1)
        if not is_52_week_years:
            stop_time = datetime.fromisocalendar(start_year + num_years, 1, 1)
            num_periods = (stop_time - start_time).total_seconds() / period_duration.total_seconds()
        else:
            num_periods = timedelta(weeks=52 * num_years).total_seconds() / period_duration.total_seconds()
        if not num_periods.is_integer():
            msg = f"Number of periods derived from input arguments must be an integer/whole number. Got {num_periods}."
            raise ValueError(msg)
        num_periods = int(num_periods)
        super().__init__(
            start_time=start_time,
            period_duration=period_duration,
            num_periods=num_periods,
            is_52_week_years=is_52_week_years,
            extrapolate_first_point=False,
            extrapolate_last_point=False,
        )
__init__(start_year: int, num_years: int, period_duration: timedelta, is_52_week_years: bool) -> None

Initialize the ProfileTimeIndex. No extrapolation.

Parameters:

Name Type Description Default
start_year int

First year in the index.

required
num_years int

Number of years in the index.

required
period_duration timedelta

Duration of each period in the index.

required
is_52_week_years bool

Whether to use 52-week years. If False, full iso calendar years are used.

required
Source code in framcore/timeindexes/ProfileTimeIndex.py
def __init__(
    self,
    start_year: int,
    num_years: int,
    period_duration: timedelta,
    is_52_week_years: bool,
) -> None:
    """
    Initialize the ProfileTimeIndex. No extrapolation.

    Args:
        start_year (int): First year in the index.
        num_years (int): Number of years in the index.
        period_duration (timedelta): Duration of each period in the index.
        is_52_week_years (bool): Whether to use 52-week years. If False, full iso calendar years are used.

    """
    start_time = datetime.fromisocalendar(start_year, 1, 1)
    if not is_52_week_years:
        stop_time = datetime.fromisocalendar(start_year + num_years, 1, 1)
        num_periods = (stop_time - start_time).total_seconds() / period_duration.total_seconds()
    else:
        num_periods = timedelta(weeks=52 * num_years).total_seconds() / period_duration.total_seconds()
    if not num_periods.is_integer():
        msg = f"Number of periods derived from input arguments must be an integer/whole number. Got {num_periods}."
        raise ValueError(msg)
    num_periods = int(num_periods)
    super().__init__(
        start_time=start_time,
        period_duration=period_duration,
        num_periods=num_periods,
        is_52_week_years=is_52_week_years,
        extrapolate_first_point=False,
        extrapolate_last_point=False,
    )
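
The derived number of periods must be a whole number, otherwise a ValueError is raised. A hedged sketch (assuming ProfileTimeIndex is importable from framcore.timeindexes):

from datetime import timedelta

from framcore.timeindexes import ProfileTimeIndex  # assumed import path

# 3-hour periods divide evenly into a 52-week year: 52 * 7 * 24 / 3 = 2912.
index = ProfileTimeIndex(start_year=2025, num_years=1, period_duration=timedelta(hours=3), is_52_week_years=True)
assert index.get_num_periods() == 2912
# timedelta(hours=5) would not divide evenly and would raise ValueError.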

SinglePeriodTimeIndex

SinglePeriodTimeIndex

Bases: FixedFrequencyTimeIndex

FixedFrequencyTimeIndex with just a single step.

Source code in framcore/timeindexes/SinglePeriodTimeIndex.py
class SinglePeriodTimeIndex(FixedFrequencyTimeIndex):
    """FixedFrequencyTimeIndex with just one single step."""

    def __init__(
        self,
        start_time: datetime,
        period_duration: timedelta,
        is_52_week_years: bool = False,
        extrapolate_first_point: bool = False,
        extrapolate_last_point: bool = False,
    ) -> None:
        """
        Initialize a SinglePeriodTimeIndex with a single time period.

        Args:
            start_time (datetime): The start time of the period.
            period_duration (timedelta): The duration of the period.
            is_52_week_years (bool, optional): Whether to use 52-week years. Defaults to False.
            extrapolate_first_point (bool, optional): Whether to extrapolate the first point. Defaults to False.
            extrapolate_last_point (bool, optional): Whether to extrapolate the last point. Defaults to False.

        """
        super().__init__(
            start_time=start_time,
            period_duration=period_duration,
            num_periods=1,
            is_52_week_years=is_52_week_years,
            extrapolate_first_point=extrapolate_first_point,
            extrapolate_last_point=extrapolate_last_point,
        )
__init__(start_time: datetime, period_duration: timedelta, is_52_week_years: bool = False, extrapolate_first_point: bool = False, extrapolate_last_point: bool = False) -> None

Initialize a SinglePeriodTimeIndex with a single time period.

Parameters:

Name Type Description Default
start_time datetime

The start time of the period.

required
period_duration timedelta

The duration of the period.

required
is_52_week_years bool

Whether to use 52-week years. Defaults to False.

False
extrapolate_first_point bool

Whether to extrapolate the first point. Defaults to False.

False
extrapolate_last_point bool

Whether to extrapolate the last point. Defaults to False.

False
Source code in framcore/timeindexes/SinglePeriodTimeIndex.py
def __init__(
    self,
    start_time: datetime,
    period_duration: timedelta,
    is_52_week_years: bool = False,
    extrapolate_first_point: bool = False,
    extrapolate_last_point: bool = False,
) -> None:
    """
    Initialize a SinglePeriodTimeIndex with a single time period.

    Args:
        start_time (datetime): The start time of the period.
        period_duration (timedelta): The duration of the period.
        is_52_week_years (bool, optional): Whether to use 52-week years. Defaults to False.
        extrapolate_first_point (bool, optional): Whether to extrapolate the first point. Defaults to False.
        extrapolate_last_point (bool, optional): Whether to extrapolate the last point. Defaults to False.

    """
    super().__init__(
        start_time=start_time,
        period_duration=period_duration,
        num_periods=1,
        is_52_week_years=is_52_week_years,
        extrapolate_first_point=extrapolate_first_point,
        extrapolate_last_point=extrapolate_last_point,
    )
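
A small usage sketch (assuming SinglePeriodTimeIndex is importable from framcore.timeindexes):

from datetime import datetime, timedelta

from framcore.timeindexes import SinglePeriodTimeIndex  # assumed import path

quarter = SinglePeriodTimeIndex(
    start_time=datetime.fromisocalendar(2025, 1, 1),
    period_duration=timedelta(weeks=13),
)
assert quarter.get_num_periods() == 1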

TimeIndex

TimeIndex

Bases: Base, ABC

TimeIndex interface for TimeVectors.

Source code in framcore/timeindexes/TimeIndex.py
class TimeIndex(Base, ABC):
    """TimeIndex interface for TimeVectors."""

    @abstractmethod
    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if two TimeIndexes are equal."""
        pass

    @abstractmethod
    def __hash__(self) -> int:
        """Compute hash value.."""
        pass

    @abstractmethod
    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint of the TimeIndex."""
        pass

    @abstractmethod
    def get_timezone(self) -> tzinfo | None:
        """Get the timezone of the TimeIndex."""
        pass

    @abstractmethod
    def get_num_periods(self) -> int:
        """Get the number of periods in the TimeIndex."""
        pass

    @abstractmethod
    def is_52_week_years(self) -> bool:
        """Check if the TimeIndex is based on 52-week years."""
        pass

    @abstractmethod
    def is_one_year(self) -> bool:
        """
        Check if the TimeIndex represents a single year.

        Must be False if extrapolate_first_point and/or extrapolate_last_point is True.

        When True, the index can be repeated in profiles.
        """
        pass

    @abstractmethod
    def is_whole_years(self) -> bool:
        """Check if the TimeIndex represents whole years."""
        pass

    @abstractmethod
    def extrapolate_first_point(self) -> bool:
        """Check if the TimeIndex should extrapolate the first point. Must be False if is_one_year is True."""
        pass

    @abstractmethod
    def extrapolate_last_point(self) -> bool:
        """Check if the TimeIndex should extrapolate the last point. Must be False if is_one_year is True."""
        pass

    @abstractmethod
    def get_period_average(self, vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float:
        """Get the average over the period from the vector."""
        pass

    @abstractmethod
    def write_into_fixed_frequency(
        self,
        target_vector: NDArray,
        target_timeindex: FixedFrequencyTimeIndex,
        input_vector: NDArray,
    ) -> None:
        """
        Write the input vector into the target vector based on the target FixedFrequencyTimeIndex.

        Main functionality in FRAM to extract data to the correct time period and resolution.
        A conversion of the data into a specific time period and resolution follows these steps:
        - If the TimeIndex is not a FixedFrequencyTimeIndex, convert the TimeIndex and the vector to this format.
        - Then convert the data according to the target TimeIndex.
        - It is easier to do time series operations efficiently between FixedFrequencyTimeIndexes,
            and we only need to implement all the other conversion functionality once here.
            For example, converting between 52-week and ISO-time TimeVectors, selecting a period, extrapolation or changing the resolution.
        - When we implement a new TimeIndex, we only need to implement the conversion to FixedFrequencyTimeIndex,
            and the rest of the conversion functionality can be reused.

        """
        pass

    @abstractmethod
    def is_constant(self) -> bool:
        """Check if the TimeIndex is constant."""
        pass
__eq__(other) -> bool abstractmethod

Check if two TimeIndexes are equal.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if two TimeIndexes are equal."""
    pass
__hash__() -> int abstractmethod

Compute the hash value.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def __hash__(self) -> int:
    """Compute hash value.."""
    pass
extrapolate_first_point() -> bool abstractmethod

Check if the TimeIndex should extrapolate the first point. Must be False if is_one_year is True.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def extrapolate_first_point(self) -> bool:
    """Check if the TimeIndex should extrapolate the first point. Must be False if is_one_year is True."""
    pass
extrapolate_last_point() -> bool abstractmethod

Check if the TimeIndex should extrapolate the last point. Must be False if is_one_year is True.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def extrapolate_last_point(self) -> bool:
    """Check if the TimeIndex should extrapolate the last point. Must be False if is_one_year is True."""
    pass
get_fingerprint() -> Fingerprint abstractmethod

Get the fingerprint of the TimeIndex.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint of the TimeIndex."""
    pass
get_num_periods() -> int abstractmethod

Get the number of periods in the TimeIndex.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def get_num_periods(self) -> int:
    """Get the number of periods in the TimeIndex."""
    pass
get_period_average(vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float abstractmethod

Get the average over the period from the vector.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def get_period_average(self, vector: NDArray, start_time: datetime, duration: timedelta, is_52_week_years: bool) -> float:
    """Get the average over the period from the vector."""
    pass
get_timezone() -> tzinfo | None abstractmethod

Get the timezone of the TimeIndex.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def get_timezone(self) -> tzinfo | None:
    """Get the timezone of the TimeIndex."""
    pass
is_52_week_years() -> bool abstractmethod

Check if the TimeIndex is based on 52-week years.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def is_52_week_years(self) -> bool:
    """Check if the TimeIndex is based on 52-week years."""
    pass
is_constant() -> bool abstractmethod

Check if the TimeIndex is constant.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def is_constant(self) -> bool:
    """Check if the TimeIndex is constant."""
    pass
is_one_year() -> bool abstractmethod

Check if the TimeIndex represents a single year.

Must be False if extrapolate_first_point and/or extrapolate_last_point is True.

When True, the index can be repeated in profiles.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def is_one_year(self) -> bool:
    """
    Check if the TimeIndex represents a single year.

    Must be False if extrapolate_first_point and/or extrapolate_last_point is True.

    When True, the index can be repeated in profiles.
    """
    pass
is_whole_years() -> bool abstractmethod

Check if the TimeIndex represents whole years.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def is_whole_years(self) -> bool:
    """Check if the TimeIndex represents whole years."""
    pass
write_into_fixed_frequency(target_vector: NDArray, target_timeindex: FixedFrequencyTimeIndex, input_vector: NDArray) -> None abstractmethod

Write the input vector into the target vector based on the target FixedFrequencyTimeIndex.

Main functionality in FRAM to extract data to the correct time period and resolution. A conversion of the data into a specific time period and resolution follows these steps:

- If the TimeIndex is not a FixedFrequencyTimeIndex, convert the TimeIndex and the vector to this format.
- Then convert the data according to the target TimeIndex.
- It is easier to do time series operations efficiently between FixedFrequencyTimeIndexes, and we only need to implement all the other conversion functionality once here. For example, converting between 52-week and ISO-time TimeVectors, selecting a period, extrapolation or changing the resolution.
- When we implement a new TimeIndex, we only need to implement the conversion to FixedFrequencyTimeIndex, and the rest of the conversion functionality can be reused.

Source code in framcore/timeindexes/TimeIndex.py
@abstractmethod
def write_into_fixed_frequency(
    self,
    target_vector: NDArray,
    target_timeindex: FixedFrequencyTimeIndex,
    input_vector: NDArray,
) -> None:
    """
    Write the input vector into the target vector based on the target FixedFrequencyTimeIndex.

    Main functionality in FRAM to extract data to the correct time period and resolution.
    A conversion of the data into a specific time period and resolution follows these steps:
    - If the TimeIndex is not a FixedFrequencyTimeIndex, convert the TimeIndex and the vector to this format.
    - Then convert the data according to the target TimeIndex.
    - It is easier to do time series operations efficiently between FixedFrequencyTimeIndexes,
        and we only need to implement all the other conversion functionality once here.
        For example, converting between 52-week and ISO-time TimeVectors, selecting a period, extrapolation or changing the resolution.
    - When we implement a new TimeIndex, we only need to implement the conversion to FixedFrequencyTimeIndex,
        and the rest of the conversion functionality can be reused.

    """
    pass

WeeklyIndex

WeeklyIndex

Bases: ProfileTimeIndex

ProfileTimeIndex covering one or more whole years at weekly resolution, using either 52-week years or full iso calendar years.

No extrapolation (inherited from ProfileTimeIndex).

Source code in framcore/timeindexes/WeeklyIndex.py
class WeeklyIndex(ProfileTimeIndex):
    """
    ProfileTimeIndex covering one or more whole years at weekly resolution, using either 52-week years or full iso calendar years.

    No extrapolation (inherited from ProfileTimeIndex).
    """

    def __init__(
        self,
        start_year: int,
        num_years: int,
        is_52_week_years: bool = True,
    ) -> None:
        """
        Initialize WeeklyIndex covering one or more whole years at weekly resolution, using either 52-week years or full iso calendar years.

        Args:
            start_year (int): First year in the index.
            num_years (int): Number of years in the index.
            is_52_week_years (bool, optional): Whether to use 52-week years. If False, full iso calendar years are used. Defaults to True.

        """
        super().__init__(
            start_year=start_year,
            num_years=num_years,
            period_duration=timedelta(weeks=1),
            is_52_week_years=is_52_week_years,
        )
__init__(start_year: int, num_years: int, is_52_week_years: bool = True) -> None

Initialize WeeklyIndex covering one or more whole years at weekly resolution, using either 52-week years or full iso calendar years.

Parameters:

Name Type Description Default
start_year int

First year in the index.

required
num_years int

Number of years in the index.

required
is_52_week_years bool

Whether to use 52-week years. If False, full iso calendar years are used. Defaults to True.

True
Source code in framcore/timeindexes/WeeklyIndex.py
def __init__(
    self,
    start_year: int,
    num_years: int,
    is_52_week_years: bool = True,
) -> None:
    """
    Initialize WeeklyIndex covering one or more whole years at weekly resolution, using either 52-week years or full iso calendar years.

    Args:
        start_year (int): First year in the index.
        num_years (int): Number of years in the index.
        is_52_week_years (bool, optional): Whether to use 52-week years. If False, full iso calendar years are used. Defaults to True.

    """
    super().__init__(
        start_year=start_year,
        num_years=num_years,
        period_duration=timedelta(weeks=1),
        is_52_week_years=is_52_week_years,
    )
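
A small usage sketch (assuming WeeklyIndex is importable from framcore.timeindexes):

from framcore.timeindexes import WeeklyIndex  # assumed import path

weekly = WeeklyIndex(start_year=2025, num_years=2)  # 52-week years by default
assert weekly.get_num_periods() == 2 * 52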

timevectors

ConstantTimeVector

ConstantTimeVector

Bases: TimeVector

ConstantTimeVector class for TimeVectors that are constant over time. Subclass of TimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
class ConstantTimeVector(TimeVector):
    """ConstantTimeVector class for TimeVectors that are constant over time. Subclass of TimeVector."""

    def __init__(
        self,
        scalar: float,
        unit: str | None = None,
        is_max_level: bool | None = None,
        is_zero_one_profile: bool | None = None,
        reference_period: ReferencePeriod | None = None,
    ) -> None:
        """
        Initialize the ConstantTimeVector class.

        Args:
            scalar (float): Constant float value of the TimeVector.
            unit (str | None): Unit of the value in the vector.
            is_max_level (bool | None): Whether the vector represents the maximum level, average level given a
                                        reference period, or not a level at all.
            is_zero_one_profile (bool | None): Whether the vector represents a profile with values between 0 and 1, a
                                               profile with values averaging to 1 over a given reference period, or is
                                               not a profile.
            reference_period (ReferencePeriod | None, optional): Given reference period if the vector represents average
                                                                 level or mean one profile. Defaults to None.

        Raises:
            ValueError: When both is_max_level and is_zero_one_profile are not None. This would mean the TimeVector
                        represents both a level and a profile, which is not allowed.

        """
        self._scalar = float(scalar)
        self._unit = unit
        self._is_max_level = is_max_level
        self._is_zero_one_profile = is_zero_one_profile
        self._reference_period = reference_period

        self._check_type(scalar, (float, np.float32))  # TODO: Accept np.float32 elsewhere as well
        self._check_type(unit, (str, type(None)))
        self._check_type(is_max_level, (bool, type(None)))
        self._check_type(is_zero_one_profile, (bool, type(None)))
        self._check_type(reference_period, (ReferencePeriod, type(None)))

        self._check_is_level_or_profile()

    def __repr__(self) -> str:
        """Return the string representation of the ConstantTimeVector."""
        ref_period = None
        if self._reference_period is not None:
            start_year = self._reference_period.get_start_year()
            num_years = self._reference_period.get_num_years()
            ref_period = f"{start_year}-{start_year + num_years - 1}"
        unit = f", unit={self._unit}" if self._unit is not None else ""
        ref_period = f", reference_period={ref_period}" if ref_period is not None else ""
        is_max_level = f", is_max_level={self._is_max_level}"
        return f"ConstantTimeVector({self._scalar}{unit}{ref_period}{is_max_level})"

    def __eq__(self, other: object) -> bool:
        """Check equality between two ConstantTimeVector objects."""
        if not isinstance(other, ConstantTimeVector):
            return False
        return (
            self._scalar == other._scalar
            and self._unit == other._unit
            and self._is_max_level == other._is_max_level
            and self._is_zero_one_profile == other._is_zero_one_profile
            and self._reference_period == other._reference_period
        )

    def __hash__(self) -> int:
        """Compute the hash of the ConstantTimeVector."""
        return hash((self._scalar, self._unit, self._is_max_level, self._is_zero_one_profile, self._reference_period))

    def get_expr_str(self) -> str:
        """Simpler representation of self to show in Expr."""
        if self._unit:
            return f"{self._scalar} {self._unit}"

        return f"{self._scalar}"

    def get_vector(self, is_float32: bool) -> NDArray:
        """Get the values of the TimeVector."""
        dtype = np.float32 if is_float32 else np.float64
        out = np.zeros(1, dtype=dtype)
        out[0] = self._scalar
        return out

    def get_timeindex(self) -> ConstantTimeIndex:
        """Get the TimeIndex of the TimeVector."""
        return ConstantTimeIndex()

    def is_constant(self) -> bool:
        """Check if the TimeVector is constant."""
        return True

    def is_max_level(self) -> bool | None:
        """Check if TimeVector is a level representing maximum Volume/Capacity."""
        return self._is_max_level

    def is_zero_one_profile(self) -> bool | None:
        """Check if TimeVector is a profile with values between zero and one."""
        return self._is_zero_one_profile

    def get_unit(self) -> str | None:
        """Get the unit of the TimeVector."""
        return self._unit

    def get_reference_period(self) -> ReferencePeriod | None:
        """Get the reference period of the TimeVector."""
        if self._reference_period is not None:
            return self._reference_period
        if self.is_zero_one_profile() is False:
            timeindex = self.get_timeindex()
            return timeindex.get_reference_period()
        return None

    def get_fingerprint(self) -> Fingerprint:
        """Get the Fingerprint of the TimeVector."""
        return self.get_fingerprint_default()

    def get_loader(self) -> None:
        """Interface method Not applicable for this type. Return None."""
        return
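
A small usage sketch (assuming ConstantTimeVector is importable from framcore.timevectors):

import numpy as np

from framcore.timevectors import ConstantTimeVector  # assumed import path

capacity = ConstantTimeVector(1000.0, unit="MW", is_max_level=True)
assert capacity.is_constant()
assert capacity.get_unit() == "MW"
assert capacity.get_vector(is_float32=True).dtype == np.float32  # array([1000.], dtype=float32)
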
__eq__(other: object) -> bool

Check equality between two ConstantTimeVector objects.

Source code in framcore/timevectors/ConstantTimeVector.py
def __eq__(self, other: object) -> bool:
    """Check equality between two ConstantTimeVector objects."""
    if not isinstance(other, ConstantTimeVector):
        return False
    return (
        self._scalar == other._scalar
        and self._unit == other._unit
        and self._is_max_level == other._is_max_level
        and self._is_zero_one_profile == other._is_zero_one_profile
        and self._reference_period == other._reference_period
    )
__hash__() -> int

Compute the hash of the ConstantTimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def __hash__(self) -> int:
    """Compute the hash of the ConstantTimeVector."""
    return hash((self._scalar, self._unit, self._is_max_level, self._is_zero_one_profile, self._reference_period))
__init__(scalar: float, unit: str | None = None, is_max_level: bool | None = None, is_zero_one_profile: bool | None = None, reference_period: ReferencePeriod | None = None) -> None

Initialize the ConstantTimeVector class.

Parameters:

Name Type Description Default
scalar float

Constant float value of the TimeVector.

required
unit str | None

Unit of the value in the vector.

None
is_max_level bool | None

Whether the vector represents the maximum level, average level given a reference period, or not a level at all.

None
is_zero_one_profile bool | None

Whether the vector represents a profile with values between 0 and 1, a profile with values averaging to 1 over a given reference period, or is not a profile.

None
reference_period ReferencePeriod | None

Given reference period if the vector represents average level or mean one profile. Defaults to None.

None

Raises:

Type Description
ValueError

When both is_max_level and is_zero_one_profile are not None. This would mean the TimeVector represents both a level and a profile, which is not allowed.

Source code in framcore/timevectors/ConstantTimeVector.py
def __init__(
    self,
    scalar: float,
    unit: str | None = None,
    is_max_level: bool | None = None,
    is_zero_one_profile: bool | None = None,
    reference_period: ReferencePeriod | None = None,
) -> None:
    """
    Initialize the ConstantTimeVector class.

    Args:
        scalar (float): Constant float value of the TimeVector.
        unit (str | None): Unit of the value in the vector.
        is_max_level (bool | None): Whether the vector represents the maximum level, average level given a
                                    reference period, or not a level at all.
        is_zero_one_profile (bool | None): Whether the vector represents a profile with values between 0 and 1, a
                                           profile with values averaging to 1 over a given reference period, or is
                                           not a profile.
        reference_period (ReferencePeriod | None, optional): Given reference period if the vector represents average
                                                             level or mean one profile. Defaults to None.

    Raises:
        ValueError: When both is_max_level and is_zero_one_profile are not None. This would mean the TimeVector
                    represents both a level and a profile, which is not allowed.

    """
    self._scalar = float(scalar)
    self._unit = unit
    self._is_max_level = is_max_level
    self._is_zero_one_profile = is_zero_one_profile
    self._reference_period = reference_period

    self._check_type(scalar, (float, np.float32))  # TODO: Accept np.float32 elsewhere as well
    self._check_type(unit, (str, type(None)))
    self._check_type(is_max_level, (bool, type(None)))
    self._check_type(is_zero_one_profile, (bool, type(None)))
    self._check_type(reference_period, (ReferencePeriod, type(None)))

    self._check_is_level_or_profile()
__repr__() -> str

Return the string representation of the ConstantTimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def __repr__(self) -> str:
    """Return the string representation of the ConstantTimeVector."""
    ref_period = None
    if self._reference_period is not None:
        start_year = self._reference_period.get_start_year()
        num_years = self._reference_period.get_num_years()
        ref_period = f"{start_year}-{start_year + num_years - 1}"
    unit = f", unit={self._unit}" if self._unit is not None else ""
    ref_period = f", reference_period={ref_period}" if ref_period is not None else ""
    is_max_level = f", is_max_level={self._is_max_level}"
    return f"ConstantTimeVector({self._scalar}{unit}{ref_period}{is_max_level})"
get_expr_str() -> str

Simpler representation of self to show in Expr.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_expr_str(self) -> str:
    """Simpler representation of self to show in Expr."""
    if self._unit:
        return f"{self._scalar} {self._unit}"

    return f"{self._scalar}"
get_fingerprint() -> Fingerprint

Get the Fingerprint of the TimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_fingerprint(self) -> Fingerprint:
    """Get the Fingerprint of the TimeVector."""
    return self.get_fingerprint_default()
get_loader() -> None

Interface method. Not applicable for this type. Returns None.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_loader(self) -> None:
    """Interface method Not applicable for this type. Return None."""
    return
get_reference_period() -> ReferencePeriod | None

Get the reference period of the TimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_reference_period(self) -> ReferencePeriod | None:
    """Get the reference period of the TimeVector."""
    if self._reference_period is not None:
        return self._reference_period
    if self.is_zero_one_profile() is False:
        timeindex = self.get_timeindex()
        return timeindex.get_reference_period()
    return None
get_timeindex() -> ConstantTimeIndex

Get the TimeIndex of the TimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_timeindex(self) -> ConstantTimeIndex:
    """Get the TimeIndex of the TimeVector."""
    return ConstantTimeIndex()
get_unit() -> str | None

Get the unit of the TimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_unit(self) -> str | None:
    """Get the unit of the TimeVector."""
    return self._unit
get_vector(is_float32: bool) -> NDArray

Get the values of the TimeVector.

Source code in framcore/timevectors/ConstantTimeVector.py
def get_vector(self, is_float32: bool) -> NDArray:
    """Get the values of the TimeVector."""
    dtype = np.float32 if is_float32 else np.float64
    out = np.zeros(1, dtype=dtype)
    out[0] = self._scalar
    return out
is_constant() -> bool

Check if the TimeVector is constant.

Source code in framcore/timevectors/ConstantTimeVector.py
def is_constant(self) -> bool:
    """Check if the TimeVector is constant."""
    return True
is_max_level() -> bool | None

Check if TimeVector is a level representing maximum Volume/Capacity.

Source code in framcore/timevectors/ConstantTimeVector.py
def is_max_level(self) -> bool | None:
    """Check if TimeVector is a level representing maximum Volume/Capacity."""
    return self._is_max_level
is_zero_one_profile() -> bool | None

Check if TimeVector is a profile with values between zero and one.

Source code in framcore/timevectors/ConstantTimeVector.py
def is_zero_one_profile(self) -> bool | None:
    """Check if TimeVector is a profile with values between zero and one."""
    return self._is_zero_one_profile
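
A short usage sketch (names and values are illustrative; it assumes ConstantTimeVector and ReferencePeriod are importable from framcore.timevectors):

from framcore.timevectors import ConstantTimeVector, ReferencePeriod

# A level: a constant installed capacity of 1200.0 MW, flagged as a maximum level.
capacity = ConstantTimeVector(1200.0, unit="MW", is_max_level=True)

# A profile: a constant mean-one profile over 1991-2020
# (is_zero_one_profile=False marks a profile that averages to 1 over the reference period).
profile = ConstantTimeVector(
    1.0,
    is_zero_one_profile=False,
    reference_period=ReferencePeriod(1991, 30),
)

print(capacity)                              # ConstantTimeVector(1200.0, unit=MW, is_max_level=True)
print(profile.get_vector(is_float32=True))   # [1.]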

LinearTransformTimeVector

LinearTransformTimeVector

Bases: TimeVector

LinearTransformTimeVector represents a TimeVector as scale * timevector + shift. Immutable.

Source code in framcore/timevectors/LinearTransformTimeVector.py
class LinearTransformTimeVector(TimeVector):
    """LinearTransformTimeVector represents a TimeVector as scale * timevector + shift. Immutable."""

    def __init__(
        self,
        timevector: TimeVector,
        scale: float,
        shift: float,
        unit: str | None,
        is_max_level: bool | None = None,
        is_zero_one_profile: bool | None = None,
        reference_period: ReferencePeriod | None = None,
    ) -> None:
        """
        Initialize LinearTransformTimeVector with a TimeVector, scale and shift.

        May also override unit, is_max_level, is_zero_one_profile, and reference_period of the original timevector.

        Args:
            timevector (TimeVector): TimeVector.
            scale (float): Scale factor.
            shift (float): Shift value.
            unit (str | None): Unit of the values in the transformed vector.
            is_max_level (bool | None, optional): Whether the transformed vector represents the maximum level,
                                                  average level given a reference period, or not a level at all.
                                                  Defaults to None.
            is_zero_one_profile (bool | None, optional): Whether the transformed vector represents a profile with values
                                                        between 0 and 1, a profile with values averaging to 1 over a given
                                                        reference period, or is not a profile. Defaults to None.
            reference_period (ReferencePeriod | None, optional): Given reference period if the transformed vector
                                                                 represents average level or mean one profile. Defaults to None.

        """
        self._check_type(timevector, TimeVector)
        self._check_type(scale, float)
        self._check_type(shift, float)
        self._check_type(unit, (str, type(None)))
        self._check_type(is_max_level, (bool, type(None)))
        self._check_type(is_zero_one_profile, (bool, type(None)))
        self._check_type(reference_period, (ReferencePeriod, type(None)))
        self._timevector = timevector
        self._scale = scale
        self._shift = shift
        self._unit = unit
        self._is_max_level = is_max_level
        self._is_zero_one_profile = is_zero_one_profile
        self._reference_period = reference_period

        self._check_is_level_or_profile()

    def get_vector(self, is_float32: bool) -> NDArray:
        """Get the values of the TimeVector."""
        vector = self._timevector.get_vector(is_float32)
        if self._scale == 1.0 and self._shift == 0.0:
            return vector
        out = vector.copy()
        if self._scale != 1.0:
            np.multiply(out, self._scale, out=out)
        if self._shift != 0.0:
            np.add(out, self._shift, out=out)
        return out

    def get_fingerprint(self) -> Fingerprint:
        """Get the Fingerprint of the TimeVector."""
        return self.get_fingerprint_default()

    def get_timeindex(self) -> TimeIndex:
        """Get the TimeIndex of the TimeVector."""
        return self._timevector.get_timeindex()

    def is_constant(self) -> bool:
        """Check if the TimeVector is constant."""
        return self._timevector.is_constant()

    def is_max_level(self) -> bool | None:
        """Check if TimeVector is a level representing maximum Volume/Capacity."""
        return self._is_max_level

    def is_zero_one_profile(self) -> bool | None:
        """Check if TimeVector is a profile with values between zero and one."""
        return self._is_zero_one_profile

    def get_unit(self) -> str | None:
        """Get the unit of the TimeVector."""
        return self._unit

    def get_reference_period(self) -> ReferencePeriod | None:
        """Get the reference period of the TimeVector."""
        return self._reference_period

    def get_loader(self) -> TimeVectorLoader | None:
        """Call get_loader on underlying time vector."""
        return self._timevector.get_loader()

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if self and other are equal."""
        if not isinstance(other, type(self)):
            return False
        return (
            self._timevector == other._timevector
            and self._scale == other._scale
            and self._shift == other._shift
            and self._unit == other._unit
            and self._is_max_level == other._is_max_level
            and self._is_zero_one_profile == other._is_zero_one_profile
            and self._reference_period == other._reference_period
        )

    def __hash__(self) -> int:
        """Compute the hash of the LinearTransformTimeVector."""
        return hash(
            (
                self._timevector,
                self._scale,
                self._shift,
                self._unit,
                self._is_max_level,
                self._is_zero_one_profile,
                self._reference_period,
            ),
        )
__eq__(other) -> bool

Check if self and other are equal.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if self and other are equal."""
    if not isinstance(other, type(self)):
        return False
    return (
        self._timevector == other._timevector
        and self._scale == other._scale
        and self._shift == other._shift
        and self._unit == other._unit
        and self._is_max_level == other._is_max_level
        and self._is_zero_one_profile == other._is_zero_one_profile
        and self._reference_period == other._reference_period
    )
__hash__() -> int

Compute the hash of the LinearTransformTimeVector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def __hash__(self) -> int:
    """Compute the hash of the LinearTransformTimeVector."""
    return hash(
        (
            self._timevector,
            self._scale,
            self._shift,
            self._unit,
            self._is_max_level,
            self._is_zero_one_profile,
            self._reference_period,
        ),
    )
__init__(timevector: TimeVector, scale: float, shift: float, unit: str | None, is_max_level: bool | None = None, is_zero_one_profile: bool | None = None, reference_period: ReferencePeriod | None = None) -> None

Initialize LinearTransformTimeVector with a TimeVector, scale and shift.

May also override unit, is_max_level, is_zero_one_profile and reference_period of the original timevector.

Parameters:

Name Type Description Default
timevector TimeVector

TimeVector.

required
scale float

Scale factor.

required
shift float

Shift value.

required
unit str | None

Unit of the values in the transformed vector.

required
is_max_level bool | None

Whether the transformed vector represents the maximum level, average level given a reference period, or not a level at all. Defaults to None.

None
is_zero_one_profile bool | None

Whether the transformed vector represents a profile with values between 0 and 1, a profile with values averaging to 1 over a given reference period, or is not a profile. Defaults to None.

None
reference_period ReferencePeriod | None

Given reference period if the transformed vector represents average level or mean one profile. Defaults to None.

None
Source code in framcore/timevectors/LinearTransformTimeVector.py
def __init__(
    self,
    timevector: TimeVector,
    scale: float,
    shift: float,
    unit: str | None,
    is_max_level: bool | None = None,
    is_zero_one_profile: bool | None = None,
    reference_period: ReferencePeriod | None = None,
) -> None:
    """
    Initialize LinearTransformTimeVector with a TimeVector, scale and shift.

    May also override unit, is_max_level, is_zero_one_profile, and reference_period of the original timevector.

    Args:
        timevector (TimeVector): TimeVector.
        scale (float): Scale factor.
        shift (float): Shift value.
        unit (str | None): Unit of the values in the transformed vector.
        is_max_level (bool | None, optional): Whether the transformed vector represents the maximum level,
                                              average level given a reference period, or not a level at all.
                                              Defaults to None.
        is_zero_one_profile (bool | None, optional): Whether the transformed vector represents a profile with values
                                                    between 0 and 1, a profile with values averaging to 1 over a given
                                                    reference period, or is not a profile. Defaults to None.
        reference_period (ReferencePeriod | None, optional): Given reference period if the transformed vector
                                                             represents average level or mean one profile. Defaults to None.

    """
    self._check_type(timevector, TimeVector)
    self._check_type(scale, float)
    self._check_type(shift, float)
    self._check_type(unit, (str, type(None)))
    self._check_type(is_max_level, (bool, type(None)))
    self._check_type(is_zero_one_profile, (bool, type(None)))
    self._check_type(reference_period, (ReferencePeriod, type(None)))
    self._timevector = timevector
    self._scale = scale
    self._shift = shift
    self._unit = unit
    self._is_max_level = is_max_level
    self._is_zero_one_profile = is_zero_one_profile
    self._reference_period = reference_period

    self._check_is_level_or_profile()
get_fingerprint() -> Fingerprint

Get the Fingerprint of the TimeVector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def get_fingerprint(self) -> Fingerprint:
    """Get the Fingerprint of the TimeVector."""
    return self.get_fingerprint_default()
get_loader() -> TimeVectorLoader | None

Call get_loader on underlying time vector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def get_loader(self) -> TimeVectorLoader | None:
    """Call get_loader on underlying time vector."""
    return self._timevector.get_loader()
get_reference_period() -> ReferencePeriod | None

Get the reference period of the TimeVector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def get_reference_period(self) -> ReferencePeriod | None:
    """Get the reference period of the TimeVector."""
    return self._reference_period
get_timeindex() -> TimeIndex

Get the TimeIndex of the TimeVector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def get_timeindex(self) -> TimeIndex:
    """Get the TimeIndex of the TimeVector."""
    return self._timevector.get_timeindex()
get_unit() -> str | None

Get the unit of the TimeVector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def get_unit(self) -> str | None:
    """Get the unit of the TimeVector."""
    return self._unit
get_vector(is_float32: bool) -> NDArray

Get the values of the TimeVector.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def get_vector(self, is_float32: bool) -> NDArray:
    """Get the values of the TimeVector."""
    vector = self._timevector.get_vector(is_float32)
    if self._scale == 1.0 and self._shift == 0.0:
        return vector
    out = vector.copy()
    if self._scale != 1.0:
        np.multiply(out, self._scale, out=out)
    if self._shift != 0.0:
        np.add(out, self._shift, out=out)
    return out
is_constant() -> bool

Check if the TimeVector is constant.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def is_constant(self) -> bool:
    """Check if the TimeVector is constant."""
    return self._timevector.is_constant()
is_max_level() -> bool | None

Check if TimeVector is a level representing maximum Volume/Capacity.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def is_max_level(self) -> bool | None:
    """Check if TimeVector is a level representing maximum Volume/Capacity."""
    return self._is_max_level
is_zero_one_profile() -> bool | None

Check if TimeVector is a profile with values between zero and one.

Source code in framcore/timevectors/LinearTransformTimeVector.py
def is_zero_one_profile(self) -> bool | None:
    """Check if TimeVector is a profile with values between zero and one."""
    return self._is_zero_one_profile
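
For example, a unit conversion can be expressed as a linear transform (a sketch; values are illustrative):

from framcore.timevectors import ConstantTimeVector, LinearTransformTimeVector

base = ConstantTimeVector(500.0, unit="MW", is_max_level=True)

# Re-express the same level in GW: scale by 0.001, no shift, override the unit.
in_gw = LinearTransformTimeVector(
    timevector=base,
    scale=0.001,
    shift=0.0,
    unit="GW",
    is_max_level=True,
)

print(in_gw.get_vector(is_float32=False))  # [0.5]
print(in_gw.get_unit())                    # GW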

ListTimeVector

ListTimeVector

Bases: TimeVector

TimeVector with a numpy array of values paired with a timeindex.

Source code in framcore/timevectors/ListTimeVector.py
class ListTimeVector(TimeVector):
    """TimeVector with a numpy array of values paired with a timeindex."""

    def __init__(
        self,
        timeindex: TimeIndex,
        vector: NDArray,
        unit: str | None,
        is_max_level: bool | None,
        is_zero_one_profile: bool | None,
        reference_period: ReferencePeriod | None = None,
    ) -> None:
        """
        Initialize the ListTimeVector class.

        Args:
            timeindex (TimeIndex): Index of timestamps for the vector.
            vector (NDArray): Array of vector values.
            unit (str | None): Unit of the values in the vector.
            is_max_level (bool | None): Whether the vector represents the maximum level, average level given a
                                        reference period, or not a level at all.
            is_zero_one_profile (bool | None): Whether the vector represents a profile with values between 0 and 1, a
                                               profile with values averaging to 1 over a given reference period, or is
                                               not a profile.
            reference_period (ReferencePeriod | None, optional): Given reference period if the vector represents average
                                                                 level or mean one profile. Defaults to None.

        Raises:
            ValueError: When both is_max_level and is_zero_one_profile are not None. This would mean the TimeVector
                        represents both a level and a profile, which is not allowed.
            ValueError: When the shape of the vector does not match the number of periods in the timeindex.

        """
        if vector.shape != (timeindex.get_num_periods(),):
            msg = f"Vector shape {vector.shape} does not match number of periods {timeindex.get_num_periods()} of timeindex ({timeindex})."
            raise ValueError(msg)

        self._timeindex = timeindex
        self._vector = vector
        self._unit = unit
        self._reference_period = reference_period
        self._is_max_level = is_max_level
        self._is_zero_one_profile = is_zero_one_profile

        self._check_type(timeindex, TimeIndex)
        self._check_type(vector, np.ndarray)
        self._check_type(unit, (str, type(None)))
        self._check_type(is_max_level, (bool, type(None)))
        self._check_type(is_zero_one_profile, (bool, type(None)))
        self._check_type(reference_period, (ReferencePeriod, type(None)))

        self._check_is_level_or_profile()

    def __eq__(self, other: object) -> bool:
        """Check equality between two ListTimeVector objects."""
        if not isinstance(other, ListTimeVector):
            return NotImplemented
        return (
            (self._timeindex == other._timeindex)
            and np.array_equal(self._vector, other._vector)
            and (self._unit == other._unit)
            and (self._is_max_level == other._is_max_level)
            and (self._is_zero_one_profile == other._is_zero_one_profile)
            and (self._reference_period == other._reference_period)
        )

    def __hash__(self) -> int:
        """Return hash of ListTimeVector object."""
        return hash((self._timeindex, self._vector.tobytes(), self._unit, self._is_max_level, self._is_zero_one_profile, self._reference_period))

    def __repr__(self) -> str:
        """Return the string representation of the ListTimeVector."""
        return f"ListTimeVector(timeindex={self._timeindex}, vector={self._vector}, unit={self._unit}, reference_period={self._reference_period})"

    def get_vector(self, is_float32: bool) -> NDArray:
        """Get the vector of the TimeVector as a numpy array."""
        if is_float32:
            return self._vector.astype(dtype=np.float32)
        return self._vector

    def get_timeindex(self) -> TimeIndex:
        """Get the TimeIndex of the TimeVector."""
        return self._timeindex

    def is_constant(self) -> bool:
        """Check if the TimeVector is constant."""
        return False

    def is_max_level(self) -> bool | None:
        """Check if TimeVector is a level representing maximum Volume/Capacity."""
        return self._is_max_level

    def is_zero_one_profile(self) -> bool | None:
        """Check if TimeVector is a profile with values between zero and one."""
        return self._is_zero_one_profile

    def get_unit(self) -> str | None:
        """Get the unit of the TimeVector."""
        return self._unit

    def get_reference_period(self) -> ReferencePeriod | None:
        """Get the reference period of the TimeVector."""
        return self._reference_period

    def get_fingerprint(self) -> Fingerprint:
        """
        Get the fingerprint of the ListTimeVector.

        Returns:
            Fingerprint: The fingerprint of the ListTimeVector, excluding the reference period.

        """
        excludes = {"_reference_period"}
        return self.get_fingerprint_default(excludes=excludes)

    def get_loader(self) -> None:
        """Interface method Not applicable for this type. Return None."""
        return
__eq__(other: object) -> bool

Check equality between two ListTimeVector objects.

Source code in framcore/timevectors/ListTimeVector.py
def __eq__(self, other: object) -> bool:
    """Check equality between two ListTimeVector objects."""
    if not isinstance(other, ListTimeVector):
        return NotImplemented
    return (
        (self._timeindex == other._timeindex)
        and np.array_equal(self._vector, other._vector)
        and (self._unit == other._unit)
        and (self._is_max_level == other._is_max_level)
        and (self._is_zero_one_profile == other._is_zero_one_profile)
        and (self._reference_period == other._reference_period)
    )
__hash__() -> int

Return hash of ListTimeVector object.

Source code in framcore/timevectors/ListTimeVector.py
def __hash__(self) -> int:
    """Return hash of ListTimeVector object."""
    return hash((self._timeindex, self._vector.tobytes(), self._unit, self._is_max_level, self._is_zero_one_profile, self._reference_period))
__init__(timeindex: TimeIndex, vector: NDArray, unit: str | None, is_max_level: bool | None, is_zero_one_profile: bool | None, reference_period: ReferencePeriod | None = None) -> None

Initialize the ListTimeVector class.

Parameters:

Name Type Description Default
timeindex TimeIndex

Index of timestamps for the vector.

required
vector NDArray

Array of vector values.

required
unit str | None

Unit of the values in the vector.

required
is_max_level bool | None

Whether the vector represents the maximum level, average level given a reference period, or not a level at all.

required
is_zero_one_profile bool | None

Whether the vector represents a profile with values between 0 and 1, a profile with values averaging to 1 over a given reference period, or is not a profile.

required
reference_period ReferencePeriod | None

Given reference period if the vector represents average level or mean one profile. Defaults to None.

None

Raises:

Type Description
ValueError

When both is_max_level and is_zero_one_profile are not None. This would mean the TimeVector represents both a level and a profile, which is not allowed.

ValueError

When the shape of the vector does not match the number of periods in the timeindex.

Source code in framcore/timevectors/ListTimeVector.py
def __init__(
    self,
    timeindex: TimeIndex,
    vector: NDArray,
    unit: str | None,
    is_max_level: bool | None,
    is_zero_one_profile: bool | None,
    reference_period: ReferencePeriod | None = None,
) -> None:
    """
    Initialize the ListTimeVector class.

    Args:
        timeindex (TimeIndex): Index of timestamps for the vector.
        vector (NDArray): Array of vector values.
        unit (str | None): Unit of the values in the vector.
        is_max_level (bool | None): Whether the vector represents the maximum level, average level given a
                                    reference period, or not a level at all.
        is_zero_one_profile (bool | None): Whether the vector represents a profile with values between 0 and 1, a
                                           profile with values averaging to 1 over a given reference period, or is
                                           not a profile.
        reference_period (ReferencePeriod | None, optional): Given reference period if the vector represents average
                                                             level or mean one profile. Defaults to None.

    Raises:
        ValueError: When both is_max_level and is_zero_one_profile are not None. This would mean the TimeVector
                    represents both a level and a profile, which is not allowed.
        ValueError: When the shape of the vector does not match the number of periods in the timeindex.

    """
    if vector.shape != (timeindex.get_num_periods(),):
        msg = f"Vector shape {vector.shape} does not match number of periods {timeindex.get_num_periods()} of timeindex ({timeindex})."
        raise ValueError(msg)

    self._timeindex = timeindex
    self._vector = vector
    self._unit = unit
    self._reference_period = reference_period
    self._is_max_level = is_max_level
    self._is_zero_one_profile = is_zero_one_profile

    self._check_type(timeindex, TimeIndex)
    self._check_type(vector, np.ndarray)
    self._check_type(unit, (str, type(None)))
    self._check_type(is_max_level, (bool, type(None)))
    self._check_type(is_zero_one_profile, (bool, type(None)))
    self._check_type(reference_period, (ReferencePeriod, type(None)))

    self._check_is_level_or_profile()
__repr__() -> str

Return the string representation of the ListTimeVector.

Source code in framcore/timevectors/ListTimeVector.py
def __repr__(self) -> str:
    """Return the string representation of the ListTimeVector."""
    return f"ListTimeVector(timeindex={self._timeindex}, vector={self._vector}, unit={self._unit}, reference_period={self._reference_period})"
get_fingerprint() -> Fingerprint

Get the fingerprint of the ListTimeVector.

Returns:

Name Type Description
Fingerprint Fingerprint

The fingerprint of the ListTimeVector, excluding the reference period.

Source code in framcore/timevectors/ListTimeVector.py
def get_fingerprint(self) -> Fingerprint:
    """
    Get the fingerprint of the ListTimeVector.

    Returns:
        Fingerprint: The fingerprint of the ListTimeVector, excluding the reference period.

    """
    excludes = {"_reference_period"}
    return self.get_fingerprint_default(excludes=excludes)
get_loader() -> None

Interface method. Not applicable for this type. Returns None.

Source code in framcore/timevectors/ListTimeVector.py
def get_loader(self) -> None:
    """Interface method Not applicable for this type. Return None."""
    return
get_reference_period() -> ReferencePeriod | None

Get the reference period of the TimeVector.

Source code in framcore/timevectors/ListTimeVector.py
def get_reference_period(self) -> ReferencePeriod | None:
    """Get the reference period of the TimeVector."""
    return self._reference_period
get_timeindex() -> TimeIndex

Get the TimeIndex of the TimeVector.

Source code in framcore/timevectors/ListTimeVector.py
def get_timeindex(self) -> TimeIndex:
    """Get the TimeIndex of the TimeVector."""
    return self._timeindex
get_unit() -> str | None

Get the unit of the TimeVector.

Source code in framcore/timevectors/ListTimeVector.py
def get_unit(self) -> str | None:
    """Get the unit of the TimeVector."""
    return self._unit
get_vector(is_float32: bool) -> NDArray

Get the vector of the TimeVector as a numpy array.

Source code in framcore/timevectors/ListTimeVector.py
def get_vector(self, is_float32: bool) -> NDArray:
    """Get the vector of the TimeVector as a numpy array."""
    if is_float32:
        return self._vector.astype(dtype=np.float32)
    return self._vector
is_constant() -> bool

Check if the TimeVector is constant.

Source code in framcore/timevectors/ListTimeVector.py
def is_constant(self) -> bool:
    """Check if the TimeVector is constant."""
    return False
is_max_level() -> bool | None

Check if TimeVector is a level representing maximum Volume/Capacity.

Source code in framcore/timevectors/ListTimeVector.py
def is_max_level(self) -> bool | None:
    """Check if TimeVector is a level representing maximum Volume/Capacity."""
    return self._is_max_level
is_zero_one_profile() -> bool | None

Check if TimeVector is a profile with values between zero and one.

Source code in framcore/timevectors/ListTimeVector.py
def is_zero_one_profile(self) -> bool | None:
    """Check if TimeVector is a profile with values between zero and one."""
    return self._is_zero_one_profile
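
A minimal construction sketch; `hourly_index` is a stand-in for a concrete TimeIndex (documented elsewhere) whose get_num_periods() must equal the length of the array:

import numpy as np

from framcore.timevectors import ListTimeVector

values = np.array([0.2, 0.9, 0.5])

profile = ListTimeVector(
    timeindex=hourly_index,     # stand-in: a TimeIndex with three periods (assumption)
    vector=values,
    unit=None,
    is_max_level=None,
    is_zero_one_profile=True,   # all values lie between 0 and 1
)

# A mismatched shape fails fast with ValueError, e.g. np.zeros(4) against a
# three-period timeindex.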

LoadedTimeVector

LoadedTimeVector

Bases: TimeVector

TimeVector which gets its data from a data source via a TimeVectorLoader. Subclass of TimeVector.

Source code in framcore/timevectors/LoadedTimeVector.py
class LoadedTimeVector(TimeVector):
    """TimeVector which gets its data from a data source via a TimeVectorLoader. Subclass of TimeVector."""

    def __init__(self, vector_id: str, loader: TimeVectorLoader) -> None:
        """
        Store vector id and loader in instance variables, get unit from loader.

        Args:
            vector_id (str): Unique name of this vector.
            loader (TimeVectorLoader): Object connected to a data source where vector_id is associated with a time
                                       vector. The Loader object must also implement the TimeVectorLoader API.

        Raises:
            ValueError: When the TimeVectorLoader metadata for the given vector_id sets both is_max_level and
                        is_zero_one_profile to non-None values. This would mean the TimeVector represents both a
                        level and a profile, which is not allowed.

        """
        self._vector_id = vector_id
        self._loader = loader
        self._check_type(self._vector_id, str)
        self._check_type(self._loader, TimeVectorLoader)
        self._is_max_level = self._loader.is_max_level(self._vector_id)
        self._is_zero_one_profile = self._loader.is_zero_one_profile(self._vector_id)
        self._unit = self._loader.get_unit(self._vector_id)
        self._reference_period = self._loader.get_reference_period(self._vector_id)

        self._check_is_level_or_profile()

    def __repr__(self) -> str:
        """Overwrite string representation of LoadedTimeVector objects."""
        return f"{type(self).__name__}(vector_id={self._vector_id},loader={self._loader},unit={self._unit})"

    def __eq__(self, other: object) -> bool:
        """Check equality between two LoadedTimeVector objects."""
        if not isinstance(other, LoadedTimeVector):
            return NotImplemented
        return (self._vector_id == other._vector_id) and (self._loader == other._loader)

    def __hash__(self) -> int:
        """Return hash of LoadedTimeVector object."""
        return hash((self._vector_id, self._loader))

    def get_vector(self, is_float32: bool) -> NDArray:
        """Get the vector of the TimeVector as a numpy array."""
        vector = self._loader.get_values(self._vector_id)
        if is_float32:
            return vector.astype(np.float32)
        return vector

    def get_timeindex(self) -> TimeIndex:
        """
        Get this time vector's index.

        Returns:
            TimeIndex: Object describing the index.

        """
        return self._loader.get_index(self._vector_id)

    def is_constant(self) -> bool:
        """Signify if this TimeVector is constant."""
        return False

    def get_unit(self) -> str:
        """Get the unit of this TimeVector."""
        return self._unit

    def get_loader(self) -> TimeVectorLoader:
        """Get the Loader this TimeVector retrieves its data from."""
        return self._loader

    def get_reference_period(self) -> ReferencePeriod | None:
        """Get the reference period which the data of this TimeVector is from."""
        return self._reference_period

    def is_max_level(self) -> bool | None:
        """Check if TimeVector is a level representing maximum Volume/Capacity."""
        return self._loader.is_max_level(self._vector_id)

    def is_zero_one_profile(self) -> bool | None:
        """Check if TimeVector is a profile with values between zero and one."""
        return self._loader.is_zero_one_profile(self._vector_id)

    def get_fingerprint(self) -> Fingerprint:
        """Get the Fingerprint of this TimeVector."""
        return self._loader.get_fingerprint(self._vector_id)
__eq__(other: object) -> bool

Check equality between two LoadedTimeVector objects.

Source code in framcore/timevectors/LoadedTimeVector.py
def __eq__(self, other: object) -> bool:
    """Check equality between two LoadedTimeVector objects."""
    if not isinstance(other, LoadedTimeVector):
        return NotImplemented
    return (self._vector_id == other._vector_id) and (self._loader == other._loader)
__hash__() -> int

Return hash of LoadedTimeVector object.

Source code in framcore/timevectors/LoadedTimeVector.py
def __hash__(self) -> int:
    """Return hash of LoadedTimeVector object."""
    return hash((self._vector_id, self._loader))
__init__(vector_id: str, loader: TimeVectorLoader) -> None

Store vector id and loader in instance variables, get unit from loader.

Parameters:

Name Type Description Default
vector_id str

Unique name of this vector.

required
loader TimeVectorLoader

Object connected to a data source where vector_id is associated with a time vector. The Loader object must also implement the TimeVectorLoader API.

required

Raises:

Type Description
ValueError

When the TimeVectorLoader metadata for the given vector_id sets both is_max_level and is_zero_one_profile to non-None values. This would mean the TimeVector represents both a level and a profile, which is not allowed.

Source code in framcore/timevectors/LoadedTimeVector.py
def __init__(self, vector_id: str, loader: TimeVectorLoader) -> None:
    """
    Store vector id and loader in instance variables, get unit from loader.

    Args:
        vector_id (str): Unique name of this vector.
        loader (TimeVectorLoader): Object connected to a data source where vector_id is associated with a time
                                   vector. The Loader object must also implement the TimeVectorLoader API.

    Raises:
        ValueError: When the TimeVectorLoader metadata for the given vector_id sets both is_max_level and
                    is_zero_one_profile to non-None values. This would mean the TimeVector represents both a
                    level and a profile, which is not allowed.

    """
    self._vector_id = vector_id
    self._loader = loader
    self._check_type(self._vector_id, str)
    self._check_type(self._loader, TimeVectorLoader)
    self._is_max_level = self._loader.is_max_level(self._vector_id)
    self._is_zero_one_profile = self._loader.is_zero_one_profile(self._vector_id)
    self._unit = self._loader.get_unit(self._vector_id)
    self._reference_period = self._loader.get_reference_period(self._vector_id)

    self._check_is_level_or_profile()
__repr__() -> str

Return the string representation of LoadedTimeVector objects.

Source code in framcore/timevectors/LoadedTimeVector.py
def __repr__(self) -> str:
    """Overwrite string representation of LoadedTimeVector objects."""
    return f"{type(self).__name__}(vector_id={self._vector_id},loader={self._loader},unit={self._unit})"
get_fingerprint() -> Fingerprint

Get the Fingerprint of this TimeVector.

Source code in framcore/timevectors/LoadedTimeVector.py
def get_fingerprint(self) -> Fingerprint:
    """Get the Fingerprint of this TimeVector."""
    return self._loader.get_fingerprint(self._vector_id)
get_loader() -> TimeVectorLoader

Get the Loader this TimeVector retrieves its data from.

Source code in framcore/timevectors/LoadedTimeVector.py
def get_loader(self) -> TimeVectorLoader:
    """Get the Loader this TimeVector retrieves its data from."""
    return self._loader
get_reference_period() -> ReferencePeriod | None

Get the reference period which the data of this TimeVector is from.

Source code in framcore/timevectors/LoadedTimeVector.py
def get_reference_period(self) -> ReferencePeriod | None:
    """Get the reference period which the data of this TimeVector is from."""
    return self._reference_period
get_timeindex() -> TimeIndex

Get this time vector's index.

Returns:

Name Type Description
TimeIndex TimeIndex

Object describing the index.

Source code in framcore/timevectors/LoadedTimeVector.py
def get_timeindex(self) -> TimeIndex:
    """
    Get this time vector's index.

    Returns:
        TimeIndex: Object describing the index.

    """
    return self._loader.get_index(self._vector_id)
get_unit() -> str

Get the unit of this TimeVector.

Source code in framcore/timevectors/LoadedTimeVector.py
def get_unit(self) -> str:
    """Get the unit of this TimeVector."""
    return self._unit
get_vector(is_float32: bool) -> NDArray

Get the vector of the TimeVector as a numpy array.

Source code in framcore/timevectors/LoadedTimeVector.py
def get_vector(self, is_float32: bool) -> NDArray:
    """Get the vector of the TimeVector as a numpy array."""
    vector = self._loader.get_values(self._vector_id)
    if is_float32:
        return vector.astype(np.float32)
    return vector
is_constant() -> bool

Signify if this TimeVector is constant.

Source code in framcore/timevectors/LoadedTimeVector.py
def is_constant(self) -> bool:
    """Signify if this TimeVector is constant."""
    return False
is_max_level() -> bool | None

Check if TimeVector is a level representing maximum Volume/Capacity.

Source code in framcore/timevectors/LoadedTimeVector.py
def is_max_level(self) -> bool | None:
    """Check if TimeVector is a level representing maximum Volume/Capacity."""
    return self._loader.is_max_level(self._vector_id)
is_zero_one_profile() -> bool | None

Check if TimeVector is a profile with values between zero and one.

Source code in framcore/timevectors/LoadedTimeVector.py
def is_zero_one_profile(self) -> bool | None:
    """Check if TimeVector is a profile with values between zero and one."""
    return self._loader.is_zero_one_profile(self._vector_id)
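
A minimal in-memory TimeVectorLoader sketch illustrating the contract LoadedTimeVector relies on. The framcore.loaders import path and the exact base-class requirements are assumptions here; real loaders point to databases:

import numpy as np

from framcore.loaders import TimeVectorLoader  # import path is an assumption
from framcore.timevectors import LoadedTimeVector


class InMemoryLoader(TimeVectorLoader):
    """Toy loader serving vectors from a dict instead of a database."""

    def __init__(self, data: dict[str, np.ndarray]) -> None:
        self._data = data

    def get_values(self, vector_id: str) -> np.ndarray:
        return self._data[vector_id]

    def get_unit(self, vector_id: str) -> str | None:
        return "MW"

    def is_max_level(self, vector_id: str) -> bool | None:
        return True  # every vector in this toy loader is a max level

    def is_zero_one_profile(self, vector_id: str) -> bool | None:
        return None

    def get_reference_period(self, vector_id: str) -> None:
        return None

    def get_index(self, vector_id: str):
        raise NotImplementedError  # a real loader returns a TimeIndex here

    def get_fingerprint(self, vector_id: str):
        raise NotImplementedError  # a real loader returns a Fingerprint here


tv = LoadedTimeVector("wind_oslo", InMemoryLoader({"wind_oslo": np.ones(8760)}))
print(tv.get_unit())                         # MW
print(tv.get_vector(is_float32=True).shape)  # (8760,)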

ReferencePeriod

ReferencePeriod

Bases: Base

ReferencePeriod class represents a period of one or more years.

Source code in framcore/timevectors/ReferencePeriod.py
class ReferencePeriod(Base):
    """ReferencePeriod class represents a period of one or more years."""

    def __init__(self, start_year: int, num_years: int) -> None:
        """
        Initialize a ReferencePeriod with the start year and number of years.

        Args:
            start_year (int): The first year in the reference period. Must be a non-negative integer.
            num_years (int): The number of years in the reference period. Must be a positive integer.

        """
        self._check_type(start_year, int)
        self._check_type(num_years, int)

        if start_year < 0:
            message = f"start_year must be a positive integer. Got {start_year}."
            raise ValueError(message)

        if num_years <= 0:
            message = f"num_years must be a positive non-zero integer. Got {num_years}."
            raise ValueError(message)

        self._start_year = start_year
        self._num_years = num_years

    def get_start_year(self) -> int:
        """Get the start_year from a ReferencePeriod instance."""
        return self._start_year

    def get_num_years(self) -> int:
        """Get the number of years in the ReferencePeriod."""
        return self._num_years

    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if self and other are equal."""
        if not isinstance(other, type(self)):
            return False
        return self._start_year == other._start_year and self._num_years == other._num_years

    def __hash__(self) -> int:
        """Compute hash value.."""
        return hash(
            (
                self._start_year,
                self._num_years,
            ),
        )
__eq__(other) -> bool

Check if self and other are equal.

Source code in framcore/timevectors/ReferencePeriod.py
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if self and other are equal."""
    if not isinstance(other, type(self)):
        return False
    return self._start_year == other._start_year and self._num_years == other._num_years
__hash__() -> int

Compute hash value.

Source code in framcore/timevectors/ReferencePeriod.py
def __hash__(self) -> int:
    """Compute hash value.."""
    return hash(
        (
            self._start_year,
            self._num_years,
        ),
    )
__init__(start_year: int, num_years: int) -> None

Initialize a ReferencePeriod with the start year and number of years.

Parameters:

Name Type Description Default
start_year int

The first year in the reference period. Must be a non-negative integer.

required
num_years int

The number of years in the reference period. Must be a positive integer.

required
Source code in framcore/timevectors/ReferencePeriod.py
def __init__(self, start_year: int, num_years: int) -> None:
    """
    Initialize a ReferencePeriod with the start year and number of years.

    Args:
        start_year (int): The first year in the reference period. Must be a non-negative integer.
        num_years (int): The number of years in the reference period. Must be a positive integer.

    """
    self._check_type(start_year, int)
    self._check_type(num_years, int)

    if start_year < 0:
        message = f"start_year must be a positive integer. Got {start_year}."
        raise ValueError(message)

    if num_years <= 0:
        message = f"num_years must be a positive non-zero integer. Got {num_years}."
        raise ValueError(message)

    self._start_year = start_year
    self._num_years = num_years
get_num_years() -> int

Get the number of years in the ReferencePeriod.

Source code in framcore/timevectors/ReferencePeriod.py
def get_num_years(self) -> int:
    """Get the number of years in the ReferencePeriod."""
    return self._num_years
get_start_year() -> int

Get the start_year from a ReferencePeriod instance.

Source code in framcore/timevectors/ReferencePeriod.py
def get_start_year(self) -> int:
    """Get the start_year from a ReferencePeriod instance."""
    return self._start_year
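
For example (construction and validation; values are illustrative):

from framcore.timevectors import ReferencePeriod

period = ReferencePeriod(start_year=1991, num_years=30)  # covers 1991-2020
print(period.get_start_year(), period.get_num_years())   # 1991 30

# ReferencePeriod(1991, 0) raises ValueError: num_years must be a positive integer.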

TimeVector

TimeVector

Bases: Base, ABC

TimeVector interface class for defining timeseries data.

Source code in framcore/timevectors/TimeVector.py
class TimeVector(Base, ABC):
    """TimeVector interface class for defining timeseries data."""

    def __init__(self) -> None:
        """Initialize the TimeVector class."""
        super().__init__()

    @abstractmethod
    def __eq__(self, other) -> bool:  # noqa: ANN001
        """Check if two TimeVectors are equal."""
        pass

    @abstractmethod
    def __hash__(self) -> int:
        """Compute hash value."""
        pass

    @abstractmethod
    def get_vector(self, is_float32: bool) -> NDArray:
        """Get the values of the TimeVector."""
        pass

    @abstractmethod
    def get_timeindex(self) -> TimeIndex | None:
        """Get the TimeIndex of the TimeVector."""
        pass

    @abstractmethod
    def is_constant(self) -> bool:
        """Check if the TimeVector is constant."""
        pass

    @abstractmethod
    def is_max_level(self) -> bool | None:
        """
        Whether the TimeVector represents the maximum level, average level given a reference period, or not a level at all.

        See LevelProfile for a description of Level (max or avg) and Profile (max one or mean one), and their formats.

        """
        pass

    @abstractmethod
    def is_zero_one_profile(self) -> bool | None:
        """
        Whether the TimeVector represents a profile with values between 0 and 1, a profile with average 1 over a given reference period, or is not a profile.

        See LevelProfile for a description of Level (max or avg) and Profile (max one or mean one), and their formats.

        """
        pass

    @abstractmethod
    def get_unit(self) -> str | None:
        """Get the unit of the TimeVector."""
        pass

    @abstractmethod
    def get_fingerprint(self) -> Fingerprint:
        """Get the fingerprint of the TimeVector."""
        pass

    @abstractmethod
    def get_reference_period(self) -> ReferencePeriod | None:
        """Get the reference period of the TimeVector."""
        pass

    @abstractmethod
    def get_loader(self) -> TimeVectorLoader | None:
        """
        Get the TimeVectorLoader of the TimeVector if self has one.

        TimeVectors can store timeseries data in Loaders that point to databases. Data is only retrieved and cached when the TimeVector is queried.
        """
        pass

    """
    Checks that the TimeVector is either a level or a profile.

    Raises:
        ValueError: If both is_max_level and is_zero_one_profile are None or both are not None.
    """

    def _check_is_level_or_profile(self) -> None:
        """Ensure that the TimeVector is either a level or a profile."""
        if (self.is_max_level() is not None and self.is_zero_one_profile() is not None) or (self.is_max_level() is None and self.is_zero_one_profile() is None):
            message = (
                f"Invalid input arguments for {self}: Must have exactly one 'non-None' value for "
                "is_max_level and is_zero_one_profile. A TimeVector is either a level or a profile."
            )
            raise ValueError(message)
__eq__(other) -> bool abstractmethod

Check if two TimeVectors are equal.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def __eq__(self, other) -> bool:  # noqa: ANN001
    """Check if two TimeVectors are equal."""
    pass
__hash__() -> int abstractmethod

Compute hash value.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def __hash__(self) -> int:
    """Compute hash value."""
    pass
__init__() -> None

Initialize the TimeVector class.

Source code in framcore/timevectors/TimeVector.py
def __init__(self) -> None:
    """Initialize the TimeVector class."""
    super().__init__()
get_fingerprint() -> Fingerprint abstractmethod

Get the fingerprint of the TimeVector.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def get_fingerprint(self) -> Fingerprint:
    """Get the fingerprint of the TimeVector."""
    pass
get_loader() -> TimeVectorLoader | None abstractmethod

Get the TimeVectorLoader of the TimeVector if self has one.

TimeVectors can store timeseries data in Loaders that point to databases. Data is only retrieved and cached when the TimeVector is queried.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def get_loader(self) -> TimeVectorLoader | None:
    """
    Get the TimeVectorLoader of the TimeVector if self has one.

    TimeVectors can store timeseries data in Loaders that point to databases. Data is only retrieved and cached when the TimeVector is queried.
    """
    pass
get_reference_period() -> ReferencePeriod | None abstractmethod

Get the reference period of the TimeVector.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def get_reference_period(self) -> ReferencePeriod | None:
    """Get the reference period of the TimeVector."""
    pass
get_timeindex() -> TimeIndex | None abstractmethod

Get the TimeIndex of the TimeVector.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def get_timeindex(self) -> TimeIndex | None:
    """Get the TimeIndex of the TimeVector."""
    pass
get_unit() -> str | None abstractmethod

Get the unit of the TimeVector.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def get_unit(self) -> str | None:
    """Get the unit of the TimeVector."""
    pass
get_vector(is_float32: bool) -> NDArray abstractmethod

Get the values of the TimeVector.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def get_vector(self, is_float32: bool) -> NDArray:
    """Get the values of the TimeVector."""
    pass
is_constant() -> bool abstractmethod

Check if the TimeVector is constant.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def is_constant(self) -> bool:
    """Check if the TimeVector is constant."""
    pass
is_max_level() -> bool | None abstractmethod

Whether the TimeVector represents the maximum level, average level given a reference period, or not a level at all.

See LevelProfile for a description of Level (max or avg) and Profile (max one or mean one), and their formats.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def is_max_level(self) -> bool | None:
    """
    Whether the TimeVector represents the maximum level, average level given a reference period, or not a level at all.

    See LevelProfile for a description of Level (max or avg) and Profile (max one or mean one), and their formats.

    """
    pass
is_zero_one_profile() -> bool | None abstractmethod

Whether the TimeVector represents a profile with values between 0 and 1, a profile with average 1 over a given reference period, or is not a profile.

See LevelProfile for a description of Level (max or avg) and Profile (max one or mean one), and their formats.

Source code in framcore/timevectors/TimeVector.py
@abstractmethod
def is_zero_one_profile(self) -> bool | None:
    """
    Whether the TimeVector represents a profile with values between 0 and 1, a profile with average 1 over a given reference period, or is not a profile.

    See LevelProfile for a description of Level (max or avg) and Profile (max one or mean one), and their formats.

    """
    pass
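
The level-or-profile invariant enforced by _check_is_level_or_profile can be demonstrated with ConstantTimeVector (illustrative):

from framcore.timevectors import ConstantTimeVector

ConstantTimeVector(1.0, is_max_level=True)         # ok: a level
ConstantTimeVector(1.0, is_zero_one_profile=True)  # ok: a profile

try:
    ConstantTimeVector(1.0, is_max_level=True, is_zero_one_profile=True)
except ValueError:
    pass  # a TimeVector cannot be both a level and a profile

try:
    ConstantTimeVector(1.0)  # neither flag set is also rejected
except ValueError:
    pass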

utils

FlowInfo

Bases: Base

Holds info about one or two related Arrows of a Flow.

Source code in framcore/utils/node_flow_utils.py
class FlowInfo(Base):
    """Holds info about one or two related Arrows of a Flow."""

    def __init__(
        self,
        category: str,
        node_out: str | None = None,
        commodity_out: str | None = None,
        node_in: str | None = None,
        commodity_in: str | None = None,
    ) -> None:
        """
        Based on its arrows, we derive properties about a Flow.

        We use this class to store such info.
        """
        self.category = category
        self.node_out = node_out
        self.commodity_out = commodity_out
        self.node_in = node_in
        self.commodity_in = commodity_in
__init__(category: str, node_out: str | None = None, commodity_out: str | None = None, node_in: str | None = None, commodity_in: str | None = None) -> None

Based on its arrows, we derive properties about a Flow.

We use this class to store such info.

Source code in framcore/utils/node_flow_utils.py
def __init__(
    self,
    category: str,
    node_out: str | None = None,
    commodity_out: str | None = None,
    node_in: str | None = None,
    commodity_in: str | None = None,
) -> None:
    """
    Based on its arrows, we derive properties about a Flow.

    We use this class to store such info.
    """
    self.category = category
    self.node_out = node_out
    self.commodity_out = commodity_out
    self.node_in = node_in
    self.commodity_in = commodity_in
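
For example, a transport-like Flow between two power nodes (names and category are illustrative):

from framcore.utils.node_flow_utils import FlowInfo

info = FlowInfo(
    category="Transmission",
    node_out="NO1",
    commodity_out="Power",
    node_in="NO2",
    commodity_in="Power",
)
print(f"{info.node_out} -> {info.node_in} ({info.category})")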

RegionalVolumes

Container for regional energy volumes.

Stores production, consumption, import, and export vectors for each node and category. Provides methods to access these aggregated results.

Source code in framcore/utils/get_regional_volumes.py
class RegionalVolumes:
    """
    Container for regional energy volumes.

    Stores production, consumption, import, and export vectors for each node and category.
    Provides methods to access these aggregated results.
    """

    def __init__(self) -> None:
        """Initialize the RegionalVolumes instance with empty dictionaries for production, consumption, import, and export."""
        self._production: dict[str, dict[str, NDArray]] = dict()
        self._consumption: dict[str, dict[str, NDArray]] = dict()
        self._export: dict[str, dict[str, NDArray]] = dict()
        self._import: dict[str, dict[str, NDArray]] = dict()

    def get_production(self) -> dict[str, dict[str, NDArray]]:
        """Return dict with production vector by category for each node."""
        return self._production

    def get_consumption(self) -> dict[str, dict[str, NDArray]]:
        """Return dict with consumption vector by category for each node."""
        return self._consumption

    def get_export(self) -> dict[str, dict[str, NDArray]]:
        """Return nested dict with export vector for each trade partner to an exporting node."""
        return self._export

    def get_import(self) -> dict[str, dict[str, NDArray]]:
        """Return nested dict with import vector for each trade partner to an importing node."""
        return self._import
__init__() -> None

Initialize the RegionalVolumes instance with empty dictionaries for production, consumption, import, and export.

Source code in framcore/utils/get_regional_volumes.py
def __init__(self) -> None:
    """Initialize the RegionalVolumes instance with empty dictionaries for production, consumption, import, and export."""
    self._production: dict[str, dict[str, NDArray]] = dict()
    self._consumption: dict[str, dict[str, NDArray]] = dict()
    self._export: dict[str, dict[str, NDArray]] = dict()
    self._import: dict[str, dict[str, NDArray]] = dict()
get_consumption() -> dict[str, dict[str, NDArray]]

Return dict with consumption vector by category for each node.

Source code in framcore/utils/get_regional_volumes.py
def get_consumption(self) -> dict[str, dict[str, NDArray]]:
    """Return dict with consumption vector by category for each node."""
    return self._consumption
get_export() -> dict[str, dict[str, NDArray]]

Return nested dict with export vector for each trade partner to an exporting node.

Source code in framcore/utils/get_regional_volumes.py
def get_export(self) -> dict[str, dict[str, NDArray]]:
    """Return nested dict with export vector for each trade partner to an exporting node."""
    return self._export
get_import() -> dict[str, dict[str, NDArray]]

Return nested dict with import vector for each trade partner to an importing node.

Source code in framcore/utils/get_regional_volumes.py
def get_import(self) -> dict[str, dict[str, NDArray]]:
    """Return nested dict with import vector for each trade partner to an importing node."""
    return self._import
get_production() -> dict[str, dict[str, NDArray]]

Return dict with production vector by category for each node.

Source code in framcore/utils/get_regional_volumes.py
def get_production(self) -> dict[str, dict[str, NDArray]]:
    """Return dict with production vector by category for each node."""
    return self._production
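
Example:

A sketch of reading the nested result dicts, assuming volumes is a RegionalVolumes produced by get_regional_volumes (documented below).

# Production vectors grouped by node category and production category.
for node_category, by_category in volumes.get_production().items():
    for production_category, vector in by_category.items():
        print(node_category, production_category, vector.sum())

# Import vectors keyed by importing category, then by trade partner.
for importer, partners in volumes.get_import().items():
    for exporter, vector in partners.items():
        print(f"{importer} imports from {exporter}")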

add_loaders(loaders: set[Loader], model: Model) -> None

Add all loaders stored in Model to loaders set.

Source code in framcore/utils/loaders.py
def add_loaders(loaders: set[Loader], model: Model) -> None:
    """Add all loaders stored in Model to loaders set."""
    from framcore import Model
    from framcore.components import Component, Flow, Node
    from framcore.curves import Curve
    from framcore.expressions import Expr
    from framcore.timevectors import TimeVector
    from framcore.utils import get_supported_components

    _check_type(loaders, "loaders", set)
    _check_type(model, "model", Model)

    data = model.get_data()
    components = dict()

    for key, value in data.items():
        if isinstance(value, Expr):
            value.add_loaders(loaders)

        elif isinstance(value, TimeVector | Curve):
            loader = value.get_loader()
            if loader is not None:
                loaders.add(loader)

        elif isinstance(value, Component):
            components[key] = value

    graph: dict[str, Flow | Node] = get_supported_components(components, (Flow, Node), tuple())

    for c in graph.values():
        c.add_loaders(loaders)
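
Example:

A minimal usage sketch, assuming model is an already-populated Model and that add_loaders is re-exported from framcore.utils (the import path is an assumption).

from framcore.utils import add_loaders  # import path assumed

loaders = set()
add_loaders(loaders, model)  # collects every Loader referenced by the model's data
print(f"Model references {len(loaders)} loaders")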

add_loaders_if(loaders: set, value: object | None) -> None

Call value.add_loaders(loaders) if value is not None.

Source code in framcore/utils/loaders.py
def add_loaders_if(loaders: set, value: object | None) -> None:
    """Call value.add_loaders(loaders) if value is not None."""
    _check_type(loaders, "loaders", set)
    if value is None:
        return
    value.add_loaders(loaders)

get_component_to_nodes(data: Model | dict[str, object]) -> dict[str, set[str]]

For each str key in data whose value is a Component, find all Node ids (str) in data directly connected to that Component.

Source code in framcore/utils/node_flow_utils.py
def get_component_to_nodes(data: Model | dict[str, object]) -> dict[str, set[str]]:
    """For each str key in data where value is a Component find all Node id str in data directly connected to the Component."""
    from framcore import Model

    _check_type(data, Model | dict)

    if isinstance(data, Model):
        data = data.get_data()

    components = {k: v for k, v in data.items() if isinstance(v, Component)}
    for k in components:
        assert isinstance(k, str), f"Got invalid key {k}"

    g = get_supported_components(components, (Node, Flow), tuple())

    nodes = {k: v for k, v in g.items() if isinstance(v, Node)}
    flows = {k: v for k, v in g.items() if isinstance(v, Flow)}

    domain_nodes = {k: v for k, v in nodes.items() if (k in components) and isinstance(v, Node)}
    assert all(isinstance(v, Node) for v in domain_nodes.values())

    parent_keys = {v: k for k, v in components.items()}

    out = defaultdict(set)
    for flow in flows.values():
        parent_key = parent_keys[flow.get_top_parent()]
        for a in flow.get_arrows():
            node_id = a.get_node()
            if node_id in domain_nodes:
                out[parent_key].add(node_id)

    return out
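
Example:

A sketch, assuming model is a populated Model; both a Model and a plain data dict are accepted.

component_to_nodes = get_component_to_nodes(model)
for component_id, node_ids in component_to_nodes.items():
    print(component_id, "->", sorted(node_ids))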

get_flow_infos(flow: Flow, node_to_commodity: dict[str, str]) -> list[FlowInfo]

Get flow infos from analysis of all its arrows.

Source code in framcore/utils/node_flow_utils.py
def get_flow_infos(flow: Flow, node_to_commodity: dict[str, str]) -> list[FlowInfo]:  # noqa: C901
    """Get flow infos from analysis of all its arrows."""
    _check_type(flow, Flow)
    _check_type(node_to_commodity, dict)

    arrows = flow.get_arrows()

    if len(arrows) == 1:
        arrow = next(iter(arrows))
        node_id = arrow.get_node()

        if node_id not in node_to_commodity:
            message = f"node_id {node_id} missing from node_to_commodity for flow\n{flow}"
            raise RuntimeError(message)

        commodity = node_to_commodity[node_id]
        if arrow.is_ingoing():
            info = FlowInfo(
                "direct_in",
                node_in=node_id,
                commodity_in=commodity,
            )
        else:
            info = FlowInfo(
                "direct_out",
                node_out=node_id,
                commodity_out=commodity,
            )
        return [info]

    seen: set[tuple[str, str]] = set()
    infos: list[FlowInfo] = []
    for x in arrows:
        for y in arrows:
            if x is y:
                continue

            if x.is_ingoing() != y.is_ingoing():
                arrow_in = x if x.is_ingoing() else y
                arrow_out = x if y.is_ingoing() else y

                node_in = arrow_in.get_node()
                node_out = arrow_out.get_node()

                if node_in not in node_to_commodity:
                    message = f"node_in {node_in} missing from node_to_commodity for flow\n{flow}"
                    raise RuntimeError(message)

                if node_out not in node_to_commodity:
                    message = f"node_out {node_out} missing from node_to_commodity for flow\n{flow}"
                    raise RuntimeError(message)

                commodity_in = node_to_commodity[node_in]
                commodity_out = node_to_commodity[node_out]

                info = FlowInfo(
                    category="transport" if commodity_in == commodity_out else "conversion",
                    node_in=node_in,
                    commodity_in=commodity_in,
                    node_out=node_out,
                    commodity_out=commodity_out,
                )
                key = (node_in, node_out)
                if key in seen:
                    continue

                infos.append(info)
                seen.add(key)

    for arrow in arrows:
        node = arrow.get_node()
        if any(node in [info.node_in, info.node_out] for info in infos):
            continue
        node_id = arrow.get_node()
        commodity = node_to_commodity[node_id]
        if arrow.is_ingoing():
            info = FlowInfo(
                "direct_in",
                node_in=node_id,
                commodity_in=commodity,
            )
        else:
            info = FlowInfo(
                "direct_out",
                node_out=node_id,
                commodity_out=commodity,
            )
        infos.append(info)

    return infos
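
Example:

A sketch that classifies every Flow in a model, assuming model is a populated Model; the framcore.utils import paths are assumptions.

from framcore.components import Component, Flow, Node
from framcore.utils import get_flow_infos, get_node_to_commodity, get_supported_components  # paths assumed

data = model.get_data()
node_to_commodity = get_node_to_commodity(data)  # documented below

components = {k: v for k, v in data.items() if isinstance(v, Component)}
graph = get_supported_components(components, (Node, Flow), tuple())

for flow in (v for v in graph.values() if isinstance(v, Flow)):
    for info in get_flow_infos(flow, node_to_commodity):
        if info.category == "conversion":
            print(f"{info.node_out} ({info.commodity_out}) -> {info.node_in} ({info.commodity_in})")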

get_hydro_downstream_energy_equivalent(data: dict[str, Component | TimeVector | Curve | Expr], module_name: str, power_node: str | None = None) -> Expr

Get the expression for the summed downstream energy equivalent of a hydro module.

  • If power node is given, only count downstream energy equivalents that are connected to the power node.
  • Energy equivalents are collected from hydro generators downstream, and the main topology follows the release_to attribute.
  • Transport pumps are included in the downstream topology, but counted as negative energy equivalents.

Parameters:

Name Type Description Default
data dict[str, Component | TimeVector | Curve | Expr]

The dict containing the components.

required
module_name str

The name of the hydro module to start from.

required
power_node str

Optional power node to filter energy equivalents.

None
Source code in framcore/utils/global_energy_equivalent.py
def get_hydro_downstream_energy_equivalent(
    data: dict[str, Component | TimeVector | Curve | Expr],
    module_name: str,
    power_node: str | None = None,
) -> Expr:
    """
    Get the expression for the summed downstream energy equivalent of a hydro module.

    - If power node is given, only count downstream energy equivalents that are connected to the power node.
    - Energy equivalents are collected from hydro generators downstream, and the main topology follows the release_to attribute.
    - Transport pumps are included in the downstream topology, but counted as negative energy equivalents.

    Args:
        data (dict[str, Component | TimeVector | Curve | Expr]): The dict containing the components.
        module_name (str): The name of the hydro module to start from.
        power_node (str): Optional power node to filter energy equivalents.

    """
    if data[module_name].get_pump() and data[module_name].get_pump().get_from_module() == module_name:  # transport pump
        pump_power_node = data[module_name].get_pump().get_power_node()
        pump_to = data[module_name].get_pump().get_to_module()
        energy_equivalent = get_hydro_downstream_energy_equivalent(data, pump_to, power_node)  # continue downstream of pump_to module
        if power_node in (pump_power_node, None):
            return energy_equivalent - data[module_name].get_pump().get_energy_equivalent().get_level()  # pumps have negative energy equivalents
        return energy_equivalent

    energy_equivalent = 0
    if data[module_name].get_generator():  # hydro generator
        module_power_node = data[module_name].get_generator().get_power_node()
        if power_node in (module_power_node, None):
            energy_equivalent += data[module_name].get_generator().get_energy_equivalent().get_level()
    if data[module_name].get_release_to():  # continue from release_to module
        release_to = data[module_name].get_release_to()
        energy_equivalent += get_hydro_downstream_energy_equivalent(data, release_to, power_node)
    return energy_equivalent
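
Example:

A sketch, assuming data is a model data dict holding HydroModules; "my_reservoir" and "NO1" are hypothetical names.

expr = get_hydro_downstream_energy_equivalent(data, "my_reservoir")

# Only count energy equivalents connected to one power node:
expr_no1 = get_hydro_downstream_energy_equivalent(data, "my_reservoir", power_node="NO1")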

get_node_to_commodity(data: dict[str, object]) -> dict[str, str]

Return dict with commodity (str) for each node id (str) in data.

Source code in framcore/utils/node_flow_utils.py
def get_node_to_commodity(data: dict[str, object]) -> dict[str, str]:
    """Return dict with commodity (str) for each node id (str) in data."""
    _check_type(data, dict)

    components = {k: v for k, v in data.items() if isinstance(v, Component)}
    for k in components:
        assert isinstance(k, str), f"Got invalid key {k}"

    g = get_supported_components(components, (Node, Flow), tuple())

    out = dict()
    for k, v in g.items():
        if isinstance(v, Node):
            _check_type(k, str)
            out[k] = v.get_commodity()
    return out

get_one_commodity_storage_subsystems(graph: dict[str, Node | Flow], include_boundaries: bool) -> dict[str, tuple[str, set[str], set[str]]]

Group all storage subsystems belonging to same commodity.

Returns dict[subsystem_id, (domain_commodity, member_component_ids, boundary_domain_commodities)]

The boundary_domain_commodities of the output is a set of boundary commodities. Some algorithms can only handle one boundary commodity, so this output is useful to verify that this condition applies, and to derive the conversion factor unit, which needs both the storage_commodity unit and the boundary_commodity unit.

If include_boundaries is False, only nodes with the same commodity as storage_node will be included in the subsystem.

Source code in framcore/utils/storage_subsystems.py
def get_one_commodity_storage_subsystems(  # noqa: C901
    graph: dict[str, Node | Flow],
    include_boundaries: bool,
) -> dict[str, tuple[str, set[str], set[str]]]:
    """
    Group all storage subsystems belonging to same commodity.

    Returns dict[subsystem_id, (domain_commodity, member_component_ids, boundary_domain_commodities)]

    The boundary_domain_commodities of the output is a set of boundary commodities.
    Some algorithms can only handle one boundary commodity, so this output is useful
    to verify that this condition applies, and to derive the conversion factor unit,
    which needs both the storage_commodity unit and the boundary_commodity unit.

    If include_boundaries is False, only nodes with the same commodity as storage_node
    will be included in the subsystem.
    """
    if not all(isinstance(c, Flow | Node) for c in graph.values()):
        invalid = {k: v for k, v in graph.items() if not isinstance(v, Flow | Node)}
        message = f"All values in graph must be Flow or Node objects. Found invalid objects: {invalid}"
        raise ValueError(message)

    flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}
    nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}

    storage_nodes: dict[str, Node] = {k: v for k, v in nodes.items() if v.get_storage()}

    node_to_flows: dict[str, set[str]] = defaultdict(set)
    flow_to_nodes: dict[str, set[str]] = defaultdict(set)
    for flow_id, flow in flows.items():
        for arrow in flow.get_arrows():
            node_id = arrow.get_node()
            node_to_flows[node_id].add(flow_id)
            flow_to_nodes[flow_id].add(node_id)

    out = dict()
    allocated: set[str] = set()
    for storage_node_id, storage_node in storage_nodes.items():
        if storage_node_id in allocated:
            continue

        subsystem_id = storage_node_id
        storage_commodity = storage_node.get_commodity()

        member_component_ids: set[str] = set()
        boundary_commodities: set[str] = set()

        visited: set[str] = set()
        remaining: set[str] = set()

        remaining.add(storage_node_id)

        while remaining:
            component_id = remaining.pop()
            if component_id in visited:
                continue

            visited.add(component_id)

            if component_id in nodes:
                node: Node = nodes[component_id]
                node_commodity = node.get_commodity()

                if node_commodity == storage_commodity:
                    allocated.add(component_id)
                    remaining.update(node_to_flows.get(component_id, set()))
                else:
                    boundary_commodities.add(node_commodity)

                if include_boundaries or node_commodity == storage_commodity:
                    member_component_ids.add(component_id)

            else:
                remaining.update(flow_to_nodes.get(component_id, set()))
                member_component_ids.add(component_id)

        out[subsystem_id] = (storage_commodity, member_component_ids, boundary_commodities)

    return out
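
Example:

A sketch, assuming model is a populated Model; the framcore.utils import paths are assumptions.

from framcore.components import Component, Flow, Node
from framcore.utils import get_one_commodity_storage_subsystems, get_supported_components  # paths assumed

components = {k: v for k, v in model.get_data().items() if isinstance(v, Component)}
graph = get_supported_components(components, (Node, Flow), tuple())

subsystems = get_one_commodity_storage_subsystems(graph, include_boundaries=True)
for subsystem_id, (commodity, member_ids, boundary_commodities) in subsystems.items():
    if len(boundary_commodities) > 1:
        print(f"{subsystem_id} ({commodity}) has several boundary commodities: {boundary_commodities}")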

get_transports_by_commodity(data: Model | dict[str, object], commodity: str) -> dict[str, tuple[str, str]]

Return dict with key component_id and value (from_node_id, to_node_id) where both nodes belong to given commodity.

Source code in framcore/utils/node_flow_utils.py
def get_transports_by_commodity(data: Model | dict[str, object], commodity: str) -> dict[str, tuple[str, str]]:
    """Return dict with key component_id and value (from_node_id, to_node_id) where both nodes belong to given commodity."""
    from framcore import Model

    _check_type(data, Model | dict)
    _check_type(commodity, str)

    if isinstance(data, Model):
        data = data.get_data()

    components = {k: v for k, v in data.items() if isinstance(v, Component)}
    for k in components:
        assert isinstance(k, str), f"Got invalid key {k}"

    node_to_commodity = get_node_to_commodity(components)

    g = get_supported_components(components, (Node, Flow), tuple())

    flows = {k: v for k, v in g.items() if isinstance(v, Flow)}

    parent_keys = {v: k for k, v in components.items()}

    out = dict()
    for flow in flows.values():
        parent_key = parent_keys[flow.get_top_parent()]
        infos = get_flow_infos(flow, node_to_commodity)
        if len(infos) != 1:
            continue
        info = infos[0]
        if info.category != "transport":
            continue
        if info.commodity_in != commodity:
            continue
        out[parent_key] = (info.node_out, info.node_in)

    return out
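
Example:

A sketch, assuming model is a populated Model and "Power" is one of its commodities.

transports = get_transports_by_commodity(model, "Power")
for component_id, (from_node, to_node) in transports.items():
    print(f"{component_id}: {from_node} -> {to_node}")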

is_transport_by_commodity(flow: Flow, node_to_commodity: dict[str, str], commodity: str) -> bool

Return True if flow is a transport of the given commodity.

Source code in framcore/utils/node_flow_utils.py
def is_transport_by_commodity(flow: Flow, node_to_commodity: dict[str, str], commodity: str) -> bool:
    """Return True if flow is a transport of the given commodity."""
    _check_type(flow, Flow)
    _check_type(node_to_commodity, dict)
    arrows = flow.get_arrows()
    try:
        x, y = tuple(arrows)
        opposite_directions = x.is_ingoing() != y.is_ingoing()
        x_commodity = node_to_commodity[x.get_node()]
        y_commodity = node_to_commodity[y.get_node()]
        correct_commodity = x_commodity == y_commodity == commodity
        return opposite_directions and correct_commodity
    except Exception:
        return False

replace_loader_path(loaders: set[Loader], old: Path, new: Path) -> None

Replace old path with new for all loaders using old path.

Source code in framcore/utils/loaders.py
def replace_loader_path(loaders: set[Loader], old: Path, new: Path) -> None:
    """Replace old path with new for all loaders using old path."""
    from framcore.loaders import FileLoader

    _check_type(loaders, "loaders", set)

    new = _check_path(new, "new", make_absolute=True)
    old = _check_path(old, "old", error_if_not_absolute=True)

    for loader in loaders:
        try:
            source = loader.get_source()
        except Exception:
            send_warning_event(f"loader.get_source() failed for {loader}. Skipping this one.")
            continue

        if isinstance(source, Path) and old in source.parents:
            loader.set_source(new_source=new / source.relative_to(old))

        if isinstance(loader, FileLoader) and not isinstance(source, Path):
            send_warning_event(f"FileLoader.get_source() does not return Path as it should for loader {loader}. Instead of Path, got {source}")
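
Example:

A sketch for retargeting file-based loaders after a dataset has been moved; the paths are hypothetical, and old must be absolute.

from pathlib import Path

loaders = set()
add_loaders(loaders, model)  # collect the model's loaders first
replace_loader_path(loaders, old=Path("/data/old_location"), new=Path("/data/new_location"))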

set_global_energy_equivalent(data: dict[str, Component | TimeVector | Curve | Expr], metakey_energy_eq_downstream: str) -> None

Loop through data dict and set the downstream energy equivalent for all HydroModules.

Send a warning event if a HydroModule has no downstream energy equivalents.

Parameters:

Name Type Description Default
data dict[str, Component | TimeVector | Curve | Expr]

The dict containing the components.

required
metakey_energy_eq_downstream str

The meta key to use for storing the downstream energy equivalent.

required
Source code in framcore/utils/global_energy_equivalent.py
def set_global_energy_equivalent(data: dict[str, Component | TimeVector | Curve | Expr], metakey_energy_eq_downstream: str) -> None:
    """
    Loop through data dict and set the downstream energy equivalent for all HydroModules.

    Send a warning event if a HydroModule has no downstream energy equivalents.

    Args:
        data (dict[str, Component | TimeVector | Curve | Expr]): The dict containing the components.
        metakey_energy_eq_downstream (str): The meta key to use for storing the downstream energy equivalent.

    """
    for module_name, module in data.items():
        if isinstance(module, HydroModule) and module.get_reservoir():
            energy_equivalent = get_hydro_downstream_energy_equivalent(data, module_name)
            if energy_equivalent == 0:
                message = f"HydroModule {module_name} has no downstream energy equivalents."
                module.send_warning_event(message)
                energy_equivalent = ConstantTimeVector(scalar=0.0, unit="kWh/m3", is_max_level=False)
            module.add_meta(metakey_energy_eq_downstream, LevelExprMeta(energy_equivalent))
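
Example:

A sketch, assuming data is a model data dict containing HydroModules; the meta key name is hypothetical.

data = model.get_data()
set_global_energy_equivalent(data, metakey_energy_eq_downstream="EnergyEqDownstream")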

get_regional_volumes

get_regional_volumes(db: Model | QueryDB, commodity: str, node_category: str, production_category: str, consumption_category: str, data_period: SinglePeriodTimeIndex, scenario_period: FixedFrequencyTimeIndex, unit: str, is_float32: bool = True) -> RegionalVolumes

Calculate aggregated production, consumption, import, and export for each member of node_category.

Decompose the model components into nodes and flows. Analyze the flows to determine their contribution to production, consumption, import, and export if they are associated with the specified commodity. Group these contributions based on the provided node_category, production_category, and consumption_category metadata.

Parameters:

Name Type Description Default
db Model | QueryDB

Model or QueryDB to use

required
commodity str

Commodity to consider

required
node_category str

Meta key for node category to group the results by

required
production_category str

Meta key for production category to group the results by

required
consumption_category str

Meta key for consumption category to group the results by

required
data_period SinglePeriodTimeIndex

Consider results for this data period

required
scenario_period FixedFrequencyTimeIndex

Consider results for this scenario period

required
unit str

Unit to use for the results

required
is_float32 bool

Use float32 for calculations and results if True

True
Source code in framcore/utils/get_regional_volumes.py
def get_regional_volumes(  # noqa: C901
    db: Model | QueryDB,
    commodity: str,
    node_category: str,
    production_category: str,
    consumption_category: str,
    data_period: SinglePeriodTimeIndex,
    scenario_period: FixedFrequencyTimeIndex,
    unit: str,
    is_float32: bool = True,
) -> RegionalVolumes:
    """
    Calculate aggregated production, consumption, import, and export for each member of node_category.

    Decompose the model components into nodes and flows. Analyze the flows to determine their contribution to production, consumption, import, and export if
    they are associated with the specified commodity. Group these contributions based on the provided node_category, production_category, and
    consumption_category metadata.

    Args:
        db (Model | QueryDB): Model or QueryDB to use
        commodity (str): Commodity to consider
        node_category (str): Meta key for node category to group the results by
        production_category (str): Meta key for production category to group the results by
        consumption_category (str): Meta key for consumption category to group the results by
        data_period (SinglePeriodTimeIndex): Consider results for this data period
        scenario_period (FixedFrequencyTimeIndex): Consider results for this scenario period
        unit (str): Unit to use for the results
        is_float32 (bool): Use float32 for calculations and results if True

    """
    db = _load_model_and_create_model_db(db)

    if not isinstance(is_float32, bool):
        message = f"Expected bool for is_float32, got {is_float32}"
        raise ValueError(message)

    domain_components = {k: v for k, v in db.get_data().items() if isinstance(v, Component)}

    graph: dict[str, Node | Flow] = get_supported_components(
        components=domain_components,
        supported_types=(Node, Flow),
        forbidden_types=tuple(),
    )

    flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}
    nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}

    node_to_commodity = get_node_to_commodity(graph)

    # only nodes of the preferred commodity
    nodes_of_commodity: dict[str, Node] = {k: v for k, v in nodes.items() if v.get_commodity() == commodity}

    # Mapping of node to category at the preferred node level
    node_to_category: dict[str, str] = {k: _get_meta_value(k, v, node_category) for k, v in nodes_of_commodity.items()}

    category_to_nodes: dict[str, set[str]] = defaultdict(set)
    visited = set()
    for node_id, category in node_to_category.items():
        assert node_id not in visited, f"{node_id} is duplicated"
        category_to_nodes[category].add(node_id)
        visited.add(node_id)

    direct_production: dict[str, dict[str, list[Flow]]] = dict()
    direct_consumption: dict[str, dict[str, list[Flow]]] = dict()
    converted_production: dict[str, dict[str, list[Flow]]] = dict()
    converted_consumption: dict[str, dict[str, list[Flow]]] = dict()
    import_: dict[str, dict[str, list[Flow]]] = dict()
    export: dict[str, dict[str, list[Flow]]] = dict()

    for flow_id, flow in flows.items():
        flow_infos = get_flow_infos(flow, node_to_commodity)

        prod_category = None
        cons_category = None
        with contextlib.suppress(Exception):
            prod_category = _get_meta_value(flow_id, flow, production_category)
        with contextlib.suppress(Exception):
            cons_category = _get_meta_value(flow_id, flow, consumption_category)

        for flow_info in flow_infos:
            flow_info: FlowInfo
            if flow_info.category == "direct_in" and flow_info.commodity_in == commodity:
                _check_category(prod_category, flow_id, flow_info)
                node_category = node_to_category[flow_info.node_in]
                if node_category not in direct_production:
                    direct_production[node_category] = defaultdict(list)
                direct_production[node_category][prod_category].append(flow)

            elif flow_info.category == "conversion" and flow_info.commodity_in == commodity:
                _check_category(prod_category, flow_id, flow_info)
                node_category = node_to_category[flow_info.node_in]
                if node_category not in converted_production:
                    converted_production[node_category] = defaultdict(list)
                converted_production[node_category][prod_category].append(flow)

            elif flow_info.category == "direct_out" and flow_info.commodity_out == commodity:
                _check_category(cons_category, flow_id, flow_info)
                node_category = node_to_category[flow_info.node_out]
                if node_category not in direct_consumption:
                    direct_consumption[node_category] = defaultdict(list)
                direct_consumption[node_category][cons_category].append(flow)

            elif flow_info.category == "conversion" and flow_info.commodity_out == commodity:
                _check_category(cons_category, flow_id, flow_info)
                node_category = node_to_category[flow_info.node_out]
                if node_category not in converted_consumption:
                    converted_consumption[node_category] = defaultdict(list)
                converted_consumption[node_category][cons_category].append(flow)

            elif flow_info.category == "transport":
                if node_to_commodity[flow_info.node_in] != commodity:
                    continue
                category_in = node_to_category[flow_info.node_in]
                category_out = node_to_category[flow_info.node_out]
                if category_in == category_out:
                    continue

                if category_in not in import_:
                    import_[category_in] = defaultdict(list)
                import_[category_in][category_out].append(flow)

                if category_out not in export:
                    export[category_out] = defaultdict(list)
                export[category_out][category_in].append(flow)

    num_periods = scenario_period.get_num_periods()
    dtype = np.float32 if is_float32 else np.float64

    out = RegionalVolumes()

    # direct
    for flow_dict, out_dict, is_ingoing in [(direct_production, out.get_production(), True), (direct_consumption, out.get_consumption(), False)]:
        for node_category, flow_categories in flow_dict.items():
            if node_category not in out_dict:
                out_dict[node_category] = dict()
            for flow_category, flows in flow_categories.items():
                x = np.zeros(num_periods, dtype=dtype)
                for flow in set(flows):
                    try:
                        vector = _get_vector(
                            flow=flow,
                            is_ingoing=is_ingoing,
                            commodity=commodity,
                            node_to_commodity=node_to_commodity,
                            db=db,
                            scenario_period=scenario_period,
                            data_period=data_period,
                            unit=unit,
                            is_float32=is_float32,
                        )
                        np.add(x, vector, out=x)
                    except Exception as e:
                        send_warning_event(flow, f"Could not get direct production or consumption for flow {flow}: {e}")
                out_dict[node_category][flow_category] = x

    # converted
    for flow_dict, out_dict, is_ingoing in [(converted_production, out.get_production(), True), (converted_consumption, out.get_consumption(), False)]:
        for node_category, flow_categories in flow_dict.items():
            if node_category not in out_dict:
                out_dict[node_category] = dict()
            for flow_category, flows in flow_categories.items():
                x = out_dict[node_category][flow_category] if flow_category in out_dict[node_category] else np.zeros(num_periods, dtype=dtype)
                for flow in set(flows):
                    try:
                        vector = _get_vector(
                            flow=flow,
                            is_ingoing=is_ingoing,
                            commodity=commodity,
                            node_to_commodity=node_to_commodity,
                            db=db,
                            scenario_period=scenario_period,
                            data_period=data_period,
                            unit=unit,
                            is_float32=is_float32,
                        )
                        np.add(x, vector, out=x)
                    except Exception as e:
                        send_warning_event(flow, f"Could not get indirect production or consumption for flow {flow}: {e}")
                out_dict[node_category][flow_category] = x

    # trade
    for flow_dict, out_dict, is_ingoing in [(import_, out.get_import(), True), (export, out.get_export(), False)]:
        for category, trade_partners in flow_dict.items():
            out_dict[category] = dict()
            for trade_partner, flows in trade_partners.items():
                x = np.zeros(num_periods, dtype=dtype)
                for flow in set(flows):
                    try:
                        vector = _get_vector(
                            flow=flow,
                            is_ingoing=is_ingoing,
                            commodity=commodity,
                            node_to_commodity=node_to_commodity,
                            db=db,
                            scenario_period=scenario_period,
                            data_period=data_period,
                            unit=unit,
                            is_float32=is_float32,
                        )
                        np.add(x, vector, out=x)
                    except Exception as e:
                        send_warning_event(flow, f"Could not get trade for flow {flow}: {e}")
                out_dict[category][trade_partner] = x

    return out
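
Example:

A sketch, assuming model, data_period (a SinglePeriodTimeIndex), and scenario_period (a FixedFrequencyTimeIndex) are already built; the commodity, meta keys, and unit are hypothetical.

volumes = get_regional_volumes(
    db=model,
    commodity="Power",
    node_category="Country",
    production_category="TechType",
    consumption_category="SectorType",
    data_period=data_period,
    scenario_period=scenario_period,
    unit="GWh",
)
production = volumes.get_production()  # {node_category: {production_category: NDArray}}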

get_supported_components

get_supported_components(components: dict[str, Component], supported_types: tuple[type[Component]], forbidden_types: tuple[type[Component]]) -> dict[str, Component]

Return a simplified version of components in compliance with the specified component types. See the description in Component.

Source code in framcore/utils/get_supported_components.py
def get_supported_components(
    components: dict[str, Component],
    supported_types: tuple[type[Component]],
    forbidden_types: tuple[type[Component]],
) -> dict[str, Component]:
    """Return simplified version of components in compliance with specified component types.See description in Component."""
    output: dict[str, Component] = {}
    errors: list[str] = []

    _simplify_until_supported(
        output,
        errors,
        components,
        supported_types,
        forbidden_types,
    )

    if errors:
        message = "\n".join(errors)
        raise ValueError(message)

    return output
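
Example:

A sketch that decomposes a model's components into a plain Node/Flow graph, assuming model is a populated Model.

from framcore.components import Component, Flow, Node
from framcore.utils import get_supported_components

components = {k: v for k, v in model.get_data().items() if isinstance(v, Component)}
graph = get_supported_components(components, supported_types=(Node, Flow), forbidden_types=tuple())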

isolate_subnodes

Demo to show how we can use the core to write some functions we need.

isolate_subnodes(model: Model, commodity: str, meta_key: str, members: list[str]) -> None

For components in model, delete all nodes of the given commodity except member and boundary nodes, along with all flows pointing to deleted nodes.

  • Keep member nodes and all flows between them.
  • Set boundary nodes exogenous and keep boundary flows into or out of member nodes.
  • Delete all other nodes of the commodity and all other flows pointing to them.

Parameters:

Name Type Description Default
model Model

Model to modify

required
commodity str

Commodity of nodes to consider

required
meta_key str

Meta key to use to identify members

required
members List[str]

List of meta key values identifying member nodes

required
Source code in framcore/utils/isolate_subnodes.py
def isolate_subnodes(model: Model, commodity: str, meta_key: str, members: list[str]) -> None:  # noqa: PLR0915, C901
    """
    For components in model, delete all nodes of the given commodity except member and boundary nodes, along with all flows pointing to deleted nodes.

    - Keep member nodes and all flows between them.
    - Set boundary nodes exogenous and keep boundary flows into or out of member nodes.
    - Delete all other nodes of the commodity and all other flows pointing to them.

    Args:
        model (Model): Model to modify
        commodity (str): Commodity of nodes to consider
        meta_key (str): Meta key to use to identify members
        members (List[str]): List of meta key values identifying member nodes

    """
    t = time()

    data = model.get_data()
    counts_before = model.get_content_counts()

    has_not_converged = True
    num_iterations = 0

    while has_not_converged:
        num_iterations += 1

        n_data_before = len(data)

        # We need copies of the components so we can set _parent to None, making each component a top_parent in the code below
        components: dict[str, Component] = {k: copy(v) for k, v in data.items() if isinstance(v, Component)}
        for c in components.values():
            c: Component
            c._parent = None  # noqa: SLF001

        node_to_commodity = get_node_to_commodity(components)

        parent_keys: dict[Component, str] = {v: k for k, v in components.items()}

        graph: dict[str, Node | Flow] = get_supported_components(components, (Node, Flow), tuple())

        parent_to_components = defaultdict(set)
        for c in graph.values():
            parent_to_components[c.get_top_parent()].add(c)

        nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}
        flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}

        commodity_nodes: dict[str, Node] = {k: v for k, v in nodes.items() if commodity == v.get_commodity()}
        for k, v in commodity_nodes.items():
            assert v.get_meta(meta_key), f"missing meta_key {meta_key} node_id {k}"

        inside_nodes: dict[str, Node] = {k: v for k, v in commodity_nodes.items() if _is_member(v, meta_key, members)}

        transports: dict[str, Flow] = {k: v for k, v in flows.items() if is_transport_by_commodity(v, node_to_commodity, commodity)}

        boundary_flows: dict[str, Flow] = {k: v for k, v in transports.items() if _is_boundary_flow(v, inside_nodes.keys())}

        boundary_nodes: dict[str, Node] = dict()
        for flow_id, flow in boundary_flows.items():
            for a in flow.get_arrows():
                node_id = a.get_node()
                if node_id not in inside_nodes:
                    boundary_nodes[node_id] = nodes[node_id]

        outside_nodes: dict[str, Node] = {k: v for k, v in commodity_nodes.items() if not (k in inside_nodes or k in boundary_nodes)}

        deletes: set[str] = set()

        deletes.update(outside_nodes.keys())
        deletes.update(boundary_nodes.keys())  # will be kept in delete step below
        deletes.update(boundary_flows.keys())  # will be kept in delete step below

        # delete flows delivering to deleted node
        for k, flow in flows.items():
            for a in flow.get_arrows():
                if a.get_node() in deletes:
                    deletes.add(k)
                    break  # goto next k, flow

        # needed for next two steps
        node_to_flows: dict[str, set[str]] = defaultdict(set)
        flow_to_nodes: dict[str, set[str]] = defaultdict(set)
        for flow_id, flow in flows.items():
            for arrow in flow.get_arrows():
                node_id = arrow.get_node()
                node_to_flows[node_id].add(flow_id)
                flow_to_nodes[flow_id].add(node_id)

        # delete disconnected subgraphs
        remaining = set(n for n in nodes if n not in commodity_nodes)
        while remaining:
            is_disconnected_subgraph = True
            subgraph = set()
            possible_members = set()
            possible_members.add(remaining.pop())
            while possible_members:
                member = possible_members.pop()
                if member in subgraph:  # avoid cycle
                    continue
                if member in flows:
                    subgraph.add(member)
                    for node in flow_to_nodes[member]:
                        if node not in outside_nodes and node not in boundary_nodes:  # do not traverse through outside or boundary nodes
                            possible_members.add(node)
                            if node in inside_nodes:
                                is_disconnected_subgraph = False
                else:
                    subgraph.add(member)
                    for flow in node_to_flows[member]:
                        possible_members.add(flow)
            if is_disconnected_subgraph:
                deletes.update(subgraph)

        for key in deletes:
            if (key in boundary_flows) or (key in boundary_nodes):
                continue

            if key not in graph:
                continue

            parent_key = parent_keys[graph[key].get_top_parent()]

            if parent_key in data:
                del data[parent_key]

        n_data_after = len(data)

        if n_data_after == n_data_before:
            has_not_converged = False

    counts_after = model.get_content_counts()

    added_components = counts_after["components"] - counts_before["components"]
    if added_components.total() > 0:
        message = f"Expected only deleted components. Got additions {added_components}"
        raise RuntimeError(message)

    deleted_components = counts_before["components"] - counts_after["components"]

    for node_id in boundary_nodes:
        if node_id in data:
            node: Node = data[node_id]
            node.set_exogenous()
            if not node.get_price().has_level():
                message = f"{node_id} set to be exogenous, but no price is available."
                raise RuntimeError(message)

    send_debug_event(isolate_subnodes, f"Used {num_iterations} iterations and {round(time() - t, 2)} seconds and deleted {deleted_components}")
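
Example:

A sketch that keeps only two hypothetical member areas of a "Power" commodity; the meta key and member values are hypothetical.

isolate_subnodes(model, commodity="Power", meta_key="Country", members=["NO", "SE"])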

loaders

add_loaders(loaders: set[Loader], model: Model) -> None

Add all loaders stored in Model to loaders set.

Source code in framcore/utils/loaders.py
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
def add_loaders(loaders: set[Loader], model: Model) -> None:
    """Add all loaders stored in Model to loaders set."""
    from framcore import Model
    from framcore.components import Component, Flow, Node
    from framcore.curves import Curve
    from framcore.expressions import Expr
    from framcore.timevectors import TimeVector
    from framcore.utils import get_supported_components

    _check_type(loaders, "loaders", set)
    _check_type(model, "model", Model)

    data = model.get_data()
    components = dict()

    for key, value in data.items():
        if isinstance(value, Expr):
            value.add_loaders(loaders)

        elif isinstance(value, TimeVector | Curve):
            loader = value.get_loader()
            if loader is not None:
                loaders.add(loader)

        elif isinstance(value, Component):
            components[key] = value

    graph: dict[str, Flow | Node] = get_supported_components(components, (Flow, Node), tuple())

    for c in graph.values():
        c.add_loaders(loaders)
add_loaders_if(loaders: set, value: object | None) -> None

Call value.add_loaders(loaders) if value is not None.

Source code in framcore/utils/loaders.py
13
14
15
16
17
18
def add_loaders_if(loaders: set, value: object | None) -> None:
    """Call value.add_loaders(loaders) if value is not None."""
    _check_type(loaders, "loaders", set)
    if value is None:
        return
    value.add_loaders(loaders)
replace_loader_path(loaders: set[Loader], old: Path, new: Path) -> None

Replace old path with new for all loaders using old path.

Source code in framcore/utils/loaders.py
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
def replace_loader_path(loaders: set[Loader], old: Path, new: Path) -> None:
    """Replace old path with new for all loaders using old path."""
    from framcore.loaders import FileLoader

    _check_type(loaders, "loaders", set)

    new = _check_path(new, "new", make_absolute=True)
    old = _check_path(old, "old", error_if_not_absolute=True)

    for loader in loaders:
        try:
            source = loader.get_source()
        except Exception:
            send_warning_event(f"loader.get_source() failed for {loader}. Skipping this one.")
            continue

        if isinstance(source, Path) and old in source.parents:
            loader.set_source(new_source=new / source.relative_to(old))

        if isinstance(loader, FileLoader) and not isinstance(source, Path):
            send_warning_event(f"FileLoader.get_source() does not return Path as it should for loader {loader}. Instead of Path, got {source}")

node_flow_utils

FlowInfo

Bases: Base

Holds info about one or two related Arrows of a Flow.

Source code in framcore/utils/node_flow_utils.py
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
class FlowInfo(Base):
    """Holds info about one or two related Arrows of a Flow."""

    def __init__(
        self,
        category: str,
        node_out: str | None = None,
        commodity_out: str | None = None,
        node_in: str | None = None,
        commodity_in: str | None = None,
    ) -> None:
        """
        Based on its arrows, we derive properties about a Flow.

        We use this class to store such info.
        """
        self.category = category
        self.node_out = node_out
        self.commodity_out = commodity_out
        self.node_in = node_in
        self.commodity_in = commodity_in
__init__(category: str, node_out: str | None = None, commodity_out: str | None = None, node_in: str | None = None, commodity_in: str | None = None) -> None

Based on its arrows, we derive properties about a Flow.

We use this class to store such info.

Source code in framcore/utils/node_flow_utils.py
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
def __init__(
    self,
    category: str,
    node_out: str | None = None,
    commodity_out: str | None = None,
    node_in: str | None = None,
    commodity_in: str | None = None,
) -> None:
    """
    Based on its arrows, we derive properties about a Flow.

    We use this class to store such info.
    """
    self.category = category
    self.node_out = node_out
    self.commodity_out = commodity_out
    self.node_in = node_in
    self.commodity_in = commodity_in
get_component_to_nodes(data: Model | dict[str, object]) -> dict[str, set[str]]

For each str key in data where value is a Component find all Node id str in data directly connected to the Component.

Source code in framcore/utils/node_flow_utils.py
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
def get_component_to_nodes(data: Model | dict[str, object]) -> dict[str, set[str]]:
    """For each str key in data where value is a Component find all Node id str in data directly connected to the Component."""
    from framcore import Model

    _check_type(data, Model | dict)

    if isinstance(data, Model):
        data = data.get_data()

    components = {k: v for k, v in data.items() if isinstance(v, Component)}
    for k in components:
        assert isinstance(k, str), f"Got invalid key {k}"

    g = get_supported_components(components, (Node, Flow), tuple())

    nodes = {k: v for k, v in g.items() if isinstance(v, Node)}
    flows = {k: v for k, v in g.items() if isinstance(v, Flow)}

    domain_nodes = {k: v for k, v in nodes.items() if (k in components) and isinstance(v, Node)}
    assert all(isinstance(v, Node) for v in domain_nodes.values())

    parent_keys = {v: k for k, v in components.items()}

    out = defaultdict(set)
    for flow in flows.values():
        parent_key = parent_keys[flow.get_top_parent()]
        for a in flow.get_arrows():
            node_id = a.get_node()
            if node_id in domain_nodes:
                out[parent_key].add(node_id)

    return out
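
A usage sketch, assuming model is a populated framcore Model (the function also accepts the underlying data dict directly):

from framcore.utils.node_flow_utils import get_component_to_nodes

component_to_nodes = get_component_to_nodes(model)
for component_id, node_ids in component_to_nodes.items():
    print(component_id, sorted(node_ids))
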
get_flow_infos(flow: Flow, node_to_commodity: dict[str, str]) -> list[FlowInfo]

Get flow infos from analysis of all its arrows.

Source code in framcore/utils/node_flow_utils.py
def get_flow_infos(flow: Flow, node_to_commodity: dict[str, str]) -> list[FlowInfo]:  # noqa: C901
    """Get flow infos from analysis of all its arrows."""
    _check_type(flow, Flow)
    _check_type(node_to_commodity, dict)

    arrows = flow.get_arrows()

    if len(arrows) == 1:
        arrow = next(iter(arrows))
        node_id = arrow.get_node()

        if node_id not in node_to_commodity:
            message = f"node_id {node_id} missing from node_to_commodity for flow\n{flow}"
            raise RuntimeError(message)

        commodity = node_to_commodity[node_id]
        if arrow.is_ingoing():
            info = FlowInfo(
                "direct_in",
                node_in=node_id,
                commodity_in=commodity,
            )
        else:
            info = FlowInfo(
                "direct_out",
                node_out=node_id,
                commodity_out=commodity,
            )
        return [info]

    seen: set[tuple[str, str]] = set()
    infos: list[FlowInfo] = []
    for x in arrows:
        for y in arrows:
            if x is y:
                continue

            if x.is_ingoing() != y.is_ingoing():
                arrow_in = x if x.is_ingoing() else y
                arrow_out = x if y.is_ingoing() else y

                node_in = arrow_in.get_node()
                node_out = arrow_out.get_node()

                if node_in not in node_to_commodity:
                    message = f"node_in {node_in} missing from node_to_commodity for flow\n{flow}"
                    raise RuntimeError(message)

                if node_out not in node_to_commodity:
                    message = f"node_out {node_out} missing from node_to_commodity for flow\n{flow}"
                    raise RuntimeError(message)

                commodity_in = node_to_commodity[node_in]
                commodity_out = node_to_commodity[node_out]

                info = FlowInfo(
                    category="transport" if commodity_in == commodity_out else "conversion",
                    node_in=node_in,
                    commodity_in=commodity_in,
                    node_out=node_out,
                    commodity_out=commodity_out,
                )
                key = (node_in, node_out)
                if key in seen:
                    continue

                infos.append(info)
                seen.add(key)

    for arrow in arrows:
        node_id = arrow.get_node()
        if any(node_id in (info.node_in, info.node_out) for info in infos):
            continue
        if node_id not in node_to_commodity:
            message = f"node_id {node_id} missing from node_to_commodity for flow\n{flow}"
            raise RuntimeError(message)
        commodity = node_to_commodity[node_id]
        if arrow.is_ingoing():
            info = FlowInfo(
                "direct_in",
                node_in=node_id,
                commodity_in=commodity,
            )
        else:
            info = FlowInfo(
                "direct_out",
                node_out=node_id,
                commodity_out=commodity,
            )
        infos.append(info)

    return infos
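
A usage sketch, assuming data is a model data dict and "my_flow" is a hypothetical key holding a Flow:

from framcore.utils.node_flow_utils import get_flow_infos, get_node_to_commodity

node_to_commodity = get_node_to_commodity(data)
for info in get_flow_infos(data["my_flow"], node_to_commodity):
    # category is one of "direct_in", "direct_out", "transport" or "conversion"
    print(info.category, info.node_out, info.node_in)
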
get_node_to_commodity(data: dict[str, object]) -> dict[str, str]

Return dict with commodity (str) for each node id (str) in data.

Source code in framcore/utils/node_flow_utils.py
def get_node_to_commodity(data: dict[str, object]) -> dict[str, str]:
    """Return dict with commodity (str) for each node id (str) in data."""
    _check_type(data, dict)

    components = {k: v for k, v in data.items() if isinstance(v, Component)}
    for k in components:
        assert isinstance(k, str), f"Got invalid key {k}"

    g = get_supported_components(components, (Node, Flow), tuple())

    out = dict()
    for k, v in g.items():
        if isinstance(v, Node):
            _check_type(k, str)
            out[k] = v.get_commodity()
    return out
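
A usage sketch, assuming model is a populated framcore Model and "Power" is a hypothetical commodity name:

from framcore.utils.node_flow_utils import get_node_to_commodity

node_to_commodity = get_node_to_commodity(model.get_data())
power_nodes = {node_id for node_id, commodity in node_to_commodity.items() if commodity == "Power"}
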
get_transports_by_commodity(data: Model | dict[str, object], commodity: str) -> dict[str, tuple[str, str]]

Return a dict with key component_id and value (from_node_id, to_node_id), where both nodes belong to the given commodity.

Source code in framcore/utils/node_flow_utils.py
def get_transports_by_commodity(data: Model | dict[str, object], commodity: str) -> dict[str, tuple[str, str]]:
    """Return dict with key component_id and value (from_node_id, to_node_id) where both nodes belong to given commodity."""
    from framcore import Model

    _check_type(data, Model | dict)
    _check_type(commodity, str)

    if isinstance(data, Model):
        data = data.get_data()

    components = {k: v for k, v in data.items() if isinstance(v, Component)}
    for k in components:
        assert isinstance(k, str), f"Got invalid key {k}"

    node_to_commodity = get_node_to_commodity(components)

    g = get_supported_components(components, (Node, Flow), tuple())

    flows = {k: v for k, v in g.items() if isinstance(v, Flow)}

    parent_keys = {v: k for k, v in components.items()}

    out = dict()
    for flow in flows.values():
        parent_key = parent_keys[flow.get_top_parent()]
        infos = get_flow_infos(flow, node_to_commodity)
        if len(infos) != 1:
            continue
        info = infos[0]
        if info.category != "transport":
            continue
        if info.commodity_in != commodity:
            continue
        out[parent_key] = (info.node_out, info.node_in)

    return out
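
A usage sketch with the hypothetical commodity name "Power", assuming model is a populated framcore Model:

from framcore.utils.node_flow_utils import get_transports_by_commodity

transports = get_transports_by_commodity(model, "Power")
for component_id, (from_node, to_node) in transports.items():
    print(f"{component_id}: {from_node} -> {to_node}")
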
is_transport_by_commodity(flow: Flow, node_to_commodity: dict[str, str], commodity: str) -> bool

Return True if flow is a transport of the given commodity.

Source code in framcore/utils/node_flow_utils.py
def is_transport_by_commodity(flow: Flow, node_to_commodity: dict[str, str], commodity: str) -> bool:
    """Return True if flow is a transport of the given commodity."""
    _check_type(flow, Flow)
    _check_type(node_to_commodity, dict)
    arrows = flow.get_arrows()
    try:
        x, y = tuple(arrows)
        opposite_directions = x.is_ingoing() != y.is_ingoing()
        x_commodity = node_to_commodity[x.get_node()]
        y_commodity = node_to_commodity[y.get_node()]
        correct_commodity = x_commodity == y_commodity == commodity
        return opposite_directions and correct_commodity
    except Exception:
        return False
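
A usage sketch, assuming flows is a dict[str, Flow] and node_to_commodity was built beforehand with get_node_to_commodity; "Power" is a hypothetical commodity name:

from framcore.utils.node_flow_utils import is_transport_by_commodity

power_transports = {
    flow_id: flow
    for flow_id, flow in flows.items()
    if is_transport_by_commodity(flow, node_to_commodity, "Power")
}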

storage_subsystems

get_one_commodity_storage_subsystems(graph: dict[str, Node | Flow], include_boundaries: bool) -> dict[str, tuple[str, set[str], set[str]]]

Group all storage subsystems belonging to the same commodity.

Returns dict[subsystem_id, (domain_commodity, member_component_ids, boundary_domain_commodities)]

The boundary_domain_commodities of the output is a set of boundary commodities. Some algorithms can only handle one boundary commodity, so this output is useful for verifying that this condition applies, and for deriving the conversion factor unit, which needs both the storage_commodity unit and the boundary_commodity unit.

If include_boundaries is False, only nodes with the same commodity as the storage_node will be included in the subsystem.

Source code in framcore/utils/storage_subsystems.py
def get_one_commodity_storage_subsystems(  # noqa: C901
    graph: dict[str, Node | Flow],
    include_boundaries: bool,
) -> dict[str, tuple[str, set[str], set[str]]]:
    """
    Group all storage subsystems belonging to the same commodity.

    Returns dict[subsystem_id, (domain_commodity, member_component_ids, boundary_domain_commodities)]

    The boundary_domain_commodities of the output is a set of boundary commodities.
    Some algorithms can only handle one boundary commodity, so this output is useful
    for verifying that this condition applies, and for deriving the conversion factor
    unit, which needs both the storage_commodity unit and the boundary_commodity unit.

    If include_boundaries is False, only nodes with the same commodity as the
    storage_node will be included in the subsystem.
    """
    if not all(isinstance(c, Flow | Node) for c in graph.values()):
        invalid = {k: v for k, v in graph.items() if not isinstance(v, Flow | Node)}
        message = f"All values in graph must be Flow or Node objects. Found invalid objects: {invalid}"
        raise ValueError(message)

    flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}
    nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}

    storage_nodes: dict[str, Node] = {k: v for k, v in nodes.items() if v.get_storage()}

    node_to_flows: dict[str, set[str]] = defaultdict(set)
    flow_to_nodes: dict[str, set[str]] = defaultdict(set)
    for flow_id, flow in flows.items():
        for arrow in flow.get_arrows():
            node_id = arrow.get_node()
            node_to_flows[node_id].add(flow_id)
            flow_to_nodes[flow_id].add(node_id)

    out = dict()
    allocated: set[str] = set()
    for storage_node_id, storage_node in storage_nodes.items():
        if storage_node_id in allocated:
            continue

        subsystem_id = storage_node_id
        storage_commodity = storage_node.get_commodity()

        member_component_ids: set[str] = set()
        boundary_commodities: set[str] = set()

        visited: set[str] = set()
        remaining: set[str] = set()

        remaining.add(storage_node_id)

        while remaining:
            component_id = remaining.pop()
            if component_id in visited:
                continue

            visited.add(component_id)

            if component_id in nodes:
                node: Node = nodes[component_id]
                node_commodity = node.get_commodity()

                if node_commodity == storage_commodity:
                    allocated.add(component_id)
                    remaining.update(node_to_flows.get(component_id, set()))
                else:
                    boundary_commodities.add(node_commodity)

                if include_boundaries or node_commodity == storage_commodity:
                    member_component_ids.add(component_id)

            else:
                remaining.update(flow_to_nodes.get(component_id, set()))
                member_component_ids.add(component_id)

        out[subsystem_id] = (storage_commodity, member_component_ids, boundary_commodities)

    return out
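
A usage sketch, assuming graph is a dict[str, Node | Flow] (for example, the output of get_supported_components restricted to Node and Flow):

from framcore.utils.storage_subsystems import get_one_commodity_storage_subsystems

subsystems = get_one_commodity_storage_subsystems(graph, include_boundaries=True)
for subsystem_id, (commodity, member_ids, boundary_commodities) in subsystems.items():
    # Flag subsystems that some algorithms cannot handle (more than one boundary commodity).
    if len(boundary_commodities) > 1:
        print(subsystem_id, commodity, boundary_commodities)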