Allow specifying soc-maxima and soc-minima as a range #899

Merged · 21 commits · Jan 23, 2024
5 changes: 5 additions & 0 deletions documentation/api/change_log.rst
@@ -5,6 +5,11 @@ API change log
 
 .. note:: The FlexMeasures API follows its own versioning scheme. This is also reflected in the URL (e.g. `/api/v3_0`), allowing developers to upgrade at their own pace.
 
+v3.0-15 | 2024-01-11
+""""""""""""""""""""
+
+- Support setting SoC constraints in the flex model for a given time period rather than a single datetime, using the new ``start``, ``end`` and/or ``duration`` fields of ``soc-maxima``, ``soc-minima`` and ``soc-targets``.
+
 v3.0-14 | 2023-12-07
 """"""""""""""""""""
 
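
To illustrate the new fields, here is a minimal sketch of a flex-model that states an SoC constraint for a period instead of a single datetime. It assumes, per the wording of the change log entry above, that start + end and start + duration are both accepted combinations; all values are illustrative.

# A minimal sketch of a flex-model using the new period fields (all numbers illustrative).
# Assumption: both "start" + "end" and "start" + "duration" are accepted ways to define
# the period, per the change log's mention of start, end and/or duration.
flex_model = {
    "soc-unit": "kWh",
    "soc-maxima": [
        {
            "value": 51,
            "start": "2024-02-04T10:35:00+01:00",
            "end": "2024-02-05T04:25:00+01:00",
        }
    ],
    "soc-minima": [
        {
            "value": 20,
            "start": "2024-02-04T10:35:00+01:00",
            "duration": "PT6H",  # ISO 8601 duration instead of an explicit end
        }
    ],
}
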
31 changes: 4 additions & 27 deletions documentation/api/v2g.rst
@@ -52,20 +52,8 @@ To enable a temporary target SoC of more than 85% (for car reservations, see the
     "soc-maxima": [
         {
             "value": 51,
-            "datetime": "2024-02-04T10:35:00+01:00"
-        },
-        {
-            "value": 51,
-            "datetime": "2024-02-04T10:40:00+01:00"
-        },
-        ...
-        {
-            "value": 51,
-            "datetime": "2024-02-05T04:20:00+01:00"
-        },
-        {
-            "value": 51,
-            "datetime": "2024-02-05T04:25:00+01:00"
+            "start": "2024-02-04T10:35:00+01:00",
+            "end": "2024-02-05T04:25:00+01:00"
         }
     ],
     "soc-unit": "kWh"
@@ -106,19 +94,8 @@ To make sure the car remains at 95% SoC for some time, additional soc-minima con
     "soc-minima": [
         {
             "value": 57,
-            "datetime": "2024-02-05T08:00:00+01:00"
-        },
-        {
-            "value": 57,
-            "datetime": "2024-02-05T08:05:00+01:00"
-        },
-        {
-            "value": 57,
-            "datetime": "2024-02-05T08:10:00+01:00"
-        },
-        {
-            "value": 57,
-            "datetime": "2024-02-05T08:15:00+01:00"
+            "start": "2024-02-05T08:00:00+01:00",
+            "end": "2024-02-05T08:15:00+01:00"
         }
     ]
 }
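
As a usage sketch, the range-based soc-minima above could be posted to the schedule-triggering endpoint roughly as follows. The endpoint path follows the /api/v3_0 prefix and the trigger_schedule route referenced elsewhere in this PR; the host, sensor id, auth token and the remaining flex-model values are placeholders.

# Sketch of triggering a schedule with a range-based soc-minima constraint.
# Host, sensor id, auth token and most values are placeholders, not taken from this PR.
import requests

payload = {
    "start": "2024-02-05T00:00:00+01:00",
    "duration": "PT24H",
    "flex-model": {
        "soc-at-start": 80,  # illustrative
        "soc-unit": "kWh",
        "soc-minima": [
            {
                "value": 57,
                "start": "2024-02-05T08:00:00+01:00",
                "end": "2024-02-05T08:15:00+01:00",
            }
        ],
    },
}
response = requests.post(
    "http://localhost:5000/api/v3_0/sensors/1/schedules/trigger",
    json=payload,
    headers={"Authorization": "<auth token>"},
)
response.raise_for_status()
# The response's "schedule" field holds the job id, which can be polled via the
# /sensors/<id>/schedules/<uuid> [GET] endpoint.
print(response.json())
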
2 changes: 2 additions & 0 deletions documentation/changelog.rst
@@ -12,6 +12,8 @@ v0.19.0 | February xx, 2024
 New features
 -------------
 
+* Support a less verbose way of setting the same :abbr:`SoC (state of charge)` constraint for a given time window [see `PR #899 <https://github.com/FlexMeasures/flexmeasures/pull/899>`_]
+
 Infrastructure / Support
 ----------------------
 
7 changes: 2 additions & 5 deletions flexmeasures/api/v3_0/sensors.py
@@ -320,11 +320,8 @@ def trigger_schedule(
     "soc-minima": [
         {
             "value": 15,
-            "datetime": "2015-06-02T14:00:00+00:00"
-        },
-        {
-            "value": 20,
-            "datetime": "2015-06-02T15:00:00+00:00"
+            "start": "2015-06-02T14:00:00+00:00",
+            "end": "2015-06-02T15:00:00+00:00"
         }
     ],
     "soc-min": 10,
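
To make the semantics concrete, below is an illustrative sketch, not FlexMeasures' actual deserialization code, of how a period-based constraint corresponds to repeating the same value at each event resolution step. The helper name is hypothetical; treating the end as inclusive matches the documentation examples above.

# Illustrative only: a period-based SoC constraint is conceptually equivalent to
# repeating the same value at every resolution step within [start, end].
from datetime import datetime, timedelta


def expand_soc_constraint(constraint: dict, resolution: timedelta) -> list[dict]:
    """Expand a {"value", "start", "end"} constraint into per-datetime constraints."""
    start = datetime.fromisoformat(constraint["start"])
    end = datetime.fromisoformat(constraint["end"])
    expanded = []
    dt = start
    while dt <= end:
        expanded.append({"value": constraint["value"], "datetime": dt.isoformat()})
        dt += resolution
    return expanded


# The 5-minute example from the v2g documentation yields 4 entries: 08:00, 08:05, 08:10 and 08:15.
entries = expand_soc_constraint(
    {"value": 57, "start": "2024-02-05T08:00:00+01:00", "end": "2024-02-05T08:15:00+01:00"},
    resolution=timedelta(minutes=5),
)
assert len(entries) == 4
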
180 changes: 3 additions & 177 deletions flexmeasures/api/v3_0/tests/test_sensor_schedules.py
@@ -1,22 +1,16 @@
from flask import url_for
import pytest
from isodate import parse_datetime, parse_duration
from isodate import parse_datetime

import pandas as pd
from rq.job import Job

from flexmeasures.api.common.responses import unknown_schedule, unrecognized_event
from flexmeasures.api.tests.utils import check_deprecation
from flexmeasures.api.v3_0.tests.utils import message_for_trigger_schedule
from flexmeasures.data.models.data_sources import DataSource
from flexmeasures.data.models.generic_assets import GenericAsset
from flexmeasures.data.models.time_series import Sensor, TimedBelief
from flexmeasures.data.models.time_series import Sensor
from flexmeasures.data.tests.utils import work_on_rq
from flexmeasures.data.services.scheduling import (
handle_scheduling_exception,
get_data_source_for_job,
)
from flexmeasures.utils.calculations import integrate_time_series
from flexmeasures.data.services.scheduling import handle_scheduling_exception


@pytest.mark.parametrize(
@@ -163,174 +157,6 @@ def test_trigger_and_get_schedule_with_unknown_prices(
assert "prices unknown" in get_schedule_response.json["message"].lower()


@pytest.mark.parametrize(
"message, asset_name",
[
(message_for_trigger_schedule(), "Test battery"),
(message_for_trigger_schedule(with_targets=True), "Test charging station"),
],
)
@pytest.mark.parametrize(
"requesting_user", ["test_prosumer_user@seita.nl"], indirect=True
)
def test_trigger_and_get_schedule(
app,
add_market_prices,
add_battery_assets,
battery_soc_sensor,
add_charging_station_assets,
keep_scheduling_queue_empty,
message,
asset_name,
requesting_user,
): # noqa: C901
# Include the price sensor and site-power-capacity in the flex-context explicitly, to test deserialization
price_sensor_id = add_market_prices["epex_da"].id
message["flex-context"] = {
"consumption-price-sensor": price_sensor_id,
"production-price-sensor": price_sensor_id,
"site-power-capacity": "1 TW", # should be big enough to avoid any infeasibilities
}

# trigger a schedule through the /sensors/<id>/schedules/trigger [POST] api endpoint
assert len(app.queues["scheduling"]) == 0

sensor = (
Sensor.query.filter(Sensor.name == "power")
.join(GenericAsset)
.filter(GenericAsset.id == Sensor.generic_asset_id)
.filter(GenericAsset.name == asset_name)
.one_or_none()
)
with app.test_client() as client:
trigger_schedule_response = client.post(
url_for("SensorAPI:trigger_schedule", id=sensor.id),
json=message,
)
print("Server responded with:\n%s" % trigger_schedule_response.json)
assert trigger_schedule_response.status_code == 200
job_id = trigger_schedule_response.json["schedule"]

# look for scheduling jobs in queue
assert (
len(app.queues["scheduling"]) == 1
) # only 1 schedule should be made for 1 asset
job = app.queues["scheduling"].jobs[0]
assert job.kwargs["asset_or_sensor"]["id"] == sensor.id
assert job.kwargs["start"] == parse_datetime(message["start"])
assert job.id == job_id

# process the scheduling queue
work_on_rq(app.queues["scheduling"], exc_handler=handle_scheduling_exception)
assert (
Job.fetch(job_id, connection=app.queues["scheduling"].connection).is_finished
is True
)

# Derive some expectations from the POSTed message
if "flex-model" not in message:
start_soc = message["soc-at-start"] / 1000 # in MWh
roundtrip_efficiency = (
float(message["roundtrip-efficiency"].replace("%", "")) / 100.0
)
storage_efficiency = (
float(message["storage-efficiency"].replace("%", "")) / 100.0
)
soc_targets = message.get("soc-targets")
else:
start_soc = message["flex-model"]["soc-at-start"] / 1000 # in MWh
roundtrip_efficiency = (
float(message["flex-model"]["roundtrip-efficiency"].replace("%", ""))
/ 100.0
)
storage_efficiency = (
float(message["flex-model"]["storage-efficiency"].replace("%", "")) / 100.0
)
soc_targets = message["flex-model"].get("soc-targets")
resolution = sensor.event_resolution
if soc_targets:
# Schedule length may be extended to accommodate targets that lie beyond the schedule's end
max_target_datetime = max(
[parse_datetime(soc_target["datetime"]) for soc_target in soc_targets]
)
expected_length_of_schedule = (
max(
parse_duration(message["duration"]),
max_target_datetime - parse_datetime(message["start"]),
)
/ resolution
)
else:
expected_length_of_schedule = parse_duration(message["duration"]) / resolution

# check results are in the database

# First, make sure the scheduler data source is now there
job.refresh() # catch meta info that was added on this very instance
scheduler_source = get_data_source_for_job(job)
assert scheduler_source is not None

# Then, check if the data was created
power_values = (
TimedBelief.query.filter(TimedBelief.sensor_id == sensor.id)
.filter(TimedBelief.source_id == scheduler_source.id)
.all()
)
consumption_schedule = pd.Series(
[-v.event_value for v in power_values],
index=pd.DatetimeIndex([v.event_start for v in power_values], freq=resolution),
) # For consumption schedules, positive values denote consumption. For the db, consumption is negative
assert len(consumption_schedule) == expected_length_of_schedule

# check targets, if applicable
if soc_targets:
soc_schedule = integrate_time_series(
consumption_schedule,
start_soc,
up_efficiency=roundtrip_efficiency**0.5,
down_efficiency=roundtrip_efficiency**0.5,
storage_efficiency=storage_efficiency,
decimal_precision=6,
)
print(consumption_schedule)
print(soc_schedule)
for target in soc_targets:
assert soc_schedule[target["datetime"]] == target["value"] / 1000

# try to retrieve the schedule through the /sensors/<id>/schedules/<job_id> [GET] api endpoint
get_schedule_response = client.get(
url_for("SensorAPI:get_schedule", id=sensor.id, uuid=job_id),
query_string={"duration": "PT48H"},
)
print("Server responded with:\n%s" % get_schedule_response.json)
assert get_schedule_response.status_code == 200
# assert get_schedule_response.json["type"] == "GetDeviceMessageResponse"
assert len(get_schedule_response.json["values"]) == expected_length_of_schedule

# Test that a shorter planning horizon yields the same result for the shorter planning horizon
get_schedule_response_short = client.get(
url_for("SensorAPI:get_schedule", id=sensor.id, uuid=job_id),
query_string={"duration": "PT6H"},
)
assert (
get_schedule_response_short.json["values"]
== get_schedule_response.json["values"][0:24]
)

# Test that a much longer planning horizon yields the same result (when there are only 2 days of prices)
get_schedule_response_long = client.get(
url_for("SensorAPI:get_schedule", id=sensor.id, uuid=job_id),
query_string={"duration": "PT1000H"},
)
assert (
get_schedule_response_long.json["values"][0:192]
== get_schedule_response.json["values"]
)

# Check whether the soc-at-start was persisted as an asset attribute
assert sensor.generic_asset.get_attribute("soc_in_mwh") == start_soc


@pytest.mark.parametrize(
"requesting_user", ["test_prosumer_user@seita.nl"], indirect=True
)