Skip to content

Commit

Permalink
Merge branch 'emf_subnetworks' into refactoring/use_TieLineUtil_getPa…
Browse files Browse the repository at this point in the history
…iredDanglingLine
  • Loading branch information
annetill committed Sep 28, 2023
2 parents 2ab4fc1 + b9afaa7 commit 73b5d3c
Show file tree
Hide file tree
Showing 7 changed files with 184 additions and 75 deletions.
9 changes: 0 additions & 9 deletions .github/workflows/maven.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,15 +25,6 @@ jobs:
with:
java-version: 17

- name: Checkout powsybl-open-loadflow latest sources
uses: actions/checkout@v1
with:
repository: powsybl/powsybl-open-loadflow
ref: refs/heads/main

- name: Build and install powsybl-open-loadflow with Maven
run: mvn --batch-mode -DskipTests=true --file ../powsybl-open-loadflow/pom.xml install

- name: Build with Maven
if: matrix.os == 'ubuntu-latest'
run: mvn --batch-mode -Pjacoco install
Expand Down
71 changes: 17 additions & 54 deletions emf/src/test/java/com/powsybl/emf/IGMmergeTests.java
Original file line number Diff line number Diff line change
Expand Up @@ -10,27 +10,26 @@
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import com.powsybl.cgmes.conformity.CgmesConformity1Catalog;
import com.powsybl.cgmes.conversion.export.*;
import com.powsybl.cgmes.conversion.CgmesExport;
import com.powsybl.cgmes.model.GridModelReferenceResources;
import com.powsybl.commons.datasource.GenericReadOnlyDataSource;
import com.powsybl.commons.datasource.ResourceSet;
import com.powsybl.commons.xml.XmlUtil;
import com.powsybl.iidm.modification.ReplaceTieLinesByLines;
import com.powsybl.iidm.network.*;
import com.powsybl.loadflow.LoadFlow;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.*;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.function.Consumer;

import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

/**
* @author Bertrand Rix <bertrand.rix at artelys.com>
Expand All @@ -53,43 +52,33 @@ void tearDown() throws IOException {

@Test
void igmsSubnetworksMerge() throws IOException {

Set<String> branchIds = new HashSet<>();
Set<String> generatorIds = new HashSet<>();
Set<String> voltageLevelIds = new HashSet<>();

// load two IGMs BE and NL
Map<String, Network> validNetworks = new HashMap<>();
GridModelReferenceResources resBE = CgmesConformity1Catalog.microGridBaseCaseBE();
Network igmBE = Network.read(resBE.dataSource());
String idBE = igmBE.getId();
igmBE.getBranches().forEach(b -> branchIds.add(b.getId()));
igmBE.getGenerators().forEach(g -> generatorIds.add(g.getId()));
igmBE.getVoltageLevels().forEach(v -> voltageLevelIds.add(v.getId()));

GridModelReferenceResources resNL = CgmesConformity1Catalog.microGridBaseCaseNL();
Network igmNL = Network.read(resNL.dataSource());
String idNL = igmNL.getId();
igmNL.getBranches().forEach(b -> branchIds.add(b.getId()));
igmNL.getGenerators().forEach(g -> generatorIds.add(g.getId()));
igmNL.getVoltageLevels().forEach(v -> voltageLevelIds.add(v.getId()));

// merge, serialize and deserialize the network
Network merged = Network.create("Merged", igmBE, igmNL);
Network subnetworkBE = merged.getSubnetwork(idBE);
Network subnetworkNL = merged.getSubnetwork(idNL);

validNetworks.put("Merged", merged);
validNetworks.put("BE", subnetworkBE);
validNetworks.put("NL", subnetworkNL);
Network merged = Network.merge("Merged", igmBE, igmNL);

// Check that we have subnetworks
assertEquals(2, merged.getSubnetworks().size());

LoadFlow.run(merged);

Path mergedDir = Files.createDirectories(tmpDir.resolve("subnetworksMerge"));
exportNetwork(merged, mergedDir, "BE_NL", validNetworks, Set.of("EQ", "TP", "SSH", "SV"));
exportNetwork(merged, mergedDir, "BE_NL", Set.of("EQ", "TP", "SSH", "SV"));

// copy the boundary set explicitly: it is not serialized and is needed for reimport
ResourceSet boundaries = CgmesConformity1Catalog.microGridBaseCaseBoundaries();
Expand Down Expand Up @@ -132,7 +121,7 @@ void testSubnetworksExports() throws IOException {
Network igmNL = Network.read(resNL.dataSource());

// merge, serialize and deserialize the network
Network merged = Network.create("Merged", igmBE, igmNL);
Network merged = Network.merge("Merged", igmBE, igmNL);
Network subnetworkBE = merged.getSubnetwork(idBE);

// Check that we have subnetworks
Expand All @@ -150,7 +139,7 @@ void testSubnetworksExports() throws IOException {

private Network exportAndLoad(Network network, String dirName, String country) throws IOException {
Path dir = Files.createDirectories(tmpDir.resolve(dirName));
exportNetwork(network, dir, country, Map.of(country, network), Set.of("EQ", "TP", "SSH"));
exportNetwork(network, dir, country, Set.of("EQ", "TP", "SSH"));

// copy the boundary set explicitly: it is not serialized and is needed for reimport
ResourceSet boundaries = CgmesConformity1Catalog.microGridBaseCaseBoundaries();
Expand Down Expand Up @@ -178,7 +167,7 @@ void cgmToCgmes() throws IOException {
LoadFlow.run(networkBENL);

Path mergedResourcesDir = Files.createDirectories(tmpDir.resolve("mergedResourcesExport"));
exportNetwork(networkBENL, mergedResourcesDir, "BE_NL", Map.of("BENL", networkBENL), Set.of("EQ", "TP", "SSH", "SV"));
exportNetwork(networkBENL, mergedResourcesDir, "BE_NL", Set.of("EQ", "TP", "SSH", "SV"));

// Copy the boundary set explicitly: it is not serialized and is needed for reimport
ResourceSet boundaries = CgmesConformity1Catalog.microGridBaseCaseBoundaries();
Expand All @@ -201,7 +190,7 @@ void cgmToCgmes() throws IOException {

@Test
void testCompareSubnetworksMergeAgainstAssembled() {
Network merged = Network.create("merged",
Network merged = Network.merge("merged",
Network.read(CgmesConformity1Catalog.microGridBaseCaseBE().dataSource()),
Network.read(CgmesConformity1Catalog.microGridBaseCaseNL().dataSource()));
// In merged, reset all p0, q0 values for all paired dangling lines
Expand All @@ -219,37 +208,11 @@ private static void validate(Network n, Set<String> branchIds, Set<String> gener
voltageLevelIds.forEach(v -> assertNotNull(n.getVoltageLevel(v)));
}

private static void exportNetwork(Network network, Path outputDir, String baseName, Map<String, Network> validNetworks, Set<String> profilesToExport) {
private static void exportNetwork(Network network, Path outputDir, String baseName, Set<String> profilesToExport) {
Objects.requireNonNull(network);
Path filenameEq = outputDir.resolve(baseName + "_EQ.xml");
Path filenameTp = outputDir.resolve(baseName + "_TP.xml");
Path filenameSsh = outputDir.resolve(baseName + "_SSH.xml");
Path filenameSv = outputDir.resolve(baseName + "_SV.xml");
CgmesExportContext context = new CgmesExportContext();
context.setScenarioTime(network.getCaseDate());
validNetworks.forEach((name, n) -> context.addIidmMappings(n));

if (profilesToExport.contains("EQ")) {
export(filenameEq, writer -> EquipmentExport.write(network, writer, context));
}
if (profilesToExport.contains("TP")) {
export(filenameTp, writer -> TopologyExport.write(network, writer, context));
}
if (profilesToExport.contains("SSH")) {
export(filenameSsh, writer -> SteadyStateHypothesisExport.write(network, writer, context));
}
if (profilesToExport.contains("SV")) {
export(filenameSv, writer -> StateVariablesExport.write(network, writer, context));
}
}

private static void export(Path file, Consumer<XMLStreamWriter> outConsumer) {
try (OutputStream out = Files.newOutputStream(file)) {
XMLStreamWriter writer = XmlUtil.initializeWriter(true, " ", out);
outConsumer.accept(writer);
} catch (IOException | XMLStreamException e) {
throw new RuntimeException(e);
}
Properties exportParams = new Properties();
exportParams.put(CgmesExport.PROFILES, String.join(",", profilesToExport));
network.write("CGMES", exportParams, outputDir.resolve(baseName));
}

private static void checkDanglingLine(DanglingLine dl1, DanglingLine dl2) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
*/
public final class CseGlskDocument implements GlskDocument {
private static final Logger LOGGER = LoggerFactory.getLogger(CseGlskDocument.class);
private static final String LINEAR_GLSK_NOT_HANDLED = "CSE GLSK document does not handle Linear GLSK conversion";
private static final String DATA_CHRONOLOGY_NOT_HANDLED = "CSE GLSK document only supports hourly data";
private static final String COUNTRIES_IN_AREA_KEY = "countriesInArea";
private static final String COUNTRIES_OUT_AREA_KEY = "countriesOutArea";

Expand Down Expand Up @@ -170,19 +170,14 @@ public List<GlskPoint> getGlskPoints(String zone) {
return cseGlskPoints.getOrDefault(zone, Collections.emptyList());
}

@Override
public ZonalData<SensitivityVariableSet> getZonalGlsks(Network network) {
throw new NotImplementedException(LINEAR_GLSK_NOT_HANDLED);
}

@Override
public ZonalData<SensitivityVariableSet> getZonalGlsks(Network network, Instant instant) {
throw new NotImplementedException(LINEAR_GLSK_NOT_HANDLED);
throw new NotImplementedException(DATA_CHRONOLOGY_NOT_HANDLED);
}

@Override
public ZonalDataChronology<SensitivityVariableSet> getZonalGlsksChronology(Network network) {
throw new NotImplementedException(LINEAR_GLSK_NOT_HANDLED);
throw new NotImplementedException(DATA_CHRONOLOGY_NOT_HANDLED);
}

@Override
Expand Down Expand Up @@ -214,11 +209,11 @@ private boolean isHybridCseGlskPoint(GlskPoint zonalGlskPoint) {

@Override
public ZonalData<Scalable> getZonalScalable(Network network, Instant instant) {
throw new NotImplementedException("CSE GLSK document does only support hourly data");
throw new NotImplementedException(DATA_CHRONOLOGY_NOT_HANDLED);
}

@Override
public ZonalDataChronology<Scalable> getZonalScalableChronology(Network network) {
throw new NotImplementedException("CSE GLSK document does only support hourly data");
throw new NotImplementedException(DATA_CHRONOLOGY_NOT_HANDLED);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,11 @@
import com.powsybl.glsk.commons.ZonalData;
import com.powsybl.iidm.modification.scalable.Scalable;
import com.powsybl.iidm.network.Network;
import org.apache.commons.lang3.NotImplementedException;
import org.junit.jupiter.api.Test;

import java.io.InputStream;
import java.time.Instant;
import java.util.List;

import static org.junit.jupiter.api.Assertions.*;
Expand Down Expand Up @@ -705,4 +707,15 @@ void checkNewMethodsExist() {
glskDocument = GlskDocumentImporters.importAndValidateGlsk(getClass().getResourceAsStream("/testGlskMerged.xml"));
assertNotNull(glskDocument);
}

@Test
void checkExceptionWhenCallingNotImplementedFeatures() {
    // Each call below targets a feature the CSE GLSK document does not implement
    // (chronology and instant-based access), so every one must throw NotImplementedException.
    Network network = Network.read("testCase.xiidm", getClass().getResourceAsStream("/testCase.xiidm"));
    GlskDocument glskDocument = GlskDocumentImporters.importGlsk(getClass().getResourceAsStream("/testGlsk.xml"));
    Instant now = Instant.now();
    assertThrows(NotImplementedException.class, () -> glskDocument.getZonalGlsksChronology(network));
    assertThrows(NotImplementedException.class, () -> glskDocument.getZonalScalableChronology(network));
    assertThrows(NotImplementedException.class, () -> glskDocument.getZonalGlsks(network, now));
    assertThrows(NotImplementedException.class, () -> glskDocument.getZonalScalable(network, now));
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
/*
* Copyright (c) 2023, RTE (http://www.rte-france.com)
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
* SPDX-License-Identifier: MPL-2.0
*/
package com.powsybl.glsk.cse;

import com.powsybl.glsk.api.GlskDocument;
import com.powsybl.glsk.api.io.GlskDocumentImporters;
import com.powsybl.glsk.commons.GlskException;
import com.powsybl.iidm.network.Network;
import com.powsybl.sensitivity.SensitivityVariableSet;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Checks conversion of linear CSE GLSK blocks (manual and proportional GSK/LSK)
 * into zonal sensitivity variable sets, and rejection of non-linear blocks.
 *
 * @author Sebastien Murgey {@literal <sebastien.murgey at rte-france.com>}
 */
class ZonalGlsksTest {
    private static final double EPSILON = 1e-3;

    // Loads the common test network shared by all test cases.
    private Network loadTestNetwork() {
        return Network.read("testCase.xiidm", getClass().getResourceAsStream("/testCase.xiidm"));
    }

    // Imports the GLSK fixture containing only linear blocks.
    private CseGlskDocument importLinearOnlyDocument() {
        return CseGlskDocument.importGlsk(getClass().getResourceAsStream("/testGlskOnlyLinear.xml"), false, true);
    }

    @Test
    void checkZonalGlskFromManualGskBlocks() {
        Network network = loadTestNetwork();
        SensitivityVariableSet sensi = importLinearOnlyDocument().getZonalGlsks(network).getData("FR_MANUAL");

        // Manual block weights 70/30 are normalized to fractions.
        assertNotNull(sensi);
        assertEquals(0.7, sensi.getVariable("FFR1AA1 _generator").getWeight(), EPSILON);
        assertEquals(0.3, sensi.getVariable("FFR3AA1 _generator").getWeight(), EPSILON);
    }

    @Test
    void checkZonalGlskFromProportionalGskBlocks() {
        Network network = loadTestNetwork();
        GlskDocument document = GlskDocumentImporters.importGlsk(getClass().getResourceAsStream("/testGlskOnlyLinear.xml"));
        SensitivityVariableSet sensi = document.getZonalGlsks(network).getData("FR_PROPGSK");

        // Proportional GSK: weights follow the generators' relative target powers.
        assertNotNull(sensi);
        assertEquals(0.286, sensi.getVariable("FFR1AA1 _generator").getWeight(), EPSILON);
        assertEquals(0.286, sensi.getVariable("FFR2AA1 _generator").getWeight(), EPSILON);
        assertEquals(0.428, sensi.getVariable("FFR3AA1 _generator").getWeight(), EPSILON);
    }

    @Test
    void checkZonalGlskFromProportionalGlskBlocks() {
        Network network = loadTestNetwork();
        SensitivityVariableSet sensi = importLinearOnlyDocument().getZonalGlsks(network).getData("FR_PROPGLSK");

        // Mixed GSK (factor 0.7) and LSK (factor 0.3) contributions.
        assertNotNull(sensi);
        assertEquals(0.2, sensi.getVariable("FFR1AA1 _generator").getWeight(), EPSILON);
        assertEquals(0.2, sensi.getVariable("FFR2AA1 _generator").getWeight(), EPSILON);
        assertEquals(0.3, sensi.getVariable("FFR3AA1 _generator").getWeight(), EPSILON);
        assertEquals(0.067, sensi.getVariable("FFR1AA1 _load").getWeight(), EPSILON);
        assertEquals(0.233, sensi.getVariable("FFR2AA1 _load").getWeight(), EPSILON);
    }

    @Test
    void checkZonalGlskFailsWithNonLinearGlskBlocks() {
        Network network = loadTestNetwork();
        // The full fixture contains non-linear blocks, which linear conversion rejects.
        CseGlskDocument document = CseGlskDocument.importGlsk(getClass().getResourceAsStream("/testGlsk.xml"), false, true);
        assertThrows(GlskException.class, () -> document.getZonalGlsks(network));
    }
}
79 changes: 79 additions & 0 deletions glsk/glsk-document-cse/src/test/resources/testGlskOnlyLinear.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
<?xml version="1.0" encoding="utf-8"?>
<GSKDocument DtdVersion="5" DtdRelease="0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="gsk-document.xsd">
<DocumentIdentification v="testGlsk"/>
<DocumentVersion v="1"/>
<DocumentType v="Z02"/>
<ProcessType v="Z02"/>
<SenderIdentification v="10VCSEEH4MMD-NDO" codingScheme="A01"/>
<SenderRole v="A36"/>
<ReceiverIdentification v="10X1001A1001A345" codingScheme="A01"/>
<ReceiverRole v="A04"/>
<CreationDateTime v="2020-09-15T17:59:09Z"/>
<GSKTimeInterval v="2020-09-16T22:00Z/2020-09-16T23:00Z"/>
<Domain v="10YDOM-1001A061T" codingScheme="A01"/>
<TimeSeries>
<TimeSeriesIdentification v="1"/>
<BusinessType v="Z02"/>
<Area v="FR_MANUAL" codingScheme="A01"/>
<Name v="FR_MANUAL"/>
<TimeInterval v="2020-09-16T22:00Z/2020-09-16T22:59Z"/>
<ManualGSKBlock>
<Factor v="1"/>
<Node>
<Name v="FFR1AA1 "/>
<Factor v="70"/>
</Node>
<Node>
<Name v="FFR3AA1 "/>
<Factor v="30"/>
</Node>
</ManualGSKBlock>
</TimeSeries>
<TimeSeries>
<TimeSeriesIdentification v="1"/>
<BusinessType v="Z02"/>
<Area v="FR_PROPGSK" codingScheme="A01"/>
<Name v="FR_PROPGSK"/>
<TimeInterval v="2020-09-16T22:00Z/2020-09-16T22:59Z"/>
<PropGSKBlock>
<Factor v="1"/>
<Node>
<Name v="FFR1AA1 "/>
</Node>
<Node>
<Name v="FFR2AA1 "/>
</Node>
<Node>
<Name v="FFR3AA1 "/>
</Node>
</PropGSKBlock>
</TimeSeries>
<TimeSeries>
<TimeSeriesIdentification v="1"/>
<BusinessType v="Z02"/>
<Area v="FR_PROPGLSK" codingScheme="A01"/>
<Name v="FR_PROPGLSK"/>
<TimeInterval v="2020-09-16T22:00Z/2020-09-16T22:59Z"/>
<PropGSKBlock>
<Factor v="0.7"/>
<Node>
<Name v="FFR1AA1 "/>
</Node>
<Node>
<Name v="FFR2AA1 "/>
</Node>
<Node>
<Name v="FFR3AA1 "/>
</Node>
</PropGSKBlock>
<PropLSKBlock>
<Factor v="0.3"/>
<Node>
<Name v="FFR1AA1 "/>
</Node>
<Node>
<Name v="FFR2AA1 "/>
</Node>
</PropLSKBlock>
</TimeSeries>
</GSKDocument>
Loading

0 comments on commit 73b5d3c

Please sign in to comment.