From a494623844590afd3acb53e387811b6c13def60b Mon Sep 17 00:00:00 2001
From: Akshay Waghmare <120705327+akshayw1@users.noreply.github.com>
Date: Wed, 18 Dec 2024 22:43:08 +0530
Subject: [PATCH] Rename gspconsumer to pvliveconsumer for clarity (#93)
* Rename gspconsumer to pvliveconsumer for clarity
* Update references in the README to the new name
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
---------
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.all-contributorsrc | 2 +-
.bumpversion.cfg | 2 +-
.github/workflows/release.yaml | 2 +-
.github/workflows/test-docker.yaml | 2 +-
README.md | 14 +++++++-------
infrastructure/docker/Dockerfile | 4 ++--
{gspconsumer => pvliveconsumer}/__init__.py | 0
{gspconsumer => pvliveconsumer}/app.py | 14 +++++++-------
{gspconsumer => pvliveconsumer}/backup.py | 0
.../data/uk_gsp_locations.csv | 0
{gspconsumer => pvliveconsumer}/gsps.py | 0
{gspconsumer => pvliveconsumer}/nitghtime.py | 0
{gspconsumer => pvliveconsumer}/time.py | 0
scripts/gsp_name_update/merge_labels.py | 6 +++---
scripts/gsp_name_update/update_region_name.py | 4 ++--
scripts/v3_to_v4/plot.py | 4 ++--
scripts/v3_to_v4/plot_gsp.py | 4 ++--
scripts/v3_to_v4/plot_gsp_analysis.py | 4 ++--
scripts/v3_to_v4/update_installed_capacity.py | 6 +++---
setup.py | 6 +++---
test-docker-compose.yml | 6 +++---
tests/test_app.py | 2 +-
tests/test_backup.py | 2 +-
tests/test_gsps.py | 2 +-
tests/test_nighttime.py | 2 +-
tests/test_time.py | 2 +-
26 files changed, 45 insertions(+), 45 deletions(-)
rename {gspconsumer => pvliveconsumer}/__init__.py (100%)
rename {gspconsumer => pvliveconsumer}/app.py (95%)
rename {gspconsumer => pvliveconsumer}/backup.py (100%)
rename {gspconsumer => pvliveconsumer}/data/uk_gsp_locations.csv (100%)
rename {gspconsumer => pvliveconsumer}/gsps.py (100%)
rename {gspconsumer => pvliveconsumer}/nitghtime.py (100%)
rename {gspconsumer => pvliveconsumer}/time.py (100%)
diff --git a/.all-contributorsrc b/.all-contributorsrc
index ff866c8..f66c313 100644
--- a/.all-contributorsrc
+++ b/.all-contributorsrc
@@ -34,7 +34,7 @@
}
],
"contributorsPerLine": 7,
- "projectName": "GSPConsumer",
+ "projectName": "pvlive-consumer",
"projectOwner": "openclimatefix",
"repoType": "github",
"repoHost": "https://github.com",
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index c7f8648..92eae97 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -4,6 +4,6 @@ tag = True
current_version = 1.2.1
message = Bump version: {current_version} → {new_version} [skip ci]
-[bumpversion:file:gspconsumer/__init__.py]
+[bumpversion:file:pvliveconsumer/__init__.py]
search = __version__ = "{current_version}"
replace = __version__ = "{new_version}"
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 6625b7b..7d05b2e 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -12,5 +12,5 @@ jobs:
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
with:
- image_base_name: gspconsumer
+ image_base_name: pvliveconsumer
docker_file: infrastructure/docker/Dockerfile
diff --git a/.github/workflows/test-docker.yaml b/.github/workflows/test-docker.yaml
index 6e9ff59..471f13d 100644
--- a/.github/workflows/test-docker.yaml
+++ b/.github/workflows/test-docker.yaml
@@ -18,7 +18,7 @@ jobs:
run: docker compose -f test-docker-compose.yml build
- name: Run tests inside the container
- run: docker compose -f test-docker-compose.yml run gspconsumer
+ run: docker compose -f test-docker-compose.yml run pvliveconsumer
env: # Or as an environment variable
API_KEY: ${{ secrets.API_KEY }}
SYSTEM_ID: ${{ secrets.SYSTEM_ID }}
diff --git a/README.md b/README.md
index 3f88035..87b7aee 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# GSPConsumer
+# PVLiveConsumer
[![All Contributors](https://img.shields.io/badge/all_contributors-3-orange.svg?style=flat-square)](#contributors-)
@@ -8,7 +8,7 @@ Collect GSP solar generation data from PVlive from Sheffield Solar
To the run the appication install this library and run
```
-python gspconsumer/app.py
+python pvliveconsumer/app.py
```
The environmental variables are
@@ -23,7 +23,7 @@ The environmental variables are
These options can also be enter like this:
```
-python gspconsumer/app.py --n-gsps=10
+python pvliveconsumer/app.py --n-gsps=10
```
## Tests
@@ -32,7 +32,7 @@ To run tests use the following command
```bash
docker stop $(docker ps -a -q)
docker-compose -f test-docker-compose.yml build
-docker-compose -f test-docker-compose.yml run gspconsumer
+docker-compose -f test-docker-compose.yml run pvliveconsumer
```
## Contributors ✨
@@ -45,9 +45,9 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
diff --git a/infrastructure/docker/Dockerfile b/infrastructure/docker/Dockerfile
index a1e4328..540a611 100644
--- a/infrastructure/docker/Dockerfile
+++ b/infrastructure/docker/Dockerfile
@@ -17,7 +17,7 @@ RUN pip install -r app/requirements.txt
Run pip install git+https://github.com/SheffieldSolar/PV_Live-API#pvlive_api
# copy library files
-COPY gspconsumer/ app/gspconsumer/
+COPY pvliveconsumer/ app/pvliveconsumer/
COPY tests/ app/tests/
# change to app folder
@@ -28,4 +28,4 @@ RUN pip install -e .
RUN if [ "$TESTING" = 1 ]; then pip install pytest pytest-cov coverage; fi
-CMD ["python", "-u","gspconsumer/app.py"]
+CMD ["python", "-u","pvliveconsumer/app.py"]
diff --git a/gspconsumer/__init__.py b/pvliveconsumer/__init__.py
similarity index 100%
rename from gspconsumer/__init__.py
rename to pvliveconsumer/__init__.py
diff --git a/gspconsumer/app.py b/pvliveconsumer/app.py
similarity index 95%
rename from gspconsumer/app.py
rename to pvliveconsumer/app.py
index 39f119b..394c06f 100644
--- a/gspconsumer/app.py
+++ b/pvliveconsumer/app.py
@@ -20,11 +20,11 @@
from pvlive_api import PVLive
from sqlalchemy.orm import Session
-import gspconsumer
-from gspconsumer.backup import make_gsp_yields_from_national
-from gspconsumer.gsps import filter_gsps_which_have_new_data, get_gsps
-from gspconsumer.nitghtime import make_night_time_zeros
-from gspconsumer.time import check_uk_london_hour
+import pvliveconsumer
+from pvliveconsumer.backup import make_gsp_yields_from_national
+from pvliveconsumer.gsps import filter_gsps_which_have_new_data, get_gsps
+from pvliveconsumer.nitghtime import make_night_time_zeros
+from pvliveconsumer.time import check_uk_london_hour
logging.basicConfig(
level=getattr(logging, os.getenv("LOGLEVEL", "DEBUG")),
@@ -37,7 +37,7 @@
)
sentry_sdk.set_tag("app_name", "GSP_consumer")
-sentry_sdk.set_tag("version", gspconsumer.__version__)
+sentry_sdk.set_tag("version", pvliveconsumer.__version__)
@click.command()
@@ -95,7 +95,7 @@ def app(
This is to solve clock change issues when running with cron in UTC.
"""
- logger.info(f"Running GSP Consumer app ({gspconsumer.__version__}) for regime {regime}")
+ logger.info(f"Running GSP Consumer app ({pvliveconsumer.__version__}) for regime {regime}")
if uk_london_time_hour is not None:
check_uk_london_hour(hour=int(uk_london_time_hour))
diff --git a/gspconsumer/backup.py b/pvliveconsumer/backup.py
similarity index 100%
rename from gspconsumer/backup.py
rename to pvliveconsumer/backup.py
diff --git a/gspconsumer/data/uk_gsp_locations.csv b/pvliveconsumer/data/uk_gsp_locations.csv
similarity index 100%
rename from gspconsumer/data/uk_gsp_locations.csv
rename to pvliveconsumer/data/uk_gsp_locations.csv
diff --git a/gspconsumer/gsps.py b/pvliveconsumer/gsps.py
similarity index 100%
rename from gspconsumer/gsps.py
rename to pvliveconsumer/gsps.py
diff --git a/gspconsumer/nitghtime.py b/pvliveconsumer/nitghtime.py
similarity index 100%
rename from gspconsumer/nitghtime.py
rename to pvliveconsumer/nitghtime.py
diff --git a/gspconsumer/time.py b/pvliveconsumer/time.py
similarity index 100%
rename from gspconsumer/time.py
rename to pvliveconsumer/time.py
diff --git a/scripts/gsp_name_update/merge_labels.py b/scripts/gsp_name_update/merge_labels.py
index 76ccb20..02c5b62 100644
--- a/scripts/gsp_name_update/merge_labels.py
+++ b/scripts/gsp_name_update/merge_labels.py
@@ -4,11 +4,11 @@
import pandas as pd
# importing file with new ids
-new_df = pd.read_csv("GSPConsumer/gsp_name_update/gsp_new_ids.csv")
+new_df = pd.read_csv("pvliveconsumer/gsp_name_update/gsp_new_ids.csv")
print(new_df.head())
# importing file with old ids
-old_df = pd.read_csv("GSPConsumer/gsp_name_update/gsp_old_ids.csv")
+old_df = pd.read_csv("pvliveconsumer/gsp_name_update/gsp_old_ids.csv")
print(old_df.head())
@@ -22,7 +22,7 @@
print("subset removed")
-joined_df.to_csv("GSPConsumer/gsp_name_update/gsp_new_ids_and_names.csv")
+joined_df.to_csv("pvliveconsumer/gsp_name_update/gsp_new_ids_and_names.csv")
print(joined_df.head())
diff --git a/scripts/gsp_name_update/update_region_name.py b/scripts/gsp_name_update/update_region_name.py
index 1eaf167..9352427 100644
--- a/scripts/gsp_name_update/update_region_name.py
+++ b/scripts/gsp_name_update/update_region_name.py
@@ -15,7 +15,7 @@
from nowcasting_datamodel.models.base import Base_Forecast
from nowcasting_datamodel.read.read import get_location
-import gspconsumer
+import pvliveconsumer
# laod database secret from AWS secrets
client = boto3.client("secretsmanager")
@@ -30,7 +30,7 @@
connection = DatabaseConnection(url=db_url, base=Base_Forecast, echo=True)
# load new region names
-folder = os.path.dirname(gspconsumer.__file__) + "/../gsp_name_update"
+folder = os.path.dirname(pvliveconsumer.__file__) + "/../gsp_name_update"
data_df = pd.read_csv(f"{folder}/gsp_new_ids_and_names-edited.csv")
# this has columns 'gsp_id' and 'region_name'
diff --git a/scripts/v3_to_v4/plot.py b/scripts/v3_to_v4/plot.py
index 9076da1..c533d6c 100644
--- a/scripts/v3_to_v4/plot.py
+++ b/scripts/v3_to_v4/plot.py
@@ -8,12 +8,12 @@
import geopandas as gpd
import plotly.graph_objects as go
-import gspconsumer
+import pvliveconsumer
file_v3 = "data/gsp_regions_20181031.geojson"
file_v4 = "data/gsp_regions_20220314.geojson"
-dir = os.path.dirname(gspconsumer.__file__) + "/../scripts/v3_to_v4/"
+dir = os.path.dirname(pvliveconsumer.__file__) + "/../scripts/v3_to_v4/"
v3_gdf = gpd.read_file(dir + file_v3) # 329
v4_gdf = gpd.read_file(dir + file_v4) # 333
diff --git a/scripts/v3_to_v4/plot_gsp.py b/scripts/v3_to_v4/plot_gsp.py
index 5cbd88a..9e900fd 100644
--- a/scripts/v3_to_v4/plot_gsp.py
+++ b/scripts/v3_to_v4/plot_gsp.py
@@ -10,12 +10,12 @@
import geopandas as gpd
import plotly.graph_objects as go
-import gspconsumer
+import pvliveconsumer
file_v3 = "data/gsp_regions_20181031.geojson"
file_v4 = "data/gsp_regions_20220314.geojson"
-dir = os.path.dirname(gspconsumer.__file__) + "/../scripts/v3_to_v4/"
+dir = os.path.dirname(pvliveconsumer.__file__) + "/../scripts/v3_to_v4/"
v3_gdf = gpd.read_file(dir + file_v3) # 329
v4_gdf = gpd.read_file(dir + file_v4) # 333
diff --git a/scripts/v3_to_v4/plot_gsp_analysis.py b/scripts/v3_to_v4/plot_gsp_analysis.py
index 2c41c6d..8014968 100644
--- a/scripts/v3_to_v4/plot_gsp_analysis.py
+++ b/scripts/v3_to_v4/plot_gsp_analysis.py
@@ -18,12 +18,12 @@
# get installed capacity from pv live (this takes ~30 seconds)
from nowcasting_dataset.data_sources.gsp.pvlive import get_installed_capacity
-import gspconsumer
+import pvliveconsumer
file_v3 = "data/gsp_regions_20181031.geojson"
file_v4 = "data/gsp_regions_20220314.geojson"
-dir = os.path.dirname(gspconsumer.__file__) + "/../scripts/v3_to_v4/"
+dir = os.path.dirname(pvliveconsumer.__file__) + "/../scripts/v3_to_v4/"
# load data
v3_gdf = gpd.read_file(dir + file_v3) # 329
diff --git a/scripts/v3_to_v4/update_installed_capacity.py b/scripts/v3_to_v4/update_installed_capacity.py
index 5fee295..9d4ce2c 100644
--- a/scripts/v3_to_v4/update_installed_capacity.py
+++ b/scripts/v3_to_v4/update_installed_capacity.py
@@ -12,7 +12,7 @@
from nowcasting_datamodel.models.base import Base_Forecast
from nowcasting_datamodel.read.read import get_all_locations
-import gspconsumer
+import pvliveconsumer
# get db connection from database
client = boto3.client("secretsmanager")
@@ -26,7 +26,7 @@
# get installed cpapacity from file
file = "data/pv_capacity_by_20220314_GSP.csv"
-dir = os.path.dirname(gspconsumer.__file__) + "/../scripts/v3_to_v4/"
+dir = os.path.dirname(pvliveconsumer.__file__) + "/../scripts/v3_to_v4/"
installed_capacity = pd.read_csv(f"{dir}/{file}")
# gsps names
@@ -39,7 +39,7 @@
# get installed cpapacity from file
file = "data/pv_capacity_by_20220314_GSP.csv"
-dir = os.path.dirname(gspconsumer.__file__) + "/../scripts/v3_to_v4/"
+dir = os.path.dirname(pvliveconsumer.__file__) + "/../scripts/v3_to_v4/"
installed_capacity = pd.read_csv(f"{dir}/{file}")
# add national
diff --git a/setup.py b/setup.py
index c80825e..167987e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-"""Setup file for gspconsumer package."""
+"""Setup file for pvliveconsumer package."""
from pathlib import Path
from setuptools import find_packages, setup
@@ -7,14 +7,14 @@
install_requires = (this_directory / "requirements.txt").read_text().splitlines()
# get version
-with open("gspconsumer/__init__.py") as f:
+with open("pvliveconsumer/__init__.py") as f:
for line in f:
if line.startswith("__version__"):
_, _, version = line.replace("'", "").split()
version = version.replace('"', "")
setup(
- name="gspconsumer",
+ name="pvliveconsumer",
version=version,
packages=find_packages(),
install_requires=install_requires,
diff --git a/test-docker-compose.yml b/test-docker-compose.yml
index 3de3e8b..5166b2e 100644
--- a/test-docker-compose.yml
+++ b/test-docker-compose.yml
@@ -10,7 +10,7 @@ services:
ports:
- "5432:5432"
- gspconsumer:
+ pvliveconsumer:
build:
context: .
dockerfile: infrastructure/docker/Dockerfile
@@ -23,7 +23,7 @@ services:
- SYSTEM_ID=$SYSTEM_ID
- DATA_SERVICE_URL=https://pvoutput.org/
command: >
- bash -c "pytest --cov=./gspconsumer
+ bash -c "pytest --cov=./pvliveconsumer
&& coverage report -m
&& coverage xml
&& cp .coverage ./tests/
@@ -32,4 +32,4 @@ services:
- "postgres"
volumes:
- ./tests/:/app/tests
- - ./gspconsumer/:/app/gspconsumer
+ - ./pvliveconsumer/:/app/pvliveconsumer
diff --git a/tests/test_app.py b/tests/test_app.py
index 74b4844..7041ec2 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -3,7 +3,7 @@
from nowcasting_datamodel.models.gsp import GSPYieldSQL, Location, LocationSQL
from nowcasting_datamodel.models.models import national_gb_label
-from gspconsumer.app import app, pull_data_and_save
+from pvliveconsumer.app import app, pull_data_and_save
from freezegun import freeze_time
diff --git a/tests/test_backup.py b/tests/test_backup.py
index 69884cb..904d13f 100644
--- a/tests/test_backup.py
+++ b/tests/test_backup.py
@@ -5,7 +5,7 @@
from nowcasting_datamodel.models.gsp import GSPYield, GSPYieldSQL, Location, LocationSQL
from nowcasting_datamodel.read.read_gsp import get_latest_gsp_yield
-from gspconsumer.backup import get_number_gsp_yields, make_gsp_yields_from_national
+from pvliveconsumer.backup import get_number_gsp_yields, make_gsp_yields_from_national
def add_national_gsp_yields(db_session):
diff --git a/tests/test_gsps.py b/tests/test_gsps.py
index 69815a6..c481428 100644
--- a/tests/test_gsps.py
+++ b/tests/test_gsps.py
@@ -5,7 +5,7 @@
from nowcasting_datamodel.models.gsp import GSPYield, Location, LocationSQL
from nowcasting_datamodel.read.read_gsp import get_latest_gsp_yield
-from gspconsumer.gsps import filter_gsps_which_have_new_data, get_gsps
+from pvliveconsumer.gsps import filter_gsps_which_have_new_data, get_gsps
def test_get_gsps(db_session):
diff --git a/tests/test_nighttime.py b/tests/test_nighttime.py
index 602ef15..928a5bc 100644
--- a/tests/test_nighttime.py
+++ b/tests/test_nighttime.py
@@ -1,4 +1,4 @@
-from gspconsumer.nitghtime import make_night_time_zeros
+from pvliveconsumer.nitghtime import make_night_time_zeros
from datetime import datetime
from nowcasting_datamodel.models.gsp import LocationSQL, GSPYieldSQL
import pandas as pd
diff --git a/tests/test_time.py b/tests/test_time.py
index 9017de8..48b0ded 100644
--- a/tests/test_time.py
+++ b/tests/test_time.py
@@ -1,7 +1,7 @@
import pytest
from freezegun import freeze_time
-from gspconsumer.time import check_uk_london_hour
+from pvliveconsumer.time import check_uk_london_hour
def test_time_none():
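
The patch above renames every module path and import from `gspconsumer` to `pvliveconsumer`. As a companion to it, here is a minimal sketch (not part of the patch; the helper name, skip list, and entry point are assumptions for illustration) of how one might scan a checkout for stale references to the old package name after applying the rename:

```python
"""Hypothetical helper: flag files that still mention the old package name."""
from pathlib import Path

OLD_NAME = "gspconsumer"
# Directories that commonly hold generated or vendored files; adjust as needed.
SKIP_DIRS = {".git", "__pycache__", ".mypy_cache"}


def find_stale_references(root: str = ".") -> list[Path]:
    """Return files under *root* whose text still contains the old package name."""
    stale = []
    for path in Path(root).rglob("*"):
        if path.is_dir() or any(part in SKIP_DIRS for part in path.parts):
            continue
        try:
            text = path.read_text(encoding="utf-8")
        except (UnicodeDecodeError, OSError):
            continue  # skip binary or unreadable files
        if OLD_NAME in text:
            stale.append(path)
    return stale


if __name__ == "__main__":
    for path in find_stale_references():
        print(f"still references {OLD_NAME}: {path}")
```

Running a check like this from the repository root would surface any file the rename missed, such as documentation or CI configuration that still points at the old module path.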