diff --git a/.gitignore b/.gitignore
index 0c317854..f086f1ab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,7 @@ minio/*
 rabbitmq/*
 .vscode/
 staticfiles/
-sample_data/*/*
+sample_data/downloads/*
 
 # osmnx data cache folder
 cache
diff --git a/sample_data/ingest_sample_data.py b/sample_data/ingest_sample_data.py
index 064b4170..fa77a8ff 100644
--- a/sample_data/ingest_sample_data.py
+++ b/sample_data/ingest_sample_data.py
@@ -10,13 +10,16 @@ from uvdat.core.models import Chart, Context, Dataset, FileItem
 
 
+USE_CASE_FOLDER = Path('sample_data/use_cases')
+
+
 def ingest_file(file_info, index=0, dataset=None, chart=None):
     file_path = file_info.get('path')
     file_name = file_info.get('name', file_path.split('/')[-1])
     file_url = file_info.get('url')
     file_metadata = file_info.get('metadata', {})
-    file_location = Path('sample_data', file_path)
+    file_location = Path('sample_data/downloads', file_path)
     file_type = file_path.split('.')[-1]
     if not file_location.exists():
         print(f'\t Downloading data file {file_name}.')
@@ -46,9 +49,9 @@ def ingest_file(file_info, index=0, dataset=None, chart=None):
             new_file_item.file.save(file_path, ContentFile(f.read()))
 
 
-def ingest_contexts():
+def ingest_contexts(use_case):
     print('Creating Context objects...')
-    with open('sample_data/contexts.json') as contexts_json:
+    with open(str(USE_CASE_FOLDER / use_case / 'contexts.json')) as contexts_json:
         data = json.load(contexts_json)
         for context in data:
             print('\t- ', context['name'])
@@ -66,9 +69,9 @@ def ingest_contexts():
         context_for_setting.datasets.set(Dataset.objects.filter(name__in=context['datasets']))
 
 
-def ingest_charts():
+def ingest_charts(use_case):
     print('Creating Chart objects...')
-    with open('sample_data/charts.json') as charts_json:
+    with open(str(USE_CASE_FOLDER / use_case / 'charts.json')) as charts_json:
         data = json.load(charts_json)
         for chart in data:
             print('\t- ', chart['name'])
@@ -100,9 +103,9 @@ def ingest_charts():
             )
 
 
-def ingest_datasets(include_large=False, dataset_indexes=None):
+def ingest_datasets(use_case, include_large=False, dataset_indexes=None):
     print('Creating Dataset objects...')
-    with open('sample_data/datasets.json') as datasets_json:
+    with open(str(USE_CASE_FOLDER / use_case / 'datasets.json')) as datasets_json:
         data = json.load(datasets_json)
         for index, dataset in enumerate(data):
             if dataset_indexes is None or index in dataset_indexes:
diff --git a/sample_data/ingest_sample_data_output.txt b/sample_data/ingest_sample_data_output.txt
deleted file mode 100644
index e510a6d5..00000000
--- a/sample_data/ingest_sample_data_output.txt
+++ /dev/null
@@ -1,112 +0,0 @@
-root@70f88cf67dbf:/opt/django-project# python manage.py populate --include_large
-------------------------------------
-Populating server with sample data...
-Creating Context objects...
- - Boston Transportation
- Context Boston Transportation created.
- - DC Transportation
- Context DC Transportation created.
- - Boston-Washington Transportation
- Context Boston-Washington Transportation created.
-Creating Chart objects...
- - Boston Harbor Daily Tide Levels
- Chart Boston Harbor Daily Tide Levels created.
- FileItem tide_level_data.csv created.
- Converting data for Boston Harbor Daily Tide Levels...
- Saved converted data for chart Boston Harbor Daily Tide Levels.
-Creating Dataset objects...
- - MBTA Rapid Transit
- Dataset MBTA Rapid Transit created.
- FileItem mbta_rapid_transit.zip created.
- Converting data for MBTA Rapid Transit...
- VectorMapLayer 1 created.
- 31 vector tiles created.
- 158 nodes and 164 edges created.
- - MBTA Commuter Rail
- Dataset MBTA Commuter Rail created.
- FileItem commuter_rail.zip created.
- Converting data for MBTA Commuter Rail...
- VectorMapLayer 2 created.
- 686 vector tiles created.
- 268 nodes and 269 edges created.
- - Boston Hurricane Surge Inundation Zones
- Dataset Boston Hurricane Surge Inundation Zones created.
- FileItem hurr_inun.zip created.
- Converting data for Boston Hurricane Surge Inundation Zones...
- VectorMapLayer 3 created.
- 295 vector tiles created.
- - Boston FEMA National Flood Hazard
- Dataset Boston FEMA National Flood Hazard created.
- FileItem flood_hazard_fema.zip created.
- Converting data for Boston FEMA National Flood Hazard...
- VectorMapLayer 4 created.
- 587 vector tiles created.
- - Massachusetts Elevation Data
- Dataset Massachusetts Elevation Data created.
- FileItem easternmass.tif created.
- Converting data for Massachusetts Elevation Data...
- RasterMapLayer 1 created.
- - Boston Neighborhoods
- Dataset Boston Neighborhoods created.
- FileItem neighborhoods2020.json created.
- Converting data for Boston Neighborhoods...
- VectorMapLayer 5 created.
- 26 vector tiles created.
- 24 regions created.
- - Boston Census 2020 Block Groups
- Dataset Boston Census 2020 Block Groups created.
- FileItem blockgroups.zip created.
- Converting data for Boston Census 2020 Block Groups...
- VectorMapLayer 6 created.
- 26 vector tiles created.
- 581 regions created.
- - Boston Zip Codes
- Dataset Boston Zip Codes created.
- FileItem zipcodes.zip created.
- Converting data for Boston Zip Codes...
- VectorMapLayer 7 created.
- 780 vector tiles created.
- 539 regions created.
- - Boston Sea Level Rises
- Dataset Boston Sea Level Rises created.
- FileItem 9in_rise.geojson created.
- FileItem 21in_rise.geojson created.
- FileItem 36in_rise.geojson created.
- Converting data for Boston Sea Level Rises...
- VectorMapLayer 8 created.
- 26 vector tiles created.
- VectorMapLayer 9 created.
- 26 vector tiles created.
- VectorMapLayer 10 created.
- 26 vector tiles created.
- - Boston 10-Year Flood Events
- Dataset Boston 10-Year Flood Events created.
- FileItem 9in_10yr_flood.geojson created.
- FileItem 21in_10yr_flood.geojson created.
- FileItem 36in_10yr_flood.geojson created.
- Converting data for Boston 10-Year Flood Events...
- VectorMapLayer 11 created.
- 26 vector tiles created.
- VectorMapLayer 12 created.
- 26 vector tiles created.
- VectorMapLayer 13 created.
- 26 vector tiles created.
- - Boston 100-Year Flood Events
- Dataset Boston 100-Year Flood Events created.
- FileItem 9in_100yr_flood.geojson created.
- FileItem 21in_100yr_flood.geojson created.
- FileItem 36in_100yr_flood.geojson created.
- Converting data for Boston 100-Year Flood Events...
- VectorMapLayer 14 created.
- 26 vector tiles created.
- VectorMapLayer 15 created.
- 26 vector tiles created.
- VectorMapLayer 16 created.
- 26 vector tiles created.
- - DC Metro
- Dataset DC Metro created.
- FileItem DC_Metro.zip created.
- Converting data for DC Metro...
- VectorMapLayer 17 created.
- 56 vector tiles created.
- 98 nodes and 134 edges created.
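Note (not part of the diff): with the ingest_sample_data.py changes above, the ingest helpers now take a use case name, read their JSON manifests from sample_data/use_cases/<use_case>/, and cache downloaded files under sample_data/downloads/ (the only path still ignored by .gitignore). A minimal sketch of the new call pattern follows; it assumes the module is importable as sample_data.ingest_sample_data and runs inside a configured Django environment, and the chosen index filter is illustrative only.

    # Sketch only -- assumes Django is configured and the import path below is valid.
    from sample_data.ingest_sample_data import ingest_charts, ingest_contexts, ingest_datasets

    use_case = 'boston_floods'  # a folder under sample_data/use_cases/

    # Reads sample_data/use_cases/boston_floods/datasets.json; any files it needs
    # to fetch are cached under sample_data/downloads/.
    ingest_datasets(use_case, include_large=False, dataset_indexes=[0, 1])
    ingest_contexts(use_case)  # reads contexts.json for the same use case
    ingest_charts(use_case)    # reads charts.json for the same use case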
diff --git a/sample_data/charts.json b/sample_data/use_cases/boston_floods/charts.json
similarity index 100%
rename from sample_data/charts.json
rename to sample_data/use_cases/boston_floods/charts.json
diff --git a/sample_data/cities.json b/sample_data/use_cases/boston_floods/cities.json
similarity index 100%
rename from sample_data/cities.json
rename to sample_data/use_cases/boston_floods/cities.json
diff --git a/sample_data/contexts.json b/sample_data/use_cases/boston_floods/contexts.json
similarity index 55%
rename from sample_data/contexts.json
rename to sample_data/use_cases/boston_floods/contexts.json
index 0f6428b2..409a7911 100644
--- a/sample_data/contexts.json
+++ b/sample_data/use_cases/boston_floods/contexts.json
@@ -30,27 +30,5 @@
         "datasets": [
             "DC Metro"
         ]
-    },
-    {
-        "name": "Boston-Washington Transportation",
-        "default_map_center": [
-            40.5,
-            -74.5
-        ],
-        "default_map_zoom": 8,
-        "datasets": [
-            "MBTA Rapid Transit",
-            "MBTA Commuter Rail",
-            "Massachusetts Elevation Data",
-            "Boston Hurricane Surge Inundation Zones",
-            "Bsoton FEMA National Flood Hazard",
-            "Boston Neighborhoods",
-            "Boston Census 2020 Block Groups",
-            "Boston Zip Codes",
-            "Boston Sea Level Rises",
-            "Boston 10-Year Flood Events",
-            "Boston 100-Year Flood Events",
-            "DC Metro"
-        ]
     }
 ]
diff --git a/sample_data/datasets.json b/sample_data/use_cases/boston_floods/datasets.json
similarity index 100%
rename from sample_data/datasets.json
rename to sample_data/use_cases/boston_floods/datasets.json
diff --git a/uvdat/core/management/commands/populate.py b/uvdat/core/management/commands/populate.py
index 8c8abffa..ff4f07cd 100644
--- a/uvdat/core/management/commands/populate.py
+++ b/uvdat/core/management/commands/populate.py
@@ -7,6 +7,11 @@ class Command(BaseCommand):
     requires_migrations_checks = True
 
     def add_arguments(self, parser):
+        parser.add_argument(
+            'use_case',
+            choices=['boston_floods', 'new_york_energy'],
+            help='Sample data collection to load',
+        )
         parser.add_argument(
             '--include_large',
             action='store_true',
@@ -15,15 +20,17 @@ def add_arguments(self, parser):
         parser.add_argument('--dataset_indexes', nargs='*', type=int)
 
     def handle(self, *args, **kwargs):
-        print('Populating server with sample data...')
+        use_case = kwargs['use_case']
         include_large = kwargs['include_large']
         dataset_indexes = kwargs['dataset_indexes']
         if dataset_indexes is None or len(dataset_indexes) == 0:
             dataset_indexes = None
 
+        print(f'Populating server with sample data for use case {use_case}...')
         ingest_datasets(
+            use_case,
             include_large=include_large,
             dataset_indexes=dataset_indexes,
         )
-        ingest_contexts()
-        ingest_charts()
+        ingest_contexts(use_case)
+        ingest_charts(use_case)
diff --git a/uvdat/core/tests/test_populate.py b/uvdat/core/tests/test_populate.py
index f2bcb7c7..55d79bd0 100644
--- a/uvdat/core/tests/test_populate.py
+++ b/uvdat/core/tests/test_populate.py
@@ -29,6 +29,7 @@ def test_populate():
 
     call_command(
         'populate',
+        'boston_floods',
         include_large=True,
         dataset_indexes=dataset_indexes,
     )
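Note (not part of the diff): after this change the populate management command requires a positional use case, chosen from the two values declared in add_arguments ('boston_floods' or 'new_york_energy'); the existing flags are unchanged. A small usage sketch follows, mirroring the call made in test_populate.py above; the index values are illustrative only.

    # Sketch only -- equivalent to: python manage.py populate boston_floods --include_large
    from django.core.management import call_command

    call_command('populate', 'boston_floods', include_large=True)

    # Load only selected entries from the use case's datasets.json
    # (on the CLI: python manage.py populate new_york_energy --dataset_indexes 0 2).
    call_command('populate', 'new_york_energy', dataset_indexes=[0, 2])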