diff --git a/assets/dataloggers.csv b/assets/dataloggers.csv index 931d3ec6e..a1b5f440e 100644 --- a/assets/dataloggers.csv +++ b/assets/dataloggers.csv @@ -974,38 +974,38 @@ Quanterra,Q4120/6,990801,12996,Obsolete Quanterra,Q4120/6,990802,12995,Obsolete Quanterra,Q730/3,980307,,Obsolete Quanterra,Q730/4,980306,,Obsolete -Reftek,130,902A,, -Reftek,130,902B,, -Reftek,130,902C,, -Reftek,130,902D,, -Reftek,130,9337,, -Reftek,130,953D,, -Reftek,130,9793,, -Reftek,130,989C,, -Reftek,130,9D3D,, -Reftek,130,9F99,, -Reftek,130,9F9A,, -Reftek,130,A895,, -Reftek,130,AC13,, -Reftek,130,AC14,, -Reftek,REFTEK-RECORDER-1995,384,, -Reftek,REFTEK-RECORDER-1995,528,, -Reftek,REFTEK-RECORDER-2001,243,, -Reftek,REFTEK-RECORDER-2001,274,, -Reftek,REFTEK-RECORDER-2001,276,, -Reftek,REFTEK-RECORDER-2001,277,, -Reftek,REFTEK-RECORDER-2001,280,, -Reftek,REFTEK-RECORDER-2001,296,, -Reftek,REFTEK-RECORDER-2001,301,, -Reftek,REFTEK-RECORDER-2001,323,, -Reftek,REFTEK-RECORDER-2001,325,, -Reftek,REFTEK-RECORDER-2001,327,, -Reftek,REFTEK-RECORDER-2001,330,, -Reftek,REFTEK-RECORDER-2001,331,, -Reftek,REFTEK-RECORDER-2001,332,, -Reftek,REFTEK-RECORDER-2001,333,, -Reftek,REFTEK-RECORDER-2001,335,, -Reftek,REFTEK-RECORDER-2001,336,, +RefTek,130,902A,, +RefTek,130,902B,, +RefTek,130,902C,, +RefTek,130,902D,, +RefTek,130,9337,, +RefTek,130,953D,, +RefTek,130,9793,, +RefTek,130,989C,, +RefTek,130,9D3D,, +RefTek,130,9F99,, +RefTek,130,9F9A,, +RefTek,130,A895,, +RefTek,130,AC13,, +RefTek,130,AC14,, +RefTek,REFTEK-RECORDER-1995,384,, +RefTek,REFTEK-RECORDER-1995,528,, +RefTek,REFTEK-RECORDER-2001,243,, +RefTek,REFTEK-RECORDER-2001,274,, +RefTek,REFTEK-RECORDER-2001,276,, +RefTek,REFTEK-RECORDER-2001,277,, +RefTek,REFTEK-RECORDER-2001,280,, +RefTek,REFTEK-RECORDER-2001,296,, +RefTek,REFTEK-RECORDER-2001,301,, +RefTek,REFTEK-RECORDER-2001,323,, +RefTek,REFTEK-RECORDER-2001,325,, +RefTek,REFTEK-RECORDER-2001,327,, +RefTek,REFTEK-RECORDER-2001,330,, +RefTek,REFTEK-RECORDER-2001,331,, +RefTek,REFTEK-RECORDER-2001,332,, +RefTek,REFTEK-RECORDER-2001,333,, +RefTek,REFTEK-RECORDER-2001,335,, +RefTek,REFTEK-RECORDER-2001,336,, Sprengnether Instruments Inc.,SPRENGNETHER,CAZ_1,, Unknown-logger-make,Unknown-logger-model,Unknown,,Ruapehu Crater Lake 1993-95 Wenner,WENNER,WEL_1,, diff --git a/cmd/deltadb/README.md b/cmd/deltadb/README.md index 9aca8f1ea..b95b2c872 100644 --- a/cmd/deltadb/README.md +++ b/cmd/deltadb/README.md @@ -51,12 +51,14 @@ and then to examine the file sqlite3 delta.db SQLite version 3.46.1 2024-08-13 09:16:08 Enter ".help" for usage hints. -sqlite> .schema Network -CREATE TABLE Network ( - Code TEXT PRIMARY KEY, - External TEXT, - Description TEXT, - Restricted TEXT +sqlite> .schema network +CREATE TABLE network ( + network_id INTEGER PRIMARY KEY NOT NULL, + network TEXT NOT NULL, + external TEXT NOT NULL, + description TEXT DEFAULT "" NOT NULL, + restricted BOOLEAN DEFAULT false NOT NULL, + UNIQUE (network) ); sqlite> ``` @@ -90,16 +92,20 @@ curl -s localhost:8080/network|jq .|head or ``` -curl -s localhost:8080/station/WEL/site/20/sensor/|jq . | head +curl -s localhost:8080/station/WEL|jq . | head [ { - "make": "Kinemetrics", - "model": "FBA-23", - "serial": "25073", - "start": "1990-10-03T03:04:00Z", - "end": "2003-04-08T00:00:00Z", - "dip": 0, - "azimuth": 0, - "factor": 0, + "station": "WEL", + "network": "NZ", + "name": "Wellington", + "latitude": -41.284047578, + "longitude": 174.768184021, + "elevation": 138, + "datum": "WGS84", + "start": "1916-01-01T00:00:00Z", ... 
```
+
+## schema
+
+[![Schema](delta.svg)](delta.svg)
diff --git a/cmd/deltadb/delta.svg b/cmd/deltadb/delta.svg
new file mode 100644
index 000000000..e353ad5ab
--- /dev/null
+++ b/cmd/deltadb/delta.svg
@@ -0,0 +1,3703 @@
+[delta.svg: a Graphviz-rendered entity-relationship diagram of the generated schema — one node per table (antenna, asset, mark, model, bedrock, calibration, camera, view, channel, response, citation, class, station, class_citation, component, connection, place_role, site, constituent, gauge, dart, datalogger, datum, doas, feature, firmware, foundation_type, gain, geology, make, mark_network, network, mark_type, method, metsensor, monument, monument_type, mount, mount_network, placename, point, sample, polarity, preamp, radome, receiver, recorder, sample_network, sensor, session, station_network, stream, telemetry, timing, visibility) listing its primary key and columns, with edges for the foreign-key relations; the 3703 lines of SVG markup are not reproduced here]
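The endpoints shown in the README above can equally be driven from Go. The following is a minimal sketch, not part of this change, assuming the server is listening on localhost:8080 as in the curl examples; the struct simply mirrors the fields visible in the `/station/WEL` output:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"time"
)

// Station mirrors the JSON fields shown in the README example output;
// fields not listed here (elevation, datum, ...) are simply ignored
// by the decoder.
type Station struct {
	Station   string    `json:"station"`
	Network   string    `json:"network"`
	Name      string    `json:"name"`
	Latitude  float64   `json:"latitude"`
	Longitude float64   `json:"longitude"`
	Start     time.Time `json:"start"`
}

func main() {
	// The handler returns a JSON array, even when queried by a single code.
	resp, err := http.Get("http://localhost:8080/station/WEL")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var stations []Station
	if err := json.NewDecoder(resp.Body).Decode(&stations); err != nil {
		log.Fatal(err)
	}
	for _, s := range stations {
		fmt.Printf("%s (%s): %s\n", s.Station, s.Network, s.Name)
	}
}
```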
diff --git a/cmd/deltadb/handler.go b/cmd/deltadb/handler.go
index b9f61a24e..731b48b2f 100644
--- a/cmd/deltadb/handler.go
+++ b/cmd/deltadb/handler.go
@@ -19,53 +19,23 @@ func newHandler(db *sql.DB) *http.ServeMux {
 mux.HandleFunc("/", homePage)
 mux.HandleFunc("GET /network", h.GetNetworks)
 mux.HandleFunc("GET /network/", h.GetNetworks)
- mux.HandleFunc("GET /network/{code}", h.GetNetworkCode)
- mux.HandleFunc("GET /network/{code}/", h.GetNetworkCode)
- mux.HandleFunc("GET /network/{code}/station", h.GetNetworkStations)
- mux.HandleFunc("GET /network/{code}/station/", h.GetNetworkStations)
- mux.HandleFunc("GET /network/{code}/mark", h.GetNetworkMarks)
- mux.HandleFunc("GET /network/{code}/mark/", h.GetNetworkMarks)
- mux.HandleFunc("GET /network/{code}/mount",
h.GetNetworkMounts) - mux.HandleFunc("GET /network/{code}/mount/", h.GetNetworkMounts) - mux.HandleFunc("GET /network/{code}/sample", h.GetNetworkSamples) - mux.HandleFunc("GET /network/{code}/sample/", h.GetNetworkSamples) mux.HandleFunc("GET /station", h.GetStations) mux.HandleFunc("GET /station/", h.GetStations) mux.HandleFunc("GET /station/{code}", h.GetStationCode) mux.HandleFunc("GET /station/{code}/", h.GetStationCode) - mux.HandleFunc("GET /station/{code}/site", h.GetStationSites) - mux.HandleFunc("GET /station/{code}/site/", h.GetStationSites) - mux.HandleFunc("GET /station/{code}/site/{location}", h.GetStationSiteLocation) - mux.HandleFunc("GET /station/{code}/site/{location}/", h.GetStationSiteLocation) - mux.HandleFunc("GET /station/{code}/site/{location}/sensor", h.GetStationSiteSensors) - mux.HandleFunc("GET /station/{code}/site/{location}/sensor/", h.GetStationSiteSensors) mux.HandleFunc("GET /site", h.GetSites) mux.HandleFunc("GET /site/", h.GetSites) - mux.HandleFunc("GET /site/{location}", h.GetSiteLocations) - mux.HandleFunc("GET /site/{location}/", h.GetSiteLocations) mux.HandleFunc("GET /mark", h.GetMarks) mux.HandleFunc("GET /mark/", h.GetMarks) - mux.HandleFunc("GET /mark/{code}", h.GetMarkCode) - mux.HandleFunc("GET /mark/{code}/", h.GetMarkCode) mux.HandleFunc("GET /monument", h.GetMonuments) mux.HandleFunc("GET /monument/", h.GetMonuments) - mux.HandleFunc("GET /monument/{mark}", h.GetMonumentMark) - mux.HandleFunc("GET /monument/{mark}/", h.GetMonumentMark) mux.HandleFunc("GET /sample", h.GetSamples) mux.HandleFunc("GET /sample/", h.GetSamples) - mux.HandleFunc("GET /sample/{code}", h.GetSampleCode) - mux.HandleFunc("GET /sample/{code}/", h.GetSampleCode) - mux.HandleFunc("GET /sample/{code}/point", h.GetSamplePoints) - mux.HandleFunc("GET /sample/{code}/point/", h.GetSamplePoints) - mux.HandleFunc("GET /sample/{code}/point/{location}", h.GetSamplePointLocation) - mux.HandleFunc("GET /sample/{code}/point/{location}/", h.GetSamplePointLocation) - mux.HandleFunc("GET /sample/{code}/point/{location}/sensor", h.GetSamplePointLocationSensors) - mux.HandleFunc("GET /sample/{code}/point/{location}/sensor/", h.GetSamplePointLocationSensors) mux.HandleFunc("GET /sensor", h.GetSensors) mux.HandleFunc("GET /sensor/", h.GetSensors) diff --git a/cmd/deltadb/main.go b/cmd/deltadb/main.go index ad5e45b9e..02a4e2fd6 100644 --- a/cmd/deltadb/main.go +++ b/cmd/deltadb/main.go @@ -106,7 +106,7 @@ func main() { if settings.db == "" || settings.init { // insert extra response files - extra := set.KeyValue(settings.response, "Name", "Response", values) + extra := set.KeyValue(settings.response, "Response", "XML", values) log.Println("initialise database") start := time.Now() diff --git a/cmd/deltadb/mark.go b/cmd/deltadb/mark.go index 6d4d57ead..c36c65eda 100644 --- a/cmd/deltadb/mark.go +++ b/cmd/deltadb/mark.go @@ -4,32 +4,53 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" + "time" ) +type Mark struct { + Mark string `json:"station"` + Network string `json:"network"` + Igs string `json:"igs"` + Name string `json:"name"` + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Elevation *float64 `json:"elevation,omitempty"` + Datum string `json:"datum"` + Start time.Time `json:"start"` + End time.Time `json:"end"` +} + func (h handler) GetMarks(w http.ResponseWriter, r *http.Request) { - marks, err := sqlite.Marks(r.Context(), h.db) + query := "SELECT 
mark.mark,network.network,mark.igs,mark.name,mark.latitude,mark.longitude,mark.elevation,datum.datum,mark.start_date,mark.end_date FROM mark, datum, network, mark_network WHERE mark.datum_id = datum.datum_id AND mark.mark_id = mark_network.mark_id AND network.network_id = mark_network.network_id" + stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(marks); err != nil { - log.Println(err) + results, err := stmt.QueryContext(r.Context()) + if err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return } -} + defer results.Close() -func (h handler) GetMarkCode(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") + marks := make([]Mark, 0) + for results.Next() { + var mark Mark + if err := results.Scan(&mark.Mark, &mark.Network, &mark.Igs, &mark.Name, &mark.Latitude, &mark.Longitude, &mark.Elevation, &mark.Datum, &mark.Start, &mark.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + marks = append(marks, mark) + } - marks, err := sqlite.Marks(r.Context(), h.db, sqlite.Code(code)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return diff --git a/cmd/deltadb/monument.go b/cmd/deltadb/monument.go index ddc27304c..1c6c74a20 100644 --- a/cmd/deltadb/monument.go +++ b/cmd/deltadb/monument.go @@ -4,32 +4,54 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" + "time" ) +type Monument struct { + Mark string `json:"mark"` + DomesNumber string `json:"domes_number,omitempty"` + MarkType string `json:"mark_type"` + Type string `json:"type"` + GroundRelationship float64 `json:"ground_relationship"` + FoundationType string `json:"foundation_type"` + FoundationDepth float64 `json:"foundation_depth"` + Bedrock string `json:"bedrock,omitempty"` + Geology string `json:"geology,omitempty"` + Start time.Time `json:"start"` + End time.Time `json:"end"` +} + func (h handler) GetMonuments(w http.ResponseWriter, r *http.Request) { - monuments, err := sqlite.Monuments(r.Context(), h.db) + query := "SELECT mark.mark,monument.domes_number,mark_type.mark_type,monument_type.monument_type,monument.ground_relationship,foundation_type.foundation_type,monument.foundation_depth,bedrock.bedrock,geology.geology,monument.start_date,monument.end_date FROM mark, monument, mark_type, monument_type, foundation_type, bedrock, geology WHERE mark.mark_id = monument.mark_id AND monument.mark_type_id = mark_type.mark_type_id AND monument.monument_type_id = monument_type.monument_type_id AND monument.foundation_type_id = foundation_type.foundation_type_id AND monument.bedrock_id = bedrock.bedrock_id AND monument.geology_id = geology.geology_id" + stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(monuments); err != nil { - log.Println(err) + results, err := stmt.QueryContext(r.Context()) + if err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return } -} + defer results.Close() -func (h handler) GetMonumentMark(w http.ResponseWriter, r 
*http.Request) { - - mark := r.PathValue("mark") + monuments := make([]Monument, 0) + for results.Next() { + var monument Monument + if err := results.Scan(&monument.Mark, &monument.DomesNumber, &monument.MarkType, &monument.Type, &monument.GroundRelationship, &monument.FoundationType, &monument.FoundationDepth, &monument.Bedrock, &monument.Geology, &monument.Start, &monument.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + monuments = append(monuments, monument) + } - monuments, err := sqlite.Monuments(r.Context(), h.db, sqlite.Mark(mark)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return diff --git a/cmd/deltadb/network.go b/cmd/deltadb/network.go index 309136a4d..9de49dc16 100644 --- a/cmd/deltadb/network.go +++ b/cmd/deltadb/network.go @@ -4,68 +4,46 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" ) func (h handler) GetNetworks(w http.ResponseWriter, r *http.Request) { - networks, err := sqlite.Networks(r.Context(), h.db) + query := "SELECT network,external,description,restricted FROM network" + stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(networks); err != nil { - log.Println(err) - } -} - -func (h handler) GetNetworkCode(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - - networks, err := sqlite.Networks(r.Context(), h.db, sqlite.Code(code)) + results, err := stmt.QueryContext(r.Context()) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer results.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(networks); err != nil { - log.Println(err) - } -} - -func (h handler) GetNetworkStations(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - - stations, err := sqlite.Stations(r.Context(), h.db, sqlite.Network(code)) - if err != nil { - log.Println("failed to execute query", err) - w.WriteHeader(500) - return + type Network struct { + Network string `json:"network"` + External string `json:"external"` + Description string `json:"description"` + Restricted bool `json:"restricted,omitempty"` } - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(stations); err != nil { - log.Println(err) + networks := make([]Network, 0) + for results.Next() { + var network Network + if err := results.Scan(&network.Network, &network.External, &network.Description, &network.Restricted); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + networks = append(networks, network) } -} -func (h handler) GetNetworkMarks(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - - marks, err := sqlite.Marks(r.Context(), h.db, sqlite.Network(code)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return @@ -73,43 +51,7 @@ func (h handler) GetNetworkMarks(w http.ResponseWriter, r *http.Request) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(marks); err != nil { - log.Println(err) - } 
-} - -func (h handler) GetNetworkSamples(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - - samples, err := sqlite.Samples(r.Context(), h.db, sqlite.Network(code)) - if err != nil { - log.Println("failed to execute query", err) - w.WriteHeader(500) - return - } - - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(samples); err != nil { - log.Println(err) - } -} - -func (h handler) GetNetworkMounts(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - - mounts, err := sqlite.Mounts(r.Context(), h.db, sqlite.Network(code)) - if err != nil { - log.Println("failed to execute query", err) - w.WriteHeader(500) - return - } - - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(mounts); err != nil { + if err := json.NewEncoder(w).Encode(networks); err != nil { log.Println(err) } } diff --git a/cmd/deltadb/sample.go b/cmd/deltadb/sample.go index 89a104f42..7c4e09815 100644 --- a/cmd/deltadb/sample.go +++ b/cmd/deltadb/sample.go @@ -4,88 +4,53 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" + "time" ) -func (h handler) GetSamples(w http.ResponseWriter, r *http.Request) { - - samples, err := sqlite.Samples(r.Context(), h.db) - if err != nil { - log.Println("failed to execute query", err) - w.WriteHeader(500) - return - } - - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(samples); err != nil { - log.Println(err) - } -} - -func (h handler) GetSampleCode(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - - samples, err := sqlite.Samples(r.Context(), h.db, sqlite.Code(code)) - if err != nil { - log.Println("failed to execute query", err) - w.WriteHeader(500) - return - } - - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(samples); err != nil { - log.Println(err) - } +type Sample struct { + Station string `json:"station"` + Network string `json:"network"` + Name string `json:"name"` + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Elevation *float64 `json:"elevation,omitempty"` + Depth *float64 `json:"depth,omitempty"` + Datum string `json:"datum"` + Start time.Time `json:"start"` + End time.Time `json:"end"` } -func (h handler) GetSamplePoints(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") +func (h handler) GetSamples(w http.ResponseWriter, r *http.Request) { - points, err := sqlite.Points(r.Context(), h.db, sqlite.Sample(code)) + query := "SELECT sample.station,network.network,sample.name,sample.latitude,sample.longitude,sample.elevation,sample.depth,datum.datum,sample.start_date,sample.end_date FROM sample, datum, network, sample_network WHERE sample.datum_id = datum.datum_id AND sample.sample_id = sample_network.sample_id AND network.network_id = sample_network.network_id" + stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(points); err != nil { - log.Println(err) - } -} - -func (h handler) GetSamplePointLocation(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - location := r.PathValue("location") 
- - points, err := sqlite.Points(r.Context(), h.db, sqlite.Sample(code), sqlite.Location(location)) + results, err := stmt.QueryContext(r.Context()) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } - - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(points); err != nil { - log.Println(err) + defer results.Close() + + samples := make([]Sample, 0) + for results.Next() { + var sample Sample + if err := results.Scan(&sample.Station, &sample.Network, &sample.Name, &sample.Latitude, &sample.Longitude, &sample.Elevation, &sample.Depth, &sample.Datum, &sample.Start, &sample.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + samples = append(samples, sample) } -} - -func (h handler) GetSamplePointLocationSensors(w http.ResponseWriter, r *http.Request) { - code := r.PathValue("code") - location := r.PathValue("location") - - sensors, err := sqlite.Sensors(r.Context(), h.db, sqlite.Station(code), sqlite.Location(location)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return @@ -93,7 +58,7 @@ func (h handler) GetSamplePointLocationSensors(w http.ResponseWriter, r *http.Re w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(sensors); err != nil { + if err := json.NewEncoder(w).Encode(samples); err != nil { log.Println(err) } } diff --git a/cmd/deltadb/sensor.go b/cmd/deltadb/sensor.go index 88e5b1bb6..d2b31bbae 100644 --- a/cmd/deltadb/sensor.go +++ b/cmd/deltadb/sensor.go @@ -4,23 +4,68 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" + "time" ) +type Sensor struct { + Make string `json:"make"` + Model string `json:"model"` + Serial string `json:"serial"` + Station string `json:"station"` + Location string `json:"location"` + Azimuth float64 `json:"azimuth"` + Method string `json:"method"` + Dip float64 `json:"dip"` + Depth float64 `json:"depth"` + North float64 `json:"north"` + East float64 `json:"east"` + Factor float64 `json:"factor"` + Bias float64 `json:"bias"` + Start time.Time `json:"start"` + End time.Time `json:"end"` +} + func (h handler) GetSensors(w http.ResponseWriter, r *http.Request) { - sensors, err := sqlite.Sensors(r.Context(), h.db) + query := "SELECT make.make,model.model,asset.serial,station.station,site.location,sensor.azimuth,method.method,sensor.dip,sensor.depth,sensor.north,sensor.east,sensor.scale_factor,sensor.scale_bias,sensor.start_date,sensor.end_date FROM make,model,asset,station,site,method,sensor WHERE make.make_id = model.make_id AND model.model_id = asset.model_id AND asset.asset_id = sensor.asset_id AND site.station_id = station.station_id AND sensor.site_id = site.site_id AND sensor.method_id = method.method_id" + stmt, err := h.db.PrepareContext(r.Context(), query) + if err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + defer stmt.Close() + + results, err := stmt.QueryContext(r.Context()) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer results.Close() + + sensors := make([]Sensor, 0) + for results.Next() { + var sensor Sensor + + if err := results.Scan(&sensor.Make, &sensor.Model, &sensor.Serial, &sensor.Station, &sensor.Location, &sensor.Azimuth, &sensor.Method, &sensor.Dip, &sensor.Depth, &sensor.North, &sensor.East, &sensor.Factor, 
&sensor.Bias, &sensor.Start, &sensor.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + + sensors = append(sensors, sensor) + } + + if err = results.Err(); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusOK) if err := json.NewEncoder(w).Encode(sensors); err != nil { log.Println(err) } - } diff --git a/cmd/deltadb/site.go b/cmd/deltadb/site.go index 07ea90c60..4c22edd8d 100644 --- a/cmd/deltadb/site.go +++ b/cmd/deltadb/site.go @@ -4,32 +4,53 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" + "time" ) +type Site struct { + Station string `json:"station"` + Location string `json:"location"` + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Elevation *float64 `json:"elevation,omitempty"` + Depth *float64 `json:"depth,omitempty"` + Datum string `json:"datum"` + Survey string `json:"survey"` + Start time.Time `json:"start"` + End time.Time `json:"end"` +} + func (h handler) GetSites(w http.ResponseWriter, r *http.Request) { - sites, err := sqlite.Sites(r.Context(), h.db) + query := "SELECT station.station,site.location,site.latitude,site.longitude,site.elevation,site.depth,datum.datum,site.survey,site.start_date,site.end_date FROM station, site, datum WHERE site.station_id = station.station_id AND site.datum_id = datum.datum_id" + stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(sites); err != nil { - log.Println(err) + results, err := stmt.QueryContext(r.Context()) + if err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return } -} + defer results.Close() -func (h handler) GetSiteLocations(w http.ResponseWriter, r *http.Request) { - - location := r.PathValue("location") + sites := make([]Site, 0) + for results.Next() { + var site Site + if err := results.Scan(&site.Station, &site.Location, &site.Latitude, &site.Longitude, &site.Elevation, &site.Depth, &site.Datum, &site.Survey, &site.Start, &site.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + sites = append(sites, site) + } - sites, err := sqlite.Sites(r.Context(), h.db, sqlite.Location(location)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return diff --git a/cmd/deltadb/station.go b/cmd/deltadb/station.go index e08a421f7..16810f3f5 100644 --- a/cmd/deltadb/station.go +++ b/cmd/deltadb/station.go @@ -4,32 +4,53 @@ import ( "encoding/json" "log" "net/http" - - "github.com/GeoNet/delta/meta/sqlite" + "time" ) +type Station struct { + Station string `json:"station"` + Network string `json:"network"` + Name string `json:"name"` + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Elevation *float64 `json:"elevation,omitempty"` + Depth *float64 `json:"depth,omitempty"` + Datum string `json:"datum"` + Start time.Time `json:"start"` + End time.Time `json:"end"` +} + func (h handler) GetStations(w http.ResponseWriter, r *http.Request) { - stations, err := sqlite.Stations(r.Context(), h.db) + query := "SELECT 
station.station,network.network,station.name,station.latitude,station.longitude,station.elevation,station.depth,datum.datum,station.start_date,station.end_date FROM station, datum, network, station_network WHERE station.datum_id = datum.datum_id AND station.station_id = station_network.station_id AND network.network_id = station_network.network_id" + stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(stations); err != nil { - log.Println(err) + results, err := stmt.QueryContext(r.Context()) + if err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + defer results.Close() + + stations := make([]Station, 0) + for results.Next() { + var station Station + if err := results.Scan(&station.Station, &station.Network, &station.Name, &station.Latitude, &station.Longitude, &station.Elevation, &station.Depth, &station.Datum, &station.Start, &station.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + stations = append(stations, station) } -} - -func (h handler) GetStationCode(w http.ResponseWriter, r *http.Request) { - - code := r.PathValue("code") - stations, err := sqlite.Stations(r.Context(), h.db, sqlite.Code(code)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return @@ -42,50 +63,39 @@ func (h handler) GetStationCode(w http.ResponseWriter, r *http.Request) { } } -func (h handler) GetStationSites(w http.ResponseWriter, r *http.Request) { +func (h handler) GetStationCode(w http.ResponseWriter, r *http.Request) { - station := r.PathValue("code") + code := r.PathValue("code") - sites, err := sqlite.Sites(r.Context(), h.db, sqlite.Station(station)) + query := "SELECT station.station,network.network,station.name,station.latitude,station.longitude,station.elevation,station.depth,datum.datum,station.start_date,station.end_date FROM station, datum, network, station_network WHERE station.datum_id = datum.datum_id AND station.station_id = station_network.station_id AND network.network_id = station_network.network_id AND station.station = ?" 
+ stmt, err := h.db.PrepareContext(r.Context(), query) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } + defer stmt.Close() - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(sites); err != nil { - log.Println(err) - } -} - -func (h handler) GetStationSiteLocation(w http.ResponseWriter, r *http.Request) { - - station := r.PathValue("code") - location := r.PathValue("location") - - sites, err := sqlite.Sites(r.Context(), h.db, sqlite.Station(station), sqlite.Location(location)) + results, err := stmt.QueryContext(r.Context(), code) if err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return } - - w.Header().Add("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(sites); err != nil { - log.Println(err) + defer results.Close() + + stations := make([]Station, 0) + for results.Next() { + var station Station + if err := results.Scan(&station.Station, &station.Network, &station.Name, &station.Latitude, &station.Longitude, &station.Elevation, &station.Depth, &station.Datum, &station.Start, &station.End); err != nil { + log.Println("failed to execute query", err) + w.WriteHeader(500) + return + } + stations = append(stations, station) } -} - -func (h handler) GetStationSiteSensors(w http.ResponseWriter, r *http.Request) { - station := r.PathValue("code") - location := r.PathValue("location") - - sensors, err := sqlite.Sensors(r.Context(), h.db, sqlite.Station(station), sqlite.Location(location)) - if err != nil { + if err = results.Err(); err != nil { log.Println("failed to execute query", err) w.WriteHeader(500) return @@ -93,8 +103,7 @@ func (h handler) GetStationSiteSensors(w http.ResponseWriter, r *http.Request) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - if err := json.NewEncoder(w).Encode(sensors); err != nil { + if err := json.NewEncoder(w).Encode(stations); err != nil { log.Println(err) } - } diff --git a/meta/placenames.go b/meta/placenames.go index eb97625ab..261c5d876 100644 --- a/meta/placenames.go +++ b/meta/placenames.go @@ -30,7 +30,7 @@ var placenameHeaders Header = map[string]int{ } var PlacenameTable Table = Table{ - name: "Name", + name: "Placename", headers: placenameHeaders, primary: []string{"Name"}, native: []string{"Latitude", "Longitude", "Level"}, diff --git a/meta/sqlite/asset.go b/meta/sqlite/asset.go new file mode 100644 index 000000000..0787d3f54 --- /dev/null +++ b/meta/sqlite/asset.go @@ -0,0 +1,198 @@ +package sqlite + +import ( + "fmt" +) + +const makeCreate = ` +DROP TABLE IF EXISTS make; +CREATE TABLE IF NOT EXISTS make ( + make_id INTEGER PRIMARY KEY NOT NULL, + make TEXT NOT NULL, + UNIQUE (make) +);` + +var mmake = Table{ + Create: makeCreate, + Select: func() string { + return "SELECT make_id FROM make WHERE make = ?" + }, + Insert: func() string { + return "INSERT INTO make (make) VALUES (?) 
ON CONFLICT (make) DO NOTHING;" + }, + + Fields: []string{"Make"}, +} + +const modelCreate = ` +DROP TABLE IF EXISTS model; +CREATE TABLE IF NOT EXISTS model ( + model_id INTEGER PRIMARY KEY NOT NULL, + make_id INTEGER NOT NULL, + model TEXT NOT NULL, + FOREIGN KEY (make_id) REFERENCES make (make_id), + UNIQUE (make_id, model) +);` + +var model = Table{ + Create: modelCreate, + Select: func() string { + return fmt.Sprintf("SELECT model_id FROM model WHERE make_id = (%s) AND model = ?", mmake.Select()) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO model (make_id, model) VALUES ((%s), ?) ON CONFLICT (make_id, model) DO NOTHING;", mmake.Select()) + }, + Fields: []string{"Make", "Model"}, +} + +const assetCreate = ` +DROP TABLE IF EXISTS asset; +CREATE TABLE IF NOT EXISTS asset ( + asset_id INTEGER PRIMARY KEY NOT NULL, + model_id INTEGER NOT NULL, + serial TEXT NOT NULL, + number TEXT DEFAULT "" NOT NULL, + notes TEXT DEFAULT "" NOT NULL, + FOREIGN KEY (model_id) REFERENCES model (model_id), + UNIQUE (model_id,serial) +);` + +var asset = Table{ + Create: assetCreate, + Select: func() string { + return fmt.Sprintf("SELECT asset_id FROM asset WHERE model_id = (%s) AND serial = ?", model.Select()) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO asset (model_id, serial, number, notes) VALUES ((%s), ?, ?, ?);", model.Select()) + }, + Fields: []string{"Make", "Model", "Serial", "Number", "Notes"}, +} + +const firmwareCreate = ` +DROP TABLE IF EXISTS firmware; +CREATE TABLE IF NOT EXISTS firmware ( + firmware_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + version TEXT NOT NULL, + notes TEXT NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + UNIQUE (asset_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_firmware BEFORE INSERT ON firmware +WHEN EXISTS ( + SELECT * FROM firmware + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on firmware"); +END; +` + +var firmware = Table{ + Create: firmwareCreate, + Select: func() string { + return fmt.Sprintf("SELECT firmware_id FROM firmware WHERE asset_id = (%s) AND start_date = ? 
AND end_date = ?", + asset.Select()) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO firmware (asset_id, version, notes, start_date, end_date) VALUES ((%s), ?, ?, ?, ?);", + asset.Select()) + }, + Fields: []string{"Make", "Model", "Serial", "Version", "Notes", "Start Date", "End Date"}, +} + +const channelCreate = ` +DROP TABLE IF EXISTS channel; +CREATE TABLE IF NOT EXISTS channel ( + channel_id INTEGER PRIMARY KEY NOT NULL, + model_id INTEGER NOT NULL, + response_id INTEGER NULL, + channel_type TEXT NOT NULL, + number REAL DEFAULT 0 NOT NULL, + sampling_rate REAL NOT NULL, + FOREIGN KEY (model_id) REFERENCES model (model_id), + FOREIGN KEY (response_id) REFERENCES response (response_id), + UNIQUE(model_id, channel_type, number, sampling_rate) +); +` + +var channel = Table{ + Create: channelCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO channel (model_id, response_id, channel_type, number, sampling_rate) VALUES ((%s), (%s), ?, ?, ?);", + model.Select(), response.Select()) + }, + Fields: []string{"Make", "Model", "Response", "Type", "Number", "SamplingRate"}, + Nulls: []string{"Response"}, +} + +const componentCreate = ` +DROP TABLE IF EXISTS component; +CREATE TABLE IF NOT EXISTS component ( + component_id INTEGER PRIMARY KEY NOT NULL, + model_id INTEGER NOT NULL, + response_id INTEGER NOT NULL, + component_type TEXT NULL, + number REAL NOT NULL, + source TEXT NULL, + subsource TEXT NOT NULL, + dip REAL NOT NULL, + azimuth REAL NOT NULL, + types TEXT NOT NULL, + sampling_rate REAL NULL, + FOREIGN KEY (model_id) REFERENCES model (model_id), + FOREIGN KEY (response_id) REFERENCES response (response_id), + UNIQUE(model_id, number, source, subsource, sampling_rate) +); +` + +var component = Table{ + Create: componentCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO component (model_id, response_id, component_type, number, source, subsource, dip, azimuth, types, sampling_rate) VALUES ((%s), (%s), ?, ?, ?, ?, ?, ?, ?, ?);", + model.Select(), response.Select()) + }, + Fields: []string{"Make", "Model", "Response", "Type", "Number", "Source", "Subsource", "Dip", "Azimuth", "Types", "Sampling Rate"}, +} + +const calibrationCreate = ` +DROP TABLE IF EXISTS calibration; +CREATE TABLE IF NOT EXISTS calibration ( + calibration_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + number TEXT NOT NULL, + scale_factor REAL DEFAULT 1.0 NOT NULL, + scale_bias REAL DEFAULT 0.0 NOT NULL, + scale_absolute REAL DEFAULT 0.0 NOT NULL, + frequency REAL NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + UNIQUE(asset_id, number, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_calibration BEFORE INSERT ON calibration +WHEN EXISTS ( + SELECT * FROM calibration + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND number = NEW.number +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on calibration"); +END; +` + +var calibration = Table{ + Create: calibrationCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO calibration (asset_id, number, scale_factor, scale_bias, scale_absolute, frequency, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?);", + asset.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", 
"Number", "Scale Factor", "Scale Bias", "Scale Absolute", "Frequency", "Start Date", "End Date"}, +} diff --git a/meta/sqlite/camera.go b/meta/sqlite/camera.go new file mode 100644 index 000000000..a4efced02 --- /dev/null +++ b/meta/sqlite/camera.go @@ -0,0 +1,116 @@ +package sqlite + +import ( + "fmt" +) + +const cameraCreate = ` +DROP TABLE IF EXISTS camera; +CREATE TABLE IF NOT EXISTS camera ( + camera_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + view_id INTEGER NOT NULL, + dip REAL DEFAULT 0.0 NOT NULL, + azimuth REAL DEFAULT 0.0 NOT NULL, + height REAL DEFAULT 0.0 NOT NULL, + north REAL DEFAULT 0.0 NOT NULL, + east REAL DEFAULT 0.0 NOT NULL, + notes TEXT DEFAULT "" NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (view_id) REFERENCES view (view_id), + UNIQUE(asset_id, view_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_camera BEFORE INSERT ON camera +WHEN EXISTS ( + SELECT * FROM camera + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND view_id = NEW.view_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on camera"); +END; +CREATE TRIGGER IF NOT EXISTS camera_too_soon BEFORE INSERT ON camera +WHEN NEW.start_date < (SELECT view.start_date FROM view WHERE view.view_id = new.view_id) +BEGIN + SELECT RAISE(FAIL, "camera too soon for view"); +END; +CREATE TRIGGER IF NOT EXISTS camera_too_late BEFORE INSERT ON camera +WHEN NEW.end_date > (SELECT view.end_date FROM view WHERE view.view_id = new.view_id) +BEGIN + SELECT RAISE(FAIL, "camera too late for view"); +END; +` + +var camera = Table{ + Create: cameraCreate, + Select: func() string { + return fmt.Sprintf("SELECT camera_id FROM camera WHERE asset_id = (%s) AND view_id = (%s)", + asset.Select(), view.Select(), + ) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO camera (asset_id, view_id, dip, azimuth, height, north, east, notes, start_date, end_date) VALUES ((%s), (%s), ?, ?, ?, ?, ?, ?, ?, ?);", + asset.Select(), view.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Mount", "View", "Start Date", "End Date", "Dip", "Azimuth", "Height", "North", "East", "Notes", "Start Date", "End Date"}, +} + +var doasCreate = ` +DROP TABLE IF EXISTS doas; +CREATE TABLE IF NOT EXISTS doas ( + doas_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + view_id INTEGER NOT NULL, + dip REAL NOT NULL, + azimuth REAL NOT NULL, + height REAL NOT NULL, + north REAL NOT NULL, + east REAL NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (view_id) REFERENCES view (view_id), + UNIQUE(asset_id, view_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_doas BEFORE INSERT ON doas +WHEN EXISTS ( + SELECT * FROM doas + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND view_id = NEW.view_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on doas"); +END; +CREATE TRIGGER IF NOT EXISTS doas_too_soon BEFORE INSERT ON doas +WHEN NEW.start_date < 
(SELECT view.start_date FROM view WHERE view.view_id = new.view_id)
+BEGIN
+ SELECT RAISE(FAIL, "doas too soon for view");
+END;
+CREATE TRIGGER IF NOT EXISTS doas_too_late BEFORE INSERT ON doas
+WHEN NEW.end_date > (SELECT view.end_date FROM view WHERE view.view_id = new.view_id)
+BEGIN
+ SELECT RAISE(FAIL, "doas too late for view");
+END;
+`
+
+var doas = Table{
+	Create: doasCreate,
+	Select: func() string {
+		return fmt.Sprintf("SELECT doas_id FROM doas WHERE asset_id = (%s) AND view_id = (%s)",
+			asset.Select(), view.Select(),
+		)
+	},
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO doas (asset_id, view_id, dip, azimuth, height, north, east, start_date, end_date) VALUES ((%s), (%s), ?, ?, ?, ?, ?, ?, ?);",
+			asset.Select(), view.Select(),
+		)
+	},
+	Fields: []string{"Make", "Model", "Serial", "Mount", "View", "Start Date", "End Date", "Dip", "Azimuth", "Height", "North", "East", "Start Date", "End Date"},
+}
diff --git a/meta/sqlite/database.go b/meta/sqlite/database.go
deleted file mode 100644
index 6f160fd58..000000000
--- a/meta/sqlite/database.go
+++ /dev/null
@@ -1,204 +0,0 @@
-package sqlite
-
-import (
-	"context"
-	"database/sql"
-	"fmt"
-	"strings"
-
-	"github.com/GeoNet/delta/meta"
-)
-
-type Database struct {
-	db *sql.DB
-}
-
-func New(db *sql.DB) Database {
-	return Database{
-		db: db,
-	}
-}
-
-func (d Database) exec(ctx context.Context, tx *sql.Tx, cmds ...string) error {
-	for _, cmd := range cmds {
-		if _, err := tx.ExecContext(ctx, cmd); err != nil {
-			return fmt.Errorf("cmd %q: %w", cmd, err)
-		}
-	}
-	return nil
-}
-
-func (d Database) prepare(ctx context.Context, tx *sql.Tx, cmd string, values ...[]any) error {
-
-	stmt, err := tx.PrepareContext(ctx, cmd)
-	if err != nil {
-		return err
-	}
-	defer stmt.Close()
-
-	for _, v := range values {
-		if _, err := stmt.ExecContext(ctx, v...); err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func (d Database) Init(ctx context.Context, tables []meta.TableList) error {
-
-	// Get a Tx for making transaction requests.
-	tx, err := d.db.BeginTx(ctx, nil)
-	if err != nil {
-		return err
-	}
-	// Defer a rollback in case anything fails, not actually
-	// worried about any rollback error.
-	defer func() { _ = tx.Rollback() }()
-
-	for _, t := range tables {
-		if err := d.exec(ctx, tx, d.create(t.Table)...); err != nil {
-			return err
-		}
-
-		cmd, values, ok := d.insert(t.Table, t.List)
-		if !ok {
-			continue
-		}
-
-		if err := d.prepare(ctx, tx, cmd, values...); err != nil {
-			return err
-		}
-	}
-
-	// Commit the transaction.
- if err = tx.Commit(); err != nil { - return err - } - - return nil -} - -func (d Database) create(table meta.Table) []string { - var drop strings.Builder - fmt.Fprintf(&drop, "DROP TABLE IF EXISTS %s;\n", table.Name()) - - var create strings.Builder - - var primary []string - for n, x := range table.Columns() { - if !table.IsPrimary(n) { - continue - } - primary = append(primary, table.Remap(x)) - } - - fmt.Fprintf(&create, "CREATE TABLE IF NOT EXISTS %s(\n", table.Name()) - for n, x := range table.Columns() { - if n > 0 { - fmt.Fprintf(&create, ",\n") - } - switch { - case table.IsPrimary(n) && len(primary) == 1: - fmt.Fprintf(&create, " %s TEXT PRIMARY KEY", table.Remap(x)) - case table.IsNative(n): - fmt.Fprintf(&create, " %s REAL", table.Remap(x)) - case table.IsDateTime(n): - fmt.Fprintf(&create, " %s DATETIME CHECK (%s IS strftime('%%Y-%%m-%%dT%%H:%%M:%%SZ', %s))", table.Remap(x), table.Remap(x), table.Remap(x)) - default: - fmt.Fprintf(&create, " %s TEXT", table.Remap(x)) - } - } - if len(primary) > 1 { - fmt.Fprintf(&create, ",\n PRIMARY KEY(%s)", strings.Join(primary, ",")) - } - - foreign := make(map[string][]string) - for n, x := range table.Columns() { - if v, ok := table.IsForeign(n); ok { - foreign[v] = append(foreign[v], table.Remap(x)) - - } - } - - if len(foreign) > 0 { - for k, v := range foreign { - fmt.Fprintf(&create, ",\n FOREIGN KEY(%s) REFERENCES %s (%s)", strings.Join(v, ","), k, strings.Join(v, ",")) - } - } - - fmt.Fprintln(&create, "\n);") - - var trigger strings.Builder - if start, end, ok := table.HasDateTime(); ok { - var primary []string - for n, x := range table.Columns() { - if !table.IsPrimary(n) { - continue - } - if t, ok := table.Start(); ok && x == t { - continue - } - if t, ok := table.End(); ok && x == t { - continue - } - primary = append(primary, table.Remap(x)) - } - fmt.Fprintf(&trigger, "CREATE TRIGGER IF NOT EXISTS NoOverlapOn%s", table.Name()) - fmt.Fprintf(&trigger, " BEFORE INSERT ON %s", table.Name()) - fmt.Fprintf(&trigger, " WHEN EXISTS (\n SELECT * FROM %s\n WHERE ", table.Name()) - if len(primary) > 0 { - for n, v := range primary { - if n > 0 { - fmt.Fprintf(&trigger, "\n AND ") - } - fmt.Fprintf(&trigger, "%s == NEW.%s", v, v) - } - fmt.Fprintf(&trigger, "\n AND ") - } - fmt.Fprintf(&trigger, "datetime(%s) <= datetime(NEW.%s)\n AND ", table.Remap(start), table.Remap(end)) - fmt.Fprintf(&trigger, "datetime(%s) > datetime(NEW.%s)\n)\n", table.Remap(end), table.Remap(start)) - fmt.Fprintf(&trigger, "\nBEGIN\n") - fmt.Fprintf(&trigger, "SELECT RAISE(FAIL, \"Overlapping Intervals on %s\");\n", table.Name()) - fmt.Fprintf(&trigger, "END;\n") - } - - return []string{drop.String(), create.String(), trigger.String()} -} - -func (d Database) insert(table meta.Table, list meta.ListEncoder) (string, [][]any, bool) { - - lines := table.Encode(list) - if !(len(lines) > 0) { - return "", nil, false - } - - var header []string - for _, x := range lines[0] { - header = append(header, table.Remap(x)) - } - var parts []string - for n := range header { - parts = append(parts, fmt.Sprintf("$%d", n+1)) - } - - var sb strings.Builder - - fmt.Fprintf(&sb, "INSERT INTO %s (%s) VALUES (%s);\n", table.Name(), strings.Join(header, ","), strings.Join(parts, ",")) - - var values [][]any - for _, line := range lines[1:] { - var parts []any - for n, p := range line { - switch { - case table.IsNative(n) && p == "": - parts = append(parts, "0") - default: - parts = append(parts, p) - } - } - values = append(values, parts) - } - - return sb.String(), values, true -} 
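The new meta/sqlite files that replace the reflective builder deleted above compose their SQL by hand: every foreign key is resolved at insert time through a nested scalar sub-select, so rows are addressed by natural keys rather than integer ids. Below is a minimal, self-contained sketch of that composition pattern; the `table` type here is an illustrative stand-in, not the package's actual Table helper:

```go
package main

import "fmt"

// table is a trimmed, illustrative stand-in for this package's Table type.
type table struct {
	sel func() string
}

func main() {
	mmake := table{sel: func() string {
		return "SELECT make_id FROM make WHERE make = ?"
	}}
	model := table{sel: func() string {
		return fmt.Sprintf("SELECT model_id FROM model WHERE make_id = (%s) AND model = ?", mmake.sel())
	}}

	// Matches the shape of asset.Insert() above: the nested selects consume
	// the Make and Model parameters, and the remaining placeholders take
	// Serial, Number and Notes, in field order.
	insert := fmt.Sprintf("INSERT INTO asset (model_id, serial, number, notes) VALUES ((%s), ?, ?, ?);", model.sel())
	fmt.Println(insert)
}
```

The trade-off against the deleted generic builder is explicitness: the create/insert plumbing is repeated per table, but each schema, constraint, and trigger is spelled out once and is directly reviewable.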
diff --git a/meta/sqlite/db.go b/meta/sqlite/db.go
new file mode 100644
index 000000000..4d2c7593c
--- /dev/null
+++ b/meta/sqlite/db.go
@@ -0,0 +1,484 @@
+package sqlite
+
+import (
+	"context"
+	"database/sql"
+	"fmt"
+	"log"
+
+	"github.com/GeoNet/delta/meta"
+)
+
+type DB struct {
+	db *sql.DB
+}
+
+// New wraps an sql.DB handle ready for loading delta meta tables.
+func New(db *sql.DB) DB {
+	return DB{
+		db: db,
+	}
+}
+
+// exec runs each statement in turn within the given transaction.
+func (d DB) exec(ctx context.Context, tx *sql.Tx, cmds ...string) error {
+	for _, cmd := range cmds {
+		if _, err := tx.ExecContext(ctx, cmd); err != nil {
+			return fmt.Errorf("cmd %q: %w", cmd, err)
+		}
+	}
+	return nil
+}
+
+// prepare compiles a statement and executes it once for each set of values.
+func (d DB) prepare(ctx context.Context, tx *sql.Tx, cmd string, values ...[]any) error {
+
+	stmt, err := tx.PrepareContext(ctx, cmd)
+	if err != nil {
+		return err
+	}
+	defer stmt.Close()
+
+	for _, v := range values {
+		if _, err := stmt.ExecContext(ctx, v...); err != nil {
+			return fmt.Errorf("%v : %w", v, err)
+		}
+	}
+
+	return nil
+}
+
+// Init rebuilds the database schema and loads the given tables within a single transaction.
+func (d DB) Init(ctx context.Context, list []meta.TableList) error {
+
+	tables := make(map[string]meta.TableList)
+	for _, v := range list {
+		tables[v.Table.Name()] = v
+	}
+
+	// Get a Tx for making transaction requests.
+	tx, err := d.db.BeginTx(ctx, nil)
+	if err != nil {
+		return err
+	}
+	// Defer a rollback in case anything fails; any error from the
+	// rollback itself is deliberately ignored.
+	defer func() { _ = tx.Rollback() }()
+
+	// Overall lookup tables; these should likely be pre-populated
+	// to allow constraints to be applied.
+	if err := d.exec(ctx, tx, datum.Create); err != nil {
+		return err
+	}
+
+	if err := d.exec(ctx, tx, method.Create); err != nil {
+		return err
+	}
+	if err := d.exec(ctx, tx, placeRole.Create); err != nil {
+		return err
+	}
+
+	for _, l := range list {
+		switch l.Table.Name() {
+		case "Response":
+			if err := d.exec(ctx, tx, response.Create); err != nil {
+				return fmt.Errorf("response create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, response.Insert(), response.Columns(l)...); err != nil {
+				return fmt.Errorf("response insert: %v", err)
+			}
+		case "Placename":
+			if err := d.exec(ctx, tx, placename.Create); err != nil {
+				return fmt.Errorf("placename create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, placename.Insert(), placename.Columns(l)...); err != nil {
+				return fmt.Errorf("placename insert: %v", err)
+			}
+		case "Citation":
+			if err := d.exec(ctx, tx, citation.Create); err != nil {
+				return fmt.Errorf("citation create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, citation.Insert(), citation.Columns(l)...); err != nil {
+				return fmt.Errorf("citation insert: %v", err)
+			}
+		case "Asset":
+			if err := d.exec(ctx, tx, makeCreate); err != nil {
+				return fmt.Errorf("make create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, mmake.Insert(), mmake.Columns(l)...); err != nil {
+				return fmt.Errorf("make insert: %v", err)
+			}
+			if err := d.exec(ctx, tx, modelCreate); err != nil {
+				return fmt.Errorf("model create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, model.Insert(), model.Columns(l)...); err != nil {
+				return fmt.Errorf("model insert: %v", err)
+			}
+			if err := d.exec(ctx, tx, asset.Create); err != nil {
+				return fmt.Errorf("asset create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, asset.Insert(), asset.Columns(l)...); err != nil {
+				return fmt.Errorf("asset insert: %v", err)
+			}
+		case "Firmware":
+			if err := d.exec(ctx, tx, firmware.Create); err != nil {
+				return fmt.Errorf("firmware create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, firmware.Insert(), firmware.Columns(l)...); err != nil {
+				return fmt.Errorf("firmware insert: 
%v", err) + } + case "Calibration": + if err := d.exec(ctx, tx, calibration.Create); err != nil { + return fmt.Errorf("calibration create: %v", err) + } + if err := d.prepare(ctx, tx, calibration.Insert(), calibration.Columns(l)...); err != nil { + return fmt.Errorf("calibration insert: %v", err) + } + case "Channel": + if err := d.prepare(ctx, tx, mmake.Insert(), mmake.Columns(l)...); err != nil { + return fmt.Errorf("channel make insert: %v", err) + } + if err := d.prepare(ctx, tx, model.Insert(), model.Columns(l)...); err != nil { + return fmt.Errorf("channel model insert: %v", err) + } + if err := d.exec(ctx, tx, channel.Create); err != nil { + return fmt.Errorf("channel create: %v", err) + } + if err := d.prepare(ctx, tx, channel.Insert(), channel.Columns(l)...); err != nil { + return fmt.Errorf("channel insert: %v", err) + } + case "Component": + if err := d.prepare(ctx, tx, mmake.Insert(), mmake.Columns(l)...); err != nil { + return fmt.Errorf("component make insert: %v", err) + } + if err := d.prepare(ctx, tx, model.Insert(), model.Columns(l)...); err != nil { + return fmt.Errorf("component model insert: %v", err) + } + if err := d.exec(ctx, tx, component.Create); err != nil { + return fmt.Errorf("component create: %v", err) + } + if err := d.prepare(ctx, tx, component.Insert(), component.Columns(l)...); err != nil { + return fmt.Errorf("component insert: %v", err) + } + case "Network": + if err := d.exec(ctx, tx, network.Create); err != nil { + return fmt.Errorf("network create: %v", err) + } + if err := d.prepare(ctx, tx, network.Insert(), network.Columns(l)...); err != nil { + return fmt.Errorf("network insert: %v", err) + } + case "Station": + if err := d.prepare(ctx, tx, datum.Insert(), datum.Columns(l)...); err != nil { + return fmt.Errorf("datum insert: %v", err) + } + if err := d.exec(ctx, tx, station.Create); err != nil { + return fmt.Errorf("station create: %v", err) + } + if err := d.prepare(ctx, tx, station.Insert(), station.Columns(l)...); err != nil { + return fmt.Errorf("station insert: %v", err) + } + if err := d.exec(ctx, tx, stationNetwork.Create); err != nil { + return fmt.Errorf("station network create: %v", err) + } + if err := d.prepare(ctx, tx, stationNetwork.Insert(), stationNetwork.Columns(l)...); err != nil { + return fmt.Errorf("station network insert: %v", err) + } + case "Sample": + if err := d.prepare(ctx, tx, datum.Insert(), datum.Columns(l)...); err != nil { + return fmt.Errorf("datum insert: %v", err) + } + if err := d.exec(ctx, tx, sample.Create); err != nil { + return fmt.Errorf("sample create: %v", err) + } + if err := d.prepare(ctx, tx, sample.Insert(), sample.Columns(l)...); err != nil { + return fmt.Errorf("sample insert: %v", err) + } + if err := d.exec(ctx, tx, sampleNetwork.Create); err != nil { + return fmt.Errorf("sample network create: %v", err) + } + if err := d.prepare(ctx, tx, sampleNetwork.Insert(), sampleNetwork.Columns(l)...); err != nil { + return fmt.Errorf("sample network insert: %v", err) + } + case "Site": + if err := d.prepare(ctx, tx, datum.Insert(), datum.Columns(l)...); err != nil { + return fmt.Errorf("datum insert: %v", err) + } + if err := d.exec(ctx, tx, site.Create); err != nil { + return fmt.Errorf("site create: %v", err) + } + if err := d.prepare(ctx, tx, site.Insert(), site.Columns(l)...); err != nil { + return fmt.Errorf("site insert: %v", err) + } + case "Point": + if err := d.prepare(ctx, tx, datum.Insert(), datum.Columns(l)...); err != nil { + return fmt.Errorf("datum insert: %v", err) + } + if err := 
d.exec(ctx, tx, point.Create); err != nil { + return fmt.Errorf("point create: %v", err) + } + if err := d.prepare(ctx, tx, point.Insert(), point.Columns(l)...); err != nil { + return fmt.Errorf("point insert: %v", err) + } + case "Feature": + if err := d.exec(ctx, tx, feature.Create); err != nil { + return fmt.Errorf("feature create: %v", err) + } + if err := d.prepare(ctx, tx, feature.Insert(), feature.Columns(l)...); err != nil { + return fmt.Errorf("feature insert: %v", err) + } + case "Class": + if err := d.exec(ctx, tx, class.Create); err != nil { + return fmt.Errorf("class create: %v", err) + } + if err := d.prepare(ctx, tx, class.Insert(), class.Columns(l)...); err != nil { + return fmt.Errorf("class insert: %v", err) + } + if err := d.exec(ctx, tx, classCitation.Create); err != nil { + return fmt.Errorf("class citation create: %v", err) + } + if err := d.prepare(ctx, tx, classCitation.Insert(), class.Links(l)...); err != nil { + return fmt.Errorf("class citation insert: %v", err) + } + case "Mark": + if err := d.exec(ctx, tx, mark.Create); err != nil { + return fmt.Errorf("mark create: %v", err) + } + if err := d.prepare(ctx, tx, mark.Insert(), mark.Columns(l)...); err != nil { + return fmt.Errorf("mark insert: %v", err) + } + if err := d.exec(ctx, tx, markNetwork.Create); err != nil { + return fmt.Errorf("mark network create: %v", err) + } + if err := d.prepare(ctx, tx, markNetwork.Insert(), markNetwork.Columns(l)...); err != nil { + return fmt.Errorf("mark network insert: %v", err) + } + case "Monument": + if err := d.exec(ctx, tx, markType.Create); err != nil { + return fmt.Errorf("mark type create: %v", err) + } + if err := d.prepare(ctx, tx, markType.Insert(), markType.Columns(l)...); err != nil { + return fmt.Errorf("mark type insert: %v", err) + } + if err := d.exec(ctx, tx, monumentType.Create); err != nil { + return fmt.Errorf("monument type create: %v", err) + } + if err := d.prepare(ctx, tx, monumentType.Insert(), monumentType.Columns(l)...); err != nil { + return fmt.Errorf("monument type insert: %v", err) + } + if err := d.exec(ctx, tx, foundationType.Create); err != nil { + return fmt.Errorf("foundation type create: %v", err) + } + if err := d.prepare(ctx, tx, foundationType.Insert(), foundationType.Columns(l)...); err != nil { + return fmt.Errorf("foundation type insert: %v", err) + } + if err := d.exec(ctx, tx, bedrock.Create); err != nil { + return fmt.Errorf("bedrock create: %v", err) + } + if err := d.prepare(ctx, tx, bedrock.Insert(), bedrock.Columns(l)...); err != nil { + return fmt.Errorf("bedrock insert: %v", err) + } + if err := d.exec(ctx, tx, geology.Create); err != nil { + return fmt.Errorf("geology create: %v", err) + } + if err := d.prepare(ctx, tx, geology.Insert(), geology.Columns(l)...); err != nil { + return fmt.Errorf("geology insert: %v", err) + } + if err := d.exec(ctx, tx, monument.Create); err != nil { + return fmt.Errorf("monument create: %v", err) + } + if err := d.prepare(ctx, tx, monument.Insert(), monument.Columns(l)...); err != nil { + return fmt.Errorf("monument insert: %v", err) + } + case "Visibility": + if err := d.exec(ctx, tx, visibility.Create); err != nil { + return fmt.Errorf("visibility create: %v", err) + } + if err := d.prepare(ctx, tx, visibility.Insert(), visibility.Columns(l)...); err != nil { + return fmt.Errorf("visibility insert: %v", err) + } + case "Antenna": + if err := d.exec(ctx, tx, antenna.Create); err != nil { + return fmt.Errorf("antenna create: %v", err) + } + if err := d.prepare(ctx, tx, antenna.Insert(), 
antenna.Columns(l)...); err != nil { + return fmt.Errorf("antenna insert: %v", err) + } + case "MetSensor": + if err := d.exec(ctx, tx, metsensor.Create); err != nil { + return fmt.Errorf("metsensor create: %v", err) + } + if err := d.prepare(ctx, tx, metsensor.Insert(), metsensor.Columns(l)...); err != nil { + return fmt.Errorf("metsensor insert: %v", err) + } + case "Radome": + if err := d.exec(ctx, tx, radome.Create); err != nil { + return fmt.Errorf("radome create: %v", err) + } + if err := d.prepare(ctx, tx, radome.Insert(), radome.Columns(l)...); err != nil { + return fmt.Errorf("radome insert: %v", err) + } + case "Receiver": + if err := d.exec(ctx, tx, receiver.Create); err != nil { + return fmt.Errorf("receiver create: %v", err) + } + if err := d.prepare(ctx, tx, receiver.Insert(), receiver.Columns(l)...); err != nil { + return fmt.Errorf("receiver insert: %v", err) + } + case "Session": + if err := d.exec(ctx, tx, session.Create); err != nil { + return fmt.Errorf("session create: %v", err) + } + if err := d.prepare(ctx, tx, session.Insert(), session.Columns(l)...); err != nil { + return fmt.Errorf("session insert: %v", err) + } + case "Gauge": + if err := d.exec(ctx, tx, gauge.Create); err != nil { + return fmt.Errorf("gauge create: %v", err) + } + if err := d.prepare(ctx, tx, gauge.Insert(), gauge.Columns(l)...); err != nil { + return fmt.Errorf("gauge insert: %v", err) + } + case "Constituent": + if err := d.exec(ctx, tx, constituent.Create); err != nil { + return fmt.Errorf("constituent create: %v", err) + } + if err := d.prepare(ctx, tx, constituent.Insert(), constituent.Columns(l)...); err != nil { + return fmt.Errorf("constituent insert: %v", err) + } + case "Dart": + if err := d.exec(ctx, tx, dartCreate); err != nil { + return fmt.Errorf("dart create: %v", err) + } + if err := d.prepare(ctx, tx, dart.Insert(), dart.Columns(l)...); err != nil { + return fmt.Errorf("dart insert: %v", err) + } + case "Mount": + if err := d.prepare(ctx, tx, datum.Insert(), datum.Columns(l)...); err != nil { + return fmt.Errorf("datum insert: %v", err) + } + if err := d.exec(ctx, tx, mount.Create); err != nil { + return fmt.Errorf("mount create: %v", err) + } + if err := d.prepare(ctx, tx, mount.Insert(), mount.Columns(l)...); err != nil { + return fmt.Errorf("mount insert: %v", err) + } + if err := d.exec(ctx, tx, mountNetwork.Create); err != nil { + return fmt.Errorf("mount network create: %v", err) + } + if err := d.prepare(ctx, tx, mountNetwork.Insert(), mountNetwork.Columns(l)...); err != nil { + return fmt.Errorf("mount network insert: %v", err) + } + case "View": + if err := d.exec(ctx, tx, view.Create); err != nil { + return fmt.Errorf("view create: %v", err) + } + if err := d.prepare(ctx, tx, view.Insert(), view.Columns(l)...); err != nil { + return fmt.Errorf("view insert: %v", err) + } + case "Camera": + if err := d.exec(ctx, tx, camera.Create); err != nil { + return fmt.Errorf("camera create: %v", err) + } + if err := d.prepare(ctx, tx, camera.Insert(), camera.Columns(l)...); err != nil { + return fmt.Errorf("camera insert: %v", err) + } + case "Doas": + if err := d.exec(ctx, tx, doas.Create); err != nil { + return fmt.Errorf("doas create: %v", err) + } + if err := d.prepare(ctx, tx, doas.Insert(), doas.Columns(l)...); err != nil { + return fmt.Errorf("doas insert: %v", err) + } + case "Datalogger": + if err := d.prepare(ctx, tx, placeRole.Insert(), placeRole.Columns(l)...); err != nil { + return fmt.Errorf("place role insert: %v", err) + } + if err := d.exec(ctx, tx, 
datalogger.Create); err != nil {
+				return fmt.Errorf("datalogger create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, datalogger.Insert(), datalogger.Columns(l)...); err != nil {
+				return fmt.Errorf("datalogger insert: %v", err)
+			}
+		case "Sensor":
+			if err := d.prepare(ctx, tx, method.Insert(), method.Columns(l)...); err != nil {
+				return fmt.Errorf("method insert: %v", err)
+			}
+			if err := d.exec(ctx, tx, sensor.Create); err != nil {
+				return fmt.Errorf("sensor create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, sensor.Insert(), sensor.Columns(l)...); err != nil {
+				return fmt.Errorf("sensor insert: %v", err)
+			}
+		case "Recorder":
+			if err := d.prepare(ctx, tx, method.Insert(), method.Columns(l)...); err != nil {
+				return fmt.Errorf("method insert: %v", err)
+			}
+			if err := d.exec(ctx, tx, recorder.Create); err != nil {
+				return fmt.Errorf("recorder create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, recorderModel.Insert(), recorderModel.Columns(l)...); err != nil {
+				return fmt.Errorf("recorder model insert: %v", err)
+			}
+			if err := d.prepare(ctx, tx, recorder.Insert(), recorder.Columns(l)...); err != nil {
+				return fmt.Errorf("recorder insert: %v", err)
+			}
+		case "Timing":
+			if err := d.exec(ctx, tx, timing.Create); err != nil {
+				return fmt.Errorf("timing create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, timing.Insert(), timing.Columns(l)...); err != nil {
+				return fmt.Errorf("timing insert: %v", err)
+			}
+		case "Telemetry":
+			if err := d.exec(ctx, tx, telemetry.Create); err != nil {
+				return fmt.Errorf("telemetry create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, telemetry.Insert(), telemetry.Columns(l)...); err != nil {
+				return fmt.Errorf("telemetry insert: %v", err)
+			}
+		case "Polarity":
+			if err := d.exec(ctx, tx, polarity.Create); err != nil {
+				return fmt.Errorf("polarity create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, polarity.Insert(), polarity.Columns(l)...); err != nil {
+				return fmt.Errorf("polarity insert: %v", err)
+			}
+		case "Gain":
+			if err := d.exec(ctx, tx, gain.Create); err != nil {
+				return fmt.Errorf("gain create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, gain.Insert(), gain.Columns(l)...); err != nil {
+				return fmt.Errorf("gain insert: %v", err)
+			}
+		case "Preamp":
+			if err := d.exec(ctx, tx, preamp.Create); err != nil {
+				return fmt.Errorf("preamp create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, preamp.Insert(), preamp.Columns(l)...); err != nil {
+				return fmt.Errorf("preamp insert: %v", err)
+			}
+		case "Stream":
+			if err := d.exec(ctx, tx, stream.Create); err != nil {
+				return fmt.Errorf("stream create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, stream.Insert(), stream.Columns(l)...); err != nil {
+				return fmt.Errorf("stream insert: %v", err)
+			}
+		case "Connection":
+			if err := d.prepare(ctx, tx, placeRole.Insert(), placeRole.Columns(l)...); err != nil {
+				return fmt.Errorf("connection place role insert: %v", err)
+			}
+			if err := d.exec(ctx, tx, connection.Create); err != nil {
+				return fmt.Errorf("connection create: %v", err)
+			}
+			if err := d.prepare(ctx, tx, connection.Insert(), connection.Columns(l)...); err != nil {
+				return fmt.Errorf("connection insert: %v", err)
+			}
+		default:
+			log.Printf("ignoring %s", l.Table.Name())
+		}
+	}
+
+	// Commit the transaction.
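+	// Everything above ran on the single transaction opened by BeginTx;
+	// if any create or insert failed, the deferred Rollback discards the
+	// whole load, so the database is either fully built or left unchanged.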
+	if err = tx.Commit(); err != nil {
+		return err
+	}
+
+	return nil
+}
diff --git a/meta/sqlite/gauge.go b/meta/sqlite/gauge.go
new file mode 100644
index 000000000..9614b5882
--- /dev/null
+++ b/meta/sqlite/gauge.go
@@ -0,0 +1,101 @@
+package sqlite
+
+import (
+	"fmt"
+)
+
+const gaugeCreate = `
+DROP TABLE IF EXISTS gauge;
+CREATE TABLE IF NOT EXISTS gauge (
+  gauge_id INTEGER PRIMARY KEY NOT NULL,
+  gauge TEXT NOT NULL,
+  identification_number TEXT NOT NULL,
+  analysis_time_zone REAL NOT NULL,
+  analysis_latitude REAL NOT NULL,
+  analysis_longitude REAL NOT NULL,
+  crex_tag TEXT NOT NULL,
+  start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+  end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+  UNIQUE(gauge, start_date, end_date)
+);
+`
+
+var gauge = Table{
+	Create: gaugeCreate,
+	Select: func() string {
+		return "SELECT gauge_id FROM gauge WHERE gauge = ?"
+	},
+	Insert: func() string {
+		return "INSERT INTO gauge (gauge, identification_number, analysis_time_zone, analysis_latitude, analysis_longitude, crex_tag, start_date, end_date) VALUES (?, ?, ?, ?, ?, ?, ?, ?);"
+	},
+	Fields: []string{"Gauge", "Identification Number", "Analysis Time Zone", "Analysis Latitude", "Analysis Longitude", "Crex Tag", "Start Date", "End Date"},
+}
+
+const constituentCreate = `
+DROP TABLE IF EXISTS constituent;
+CREATE TABLE IF NOT EXISTS constituent (
+  constituent_id INTEGER PRIMARY KEY NOT NULL,
+  gauge_id INTEGER NOT NULL,
+  number TEXT NOT NULL,
+  constituent TEXT NOT NULL,
+  amplitude REAL NOT NULL,
+  lag TEXT NOT NULL,
+  start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+  end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+  FOREIGN KEY (gauge_id) REFERENCES gauge (gauge_id),
+  UNIQUE(gauge_id, number, start_date)
+);
+CREATE TRIGGER IF NOT EXISTS no_overlap_on_constituent BEFORE INSERT ON constituent
+WHEN EXISTS (
+  SELECT * FROM constituent
+  WHERE datetime(start_date) <= datetime(NEW.end_date)
+    AND datetime(end_date) > datetime(NEW.start_date)
+    AND gauge_id = NEW.gauge_id
+    AND number = NEW.number
+)
+BEGIN
+  SELECT RAISE(FAIL, "overlapping intervals on constituent");
+END;
+`
+
+var constituent = Table{
+	Create: constituentCreate,
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO constituent (gauge_id, number, constituent, amplitude, lag, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?);",
+			gauge.Select(),
+		)
+	},
+	Fields: []string{"Gauge", "Number", "Constituent", "Amplitude", "Lag", "Start Date", "End Date"},
+}
+
+const dartCreate = `
+DROP TABLE IF EXISTS dart;
+CREATE TABLE IF NOT EXISTS dart (
+  dart_id INTEGER PRIMARY KEY NOT NULL,
+  station_id INTEGER NOT NULL,
+  pid TEXT NOT NULL,
+  wmo_identifier TEXT NOT NULL,
+  start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+  end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+  FOREIGN KEY (station_id) REFERENCES station (station_id),
+  UNIQUE(station_id, start_date, end_date)
+);
+CREATE TRIGGER IF NOT EXISTS no_overlap_on_dart BEFORE INSERT ON dart
+WHEN EXISTS (
+  SELECT * FROM dart
+  WHERE datetime(start_date) <= datetime(NEW.end_date)
+    AND datetime(end_date) > datetime(NEW.start_date)
+    AND station_id = NEW.station_id
+)
+BEGIN
+  SELECT RAISE(FAIL, "overlapping intervals on dart");
+END;
+`
+
+var dart = Table{
+	
Create: dartCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO dart (station_id, pid, wmo_identifier, start_date, end_date) VALUES ((%s), ?, ?, ?, ?);", station.Select()) + }, + Fields: []string{"Station", "Pid", "WMO Identifier", "Start Date", "End Date"}, +} diff --git a/meta/sqlite/mark.go b/meta/sqlite/mark.go index 9b8245f9e..896ca8100 100644 --- a/meta/sqlite/mark.go +++ b/meta/sqlite/mark.go @@ -1,58 +1,227 @@ package sqlite import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" + "fmt" ) -func Marks(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Mark, error) { - - query := `SELECT Code,Network,Igs,Name,Latitude,Longitude,Elevation,Datum,Start,End FROM Mark` - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) - if err != nil { - return nil, err - } - defer results.Close() - - marks := make([]meta.Mark, 0) - for results.Next() { - var mark meta.Mark - var igs string - if err := results.Scan(&mark.Code, &mark.Network, &igs, &mark.Name, &mark.Latitude, &mark.Longitude, &mark.Elevation, &mark.Datum, &mark.Start, &mark.End); err != nil { - return nil, err - } - if b, ok := ParseBool(igs); ok { - mark.Igs = b - } - marks = append(marks, mark) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return marks, nil +// should be loaded from a reference file +const bedrockCreate = ` +DROP TABLE IF EXISTS bedrock; +CREATE TABLE IF NOT EXISTS bedrock ( + bedrock_id INTEGER PRIMARY KEY NOT NULL, + bedrock TEXT NOT NULL, + UNIQUE (bedrock) +);` + +var bedrock = Table{ + Create: bedrockCreate, + Select: func() string { + return "SELECT bedrock_id FROM bedrock WHERE bedrock = ?" + }, + Insert: func() string { + return "INSERT INTO bedrock (bedrock) VALUES (?) ON CONFLICT(bedrock) DO NOTHING;" + }, + Fields: []string{"Bedrock"}, +} + +// should be loaded from a reference file +const markTypeCreate = ` +DROP TABLE IF EXISTS mark_type; +CREATE TABLE IF NOT EXISTS mark_type ( + mark_type_id INTEGER PRIMARY KEY NOT NULL, + mark_type TEXT NOT NULL, + UNIQUE (mark_type) +);` + +var markType = Table{ + Create: markTypeCreate, + Select: func() string { + return "SELECT mark_type_id FROM mark_type WHERE mark_type = ?" + }, + Insert: func() string { + return "INSERT INTO mark_type (mark_type) VALUES (?) ON CONFLICT(mark_type) DO NOTHING;" + }, + Fields: []string{"Mark Type"}, +} + +// should be loaded from a reference file +const monumentTypeCreate = ` +DROP TABLE IF EXISTS monument_type; +CREATE TABLE IF NOT EXISTS monument_type ( + monument_type_id INTEGER PRIMARY KEY NOT NULL, + monument_type TEXT NOT NULL, + UNIQUE (monument_type) +);` + +var monumentType = Table{ + Create: monumentTypeCreate, + Select: func() string { + return "SELECT monument_type_id FROM monument_type WHERE monument_type = ?" + }, + Insert: func() string { + return "INSERT INTO monument_type (monument_type) VALUES (?) 
ON CONFLICT(monument_type) DO NOTHING;" + }, + Fields: []string{"Type"}, +} + +// should be loaded from a reference file +const foundationTypeCreate = ` +DROP TABLE IF EXISTS foundation_type; +CREATE TABLE IF NOT EXISTS foundation_type ( + foundation_type_id INTEGER PRIMARY KEY NOT NULL, + foundation_type TEXT NOT NULL, + UNIQUE (foundation_type) +);` + +var foundationType = Table{ + Create: foundationTypeCreate, + Select: func() string { + return "SELECT foundation_type_id FROM foundation_type WHERE foundation_type = ?" + }, + Insert: func() string { + return "INSERT INTO foundation_type (foundation_type) VALUES (?) ON CONFLICT(foundation_type) DO NOTHING;" + }, + Fields: []string{"Foundation Type"}, +} + +// should be loaded from a reference file +const geologyCreate = ` +DROP TABLE IF EXISTS geology; +CREATE TABLE IF NOT EXISTS geology ( + geology_id INTEGER PRIMARY KEY NOT NULL, + geology TEXT NOT NULL, + UNIQUE (geology) +);` + +var geology = Table{ + Create: geologyCreate, + Select: func() string { + return "SELECT geology_id FROM geology WHERE geology = ?" + }, + Insert: func() string { + return "INSERT INTO geology (geology) VALUES (?) ON CONFLICT(geology) DO NOTHING;" + }, + Fields: []string{"Geology"}, +} + +const markCreate = ` +DROP TABLE IF EXISTS mark; +CREATE TABLE IF NOT EXISTS mark ( + mark_id INTEGER PRIMARY KEY NOT NULL, + datum_id INTEGER NOT NULL, + mark TEXT NOT NULL, + igs BOOLEAN NOT NULL, + name TEXT NOT NULL, + latitude REAL NOT NULL, + longitude REAL NOT NULL, + elevation REAL DEFAULT 0 NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (datum_id) REFERENCES datum (datum_id), + UNIQUE (mark) +);` + +var mark = Table{ + Create: markCreate, + Select: func() string { + return "SELECT mark_id FROM mark WHERE mark = ?" 
+ }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO mark (datum_id, mark, igs, name, latitude, longitude, elevation, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?);", + datum.Select(), + ) + }, + + Fields: []string{"Datum", "Mark", "Igs", "Name", "Latitude", "Longitude", "Elevation", "Start Date", "End Date"}, +} + +const markNetworkCreate = ` +DROP TABLE IF EXISTS mark_network; +CREATE TABLE IF NOT EXISTS mark_network ( + mark_network_id INTEGER PRIMARY KEY NOT NULL, + mark_id INTEGER NOT NULL, + network_id INTEGER NOT NULL, + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + FOREIGN KEY (network_id) REFERENCES network (network_id), + UNIQUE (mark_id, network_id) +);` + +var markNetwork = Table{ + Create: markNetworkCreate, + Select: func() string { + return fmt.Sprintf("SELECT mark_network_id FROM mark_network WHERE mark_id = (%s) AND network_id = (%s)", + mark.Select(), network.Select(), + ) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO mark_network (mark_id, network_id) VALUES ((%s), (%s));", + mark.Select(), network.Select(), + ) + }, + Fields: []string{"Mark", "Network"}, +} + +const monumentCreate = ` +DROP TABLE IF EXISTS monument; +CREATE TABLE IF NOT EXISTS monument ( + monument_id INTEGER PRIMARY KEY NOT NULL, + mark_id INTEGER NOT NULL, + mark_type_id INTEGER NOT NULL, + monument_type_id INTEGER NOT NULL, + foundation_type_id INTEGER NOT NULL, + bedrock_id INTEGER NOT NULL, + geology_id INTEGER NOT NULL, + domes_number TEXT NOT NULL, + ground_relationship REAL NOT NULL, + foundation_depth REAL NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + FOREIGN KEY (mark_type_id) REFERENCES mark_type (mark_type_id), + FOREIGN KEY (monument_type_id) REFERENCES monument_type (monument_type_id), + FOREIGN KEY (foundation_type_id) REFERENCES foundation_type (foundation_type_id), + FOREIGN KEY (bedrock_id) REFERENCES bedrock (bedrock_id), + FOREIGN KEY (geology_id) REFERENCES geology (geology_id), + UNIQUE (mark_id) +);` + +var monument = Table{ + Create: monumentCreate, + Select: func() string { + return fmt.Sprintf("SELECT monument_id FROM monument WHERE mark_id = (%s)", mark.Select()) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO monument (mark_id, mark_type_id, monument_type_id, foundation_type_id, bedrock_id, geology_id, domes_number, ground_relationship, foundation_depth, start_date, end_date) VALUES ((%s), (%s), (%s), (%s), (%s), (%s), ?, ?, ?, ?, ?);", + mark.Select(), markType.Select(), monumentType.Select(), foundationType.Select(), bedrock.Select(), geology.Select(), + ) + }, + Fields: []string{"Mark", "Mark Type", "Type", "Foundation Type", "Bedrock", "Geology", "Domes Number", "Ground Relationship", "Foundation Depth", "Start Date", "End Date"}, +} + +const visibilityCreate = ` +DROP TABLE IF EXISTS visibility; +CREATE TABLE IF NOT EXISTS visibility ( + visibility_id INTEGER PRIMARY KEY NOT NULL, + mark_id INTEGER NOT NULL, + sky_visibility TEXT NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + UNIQUE(mark_id, sky_visibility, start_date, end_date) +); +` + +var visibility = Table{ + Create: visibilityCreate, + Select: 
func() string { + return fmt.Sprintf("SELECT visibility_id FROM visibility WHERE mark_id = (%s) AND start_date = ? AND end_date = ?", + mark.Select(), + ) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO visibility (mark_id, sky_visibility, start_date, end_date) VALUES ((%s), ?, ?, ?);", + mark.Select(), + ) + }, + Fields: []string{"Mark", "Sky Visibility", "Start Date", "End Date"}, } diff --git a/meta/sqlite/monument.go b/meta/sqlite/monument.go deleted file mode 100644 index d60a4e343..000000000 --- a/meta/sqlite/monument.go +++ /dev/null @@ -1,55 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" -) - -func Monuments(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Monument, error) { - - query := `SELECT Mark,DomesNumber,MarkType,Type,GroundRelationship,FoundationType,FoundationDepth,Start,End,Bedrock,Geology FROM Monument` - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) - if err != nil { - return nil, err - } - defer results.Close() - - monuments := make([]meta.Monument, 0) - for results.Next() { - var monument meta.Monument - if err := results.Scan(&monument.Mark, &monument.DomesNumber, &monument.MarkType, &monument.Type, &monument.GroundRelationship, &monument.FoundationType, &monument.FoundationDepth, &monument.Start, &monument.End, &monument.Bedrock, &monument.Geology); err != nil { - return nil, err - } - - monuments = append(monuments, monument) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return monuments, nil -} diff --git a/meta/sqlite/mount.go b/meta/sqlite/mount.go index d2ba4d5de..d2e0cb6a3 100644 --- a/meta/sqlite/mount.go +++ b/meta/sqlite/mount.go @@ -1,54 +1,116 @@ package sqlite import ( - "context" - "database/sql" + "fmt" +) + +const mountCreate = ` +DROP TABLE IF EXISTS mount; +CREATE TABLE IF NOT EXISTS mount ( + mount_id INTEGER PRIMARY KEY NOT NULL, + datum_id INTEGER NOT NULL, + mount TEXT NOT NULL, + name TEXT NOT NULL, + latitude REAL NOT NULL, + longitude REAL NOT NULL, + elevation REAL NOT NULL, + description TEXT DEFAULT "" NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (datum_id) REFERENCES datum (datum_id), + UNIQUE(mount, start_date, end_date) +); +` + +var mount = Table{ + Create: mountCreate, + Select: func() string { + return "SELECT mount_id FROM mount WHERE mount = ?" 
+ }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO mount (datum_id, mount, name, latitude, longitude, elevation, description, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?);", + datum.Select(), + ) + }, + Fields: []string{"Datum", "Mount", "Name", "Latitude", "Longitude", "Elevation", "Description", "Start Date", "End Date"}, +} + +const mountNetworkCreate = ` +DROP TABLE IF EXISTS mount_network; +CREATE TABLE IF NOT EXISTS mount_network ( + mount_network_id INTEGER PRIMARY KEY NOT NULL, + mount_id INTEGER NOT NULL, + network_id INTEGER NOT NULL, + FOREIGN KEY (mount_id) REFERENCES mount (mount_id), + FOREIGN KEY (network_id) REFERENCES network (network_id), + UNIQUE (mount_id, network_id) +);` + +var mountNetwork = Table{ + Create: mountNetworkCreate, + Select: func() string { + return fmt.Sprintf("SELECT mount_network_id FROM mount_network WHERE mount_id = (%s) AND network_id = (%s)", + mount.Select(), network.Select(), + ) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO mount_network (mount_id, network_id) VALUES ((%s), (%s));", + mount.Select(), network.Select()) + }, + + Fields: []string{"Mount", "Network"}, +} - "github.com/GeoNet/delta/meta" +const viewCreate = ` +DROP TABLE IF EXISTS view; +CREATE TABLE IF NOT EXISTS view ( + view_id INTEGER PRIMARY KEY NOT NULL, + mount_id INTEGER NOT NULL, + view TEXT NOT NULL, + label TEXT NULL, + azimuth REAL NOT NULL, + dip REAL NOT NULL, + method TEXT NULL, + description TEXT NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (mount_id) REFERENCES mount (mount_id), + UNIQUE(mount_id, start_date, view) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_view BEFORE INSERT ON view +WHEN EXISTS ( + SELECT * FROM view + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND mount_id = NEW.mount_id + AND view = NEW.view ) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on view"); +END; +CREATE TRIGGER IF NOT EXISTS view_too_soon BEFORE INSERT ON view +WHEN NEW.start_date < (SELECT mount.start_date FROM mount WHERE mount.mount_id = new.mount_id) +BEGIN + SELECT RAISE(FAIL, "view too soon for mount"); +END; +CREATE TRIGGER IF NOT EXISTS view_too_late BEFORE INSERT ON view +WHEN NEW.end_date > (SELECT mount.end_date FROM mount WHERE mount.mount_id = new.mount_id) +BEGIN + SELECT RAISE(FAIL, "view too late for mount"); +END; +` -func Mounts(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Mount, error) { - - query := `SELECT Code,Network,Name,Latitude,Longitude,Elevation,Datum,Description,Start,End FROM Mount` - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) 
- if err != nil { - return nil, err - } - defer results.Close() - - mounts := make([]meta.Mount, 0) - for results.Next() { - var mount meta.Mount - if err := results.Scan(&mount.Code, &mount.Network, &mount.Name, &mount.Latitude, &mount.Longitude, &mount.Elevation, &mount.Datum, &mount.Description, &mount.Start, &mount.End); err != nil { - return nil, err - } - mounts = append(mounts, mount) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return mounts, nil +var view = Table{ + Create: viewCreate, + Select: func() string { + return fmt.Sprintf("SELECT view_id FROM view WHERE mount_id = (%s) AND view = ? AND start_date <= ? AND end_date >= ?", + mount.Select(), + ) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO view (mount_id, view, label, azimuth, dip, method, description, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?);", + mount.Select(), + ) + }, + Fields: []string{"Mount", "View", "Label", "Azimuth", "Dip", "Method", "Description", "Start Date", "End Date"}, } diff --git a/meta/sqlite/network.go b/meta/sqlite/network.go deleted file mode 100644 index a38f6b09a..000000000 --- a/meta/sqlite/network.go +++ /dev/null @@ -1,54 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" -) - -func Networks(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Network, error) { - - query := "SELECT Code,External,Description,Restricted FROM Network" - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) - if err != nil { - return nil, err - } - defer results.Close() - - networks := make([]meta.Network, 0) - for results.Next() { - var network meta.Network - if err := results.Scan(&network.Code, &network.External, &network.Description, &network.Restricted); err != nil { - return nil, err - } - networks = append(networks, network) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return networks, nil -} diff --git a/meta/sqlite/point.go b/meta/sqlite/point.go deleted file mode 100644 index 77cc5b762..000000000 --- a/meta/sqlite/point.go +++ /dev/null @@ -1,54 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" -) - -func Points(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Point, error) { - - query := "SELECT Sample,Location,Latitude,Longitude,Elevation,Depth,Datum,Survey,Start,End FROM Point" - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) 
- if err != nil { - return nil, err - } - defer results.Close() - - points := make([]meta.Point, 0) - for results.Next() { - var point meta.Point - if err := results.Scan(&point.Sample, &point.Location, &point.Latitude, &point.Longitude, &point.Elevation, &point.Depth, &point.Datum, &point.Survey, &point.Start, &point.End); err != nil { - return nil, err - } - points = append(points, point) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return points, nil -} diff --git a/meta/sqlite/receiver.go b/meta/sqlite/receiver.go new file mode 100644 index 000000000..8a8703dd5 --- /dev/null +++ b/meta/sqlite/receiver.go @@ -0,0 +1,200 @@ +package sqlite + +import ( + "fmt" +) + +const antennaCreate = ` +DROP TABLE IF EXISTS antenna; +CREATE TABLE IF NOT EXISTS antenna ( + antenna_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + mark_id INTEGER NOT NULL, + height REAL NOT NULL, + north REAL NOT NULL, + east REAL NOT NULL, + azimuth REAL NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + UNIQUE(asset_id, mark_id, start_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_antenna BEFORE INSERT ON antenna +WHEN EXISTS ( + SELECT * FROM antenna + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND mark_id = NEW.mark_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on antenna"); +END; +` + +var antenna = Table{ + Create: antennaCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO antenna (asset_id, mark_id, height, north, east, azimuth, start_date, end_date) VALUES ((%s), (%s), ?, ?, ?, ?, ?, ?);", + asset.Select(), mark.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Mark", "Height", "North", "East", "Azimuth", "Start Date", "End Date"}, +} + +const metsensorCreate = ` +DROP TABLE IF EXISTS metsensor; +CREATE TABLE IF NOT EXISTS metsensor ( + metsensor_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + mark_id INTEGER NOT NULL, + datum_id INTEGER NOT NULL, + ims_comment TEXT NULL, + humidity REAL NOT NULL, + pressure REAL NOT NULL, + temperature REAL NOT NULL, + latitude REAL NOT NULL, + longitude REAL NOT NULL, + elevation REAL NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + FOREIGN KEY (datum_id) REFERENCES datum (datum_id), + UNIQUE(asset_id, mark_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_metsensor BEFORE INSERT ON metsensor +WHEN EXISTS ( + SELECT * FROM metsensor + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND mark_id = NEW.mark_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on metsensor"); +END; +` + +var metsensor = Table{ + Create: metsensorCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO metsensor (asset_id, mark_id, datum_id, ims_comment, humidity, pressure, temperature, latitude, longitude, elevation, start_date, end_date) VALUES ((%s), (%s), (%s), ?, ?, ?, ?, 
?, ?, ?, ?, ?);", + asset.Select(), mark.Select(), datum.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Mark", "Datum", "IMS Comment", "Humidity", "Pressure", "Temperature", "Latitude", "Longitude", "Elevation", "Start Date", "End Date"}, +} + +var radomeCreate = ` +DROP TABLE IF EXISTS radome; +CREATE TABLE IF NOT EXISTS radome ( + radome_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + mark_id INTEGER NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + UNIQUE(asset_id, mark_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_radome BEFORE INSERT ON radome +WHEN EXISTS ( + SELECT * FROM radome + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND mark_id = NEW.mark_id + AND asset_id = NEW.asset_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on radome"); +END; +` + +var radome = Table{ + Create: radomeCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO radome (asset_id, mark_id, start_date, end_date) VALUES ((%s), (%s), ?, ?);", + asset.Select(), mark.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Mark", "Start Date", "End Date"}, +} + +var receiverCreate = ` +DROP TABLE IF EXISTS receiver; +CREATE TABLE IF NOT EXISTS receiver ( + receiver_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + mark_id INTEGER NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + UNIQUE(asset_id, mark_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_receiver BEFORE INSERT ON receiver +WHEN EXISTS ( + SELECT * FROM receiver + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND mark_id = NEW.mark_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on receiver"); +END; +` + +var receiver = Table{ + Create: receiverCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO receiver (asset_id, mark_id, start_date, end_date) VALUES ((%s), (%s), ?, ?);", + asset.Select(), mark.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Mark", "Start Date", "End Date"}, +} + +const sessionCreate = ` +DROP TABLE IF EXISTS session; +CREATE TABLE IF NOT EXISTS session ( + session_id INTEGER PRIMARY KEY NOT NULL, + mark_id INTEGER NOT NULL, + operator TEXT NOT NULL, + agency TEXT NOT NULL, + model TEXT NOT NULL, + satellite_system TEXT NOT NULL, + interval TEXT NOT NULL, + elevation_mask REAL NOT NULL, + header_comment TEXT NOT NULL, + format TEXT NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (mark_id) REFERENCES mark (mark_id), + UNIQUE(interval, mark_id, start_date) +); + +CREATE TRIGGER IF NOT EXISTS no_overlap_on_session BEFORE INSERT ON session +WHEN EXISTS ( + SELECT * FROM session + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND 
datetime(end_date) > datetime(NEW.start_date)
+    AND mark_id = NEW.mark_id
+    AND interval = NEW.interval
+)
+BEGIN
+  SELECT RAISE(FAIL, "overlapping intervals on session");
+END;
+`
+
+var session = Table{
+	Create: sessionCreate,
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO session (mark_id, operator, agency, model, satellite_system, interval, elevation_mask, header_comment, format, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
+			mark.Select(),
+		)
+	},
+	Fields: []string{"Mark", "Operator", "Agency", "Model", "Satellite System", "Interval", "Elevation Mask", "Header Comment", "Format", "Start Date", "End Date"},
+}
diff --git a/meta/sqlite/reference.go b/meta/sqlite/reference.go
new file mode 100644
index 000000000..7d10800ef
--- /dev/null
+++ b/meta/sqlite/reference.go
@@ -0,0 +1,50 @@
+package sqlite
+
+const placenameCreate = `
+DROP TABLE IF EXISTS placename;
+CREATE TABLE IF NOT EXISTS placename (
+  placename_id INTEGER PRIMARY KEY NOT NULL,
+  name TEXT NOT NULL,
+  latitude REAL NOT NULL,
+  longitude REAL NOT NULL,
+  level INTEGER NOT NULL,
+  UNIQUE(name)
+);
+`
+
+var placename = Table{
+	Create: placenameCreate,
+	Insert: func() string {
+		return "INSERT INTO placename (name, latitude, longitude, level) VALUES (?, ?, ?, ?);"
+	},
+	Fields: []string{"Name", "Latitude", "Longitude", "Level"},
+}
+
+const citationCreate = `
+DROP TABLE IF EXISTS citation;
+CREATE TABLE IF NOT EXISTS citation (
+  citation_id INTEGER PRIMARY KEY NOT NULL,
+  key TEXT NOT NULL,
+  author TEXT NOT NULL,
+  year REAL NOT NULL,
+  title TEXT NOT NULL,
+  published TEXT NULL,
+  volume TEXT NULL,
+  pages TEXT NULL,
+  doi TEXT NULL,
+  link TEXT NULL,
+  retrieved TEXT NULL,
+  UNIQUE(key)
+);
+`
+
+var citation = Table{
+	Create: citationCreate,
+	Select: func() string {
+		return "SELECT citation_id FROM citation WHERE key = ?"
+	},
+	Insert: func() string {
+		return "INSERT INTO citation (key, author, year, title, published, volume, pages, doi, link, retrieved) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"
+	},
+	Fields: []string{"Key", "Author", "Year", "Title", "Published", "Volume", "Pages", "DOI", "Link", "Retrieved"},
+}
diff --git a/meta/sqlite/response.go b/meta/sqlite/response.go
new file mode 100644
index 000000000..bc53538bf
--- /dev/null
+++ b/meta/sqlite/response.go
@@ -0,0 +1,21 @@
+package sqlite
+
+const responseCreate = `
+DROP TABLE IF EXISTS response;
+CREATE TABLE IF NOT EXISTS response (
+  response_id INTEGER PRIMARY KEY NOT NULL,
+  response TEXT NOT NULL,
+  xml TEXT NOT NULL,
+  UNIQUE (response)
+);`
+
+var response = Table{
+	Create: responseCreate,
+	Select: func() string {
+		return "SELECT response_id FROM response WHERE response = ?"
+ }, + Insert: func() string { + return "INSERT INTO response (response, xml) VALUES (?, ?);" + }, + Fields: []string{"Response", "XML"}, +} diff --git a/meta/sqlite/sample.go b/meta/sqlite/sample.go deleted file mode 100644 index 914239cb2..000000000 --- a/meta/sqlite/sample.go +++ /dev/null @@ -1,54 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" -) - -func Samples(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Sample, error) { - - query := `SELECT Code,Network,Name,Latitude,Longitude,Elevation,Depth,Datum,Start,End FROM Sample` - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) - if err != nil { - return nil, err - } - defer results.Close() - - samples := make([]meta.Sample, 0) - for results.Next() { - var sample meta.Sample - if err := results.Scan(&sample.Code, &sample.Network, &sample.Name, &sample.Latitude, &sample.Longitude, &sample.Elevation, &sample.Depth, &sample.Datum, &sample.Start, &sample.End); err != nil { - return nil, err - } - samples = append(samples, sample) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return samples, nil -} diff --git a/meta/sqlite/sensor.go b/meta/sqlite/sensor.go index 9a048f20c..dad30fe90 100644 --- a/meta/sqlite/sensor.go +++ b/meta/sqlite/sensor.go @@ -1,56 +1,431 @@ package sqlite import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" -) - -func Sensors(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.InstalledSensor, error) { - - query := "SELECT Make,Model,Serial,Station,Location,Azimuth,Method,Dip,Depth,North,East,Factor,Bias,Start,End FROM Sensor" - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) 
-	if err != nil {
-		return nil, err
-	}
-	defer results.Close()
-
-	sensors := make([]meta.InstalledSensor, 0)
-	for results.Next() {
-		var depth float64
-		var sensor meta.InstalledSensor
-		if err := results.Scan(&sensor.Make, &sensor.Model, &sensor.Serial, &sensor.Station, &sensor.Location, &sensor.Azimuth, &sensor.Method, &sensor.Dip, &depth, &sensor.North, &sensor.East, &sensor.Factor, &sensor.Bias, &sensor.Start, &sensor.End); err != nil {
-			return nil, err
-		}
-		sensor.Vertical = -depth
-		sensors = append(sensors, sensor)
-	}
-
-	if err = results.Err(); err != nil {
-		return nil, err
-	}
-
-	return sensors, nil
+	"fmt"
+)
+
+const timingCreate = `
+DROP TABLE IF EXISTS timing;
+CREATE TABLE IF NOT EXISTS timing (
+  timing_id INTEGER PRIMARY KEY NOT NULL,
+  site_id INTEGER NOT NULL,
+  correction TEXT NOT NULL,
+  start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+  end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+  FOREIGN KEY (site_id) REFERENCES site (site_id),
+  UNIQUE(site_id, start_date, end_date)
+);
+CREATE TRIGGER IF NOT EXISTS no_overlap_on_timing BEFORE INSERT ON timing
+WHEN EXISTS (
+  SELECT * FROM timing
+  WHERE datetime(start_date) <= datetime(NEW.end_date)
+    AND datetime(end_date) > datetime(NEW.start_date)
+    AND site_id = NEW.site_id
+)
+BEGIN
+  SELECT RAISE(FAIL, "overlapping intervals on timing");
+END;
+`
+
+var timing = Table{
+	Create: timingCreate,
+	Select: func() string {
+		return fmt.Sprintf("SELECT timing_id FROM timing WHERE site_id = (%s) AND start_date = ? AND end_date = ?",
+			site.Select(),
+		)
+	},
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO timing (site_id, correction, start_date, end_date) VALUES ((%s), ?, ?, ?);",
+			site.Select(),
+		)
+	},
+	Fields: []string{"Site", "Correction", "Start Date", "End Date"},
+}
+
+const telemetryCreate = `
+DROP TABLE IF EXISTS telemetry;
+CREATE TABLE IF NOT EXISTS telemetry (
+  telemetry_id INTEGER PRIMARY KEY NOT NULL,
+  site_id INTEGER NOT NULL,
+  scale_factor REAL DEFAULT 1.0 NOT NULL,
+  start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+  end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+  FOREIGN KEY (site_id) REFERENCES site (site_id),
+  UNIQUE(site_id, start_date, end_date)
+);
+CREATE TRIGGER IF NOT EXISTS no_overlap_on_telemetry BEFORE INSERT ON telemetry
+WHEN EXISTS (
+  SELECT * FROM telemetry
+  WHERE datetime(start_date) <= datetime(NEW.end_date)
+    AND datetime(end_date) > datetime(NEW.start_date)
+    AND site_id = NEW.site_id
+)
+BEGIN
+  SELECT RAISE(FAIL, "overlapping intervals on telemetry");
+END;
+`
+
+var telemetry = Table{
+	Create: telemetryCreate,
+	Select: func() string {
+		return fmt.Sprintf("SELECT telemetry_id FROM telemetry WHERE site_id = (%s) AND start_date = ? 
AND end_date = ?", + site.Select(), + ) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO telemetry (site_id, scalar_factor, start_date, end_date) VALUES ((%s), ?, ?, ?);", + site.Select(), + ) + }, + Fields: []string{"Site", "Scalar Factor", "Start Date", "End Date"}, +} + +const polarityCreate = ` +DROP TABLE IF EXISTS polarity; +CREATE TABLE IF NOT EXISTS polarity ( + polarity_id INTEGER PRIMARY KEY NOT NULL, + site_id INTEGER NOT NULL, + sublocation TEXT NULL, + subsource TEXT NULL, + preferred BOOLEAN DEFAULT true NOT NULL, + reversed BOOLEAN DEFAULT false NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (site_id) REFERENCES site (site_id) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_polarity BEFORE INSERT ON polarity +WHEN EXISTS ( + SELECT * FROM polarity + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND site_id = NEW.site_id + AND sublocation = NEW.sublocation + AND subsource = NEW.subsource + AND preferred = NEW.preferred + AND preferred = true +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on preferred polarity"); +END; +` + +var polarity = Table{ + Create: polarityCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO polarity (site_id, sublocation, subsource, preferred, reversed, start_date, end_date) VALUES ((%s), ?, ?, ?);", + site.Select(), + ) + }, + Fields: []string{"Site", "Sublocation", "Subsource", "Primary", "Reversed", "Start Date", "End Date"}, +} + +const preampCreate = ` +DROP TABLE IF EXISTS preamp; +CREATE TABLE IF NOT EXISTS preamp ( + preamp_id INTEGER PRIMARY KEY NOT NULL, + site_id INTEGER NOT NULL, + subsource TEXT NULL, + scale_factor REAL DEFAULT 1.0 NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (site_id) REFERENCES site (site_id), + UNIQUE(site_id, subsource, start_date, end_date) +); + +CREATE TRIGGER IF NOT EXISTS no_overlap_on_preamp BEFORE INSERT ON preamp +WHEN EXISTS ( + SELECT * FROM preamp + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND site_id = NEW.site_id + AND subsource = NEW.subsource +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on preamp"); +END; +` + +var preamp = Table{ + Create: preampCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO preamp (site_id, subsource, scale_factor, start_date, end_date) VALUES ((%s), ?, ?, ?, ?);", + site.Select(), + ) + }, + Fields: []string{"Site", "Subsource", "Scale Factor", "Start Date", "End Date"}, +} + +const gainCreate = ` +DROP TABLE IF EXISTS gain; +CREATE TABLE IF NOT EXISTS gain ( + gain_id INTEGER PRIMARY KEY NOT NULL, + site_id INTEGER NOT NULL, + sublocation TEXT NULL, + subsource TEXT NULL, + scale_factor REAL NOT NULL ON CONFLICT REPLACE DEFAULT 1.0, + scale_bias REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + absolute_bias REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (site_id) REFERENCES site (site_id), + UNIQUE(site_id, sublocation, subsource, start_date, end_date) +); +CREATE 
TRIGGER IF NOT EXISTS no_overlap_on_gain BEFORE INSERT ON gain +WHEN EXISTS ( + SELECT * FROM gain + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND site_id = NEW.site_id + AND sublocation = NEW.sublocation + AND subsource = NEW.subsource +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on gain"); +END; +` + +var gain = Table{ + Create: gainCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO gain (site_id, sublocation, subsource, scale_factor, scale_bias, absolute_bias, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?);", + site.Select(), + ) + }, + Fields: []string{"Station", "Location", "Sublocation", "Subsource", "Scale Factor", "Scale Bias", "Absolute Bias", "Start Date", "End Date"}, + Nulls: []string{ + "Sublocation", "Subsource", "Scale Factor", "Scale Bias", "Absolute Bias", + }, +} + +const dataloggerCreate = ` +DROP TABLE IF EXISTS datalogger; +CREATE TABLE IF NOT EXISTS datalogger ( + datalogger_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + place_role_id INTEGER NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (place_role_id) REFERENCES place_role (place_role_id), + UNIQUE(asset_id, place_role_id, start_date, end_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_datalogger BEFORE INSERT ON datalogger +WHEN EXISTS ( + SELECT * FROM datalogger + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND place_role_id = NEW.place_role_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on datalogger"); +END; +` + +var datalogger = Table{ + Create: dataloggerCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO datalogger (asset_id, place_role_id, start_date, end_date) VALUES ((%s), (%s), ?, ?);", + asset.Select(), placeRole.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Place", "Role", "Start Date", "End Date"}, +} + +const sensorCreate = ` +DROP TABLE IF EXISTS sensor; +CREATE TABLE IF NOT EXISTS sensor ( + sensor_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + site_id INTEGER NOT NULL, + method_id INTEGER NOT NULL, + azimuth REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + dip REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + depth REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + north REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + east REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + scale_factor REAL NOT NULL ON CONFLICT REPLACE DEFAULT 1.0, + scale_bias REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (site_id) REFERENCES site (site_id), + FOREIGN KEY (method_id) REFERENCES method (method_id), + UNIQUE(asset_id, site_id, start_date, end_date) +); + +CREATE TRIGGER IF NOT EXISTS no_overlap_on_sensor BEFORE INSERT ON sensor +WHEN EXISTS ( + SELECT * FROM sensor + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND site_id = NEW.site_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping 
intervals on sensor"); +END; +` + +var sensor = Table{ + Create: sensorCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO sensor (asset_id, site_id, method_id, azimuth, dip, depth, north, east, scale_factor, scale_bias, start_date, end_date) VALUES ((%s), (%s), (%s), ?, ?, ?, ?, ?, ?, ?, ?, ?);", + asset.Select(), site.Select(), method.Select(), + ) + }, + Fields: []string{"Make", "Model", "Serial", "Station", "Location", "Method", "Azimuth", "Dip", "Depth", "North", "East", "Scale Factor", "Scale Bias", "Start Date", "End Date"}, +} + +const recorderCreate = ` +DROP TABLE IF EXISTS recorder; +CREATE TABLE IF NOT EXISTS recorder ( + recorder_id INTEGER PRIMARY KEY NOT NULL, + asset_id INTEGER NOT NULL, + model_id INTEGER NOT NULL, + site_id INTEGER NOT NULL, + method_id INTEGER NOT NULL, + azimuth REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + dip REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + depth REAL NOT NULL ON CONFLICT REPLACE DEFAULT 0.0, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (asset_id) REFERENCES asset (asset_id), + FOREIGN KEY (model_id) REFERENCES model (model_id), + FOREIGN KEY (site_id) REFERENCES site (site_id), + FOREIGN KEY (method_id) REFERENCES method (method_id), + UNIQUE(asset_id, model_id, site_id, start_date, end_date) +); +` + +/* +CREATE TRIGGER IF NOT EXISTS no_overlap_on_recorder BEFORE INSERT ON recorder +WHEN EXISTS ( + SELECT * FROM recorder + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND asset_id = NEW.asset_id + AND model_id = NEW.model_id + AND site_id = NEW.site_id +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on recorder"); +END; +*/ + +var recorder = Table{ + Create: recorderCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO recorder (asset_id, model_id, site_id, method_id, azimuth, dip, depth, start_date, end_date) VALUES ((%s), (%s), (%s), (%s), ?, ?, ?, ?, ?);", + asset.Select(), model.Select(), site.Select(), method.Select(), + ) + }, + Fields: []string{"Make", "Datalogger", "Serial", "Make", "Sensor", "Station", "Location", "Method", "Azimuth", "Dip", "Depth", "Start Date", "End Date"}, +} + +var recorderModel = Table{ + Insert: func() string { + return fmt.Sprintf("INSERT INTO model (make_id, model) VALUES ((%s), ?) 
ON CONFLICT(make_id, model) DO NOTHING;", + mmake.Select(), + ) + }, + Fields: []string{"Make", "Sensor"}, +} + +const streamCreate = ` +DROP TABLE IF EXISTS stream; +CREATE TABLE IF NOT EXISTS stream ( + stream_id INTEGER PRIMARY KEY NOT NULL, + site_id INTEGER NOT NULL, + band TEXT DEFAULT "" NOT NULL, + source TEXT DEFAULT "" NOT NULL, + sampling_rate REAL NOT NULL, + axial TEXT NOT NULL, + reversed TEXT NOT NULL, + triggered TEXT NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (site_id) REFERENCES site (site_id), + UNIQUE(sampling_rate, site_id, source, start_date) +); +CREATE TRIGGER IF NOT EXISTS no_overlap_on_stream BEFORE INSERT ON stream +WHEN EXISTS ( + SELECT * FROM stream + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND site_id = NEW.site_id + AND source = NEW.source + AND sampling_rate = NEW.sampling_rate +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on stream"); +END; +` + +var stream = Table{ + Create: streamCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO stream (site_id, band, source, sampling_rate, axial, reversed, triggered, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?);", + site.Select(), + ) + }, + Fields: []string{"Station", "Location", "Band", "Source", "Sampling Rate", "Axial", "Reversed", "Triggered", "Start Date", "End Date"}, +} + +const connectionCreate = ` +DROP TABLE IF EXISTS connection; +CREATE TABLE IF NOT EXISTS connection ( + connection_id INTEGER PRIMARY KEY NOT NULL, + site_id INTEGER NOT NULL, + place_role_id INTEGER NOT NULL, + number TEXT DEFAULT "" NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (site_id) REFERENCES site (site_id), + FOREIGN KEY (place_role_id) REFERENCES place_role (place_role_id), + UNIQUE(site_id, place_role_id, number, start_date, end_date) +); + +CREATE TRIGGER IF NOT EXISTS no_overlap_on_connection BEFORE INSERT ON connection +WHEN EXISTS ( + SELECT * FROM connection + WHERE datetime(start_date) <= datetime(NEW.end_date) + AND datetime(end_date) > datetime(NEW.start_date) + AND site_id = NEW.site_id + AND place_role_id = NEW.place_role_id + AND number = NEW.number +) +BEGIN + SELECT RAISE(FAIL, "overlapping intervals on connection"); +END; +` + +var connection = Table{ + Create: connectionCreate, + Insert: func() string { + return fmt.Sprintf("INSERT INTO connection (site_id, place_role_id, number, start_date, end_date) VALUES ((%s), (%s), ?, ?, ?);", + site.Select(), placeRole.Select(), + ) + }, + Fields: []string{"Station", "Location", "Place", "Role", "Number", "Start Date", "End Date"}, +} + +const placeRoleCreate = ` +DROP TABLE IF EXISTS place_role; +CREATE TABLE IF NOT EXISTS place_role ( + place_role_id INTEGER PRIMARY KEY NOT NULL, + place TEXT NOT NULL, + role TEXT DEFAULT "" NOT NULL, + UNIQUE (place, role) +);` + +var placeRole = Table{ + Create: placeRoleCreate, + Select: func() string { + return "SELECT place_role_id FROM place_role WHERE place = ? AND role = ?" + }, + Insert: func() string { + return "INSERT INTO place_role (place, role) VALUES (?, ?) 
ON CONFLICT(place, role) DO NOTHING;" + }, + Fields: []string{"Place", "Role"}, } diff --git a/meta/sqlite/site.go b/meta/sqlite/site.go deleted file mode 100644 index 9c96ff60b..000000000 --- a/meta/sqlite/site.go +++ /dev/null @@ -1,54 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - - "github.com/GeoNet/delta/meta" -) - -func Sites(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Site, error) { - - query := "SELECT Station,Location,Latitude,Longitude,Elevation,Depth,Datum,Survey,Start,End FROM Site" - if len(opts) > 0 { - query += " WHERE " - } - for n, opt := range opts { - if n > 0 { - query += " AND " - } - query += opt.K(n) - } - query += ";" - - stmt, err := db.PrepareContext(ctx, query) - if err != nil { - return nil, err - } - defer stmt.Close() - - var args []any - for _, opt := range opts { - args = append(args, opt.V()) - } - results, err := stmt.QueryContext(ctx, args...) - if err != nil { - return nil, err - } - defer results.Close() - - sites := make([]meta.Site, 0) - for results.Next() { - var site meta.Site - if err := results.Scan(&site.Station, &site.Location, &site.Latitude, &site.Longitude, &site.Elevation, &site.Depth, &site.Datum, &site.Survey, &site.Start, &site.End); err != nil { - return nil, err - } - sites = append(sites, site) - } - - if err = results.Err(); err != nil { - return nil, err - } - - return sites, nil -} diff --git a/meta/sqlite/sql.go b/meta/sqlite/sql.go deleted file mode 100644 index 288a4bb62..000000000 --- a/meta/sqlite/sql.go +++ /dev/null @@ -1,71 +0,0 @@ -package sqlite - -import ( - "fmt" - "strings" -) - -type QueryOpt func(int) (string, any) - -func (q QueryOpt) K(i int) string { - k, _ := q(i) - return k -} - -func (q QueryOpt) V() any { - _, v := q(0) - return v -} - -func Option(k, v any) QueryOpt { - return func(n int) (string, any) { - return fmt.Sprintf(" %s = $%d", k, n+1), v - } -} - -func Code(v any) QueryOpt { - return Option("Code", v) -} - -func Network(v any) QueryOpt { - return Option("Network", v) -} - -func Station(v any) QueryOpt { - return Option("Station", v) -} - -func Location(v any) QueryOpt { - return Option("Location", v) -} - -func Mark(v any) QueryOpt { - return Option("Mark", v) -} - -func Make(v any) QueryOpt { - return Option("Make", v) -} - -func Model(v any) QueryOpt { - return Option("Model", v) -} - -func Serial(v any) QueryOpt { - return Option("Serial", v) -} - -func Sample(v any) QueryOpt { - return Option("Sample", v) -} - -func ParseBool(str string) (bool, bool) { - switch strings.ToLower(str) { - case "1", "t", "y", "yes", "true": - return true, true - case "0", "f", "n", "no", "false": - return false, true - default: - return false, false - } -} diff --git a/meta/sqlite/station.go b/meta/sqlite/station.go index b48eb2473..de670d4ff 100644 --- a/meta/sqlite/station.go +++ b/meta/sqlite/station.go @@ -1,54 +1,367 @@ package sqlite import ( - "context" - "database/sql" + "fmt" +) + +const datumCreate = ` +DROP TABLE IF EXISTS datum; +CREATE TABLE IF NOT EXISTS datum ( + datum_id INTEGER PRIMARY KEY NOT NULL, + datum TEXT NOT NULL, + UNIQUE (datum) +);` + +var datum = Table{ + Create: datumCreate, + Select: func() string { + return "SELECT datum_id FROM datum WHERE datum = ?" + }, + Insert: func() string { + return "INSERT INTO datum (datum) VALUES (?) 
ON CONFLICT(datum) DO NOTHING;" + }, + Fields: []string{"Datum"}, +} + +const methodCreate = ` +DROP TABLE IF EXISTS method; +CREATE TABLE IF NOT EXISTS method ( + method_id INTEGER PRIMARY KEY NOT NULL, + method TEXT DEFAULT "Unknown" NOT NULL, + UNIQUE (method) +);` + +var method = Table{ + Create: methodCreate, + Select: func() string { + return "SELECT method_id FROM method WHERE method = ?" + }, + Insert: func() string { + return "INSERT INTO method (method) VALUES (?) ON CONFLICT(method) DO NOTHING;" + }, + Fields: []string{"Method"}, +} + +const networkCreate = ` +DROP TABLE IF EXISTS network; +CREATE TABLE IF NOT EXISTS network ( + network_id INTEGER PRIMARY KEY NOT NULL, + network TEXT NOT NULL, + external TEXT NOT NULL, + description TEXT DEFAULT "" NOT NULL, + restricted BOOLEAN DEFAULT false NOT NULL, + UNIQUE (network) +);` + +var network = Table{ + Create: networkCreate, + Select: func() string { + return "SELECT network_id FROM network WHERE network = ?" + }, + Insert: func() string { + return "INSERT INTO network (network, external, description, restricted) VALUES (?, ?, ?, ?);" + }, + Fields: []string{"Network", "External", "Description", "Restricted"}, +} + +const stationCreate = ` +DROP TABLE IF EXISTS station; +CREATE TABLE IF NOT EXISTS station ( + station_id INTEGER PRIMARY KEY NOT NULL, + datum_id INTEGER NOT NULL, + station TEXT NOT NULL, + name TEXT NOT NULL, + latitude REAL NOT NULL, + longitude REAL NOT NULL, + elevation REAL NULL, + depth REAL NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (datum_id) REFERENCES datum (datum_id), + UNIQUE (station) +);` + +var station = Table{ + Create: stationCreate, + Select: func() string { + return "SELECT station_id FROM station WHERE station = ?" 
+ }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO station (datum_id, station, name, latitude, longitude, elevation, depth, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?);", datum.Select()) + }, + Fields: []string{"Datum", "Station", "Name", "Latitude", "Longitude", "Elevation", "Depth", "Start Date", "End Date"}, + Nulls: []string{"Elevation", "Depth"}, +} + +const stationNetworkCreate = ` +DROP TABLE IF EXISTS station_network; +CREATE TABLE IF NOT EXISTS station_network ( + station_network_id INTEGER PRIMARY KEY NOT NULL, + station_id INTEGER NOT NULL, + network_id INTEGER NOT NULL, + FOREIGN KEY (station_id) REFERENCES station (station_id), + FOREIGN KEY (network_id) REFERENCES network (network_id), + UNIQUE (station_id, network_id) +);` + +var stationNetwork = Table{ + Create: stationNetworkCreate, + Select: func() string { + return fmt.Sprintf("SELECT station_network_id FROM station_network WHERE station_id = (%s) AND network_id = (%s)", + station.Select(), network.Select()) + }, + Insert: func() string { + // not all networks are in the networks table so simply ignore any that fail + return fmt.Sprintf("INSERT OR IGNORE INTO station_network (station_id, network_id) VALUES ((%s), (%s));", + station.Select(), network.Select()) + }, + Fields: []string{"Station", "Network"}, +} + +const siteCreate = ` +DROP TABLE IF EXISTS site; +CREATE TABLE IF NOT EXISTS site ( + site_id INTEGER PRIMARY KEY NOT NULL, + station_id INTEGER NOT NULL, + datum_id INTEGER NOT NULL, + location TEXT NOT NULL, + latitude REAL NOT NULL, + longitude REAL NOT NULL, + elevation REAL NULL, + depth REAL NULL, + survey TEXT DEFAULT "Unknown" NOT NULL, + start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)), + end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)), + FOREIGN KEY (station_id) REFERENCES station (station_id), + FOREIGN KEY (datum_id) REFERENCES datum (datum_id), + UNIQUE (station_id, location) +); +CREATE TRIGGER IF NOT EXISTS site_too_soon BEFORE INSERT ON site +WHEN NEW.start_date < (SELECT station.start_date FROM station WHERE station.station_id = new.station_id) +BEGIN + SELECT RAISE(FAIL, "site too soon for station"); +END; +CREATE TRIGGER IF NOT EXISTS site_too_late BEFORE INSERT ON site +WHEN NEW.end_date > (SELECT station.end_date FROM station WHERE station.station_id = new.station_id) +BEGIN + SELECT RAISE(FAIL, "site too late for station"); +END; +` + +var site = Table{ + Create: siteCreate, + Select: func() string { + return fmt.Sprintf("SELECT site_id FROM site WHERE station_id = (%s) AND location = ?", station.Select()) + }, + Insert: func() string { + return fmt.Sprintf("INSERT INTO site (station_id, datum_id, location, latitude, longitude, elevation, depth, survey, start_date, end_date) VALUES ((%s), (%s), ?, ?, ?, ?, ?, ?, ?, ?);", + station.Select(), datum.Select()) + }, - "github.com/GeoNet/delta/meta" + Fields: []string{"Station", "Datum", "Location", "Latitude", "Longitude", "Elevation", "Depth", "Survey", "Start Date", "End Date"}, + Nulls: []string{"Elevation", "Depth"}, +} + +const sampleNetworkCreate = ` +DROP TABLE IF EXISTS sample_network; +CREATE TABLE IF NOT EXISTS sample_network ( + sample_network_id INTEGER PRIMARY KEY NOT NULL, + sample_id INTEGER NOT NULL, + network_id INTEGER NOT NULL, + FOREIGN KEY (sample_id) REFERENCES sample (sample_id), + FOREIGN KEY (network_id) REFERENCES network (network_id), + UNIQUE (sample_id, network_id) +);` + +var sampleNetwork = Table{ + 
Create: sampleNetworkCreate,
+	Select: func() string {
+		return fmt.Sprintf("SELECT sample_network_id FROM sample_network WHERE sample_id = (%s) AND network_id = (%s)",
+			sample.Select(), network.Select())
+	},
+	Insert: func() string {
+		// not all networks are in the networks table so simply ignore any that fail
+		return fmt.Sprintf("INSERT OR IGNORE INTO sample_network (sample_id, network_id) VALUES ((%s), (%s));",
+			sample.Select(), network.Select())
+	},
+	Fields: []string{"Station", "Network"},
+}
+
+const sampleCreate = `
+DROP TABLE IF EXISTS sample;
+CREATE TABLE IF NOT EXISTS sample (
+    sample_id INTEGER PRIMARY KEY NOT NULL,
+    datum_id INTEGER NOT NULL,
+    station TEXT NOT NULL,
+    name TEXT NOT NULL,
+    latitude REAL NOT NULL,
+    longitude REAL NOT NULL,
+    elevation REAL NULL,
+    depth REAL NULL,
+    start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+    end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+    FOREIGN KEY (datum_id) REFERENCES datum (datum_id),
+    UNIQUE(station)
+);
+CREATE TRIGGER IF NOT EXISTS no_overlap_on_sample BEFORE INSERT ON sample
+WHEN EXISTS (
+    SELECT * FROM sample
+    WHERE datetime(start_date) <= datetime(NEW.end_date)
+    AND datetime(end_date) > datetime(NEW.start_date)
+    AND station = NEW.station
+)
+BEGIN
+    SELECT RAISE(FAIL, "overlapping intervals on sample");
+END;
+`
+
+var sample = Table{
+	Create: sampleCreate,
+	Select: func() string {
+		return "SELECT sample_id FROM sample WHERE station = ?"
+	},
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO sample (datum_id, station, name, latitude, longitude, elevation, depth, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?);",
+			datum.Select())
+	},
+	Fields: []string{"Datum", "Station", "Name", "Latitude", "Longitude", "Elevation", "Depth", "Start Date", "End Date"},
+	Nulls:  []string{"Elevation", "Depth"},
+}
+
+const pointCreate = `
+DROP TABLE IF EXISTS point;
+CREATE TABLE IF NOT EXISTS point (
+    point_id INTEGER PRIMARY KEY NOT NULL,
+    sample_id INTEGER NOT NULL,
+    datum_id INTEGER NOT NULL,
+    location TEXT NOT NULL,
+    latitude REAL NOT NULL,
+    longitude REAL NOT NULL,
+    elevation REAL DEFAULT 0 NOT NULL,
+    depth REAL DEFAULT 0 NOT NULL,
+    survey TEXT DEFAULT "Unknown" NOT NULL,
+    start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+    end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+    FOREIGN KEY (sample_id) REFERENCES sample (sample_id),
+    FOREIGN KEY (datum_id) REFERENCES datum (datum_id),
+    UNIQUE (sample_id, location)
+);
+CREATE TRIGGER IF NOT EXISTS point_too_soon BEFORE INSERT ON point
+WHEN NEW.start_date < (SELECT sample.start_date FROM sample WHERE sample.sample_id = new.sample_id)
+BEGIN
+    SELECT RAISE(FAIL, "point too soon for sample");
+END;
+CREATE TRIGGER IF NOT EXISTS point_too_late BEFORE INSERT ON point
+WHEN NEW.end_date > (SELECT sample.end_date FROM sample WHERE sample.sample_id = new.sample_id)
+BEGIN
+    SELECT RAISE(FAIL, "point too late for sample");
+END;
+`
+
+var point = Table{
+	Create: pointCreate,
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO point (sample_id, datum_id, location, latitude, longitude, elevation, depth, survey, start_date, end_date) VALUES ((%s), (%s), ?, ?, ?, ?, ?, ?, ?, ?);",
+			sample.Select(), datum.Select())
+	},
+	Fields: []string{"Sample", "Datum", "Location", "Latitude", "Longitude", "Elevation", "Depth", "Survey", "Start Date", "End Date"},
+}
+
+const 
featureCreate = `
+DROP TABLE IF EXISTS feature;
+CREATE TABLE IF NOT EXISTS feature (
+    feature_id INTEGER PRIMARY KEY NOT NULL,
+    site_id INTEGER NOT NULL,
+    sublocation TEXT NULL,
+    property TEXT NOT NULL,
+    description TEXT NULL,
+    aspect TEXT NULL,
+    start_date DATETIME NOT NULL CHECK (start_date IS strftime('%Y-%m-%dT%H:%M:%SZ', start_date)),
+    end_date DATETIME NOT NULL CHECK (end_date IS strftime('%Y-%m-%dT%H:%M:%SZ', end_date)),
+    FOREIGN KEY (site_id) REFERENCES site (site_id),
+    UNIQUE(site_id, sublocation, property, description, aspect, start_date, end_date)
+);
+CREATE TRIGGER IF NOT EXISTS no_overlap_on_feature BEFORE INSERT ON feature
+WHEN EXISTS (
+    SELECT * FROM feature
+    WHERE datetime(start_date) <= datetime(NEW.end_date)
+    AND datetime(end_date) > datetime(NEW.start_date)
+    AND site_id = NEW.site_id
+    AND sublocation = NEW.sublocation
+    AND property = NEW.property
+    AND description = NEW.description
+    AND aspect = NEW.aspect
+)
+BEGIN
+    SELECT RAISE(FAIL, "overlapping intervals on feature");
+END;
+`
+
+var feature = Table{
+	Create: featureCreate,
+	Insert: func() string {
+		// a feature may currently reference a site, a point, or a sample; until those are consolidated, ignore any insert that fails to resolve
+		return fmt.Sprintf("INSERT OR IGNORE INTO feature (site_id, sublocation, property, description, aspect, start_date, end_date) VALUES ((%s), ?, ?, ?, ?, ?, ?);",
+			site.Select())
+	},
+	Fields: []string{"Station", "Location", "Sublocation", "Property", "Description", "Aspect", "Start Date", "End Date"},
+}
+
+const classCreate = `
+DROP TABLE IF EXISTS class;
+CREATE TABLE IF NOT EXISTS class (
+    class_id INTEGER PRIMARY KEY NOT NULL,
+    station_id INTEGER NOT NULL,
+    site_class TEXT NOT NULL,
+    vs30 REAL NOT NULL,
+    vs30_quality TEXT NOT NULL,
+    tsite TEXT NOT NULL,
+    tsite_method TEXT NOT NULL,
+    tsite_quality TEXT NOT NULL,
+    basement_depth REAL NOT NULL,
+    depth_quality TEXT NOT NULL,
+    link TEXT NULL,
+    notes TEXT NULL,
+    FOREIGN KEY (station_id) REFERENCES station (station_id),
+    UNIQUE(station_id)
+);
+`
+
+var class = Table{
+	Create: classCreate,
+	Select: func() string {
+		return fmt.Sprintf("SELECT class_id FROM class WHERE station_id = (%s)", station.Select())
+	},
+	Insert: func() string {
+		// not all stations are in the stations file, so ignore any conflicts for now
+		return fmt.Sprintf("INSERT OR IGNORE INTO class (station_id, site_class, vs30, vs30_quality, tsite, tsite_method, tsite_quality, basement_depth, depth_quality, link, notes) VALUES ((%s), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);", station.Select())
+	},
+	Fields: []string{"Station", "Site Class", "Vs30", "Vs30 Quality", "Tsite", "Tsite Method", "Tsite Quality", "Basement Depth", "Depth Quality", "Link", "Notes"},
+}
+
+const classCitationCreate = `
+DROP TABLE IF EXISTS class_citation;
+CREATE TABLE IF NOT EXISTS class_citation (
+    class_citation_id INTEGER PRIMARY KEY NOT NULL,
+    class_id INTEGER NOT NULL,
+    citation_id INTEGER NOT NULL,
+    FOREIGN KEY (class_id) REFERENCES class (class_id),
+    FOREIGN KEY (citation_id) REFERENCES citation (citation_id),
+    UNIQUE (class_id, citation_id)
+);`
-func Stations(ctx context.Context, db *sql.DB, opts ...QueryOpt) ([]meta.Station, error) {
-
-	query := `SELECT Code,Network,Name,Latitude,Longitude,Elevation,Depth,Datum,Start,End FROM Station`
-	if len(opts) > 0 {
-		query += " WHERE "
-	}
-	for n, opt := range opts {
-		if n > 0 {
-			query += " AND "
-		}
-		query += opt.K(n)
-	}
-	query += ";"
-
-	stmt, err := db.PrepareContext(ctx, query)
-	if err != nil {
-		return nil, err
-	}
-	defer stmt.Close()
-
-	var args []any
-	for _, opt := range opts {
-		args = append(args, opt.V())
-	}
-	results, err := stmt.QueryContext(ctx, args...)
-	if err != nil {
-		return nil, err
-	}
-	defer results.Close()
-
-	stations := make([]meta.Station, 0)
-	for results.Next() {
-		var station meta.Station
-		if err := results.Scan(&station.Code, &station.Network, &station.Name, &station.Latitude, &station.Longitude, &station.Elevation, &station.Depth, &station.Datum, &station.Start, &station.End); err != nil {
-			return nil, err
-		}
-		stations = append(stations, station)
-	}
-
-	if err = results.Err(); err != nil {
-		return nil, err
-	}
-
-	return stations, nil
+var classCitation = Table{
+	Create: classCitationCreate,
+	Select: func() string {
+		return fmt.Sprintf("SELECT class_citation_id FROM class_citation WHERE class_id = (%s) AND citation_id = (%s)",
+			class.Select(), citation.Select(),
+		)
+	},
+	Insert: func() string {
+		return fmt.Sprintf("INSERT INTO class_citation (class_id, citation_id) VALUES ((%s), (%s));",
+			class.Select(), citation.Select(),
+		)
+	},
+	Fields: []string{"Station", "Citations"},
+	Unwrap: "Citations",
 }
diff --git a/meta/sqlite/table.go b/meta/sqlite/table.go
new file mode 100644
index 000000000..9c669a88d
--- /dev/null
+++ b/meta/sqlite/table.go
@@ -0,0 +1,120 @@
+package sqlite
+
+import (
+	"strings"
+
+	"github.com/GeoNet/delta/meta"
+)
+
+type Table struct {
+	// Create holds the SQL code needed to create the table and all associated
+	// triggers and constraints.
+	Create string
+	// Select holds the prepared statement that can be used to select the primary
+	// key from the table.
+	Select func() string
+	// Insert holds the full prepared statement to insert a row into the table.
+	Insert func() string
+	// Fields gives the delta csv column names to use for inserting a row into the table.
+	Fields []string
+	// Nulls holds the set of columns that are allowed to be NULL in the table; an empty
+	// string in the CSV field indicates a NULL value should be passed into the row.
+	Nulls []string
+	// Unwrap names a column whose space-separated values expand into multiple rows when building a linking table.
+	Unwrap string
+}
+
+// Links returns the rows to insert into a linking table for the given unwrapping column.
+func (t Table) Links(list meta.TableList) [][]any {
+
+	lines := list.Table.Encode(list.List)
+	if !(len(lines) > 0) {
+		return nil
+	}
+
+	lookup := make(map[string]int)
+	for n, v := range list.Table.Columns() {
+		lookup[v] = n
+	}
+
+	w, ok := lookup[t.Unwrap]
+	if !ok {
+		return nil
+	}
+
+	var res [][]any
+	for _, line := range lines[1:] {
+		if !(w < len(line)) {
+			continue
+		}
+		for _, c := range strings.Fields(strings.TrimSpace(line[w])) {
+			var parts []any
+			for _, f := range t.Fields {
+				n, ok := lookup[f]
+				if !ok {
+					return nil
+				}
+				if !(n < len(line)) {
+					return nil
+				}
+				switch {
+				case n == w:
+					parts = append(parts, c)
+				default:
+					parts = append(parts, line[n])
+				}
+			}
+			res = append(res, parts)
+		}
+	}
+	return res
+}
+
+// checkNull returns nil if the value is empty and the key is a member
+// of the nulls map, otherwise it returns the value.
+func checkNull(nulls map[string]interface{}, key, value string) any {
+	if _, ok := nulls[key]; ok && value == "" {
+		return nil
+	}
+	return value
+}
+
+// Columns returns the expected rows for the given TableList.
+func (t Table) Columns(list meta.TableList) [][]any { + + lines := list.Table.Encode(list.List) + if !(len(lines) > 0) { + return nil + } + + nulls := make(map[string]interface{}) + for _, v := range t.Nulls { + nulls[v] = true + } + + lookup := make(map[string]int) + for n, v := range list.Table.Columns() { + lookup[v] = n + } + + var res [][]any + for _, line := range lines[1:] { + var parts []any + + for _, f := range t.Fields { + n, ok := lookup[f] + if !ok { + return nil + } + if !(n < len(line)) { + return nil + } + + parts = append(parts, checkNull(nulls, f, line[n])) + } + + res = append(res, parts) + } + + return res +} diff --git a/meta/table.go b/meta/table.go index 7db93b19c..a6c5b61f4 100644 --- a/meta/table.go +++ b/meta/table.go @@ -1,6 +1,7 @@ package meta import ( + "slices" "sort" ) @@ -174,7 +175,8 @@ func (s *Set) KeyValue(name, label, desc string, entries map[string]string) Tabl // TableList returns a pre-built set of Tables and associated Lists. func (s *Set) TableList(extra ...TableList) []TableList { - return append([]TableList{ + return append(slices.Clone(extra), []TableList{ + {Table: AssetTable, List: AssetList(s.Assets())}, {Table: NetworkTable, List: NetworkList(s.Networks())}, {Table: StationTable, List: StationList(s.Stations())}, {Table: SiteTable, List: SiteList(s.Sites())}, @@ -186,7 +188,6 @@ func (s *Set) TableList(extra ...TableList) []TableList { {Table: PointTable, List: PointList(s.Points())}, {Table: InstalledSensorTable, List: InstalledSensorList(s.InstalledSensors())}, {Table: InstalledAntennaTable, List: InstalledAntennaList(s.InstalledAntennas())}, - {Table: AssetTable, List: AssetList(s.Assets())}, {Table: CalibrationTable, List: CalibrationList(s.Calibrations())}, {Table: InstalledCameraTable, List: InstalledCameraList(s.InstalledCameras())}, {Table: ChannelTable, List: ChannelList(s.Channels())}, @@ -214,5 +215,5 @@ func (s *Set) TableList(extra ...TableList) []TableList { {Table: TelemetryTable, List: TelemetryList(s.Telemetries())}, {Table: TimingTable, List: TimingList(s.Timings())}, {Table: VisibilityTable, List: VisibilityList(s.Visibilities())}, - }, extra...) + }...) 
} diff --git a/network/mounts.csv b/network/mounts.csv index 20733a483..b9fdebe54 100644 --- a/network/mounts.csv +++ b/network/mounts.csv @@ -1,5 +1,5 @@ Mount,Network,Name,Latitude,Longitude,Elevation,Datum,Description,Start Date,End Date -CHTB,BC,Christchurch Telecom Building,-43.53162,172.63544,0,WGS84,Images of Christchurch.,2011-11-28T00:00:03Z,2019-02-24T21:00:00Z +CHTB,BC,Christchurch Telecom Building,-43.53162,172.63544,0,WGS84,Images of Christchurch.,2011-11-28T00:00:00Z,2019-02-24T21:00:00Z DISC,VC,Discovery Lodge,-39.16732,175.48055,881,WGS84,Images of Ruapehu and Ngauruhoe.,2020-07-28T02:20:00Z,9999-01-01T00:00:00Z KAKA,VC,Kakaramea,-38.97490003,175.694834619,1327,WGS84,Images of Tongariro.,2011-09-05T00:01:00Z,9999-01-01T00:00:00Z KMTP,VC,Kaimanawa Tai Ping,-39.29642,175.7663,1366,WGS84,Images of Ruapehu and Ngauruhoe.,2009-03-03T02:00:00Z,9999-01-01T00:00:00Z @@ -17,11 +17,11 @@ TOD01,EN,East Lower Te Maari,-39.10511,175.68043,1399,WGS84,SO2 from Tongariro,2 TOD02,EN,West Lower Te Maari,-39.10304,175.67048,1406,WGS84,SO2 from Tongariro,2021-07-29T01:00:00Z,9999-01-01T00:00:00Z TOKR,VC,Karewarewa,-39.094442315,175.641203517,1207,WGS84,Images of Te Maari Crater.,2012-08-29T02:00:00Z,9999-01-01T00:00:00Z WHHB,VC,Whakatane Harbour,-37.949622999,177.002304198,10,WGS84,Images of White Island.,2001-05-21T13:00:10Z,2007-04-13T00:00:00Z -WHOH,VC,Whakatane Observatory Hill,-37.956903132,177.000764155,100,WGS84,Images of White Island.,2006-03-29T05:32:07Z,9999-01-01T00:00:00Z -WICF,VC,White Island Crater Floor,-37.526515575,177.189407876,10,WGS84,Images of White Island crater.,2009-10-28T00:45:02Z,2023-12-01T00:00:00Z -WID01,EN,Whakaari/White Island North-East Point,-37.51669,177.1929793,30,WGS84,SO2 from White Island,2009-11-13T03:43:24Z,2021-05-25T02:11:46Z -WID02,EN,Whakaari/White Island South Rim,-37.526533,177.189013,65,WGS84,SO2 from White Island,2009-08-01T22:41:25Z,2019-12-03T21:42:55Z -WIFC,VC,White Island Factory,-37.526515575,177.189407876,10,WGS84,Images of White Island crater.,2009-07-20T01:00:05Z,2009-10-27T21:00:05Z +WHOH,VC,Whakatane Observatory Hill,-37.956903132,177.000764155,100,WGS84,Images of White Island.,2006-02-24T14:00:00Z,9999-01-01T00:00:00Z +WICF,VC,White Island Crater Floor,-37.526515575,177.189407876,10,WGS84,Images of White Island crater.,2009-10-28T00:30:00Z,2023-12-01T00:00:00Z +WID01,EN,Whakaari/White Island North-East Point,-37.5188,177.1795,280,WGS84,SO2 from White Island from North-East Point,2009-11-13T03:43:24Z,2021-05-25T02:11:46Z +WID02,EN,Whakaari/White Island South Rim,-37.5188,177.1795,280,WGS84,SO2 from White Island from South Rim,2009-08-01T22:41:25Z,2019-12-03T21:42:55Z +WIFC,VC,White Island Factory,-37.526515575,177.189407876,10,WGS84,Images of White Island crater.,2009-07-20T00:00:05Z,2009-10-27T22:00:00Z WIFW,VC,White Island Factory Wall,-37.526515575,177.189407876,10,WGS84,Images of White Island crater.,2001-10-12T12:00:00Z,2009-07-20T00:00:05Z WINR,VC,White Island North Rim,-37.521796,177.192346,96,WGS84,Images of White Island crater.,2006-03-17T15:30:00Z,2023-12-01T00:00:00Z WIWR,VC,White Island West Rim,-37.5188,177.1795,280,WGS84,Images of White Island crater.,2013-12-10T00:00:00Z,2023-12-01T00:00:00Z diff --git a/network/stations.csv b/network/stations.csv index 6027c2658..b682a52b6 100644 --- a/network/stations.csv +++ b/network/stations.csv @@ -1537,7 +1537,7 @@ NZD,TD,Offshore Bay of Plenty Kermadec,-36.0998,178.6037,0,2445,WGS84,2021-07-23 NZE,TD,Offshore East Cape 
Kermadec,-36.049,-177.708,0,5779,WGS84,2019-12-18T17:00:00Z,9999-01-01T00:00:00Z NZF,TD,Offshore Raoul Island Kermadec,-29.6826,-175.0125,0,5060,WGS84,2019-12-17T00:00:00Z,9999-01-01T00:00:00Z NZG,TD,Offshore Tongatapu Tonga,-23.3517,-173.4018,0,5741,WGS84,2020-09-10T00:00:00Z,9999-01-01T00:00:00Z -NZH,TD,Offshore Niue Tonga,-20.0885,-171.8630,0,5531,WGS84,2020-09-04T00:00:00Z,9999-01-01T00:00:00Z +NZH,TD,Offshore Niue Tonga,-20.0885,-171.8630,0,5531,WGS84,2020-09-03T22:24:30Z,9999-01-01T00:00:00Z NZI,TD,Offshore Samoa Tonga,-16.8890,-171.1905,0,5237,WGS84,2020-09-08T00:00:00Z,9999-01-01T00:00:00Z NZJ,TD,Offshore Norfolk Island New Hebrides,-26.6672,163.9549,0,1912,WGS84,2021-07-09T00:00:00Z,9999-01-01T00:00:00Z NZK,TD,Offshore New Caledonia New Hebrides,-24.3093,169.4988,0,2098,WGS84,2021-07-15T00:00:00Z,9999-01-01T00:00:00Z @@ -1967,7 +1967,7 @@ TOK,OE,Tokyo,-35.684741727,139.758118639,21,,WGS84,1875-06-01T00:00:00Z,9999-01- TOKS,SM,Te Oka Bay Road SW Banks Peninsula,-43.82243,172.78084,577,,WGS84,2013-03-19T00:00:00Z,9999-01-01T00:00:00Z TON,NZ,Tongariro,-39.201044242,175.538181851,1120,,WGS84,1945-10-01T00:00:00Z,1966-04-23T00:00:00Z TON1,XX,Tongariro 1,-39.1319,175.6153,1491,,NZGD2000,2012-07-20T00:00:00Z,2013-06-07T00:00:00Z -TON2,XX,Tongariro 2,-39.10857758,175.6731,1521,,WGS84,2012-07-20T21:30:00Z,2012-10-12T00:00:00Z +TON2,XX,Tongariro 2,-39.10857758,175.6731,1521,,WGS84,2012-07-20T21:30:00Z,2012-11-21T00:00:00Z TON3,XX,Tongariro 3,-39.1357,175.7106,1236,,WGS84,2012-07-20T00:00:00Z,2013-06-07T00:00:00Z TON4,XX,Tongariro 4,-39.0779,175.6762,836,,NZGD2000,2012-04-20T00:00:00Z,2013-06-07T00:00:00Z TON7,XX,Tongariro 7,-39.1283,175.6548,1727,,WGS84,2012-08-22T00:00:00Z,2013-06-07T00:00:00Z
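
A note on the insert pattern used throughout `meta/sqlite`: each `Table.Insert` embeds the `Select` of any referenced table as a scalar subquery, so the bound arguments line up one-to-one with the `Fields` slice and no primary keys ever need to be tracked in Go. The sketch below is a minimal, self-contained illustration of that composition, not code from this change: the `modernc.org/sqlite` driver, the trimmed DDL (date checks omitted), and the sample values (`WGS84`, `WEL`, `Wellington`) are assumptions for demonstration only.

```
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "modernc.org/sqlite" // assumption: any database/sql SQLite driver would do
)

// Trimmed copies of the datum and station DDL from the diff above.
var ddl = []string{
	`CREATE TABLE datum (
		datum_id INTEGER PRIMARY KEY NOT NULL,
		datum TEXT NOT NULL,
		UNIQUE (datum)
	);`,
	`CREATE TABLE station (
		station_id INTEGER PRIMARY KEY NOT NULL,
		datum_id INTEGER NOT NULL,
		station TEXT NOT NULL,
		name TEXT NOT NULL,
		FOREIGN KEY (datum_id) REFERENCES datum (datum_id),
		UNIQUE (station)
	);`,
}

func main() {
	db, err := sql.Open("sqlite", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	for _, q := range ddl {
		if _, err := db.Exec(q); err != nil {
			log.Fatal(err)
		}
	}

	// The lookup table is populated first; ON CONFLICT DO NOTHING makes
	// the insert idempotent, mirroring the datum Table definition.
	if _, err := db.Exec(
		"INSERT INTO datum (datum) VALUES (?) ON CONFLICT(datum) DO NOTHING;",
		"WGS84"); err != nil {
		log.Fatal(err)
	}

	// The referenced table's Select is spliced in as a scalar subquery, the
	// same way Table.Insert composes statements: the subquery's "?" becomes
	// the first bound argument, so the arguments follow the Fields order.
	insert := fmt.Sprintf(
		"INSERT INTO station (datum_id, station, name) VALUES ((%s), ?, ?);",
		"SELECT datum_id FROM datum WHERE datum = ?")

	if _, err := db.Exec(insert, "WGS84", "WEL", "Wellington"); err != nil {
		log.Fatal(err)
	}
}
```

Running this creates the two tables in memory and resolves `WGS84` to its `datum_id` during the station insert, which is exactly what the nested `(%s)` selects in the diff do for every foreign-key column.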