From 379b90f83a96826602ba2e19f5679c8616075ac2 Mon Sep 17 00:00:00 2001
From: Gabriele Bozzola
Date: Tue, 10 Sep 2024 19:57:39 -0700
Subject: [PATCH] Remove t_start

---
 .../src/diagnostics/developers_diagnostics.md |  11 +-
 docs/src/diagnostics/users_diagnostics.md     |   7 +-
 experiments/benchmarks/land.jl                |  10 --
 experiments/benchmarks/richards.jl            |   2 -
 .../integrated/global/global_soil_canopy.jl   |  10 --
 experiments/long_runs/land.jl                 |  10 --
 experiments/long_runs/soil.jl                 |   9 --
 experiments/standalone/Bucket/bucket_era5.jl  |   9 --
 .../standalone/Soil/richards_runoff.jl        |   2 -
 src/diagnostics/default_diagnostics.jl        |  35 ++--
 src/diagnostics/land_compute_methods.jl       |   4 +-
 .../standard_diagnostic_frequencies.jl        | 152 ++++++++----------
 src/standalone/Bucket/Bucket.jl               |   3 -
 test/standalone/Bucket/albedo_types.jl        |   5 +-
 14 files changed, 98 insertions(+), 171 deletions(-)

diff --git a/docs/src/diagnostics/developers_diagnostics.md b/docs/src/diagnostics/developers_diagnostics.md
index a1ed75c1f1..c639d5d7dd 100644
--- a/docs/src/diagnostics/developers_diagnostics.md
+++ b/docs/src/diagnostics/developers_diagnostics.md
@@ -17,7 +17,7 @@ Internally, this is done by using the [`ClimaDiagnostics.jl`](https://github.com
 `add_diagnostic_variable!`, and dispatch off the type of land\_model to define how to compute a diagnostic (for example, surface temperature is computed in `p.bucket.T_sfc` in the bucket model).
 - compute methods are defined in a separate file, for example, `bucket_compute_methods.jl`.
 - `standard_diagnostic_frequencies.jl` defines standard functions to schedule diagnostics, for example, hourly average or monthly max; these functions are called on a list of diagnostic variables. As developers, we can add more standard functions that users may want to have access to easily in this file.
- - `default_diagnostics.jl` defines default diagnostics functions to use on a model simulation. For example, `default_diagnostics(land_model::BucketModel, t_start; output_writer)`.
+ - `default_diagnostics.jl` defines default diagnostics functions to use on a model simulation. For example, `default_diagnostics(land_model::BucketModel; output_writer)`.
 will return a `ScheduledDiagnostics` that computes hourly averages for all Bucket variables, along with their metadata, ready to be written on a NetCDF file when running a Bucket simulation.

 The following sections give more details on these functions, along with examples. As developers, we want to extend this functionality as ClimaLand progresses.
@@ -66,7 +66,7 @@ For each model, we define a function `default_diagnostics` which will define wha
 on what schedule (for example, hourly average). For example,

 ```Julia
-function default_diagnostics(land_model::BucketModel, t_start; output_writer)
+function default_diagnostics(land_model::BucketModel{FT}; output_writer) where {FT}

     define_diagnostics!(land_model)

@@ -87,7 +87,7 @@ function default_diagnostics(land_model::BucketModel, t_start; output_writer)
     ]

     default_outputs =
-        hourly_averages(bucket_diagnostics...; output_writer, t_start)
+        hourly_averages(FT, bucket_diagnostics...; output_writer)

     return [default_outputs...]
end ``` @@ -103,11 +103,10 @@ If `average_period = :hourly`, `default_outputs` calls `hourly_averages`, et cet We defined some functions of diagnostic schedule that may often be used in `standard_diagnostic_frequencies.jl`, for example ```Julia -hourly_averages(short_names...; output_writer, t_start) = common_diagnostics( - 60 * 60 * one(t_start), +hourly_averages(FT, short_names...; output_writer) = common_diagnostics( + 60 * 60 * one(FT), (+), output_writer, - t_start, short_names...; pre_output_hook! = average_pre_output_hook!, ) diff --git a/docs/src/diagnostics/users_diagnostics.md b/docs/src/diagnostics/users_diagnostics.md index 143ca80922..ae397461d2 100644 --- a/docs/src/diagnostics/users_diagnostics.md +++ b/docs/src/diagnostics/users_diagnostics.md @@ -118,11 +118,10 @@ add_diagnostic_variable!( ### Define how to schedule your variables. For example, you want the seasonal maximum of your variables, where season is defined as 90 days. ```Julia -seasonal_maxs(short_names...; output_writer, t_start) = common_diagnostics( - 90 * 24 * 60 * 60 * one(t_start), +seasonal_maxs(FT, short_names...; output_writer) = common_diagnostics( + 90 * 24 * 60 * 60 * one(FT), max, output_writer, - t_start, short_names..., ) ``` @@ -134,7 +133,7 @@ Now, you can call your schedule with your variables. ```Julia my_custom_diagnostics = ["lhf", "bor"] -diags = seasonal_maxs(my_custom_diagnostics...; output_writer, t_start) +diags = seasonal_maxs(FT, my_custom_diagnostics...; output_writer) ``` ### Analyze your simulation output diff --git a/experiments/benchmarks/land.jl b/experiments/benchmarks/land.jl index 4a27d246dd..e6c36e7c18 100644 --- a/experiments/benchmarks/land.jl +++ b/experiments/benchmarks/land.jl @@ -72,7 +72,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) subsurface_space = domain.space.subsurface ref_time = DateTime(2021) - t_start = 0.0 # Forcing data era5_artifact_path = @@ -82,7 +81,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "rf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -92,7 +90,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "sf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -102,7 +99,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "ws", surface_space; reference_date = ref_time, - t_start, regridder_type, ) q_atmos = TimeVaryingInput( @@ -110,7 +106,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "q", surface_space; reference_date = ref_time, - t_start, regridder_type, ) P_atmos = TimeVaryingInput( @@ -118,7 +113,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "sp", surface_space; reference_date = ref_time, - t_start, regridder_type, ) @@ -127,7 +121,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "t2m", surface_space; reference_date = ref_time, - t_start, regridder_type, ) h_atmos = FT(10) @@ -150,7 +143,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "ssrd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -159,7 +151,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) "strd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -493,7 +484,6 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) 
"lai", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data > 0.05 ? data : 0.0, diff --git a/experiments/benchmarks/richards.jl b/experiments/benchmarks/richards.jl index b778c225bf..1a994f616f 100644 --- a/experiments/benchmarks/richards.jl +++ b/experiments/benchmarks/richards.jl @@ -196,14 +196,12 @@ function setup_prob(t0, tf, Δt; nelements = (101, 15)) # 1. Convert precipitation to be negative (as it is downwards) # 2. Convert accumulations over an hour to a rate per second ref_time = DateTime(2021) - t_start = 0.0 # Precipitation: precip = TimeVaryingInput( joinpath(era5_artifact_path, "era5_2021_0.9x1.25.nc"), "tp", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) diff --git a/experiments/integrated/global/global_soil_canopy.jl b/experiments/integrated/global/global_soil_canopy.jl index 86baac684b..74c45a4a8a 100644 --- a/experiments/integrated/global/global_soil_canopy.jl +++ b/experiments/integrated/global/global_soil_canopy.jl @@ -57,7 +57,6 @@ surface_space = domain.space.surface subsurface_space = domain.space.subsurface ref_time = DateTime(2021); -t_start = 0.0 # Forcing data era5_artifact_path = @@ -67,7 +66,6 @@ precip = TimeVaryingInput( "rf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -77,7 +75,6 @@ snow_precip = TimeVaryingInput( "sf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -87,7 +84,6 @@ u_atmos = TimeVaryingInput( "ws", surface_space; reference_date = ref_time, - t_start, regridder_type, ) q_atmos = TimeVaryingInput( @@ -95,7 +91,6 @@ q_atmos = TimeVaryingInput( "q", surface_space; reference_date = ref_time, - t_start, regridder_type, ) P_atmos = TimeVaryingInput( @@ -103,7 +98,6 @@ P_atmos = TimeVaryingInput( "sp", surface_space; reference_date = ref_time, - t_start, regridder_type, ) @@ -112,7 +106,6 @@ T_atmos = TimeVaryingInput( "t2m", surface_space; reference_date = ref_time, - t_start, regridder_type, ) h_atmos = FT(10); @@ -135,7 +128,6 @@ SW_d = TimeVaryingInput( "ssrd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -144,7 +136,6 @@ LW_d = TimeVaryingInput( "strd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -241,7 +232,6 @@ LAIfunction = TimeVaryingInput( "lai", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data > 0.05 ? 
data : 0.0, diff --git a/experiments/long_runs/land.jl b/experiments/long_runs/land.jl index 46163bf2ee..6844fba63f 100644 --- a/experiments/long_runs/land.jl +++ b/experiments/long_runs/land.jl @@ -73,7 +73,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) subsurface_space = domain.space.subsurface ref_time = DateTime(2021) - t_start = t0 # Forcing data era5_artifact_path = ClimaLand.Artifacts.era5_land_forcing_data2021_folder_path(; context) # Precipitation: @@ -82,7 +81,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "rf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -92,7 +90,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "sf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -102,7 +99,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "ws", surface_space; reference_date = ref_time, - t_start, regridder_type, ) q_atmos = TimeVaryingInput( @@ -110,7 +106,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "q", surface_space; reference_date = ref_time, - t_start, regridder_type, ) P_atmos = TimeVaryingInput( @@ -118,7 +113,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "sp", surface_space; reference_date = ref_time, - t_start, regridder_type, ) @@ -127,7 +121,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "t2m", surface_space; reference_date = ref_time, - t_start, regridder_type, ) h_atmos = FT(10) @@ -150,7 +143,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "ssrd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -159,7 +151,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "strd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -494,7 +485,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "lai", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data > 0.05 ? 
data : 0.0, diff --git a/experiments/long_runs/soil.jl b/experiments/long_runs/soil.jl index 66f22c3acd..4f8e7d5aa3 100644 --- a/experiments/long_runs/soil.jl +++ b/experiments/long_runs/soil.jl @@ -71,7 +71,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) subsurface_space = domain.space.subsurface ref_time = DateTime(2021) - t_start = t0 # Forcing data era5_artifact_path = ClimaLand.Artifacts.era5_land_forcing_data2021_folder_path(; context) # Precipitation: @@ -80,7 +79,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "rf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -90,7 +88,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "sf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -100,7 +97,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "ws", surface_space; reference_date = ref_time, - t_start, regridder_type, ) q_atmos = TimeVaryingInput( @@ -108,7 +104,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "q", surface_space; reference_date = ref_time, - t_start, regridder_type, ) P_atmos = TimeVaryingInput( @@ -116,7 +111,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "sp", surface_space; reference_date = ref_time, - t_start, regridder_type, ) @@ -125,7 +119,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "t2m", surface_space; reference_date = ref_time, - t_start, regridder_type, ) h_atmos = FT(10) @@ -148,7 +141,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "ssrd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -157,7 +149,6 @@ function setup_prob(t0, tf, Δt; outdir = outdir, nelements = (101, 15)) "strd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) diff --git a/experiments/standalone/Bucket/bucket_era5.jl b/experiments/standalone/Bucket/bucket_era5.jl index d76ab40de6..595d376112 100644 --- a/experiments/standalone/Bucket/bucket_era5.jl +++ b/experiments/standalone/Bucket/bucket_era5.jl @@ -122,7 +122,6 @@ tf = 14 * 86400; device_suffix = typeof(ClimaComms.context().device) <: ClimaComms.CPUSingleThreaded ? 
"cpu" : "gpu" -t_start = t0 surface_space = bucket_domain.space.surface α_snow = FT(0.8) albedo = PrescribedBaregroundAlbedo{FT}(α_snow, surface_space); @@ -142,7 +141,6 @@ precip = TimeVaryingInput( "rf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -152,7 +150,6 @@ snow_precip = TimeVaryingInput( "sf", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) @@ -162,7 +159,6 @@ u_atmos = TimeVaryingInput( "ws", surface_space; reference_date = ref_time, - t_start, regridder_type, ) q_atmos = TimeVaryingInput( @@ -170,7 +166,6 @@ q_atmos = TimeVaryingInput( "q", surface_space; reference_date = ref_time, - t_start, regridder_type, ) P_atmos = TimeVaryingInput( @@ -178,7 +173,6 @@ P_atmos = TimeVaryingInput( "sp", surface_space; reference_date = ref_time, - t_start, regridder_type, ) @@ -187,7 +181,6 @@ T_atmos = TimeVaryingInput( "t2m", surface_space; reference_date = ref_time, - t_start, regridder_type, ) h_atmos = FT(10); @@ -214,7 +207,6 @@ SW_d = TimeVaryingInput( "ssrd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) @@ -223,7 +215,6 @@ LW_d = TimeVaryingInput( "strd", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> data / 3600,), ) diff --git a/experiments/standalone/Soil/richards_runoff.jl b/experiments/standalone/Soil/richards_runoff.jl index 9f8ca9bf17..8d683cd052 100644 --- a/experiments/standalone/Soil/richards_runoff.jl +++ b/experiments/standalone/Soil/richards_runoff.jl @@ -158,14 +158,12 @@ era5_artifact_path = # 1. Convert precipitation to be negative (as it is downwards) # 2. Convert accumulations over an hour to a rate per second ref_time = DateTime(2021); -t_start = 0.0 # Precipitation: precip = TimeVaryingInput( joinpath(era5_artifact_path, "era5_2021_0.9x1.25.nc"), "tp", surface_space; reference_date = ref_time, - t_start, regridder_type, file_reader_kwargs = (; preprocess_func = (data) -> -data / 3600,), ) diff --git a/src/diagnostics/default_diagnostics.jl b/src/diagnostics/default_diagnostics.jl index 14a26b3c08..f9cf0c035d 100644 --- a/src/diagnostics/default_diagnostics.jl +++ b/src/diagnostics/default_diagnostics.jl @@ -14,7 +14,6 @@ export default_diagnostics period, reduction, output_writer, - t_start, reference_date, short_names...; pre_output_hook! = nothing, @@ -26,7 +25,6 @@ function common_diagnostics( period, reduction, output_writer, - t_start, reference_date, short_names...; pre_output_hook! = nothing, @@ -35,8 +33,8 @@ function common_diagnostics( map(short_names) do short_name output_schedule_func = period isa Period ? 
- EveryCalendarDtSchedule(period; t_start, reference_date) : - EveryDtSchedule(period; t_start) + EveryCalendarDtSchedule(period; reference_date) : + EveryDtSchedule(period) return ScheduledDiagnostic( variable = get_diagnostic_variable(short_name), compute_schedule_func = EveryStepSchedule(), @@ -53,11 +51,10 @@ include("standard_diagnostic_frequencies.jl") # Bucket function default_diagnostics( - land_model::BucketModel, - t_start, + land_model::BucketModel{FT}, reference_date; output_writer, -) +) where {FT} define_diagnostics!(land_model) @@ -78,9 +75,9 @@ function default_diagnostics( ] default_outputs = hourly_averages( + FT, bucket_diagnostics...; output_writer, - t_start, reference_date, ) @@ -89,13 +86,12 @@ end # SoilCanopyModel function default_diagnostics( - land_model::SoilCanopyModel, - t_start, + land_model::SoilCanopyModel{FT}, reference_date; output_writer, output_vars = :long, average_period = :daily, -) +) where {FT} define_diagnostics!(land_model) @@ -163,23 +159,23 @@ function default_diagnostics( if average_period == :hourly default_outputs = hourly_averages( + FT, soilcanopy_diagnostics...; output_writer, - t_start, reference_date, ) elseif average_period == :daily default_outputs = daily_averages( + FT, soilcanopy_diagnostics...; output_writer, - t_start, reference_date, ) elseif average_period == :monthly default_outputs = monthly_averages( + FT, soilcanopy_diagnostics...; output_writer, - t_start, reference_date, ) end @@ -190,12 +186,11 @@ end # SoilModel function default_diagnostics( - land_model::EnergyHydrology, - t_start, + land_model::EnergyHydrology{FT}, reference_date; output_writer, average_period = :daily, -) +) where {FT} define_diagnostics!(land_model) @@ -203,23 +198,23 @@ function default_diagnostics( if average_period == :hourly default_outputs = hourly_averages( + FT, soil_diagnostics...; output_writer, - t_start, reference_date, ) elseif average_period == :daily default_outputs = daily_averages( + FT, soil_diagnostics...; output_writer, - t_start, reference_date, ) elseif average_period == :monthly default_outputs = monthly_averages( + FT, soil_diagnostics...; output_writer, - t_start, reference_date, ) end diff --git a/src/diagnostics/land_compute_methods.jl b/src/diagnostics/land_compute_methods.jl index 3532d949c8..41d9e33eb7 100644 --- a/src/diagnostics/land_compute_methods.jl +++ b/src/diagnostics/land_compute_methods.jl @@ -113,7 +113,7 @@ end ## Drivers Module ## -@diagnostic_compute "soil_organic_carbon" SoilCanopyModel p.drivers.soc # need to fix this in src/shared_utilities/drivers +@diagnostic_compute "soil_organic_carbon" SoilCanopyModel p.drivers.soc # need to fix this in src/shared_utilities/drivers @diagnostic_compute "pressure" SoilCanopyModel p.drivers.P @diagnostic_compute "rainfall" SoilCanopyModel p.drivers.P_liq @diagnostic_compute "radiation_longwave_down" SoilCanopyModel p.drivers.LW_d @@ -151,7 +151,7 @@ function compute_heterotrophic_respiration!( else out .= p.soilco2.top_bc .* FT(83.26) end -end # Convert from kg C to mol CO2. +end # Convert from kg C to mol CO2. 
 # To convert from kg C to mol CO2, we need to multiply by:
 #  [3.664 kg CO2/ kg C] x [10^3 g CO2/ kg CO2] x [1 mol CO2/44.009 g CO2] = 83.26 mol CO2/kg C
diff --git a/src/diagnostics/standard_diagnostic_frequencies.jl b/src/diagnostics/standard_diagnostic_frequencies.jl
index b68b1ca02a..2e48a04e62 100644
--- a/src/diagnostics/standard_diagnostic_frequencies.jl
+++ b/src/diagnostics/standard_diagnostic_frequencies.jl
@@ -1,272 +1,262 @@
 """
-    monthly_maxs(short_names...; output_writer, t_start, reference_date)
+    monthly_maxs(FT, short_names...; output_writer, reference_date)

 Return a list of `ScheduledDiagnostics` that compute the monthly max for the given variables.
 """
-monthly_maxs(short_names...; output_writer, t_start, reference_date) =
-    common_diagnostics(Month(1), max, output_writer, t_start, short_names...)
+monthly_maxs(FT, short_names...; output_writer, reference_date) =
+    common_diagnostics(Month(1), max, output_writer, reference_date, short_names...)

 """
-    monthly_max(short_names; output_writer, t_start, reference_date)
+    monthly_max(FT, short_names; output_writer, reference_date)

 Return a `ScheduledDiagnostics` that computes the monthly max for the given variable.
 """
-monthly_max(short_names; output_writer, t_start, reference_date) =
-    monthly_maxs(short_names; output_writer, t_start, reference_date)[1]
+monthly_max(FT, short_names; output_writer, reference_date) =
+    monthly_maxs(FT, short_names; output_writer, reference_date)[1]

 """
-    monthly_mins(short_names...; output_writer, t_start, reference_date)
+    monthly_mins(FT, short_names...; output_writer, reference_date)

 Return a list of `ScheduledDiagnostics` that compute the monthly min for the given variables.
 """
-monthly_mins(short_names...; output_writer, t_start, reference_date) =
-    common_diagnostics(Month(1), min, output_writer, t_start, short_names...)
+monthly_mins(FT, short_names...; output_writer, reference_date) =
+    common_diagnostics(Month(1), min, output_writer, reference_date, short_names...)

 """
-    monthly_min(short_names; output_writer, t_start, reference_date)
+    monthly_min(FT, short_names; output_writer, reference_date)

 Return a `ScheduledDiagnostics` that computes the monthly min for the given variable.
 """
-monthly_min(short_names; output_writer, t_start, reference_date) =
-    monthly_mins(short_names; output_writer, t_start, reference_date)[1]
+monthly_min(FT, short_names; output_writer, reference_date) =
+    monthly_mins(FT, short_names; output_writer, reference_date)[1]

 """
-    monthly_averages(short_names...; output_writer, t_start, reference_date)
+    monthly_averages(FT, short_names...; output_writer, reference_date)

 Return a list of `ScheduledDiagnostics` that compute the monthly average for the given variables.
 """
 # An average is just a sum with a normalization before output
-monthly_averages(short_names...; output_writer, t_start, reference_date) =
+monthly_averages(FT, short_names...; output_writer, reference_date) =
     common_diagnostics(
         Month(1),
         (+),
         output_writer,
-        t_start,
         reference_date,
         short_names...;
         pre_output_hook! = average_pre_output_hook!,
     )

 """
-    monthly_average(short_names; output_writer, t_start, reference_date)
+    monthly_average(FT, short_names; output_writer, reference_date)

 Return a `ScheduledDiagnostics` that compute the monthly average for the given variable.
""" # An average is just a sum with a normalization before output -monthly_average(short_names; output_writer, t_start, reference_date) = - monthly_averages(short_names; output_writer, t_start, reference_date)[1] +monthly_average(FT, short_names; output_writer, reference_date) = + monthly_averages(FT, short_names; output_writer, reference_date)[1] """ - tendaily_maxs(short_names...; output_writer, t_start, reference_date) + tendaily_maxs(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the max over ten days for the given variables. """ -tendaily_maxs(short_names...; output_writer, t_start, reference_date) = +tendaily_maxs(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 10 * 24 * 60 * 60 * one(t_start), + 10 * 24 * 60 * 60 * one(FT), max, output_writer, - t_start, reference_date, short_names..., ) """ - tendaily_max(short_names; output_writer, t_start, reference_date) + tendaily_max(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the max over ten days for the given variable. """ -tendaily_max(short_names; output_writer, t_start, reference_date) = - tendaily_maxs(short_names; output_writer, t_start, reference_date)[1] +tendaily_max(FT, short_names; output_writer, reference_date) = + tendaily_maxs(FT, short_names; output_writer, reference_date)[1] """ - tendaily_mins(short_names...; output_writer, t_start, reference_date) + tendaily_mins(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the min over ten days for the given variables. """ -tendaily_mins(short_names...; output_writer, t_start, reference_date) = +tendaily_mins(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 10 * 24 * 60 * 60 * one(t_start), + 10 * 24 * 60 * 60 * one(FT), min, output_writer, - t_start, reference_date, short_names..., ) """ - tendaily_min(short_names; output_writer, t_start, reference_date) + tendaily_min(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the min over ten days for the given variable. """ -tendaily_min(short_names; output_writer, t_start, reference_date) = - tendaily_mins(short_names; output_writer, t_start, reference_date)[1] +tendaily_min(FT, short_names; output_writer, reference_date) = + tendaily_mins(FT, short_names; output_writer, reference_date)[1] """ - tendaily_averages(short_names...; output_writer, t_start, reference_date) + tendaily_averages(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the average over ten days for the given variables. """ # An average is just a sum with a normalization before output -tendaily_averages(short_names...; output_writer, t_start, reference_date) = +tendaily_averages(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 10 * 24 * 60 * 60 * one(t_start), + 10 * 24 * 60 * 60 * one(FT), (+), output_writer, - t_start, reference_date, short_names...; pre_output_hook! = average_pre_output_hook!, ) """ - tendaily_average(short_names; output_writer, t_start, reference_date) + tendaily_average(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that compute the average over ten days for the given variable. 
""" # An average is just a sum with a normalization before output -tendaily_average(short_names; output_writer, t_start, reference_date) = - tendaily_averages(short_names; output_writer, t_start, reference_date)[1] +tendaily_average(FT, short_names; output_writer, reference_date) = + tendaily_averages(FT, short_names; output_writer, reference_date)[1] """ - daily_maxs(short_names...; output_writer, t_start, reference_date) + daily_maxs(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the daily max for the given variables. """ -daily_maxs(short_names...; output_writer, t_start, reference_date) = +daily_maxs(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 24 * 60 * 60 * one(t_start), + 24 * 60 * 60 * one(FT), max, output_writer, - t_start, reference_date, short_names..., ) """ - daily_max(short_names; output_writer, t_start, reference_date) + daily_max(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the daily max for the given variable. """ -daily_max(short_names; output_writer, t_start, reference_date) = - daily_maxs(short_names; output_writer, t_start, reference_date)[1] +daily_max(FT, short_names; output_writer, reference_date) = + daily_maxs(FT, short_names; output_writer, reference_date)[1] """ - daily_mins(short_names...; output_writer, t_start, reference_date) + daily_mins(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the daily min for the given variables. """ -daily_mins(short_names...; output_writer, t_start, reference_date) = +daily_mins(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 24 * 60 * 60 * one(t_start), + 24 * 60 * 60 * one(FT), min, output_writer, - t_start, reference_date, short_names..., ) """ - daily_min(short_names; output_writer, t_start, reference_date) + daily_min(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the daily min for the given variable. """ -daily_min(short_names; output_writer, t_start, reference_date) = - daily_mins(short_names; output_writer, t_start, reference_date)[1] +daily_min(FT, short_names; output_writer, reference_date) = + daily_mins(FT, short_names; output_writer, reference_date)[1] """ - daily_averages(short_names...; output_writer, t_start, reference_date) + daily_averages(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the daily average for the given variables. """ # An average is just a sum with a normalization before output -daily_averages(short_names...; output_writer, t_start, reference_date) = +daily_averages(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 24 * 60 * 60 * one(t_start), + 24 * 60 * 60 * one(FT), (+), output_writer, - t_start, reference_date, short_names...; pre_output_hook! = average_pre_output_hook!, ) """ - daily_average(short_names; output_writer, t_start, reference_date) + daily_average(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that compute the daily average for the given variable. 
""" # An average is just a sum with a normalization before output -daily_average(short_names; output_writer, t_start, reference_date) = - daily_averages(short_names; output_writer, t_start, reference_date)[1] +daily_average(FT, short_names; output_writer, reference_date) = + daily_averages(FT, short_names; output_writer, reference_date)[1] """ - hourly_maxs(short_names...; output_writer, t_start, reference_date) + hourly_maxs(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the hourly max for the given variables. """ -hourly_maxs(short_names...; output_writer, t_start, reference_date) = +hourly_maxs(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 60 * 60 * one(t_start), + 60 * 60 * one(FT), max, output_writer, - t_start, reference_date, short_names..., ) """ - hourly_max(short_names; output_writer, t_start, reference_date) + hourly_max(FT, short_names; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the hourly max for the given variable. """ -hourly_max(short_names; output_writer, t_start, reference_date) = - hourly_maxs(short_names; output_writer, t_start, reference_date)[1] +hourly_max(FT, short_names; output_writer, reference_date) = + hourly_maxs(FT, short_names; output_writer, reference_date)[1] """ - hourly_mins(short_names...; output_writer, t_start, reference_date) + hourly_mins(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the hourly min for the given variables. """ -hourly_mins(short_names...; output_writer, t_start, reference_date) = +hourly_mins(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 60 * 60 * one(t_start), + 60 * 60 * one(FT), min, output_writer, - t_start, reference_date, short_names..., ) """ - hourly_mins(short_names...; output_writer, t_start, reference_date) + hourly_mins(FT, short_names...; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the hourly min for the given variable. """ -hourly_min(short_names; output_writer, t_start, reference_date) = - hourly_mins(short_names; output_writer, t_start, reference_date)[1] +hourly_min(FT, short_names; output_writer, reference_date) = + hourly_mins(FT, short_names; output_writer, reference_date)[1] # An average is just a sum with a normalization before output """ - hourly_averages(short_names...; output_writer, t_start, reference_date) + hourly_averages(FT, short_names...; output_writer, reference_date) Return a list of `ScheduledDiagnostics` that compute the hourly average for the given variables. """ -hourly_averages(short_names...; output_writer, t_start, reference_date) = +hourly_averages(FT, short_names...; output_writer, reference_date) = common_diagnostics( - 60 * 60 * one(t_start), + 60 * 60 * one(FT), (+), output_writer, - t_start, reference_date, short_names...; pre_output_hook! = average_pre_output_hook!, ) """ - hourly_average(short_names...; output_writer, t_start, reference_date) + hourly_average(FT, short_names...; output_writer, reference_date) Return a `ScheduledDiagnostics` that computes the hourly average for the given variable. 
""" -hourly_average(short_names; output_writer, t_start, reference_date) = - hourly_averages(short_names; output_writer, t_start, reference_date)[1] +hourly_average(FT, short_names; output_writer, reference_date) = + hourly_averages(FT, short_names; output_writer, reference_date)[1] diff --git a/src/standalone/Bucket/Bucket.jl b/src/standalone/Bucket/Bucket.jl index fa37ae2174..99f89a34bc 100644 --- a/src/standalone/Bucket/Bucket.jl +++ b/src/standalone/Bucket/Bucket.jl @@ -157,7 +157,6 @@ end """ PrescribedSurfaceAlbedo{FT}( date_ref::Union{DateTime, DateTimeNoLeap}, - t_start, Space::ClimaCore.Spaces.AbstractSpace; get_infile = ClimaLand.Artifacts.cesm2_albedo_dataset_path, varname = "sw_alb" @@ -172,7 +171,6 @@ The input data file must have a time component. """ function PrescribedSurfaceAlbedo{FT}( date_ref::Union{DateTime, DateTimeNoLeap}, - t_start, space::ClimaCore.Spaces.AbstractSpace; albedo_file_path = ClimaLand.Artifacts.cesm2_albedo_dataset_path(), varname = "sw_alb", @@ -188,7 +186,6 @@ function PrescribedSurfaceAlbedo{FT}( varname, space; reference_date = date_ref, - t_start, regridder_type, ) diff --git a/test/standalone/Bucket/albedo_types.jl b/test/standalone/Bucket/albedo_types.jl index 651558d6ba..576f5da25e 100644 --- a/test/standalone/Bucket/albedo_types.jl +++ b/test/standalone/Bucket/albedo_types.jl @@ -146,7 +146,7 @@ end ) t_start = Float64(0) - albedo = PrescribedSurfaceAlbedo{FT}(date_ref, t_start, space) + albedo = PrescribedSurfaceAlbedo{FT}(date_ref, space) Y = (; bucket = (; W = Fields.zeros(space))) p = (; bucket = (; α_sfc = Fields.zeros(space))) @@ -309,7 +309,7 @@ end for bucket_domain in bucket_domains space = bucket_domain.space.surface if bucket_domain isa SphericalShell - albedo_model = PrescribedSurfaceAlbedo{FT}(date_ref, t_start, space) + albedo_model = PrescribedSurfaceAlbedo{FT}(date_ref, space) # Radiation ref_time = DateTime(2005, 1, 15, 12) SW_d = (t) -> 0 @@ -388,7 +388,6 @@ end FT, }( date_ref, - t_start, space, ) end