From ffe7bcc55fa412ac7eba02c90b012ab6109e7ba6 Mon Sep 17 00:00:00 2001
From: MilesCranmer
Date: Sat, 23 Dec 2023 09:03:29 +0000
Subject: [PATCH] Prefer to use `@info` instead of internal `debug` command

---
 src/Configure.jl          | 52 ++++++++++++++++++---------------------
 src/SingleIteration.jl    |  1 -
 src/SymbolicRegression.jl |  5 ++--
 src/Utils.jl              | 12 ---------
 4 files changed, 26 insertions(+), 44 deletions(-)

diff --git a/src/Configure.jl b/src/Configure.jl
index 700d594c9..a2405a698 100644
--- a/src/Configure.jl
+++ b/src/Configure.jl
@@ -95,25 +95,18 @@ function test_dataset_configuration(
         )
     end
 
-    if size(dataset.X, 2) > 10000
-        if !options.batching
-            debug(
-                verbosity > 0,
-                "Note: you are running with more than 10,000 datapoints. You should consider turning on batching (`options.batching`), and also if you need that many datapoints. Unless you have a large amount of noise (in which case you should smooth your dataset first), generally < 10,000 datapoints is enough to find a functional form.",
-            )
-        end
+    if size(dataset.X, 2) > 10000 && !options.batching && verbosity > 0
+        @info "Note: you are running with more than 10,000 datapoints. You should consider turning on batching (`options.batching`), and also if you need that many datapoints. Unless you have a large amount of noise (in which case you should smooth your dataset first), generally < 10,000 datapoints is enough to find a functional form."
     end
 
-    if !(typeof(options.elementwise_loss) <: SupervisedLoss)
-        if dataset.weighted
-            if !(3 in [m.nargs - 1 for m in methods(options.elementwise_loss)])
-                throw(
-                    AssertionError(
-                        "When you create a custom loss function, and are using weights, you need to define your loss function with three scalar arguments: f(prediction, target, weight).",
-                    ),
-                )
-            end
-        end
+    if !(typeof(options.elementwise_loss) <: SupervisedLoss) &&
+        dataset.weighted &&
+        !(3 in [m.nargs - 1 for m in methods(options.elementwise_loss)])
+        throw(
+            AssertionError(
+                "When you create a custom loss function, and are using weights, you need to define your loss function with three scalar arguments: f(prediction, target, weight).",
+            ),
+        )
     end
 end
 
@@ -202,14 +195,15 @@ end
 
 function copy_definition_to_workers(op, procs, options::Options, verbosity)
     name = nameof(op)
-    debug_inline(verbosity > 0, "Copying definition of $op to workers...")
+    verbosity > 0 && @info "Copying definition of $op to workers..."
     src_ms = methods(op).ms
     # Thanks https://discourse.julialang.org/t/easy-way-to-send-custom-function-to-distributed-workers/22118/2
     @everywhere procs @eval function $name end
     for m in src_ms
         @everywhere procs @eval $m
     end
-    return debug(verbosity > 0, "Finished!")
+    verbosity > 0 && @info "Finished!"
+    return nothing
 end
 
 function test_function_on_workers(example_inputs, op, procs)
@@ -223,7 +217,7 @@ end
 
 function activate_env_on_workers(procs, project_path::String, options::Options, verbosity)
-    debug(verbosity > 0, "Activating environment on workers.")
+    verbosity > 0 && @info "Activating environment on workers."
     @everywhere procs begin
         Base.MainInclude.eval(
             quote
@@ -237,7 +231,7 @@ end
 function import_module_on_workers(procs, filename::String, options::Options, verbosity)
     included_local = !("SymbolicRegression" in [k.name for (k, v) in Base.loaded_modules])
     if included_local
-        debug_inline(verbosity > 0, "Importing local module ($filename) on workers...")
+        verbosity > 0 && @info "Importing local module ($filename) on workers..."
         @everywhere procs begin
             # Parse functions on every worker node
             Base.MainInclude.eval(
@@ -247,18 +241,18 @@ function import_module_on_workers(procs, filename::String, options::Options, ver
                 end,
             )
         end
-        debug(verbosity > 0, "Finished!")
+        verbosity > 0 && @info "Finished!"
     else
-        debug_inline(verbosity > 0, "Importing installed module on workers...")
+        verbosity > 0 && @info "Importing installed module on workers..."
        @everywhere procs begin
             Base.MainInclude.eval(using SymbolicRegression)
         end
-        debug(verbosity > 0, "Finished!")
+        verbosity > 0 && @info "Finished!"
     end
 end
 
 function test_module_on_workers(procs, options::Options, verbosity)
-    debug_inline(verbosity > 0, "Testing module on workers...")
+    verbosity > 0 && @info "Testing module on workers..."
     futures = []
     for proc in procs
         push!(
@@ -269,14 +263,15 @@
     for future in futures
         fetch(future)
     end
-    return debug(verbosity > 0, "Finished!")
+    verbosity > 0 && @info "Finished!"
+    return nothing
 end
 
 function test_entire_pipeline(
     procs, dataset::Dataset{T}, options::Options, verbosity
 ) where {T<:DATA_TYPE}
     futures = []
-    debug_inline(verbosity > 0, "Testing entire pipeline on workers...")
+    verbosity > 0 && @info "Testing entire pipeline on workers..."
     for proc in procs
         push!(
             futures,
@@ -307,7 +302,8 @@
     for future in futures
         fetch(future)
     end
-    return debug(verbosity > 0, "Finished!")
+    verbosity > 0 && @info "Finished!"
+    return nothing
 end
 
 function configure_workers(;
diff --git a/src/SingleIteration.jl b/src/SingleIteration.jl
index 8dc87a8e3..9a8fe5c56 100644
--- a/src/SingleIteration.jl
+++ b/src/SingleIteration.jl
@@ -3,7 +3,6 @@ module SingleIterationModule
 import DynamicExpressions: Node, string_tree, simplify_tree, combine_operators
 import ..CoreModule: Options, Dataset, RecordType, DATA_TYPE, LOSS_TYPE
 import ..ComplexityModule: compute_complexity
-import ..UtilsModule: debug
 import ..PopMemberModule: generate_reference
 import ..PopulationModule: Population, finalize_scores, best_sub_pop
 import ..HallOfFameModule: HallOfFame
diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl
index 5e5231d64..c53a88589 100644
--- a/src/SymbolicRegression.jl
+++ b/src/SymbolicRegression.jl
@@ -192,8 +192,7 @@ import .CoreModule:
     erf,
     erfc,
     atanh_clip
-import .UtilsModule:
-    debug, debug_inline, is_anonymous_function, recursive_merge, json3_write
+import .UtilsModule: is_anonymous_function, recursive_merge, json3_write
 import .ComplexityModule: compute_complexity
 import .CheckConstraintsModule: check_constraints
 import .AdaptiveParsimonyModule:
@@ -755,7 +754,7 @@ function _equation_search(
             allPops[j][i] = updated_pop
         end
 
-    debug(verbosity > 0, "Started!")
+    verbosity > 0 && @info "Started!"
     start_time = time()
     total_cycles = options.populations * niterations
     cycles_remaining = [total_cycles for j in 1:nout]
diff --git a/src/Utils.jl b/src/Utils.jl
index 4ec2f1b2f..f0e474d97 100644
--- a/src/Utils.jl
+++ b/src/Utils.jl
@@ -4,18 +4,6 @@ module UtilsModule
 import Printf: @printf
 import MacroTools: splitdef, combinedef
 
-function debug(verbosity, string...)
-    if verbosity > 0
-        println(string...)
-    end
-end
-
-function debug_inline(verbosity, string...)
-    if verbosity > 0
-        print(string...)
-    end
-end
-
 const pseudo_time = Ref(0)
 
 function get_birth_order(; deterministic=false)::Int
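
For reference, a minimal sketch of the logging idiom this patch adopts. The function name `run_search_step` below is a hypothetical stand-in for the patched call sites, not code from this repository:

    # Sketch of the idiom: guard a standard `@info` with a short-circuit
    # on `verbosity`, instead of routing through the removed internal
    # `debug`/`debug_inline` helpers.
    function run_search_step(verbosity::Int)
        # Old style (removed in this patch): debug(verbosity > 0, "Started!")
        verbosity > 0 && @info "Started!"
        # ... work ...
        verbosity > 0 && @info "Finished!"
        # Explicit `return nothing` so the function does not leak the Bool
        # value of the final `&&` expression (hence the `+ return nothing`
        # lines added in the patch).
        return nothing
    end

    run_search_step(1)  # prints "[ Info: Started!" then "[ Info: Finished!"
    run_search_step(0)  # logs nothing

One upshot of the switch: `@info` sends messages through Julia's logging system, so callers can filter, capture, or redirect them, which the raw `println`/`print` calls in the old `debug`/`debug_inline` helpers did not allow.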