diff --git a/base/options.jl b/base/options.jl
index cb35b2b3806f4..2b2906bf86e82 100644
--- a/base/options.jl
+++ b/base/options.jl
@@ -34,6 +34,7 @@ struct JLOptions
     can_inline::Int8
     polly::Int8
     trace_compile::Ptr{UInt8}
+    trace_dispatch::Ptr{UInt8}
     fast_math::Int8
     worker::Int8
     cookie::Ptr{UInt8}
diff --git a/src/gf.c b/src/gf.c
index 88810757dc15c..1aee075c8d15a 100644
--- a/src/gf.c
+++ b/src/gf.c
@@ -2365,6 +2365,38 @@ static void record_precompile_statement(jl_method_instance_t *mi)
     JL_UNLOCK(&precomp_statement_out_lock);
 }
 
+jl_mutex_t dispatch_statement_out_lock;
+
+static void record_dispatch_statement(jl_method_instance_t *mi)
+{
+    static ios_t f_dispatch;
+    static JL_STREAM* s_dispatch = NULL;
+    jl_method_t *def = mi->def.method;
+    if (!jl_is_method(def))
+        return;
+
+    JL_LOCK(&dispatch_statement_out_lock);
+    if (s_dispatch == NULL) {
+        const char *t = jl_options.trace_dispatch;
+        if (!strncmp(t, "stderr", 6)) {
+            s_dispatch = JL_STDERR;
+        }
+        else {
+            if (ios_file(&f_dispatch, t, 1, 1, 1, 1) == NULL)
+                jl_errorf("cannot open dispatch statement file \"%s\" for writing", t);
+            s_dispatch = (JL_STREAM*) &f_dispatch;
+        }
+    }
+    if (!jl_has_free_typevars(mi->specTypes)) {
+        jl_printf(s_dispatch, "precompile(");
+        jl_static_show(s_dispatch, mi->specTypes);
+        jl_printf(s_dispatch, ")\n");
+        if (s_dispatch != JL_STDERR)
+            ios_flush(&f_dispatch);
+    }
+    JL_UNLOCK(&dispatch_statement_out_lock);
+}
+
 jl_method_instance_t *jl_normalize_to_compilable_mi(jl_method_instance_t *mi JL_PROPAGATES_ROOT);
 
 jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t world)
@@ -2779,7 +2811,8 @@ static void jl_compile_now(jl_method_instance_t *mi)
 JL_DLLEXPORT void jl_compile_method_instance(jl_method_instance_t *mi, jl_tupletype_t *types, size_t world)
 {
     size_t tworld = jl_typeinf_world;
-    jl_atomic_store_relaxed(&mi->precompiled, 1);
+    uint8_t miflags = jl_atomic_load_relaxed(&mi->flags) | JL_MI_FLAGS_MASK_PRECOMPILED;
+    jl_atomic_store_relaxed(&mi->flags, miflags);
     if (jl_generating_output()) {
         jl_compile_now(mi);
         // In addition to full compilation of the compilation-signature, if `types` is more specific (e.g. due to nospecialize),
@@ -2794,7 +2827,8 @@ JL_DLLEXPORT void jl_compile_method_instance(jl_method_instance_t *mi, jl_tuplet
         types2 = jl_type_intersection_env((jl_value_t*)types, (jl_value_t*)mi->def.method->sig, &tpenv2);
         jl_method_instance_t *mi2 = jl_specializations_get_linfo(mi->def.method, (jl_value_t*)types2, tpenv2);
         JL_GC_POP();
-        jl_atomic_store_relaxed(&mi2->precompiled, 1);
+        miflags = jl_atomic_load_relaxed(&mi2->flags) | JL_MI_FLAGS_MASK_PRECOMPILED;
+        jl_atomic_store_relaxed(&mi2->flags, miflags);
         if (jl_rettype_inferred(mi2, world, world) == jl_nothing)
             (void)jl_type_infer(mi2, world, 1);
         if (jl_typeinf_func && mi->def.method->primary_world <= tworld) {
@@ -3060,6 +3094,16 @@ STATIC_INLINE jl_method_instance_t *jl_lookup_generic_(jl_value_t *F, jl_value_t
             jl_method_error(F, args, nargs, world);
             // unreachable
         }
+        // mfunc is about to be dispatched
+        if (jl_options.trace_dispatch != NULL) {
+            uint8_t miflags = jl_atomic_load_relaxed(&mfunc->flags);
+            uint8_t was_dispatched = miflags & JL_MI_FLAGS_MASK_DISPATCHED;
+            if (!was_dispatched) {
+                miflags |= JL_MI_FLAGS_MASK_DISPATCHED;
+                jl_atomic_store_relaxed(&mfunc->flags, miflags);
+                record_dispatch_statement(mfunc);
+            }
+        }
     }
 
 #ifdef JL_TRACE
@@ -3182,6 +3226,15 @@ jl_value_t *jl_gf_invoke_by_method(jl_method_t *method, jl_value_t *gf, jl_value
             jl_gc_sync_total_bytes(last_alloc); // discard allocation count from compilation
     }
     JL_GC_PROMISE_ROOTED(mfunc);
+    if (jl_options.trace_dispatch != NULL) {
+        uint8_t miflags = jl_atomic_load_relaxed(&mfunc->flags);
+        uint8_t was_dispatched = miflags & JL_MI_FLAGS_MASK_DISPATCHED;
+        if (!was_dispatched) {
+            miflags |= JL_MI_FLAGS_MASK_DISPATCHED;
+            jl_atomic_store_relaxed(&mfunc->flags, miflags);
+            record_dispatch_statement(mfunc);
+        }
+    }
     size_t world = jl_current_task->world_age;
     return _jl_invoke(gf, args, nargs - 1, mfunc, world);
 }
diff --git a/src/jloptions.c b/src/jloptions.c
index 47f78f2de2a17..14d2242284ae1 100644
--- a/src/jloptions.c
+++ b/src/jloptions.c
@@ -67,6 +67,7 @@ JL_DLLEXPORT void jl_init_options(void)
                         1, // can_inline
                         JL_OPTIONS_POLLY_ON, // polly
                         NULL, // trace_compile
+                        NULL, // trace_dispatch
                         JL_OPTIONS_FAST_MATH_DEFAULT,
                         0, // worker
                         NULL, // cookie
@@ -234,6 +235,7 @@ JL_DLLEXPORT void jl_parse_opts(int *argcp, char ***argvp)
           opt_inline,
           opt_polly,
           opt_trace_compile,
+          opt_trace_dispatch,
           opt_math_mode,
           opt_worker,
           opt_bind_to,
@@ -310,6 +312,7 @@ JL_DLLEXPORT void jl_parse_opts(int *argcp, char ***argvp)
         { "inline", required_argument, 0, opt_inline },
         { "polly", required_argument, 0, opt_polly },
         { "trace-compile", required_argument, 0, opt_trace_compile },
+        { "trace-dispatch", required_argument, 0, opt_trace_dispatch },
         { "math-mode", required_argument, 0, opt_math_mode },
         { "handle-signals", required_argument, 0, opt_handle_signals },
         // hidden command line options
@@ -752,6 +755,11 @@ JL_DLLEXPORT void jl_parse_opts(int *argcp, char ***argvp)
            if (!jl_options.trace_compile)
                jl_errorf("fatal error: failed to allocate memory: %s", strerror(errno));
            break;
+        case opt_trace_dispatch:
+            jl_options.trace_dispatch = strdup(optarg);
+            if (!jl_options.trace_dispatch)
+                jl_errorf("fatal error: failed to allocate memory: %s", strerror(errno));
+            break;
         case opt_math_mode:
             if (!strcmp(optarg,"ieee"))
                 jl_options.fast_math = JL_OPTIONS_FAST_MATH_OFF;
diff --git a/src/jloptions.h b/src/jloptions.h
index b633291c6ed41..20027ebfff24a 100644
--- a/src/jloptions.h
+++ b/src/jloptions.h
@@ -38,6 +38,7 @@ typedef struct {
     int8_t can_inline;
     int8_t polly;
     const char *trace_compile;
+    const char *trace_dispatch;
     int8_t fast_math;
     int8_t worker;
     const char *cookie;
diff --git a/src/jltypes.c b/src/jltypes.c
index 66db26a575e90..e9d843c383a9c 100644
--- a/src/jltypes.c
+++ b/src/jltypes.c
@@ -3089,7 +3089,7 @@ void jl_init_types(void) JL_GC_DISABLED
                             "cache",
                             "inInference",
                             "cache_with_orig",
-                            "precompiled"),
+                            "flags"),
                         jl_svec(10,
                             jl_new_struct(jl_uniontype_type, jl_method_type, jl_module_type),
                             jl_any_type,
diff --git a/src/julia.h b/src/julia.h
index 110e5bfbff2fd..f34bb1623eed5 100644
--- a/src/julia.h
+++ b/src/julia.h
@@ -397,8 +397,14 @@ struct _jl_method_instance_t {
     _Atomic(struct _jl_code_instance_t*) cache;
     uint8_t inInference; // flags to tell if inference is running on this object
     uint8_t cache_with_orig; // !cache_with_specTypes
-    _Atomic(uint8_t) precompiled; // true if this instance was generated by an explicit `precompile(...)` call
+
+    // flags for this method instance
+    // bit 0: generated by an explicit `precompile(...)`
+    // bit 1: dispatched
+    _Atomic(uint8_t) flags;
 };
+#define JL_MI_FLAGS_MASK_PRECOMPILED 0x01
+#define JL_MI_FLAGS_MASK_DISPATCHED 0x02
 
 // OpaqueClosure
 typedef struct _jl_opaque_closure_t {
diff --git a/src/method.c b/src/method.c
index e96d1008e3056..d46302127534e 100644
--- a/src/method.c
+++ b/src/method.c
@@ -453,7 +453,7 @@ JL_DLLEXPORT jl_method_instance_t *jl_new_method_instance_uninit(void)
     jl_atomic_store_relaxed(&mi->cache, NULL);
     mi->inInference = 0;
     mi->cache_with_orig = 0;
-    jl_atomic_store_relaxed(&mi->precompiled, 0);
+    jl_atomic_store_relaxed(&mi->flags, 0);
     return mi;
 }
 
diff --git a/src/staticdata.c b/src/staticdata.c
index c130c2e7569fe..945593034a890 100644
--- a/src/staticdata.c
+++ b/src/staticdata.c
@@ -1482,7 +1482,7 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
        else if (jl_is_method_instance(v)) {
            assert(f == s->s);
            jl_method_instance_t *newmi = (jl_method_instance_t*)&f->buf[reloc_offset];
-           jl_atomic_store_relaxed(&newmi->precompiled, 0);
+           jl_atomic_store_relaxed(&newmi->flags, 0);
        }
        else if (jl_is_code_instance(v)) {
            assert(f == s->s);
diff --git a/src/staticdata_utils.c b/src/staticdata_utils.c
index b488934606671..2fe01d919e885 100644
--- a/src/staticdata_utils.c
+++ b/src/staticdata_utils.c
@@ -158,7 +158,8 @@ static int has_backedge_to_worklist(jl_method_instance_t *mi, htable_t *visited,
     if (jl_is_method(mod))
         mod = ((jl_method_t*)mod)->module;
     assert(jl_is_module(mod));
-    if (mi->precompiled || !jl_object_in_image((jl_value_t*)mod) || type_in_worklist(mi->specTypes)) {
+    uint8_t is_precompiled = jl_atomic_load_relaxed(&mi->flags) & JL_MI_FLAGS_MASK_PRECOMPILED;
+    if (is_precompiled || !jl_object_in_image((jl_value_t*)mod) || type_in_worklist(mi->specTypes)) {
         return 1;
     }
     if (!mi->backedges) {
diff --git a/test/cmdlineargs.jl b/test/cmdlineargs.jl
index e82d9afde7ce5..e5659b7c2439f 100644
--- a/test/cmdlineargs.jl
+++ b/test/cmdlineargs.jl
@@ -750,6 +750,28 @@ let exename = `$(Base.julia_cmd()) --startup-file=no --color=no`
     # tested in test/parallel.jl)
     @test errors_not_signals(`$exename --worker=true`)
 
+    # --trace-compile
+    let
+        io = IOBuffer()
+        v = writereadpipeline(
+            "foo(x) = begin Base.Experimental.@force_compile; x; end; foo(1)",
+            `$exename --trace-compile=stderr -i`,
+            stderr=io)
+        _stderr = String(take!(io))
+        @test occursin("precompile(Tuple{typeof(Main.foo), Int", _stderr)
+    end
+
+    # --trace-dispatch
+    let
+        io = IOBuffer()
+        v = writereadpipeline(
+            "foo(x) = begin Base.Experimental.@force_compile; x; end; foo(1)",
+            `$exename --trace-dispatch=stderr -i`,
+            stderr=io)
+        _stderr = String(take!(io))
+        @test occursin("precompile(Tuple{typeof(Main.foo), Int", _stderr)
+    end
+
     # test passing arguments
     mktempdir() do dir
         testfile, io = mktemp(dir)
diff --git a/test/core.jl b/test/core.jl
index eefeeb8d1240d..0212193a0206b 100644
--- a/test/core.jl
+++ b/test/core.jl
@@ -32,7 +32,7 @@ for (T, c) in (
         (Core.CodeInfo, []),
         (Core.CodeInstance, [:next, :inferred, :purity_bits, :invoke, :specptr, :precompile]),
         (Core.Method, []),
-        (Core.MethodInstance, [:uninferred, :cache, :precompiled]),
+        (Core.MethodInstance, [:uninferred, :cache, :flags]),
         (Core.MethodTable, [:defs, :leafcache, :cache, :max_args]),
         (Core.TypeMapEntry, [:next]),
         (Core.TypeMapLevel, [:arg1, :targ, :name1, :tname, :list, :any]),
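
For reference, a minimal usage sketch of the new flag, mirroring the `cmdlineargs.jl` test above but writing the trace to a file instead of stderr. The temporary paths, the helper script, and the sample `foo` definition are illustrative assumptions, not part of this patch:

```julia
# Sketch only (not part of the patch): run a script under --trace-dispatch
# and inspect the recorded statements afterwards.
mktempdir() do dir
    tracefile = joinpath(dir, "trace.jl")   # hypothetical output path
    script = joinpath(dir, "script.jl")     # hypothetical driver script
    write(script, """
        foo(x) = begin Base.Experimental.@force_compile; x; end
        foo(1)
        """)
    run(`$(Base.julia_cmd()) --startup-file=no --trace-dispatch=$tracefile $script`)
    # Each dynamically dispatched MethodInstance is recorded once, in the same
    # `precompile(Tuple{...})` form that --trace-compile emits.
    print(read(tracefile, String))
end
```

On a 64-bit build the output should include a line along the lines of `precompile(Tuple{typeof(Main.foo), Int64})`, which is what the `occursin` check in the test above looks for.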