diff --git a/src/mono/mono/mini/interp/interp-internals.h b/src/mono/mono/mini/interp/interp-internals.h
index 43534b51e07fb..5fe04bf314c8c 100644
--- a/src/mono/mono/mini/interp/interp-internals.h
+++ b/src/mono/mono/mini/interp/interp-internals.h
@@ -155,6 +155,7 @@ struct InterpMethod {
MonoProfilerCallInstrumentationFlags prof_flags;
InterpMethodCodeType code_type;
int ref_slot_offset; // GC visible pointer slot
+ int swift_error_offset; // offset of the SwiftError* slot in the interp frame, or -1 if none
MonoBitSet *ref_slots;
#ifdef ENABLE_EXPERIMENT_TIERED
MiniTieredCounter tiered_counter;
diff --git a/src/mono/mono/mini/interp/interp.c b/src/mono/mono/mini/interp/interp.c
index 3e25c7acc2966..08023c81a4890 100644
--- a/src/mono/mono/mini/interp/interp.c
+++ b/src/mono/mono/mini/interp/interp.c
@@ -3176,6 +3176,21 @@ interp_entry_from_trampoline (gpointer ccontext_untyped, gpointer rmethod_untype
/* Copy the args saved in the trampoline to the frame stack */
gpointer retp = mono_arch_get_native_call_context_args (ccontext, &frame, sig, call_info);
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ int swift_error_arg_index = -1;
+ gpointer swift_error_data;
+ gpointer *swift_error_pointer;
+ if (mono_method_signature_has_ext_callconv (sig, MONO_EXT_CALLCONV_SWIFTCALL)) {
+ swift_error_data = mono_arch_get_swift_error (ccontext, sig, &swift_error_arg_index);
+
+ int swift_error_offset = frame.imethod->swift_error_offset;
+ if (swift_error_offset >= 0) {
+ swift_error_pointer = (gpointer*)((guchar*)frame.stack + swift_error_offset);
+ *swift_error_pointer = *(gpointer*)swift_error_data;
+ }
+ }
+#endif
+
/* Allocate storage for value types */
stackval *newsp = sp;
/* FIXME we should reuse computation on imethod for this */
@@ -3195,6 +3210,10 @@ interp_entry_from_trampoline (gpointer ccontext_untyped, gpointer rmethod_untype
} else {
size = MINT_STACK_SLOT_SIZE;
}
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ if (swift_error_arg_index >= 0 && swift_error_arg_index == i)
+ newsp->data.p = swift_error_pointer;
+#endif
newsp = STACK_ADD_BYTES (newsp, size);
}
newsp = (stackval*)ALIGN_TO (newsp, MINT_STACK_ALIGNMENT);
@@ -3205,6 +3224,11 @@ interp_entry_from_trampoline (gpointer ccontext_untyped, gpointer rmethod_untype
mono_interp_exec_method (&frame, context, NULL);
MONO_EXIT_GC_UNSAFE;
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ if (swift_error_arg_index >= 0)
+ *(gpointer*)swift_error_data = *(gpointer*)swift_error_pointer;
+#endif
+
context->stack_pointer = (guchar*)sp;
g_assert (!context->has_resume_state);
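Taken together, the three interp.c hunks above thread the Swift error value through the interpreter frame. A condensed sketch of the flow (loop and surrounding code elided; names match the hunks, with `i` and `newsp` being the locals of the argument-copy loop):

#ifdef MONO_ARCH_HAVE_SWIFTCALL
	/* 1. On entry, read the error-register value that the native-to-interp
	 *    trampoline saved into the CallContext and seed the frame slot that
	 *    transform.c reserved at imethod->swift_error_offset. */
	swift_error_data = mono_arch_get_swift_error (ccontext, sig, &swift_error_arg_index);
	if (frame.imethod->swift_error_offset >= 0) {
		swift_error_pointer = (gpointer*)((guchar*)frame.stack + frame.imethod->swift_error_offset);
		*swift_error_pointer = *(gpointer*)swift_error_data;
	}

	/* 2. While copying arguments, point the SwiftError* argument at that frame
	 *    slot, so writes from managed code land in *swift_error_pointer. */
	if (swift_error_arg_index >= 0 && swift_error_arg_index == i)
		newsp->data.p = swift_error_pointer;

	/* 3. After mono_interp_exec_method () returns, publish the (possibly updated)
	 *    value back into the CallContext; the trampoline epilogue then reloads it
	 *    into the Swift error register before returning to the native caller. */
	if (swift_error_arg_index >= 0)
		*(gpointer*)swift_error_data = *swift_error_pointer;
#endif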
@@ -3467,9 +3491,16 @@ interp_create_method_pointer (MonoMethod *method, gboolean compile, MonoError *e
* separate temp register. We should update the wrappers for this
* if we really care about those architectures (arm).
*/
- MonoMethod *wrapper = mini_get_interp_in_wrapper (sig);
- entry_wrapper = mono_jit_compile_method_jit_only (wrapper, error);
+ MonoMethod *wrapper = NULL;
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ /* Methods with the Swift calling convention are entered via the native-to-interp trampoline instead of an interp-in wrapper */
+ if (!mono_method_signature_has_ext_callconv (sig, MONO_EXT_CALLCONV_SWIFTCALL))
+#endif
+ {
+ wrapper = mini_get_interp_in_wrapper (sig);
+ entry_wrapper = mono_jit_compile_method_jit_only (wrapper, error);
+ }
#endif
if (!entry_wrapper) {
#ifndef MONO_ARCH_HAVE_INTERP_ENTRY_TRAMPOLINE
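For Swift-callconv signatures, entry_wrapper is deliberately left NULL so that interp_create_method_pointer falls through to the entry-trampoline path. A minimal sketch of the resulting decision, assuming the usual MONO_ARCH_HAVE_INTERP_ENTRY_TRAMPOLINE fallback that follows the hunk:

	MonoMethod *wrapper = NULL;
	gpointer entry_wrapper = NULL;

	if (!mono_method_signature_has_ext_callconv (sig, MONO_EXT_CALLCONV_SWIFTCALL)) {
		wrapper = mini_get_interp_in_wrapper (sig);
		entry_wrapper = mono_jit_compile_method_jit_only (wrapper, error);
	}

	if (!entry_wrapper) {
		/* Swift-callconv signatures (and anything else the wrapper cannot handle) end up
		 * here: the method is entered through the native-to-interp trampoline, which fills
		 * a CallContext that interp_entry_from_trampoline () unpacks. */
	}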
diff --git a/src/mono/mono/mini/interp/transform.c b/src/mono/mono/mini/interp/transform.c
index d540cec6a9780..4b4b057e7477a 100644
--- a/src/mono/mono/mini/interp/transform.c
+++ b/src/mono/mono/mini/interp/transform.c
@@ -4360,6 +4360,13 @@ interp_method_compute_offsets (TransformData *td, InterpMethod *imethod, MonoMet
td->renamable_vars_capacity = target_vars_capacity;
offset = 0;
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ int swift_error_index = -1;
+ imethod->swift_error_offset = -1;
+ MonoClass *swift_error = mono_class_try_get_swift_error_class ();
+ MonoClass *swift_error_ptr = mono_class_create_ptr (m_class_get_this_arg (swift_error));
+#endif
+
/*
* We will load arguments as if they are locals. Unlike normal locals, every argument
* is stored in a stackval sized slot and valuetypes have special semantics since we
@@ -4384,6 +4391,15 @@ interp_method_compute_offsets (TransformData *td, InterpMethod *imethod, MonoMet
td->vars [i].offset = offset;
interp_mark_ref_slots_for_var (td, i);
offset += size;
+
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ if (swift_error_index < 0 && mono_method_signature_has_ext_callconv (sig, MONO_EXT_CALLCONV_SWIFTCALL)) {
+ MonoClass *klass = mono_class_from_mono_type_internal (type);
+ if (klass == swift_error_ptr)
+ swift_error_index = i;
+ }
+#endif
+
}
offset = ALIGN_TO (offset, MINT_STACK_ALIGNMENT);
@@ -4417,6 +4433,16 @@ interp_method_compute_offsets (TransformData *td, InterpMethod *imethod, MonoMet
td->il_locals_size = offset - td->il_locals_offset;
td->total_locals_size = offset;
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ if (mono_method_signature_has_ext_callconv (sig, MONO_EXT_CALLCONV_SWIFTCALL) && swift_error_index >= 0) {
+ MonoType *type = mono_method_signature_internal (td->method)->params [swift_error_index - sig->hasthis];
+ int var = interp_create_var_explicit (td, type, sizeof (gpointer));
+ td->vars [var].global = TRUE;
+ interp_alloc_global_var_offset (td, var);
+ imethod->swift_error_offset = td->vars [var].offset;
+ }
+#endif
+
imethod->clause_data_offsets = (guint32*)g_malloc (header->num_clauses * sizeof (guint32));
td->clause_vars = (int*)mono_mempool_alloc (td->mempool, sizeof (int) * header->num_clauses);
for (guint i = 0; i < header->num_clauses; i++) {
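The two transform.c hunks cooperate: the first records which argument is typed SwiftError*, the second reserves a dedicated pointer-sized global var and publishes its offset through imethod->swift_error_offset, which interp_entry_from_trampoline later resolves against frame.stack. A compressed sketch (loop bodies elided; names as in the hunks):

#ifdef MONO_ARCH_HAVE_SWIFTCALL
	int swift_error_index = -1;
	imethod->swift_error_offset = -1;
	MonoClass *swift_error = mono_class_try_get_swift_error_class ();
	MonoClass *swift_error_ptr = mono_class_create_ptr (m_class_get_this_arg (swift_error));

	/* While laying out argument vars: remember the first argument whose class is SwiftError*. */
	if (swift_error_index < 0 && mono_method_signature_has_ext_callconv (sig, MONO_EXT_CALLCONV_SWIFTCALL)) {
		if (mono_class_from_mono_type_internal (type) == swift_error_ptr)
			swift_error_index = i;
	}

	/* After the IL locals are laid out: allocate one extra global pointer slot, giving the
	 * entry code a stable frame location to point the SwiftError* argument at and to read
	 * back once the method has run. */
	if (swift_error_index >= 0) {
		int var = interp_create_var_explicit (td, type, sizeof (gpointer));
		td->vars [var].global = TRUE;
		interp_alloc_global_var_offset (td, var);
		imethod->swift_error_offset = td->vars [var].offset;
	}
#endif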
diff --git a/src/mono/mono/mini/mini-amd64.c b/src/mono/mono/mini/mini-amd64.c
index 1bc469b1aac7d..aeabd56a1ed5b 100644
--- a/src/mono/mono/mini/mini-amd64.c
+++ b/src/mono/mono/mini/mini-amd64.c
@@ -1324,7 +1324,7 @@ mono_arch_set_native_call_context_ret (CallContext *ccontext, gpointer frame, Mo
storage = alloca (temp_size);
else
storage = arg_get_storage (ccontext, ainfo);
- memset (ccontext, 0, sizeof (CallContext)); // FIXME
+
interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, -1, storage);
if (temp_size)
arg_set_val (ccontext, ainfo, storage);
diff --git a/src/mono/mono/mini/mini-arm64.c b/src/mono/mono/mini/mini-arm64.c
index 0af0da4b8030c..05ea6500381ac 100644
--- a/src/mono/mono/mini/mini-arm64.c
+++ b/src/mono/mono/mini/mini-arm64.c
@@ -2081,7 +2081,7 @@ mono_arch_set_native_call_context_ret (CallContext *ccontext, gpointer frame, Mo
storage = alloca (temp_size);
else
storage = arg_get_storage (ccontext, ainfo);
- memset (ccontext, 0, sizeof (CallContext)); // FIXME
+
interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, -1, storage);
if (temp_size)
arg_set_val (ccontext, ainfo, storage);
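Both the amd64 and arm64 backends drop the blanket memset of the CallContext in mono_arch_set_native_call_context_ret. The reason, inferred from the hunks elsewhere in this patch: by the time the return value is marshaled, interp_entry_from_trampoline has already written the Swift error value back into ccontext->gregs, and zeroing the whole structure would discard it before the trampoline epilogue reloads it into the error register. A sketch of the relevant part of the function, using the surrounding code shown in the hunks:

	/* mono_arch_set_native_call_context_ret, relevant fragment (sketch) */
	if (temp_size)
		storage = alloca (temp_size);
	else
		storage = arg_get_storage (ccontext, ainfo);
	/* No memset (ccontext, 0, sizeof (CallContext)) here: ccontext->gregs already holds
	 * the Swift error value written back by interp_entry_from_trampoline, and the
	 * trampoline reloads it into the error register after this returns. */
	interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, -1, storage);
	if (temp_size)
		arg_set_val (ccontext, ainfo, storage);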
diff --git a/src/mono/mono/mini/tramp-amd64.c b/src/mono/mono/mini/tramp-amd64.c
index 4f0e48c61918a..8cb668a1e4578 100644
--- a/src/mono/mono/mini/tramp-amd64.c
+++ b/src/mono/mono/mini/tramp-amd64.c
@@ -1263,6 +1263,12 @@ mono_arch_get_native_to_interp_trampoline (MonoTrampInfo **info)
for (i = 0; i < FLOAT_PARAM_REGS; i++)
amd64_sse_movsd_membase_reg (code, AMD64_RSP, ctx_offset + MONO_STRUCT_OFFSET (CallContext, fregs) + i * sizeof (double), i);
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ /* store the Swift context registers into the CallContext */
+ for (i = 0; i < CTX_REGS; i++)
+ amd64_mov_membase_reg (code, AMD64_RSP, ctx_offset + MONO_STRUCT_OFFSET (CallContext, gregs) + (i + CTX_REGS_OFFSET) * sizeof (target_mgreg_t), i + CTX_REGS_OFFSET, sizeof (target_mgreg_t));
+#endif
+
/* set the stack pointer to the value at call site */
amd64_mov_reg_reg (code, AMD64_R11, AMD64_RBP, sizeof (target_mgreg_t));
amd64_alu_reg_imm (code, X86_ADD, AMD64_R11, 2 * sizeof (target_mgreg_t));
@@ -1283,6 +1289,12 @@ mono_arch_get_native_to_interp_trampoline (MonoTrampInfo **info)
for (i = 0; i < FLOAT_RETURN_REGS; i++)
amd64_sse_movsd_reg_membase (code, i, AMD64_RSP, ctx_offset + MONO_STRUCT_OFFSET (CallContext, fregs) + i * sizeof (double));
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ /* restore the Swift context registers from the CallContext */
+ for (i = 0; i < CTX_REGS; i++)
+ amd64_mov_reg_membase (code, i + CTX_REGS_OFFSET, AMD64_RSP, ctx_offset + MONO_STRUCT_OFFSET (CallContext, gregs) + (i + CTX_REGS_OFFSET) * sizeof (target_mgreg_t), sizeof (target_mgreg_t));
+#endif
+
/* reset stack and return */
#if TARGET_WIN32
amd64_lea_membase (code, AMD64_RSP, AMD64_RBP, 0);
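In C-like terms, the two new loops bracket the interpreter call with a spill and reload of the Swift context registers. A sketch of what the emitted code does; `cpu_reg[]` is a stand-in for the actual machine registers, and CTX_REGS / CTX_REGS_OFFSET are the mini-amd64.h constants assumed to describe the Swift context/error registers:

	/* prologue: capture the Swift context/error registers into the CallContext */
	for (i = 0; i < CTX_REGS; i++)
		ccontext->gregs [CTX_REGS_OFFSET + i] = cpu_reg [CTX_REGS_OFFSET + i];

	/* ... interp_entry_from_trampoline () executes the managed method ... */

	/* epilogue: restore them, so an error stored through SwiftError* reaches the native caller */
	for (i = 0; i < CTX_REGS; i++)
		cpu_reg [CTX_REGS_OFFSET + i] = ccontext->gregs [CTX_REGS_OFFSET + i];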
diff --git a/src/mono/mono/mini/tramp-arm64.c b/src/mono/mono/mini/tramp-arm64.c
index 7e8483409610c..f4869988e87b6 100644
--- a/src/mono/mono/mini/tramp-arm64.c
+++ b/src/mono/mono/mini/tramp-arm64.c
@@ -846,6 +846,12 @@ mono_arch_get_native_to_interp_trampoline (MonoTrampInfo **info)
for (i = 0; i < FP_PARAM_REGS; i++)
arm_strfpx (code, i, ARMREG_FP, ccontext_offset + MONO_STRUCT_OFFSET (CallContext, fregs) + i * sizeof (double));
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ /* store the Swift context registers into the CallContext */
+ for (i = 0; i < CTX_REGS; i++)
+ arm_strx (code, i + CTX_REGS_OFFSET, ARMREG_FP, ccontext_offset + MONO_STRUCT_OFFSET (CallContext, gregs) + (i + PARAM_REGS + 1) * sizeof (host_mgreg_t));
+#endif
+
/* set the stack pointer to the value at call site */
arm_addx_imm (code, ARMREG_R0, ARMREG_FP, framesize);
arm_strp (code, ARMREG_R0, ARMREG_FP, ccontext_offset + MONO_STRUCT_OFFSET (CallContext, stack));
@@ -863,6 +869,12 @@ mono_arch_get_native_to_interp_trampoline (MonoTrampInfo **info)
for (i = 0; i < FP_PARAM_REGS; i++)
arm_ldrfpx (code, i, ARMREG_FP, ccontext_offset + MONO_STRUCT_OFFSET (CallContext, fregs) + i * sizeof (double));
+#ifdef MONO_ARCH_HAVE_SWIFTCALL
+ /* restore the Swift context registers from the CallContext */
+ for (i = 0; i < CTX_REGS; i++)
+ arm_ldrx (code, i + CTX_REGS_OFFSET, ARMREG_FP, ccontext_offset + MONO_STRUCT_OFFSET (CallContext, gregs) + (i + PARAM_REGS + 1) * sizeof (host_mgreg_t));
+#endif
+
/* reset stack and return */
arm_ldpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
arm_addx_imm (code, ARMREG_SP, ARMREG_SP, framesize);
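The arm64 version differs from amd64 only in the gregs indexing: the register number is `i + CTX_REGS_OFFSET` (presumably x20/x21, the Swift self/error registers), while the CallContext slot is `i + PARAM_REGS + 1`, since those registers are not contiguous with the argument registers. A sketch of the layout this indexing implies; the slot assignments are an inference, not taken from mini-arm64.h:

	/* Illustrative only: the gregs slot the arm64 loops use for Swift register i.
	 * x0..x7 occupy gregs [0..PARAM_REGS-1]; one more slot follows (likely x8, the
	 * indirect-result register); the Swift context/error registers come after that. */
	static inline int
	swift_greg_slot (int i)
	{
		return PARAM_REGS + 1 + i;
	}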
diff --git a/src/tests/issues.targets b/src/tests/issues.targets
index a27fbb6ac4ef8..a8b94f6576cf2 100644
--- a/src/tests/issues.targets
+++ b/src/tests/issues.targets
@@ -2145,9 +2145,6 @@
<Issue>https://github.com/dotnet/runtime/issues/71656</Issue>
-
- <Issue>Reverse P/Invokes not supported yet</Issue>
-