importercalls.cpp
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
#include "jitpch.h"
//------------------------------------------------------------------------
// impImportCall: import a call-inspiring opcode
//
// Arguments:
// opcode - opcode that inspires the call
// pResolvedToken - resolved token for the call target
// pConstrainedResolvedToken - resolved constraint token (or nullptr)
// newobjThis - tree for this pointer or uninitialized newobj temp (or nullptr)
// prefixFlags - IL prefix flags for the call
// callInfo - EE supplied info for the call
// rawILOffset - IL offset of the opcode, used for guarded devirtualization.
//
// Returns:
// Type of the call's return value.
// If we're importing an inlinee and have realized the inline must fail, the call return type should be TYP_UNDEF.
// However we can't assert for this here yet because there are cases we miss. See issue #13272.
//
// Notes:
// opcode can be CEE_CALL, CEE_CALLI, CEE_CALLVIRT, or CEE_NEWOBJ.
//
// For CEE_NEWOBJ, newobjThis should be the temp grabbed for the allocated
// uninitialized object.
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
var_types Compiler::impImportCall(OPCODE opcode,
CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken,
GenTree* newobjThis,
int prefixFlags,
CORINFO_CALL_INFO* callInfo,
IL_OFFSET rawILOffset)
{
assert(opcode == CEE_CALL || opcode == CEE_CALLVIRT || opcode == CEE_NEWOBJ || opcode == CEE_CALLI);
// The current statement DI may not refer to the exact call, but for calls
// we wish to be able to attach the exact IL instruction to get "return
// value" support in the debugger, so create one with the exact IL offset.
DebugInfo di = impCreateDIWithCurrentStackInfo(rawILOffset, true);
var_types callRetTyp = TYP_COUNT;
CORINFO_SIG_INFO* sig = nullptr;
CORINFO_METHOD_HANDLE methHnd = nullptr;
CORINFO_CLASS_HANDLE clsHnd = nullptr;
unsigned clsFlags = 0;
unsigned mflags = 0;
GenTree* call = nullptr;
CORINFO_THIS_TRANSFORM constraintCallThisTransform = CORINFO_NO_THIS_TRANSFORM;
CORINFO_CONTEXT_HANDLE exactContextHnd = nullptr;
bool exactContextNeedsRuntimeLookup = false;
bool canTailCall = true;
const char* szCanTailCallFailReason = nullptr;
const int tailCallFlags = (prefixFlags & PREFIX_TAILCALL);
const bool isReadonlyCall = (prefixFlags & PREFIX_READONLY) != 0;
methodPointerInfo* ldftnInfo = nullptr;
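// ldftnInfo is filled in further below when a delegate NEWOBJ is preceded by a
// recognized ldftn/ldvirtftn sequence; fgOptimizeDelegateConstructor consumes it.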
// Synchronized methods need to call CORINFO_HELP_MON_EXIT at the end. We could
// do that before tailcalls, but that is probably not the intended
// semantic. So just disallow tailcalls from synchronized methods.
// Also, popping arguments in a varargs function is more work and NYI
// If we have a security object, we have to keep our frame around for callers
// to see any imperative security.
// Reverse P/Invokes need a call to CORINFO_HELP_JIT_REVERSE_PINVOKE_EXIT
// at the end, so tailcalls should be disabled.
if (info.compFlags & CORINFO_FLG_SYNCH)
{
canTailCall = false;
szCanTailCallFailReason = "Caller is synchronized";
}
else if (opts.IsReversePInvoke())
{
canTailCall = false;
szCanTailCallFailReason = "Caller is Reverse P/Invoke";
}
#if !FEATURE_FIXED_OUT_ARGS
else if (info.compIsVarArgs)
{
canTailCall = false;
szCanTailCallFailReason = "Caller is varargs";
}
#endif // FEATURE_FIXED_OUT_ARGS
// We only need to cast the return value of pinvoke inlined calls that return small types
bool checkForSmallType = false;
bool bIntrinsicImported = false;
CORINFO_SIG_INFO calliSig;
NewCallArg extraArg;
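// extraArg holds an implicit trailing argument (the varargs cookie or the
// generic instantiation parameter). It is appended to the call's arg list
// only after the ordinary arguments have been popped, further below.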
/*-------------------------------------------------------------------------
* First create the call node
*/
if (opcode == CEE_CALLI)
{
if (IsTargetAbi(CORINFO_NATIVEAOT_ABI))
{
// See comment in impCheckForPInvokeCall
BasicBlock* block = compIsForInlining() ? impInlineInfo->iciBlock : compCurBB;
if (info.compCompHnd->convertPInvokeCalliToCall(pResolvedToken, !impCanPInvokeInlineCallSite(block)))
{
eeGetCallInfo(pResolvedToken, nullptr, CORINFO_CALLINFO_ALLOWINSTPARAM, callInfo);
return impImportCall(CEE_CALL, pResolvedToken, nullptr, nullptr, prefixFlags, callInfo, rawILOffset);
}
}
/* Get the call site sig */
eeGetSig(pResolvedToken->token, pResolvedToken->tokenScope, pResolvedToken->tokenContext, &calliSig);
callRetTyp = JITtype2varType(calliSig.retType);
call = impImportIndirectCall(&calliSig, di);
// We don't know the target method, so we have to infer the flags, or
// assume the worst-case.
mflags = (calliSig.callConv & CORINFO_CALLCONV_HASTHIS) ? 0 : CORINFO_FLG_STATIC;
#ifdef DEBUG
if (verbose)
{
unsigned structSize = (callRetTyp == TYP_STRUCT) ? eeTryGetClassSize(calliSig.retTypeSigClass) : 0;
printf("\nIn Compiler::impImportCall: opcode is %s, kind=%d, callRetType is %s, structSize is %u\n",
opcodeNames[opcode], callInfo->kind, varTypeName(callRetTyp), structSize);
}
#endif
sig = &calliSig;
}
else // (opcode != CEE_CALLI)
{
NamedIntrinsic ni = NI_Illegal;
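// ni is filled in by impIntrinsic below when the callee is recognized as a
// named intrinsic; it is consulted later, e.g. to drop the null check for
// Object.GetType.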
// Passing CORINFO_CALLINFO_ALLOWINSTPARAM indicates that this JIT is prepared to
// supply the instantiation parameters necessary to make direct calls to underlying
// shared generic code, rather than calling through instantiating stubs. If the
// returned signature has CORINFO_CALLCONV_PARAMTYPE then this indicates that the JIT
// must indeed pass an instantiation parameter.
methHnd = callInfo->hMethod;
sig = &(callInfo->sig);
callRetTyp = JITtype2varType(sig->retType);
mflags = callInfo->methodFlags;
#ifdef DEBUG
if (verbose)
{
unsigned structSize = (callRetTyp == TYP_STRUCT) ? eeTryGetClassSize(sig->retTypeSigClass) : 0;
printf("\nIn Compiler::impImportCall: opcode is %s, kind=%d, callRetType is %s, structSize is %u\n",
opcodeNames[opcode], callInfo->kind, varTypeName(callRetTyp), structSize);
}
#endif
if (compIsForInlining())
{
/* Does the inlinee use StackCrawlMark */
if (mflags & CORINFO_FLG_DONT_INLINE_CALLER)
{
compInlineResult->NoteFatal(InlineObservation::CALLEE_STACK_CRAWL_MARK);
return TYP_UNDEF;
}
/* For now ignore varargs */
if ((sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_NATIVEVARARG)
{
compInlineResult->NoteFatal(InlineObservation::CALLEE_HAS_NATIVE_VARARGS);
return TYP_UNDEF;
}
if ((sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_VARARG)
{
compInlineResult->NoteFatal(InlineObservation::CALLEE_HAS_MANAGED_VARARGS);
return TYP_UNDEF;
}
if ((mflags & CORINFO_FLG_VIRTUAL) && (sig->sigInst.methInstCount != 0) && (opcode == CEE_CALLVIRT))
{
compInlineResult->NoteFatal(InlineObservation::CALLEE_IS_GENERIC_VIRTUAL);
return TYP_UNDEF;
}
}
clsHnd = pResolvedToken->hClass;
clsFlags = callInfo->classFlags;
#ifdef DEBUG
// If this is a call to JitTestLabel.Mark, do "early inlining", and record the test attribute.
// This recognition should really be done by knowing the methHnd of the relevant Mark method(s).
// These should be in corelib.h, and available through a JIT/EE interface call.
const char* namespaceName;
const char* className;
const char* methodName =
info.compCompHnd->getMethodNameFromMetadata(methHnd, &className, &namespaceName, nullptr);
if ((namespaceName != nullptr) && (className != nullptr) && (methodName != nullptr) &&
(strcmp(namespaceName, "System.Runtime.CompilerServices") == 0) &&
(strcmp(className, "JitTestLabel") == 0) && (strcmp(methodName, "Mark") == 0))
{
return impImportJitTestLabelMark(sig->numArgs);
}
#endif // DEBUG
const bool isIntrinsic = (mflags & CORINFO_FLG_INTRINSIC) != 0;
// <NICE> Factor this into getCallInfo </NICE>
bool isSpecialIntrinsic = false;
if (isIntrinsic || !info.compMatchedVM)
{
// For mismatched VM (AltJit) we want to check all methods as intrinsic to ensure
// we get more accurate codegen. This particularly applies to HWIntrinsic usage.
const bool isTailCall = canTailCall && (tailCallFlags != 0);
call = impIntrinsic(newobjThis, clsHnd, methHnd, sig, mflags, pResolvedToken, isReadonlyCall, isTailCall,
opcode == CEE_CALLVIRT, pConstrainedResolvedToken, callInfo->thisTransform, &ni,
&isSpecialIntrinsic);
if (compDonotInline())
{
return TYP_UNDEF;
}
if (call != nullptr)
{
#ifdef FEATURE_READYTORUN
if (call->OperGet() == GT_INTRINSIC)
{
if (opts.IsReadyToRun())
{
noway_assert(callInfo->kind == CORINFO_CALL);
call->AsIntrinsic()->gtEntryPoint = callInfo->codePointerLookup.constLookup;
}
else
{
call->AsIntrinsic()->gtEntryPoint.addr = nullptr;
call->AsIntrinsic()->gtEntryPoint.accessType = IAT_VALUE;
}
}
#endif
bIntrinsicImported = true;
goto DONE_CALL;
}
}
if ((mflags & CORINFO_FLG_VIRTUAL) && (mflags & CORINFO_FLG_EnC) && (opcode == CEE_CALLVIRT))
{
NO_WAY("Virtual call to a function added via EnC is not supported");
}
if ((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_DEFAULT &&
(sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_VARARG &&
(sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_NATIVEVARARG)
{
BADCODE("Bad calling convention");
}
//-------------------------------------------------------------------------
// Construct the call node
//
// Work out what sort of call we're making.
// Dispense with virtual calls implemented via LDVIRTFTN immediately.
constraintCallThisTransform = callInfo->thisTransform;
exactContextHnd = callInfo->contextHandle;
exactContextNeedsRuntimeLookup = callInfo->exactContextNeedsRuntimeLookup;
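// callInfo->kind tells us how to dispatch the call: via a virtual stub, a
// vtable slot, LDVIRTFTN, a direct call, or a runtime-computed code pointer.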
switch (callInfo->kind)
{
case CORINFO_VIRTUALCALL_STUB:
{
assert(!(mflags & CORINFO_FLG_STATIC)); // can't call a static method
assert(!(clsFlags & CORINFO_FLG_VALUECLASS));
if (callInfo->stubLookup.lookupKind.needsRuntimeLookup)
{
if (callInfo->stubLookup.lookupKind.runtimeLookupKind == CORINFO_LOOKUP_NOT_SUPPORTED)
{
// Runtime does not support inlining of all shapes of runtime lookups
// Inlining has to be aborted in such a case
compInlineResult->NoteFatal(InlineObservation::CALLSITE_HAS_COMPLEX_HANDLE);
return TYP_UNDEF;
}
GenTree* stubAddr = impRuntimeLookupToTree(pResolvedToken, &callInfo->stubLookup, methHnd);
// stubAddr tree may require a new temp.
// If we're inlining, this may trigger the too many locals inline failure.
//
// If so, we need to bail out.
//
if (compDonotInline())
{
return TYP_UNDEF;
}
// This is the rough code to set up an indirect stub call
assert(stubAddr != nullptr);
// The stubAddr may be a
// complex expression. As it is evaluated after the args,
// it may cause registered args to be spilled. Simply spill it.
//
unsigned const lclNum = lvaGrabTemp(true DEBUGARG("VirtualCall with runtime lookup"));
if (compDonotInline())
{
return TYP_UNDEF;
}
impStoreTemp(lclNum, stubAddr, CHECK_SPILL_NONE);
stubAddr = gtNewLclvNode(lclNum, TYP_I_IMPL);
// Create the actual call node
assert((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_VARARG &&
(sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_NATIVEVARARG);
call = gtNewIndCallNode(stubAddr, callRetTyp);
call->gtFlags |= GTF_EXCEPT | (stubAddr->gtFlags & GTF_GLOB_EFFECT);
call->gtFlags |= GTF_CALL_VIRT_STUB;
#ifdef TARGET_X86
// No tailcalls allowed for these yet...
canTailCall = false;
szCanTailCallFailReason = "VirtualCall with runtime lookup";
#endif
}
else
{
// The stub address is known at compile time
call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, di);
call->AsCall()->gtStubCallStubAddr = callInfo->stubLookup.constLookup.addr;
call->gtFlags |= GTF_CALL_VIRT_STUB;
assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE &&
callInfo->stubLookup.constLookup.accessType != IAT_RELPVALUE);
if (callInfo->stubLookup.constLookup.accessType == IAT_PVALUE)
{
call->AsCall()->gtCallMoreFlags |= GTF_CALL_M_VIRTSTUB_REL_INDIRECT;
}
}
#ifdef FEATURE_READYTORUN
if (opts.IsReadyToRun())
{
// Null check is sometimes needed for ready to run to handle
// non-virtual <-> virtual changes between versions
if (callInfo->nullInstanceCheck)
{
call->gtFlags |= GTF_CALL_NULLCHECK;
}
}
#endif
break;
}
case CORINFO_VIRTUALCALL_VTABLE:
{
assert(!(mflags & CORINFO_FLG_STATIC)); // can't call a static method
assert(!(clsFlags & CORINFO_FLG_VALUECLASS));
call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, di);
call->gtFlags |= GTF_CALL_VIRT_VTABLE;
// Mark this method to expand the virtual call target early in fgMorphCall
call->AsCall()->SetExpandedEarly();
break;
}
case CORINFO_VIRTUALCALL_LDVIRTFTN:
{
if (compIsForInlining())
{
compInlineResult->NoteFatal(InlineObservation::CALLSITE_HAS_CALL_VIA_LDVIRTFTN);
return TYP_UNDEF;
}
assert(!(mflags & CORINFO_FLG_STATIC)); // can't call a static method
assert(!(clsFlags & CORINFO_FLG_VALUECLASS));
// OK, we've been told to call via LDVIRTFTN, so just
// take the call now....
call = gtNewIndCallNode(nullptr, callRetTyp, di);
impPopCallArgs(sig, call->AsCall());
GenTree* thisPtr = impPopStack().val;
thisPtr = impTransformThis(thisPtr, pConstrainedResolvedToken, callInfo->thisTransform);
assert(thisPtr != nullptr);
// Clone the (possibly transformed) "this" pointer
GenTree* thisPtrCopy;
thisPtr =
impCloneExpr(thisPtr, &thisPtrCopy, CHECK_SPILL_ALL, nullptr DEBUGARG("LDVIRTFTN this pointer"));
GenTree* fptr = impImportLdvirtftn(thisPtr, pResolvedToken, callInfo);
assert(fptr != nullptr);
call->AsCall()
->gtArgs.PushFront(this, NewCallArg::Primitive(thisPtrCopy).WellKnown(WellKnownArg::ThisPointer));
// Now make an indirect call through the function pointer
unsigned lclNum = lvaGrabTemp(true DEBUGARG("VirtualCall through function pointer"));
impStoreTemp(lclNum, fptr, CHECK_SPILL_ALL);
fptr = gtNewLclvNode(lclNum, TYP_I_IMPL);
call->AsCall()->gtCallAddr = fptr;
call->gtFlags |= GTF_EXCEPT | (fptr->gtFlags & GTF_GLOB_EFFECT);
if ((sig->sigInst.methInstCount != 0) && IsTargetAbi(CORINFO_NATIVEAOT_ABI))
{
// NativeAOT generic virtual method: need to handle potential fat function pointers
addFatPointerCandidate(call->AsCall());
}
#ifdef FEATURE_READYTORUN
if (opts.IsReadyToRun())
{
// Null check is needed for ready to run to handle
// non-virtual <-> virtual changes between versions
call->gtFlags |= GTF_CALL_NULLCHECK;
}
#endif
// Since we are jumping over some code, check that it's OK to skip that code
assert((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_VARARG &&
(sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_NATIVEVARARG);
goto DONE;
}
case CORINFO_CALL:
{
// This is for a non-virtual, non-interface etc. call
call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, di);
// We remove the nullcheck for the GetType call intrinsic.
// TODO-CQ: JIT64 does not introduce the null check for many more helper calls
// and intrinsics.
if (callInfo->nullInstanceCheck &&
!((mflags & CORINFO_FLG_INTRINSIC) != 0 && (ni == NI_System_Object_GetType)))
{
call->gtFlags |= GTF_CALL_NULLCHECK;
}
#ifdef FEATURE_READYTORUN
if (opts.IsReadyToRun())
{
call->AsCall()->setEntryPoint(callInfo->codePointerLookup.constLookup);
}
#endif
break;
}
case CORINFO_CALL_CODE_POINTER:
{
// The EE has asked us to call by computing a code pointer and then doing an
// indirect call. This is because a runtime lookup is required to get the code entry point.
// These calls always follow a uniform calling convention, i.e. no extra hidden params
assert((sig->callConv & CORINFO_CALLCONV_PARAMTYPE) == 0);
assert((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_VARARG);
assert((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_NATIVEVARARG);
GenTree* fptr =
impLookupToTree(pResolvedToken, &callInfo->codePointerLookup, GTF_ICON_FTN_ADDR, callInfo->hMethod);
if (compDonotInline())
{
return TYP_UNDEF;
}
// Now make an indirect call through the function pointer
unsigned lclNum = lvaGrabTemp(true DEBUGARG("Indirect call through function pointer"));
impStoreTemp(lclNum, fptr, CHECK_SPILL_ALL);
fptr = gtNewLclvNode(lclNum, TYP_I_IMPL);
call = gtNewIndCallNode(fptr, callRetTyp, di);
call->gtFlags |= GTF_EXCEPT | (fptr->gtFlags & GTF_GLOB_EFFECT);
if (callInfo->nullInstanceCheck)
{
call->gtFlags |= GTF_CALL_NULLCHECK;
}
break;
}
default:
assert(!"unknown call kind");
break;
}
//-------------------------------------------------------------------------
// Set more flags
PREFIX_ASSUME(call != nullptr);
if (mflags & CORINFO_FLG_NOGCCHECK)
{
call->AsCall()->gtCallMoreFlags |= GTF_CALL_M_NOGCCHECK;
}
// Mark call if it's one of the ones we will maybe treat as an intrinsic
if (isSpecialIntrinsic)
{
call->AsCall()->gtCallMoreFlags |= GTF_CALL_M_SPECIAL_INTRINSIC;
}
}
assert(sig);
assert(clsHnd || (opcode == CEE_CALLI)); // We're never verifying for CALLI, so this is not set.
/* Some sanity checks */
// CALL_VIRT and NEWOBJ must have a THIS pointer
assert((opcode != CEE_CALLVIRT && opcode != CEE_NEWOBJ) || (sig->callConv & CORINFO_CALLCONV_HASTHIS));
// static bit and hasThis are negations of one another
assert(((mflags & CORINFO_FLG_STATIC) != 0) == ((sig->callConv & CORINFO_CALLCONV_HASTHIS) == 0));
assert(call != nullptr);
/*-------------------------------------------------------------------------
* Check special-cases etc
*/
/* Special case - Check if it is a call to Delegate.Invoke(). */
if (mflags & CORINFO_FLG_DELEGATE_INVOKE)
{
assert(!(mflags & CORINFO_FLG_STATIC)); // can't call a static method
assert(mflags & CORINFO_FLG_FINAL);
/* Set the delegate flag */
call->AsCall()->gtCallMoreFlags |= GTF_CALL_M_DELEGATE_INV;
if (callInfo->wrapperDelegateInvoke)
{
call->AsCall()->gtCallMoreFlags |= GTF_CALL_M_WRAPPER_DELEGATE_INV;
}
if (opcode == CEE_CALLVIRT)
{
assert(mflags & CORINFO_FLG_FINAL);
/* It should have the GTF_CALL_NULLCHECK flag set. Reset it */
assert(call->gtFlags & GTF_CALL_NULLCHECK);
call->gtFlags &= ~GTF_CALL_NULLCHECK;
}
}
CORINFO_CLASS_HANDLE actualMethodRetTypeSigClass;
actualMethodRetTypeSigClass = sig->retTypeSigClass;
/* Check for varargs */
if (!compFeatureVarArg() && ((sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_VARARG ||
(sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_NATIVEVARARG))
{
BADCODE("Varargs not supported.");
}
if ((sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_VARARG ||
(sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_NATIVEVARARG)
{
assert(!compIsForInlining());
/* Set the right flags */
call->gtFlags |= GTF_CALL_POP_ARGS;
call->AsCall()->gtArgs.SetIsVarArgs();
/* Can't allow tailcall for varargs as it is caller-pop. The caller
will be expecting to pop a certain number of arguments, but if we
tailcall to a function with a different number of arguments, we
are hosed. There are ways around this (caller remembers esp value,
varargs is not caller-pop, etc), but not worth it. */
CLANG_FORMAT_COMMENT_ANCHOR;
#ifdef TARGET_X86
if (canTailCall)
{
canTailCall = false;
szCanTailCallFailReason = "Callee is varargs";
}
#endif
/* Get the total number of arguments - this is already correct
* for CALLI - for methods we have to get it from the call site */
if (opcode != CEE_CALLI)
{
#ifdef DEBUG
unsigned numArgsDef = sig->numArgs;
#endif
eeGetCallSiteSig(pResolvedToken->token, pResolvedToken->tokenScope, pResolvedToken->tokenContext, sig);
// For vararg calls we must be sure to load the return type of the
// method actually being called, as well as the return type
// specified in the vararg signature. With type equivalency, these types
// may not be the same.
if (sig->retTypeSigClass != actualMethodRetTypeSigClass)
{
if (actualMethodRetTypeSigClass != nullptr && sig->retType != CORINFO_TYPE_CLASS &&
sig->retType != CORINFO_TYPE_BYREF && sig->retType != CORINFO_TYPE_PTR &&
sig->retType != CORINFO_TYPE_VAR)
{
// Make sure that all valuetypes (including enums) that we push are loaded.
// This is to guarantee that if a GC is triggered from the prestub of this method,
// all valuetypes in the method signature are already loaded.
// We need to be able to find the size of the valuetypes, but we cannot
// do a class-load from within GC.
info.compCompHnd->classMustBeLoadedBeforeCodeIsRun(actualMethodRetTypeSigClass);
}
}
assert(numArgsDef <= sig->numArgs);
}
/* We will have "cookie" as the last argument but we cannot push
* it on the operand stack because we may overflow, so we append it
* to the arg list right after we pop them */
}
//--------------------------- Inline NDirect ------------------------------
// For inline cases we technically should look at both the current
// block and the call site block (or just the latter if we've
// fused the EH trees). However the block-related checks pertain to
// EH and we currently won't inline a method with EH. So for
// inlinees, just checking the call site block is sufficient.
{
// New lexical block here to avoid compilation errors because of GOTOs.
BasicBlock* block = compIsForInlining() ? impInlineInfo->iciBlock : compCurBB;
impCheckForPInvokeCall(call->AsCall(), methHnd, sig, mflags, block);
}
#ifdef UNIX_X86_ABI
// On Unix x86 we use the caller-cleaned calling convention.
if ((call->gtFlags & GTF_CALL_UNMANAGED) == 0)
call->gtFlags |= GTF_CALL_POP_ARGS;
#endif // UNIX_X86_ABI
if (call->gtFlags & GTF_CALL_UNMANAGED)
{
// We set up the unmanaged call by linking the frame, disabling GC, etc
// This needs to be cleaned up on return.
// In addition, native calls have different normalization rules than managed code
// (managed calling convention always widens return values in the callee)
if (canTailCall)
{
canTailCall = false;
szCanTailCallFailReason = "Callee is native";
}
checkForSmallType = true;
impPopArgsForUnmanagedCall(call->AsCall(), sig);
goto DONE;
}
else if ((opcode == CEE_CALLI) && ((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_DEFAULT) &&
((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_VARARG))
{
if (!info.compCompHnd->canGetCookieForPInvokeCalliSig(sig))
{
// Normally this only happens with inlining.
// However, a generic method (or type) being NGENd into another module
// can run into this issue as well. There's no easy fallback for NGEN,
// so instead we fall back to JIT.
if (compIsForInlining())
{
compInlineResult->NoteFatal(InlineObservation::CALLSITE_CANT_EMBED_PINVOKE_COOKIE);
}
else
{
IMPL_LIMITATION("Can't get PInvoke cookie (cross module generics)");
}
return TYP_UNDEF;
}
GenTree* cookie = eeGetPInvokeCookie(sig);
// This cookie is required to be either a simple GT_CNS_INT or
// an indirection of a GT_CNS_INT
//
GenTree* cookieConst = cookie;
if (cookie->gtOper == GT_IND)
{
cookieConst = cookie->AsOp()->gtOp1;
}
assert(cookieConst->gtOper == GT_CNS_INT);
// Setting GTF_DONT_CSE on the GT_CNS_INT as well as on the GT_IND (if it exists) will ensure that
// we won't allow this tree to participate in any CSE logic
//
cookie->gtFlags |= GTF_DONT_CSE;
cookieConst->gtFlags |= GTF_DONT_CSE;
call->AsCall()->gtCallCookie = cookie;
if (canTailCall)
{
canTailCall = false;
szCanTailCallFailReason = "PInvoke calli";
}
}
/*-------------------------------------------------------------------------
* Create the argument list
*/
//-------------------------------------------------------------------------
// Special case - for varargs we have an implicit last argument
if ((sig->callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_VARARG)
{
assert(!compIsForInlining());
void *varCookie, *pVarCookie;
if (!info.compCompHnd->canGetVarArgsHandle(sig))
{
compInlineResult->NoteFatal(InlineObservation::CALLSITE_CANT_EMBED_VARARGS_COOKIE);
return TYP_UNDEF;
}
varCookie = info.compCompHnd->getVarArgsHandle(sig, &pVarCookie);
assert((!varCookie) != (!pVarCookie));
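// The EE returns either the cookie itself or an indirection to it;
// exactly one of the two is non-null, as the assert above checks.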
GenTree* cookieNode = gtNewIconEmbHndNode(varCookie, pVarCookie, GTF_ICON_VARG_HDL, sig);
assert(extraArg.Node == nullptr);
extraArg = NewCallArg::Primitive(cookieNode).WellKnown(WellKnownArg::VarArgsCookie);
}
//-------------------------------------------------------------------------
// Extra arg for shared generic code and array methods
//
// Extra argument containing instantiation information is passed in the
// following circumstances:
// (a) To the "Address" method on array classes; the extra parameter is
// the array's type handle (a TypeDesc)
// (b) To shared-code instance methods in generic structs; the extra parameter
// is the struct's type handle (a vtable ptr)
// (c) To shared-code per-instantiation non-generic static methods in generic
// classes and structs; the extra parameter is the type handle
// (d) To shared-code generic methods; the extra parameter is an
// exact-instantiation MethodDesc
//
// We also set the exact type context associated with the call so we can
// inline the call correctly later on.
if (sig->callConv & CORINFO_CALLCONV_PARAMTYPE)
{
assert(call->AsCall()->gtCallType == CT_USER_FUNC);
if (clsHnd == nullptr)
{
NO_WAY("CALLI on parameterized type");
}
assert(opcode != CEE_CALLI);
GenTree* instParam;
bool runtimeLookup;
// Instantiated generic method
if (((SIZE_T)exactContextHnd & CORINFO_CONTEXTFLAGS_MASK) == CORINFO_CONTEXTFLAGS_METHOD)
{
assert(exactContextHnd != METHOD_BEING_COMPILED_CONTEXT());
CORINFO_METHOD_HANDLE exactMethodHandle =
(CORINFO_METHOD_HANDLE)((SIZE_T)exactContextHnd & ~CORINFO_CONTEXTFLAGS_MASK);
if (!exactContextNeedsRuntimeLookup)
{
#ifdef FEATURE_READYTORUN
if (opts.IsReadyToRun())
{
instParam =
impReadyToRunLookupToTree(&callInfo->instParamLookup, GTF_ICON_METHOD_HDL, exactMethodHandle);
if (instParam == nullptr)
{
assert(compDonotInline());
return TYP_UNDEF;
}
}
else
#endif
{
instParam = gtNewIconEmbMethHndNode(exactMethodHandle);
info.compCompHnd->methodMustBeLoadedBeforeCodeIsRun(exactMethodHandle);
}
}
else
{
instParam = impTokenToHandle(pResolvedToken, &runtimeLookup, true /*mustRestoreHandle*/);
if (instParam == nullptr)
{
assert(compDonotInline());
return TYP_UNDEF;
}
}
}
// otherwise must be an instance method in a generic struct,
// a static method in a generic type, or a runtime-generated array method
else
{
assert(((SIZE_T)exactContextHnd & CORINFO_CONTEXTFLAGS_MASK) == CORINFO_CONTEXTFLAGS_CLASS);
CORINFO_CLASS_HANDLE exactClassHandle = eeGetClassFromContext(exactContextHnd);
if (compIsForInlining() && (clsFlags & CORINFO_FLG_ARRAY) != 0)
{
compInlineResult->NoteFatal(InlineObservation::CALLEE_IS_ARRAY_METHOD);
return TYP_UNDEF;
}
if ((clsFlags & CORINFO_FLG_ARRAY) && isReadonlyCall)
{
// We indicate "readonly" to the Address operation by using a null
// instParam.
instParam = gtNewIconNode(0, TYP_REF);
}
else if (!exactContextNeedsRuntimeLookup)
{
#ifdef FEATURE_READYTORUN
if (opts.IsReadyToRun())
{
instParam =
impReadyToRunLookupToTree(&callInfo->instParamLookup, GTF_ICON_CLASS_HDL, exactClassHandle);
if (instParam == nullptr)
{
assert(compDonotInline());
return TYP_UNDEF;
}
}
else
#endif
{
instParam = gtNewIconEmbClsHndNode(exactClassHandle);
info.compCompHnd->classMustBeLoadedBeforeCodeIsRun(exactClassHandle);
}
}
else
{
instParam = impParentClassTokenToHandle(pResolvedToken, &runtimeLookup, true /*mustRestoreHandle*/);
if (instParam == nullptr)
{
assert(compDonotInline());
return TYP_UNDEF;
}
}
}
assert(extraArg.Node == nullptr);
extraArg = NewCallArg::Primitive(instParam).WellKnown(WellKnownArg::InstParam);
}
if ((opcode == CEE_NEWOBJ) && ((clsFlags & CORINFO_FLG_DELEGATE) != 0))
{
// Only verifiable cases are supported.
// dup; ldvirtftn; newobj; or ldftn; newobj.
// The IL could contain an unverifiable sequence; in that case the optimization is not done.
if (impStackHeight() > 0)
{
typeInfo delegateTypeInfo = impStackTop().seTypeInfo;
if (delegateTypeInfo.IsMethod())
{
ldftnInfo = delegateTypeInfo.GetMethodPointerInfo();
}
}
}
//-------------------------------------------------------------------------
// The main group of arguments
impPopCallArgs(sig, call->AsCall());
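// Now that the ordinary arguments have been popped, append the deferred
// extra argument (varargs cookie or instantiation parameter); its position
// in the list depends on the target's argument order.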
if (extraArg.Node != nullptr)
{
if (Target::g_tgtArgOrder == Target::ARG_ORDER_R2L)
{
call->AsCall()->gtArgs.PushFront(this, extraArg);
}
else
{
call->AsCall()->gtArgs.PushBack(this, extraArg);
}
call->gtFlags |= extraArg.Node->gtFlags & GTF_GLOB_EFFECT;
}
//-------------------------------------------------------------------------
// The "this" pointer
if (((mflags & CORINFO_FLG_STATIC) == 0) && ((sig->callConv & CORINFO_CALLCONV_EXPLICITTHIS) == 0) &&
!((opcode == CEE_NEWOBJ) && (newobjThis == nullptr)))
{
GenTree* obj;
if (opcode == CEE_NEWOBJ)
{
obj = newobjThis;
}
else
{
obj = impPopStack().val;
obj = impTransformThis(obj, pConstrainedResolvedToken, constraintCallThisTransform);
if (compDonotInline())
{
return TYP_UNDEF;
}
}
// Store the "this" value in the call
call->gtFlags |= obj->gtFlags & GTF_GLOB_EFFECT;
call->AsCall()->gtArgs.PushFront(this, NewCallArg::Primitive(obj).WellKnown(WellKnownArg::ThisPointer));
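// Record when a call passes the caller's own 'this' pointer as its 'this'
// argument; later phases can take advantage of this.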
if (impIsThis(obj))
{
call->AsCall()->gtCallMoreFlags |= GTF_CALL_M_NONVIRT_SAME_THIS;
}
}
bool probing;
probing = impConsiderCallProbe(call->AsCall(), rawILOffset);
// See if we can devirt if we aren't probing.
if (!probing && opts.OptimizationEnabled())
{
if (call->AsCall()->IsVirtual())
{
// only true object pointers can be virtual
assert(call->AsCall()->gtArgs.HasThisPointer() &&
call->AsCall()->gtArgs.GetThisArg()->GetNode()->TypeIs(TYP_REF));
// See if we can devirtualize.
const bool isExplicitTailCall = (tailCallFlags & PREFIX_TAILCALL_EXPLICIT) != 0;
const bool isLateDevirtualization = false;
impDevirtualizeCall(call->AsCall(), pResolvedToken, &callInfo->hMethod, &callInfo->methodFlags,
&callInfo->contextHandle, &exactContextHnd, isLateDevirtualization, isExplicitTailCall,
// Take care to pass raw IL offset here as the 'debug info' might be different for
// inlinees.
rawILOffset);
// Devirtualization may change which method gets invoked. Update our local cache.
//
methHnd = callInfo->hMethod;
}
else if (call->AsCall()->IsDelegateInvoke())
{
considerGuardedDevirtualization(call->AsCall(), rawILOffset, false, NO_METHOD_HANDLE, NO_CLASS_HANDLE,
nullptr);
}
}
//-------------------------------------------------------------------------
// The "this" pointer for "newobj"
if (opcode == CEE_NEWOBJ)
{
if (clsFlags & CORINFO_FLG_VAROBJSIZE)
{
assert(!(clsFlags & CORINFO_FLG_ARRAY)); // arrays handled separately
// This is a 'new' of a variable-sized object, where
// the constructor is to return the object. In this case
// the constructor claims to return VOID but we know it
// actually returns the new object
assert(callRetTyp == TYP_VOID);
callRetTyp = TYP_REF;
call->gtType = TYP_REF;
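// Spill any stack entries with special side effects before pushing the
// call itself onto the stack.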
impSpillSpecialSideEff();
impPushOnStack(call, typeInfo(clsHnd));
}
else
{
if (clsFlags & CORINFO_FLG_DELEGATE)
{
// The new inliner morphs it in impImportCall.
// This will allow us to inline the call to the delegate constructor.
call = fgOptimizeDelegateConstructor(call->AsCall(), &exactContextHnd, ldftnInfo);
}
if (!bIntrinsicImported)
{
#if defined(DEBUG) || defined(INLINE_DATA)
// Keep track of the raw IL offset of the call
call->AsCall()->gtRawILOffset = rawILOffset;
#endif // defined(DEBUG) || defined(INLINE_DATA)