// NOTE(review): removed GitHub page-scrape artifacts (repository boilerplate and
// rendered display line numbers 1..1000) that preceded the actual header content.
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
/*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XX
XX GenTree XX
XX XX
XX This is the node in the semantic tree graph. It represents the operation XX
XX corresponding to the node, and other information during code-gen. XX
XX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/
/*****************************************************************************/
#ifndef _GENTREE_H_
#define _GENTREE_H_
/*****************************************************************************/
#include <stdint.h> // For "uintptr_t" (FieldSeqNode::GetHashCode)

#include "vartype.h" // For "var_types"
#include "target.h" // For "regNumber"
#include "ssaconfig.h" // For "SsaConfig::RESERVED_SSA_NUM"
#include "reglist.h"
#include "valuenumtype.h"
#include "simplerhash.h"
#include "nodeinfo.h"
#include "simd.h"
// Debugging GenTree is much easier if we add a magic virtual function to make the debugger able to figure out what type it's got.
// This is enabled by default in DEBUG. To enable it in RET builds (temporarily!), you need to change the build to define DEBUGGABLE_GENTREE=1,
// as well as pass /OPT:NOICF to the linker (or else all the vtables get merged, making the debugging value supplied by them useless).
// See protojit.nativeproj for a commented example of setting the build flags correctly.
#ifndef DEBUGGABLE_GENTREE
#ifdef DEBUG
#define DEBUGGABLE_GENTREE 1
#else // !DEBUG
#define DEBUGGABLE_GENTREE 0
#endif // !DEBUG
#endif // !DEBUGGABLE_GENTREE
/*****************************************************************************/
// The set of tree-node operators. One enumerator is generated per GTNODE entry
// in "gtlist.h"; GT_COUNT marks the number of real operators.
DECLARE_TYPED_ENUM(genTreeOps,BYTE)
{
#define GTNODE(en,sn,cm,ok) en,
#include "gtlist.h"
GT_COUNT,
// GT_CNS_NATIVELONG is the gtOper symbol for GT_CNS_LNG or GT_CNS_INT, depending on the target.
// For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
// For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
// In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
//
#ifdef _TARGET_64BIT_
GT_CNS_NATIVELONG = GT_CNS_INT,
#else
GT_CNS_NATIVELONG = GT_CNS_LNG,
#endif
}
END_DECLARE_TYPED_ENUM(genTreeOps,BYTE)
/*****************************************************************************
*
* The following enum defines a set of bit flags that can be used
* to classify expression tree nodes. Note that some operators will
* have more than one bit set, as follows:
*
* GTK_CONST implies GTK_LEAF
* GTK_RELOP implies GTK_BINOP
* GTK_LOGOP implies GTK_BINOP
*/
// Bit flags classifying tree-node operators. A single operator may carry
// several bits (see the implication notes in the comment block above).
// GTK_KINDMASK covers only the classification bits, not GTK_COMMUTE/GTK_EXOP/GTK_LOCAL.
enum genTreeKinds
{
GTK_SPECIAL = 0x0000, // unclassified operator (special handling reqd)
GTK_CONST = 0x0001, // constant operator
GTK_LEAF = 0x0002, // leaf operator
GTK_UNOP = 0x0004, // unary operator
GTK_BINOP = 0x0008, // binary operator
GTK_RELOP = 0x0010, // comparison operator
GTK_LOGOP = 0x0020, // logical operator
GTK_ASGOP = 0x0040, // assignment operator
GTK_KINDMASK= 0x007F, // operator kind mask
GTK_COMMUTE = 0x0080, // commutative operator
GTK_EXOP = 0x0100, // Indicates that an oper for a node type that extends GenTreeOp (or GenTreeUnOp)
// by adding non-node fields to unary or binary operator.
GTK_LOCAL = 0x0200, // is a local access (load, store, phi)
/* Define composite value(s) */
// "Simple" operators: every unary/binary operator, including relational and logical ones.
GTK_SMPOP = (GTK_UNOP|GTK_BINOP|GTK_RELOP|GTK_LOGOP)
};
/*****************************************************************************/
#define SMALL_TREE_NODES 1
/*****************************************************************************/
// The kind of target a call node dispatches to.
DECLARE_TYPED_ENUM(gtCallTypes,BYTE)
{
CT_USER_FUNC, // User function
CT_HELPER, // Jit-helper
CT_INDIRECT, // Indirect call
CT_COUNT // fake entry (must be last)
}
END_DECLARE_TYPED_ENUM(gtCallTypes,BYTE)
/*****************************************************************************/
struct BasicBlock;
struct InlineCandidateInfo;
/*****************************************************************************/
// GT_FIELD nodes will be lowered into more "code-gen-able" representations, like
// GT_IND's of addresses, or GT_LCL_FLD nodes. We'd like to preserve the more abstract
// information, and will therefore annotate such lowered nodes with FieldSeq's. A FieldSeq
// represents a (possibly) empty sequence of fields. The fields are in the order
// in which they are dereferenced. The first field may be an object field or a struct field;
// all subsequent fields must be struct fields.
// A node in a (possibly empty) sequence of fields, in dereference order.
// The first field may be an object field or a struct field; all subsequent
// fields must be struct fields (see the comment block above).
struct FieldSeqNode
{
    CORINFO_FIELD_HANDLE m_fieldHnd; // the field this node represents
    FieldSeqNode*        m_next;     // next field in the sequence; terminates the list when null

    FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next) : m_fieldHnd(fieldHnd), m_next(next) {}

    // returns true when this is the pseudo #FirstElem field sequence
    bool IsFirstElemFieldSeq();
    // returns true when this is the pseudo #ConstantIndex field sequence
    bool IsConstantIndexFieldSeq();
    // returns true when this is the pseudo #FirstElem field sequence or the pseudo #ConstantIndex field sequence
    bool IsPseudoField();

    // Make sure this provides methods that allow it to be used as a KeyFuncs type in SimplerHash.
    static int GetHashCode(FieldSeqNode fsn)
    {
        // Combine the two pointer-sized members, then narrow. Going through
        // uintptr_t keeps this well-formed on 64-bit targets, where a direct
        // reinterpret_cast from a pointer to the (smaller) type 'int' is ill-formed.
        return static_cast<int>(reinterpret_cast<uintptr_t>(fsn.m_fieldHnd) ^
                                reinterpret_cast<uintptr_t>(fsn.m_next));
    }

    static bool Equals(FieldSeqNode fsn1, FieldSeqNode fsn2)
    {
        // Two nodes are equal iff both the field handle and the tail match.
        return fsn1.m_fieldHnd == fsn2.m_fieldHnd && fsn1.m_next == fsn2.m_next;
    }
};
// This class canonicalizes field sequences: equal sequences are represented by
// the same FieldSeqNode*, so sequences can be compared by pointer identity.
class FieldSeqStore
{
typedef SimplerHashTable<FieldSeqNode, /*KeyFuncs*/FieldSeqNode, FieldSeqNode*, DefaultSimplerHashBehavior> FieldSeqNodeCanonMap;
IAllocator* m_alloc;
FieldSeqNodeCanonMap* m_canonMap;
static FieldSeqNode s_notAField; // No value, just exists to provide an address.
// Dummy variables to provide the addresses for the "pseudo field handle" statics below.
static int FirstElemPseudoFieldStruct;
static int ConstantIndexPseudoFieldStruct;
public:
FieldSeqStore(IAllocator* alloc);
// Returns the (canonical in the store) singleton field sequence for the given handle.
FieldSeqNode* CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd);
// This is a special distinguished FieldSeqNode indicating that a constant does *not*
// represent a valid field sequence. This is "infectious", in the sense that appending it
// (on either side) to any field sequence yields the "NotAField()" sequence.
static FieldSeqNode* NotAField() { return &s_notAField; }
// Returns the (canonical in the store) field sequence representing the concatenation of
// the sequences represented by "a" and "b". Assumes that "a" and "b" are canonical; that is,
// they are the results of CreateSingleton, NotAField, or Append calls. If either of the arguments
// are the "NotAField" value, so is the result.
FieldSeqNode* Append(FieldSeqNode* a, FieldSeqNode* b);
// We have a few "pseudo" field handles:
// This treats the constant offset of the first element of something as if it were a field.
// Works for method table offsets of boxed structs, or first elem offset of arrays/strings.
static CORINFO_FIELD_HANDLE FirstElemPseudoField;
// If there is a constant index, we make a pseudo field to correspond to the constant added to
// offset of the indexed field. This keeps the field sequence structure "normalized", especially in the
// case where the element type is a struct, so we might add a further struct field offset.
static CORINFO_FIELD_HANDLE ConstantIndexPseudoField;
// True iff "hnd" is one of the two pseudo field handles above.
static bool IsPseudoField(CORINFO_FIELD_HANDLE hnd)
{
return hnd == FirstElemPseudoField || hnd == ConstantIndexPseudoField;
}
};
/*****************************************************************************/
typedef struct GenTree * GenTreePtr;
struct GenTreeArgList;
// Forward declarations of the subtypes
#define GTSTRUCT_0(fn, en) struct GenTree##fn;
#define GTSTRUCT_1(fn, en) struct GenTree##fn;
#define GTSTRUCT_2(fn, en, en2) struct GenTree##fn;
#define GTSTRUCT_3(fn, en, en2, en3) struct GenTree##fn;
#define GTSTRUCT_4(fn, en, en2, en3, en4) struct GenTree##fn;
#define GTSTRUCT_N(fn, ...) struct GenTree##fn;
#include "gtstructs.h"
/*****************************************************************************/
#ifndef _WIN64
#include <pshpack4.h>
#endif
struct GenTree
{
// We use GT_STRUCT_0 only for the category of simple ops.
#define GTSTRUCT_0(fn, en) GenTree##fn* As##fn() \
{ \
assert(this->OperIsSimple()); \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#define GTSTRUCT_1(fn, en) GenTree##fn* As##fn() \
{ \
assert(this->gtOper == en); \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#define GTSTRUCT_2(fn, en, en2) GenTree##fn* As##fn() \
{ \
assert(this->gtOper == en || this->gtOper == en2); \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#define GTSTRUCT_3(fn, en, en2, en3) GenTree##fn* As##fn() \
{ \
assert(this->gtOper == en || this->gtOper == en2 || this->gtOper == en3); \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#define GTSTRUCT_4(fn, en, en2, en3, en4) GenTree##fn* As##fn() \
{ \
assert(this->gtOper == en || this->gtOper == en2 || this->gtOper == en3 || this->gtOper == en4); \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#ifdef DEBUG
// VC does not optimize out this loop in retail even though the value it computes is unused
// so we need a separate version for non-debug
#define GTSTRUCT_N(fn, ...) GenTree##fn* As##fn() \
{ \
genTreeOps validOps[] = {__VA_ARGS__}; \
bool found = false; \
for (unsigned i=0; i<ArrLen(validOps); i++) { \
if (this->gtOper == validOps[i]) \
{ \
found = true; \
break; \
} \
} \
assert(found); \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#else
#define GTSTRUCT_N(fn, ...) GenTree##fn* As##fn() \
{ \
return reinterpret_cast<GenTree##fn*>(this); \
} \
GenTree##fn& As##fn##Ref() { return *As##fn(); } \
__declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
#endif
#include "gtstructs.h"
genTreeOps gtOper; // enum subtype BYTE
var_types gtType; // enum subtype BYTE
genTreeOps OperGet() const { return gtOper; }
var_types TypeGet() const { return gtType; }
#ifdef DEBUG
genTreeOps gtOperSave; // Only used to save gtOper when we destroy a node, to aid debugging.
#endif
#if FEATURE_ANYCSE
#define NO_CSE (0)
#define IS_CSE_INDEX(x) (x != 0)
#define IS_CSE_USE(x) (x > 0)
#define IS_CSE_DEF(x) (x < 0)
#define GET_CSE_INDEX(x) ((x > 0) ? x : -x)
#define TO_CSE_DEF(x) (-x)
signed char gtCSEnum; // 0 or the CSE index (negated if def)
// valid only for CSE expressions
#endif // FEATURE_ANYCSE
#if ASSERTION_PROP
unsigned char gtAssertionNum; // 0 or Assertion table index
// valid only for non-GT_STMT nodes
bool HasAssertion() const { return gtAssertionNum != 0; }
void ClearAssertion() { gtAssertionNum = 0; }
unsigned int GetAssertion() const { return gtAssertionNum; }
void SetAssertion(unsigned int value) { assert((unsigned char)value == value); gtAssertionNum = (unsigned char)value; }
#endif
#if FEATURE_STACK_FP_X87
unsigned char gtFPlvl; // x87 stack depth at this node
void gtCopyFPlvl(GenTree * other) { gtFPlvl = other->gtFPlvl; }
void gtSetFPlvl(unsigned level) { noway_assert(FitsIn<unsigned char>(level)); gtFPlvl = (unsigned char)level; }
#else // FEATURE_STACK_FP_X87
void gtCopyFPlvl(GenTree * other) { }
void gtSetFPlvl(unsigned level) { }
#endif // FEATURE_STACK_FP_X87
//
// Cost metrics on the node. Don't allow direct access to the variable for setting.
//
public:
#ifdef DEBUG
// You are not allowed to read the cost values before they have been set in gtSetEvalOrder().
// Keep track of whether the costs have been initialized, and assert if they are read before being initialized.
// Obviously, this information does need to be initialized when a node is created.
// This is public so the dumpers can see it.
bool gtCostsInitialized;
#endif // DEBUG
#define MAX_COST UCHAR_MAX
#define IND_COST_EX 3 // execution cost for an indirection
__declspec(property(get=GetCostEx))
unsigned char gtCostEx; // estimate of expression execution cost
__declspec(property(get=GetCostSz))
unsigned char gtCostSz; // estimate of expression code size cost
unsigned char GetCostEx() const { assert(gtCostsInitialized); return _gtCostEx; }
unsigned char GetCostSz() const { assert(gtCostsInitialized); return _gtCostSz; }
// Set the costs. They are always both set at the same time.
// Don't use the "put" property: force calling this function, to make it more obvious in the few places
// that set the values.
// Note that costs are only set in gtSetEvalOrder() and its callees.
void SetCosts(unsigned costEx, unsigned costSz)
{
assert(costEx != (unsigned)-1); // looks bogus
assert(costSz != (unsigned)-1); // looks bogus
INDEBUG(gtCostsInitialized = true;)
_gtCostEx = (costEx > MAX_COST) ? MAX_COST : (unsigned char)costEx;
_gtCostSz = (costSz > MAX_COST) ? MAX_COST : (unsigned char)costSz;
}
// Optimized copy function, to avoid the SetCosts() function comparisons, and make it more clear that a node copy is happening.
// Reads 'tree' through the gtCostEx/gtCostSz properties, whose getters assert
// that the source costs have been initialized.
void CopyCosts(const GenTree* const tree)
{
INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;) // If the 'tree' costs aren't initialized, we'll hit an assert below.
_gtCostEx = tree->gtCostEx;
_gtCostSz = tree->gtCostSz;
}
// Same as CopyCosts, but avoids asserts if the costs we are copying have not been initialized.
// This is because the importer, for example, clones nodes, before these costs have been initialized.
// Note that we directly access the 'tree' costs, not going through the accessor functions (either
// directly or through the properties).
void CopyRawCosts(const GenTree* const tree)
{
INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;)
_gtCostEx = tree->_gtCostEx;
_gtCostSz = tree->_gtCostSz;
}
private:
unsigned char _gtCostEx; // estimate of expression execution cost
unsigned char _gtCostSz; // estimate of expression code size cost
//
// Register or register pair number of the node.
//
#ifdef DEBUG
public:
enum genRegTag
{
GT_REGTAG_NONE, // Nothing has been assigned to _gtRegNum/_gtRegPair
GT_REGTAG_REG, // _gtRegNum has been assigned
GT_REGTAG_REGPAIR // _gtRegPair has been assigned
};
genRegTag GetRegTag() const
{
assert(gtRegTag == GT_REGTAG_NONE ||
gtRegTag == GT_REGTAG_REG ||
gtRegTag == GT_REGTAG_REGPAIR);
return gtRegTag;
}
private:
genRegTag gtRegTag; // What is in _gtRegNum/_gtRegPair?
#endif // DEBUG
private:
union
{
// NOTE: After LSRA, one of these values may be valid even if GTF_REG_VAL is not set in gtFlags.
// They store the register assigned to the node. If a register is not assigned, _gtRegNum is set to REG_NA
// or _gtRegPair is set to REG_PAIR_NONE, depending on the node type.
regNumberSmall _gtRegNum; // which register the value is in
regPairNoSmall _gtRegPair; // which register pair the value is in
};
public:
// The register number is stored in a small format (8 bits), but the getters return and the setters take
// a full-size (unsigned) format, to localize the casts here.
__declspec(property(get=GetRegNum,put=SetRegNum))
regNumber gtRegNum;
// for codegen purposes, is this node a subnode of its parent
bool isContained() const;
bool isContainedIndir() const;
bool isIndirAddrMode();
bool isIndir() const;
bool isContainedIntOrIImmed() const { return isContained() && IsCnsIntOrI(); }
bool isContainedFltOrDblImmed() const { return isContained() && (OperGet() == GT_CNS_DBL); }
bool isLclField() const { return OperGet() == GT_LCL_FLD || OperGet() == GT_STORE_LCL_FLD; }
bool isContainedLclField() const { return isContained() && isLclField(); }
// Indicates whether it is a memory op.
// Right now it includes Indir and LclField ops.
bool isMemoryOp() const { return isIndir() || isLclField(); }
bool isContainedMemoryOp() const { return isContained() && isMemoryOp(); }
// Returns the single register assigned to this node, widened from the small
// storage format. In DEBUG, the reg-tag must say a single register (or nothing)
// is what the union currently holds.
regNumber GetRegNum() const
{
assert((gtRegTag == GT_REGTAG_REG) ||
(gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
regNumber reg = (regNumber) _gtRegNum;
// Range-check only applies once a register has actually been assigned (tag != NONE).
assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
(reg >= REG_FIRST &&
reg <= REG_COUNT));
return reg;
}
// Stores a single register into the node, narrowing to the small storage format
// and (in DEBUG) tagging the union as holding a register, not a register pair.
void SetRegNum(regNumber reg)
{
assert(reg >= REG_FIRST &&
reg <= REG_COUNT);
// Make sure the upper bits of _gtRegPair are clear
_gtRegPair = (regPairNoSmall) 0;
_gtRegNum = (regNumberSmall) reg;
INDEBUG(gtRegTag = GT_REGTAG_REG;)
// Verify the narrowing round-trips losslessly.
assert(_gtRegNum == reg);
}
// Property view of the register pair, backed by GetRegPair/SetRegPair below.
__declspec(property(get=GetRegPair,put=SetRegPair))
regPairNo gtRegPair;
// Returns the register pair assigned to this node, widened from the small
// storage format. In DEBUG, the reg-tag must say a pair (or nothing) is stored.
regPairNo GetRegPair() const
{
assert((gtRegTag == GT_REGTAG_REGPAIR) ||
(gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
regPairNo regPair = (regPairNo) _gtRegPair;
assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
(regPair >= REG_PAIR_FIRST &&
regPair <= REG_PAIR_LAST) ||
(regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
return regPair;
}
// Stores a register pair into the node, narrowing to the small storage format
// and (in DEBUG) tagging the union as holding a pair, not a single register.
void SetRegPair(regPairNo regPair)
{
assert((regPair >= REG_PAIR_FIRST &&
regPair <= REG_PAIR_LAST) ||
(regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
_gtRegPair = (regPairNoSmall) regPair;
INDEBUG(gtRegTag = GT_REGTAG_REGPAIR;)
// Verify the narrowing round-trips losslessly.
assert(_gtRegPair == regPair);
}
// Copy the _gtRegNum/_gtRegPair/gtRegTag fields
void CopyReg(GenTreePtr from)
{
// To do the copy, use _gtRegPair, which must be bigger than _gtRegNum. Note that the values
// might be undefined (so gtRegTag == GT_REGTAG_NONE).
// Copying the wider union member copies both; the C_ASSERT guards that size assumption.
_gtRegPair = from->_gtRegPair;
C_ASSERT(sizeof(_gtRegPair) >= sizeof(_gtRegNum));
INDEBUG(gtRegTag = from->gtRegTag;)
}
void gtClearReg(Compiler* compiler);
// Returns true iff this node has been assigned a register (pair), i.e. the
// stored value is not the "not assigned" sentinel for this node's type.
bool gtHasReg() const
{
// Has the node been assigned a register by LSRA?
//
// In order for this to work properly, gtClearReg (above) must be called prior to setting
// the register value.
if (isRegPairType(TypeGet()))
{
// NOTE(review): each branch asserts _gtRegNum against the sentinel of the *other*
// representation (REG_NA here, REG_PAIR_NONE below). This appears to rely on how
// gtClearReg initializes the union -- confirm against gtClearReg's definition.
assert(_gtRegNum != REG_NA);
INDEBUG(assert(gtRegTag == GT_REGTAG_REGPAIR));
return gtRegPair != REG_PAIR_NONE;
}
else
{
assert(_gtRegNum != REG_PAIR_NONE);
INDEBUG(assert(gtRegTag == GT_REGTAG_REG));
return gtRegNum != REG_NA;
}
}
// Returns the mask of the register(s) assigned to this node: a pair mask for
// register-pair-typed nodes, otherwise the mask of the single assigned register.
regMaskTP gtGetRegMask() const
{
    return isRegPairType(TypeGet()) ? genRegPairMask(gtRegPair)
                                    : genRegMask(gtRegNum);
}
unsigned gtFlags; // see GTF_xxxx below
ValueNumPair gtVNPair;
regMaskSmall gtRsvdRegs; // set of fixed trashed registers
#ifdef LEGACY_BACKEND
regMaskSmall gtUsedRegs; // set of used (trashed) registers
#endif // LEGACY_BACKEND
#ifndef LEGACY_BACKEND
TreeNodeInfo gtLsraInfo;
#endif // !LEGACY_BACKEND
// Copies the (liberal, conservative) value-number pair from "tree" onto this node.
void SetVNsFromNode(GenTreePtr tree)
{
gtVNPair = tree->gtVNPair;
}
ValueNum GetVN(ValueNumKind vnk) const
{
if (vnk == VNK_Liberal)
{
return gtVNPair.GetLiberal();
}
else
{
assert(vnk == VNK_Conservative);
return gtVNPair.GetConservative();
}
}
// Sets the liberal or conservative value number of this node, selected by "vnk".
void SetVN(ValueNumKind vnk, ValueNum vn)
{
    if (vnk == VNK_Liberal)
    {
        gtVNPair.SetLiberal(vn);
    }
    else
    {
        assert(vnk == VNK_Conservative);
        gtVNPair.SetConservative(vn);
    }
}
// Resets both value numbers of this node to the default-constructed pair.
void ClearVN()
{
gtVNPair = ValueNumPair(); // Initializes both elements to "NoVN".
}
//---------------------------------------------------------------------
// The first set of flags can be used with a large set of nodes, and
// thus they must all have distinct values. That is, one can test any
// expression node for one of these flags.
//---------------------------------------------------------------------
#define GTF_ASG 0x00000001 // sub-expression contains an assignment
#define GTF_CALL 0x00000002 // sub-expression contains a func. call
#define GTF_EXCEPT 0x00000004 // sub-expression might throw an exception
#define GTF_GLOB_REF 0x00000008 // sub-expression uses global variable(s)
#define GTF_ORDER_SIDEEFF 0x00000010 // sub-expression has a re-ordering side effect
// If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree,
// otherwise the C# (run csc /o-)
// var v = side_eff_operation
// with no use of v will drop your tree on the floor.
#define GTF_PERSISTENT_SIDE_EFFECTS (GTF_ASG|GTF_CALL)
#define GTF_SIDE_EFFECT (GTF_PERSISTENT_SIDE_EFFECTS|GTF_EXCEPT)
#define GTF_GLOB_EFFECT (GTF_SIDE_EFFECT|GTF_GLOB_REF)
#define GTF_ALL_EFFECT (GTF_GLOB_EFFECT|GTF_ORDER_SIDEEFF)
// The extra flag GTF_DEAD is used to tell the consumer of these flags
// that we are calling in the context of performing a CSE, thus we
// should allow the run-once side effects of running a class constructor.
//
#define GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE (GTF_ASG|GTF_CALL|GTF_DEAD)
// Can any side-effects be observed externally, say by a caller method?
// For assignments, only assignments to global memory can be observed
// externally, whereas simple assignments to local variables can not.
//
// Be careful when using this inside a "try" protected region as the
// order of assignments to local variables would need to be preserved
// wrt side effects if the variables are alive on entry to the
// "catch/finally" region. In such cases, even assignments to locals
// will have to be restricted.
#define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \
(((flags) & (GTF_CALL|GTF_EXCEPT)) || \
(((flags) & (GTF_ASG|GTF_GLOB_REF)) == (GTF_ASG|GTF_GLOB_REF)))
#define GTF_REVERSE_OPS 0x00000020 // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second)
#define GTF_REG_VAL 0x00000040 // operand is sitting in a register (or part of a TYP_LONG operand is sitting in a register)
#define GTF_SPILLED 0x00000080 // the value has been spilled
#define GTF_SPILLED_OPER 0x00000100 // op1 has been spilled
#ifdef LEGACY_BACKEND
#define GTF_SPILLED_OP2 0x00000200 // op2 has been spilled
#endif // LEGACY_BACKEND
#ifdef DEBUG
#ifndef LEGACY_BACKEND
#define GTF_MORPHED 0x00000200 // the node has been morphed (in the global morphing phase)
#else // LEGACY_BACKEND
// For LEGACY_BACKEND, 0x00000200 is in use, but we can use the same value as GTF_SPILLED since we
// don't call gtSetEvalOrder(), which clears GTF_MORPHED, after GTF_SPILLED has been set.
#define GTF_MORPHED 0x00000080 // the node has been morphed (in the global morphing phase)
#endif // LEGACY_BACKEND
#endif // DEBUG
#define GTF_REDINDEX_CHECK 0x00000100 // Used for redundant range checks. Disjoint from GTF_SPILLED_OPER
#define GTF_ZSF_SET 0x00000400 // the zero(ZF) and sign(SF) flags set to the operand
#if FEATURE_SET_FLAGS
#define GTF_SET_FLAGS 0x00000800 // Requires that codegen for this node set the flags
// Use gtSetFlags() to check this flags
#endif
#define GTF_IND_NONFAULTING 0x00000800 // An indir that cannot fault. GTF_SET_FLAGS is not used on indirs
#if FEATURE_ANYCSE
#define GTF_DEAD 0x00001000 // this node won't be used any more
#endif // FEATURE_ANYCSE
#define GTF_MAKE_CSE 0x00002000 // Hoisted Expression: try hard to make this into CSE (see optPerformHoistExpr)
#define GTF_DONT_CSE 0x00004000 // don't bother CSE'ing this expr
#define GTF_COLON_COND 0x00008000 // this node is conditionally executed (part of ? :)
#if defined(DEBUG) && SMALL_TREE_NODES
#define GTF_NODE_LARGE 0x00010000
#define GTF_NODE_SMALL 0x00020000
// Property of the node itself, not the gtOper
#define GTF_NODE_MASK (GTF_COLON_COND | GTF_MORPHED | GTF_NODE_SMALL | GTF_NODE_LARGE )
#else
#define GTF_NODE_MASK (GTF_COLON_COND)
#endif
#define GTF_BOOLEAN 0x00040000 // value is known to be 0/1
#define GTF_SMALL_OK 0x00080000 // actual small int sufficient
#define GTF_UNSIGNED 0x00100000 // with GT_CAST: the source operand is an unsigned type
// with operators: the specified node is an unsigned operator
#define GTF_LATE_ARG 0x00200000 // the specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs.
#define GTF_SPILL 0x00400000 // needs to be spilled here
#define GTF_SPILL_HIGH 0x00040000 // shared with GTF_BOOLEAN
#define GTF_COMMON_MASK 0x007FFFFF // mask of all the flags above
#define GTF_REUSE_REG_VAL 0x00800000 // This is set by the register allocator on nodes whose value already exists in the
// register assigned to this node, so the code generator does not have to generate
// code to produce the value.
// It is currently used only on constant nodes.
// It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since
// 1) it is not needed for lclVars and is highly unlikely to be useful for indir nodes, and
// 2) it conflicts with GTFD_VAR_CSE_REF for lclVars (though this is debug only, and
// GTF_IND_ARR_INDEX for indirs.
//---------------------------------------------------------------------
// The following flags can be used only with a small set of nodes, and
// thus their values need not be distinct (other than within the set
// that goes with a particular node/nodes, of course). That is, one can
// only test for one of these flags if the 'gtOper' value is tested as
// well to make sure it's the right operator for the particular flag.
//---------------------------------------------------------------------
// NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags, because
// GT_LCL_VAR nodes may be changed to GT_REG_VAR nodes without resetting
// the flags. These are also used by GT_LCL_FLD.
#define GTF_VAR_DEF 0x80000000 // GT_LCL_VAR -- this is a definition
#define GTF_VAR_USEASG 0x40000000 // GT_LCL_VAR -- this is a use/def for a x<op>=y
#define GTF_VAR_USEDEF 0x20000000 // GT_LCL_VAR -- this is a use/def as in x=x+y (only the lhs x is tagged)
#define GTF_VAR_CAST 0x10000000 // GT_LCL_VAR -- has been explictly cast (variable node may not be type of local)
#define GTF_VAR_ITERATOR 0x08000000 // GT_LCL_VAR -- this is a iterator reference in the loop condition
#define GTF_VAR_CLONED 0x01000000 // GT_LCL_VAR -- this node has been cloned or is a clone
// Relevant for inlining optimizations (see fgInlinePrependStatements)
// TODO-Cleanup: Currently, GTF_REG_BIRTH is used only by stackfp
// We should consider using it more generally for VAR_BIRTH, instead of
// GTF_VAR_DEF && !GTF_VAR_USEASG
#define GTF_REG_BIRTH 0x04000000 // GT_REG_VAR -- enregistered variable born here
#define GTF_VAR_DEATH 0x02000000 // GT_LCL_VAR, GT_REG_VAR -- variable dies here (last use)
#define GTF_VAR_ARR_INDEX 0x00000020 // The variable is part of (the index portion of) an array index expression.
// Shares a value with GTF_REVERSE_OPS, which is meaningless for local var.
#define GTF_LIVENESS_MASK (GTF_VAR_DEF|GTF_VAR_USEASG|GTF_VAR_USEDEF|GTF_REG_BIRTH|GTF_VAR_DEATH)
// Flags peculiar to GT_CALL nodes:
#define GTF_CALL_UNMANAGED 0x80000000 // GT_CALL -- direct call to unmanaged code
#define GTF_CALL_INLINE_CANDIDATE 0x40000000 // GT_CALL -- this call has been marked as an inline candidate
//
// Two-bit field encoding the virtual-dispatch kind of the call:
#define GTF_CALL_VIRT_KIND_MASK 0x30000000
#define GTF_CALL_NONVIRT 0x00000000 // GT_CALL -- a non virtual call
#define GTF_CALL_VIRT_STUB 0x10000000 // GT_CALL -- a stub-dispatch virtual call
#define GTF_CALL_VIRT_VTABLE 0x20000000 // GT_CALL -- a vtable-based virtual call
#define GTF_CALL_NULLCHECK 0x08000000 // GT_CALL -- must check instance pointer for null
#define GTF_CALL_POP_ARGS 0x04000000 // GT_CALL -- caller pop arguments?
#define GTF_CALL_HOISTABLE 0x02000000 // GT_CALL -- call is hoistable
#define GTF_CALL_REG_SAVE 0x01000000 // GT_CALL -- This call preserves all integer regs
// For additional flags for GT_CALL node see GTF_CALL_M_
#ifdef DEBUG
#define GTFD_VAR_CSE_REF 0x00800000 // GT_LCL_VAR -- This is a CSE LCL_VAR node (DEBUG-only flag)
#endif
#define GTF_NOP_DEATH 0x40000000 // GT_NOP -- operand dies here
#define GTF_FLD_NULLCHECK 0x80000000 // GT_FIELD -- need to nullcheck the "this" pointer
#define GTF_FLD_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
#define GTF_INX_RNGCHK 0x80000000 // GT_INDEX -- the array reference should be range-checked.
#define GTF_INX_REFARR_LAYOUT 0x20000000 // GT_INDEX -- same as GTF_IND_REFARR_LAYOUT
#define GTF_INX_STRING_LAYOUT 0x40000000 // GT_INDEX -- this uses the special string array layout
// Flags peculiar to indirection nodes (GT_IND):
#define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
#define GTF_IND_REFARR_LAYOUT 0x20000000 // GT_IND -- the array holds object refs (only affects layout of Arrays)
#define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere
#define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS
#define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an assignment; don't evaluate it independently.
#define GTF_IND_UNALIGNED 0x02000000 // GT_IND -- the load or store is unaligned (we assume worst case alignment of 1 byte)
#define GTF_IND_INVARIANT 0x01000000 // GT_IND -- the target is invariant (a prejit indirection)
#define GTF_IND_ARR_LEN 0x80000000 // GT_IND -- the indirection represents an array length (of the REF contribution to its argument).
#define GTF_IND_ARR_INDEX 0x00800000 // GT_IND -- the indirection represents an (SZ) array index (this shares the same value as GTFD_VAR_CSE_REF,
// but is disjoint because a GT_LCL_VAR is never an ind (GT_IND or GT_STOREIND))
// Mask of the GT_IND flags above (note: GTF_IND_ASG_LHS and GTF_IND_ARR_LEN are not included).
#define GTF_IND_FLAGS (GTF_IND_VOLATILE|GTF_IND_REFARR_LAYOUT|GTF_IND_TGTANYWHERE|GTF_IND_NONFAULTING|\
GTF_IND_TLS_REF|GTF_IND_UNALIGNED|GTF_IND_INVARIANT|GTF_IND_ARR_INDEX)
#define GTF_CLS_VAR_ASG_LHS 0x04000000 // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS of an assignment; don't evaluate it independently.
#define GTF_ADDR_ONSTACK 0x80000000 // GT_ADDR -- this expression is guaranteed to be on the stack
#define GTF_ADDRMODE_NO_CSE 0x80000000 // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex addressing mode
#define GTF_MUL_64RSLT 0x40000000 // GT_MUL -- produce 64-bit result
#define GTF_MOD_INT_RESULT 0x80000000 // GT_MOD, -- the real tree represented by this
// GT_UMOD node evaluates to an int even though
// its type is long. The result is
// placed in the low member of the
// reg pair
// Flags peculiar to relational comparison operators:
#define GTF_RELOP_NAN_UN 0x80000000 // GT_<relop> -- Is branch taken if ops are NaN?
#define GTF_RELOP_JMP_USED 0x40000000 // GT_<relop> -- result of compare used for jump or ?:
#define GTF_RELOP_QMARK 0x20000000 // GT_<relop> -- the node is the condition for ?:
#define GTF_RELOP_SMALL 0x10000000 // GT_<relop> -- We should use a byte or short sized compare (op1->gtType is the small type)
#define GTF_QMARK_CAST_INSTOF 0x80000000 // GT_QMARK -- Is this a top (not nested) level qmark created for castclass or instanceof?
#define GTF_BOX_VALUE 0x80000000 // GT_BOX -- "box" is on a value type
// Handle kinds for GT_CNS_INT nodes, encoded in the GTF_ICON_HDL_MASK bits:
#define GTF_ICON_HDL_MASK 0xF0000000 // Bits used by handle types below
#define GTF_ICON_SCOPE_HDL 0x10000000 // GT_CNS_INT -- constant is a scope handle
#define GTF_ICON_CLASS_HDL 0x20000000 // GT_CNS_INT -- constant is a class handle
#define GTF_ICON_METHOD_HDL 0x30000000 // GT_CNS_INT -- constant is a method handle
#define GTF_ICON_FIELD_HDL 0x40000000 // GT_CNS_INT -- constant is a field handle
#define GTF_ICON_STATIC_HDL 0x50000000 // GT_CNS_INT -- constant is a handle to static data
#define GTF_ICON_STR_HDL 0x60000000 // GT_CNS_INT -- constant is a string handle
#define GTF_ICON_PSTR_HDL 0x70000000 // GT_CNS_INT -- constant is a ptr to a string handle
#define GTF_ICON_PTR_HDL 0x80000000 // GT_CNS_INT -- constant is a ldptr handle
#define GTF_ICON_VARG_HDL 0x90000000 // GT_CNS_INT -- constant is a var arg cookie handle
#define GTF_ICON_PINVKI_HDL 0xA0000000 // GT_CNS_INT -- constant is a pinvoke calli handle
#define GTF_ICON_TOKEN_HDL 0xB0000000 // GT_CNS_INT -- constant is a token handle
#define GTF_ICON_TLS_HDL 0xC0000000 // GT_CNS_INT -- constant is a TLS ref with offset
#define GTF_ICON_FTN_ADDR 0xD0000000 // GT_CNS_INT -- constant is a function address
#define GTF_ICON_CIDMID_HDL 0xE0000000 // GT_CNS_INT -- constant is a class or module ID handle
#define GTF_ICON_BBC_PTR 0xF0000000 // GT_CNS_INT -- constant is a basic block count pointer
#define GTF_ICON_FIELD_OFF 0x08000000 // GT_CNS_INT -- constant is a field offset (separate bit, outside GTF_ICON_HDL_MASK)
#define GTF_BLK_HASGCPTR 0x80000000 // GT_COPYBLK -- This struct copy will copy GC Pointers
#define GTF_BLK_VOLATILE 0x40000000 // GT_INITBLK/GT_COPYBLK -- is a volatile block operation
#define GTF_BLK_UNALIGNED 0x02000000 // GT_INITBLK/GT_COPYBLK -- is an unaligned block operation
#define GTF_OVERFLOW 0x10000000 // GT_ADD, GT_SUB, GT_MUL, - Need overflow check
// GT_ASG_ADD, GT_ASG_SUB,
// GT_CAST
// Use gtOverflow(Ex)() to check this flag
#define GTF_NO_OP_NO 0x80000000 // GT_NO_OP -- Have the code generator generate a special nop
//----------------------------------------------------------------
// Flags peculiar to statement nodes (GT_STMT):
#define GTF_STMT_CMPADD 0x80000000 // GT_STMT -- added by compiler
#define GTF_STMT_HAS_CSE 0x40000000 // GT_STMT -- CSE def or use was substituted
#define GTF_STMT_TOP_LEVEL 0x20000000 // GT_STMT -- Top-level statement - true iff gtStmtList->gtPrev == nullptr
// True for all stmts when in FGOrderTree
#define GTF_STMT_SKIP_LOWER 0x10000000 // GT_STMT -- Skip lowering if we already lowered an embedded stmt.
//----------------------------------------------------------------
// Intrusive links to the neighboring nodes in a doubly-linked list of nodes
// (NOTE(review): presumably execution/traversal order within the statement --
// confirm against the code that builds the list; see gtSeqNum below).
GenTreePtr gtNext;
GenTreePtr gtPrev;
#ifdef DEBUG
unsigned gtTreeID; // node identifier, DEBUG builds only
unsigned gtSeqNum; // liveness traversal order within the current statement
#endif
// Table mapping each operator (indexed by its genTreeOps value) to its
// GTK_* operator-kind bits; the definition lives outside this header.
static
const unsigned short gtOperKindTable[];
static
unsigned OperKind(unsigned gtOper)
{
    // Fetch the GTK_* operator-kind bits for 'gtOper' from the kind table.
    assert(gtOper < GT_COUNT);
    unsigned kind = gtOperKindTable[gtOper];
    return kind;
}
unsigned OperKind() const
{
    // Kind bits for this node's own operator; delegate to the static overload
    // (which performs the same bounds assert and table lookup).
    return OperKind((unsigned)gtOper);
}
static bool IsExOp(unsigned opKind)
{
    // An "extended" operator kind carries the GTK_EXOP bit on top of its
    // base unary/binary kind.
    bool hasExBit = (opKind & GTK_EXOP) != 0;
    return hasExBit;
}
// Returns the operKind with the GTK_EXOP bit removed (the
// kind of operator, unary or binary, that is extended).
static unsigned StripExOp(unsigned opKind)
{
    unsigned strippedKind = opKind & ~GTK_EXOP;
    return strippedKind;
}
static
int OperIsConst(genTreeOps gtOper)
{
    // Non-zero iff the operator is a constant node (GTK_CONST kind bit set).
    unsigned kind = OperKind(gtOper);
    return (kind & GTK_CONST) ? 1 : 0;
}
int OperIsConst() const
{
    // Constant-ness of this node's own operator.
    return (OperKind() & GTK_CONST) ? 1 : 0;
}
static
int OperIsLeaf(genTreeOps gtOper)
{
    // Non-zero iff the operator is a leaf node (GTK_LEAF kind bit set).
    unsigned kind = OperKind(gtOper);
    return (kind & GTK_LEAF) ? 1 : 0;
}
int OperIsLeaf() const
{
    // Leaf-ness of this node's own operator.
    return (OperKind() & GTK_LEAF) ? 1 : 0;
}
static
int OperIsCompare(genTreeOps gtOper)
{
    // Non-zero iff the operator is a relational comparison (GTK_RELOP kind bit set).
    unsigned kind = OperKind(gtOper);
    return (kind & GTK_RELOP) ? 1 : 0;
}
static
bool OperIsLocal(genTreeOps gtOper)
{
    // True for any flavor of local node: LCL_VAR/LCL_FLD (load and store
    // forms), REG_VAR, and PHI_ARG.
    bool isLocal = (OperKind(gtOper) & GTK_LOCAL) != 0;
    // Cross-check the kind table against the explicit operator list.
    bool inList  = (gtOper == GT_LCL_VAR       ||
                    gtOper == GT_PHI_ARG       ||
                    gtOper == GT_REG_VAR       ||
                    gtOper == GT_LCL_FLD       ||
                    gtOper == GT_STORE_LCL_VAR ||
                    gtOper == GT_STORE_LCL_FLD);
    assert(isLocal == inList);
    return isLocal;
}
static
bool OperIsBlkOp(genTreeOps gtOper)
{
    // True for the block init/copy operators.
    switch (gtOper)
    {
    case GT_INITBLK:
    case GT_COPYBLK:
    case GT_COPYOBJ:
        return true;
    default:
        return false;
    }
}
static
bool OperIsCopyBlkOp(genTreeOps gtOper)
{
    // True for the block-copy operators (excludes GT_INITBLK).
    switch (gtOper)
    {
    case GT_COPYOBJ:
    case GT_COPYBLK:
        return true;
    default:
        return false;
    }
}
static
bool OperIsLocalAddr(genTreeOps gtOper)
{
    // True for the address-of-local operators.
    switch (gtOper)
    {
    case GT_LCL_VAR_ADDR:
    case GT_LCL_FLD_ADDR:
        return true;
    default:
        return false;
    }
}
static
bool OperIsScalarLocal(genTreeOps gtOper)
{
    // True for whole-variable local nodes (excludes the LCL_FLD forms and PHI_ARG).
    switch (gtOper)
    {
    case GT_LCL_VAR:
    case GT_REG_VAR:
    case GT_STORE_LCL_VAR:
        return true;
    default:
        return false;
    }
}
static
bool OperIsNonPhiLocal(genTreeOps gtOper)
{
    // Any local node except a PHI argument.
    if (gtOper == GT_PHI_ARG)
    {
        return false;
    }
    return OperIsLocal(gtOper);
}
static
bool OperIsLocalRead(genTreeOps gtOper)
{
    // A local node that is not one of the store forms.
    if (!OperIsLocal(gtOper))
    {
        return false;
    }
    return !OperIsLocalStore(gtOper);
}
static
bool OperIsLocalStore(genTreeOps gtOper)
{
    // True for the store forms of local nodes.
    switch (gtOper)
    {
    case GT_STORE_LCL_VAR:
    case GT_STORE_LCL_FLD:
        return true;
    default:
        return false;
    }
}
static
bool OperIsAddrMode(genTreeOps gtOper)
{
    // Only GT_LEA nodes represent an addressing mode.
    bool isLea = (gtOper == GT_LEA);
    return isLea;
}
bool OperIsBlkOp() const
{
return OperIsBlkOp(OperGet());
}