@@ -109,12 +109,14 @@ NESTED_END RhpNewObject, _TEXT
 .macro NEW_ARRAY_FAST
         mov         r4, r0      // Save MethodTable
         mov         r5, r1      // Save element count
-        mov         r6, r2      // Save string size
+        mov         r6, r2      // Save string/array size
+
         // r0 = ee_alloc_context pointer; trashes volatile registers, expects saved lr
         INLINE_GET_ALLOC_CONTEXT
+
         // r4 == MethodTable
         // r5 == element count
-        // r6 == string size
+        // r6 == string/array size
         // r0 == ee_alloc_context*
 
         // Load potential new object address into r12.
@@ -123,7 +125,7 @@ NESTED_END RhpNewObject, _TEXT
         // Determine whether the end of the object would lie outside of the current allocation context. If so,
         // we abandon the attempt to allocate the object directly and fall back to the slow helper.
         adds        r6, r12
-        bcs         1f // if we get a carry here, the string is too large to fit below 4 GB
+        bcs         1f // if we get a carry here, the string/array is too large to fit below 4 GB
 
         ldr         r12, [r0, #OFFSETOF__ee_alloc_context__combined_limit]
         cmp         r6, r12
@@ -255,12 +257,16 @@ LEAF_ENTRY RhpNewObjectArrayFast, _TEXT
 
         MOV32       r2, (ASM_LARGE_OBJECT_SIZE - 256)/4 // sizeof(void*)
         cmp         r1, r2
-        bhs         C_FUNC(RhpNewArray)
+        bhs         LOCAL_LABEL(RhpNewObjectArrayFast_RarePath)
 
         ldr         r2, [r0, #OFFSETOF__MethodTable__m_uBaseSize]
         add         r2, r2, r1, lsl #2
 
         NEW_ARRAY_FAST
+
+LOCAL_LABEL(RhpNewObjectArrayFast_RarePath):
+        NEW_ARRAY_FAST_TAIL_EPILOG
+        b           C_FUNC(RhpNewArray)
 
 LEAF_END RhpNewObjectArrayFast, _TEXT
 #endif
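
The comments in NEW_ARRAY_FAST describe the usual bump-pointer fast path: add the requested size to the candidate object address, then bail out to the slow helper if the addition wraps (the object cannot fit below 4 GB on this 32-bit target) or if the end of the object lies past the allocation context's combined_limit. A minimal C sketch of that check follows; it is purely illustrative, and the struct layout, field names, and the header/length stores are assumptions, not the runtime's actual definitions.

/*
 * Illustrative-only sketch of the bump-pointer check performed by
 * NEW_ARRAY_FAST above.  Field names and layout are assumptions.
 */
#include <stddef.h>
#include <stdint.h>

typedef struct {
    uint8_t *alloc_ptr;       /* next free byte in the thread's allocation context */
    uint8_t *combined_limit;  /* end of the region usable without entering the GC  */
} ee_alloc_context_sketch;

/* Returns the new array's address, or NULL to signal "use the slow helper". */
void *try_alloc_array_fast(ee_alloc_context_sketch *ctx,
                           void *method_table,
                           uint32_t num_elements,
                           size_t total_size)
{
    uint8_t *obj = ctx->alloc_ptr;

    /* "adds r6, r12 / bcs 1f": reject if the end address wraps around,
       i.e. the array is too large to fit below 4 GB. */
    uintptr_t end = (uintptr_t)obj + total_size;
    if (end < (uintptr_t)obj)
        return NULL;

    /* "cmp r6, r12" against combined_limit: reject if the object would
       end outside the current allocation context. */
    if (end > (uintptr_t)ctx->combined_limit)
        return NULL;

    /* Bump the pointer and fill in the object header; the exact offsets
       used here are assumptions for the sake of the example. */
    ctx->alloc_ptr = (uint8_t *)end;
    ((void **)obj)[0] = method_table;     /* MethodTable pointer */
    ((uint32_t *)obj)[1] = num_elements;  /* array length slot   */
    return obj;
}

Both early returns correspond to the assembly abandoning the inline attempt and falling back to the slow, fully general allocation helper, as the comment in the second hunk states.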