@@ -18,9 +18,9 @@ use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter};
1818use crate :: errors;
1919use crate :: interpret:: eval_nullary_intrinsic;
2020use crate :: interpret:: {
21- intern_const_alloc_recursive, Allocation , ConstAlloc , ConstValue , CtfeValidationMode , GlobalId ,
22- Immediate , InternKind , InterpCx , InterpError , InterpResult , MPlaceTy , MemoryKind , OpTy ,
23- RefTracking , StackPopCleanup ,
21+ intern_const_alloc_recursive, ConstAlloc , ConstValue , CtfeValidationMode , GlobalId , Immediate ,
22+ InternKind , InterpCx , InterpError , InterpResult , MPlaceTy , MemoryKind , OpTy , RefTracking ,
23+ StackPopCleanup ,
2424} ;
2525
2626// Returns a pointer to where the result lives
@@ -105,91 +105,68 @@ pub(super) fn mk_eval_cx<'mir, 'tcx>(
105105 )
106106}
107107
108- /// This function converts an interpreter value into a constant that is meant for use in the
109- /// type system.
108+ /// This function converts an interpreter value into a MIR constant.
110109#[ instrument( skip( ecx) , level = "debug" ) ]
111110pub ( super ) fn op_to_const < ' tcx > (
112111 ecx : & CompileTimeEvalContext < ' _ , ' tcx > ,
113112 op : & OpTy < ' tcx > ,
114113) -> ConstValue < ' tcx > {
115- // We do not have value optimizations for everything.
116- // Only scalars and slices, since they are very common.
117- // Note that further down we turn scalars of uninitialized bits back to `ByRef`. These can result
118- // from scalar unions that are initialized with one of their zero sized variants. We could
119- // instead allow `ConstValue::Scalar` to store `ScalarMaybeUninit`, but that would affect all
120- // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
121- // `Undef` situation.
122- let try_as_immediate = match op. layout . abi {
114+ // Handle ZST consistently and early.
115+ if op. layout . is_zst ( ) {
116+ return ConstValue :: ZeroSized ;
117+ }
118+
119+ // All scalar types should be stored as `ConstValue::Scalar`. This is needed to make
120+ // `ConstValue::try_to_scalar` efficient; we want that to work for *all* constants of scalar
121+ // type (it's used throughout the compiler and having it work just on literals is not enough)
122+ // and we want it to be fast (i.e., don't go to an `Allocation` and reconstruct the `Scalar`
123+ // from its byte-serialized form).
124+ let force_as_immediate = match op. layout . abi {
123125 Abi :: Scalar ( abi:: Scalar :: Initialized { .. } ) => true ,
124- Abi :: ScalarPair ( ..) => match op. layout . ty . kind ( ) {
125- ty:: Ref ( _, inner, _) => match * inner. kind ( ) {
126- ty:: Slice ( elem) => elem == ecx. tcx . types . u8 ,
127- ty:: Str => true ,
128- _ => false ,
129- } ,
130- _ => false ,
131- } ,
126+ // We don't *force* `ConstValue::Slice` for `ScalarPair`. This has the advantage that if the
 127+ // input `op` is a place, then turning it into a `ConstValue` and back into an `OpTy` will
128+ // not have to generate any duplicate allocations (we preserve the original `AllocId` in
129+ // `ConstValue::Indirect`). It means accessing the contents of a slice can be slow (since
130+ // they can be stored as `ConstValue::Indirect`), but that's not relevant since we barely
131+ // ever have to do this. (`try_get_slice_bytes_for_diagnostics` exists to provide this
132+ // functionality.)
132133 _ => false ,
133134 } ;
134- let immediate = if try_as_immediate {
135+ let immediate = if force_as_immediate {
135136 Right ( ecx. read_immediate ( op) . expect ( "normalization works on validated constants" ) )
136137 } else {
137- // It is guaranteed that any non-slice scalar pair is actually ByRef here.
138- // When we come back from raw const eval, we are always by-ref. The only way our op here is
139- // by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
140- // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
141- // structs containing such.
142138 op. as_mplace_or_imm ( )
143139 } ;
144140
145141 debug ! ( ?immediate) ;
146142
147- // We know `offset` is relative to the allocation, so we can use `into_parts`.
148- let to_const_value = |mplace : & MPlaceTy < ' _ > | {
149- debug ! ( "to_const_value(mplace: {:?})" , mplace) ;
150- match mplace. ptr ( ) . into_parts ( ) {
151- ( Some ( alloc_id) , offset) => {
152- let alloc = ecx. tcx . global_alloc ( alloc_id) . unwrap_memory ( ) ;
153- ConstValue :: ByRef { alloc, offset }
154- }
155- ( None , offset) => {
156- assert ! ( mplace. layout. is_zst( ) ) ;
157- assert_eq ! (
158- offset. bytes( ) % mplace. layout. align. abi. bytes( ) ,
159- 0 ,
160- "this MPlaceTy must come from a validated constant, thus we can assume the \
161- alignment is correct",
162- ) ;
163- ConstValue :: ZeroSized
164- }
165- }
166- } ;
167143 match immediate {
168- Left ( ref mplace) => to_const_value ( mplace) ,
169- // see comment on `let try_as_immediate` above
144+ Left ( ref mplace) => {
145+ // We know `offset` is relative to the allocation, so we can use `into_parts`.
146+ let ( alloc_id, offset) = mplace. ptr ( ) . into_parts ( ) ;
 147+ let alloc_id = alloc_id. expect ( "cannot have `fake` place for non-ZST type" ) ;
148+ ConstValue :: Indirect { alloc_id, offset }
149+ }
150+ // see comment on `let force_as_immediate` above
170151 Right ( imm) => match * imm {
171- _ if imm. layout . is_zst ( ) => ConstValue :: ZeroSized ,
172152 Immediate :: Scalar ( x) => ConstValue :: Scalar ( x) ,
173153 Immediate :: ScalarPair ( a, b) => {
174154 debug ! ( "ScalarPair(a: {:?}, b: {:?})" , a, b) ;
155+ // FIXME: assert that this has an appropriate type.
156+ // Currently we actually get here for non-[u8] slices during valtree construction!
157+ let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to actually allocated memory" ;
175158 // We know `offset` is relative to the allocation, so we can use `into_parts`.
176- let ( data, start) = match a. to_pointer ( ecx) . unwrap ( ) . into_parts ( ) {
177- ( Some ( alloc_id) , offset) => {
178- ( ecx. tcx . global_alloc ( alloc_id) . unwrap_memory ( ) , offset. bytes ( ) )
179- }
180- ( None , _offset) => (
181- ecx. tcx . mk_const_alloc ( Allocation :: from_bytes_byte_aligned_immutable (
182- b"" as & [ u8 ] ,
183- ) ) ,
184- 0 ,
185- ) ,
186- } ;
187- let len = b. to_target_usize ( ecx) . unwrap ( ) ;
188- let start = start. try_into ( ) . unwrap ( ) ;
159+ // We use `ConstValue::Slice` so that we don't have to generate an allocation for
160+ // `ConstValue::Indirect` here.
161+ let ( alloc_id, offset) = a. to_pointer ( ecx) . expect ( msg) . into_parts ( ) ;
162+ let alloc_id = alloc_id. expect ( msg) ;
163+ let data = ecx. tcx . global_alloc ( alloc_id) . unwrap_memory ( ) ;
164+ let start = offset. bytes_usize ( ) ;
165+ let len = b. to_target_usize ( ecx) . expect ( msg) ;
189166 let len: usize = len. try_into ( ) . unwrap ( ) ;
190167 ConstValue :: Slice { data, start, end : start + len }
191168 }
192- Immediate :: Uninit => to_const_value ( & op. assert_mem_place ( ) ) ,
169+ Immediate :: Uninit => bug ! ( "`Uninit` is not a valid value for {}" , op. layout . ty ) ,
193170 } ,
194171 }
195172}
0 commit comments