@@ -30,6 +30,17 @@ use crate::{
     StructKind, TagEncoding, Variants, WrappingRange,
 };
 
+/// This option controls how coroutine saved locals are packed
+/// into the coroutine state data.
+#[derive(Debug, Clone, Copy)]
+pub enum PackCoroutineLayout {
+    /// The classic layout, where captures are always promoted into the coroutine state prefix
+    Classic,
+    /// Captures are first saved into the `UNRESUMED` state and promoted
+    /// when they are used across more than one suspension point
+    CapturesOnly,
+}
+
 /// Overlap eligibility and variant assignment for each CoroutineSavedLocal.
 #[derive(Clone, Debug, PartialEq)]
 enum SavedLocalEligibility<VariantIdx, FieldIdx> {
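
For intuition, here is a hypothetical user-level coroutine (an assumed example, not taken from this change) showing the difference the two variants describe: under `Classic` a capture always lives in the shared state prefix, while under `CapturesOnly` it starts out in the `UNRESUMED` variant and is only promoted if it stays live across suspension points.

```rust
// Assumed illustration only: `buf` is captured by the coroutine below and stays
// live across the suspension points, so under `CapturesOnly` it would be promoted
// out of the UNRESUMED state into the shared prefix; under `Classic` it is placed
// in the prefix unconditionally.
async fn copy_out(buf: Vec<u8>) -> usize {
    step().await; // `buf` is still live here...
    step().await; // ...and here, i.e. across more than one suspension point.
    buf.len()
}

async fn step() {}

fn main() {
    // Constructing the future is enough to materialize the coroutine state.
    let _state = copy_out(vec![1, 2, 3]);
}
```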
@@ -145,20 +156,23 @@ pub(super) fn layout<
 >(
     calc: &super::LayoutCalculator<impl HasDataLayout>,
     local_layouts: &IndexSlice<LocalIdx, F>,
-    mut prefix_layouts: IndexVec<FieldIdx, F>,
+    relocated_upvars: &IndexSlice<LocalIdx, Option<LocalIdx>>,
+    upvar_layouts: IndexVec<FieldIdx, F>,
     variant_fields: &IndexSlice<VariantIdx, IndexVec<FieldIdx, LocalIdx>>,
     storage_conflicts: &BitMatrix<LocalIdx, LocalIdx>,
+    pack: PackCoroutineLayout,
     tag_to_layout: impl Fn(Scalar) -> F,
 ) -> super::LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
     use SavedLocalEligibility::*;
 
     let (ineligible_locals, assignments) =
         coroutine_saved_local_eligibility(local_layouts.len(), variant_fields, storage_conflicts);
 
-    // Build a prefix layout, including "promoting" all ineligible
-    // locals as part of the prefix. We compute the layout of all of
-    // these fields at once to get optimal packing.
-    let tag_index = prefix_layouts.next_index();
+    // Build a prefix layout, consisting of only the state tag and, as requested, the upvars.
+    let tag_index = match pack {
+        PackCoroutineLayout::CapturesOnly => FieldIdx::new(0),
+        PackCoroutineLayout::Classic => upvar_layouts.next_index(),
+    };
 
     // `variant_fields` already accounts for the reserved variants, so no need to add them.
     let max_discr = (variant_fields.len() - 1) as u128;
@@ -169,17 +183,28 @@ pub(super) fn layout<
     };
 
     let promoted_layouts = ineligible_locals.iter().map(|local| local_layouts[local]);
-    prefix_layouts.push(tag_to_layout(tag));
-    prefix_layouts.extend(promoted_layouts);
+    // FIXME: when we introduce more packing schemes, we need to change the prefix layout here.
+    let prefix_layouts: IndexVec<_, _> = match pack {
+        PackCoroutineLayout::Classic => {
+            // The classic scheme packs the state as follows:
+            // [ <upvars>.., <state tag>, <promoted ineligibles> ] ++ <variant data>
+            // In addition, UNRESUMED overlaps with the <upvars> part.
+            upvar_layouts.into_iter().chain([tag_to_layout(tag)]).chain(promoted_layouts).collect()
+        }
+        PackCoroutineLayout::CapturesOnly => {
+            [tag_to_layout(tag)].into_iter().chain(promoted_layouts).collect()
+        }
+    };
+    debug!(?prefix_layouts, ?pack);
     let prefix =
         calc.univariant(&prefix_layouts, &ReprOptions::default(), StructKind::AlwaysSized)?;
 
     let (prefix_size, prefix_align) = (prefix.size, prefix.align);
 
-    // Split the prefix layout into the "outer" fields (upvars and
-    // discriminant) and the "promoted" fields. Promoted fields will
-    // get included in each variant that requested them in
-    // CoroutineLayout.
+    // Split the prefix layout into the discriminant and
+    // the "promoted" fields.
+    // Promoted fields will get included in each variant
+    // that requested them in CoroutineLayout.
     debug!("prefix = {:#?}", prefix);
     let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields {
         FieldsShape::Arbitrary { mut offsets, memory_index } => {
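
A minimal, self-contained sketch of the prefix ordering the two `match pack` arms above produce, using plain string placeholders instead of the compiler's field layouts (the names below are illustrative, not from the patch); note how the tag lands at index 0 for `CapturesOnly` and right after the upvars for `Classic`, matching the earlier `tag_index` computation.

```rust
#[derive(Debug, Clone, Copy)]
enum PackCoroutineLayout {
    Classic,
    CapturesOnly,
}

// Mirrors the `match pack` above: Classic puts the upvars first, then the tag, then
// the promoted ineligible locals; CapturesOnly starts the prefix at the tag.
fn prefix_order(
    pack: PackCoroutineLayout,
    upvars: Vec<&'static str>,
    promoted: Vec<&'static str>,
) -> Vec<&'static str> {
    match pack {
        PackCoroutineLayout::Classic => {
            upvars.into_iter().chain(["state tag"]).chain(promoted).collect()
        }
        PackCoroutineLayout::CapturesOnly => {
            ["state tag"].into_iter().chain(promoted).collect()
        }
    }
}

fn main() {
    let upvars = vec!["upvar a", "upvar b"];
    let promoted = vec!["promoted x"];
    println!("{:?}", prefix_order(PackCoroutineLayout::Classic, upvars.clone(), promoted.clone()));
    // ["upvar a", "upvar b", "state tag", "promoted x"]  (tag index == number of upvars)
    println!("{:?}", prefix_order(PackCoroutineLayout::CapturesOnly, upvars, promoted));
    // ["state tag", "promoted x"]                        (tag index == 0)
}
```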
@@ -218,19 +243,67 @@ pub(super) fn layout<
     let variants = variant_fields
         .iter_enumerated()
         .map(|(index, variant_fields)| {
+            let is_unresumed = index == VariantIdx::new(0);
+            if is_unresumed && matches!(pack, PackCoroutineLayout::Classic) {
+                let fields = FieldsShape::Arbitrary {
+                    offsets: (0..tag_index.index()).map(|i| outer_fields.offset(i)).collect(),
+                    memory_index: (0..tag_index.index())
+                        .map(|i| {
+                            (outer_fields.memory_index(i) + promoted_memory_index.len()) as u32
+                        })
+                        .collect(),
+                };
+                let align = prefix.align;
+                let size = prefix.size;
+                return Ok(LayoutData {
+                    fields,
+                    variants: Variants::Single { index },
+                    backend_repr: BackendRepr::Memory { sized: true },
+                    largest_niche: None,
+                    uninhabited: false,
+                    align,
+                    size,
+                    max_repr_align: None,
+                    unadjusted_abi_align: align.abi,
+                    randomization_seed: Default::default(),
+                });
+            }
+            let mut is_ineligible = IndexVec::from_elem_n(None, variant_fields.len());
+            for (field, &local) in variant_fields.iter_enumerated() {
+                if is_unresumed {
+                    if let Some(inner_local) = relocated_upvars[local]
+                        && let Ineligible(Some(promoted_field)) = assignments[inner_local]
+                    {
+                        is_ineligible.insert(field, promoted_field);
+                        continue;
+                    }
+                }
+                match assignments[local] {
+                    Assigned(v) if v == index => {}
+                    Ineligible(Some(promoted_field)) => {
+                        is_ineligible.insert(field, promoted_field);
+                    }
+                    Ineligible(None) => {
+                        panic!("an ineligible local should have been promoted into the prefix")
+                    }
+                    Assigned(_) => {
+                        panic!("an eligible local should have been assigned to exactly one variant")
+                    }
+                    Unassigned => {
+                        panic!("each saved local should have been inspected at least once")
+                    }
+                }
+            }
             // Only include overlap-eligible fields when we compute our variant layout.
-            let variant_only_tys = variant_fields
-                .iter()
-                .filter(|local| match assignments[**local] {
-                    Unassigned => unreachable!(),
-                    Assigned(v) if v == index => true,
-                    Assigned(_) => unreachable!("assignment does not match variant"),
-                    Ineligible(_) => false,
+            let fields: IndexVec<_, _> = variant_fields
+                .iter_enumerated()
+                .filter_map(|(field, &local)| {
+                    if is_ineligible.contains(field) { None } else { Some(local_layouts[local]) }
                 })
-                .map(|local| local_layouts[*local]);
+                .collect();
 
             let mut variant = calc.univariant(
-                &variant_only_tys.collect::<IndexVec<_, _>>(),
+                &fields,
                 &ReprOptions::default(),
                 StructKind::Prefixed(prefix_size, prefix_align.abi),
             )?;
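
A simplified sketch of the per-field classification performed in the loop above, with plain `usize` indices and a toy `Eligibility` enum standing in for the compiler's index types (the `UNRESUMED`-specific `relocated_upvars` handling is omitted); this illustrates the technique, it is not compiler code.

```rust
#[derive(Clone, Copy)]
enum Eligibility {
    Assigned(usize), // the saved local lives only in this variant
    Promoted(usize), // the saved local was promoted into the prefix at this slot
}

// For one variant: record which fields refer to promoted locals (`is_ineligible`)
// and keep only the remaining, variant-owned locals for the variant's own layout.
fn classify_fields(
    variant_index: usize,
    variant_locals: &[usize],    // saved-local index for each field of the variant
    assignments: &[Eligibility], // eligibility per saved local
) -> (Vec<Option<usize>>, Vec<usize>) {
    let mut is_ineligible = vec![None; variant_locals.len()];
    let mut own_locals = Vec::new();
    for (field, &local) in variant_locals.iter().enumerate() {
        match assignments[local] {
            Eligibility::Assigned(v) if v == variant_index => own_locals.push(local),
            Eligibility::Assigned(_) => panic!("assigned to a different variant"),
            Eligibility::Promoted(slot) => is_ineligible[field] = Some(slot),
        }
    }
    (is_ineligible, own_locals)
}

fn main() {
    // Locals 0 and 2 belong to variant 1; local 1 was promoted to prefix slot 0.
    let assignments =
        [Eligibility::Assigned(1), Eligibility::Promoted(0), Eligibility::Assigned(1)];
    let (is_ineligible, own) = classify_fields(1, &[0, 1, 2], &assignments);
    assert_eq!(is_ineligible, vec![None, Some(0), None]);
    assert_eq!(own, vec![0, 2]);
}
```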
@@ -254,19 +327,14 @@ pub(super) fn layout<
                 IndexVec::from_elem_n(FieldIdx::new(invalid_field_idx), invalid_field_idx);
 
             let mut offsets_and_memory_index = iter::zip(offsets, memory_index);
-            let combined_offsets = variant_fields
+            let combined_offsets = is_ineligible
                 .iter_enumerated()
-                .map(|(i, local)| {
-                    let (offset, memory_index) = match assignments[*local] {
-                        Unassigned => unreachable!(),
-                        Assigned(_) => {
-                            let (offset, memory_index) = offsets_and_memory_index.next().unwrap();
-                            (offset, promoted_memory_index.len() as u32 + memory_index)
-                        }
-                        Ineligible(field_idx) => {
-                            let field_idx = field_idx.unwrap();
-                            (promoted_offsets[field_idx], promoted_memory_index[field_idx])
-                        }
+                .map(|(i, &is_ineligible)| {
+                    let (offset, memory_index) = if let Some(field_idx) = is_ineligible {
+                        (promoted_offsets[field_idx], promoted_memory_index[field_idx])
+                    } else {
+                        let (offset, memory_index) = offsets_and_memory_index.next().unwrap();
+                        (offset, promoted_memory_index.len() as u32 + memory_index)
                     };
                     combined_inverse_memory_index[memory_index] = i;
                     offset
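
The offset recombination above can be sketched in isolation roughly as follows (an assumed simplification: plain tuples and `u64` offsets replace the compiler's `Size` and index types): promoted fields reuse their prefix offsets and memory indices, while the variant's own fields consume the freshly computed ones in order, shifted past the promoted block.

```rust
// For each field of the variant, pick its (offset, memory index):
// - promoted fields read both from the prefix tables,
// - variant-owned fields take the next entry computed for this variant, with the
//   memory index shifted by the number of promoted fields.
fn combine_offsets(
    is_ineligible: &[Option<usize>],       // per field: promoted prefix slot, if any
    own_offsets_and_memory: &[(u64, u32)], // (offset, memory index) for variant-owned fields
    promoted_offsets: &[u64],
    promoted_memory_index: &[u32],
) -> Vec<(u64, u32)> {
    let mut own = own_offsets_and_memory.iter().copied();
    is_ineligible
        .iter()
        .map(|slot| match slot {
            Some(f) => (promoted_offsets[*f], promoted_memory_index[*f]),
            None => {
                let (offset, memory_index) = own.next().unwrap();
                (offset, promoted_memory_index.len() as u32 + memory_index)
            }
        })
        .collect()
}

fn main() {
    // One promoted field (prefix offset 0) followed by one variant-owned field.
    let combined = combine_offsets(&[Some(0), None], &[(16, 0)], &[0], &[0]);
    assert_eq!(combined, vec![(0, 0), (16, 1)]);
}
```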