@@ -264,9 +264,10 @@ fn resolve_with_registry<'gctx>(
 /// Resolves all dependencies for a package using an optional previous instance
 /// of resolve to guide the resolution process.
 ///
-/// This also takes an optional hash set, `to_avoid`, which is a list of package
-/// IDs that should be avoided when consulting the previous instance of resolve
-/// (often used in pairings with updates).
+/// This also takes an optional filter, `keep_previous`, which tells the `registry`
+/// which package IDs should be locked to the previous instance of the resolve
+/// (often used in conjunction with updates). See the comments in
+/// [`register_previous_locks`] for scenarios that might override this.
 ///
 /// The previous resolve normally comes from a lock file. This function does not
 /// read or write lock files from the filesystem.
@@ -283,7 +284,7 @@ pub fn resolve_with_previous<'gctx>(
     cli_features: &CliFeatures,
     has_dev_units: HasDevUnits,
     previous: Option<&Resolve>,
-    to_avoid: Option<&HashSet<PackageId>>,
+    keep_previous: Option<&dyn Fn(&PackageId) -> bool>,
     specs: &[PackageIdSpec],
     register_patches: bool,
 ) -> CargoResult<Resolve> {
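For callers migrating off the old `to_avoid` set, the new parameter inverts the sense: `keep_previous` returns `true` for package IDs whose entry in the previous resolve should stay locked. A minimal sketch of the adaptation, assuming the caller still holds a `to_avoid: HashSet<PackageId>` (names here are illustrative, not part of this commit):

// Keep everything except the packages the caller wants to unlock.
let keep_previous = |id: &PackageId| !to_avoid.contains(id);
// ...then pass `Some(&keep_previous)` where `Some(&to_avoid)` was passed before.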
@@ -293,29 +294,8 @@ pub fn resolve_with_previous<'gctx>(
         .gctx()
         .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

-    // Here we place an artificial limitation that all non-registry sources
-    // cannot be locked at more than one revision. This means that if a Git
-    // repository provides more than one package, they must all be updated in
-    // step when any of them are updated.
-    //
-    // TODO: this seems like a hokey reason to single out the registry as being
-    // different.
-    let to_avoid_sources: HashSet<SourceId> = to_avoid
-        .map(|set| {
-            set.iter()
-                .map(|p| p.source_id())
-                .filter(|s| !s.is_registry())
-                .collect()
-        })
-        .unwrap_or_default();
-
-    let pre_patch_keep = |p: &PackageId| {
-        !to_avoid_sources.contains(&p.source_id())
-            && match to_avoid {
-                Some(set) => !set.contains(p),
-                None => true,
-            }
-    };
+    // Try to keep everything from the previous resolve if no filter is given.
+    let keep_previous = keep_previous.unwrap_or(&|_| true);

     // While registering patches, we will record preferences for particular versions
     // of various packages.
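Note that the removed block above also unlocked every package coming from a non-registry source that appeared in `to_avoid` (the `to_avoid_sources` check), and that behavior is no longer applied inside this function. A hedged sketch, not part of this commit, of how a caller could fold the same rule into the filter it supplies, assuming it owns a `to_avoid: HashSet<PackageId>` and uses the same `HashSet`, `SourceId`, and `PackageId` imports as this module:

// Sketch only: reproduce the old `pre_patch_keep` semantics in a caller-supplied filter.
// Any non-registry source mentioned in `to_avoid` is unlocked wholesale, mirroring the
// removed `to_avoid_sources` restriction.
let to_avoid_sources: HashSet<SourceId> = to_avoid
    .iter()
    .map(|p| p.source_id())
    .filter(|s| !s.is_registry())
    .collect();
let keep_previous = move |p: &PackageId| {
    !to_avoid_sources.contains(&p.source_id()) && !to_avoid.contains(p)
};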
@@ -327,117 +307,14 @@ pub fn resolve_with_previous<'gctx>(
         version_prefs.max_rust_version(ws.rust_version().cloned().map(RustVersion::into_partial));
     }

-    // This is a set of PackageIds of `[patch]` entries, and some related locked PackageIds, for
-    // which locking should be avoided (but which will be preferred when searching dependencies,
-    // via prefer_patch_deps below)
-    let mut avoid_patch_ids = HashSet::new();
-
-    if register_patches {
-        for (url, patches) in ws.root_patch()?.iter() {
-            for patch in patches {
-                version_prefs.prefer_dependency(patch.clone());
-            }
-            let Some(previous) = previous else {
-                let patches: Vec<_> = patches.iter().map(|p| (p, None)).collect();
-                let unlock_ids = registry.patch(url, &patches)?;
-                // Since nothing is locked, this shouldn't possibly return anything.
-                assert!(unlock_ids.is_empty());
-                continue;
-            };
-
-            // This is a list of pairs where the first element of the pair is
-            // the raw `Dependency` which matches what's listed in `Cargo.toml`.
-            // The second element is, if present, the "locked" version of
-            // the `Dependency` as well as the `PackageId` that it previously
-            // resolved to. This second element is calculated by looking at the
-            // previous resolve graph, which is primarily what's done here to
-            // build the `registrations` list.
-            let mut registrations = Vec::new();
-            for dep in patches {
-                let candidates = || {
-                    previous
-                        .iter()
-                        .chain(previous.unused_patches().iter().cloned())
-                        .filter(&pre_patch_keep)
-                };
-
-                let lock = match candidates().find(|id| dep.matches_id(*id)) {
-                    // If we found an exactly matching candidate in our list of
-                    // candidates, then that's the one to use.
-                    Some(package_id) => {
-                        let mut locked_dep = dep.clone();
-                        locked_dep.lock_to(package_id);
-                        Some(LockedPatchDependency {
-                            dependency: locked_dep,
-                            package_id,
-                            alt_package_id: None,
-                        })
-                    }
-                    None => {
-                        // If the candidate does not have a matching source id
-                        // then we may still have a lock candidate. If we're
-                        // loading a v2-encoded resolve graph and `dep` is a
-                        // git dep with `branch = 'master'`, then this should
-                        // also match candidates without `branch = 'master'`
-                        // (which is now treated separately in Cargo).
-                        //
-                        // In this scenario we try to convert candidates located
-                        // in the resolve graph to explicitly having the
-                        // `master` branch (if they otherwise point to
-                        // `DefaultBranch`). If this works and our `dep`
-                        // matches that then this is something we'll lock to.
-                        match candidates().find(|&id| {
-                            match master_branch_git_source(id, previous) {
-                                Some(id) => dep.matches_id(id),
-                                None => false,
-                            }
-                        }) {
-                            Some(id_using_default) => {
-                                let id_using_master = id_using_default.with_source_id(
-                                    dep.source_id()
-                                        .with_precise_from(id_using_default.source_id()),
-                                );
-
-                                let mut locked_dep = dep.clone();
-                                locked_dep.lock_to(id_using_master);
-                                Some(LockedPatchDependency {
-                                    dependency: locked_dep,
-                                    package_id: id_using_master,
-                                    // Note that this is where the magic
-                                    // happens, where the resolve graph
-                                    // probably has locks pointing to
-                                    // DefaultBranch sources, and by including
-                                    // this here those will get transparently
-                                    // rewritten to Branch("master") which we
-                                    // have a lock entry for.
-                                    alt_package_id: Some(id_using_default),
-                                })
-                            }
-
-                            // No locked candidate was found
-                            None => None,
-                        }
-                    }
-                };
-
-                registrations.push((dep, lock));
-            }
-
-            let canonical = CanonicalUrl::new(url)?;
-            for (orig_patch, unlock_id) in registry.patch(url, &registrations)? {
-                // Avoid the locked patch ID.
-                avoid_patch_ids.insert(unlock_id);
-                // Also avoid the thing it is patching.
-                avoid_patch_ids.extend(previous.iter().filter(|id| {
-                    orig_patch.matches_ignoring_source(*id)
-                        && *id.source_id().canonical_url() == canonical
-                }));
-            }
-        }
-    }
-    debug!("avoid_patch_ids={:?}", avoid_patch_ids);
+    let avoid_patch_ids = if register_patches {
+        register_patch_entries(registry, ws, previous, &mut version_prefs, keep_previous)?
+    } else {
+        HashSet::new()
+    };

-    let keep = |p: &PackageId| pre_patch_keep(p) && !avoid_patch_ids.contains(p);
+    // Refine `keep` with patches that should avoid locking.
+    let keep = |p: &PackageId| keep_previous(p) && !avoid_patch_ids.contains(p);

     let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes;

@@ -880,3 +757,121 @@ fn emit_warnings_of_unused_patches(

     return Ok(());
 }
+
+/// Informs `registry` and `version_prefs` that `[patch]` entries are available
+/// and preferable for dependency resolution.
+///
+/// This returns a set of PackageIds of `[patch]` entries, and some related
+/// locked PackageIds, for which locking should be avoided (but which will be
+/// preferred when searching dependencies, via [`VersionPreferences::prefer_patch_deps`]).
+#[tracing::instrument(level = "debug", skip_all, ret)]
+fn register_patch_entries(
+    registry: &mut PackageRegistry<'_>,
+    ws: &Workspace<'_>,
+    previous: Option<&Resolve>,
+    version_prefs: &mut VersionPreferences,
+    keep_previous: &dyn Fn(&PackageId) -> bool,
+) -> CargoResult<HashSet<PackageId>> {
+    let mut avoid_patch_ids = HashSet::new();
+    for (url, patches) in ws.root_patch()?.iter() {
+        for patch in patches {
+            version_prefs.prefer_dependency(patch.clone());
+        }
+        let Some(previous) = previous else {
+            let patches: Vec<_> = patches.iter().map(|p| (p, None)).collect();
+            let unlock_ids = registry.patch(url, &patches)?;
+            // Since nothing is locked, this shouldn't possibly return anything.
+            assert!(unlock_ids.is_empty());
+            continue;
+        };
+
+        // This is a list of pairs where the first element of the pair is
+        // the raw `Dependency` which matches what's listed in `Cargo.toml`.
+        // The second element is, if present, the "locked" version of
+        // the `Dependency` as well as the `PackageId` that it previously
+        // resolved to. This second element is calculated by looking at the
+        // previous resolve graph, which is primarily what's done here to
+        // build the `registrations` list.
+        let mut registrations = Vec::new();
+        for dep in patches {
+            let candidates = || {
+                previous
+                    .iter()
+                    .chain(previous.unused_patches().iter().cloned())
+                    .filter(&keep_previous)
+            };
+
+            let lock = match candidates().find(|id| dep.matches_id(*id)) {
+                // If we found an exactly matching candidate in our list of
+                // candidates, then that's the one to use.
+                Some(package_id) => {
+                    let mut locked_dep = dep.clone();
+                    locked_dep.lock_to(package_id);
+                    Some(LockedPatchDependency {
+                        dependency: locked_dep,
+                        package_id,
+                        alt_package_id: None,
+                    })
+                }
+                None => {
+                    // If the candidate does not have a matching source id
+                    // then we may still have a lock candidate. If we're
+                    // loading a v2-encoded resolve graph and `dep` is a
+                    // git dep with `branch = 'master'`, then this should
+                    // also match candidates without `branch = 'master'`
+                    // (which is now treated separately in Cargo).
+                    //
+                    // In this scenario we try to convert candidates located
+                    // in the resolve graph to explicitly having the
+                    // `master` branch (if they otherwise point to
+                    // `DefaultBranch`). If this works and our `dep`
+                    // matches that then this is something we'll lock to.
+                    match candidates().find(|&id| match master_branch_git_source(id, previous) {
+                        Some(id) => dep.matches_id(id),
+                        None => false,
+                    }) {
+                        Some(id_using_default) => {
+                            let id_using_master = id_using_default.with_source_id(
+                                dep.source_id()
+                                    .with_precise_from(id_using_default.source_id()),
+                            );
+
+                            let mut locked_dep = dep.clone();
+                            locked_dep.lock_to(id_using_master);
+                            Some(LockedPatchDependency {
+                                dependency: locked_dep,
+                                package_id: id_using_master,
+                                // Note that this is where the magic
+                                // happens, where the resolve graph
+                                // probably has locks pointing to
+                                // DefaultBranch sources, and by including
+                                // this here those will get transparently
+                                // rewritten to Branch("master") which we
+                                // have a lock entry for.
+                                alt_package_id: Some(id_using_default),
+                            })
+                        }
+
+                        // No locked candidate was found
+                        None => None,
+                    }
+                }
+            };
+
+            registrations.push((dep, lock));
+        }
+
+        let canonical = CanonicalUrl::new(url)?;
+        for (orig_patch, unlock_id) in registry.patch(url, &registrations)? {
+            // Avoid the locked patch ID.
+            avoid_patch_ids.insert(unlock_id);
+            // Also avoid the thing it is patching.
+            avoid_patch_ids.extend(previous.iter().filter(|id| {
+                orig_patch.matches_ignoring_source(*id)
+                    && *id.source_id().canonical_url() == canonical
+            }));
+        }
+    }
+
+    Ok(avoid_patch_ids)
+}