[PATCH] largefiles: add --no-largefiles-caching option to pull

natosha at unity3d.com
Sat Feb 9 10:39:28 CST 2013


# HG changeset patch
# User Na'Tosha Bard <natosha at unity3d.com>
# Date 1360425447 0
# Node ID 8c4097213e8ce9e07eebdc2f4ceca9687f463048
# Parent  7f26c8bcbd74c0248acea8247f7b12b4aafe5a53
largefiles: add --no-largefiles-caching option to pull

The caching of largefiles for newly pulled heads makes sense in many
cases and is a reasonable default: if you pull a new head, you may have
pulled it from a non-default location that Mercurial won't know about
later, and you may want to merge with that head afterwards. However, if
you rarely pull from different repositories and a lot of branches are
added to your default repository on a regular basis, the default caching
behavior can be unnecessary overhead: if you know you are pulling from
the default remote location, you can easily fetch the largefiles later
when you need them for a merge, and you may not want to cache largefiles
for a dozen new heads right now.

This changeset adds a --no-largefiles-caching flag to pull, which
disables the default caching behavior.
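
As a rough usage sketch (illustration only, not part of the patch;
NEWHEAD is a placeholder for one of the newly pulled heads), the flag
lets you defer largefile downloads until a merge actually needs them,
assuming the default remote is still reachable at that point:

  $ hg pull --no-largefiles-caching
  $ hg merge NEWHEAD    # largefiles for NEWHEAD are fetched on demand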

diff -r 7f26c8bcbd74 -r 8c4097213e8c hgext/largefiles/overrides.py
--- a/hgext/largefiles/overrides.py	Sat Feb 09 15:25:46 2013 +0000
+++ b/hgext/largefiles/overrides.py	Sat Feb 09 15:57:27 2013 +0000
@@ -731,19 +731,21 @@
         repo.lfpullsource = source
         oldheads = lfutil.getcurrentheads(repo)
         result = orig(ui, repo, source, **opts)
-        # If we do not have the new largefiles for any new heads we pulled, we
-        # will run into a problem later if we try to merge or rebase with one of
-        # these heads, so cache the largefiles now directly into the system
-        # cache.
-        numcached = 0
-        heads = lfutil.getcurrentheads(repo)
-        newheads = set(heads).difference(set(oldheads))
-        if len(newheads) > 0:
-            ui.status(_("caching largefiles for %s heads\n") % len(newheads))
-        for head in newheads:
-            (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
-            numcached += len(cached)
-        ui.status(_("%d largefiles cached\n") % numcached)
+        if not opts.get('no_largefiles_caching'):
+            # If we do not have the new largefiles for any new heads we
+            # pulled, we will run into a problem later if we try to merge
+            # or rebase with one of these heads, so cache the largefiles
+            # now directly into the system cache.
+            numcached = 0
+            heads = lfutil.getcurrentheads(repo)
+            newheads = set(heads).difference(set(oldheads))
+            if len(newheads) > 0:
+                ui.status(_("caching largefiles for %s heads\n") %
+                          len(newheads))
+            for head in newheads:
+                (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
+                numcached += len(cached)
+            ui.status(_("%d largefiles cached\n") % numcached)
     if opts.get('all_largefiles'):
         revspostpull = len(repo)
         revs = []
diff -r 7f26c8bcbd74 -r 8c4097213e8c hgext/largefiles/uisetup.py
--- a/hgext/largefiles/uisetup.py	Sat Feb 09 15:25:46 2013 +0000
+++ b/hgext/largefiles/uisetup.py	Sat Feb 09 15:57:27 2013 +0000
@@ -79,7 +79,9 @@
     entry = extensions.wrapcommand(commands.table, 'pull',
                                    overrides.overridepull)
     pullopt = [('', 'all-largefiles', None,
-                 _('download all pulled versions of largefiles'))]
+                 _('download all pulled versions of largefiles')),
+               ('', 'no-largefiles-caching', None,
+                 _('do not cache largefiles for new heads'))]
     entry[1].extend(pullopt)
     entry = extensions.wrapcommand(commands.table, 'clone',
                                    overrides.overrideclone)
diff -r 7f26c8bcbd74 -r 8c4097213e8c tests/test-largefiles.t
--- a/tests/test-largefiles.t	Sat Feb 09 15:25:46 2013 +0000
+++ b/tests/test-largefiles.t	Sat Feb 09 15:57:27 2013 +0000
@@ -859,6 +859,30 @@
   abort: --all-largefiles is incompatible with non-local destination ssh://localhost/a
   [255]
 
+Test pulling with the --no-largefiles-caching flag
+
+  $ rm -Rf a-backup
+  $ hg clone -r 1 a a-backup
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 8 changes to 4 files
+  updating to branch default
+  getting changed largefiles
+  2 largefiles updated, 0 removed
+  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ rm "${USERCACHE}"/*
+  $ cd a-backup
+  $ hg pull --no-largefiles-caching
+  pulling from $TESTTMP/a
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 6 changesets with 16 changes to 8 files
+  (run 'hg update' to get a working copy)
+  $ cd ..
+
 Test pulling with --all-largefiles flag.  Also test that the largefiles are
 downloaded from 'default' instead of 'default-push' when no source is specified
 (issue3584)

