[PATCH 3 of 6] largefiles: use a constant file chunk size instead of repeating 128 * 1024

Mads Kiilerich mads at kiilerich.com
Fri Oct 7 19:26:07 EDT 2016


# HG changeset patch
# User Mads Kiilerich <madski at unity3d.com>
# Date 1475881181 -7200
#      Sat Oct 08 00:59:41 2016 +0200
# Node ID 5d63b84517ab6ecff8695aff0748bef9695c241c
# Parent  87cea1040403001e660dd1c6b2e2d069d8a51d2e
largefiles: use a constant file chunk size instead of repeating 128 * 1024

diff --git a/hgext/largefiles/lfutil.py b/hgext/largefiles/lfutil.py
--- a/hgext/largefiles/lfutil.py
+++ b/hgext/largefiles/lfutil.py
@@ -30,6 +30,7 @@ from mercurial import (
 shortname = '.hglf'
 shortnameslash = shortname + '/'
 longname = 'largefiles'
+filechunkitersize = 128 * 1024
 
 # -- Private worker functions ------------------------------------------
 
@@ -371,7 +372,7 @@ def hashfile(file):
         return ''
     hasher = hashlib.sha1('')
     with open(file, 'rb') as fd:
-        for data in util.filechunkiter(fd, 128 * 1024):
+        for data in util.filechunkiter(fd, filechunkitersize):
             hasher.update(data)
     return hasher.hexdigest()
 
diff --git a/hgext/largefiles/overrides.py b/hgext/largefiles/overrides.py
--- a/hgext/largefiles/overrides.py
+++ b/hgext/largefiles/overrides.py
@@ -1356,7 +1356,8 @@ def overridecat(orig, ui, repo, file1, *
                               'downloaded')  % lf)
                 path = lfutil.usercachepath(repo.ui, hash)
                 with open(path, "rb") as fpin:
-                    for chunk in util.filechunkiter(fpin, 128 * 1024):
+                    for chunk in util.filechunkiter(fpin,
+                                                    lfutil.filechunkitersize):
                         fp.write(chunk)
         err = 0
     return err
diff --git a/hgext/largefiles/proto.py b/hgext/largefiles/proto.py
--- a/hgext/largefiles/proto.py
+++ b/hgext/largefiles/proto.py
@@ -134,7 +134,8 @@ def wirereposetup(ui, repo):
                                                 length))
 
             # SSH streams will block if reading more than length
-            for chunk in util.filechunkiter(stream, 128 * 1024, length):
+            for chunk in util.filechunkiter(stream, lfutil.filechunkitersize,
+                                            length):
                 yield chunk
             # HTTP streams must hit the end to process the last empty
             # chunk of Chunked-Encoding so the connection can be reused.
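
For readers outside the largefiles code, here is a minimal standalone sketch of the pattern this patch applies. The chunk iterator below is a simplified stand-in for util.filechunkiter, and the constant name only mirrors the one introduced in lfutil; it is illustrative, not the actual Mercurial implementation:

    import hashlib

    # One module-level name instead of repeating the 128 * 1024 literal
    # at every call site.
    filechunkitersize = 128 * 1024

    def filechunkiter(fd, size=filechunkitersize):
        """Yield successive chunks of at most `size` bytes from a file object."""
        while True:
            chunk = fd.read(size)
            if not chunk:
                break
            yield chunk

    def hashfile(path):
        """SHA-1 a file in fixed-size chunks so large files never sit fully in memory."""
        hasher = hashlib.sha1()
        with open(path, 'rb') as fd:
            for data in filechunkiter(fd):
                hasher.update(data)
        return hasher.hexdigest()

Keeping the size behind a single module-level name means any later tuning of the chunk size only has to touch one line.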

