[PATCH] py3: use namedtuple._replace to produce new tokens

Martijn Pieters mj@zopatista.com
Thu Oct 13 08:29:26 UTC 2016


# HG changeset patch
# User Martijn Pieters <mjpieters@fb.com>
# Date 1476347257 -3600
#      Thu Oct 13 09:27:37 2016 +0100
# Node ID 22a15130f57927a79227ebfe763be082e2992f04
# Parent  81d23b9e2b329666db6e342f6bafec54a893687c
py3: use namedtuple._replace to produce new tokens
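
For context, tokenize.TokenInfo is a namedtuple, so its _replace method
returns a new token with only the named fields changed while the rest are
copied over. A minimal standalone sketch of the pattern (not part of this
patch; the input string and the b-prefixing below are illustrative only):

    import io
    import tokenize

    source = b"x = 'hello'\n"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        if tok.type == tokenize.STRING:
            # Keep start, end and line; only the string field changes.
            tok = tok._replace(string='b%s' % tok.string)
        print(tok)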

diff --git a/mercurial/__init__.py b/mercurial/__init__.py
--- a/mercurial/__init__.py
+++ b/mercurial/__init__.py
@@ -236,9 +236,7 @@
             """
             st = tokens[j]
             if st.type == token.STRING and st.string.startswith(("'", '"')):
-                rt = tokenize.TokenInfo(st.type, 'u%s' % st.string,
-                                        st.start, st.end, st.line)
-                tokens[j] = rt
+                tokens[j] = st._replace(string='u%s' % st.string)
 
         for i, t in enumerate(tokens):
             # Convert most string literals to byte literals. String literals
@@ -269,8 +267,7 @@
                     continue
 
                 # String literal. Prefix to make a b'' string.
-                yield tokenize.TokenInfo(t.type, 'b%s' % s, t.start, t.end,
-                                          t.line)
+                yield t._replace(string='b%s' % t.string)
                 continue
 
             # Insert compatibility imports at "from __future__ import" line.
@@ -290,10 +287,8 @@
                 for u in tokenize.tokenize(io.BytesIO(l).readline):
                     if u.type in (tokenize.ENCODING, token.ENDMARKER):
                         continue
-                    yield tokenize.TokenInfo(u.type, u.string,
-                                             (r, c + u.start[1]),
-                                             (r, c + u.end[1]),
-                                             '')
+                    yield u._replace(
+                        start=(r, c + u.start[1]), end=(r, c + u.end[1]))
                 continue
 
             # This looks like a function call.
@@ -325,8 +320,7 @@
                 # It changes iteritems to items as iteritems is not
                 # present in Python 3 world.
                 elif fn == 'iteritems':
-                    yield tokenize.TokenInfo(t.type, 'items',
-                                             t.start, t.end, t.line)
+                    yield t._replace(string='items')
                     continue
 
             # Emit unmodified token.

