mirror of https://github.com/jart/cosmopolitan.git, synced 2025-05-23 05:42:29 +00:00

python-3.6.zip added from Github

README.cosmo contains the necessary links.

This commit is contained in:
  parent 75fc601ff5
  commit 0c4c56ff39

4219 changed files with 1968626 additions and 0 deletions

1  third_party/python/Lib/lib2to3/fixes/__init__.py  vendored  Normal file
@@ -0,0 +1 @@
# Dummy file to make this directory a package.

70  third_party/python/Lib/lib2to3/fixes/fix_apply.py  vendored  Normal file
@@ -0,0 +1,70 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for apply().

This converts apply(func, v, k) into (func)(*v, **k)."""

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Call, Comma, parenthesize

class FixApply(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< 'apply'
        trailer<
            '('
            arglist<
                (not argument<NAME '=' any>) func=any ','
                (not argument<NAME '=' any>) args=any [','
                (not argument<NAME '=' any>) kwds=any] [',']
            >
            ')'
        >
    >
    """

    def transform(self, node, results):
        syms = self.syms
        assert results
        func = results["func"]
        args = results["args"]
        kwds = results.get("kwds")
        # I feel like we should be able to express this logic in the
        # PATTERN above but I don't know how to do it so...
        if args:
            if args.type == self.syms.star_expr:
                return  # Make no change.
            if (args.type == self.syms.argument and
                args.children[0].value == '**'):
                return  # Make no change.
        if kwds and (kwds.type == self.syms.argument and
                     kwds.children[0].value == '**'):
            return  # Make no change.
        prefix = node.prefix
        func = func.clone()
        if (func.type not in (token.NAME, syms.atom) and
            (func.type != syms.power or
             func.children[-2].type == token.DOUBLESTAR)):
            # Need to parenthesize
            func = parenthesize(func)
        func.prefix = ""
        args = args.clone()
        args.prefix = ""
        if kwds is not None:
            kwds = kwds.clone()
            kwds.prefix = ""
        l_newargs = [pytree.Leaf(token.STAR, "*"), args]
        if kwds is not None:
            l_newargs.extend([Comma(),
                              pytree.Leaf(token.DOUBLESTAR, "**"),
                              kwds])
            l_newargs[-2].prefix = " "  # that's the ** token
        # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
        # can be translated into f(x, y, *t) instead of f(*(x, y) + t)
        #new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
        return Call(func, l_newargs, prefix=prefix)

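Aside: the fixers in this directory are ordinary lib2to3 fixer modules, so each one
can be exercised in isolation through lib2to3's refactoring API. A minimal sketch,
not part of the vendored sources; the input string and names are illustrative only:

    from lib2to3.refactor import RefactoringTool

    # Run just the apply() fixer defined in fix_apply.py above.
    rt = RefactoringTool(["lib2to3.fixes.fix_apply"])
    tree = rt.refactor_string("apply(f, args, kwargs)\n", "<example>")
    print(tree)   # expected: f(*args, **kwargs)
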
34  third_party/python/Lib/lib2to3/fixes/fix_asserts.py  vendored  Normal file
@@ -0,0 +1,34 @@
"""Fixer that replaces deprecated unittest method names."""

# Author: Ezio Melotti

from ..fixer_base import BaseFix
from ..fixer_util import Name

NAMES = dict(
    assert_="assertTrue",
    assertEquals="assertEqual",
    assertNotEquals="assertNotEqual",
    assertAlmostEquals="assertAlmostEqual",
    assertNotAlmostEquals="assertNotAlmostEqual",
    assertRegexpMatches="assertRegex",
    assertRaisesRegexp="assertRaisesRegex",
    failUnlessEqual="assertEqual",
    failIfEqual="assertNotEqual",
    failUnlessAlmostEqual="assertAlmostEqual",
    failIfAlmostEqual="assertNotAlmostEqual",
    failUnless="assertTrue",
    failUnlessRaises="assertRaises",
    failIf="assertFalse",
)


class FixAsserts(BaseFix):

    PATTERN = """
    power< any+ trailer< '.' meth=(%s)> any* >
    """ % '|'.join(map(repr, NAMES))

    def transform(self, node, results):
        name = results["meth"][0]
        name.replace(Name(NAMES[str(name)], prefix=name.prefix))

14  third_party/python/Lib/lib2to3/fixes/fix_basestring.py  vendored  Normal file
@@ -0,0 +1,14 @@
"""Fixer for basestring -> str."""
# Author: Christian Heimes

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixBasestring(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = "'basestring'"

    def transform(self, node, results):
        return Name("str", prefix=node.prefix)

22  third_party/python/Lib/lib2to3/fixes/fix_buffer.py  vendored  Normal file
@@ -0,0 +1,22 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes buffer(...) into memoryview(...)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixBuffer(fixer_base.BaseFix):
    BM_compatible = True

    explicit = True  # The user must ask for this fixer

    PATTERN = """
              power< name='buffer' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        name.replace(Name("memoryview", prefix=name.prefix))

106  third_party/python/Lib/lib2to3/fixes/fix_dict.py  vendored  Normal file
@@ -0,0 +1,106 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for dict methods.

d.keys() -> list(d.keys())
d.items() -> list(d.items())
d.values() -> list(d.values())

d.iterkeys() -> iter(d.keys())
d.iteritems() -> iter(d.items())
d.itervalues() -> iter(d.values())

d.viewkeys() -> d.keys()
d.viewitems() -> d.items()
d.viewvalues() -> d.values()

Except in certain very specific contexts: the iter() can be dropped
when the context is list(), sorted(), iter() or for...in; the list()
can be dropped when the context is list() or sorted() (but not iter()
or for...in!). Special contexts that apply to both: list(), sorted(), tuple()
set(), any(), all(), sum().

Note: iter(d.keys()) could be written as iter(d) but since the
original d.iterkeys() was also redundant we don't fix this. And there
are (rare) contexts where it makes a difference (e.g. when passing it
as an argument to a function that introspects the argument).
"""

# Local imports
from .. import pytree
from .. import patcomp
from .. import fixer_base
from ..fixer_util import Name, Call, Dot
from .. import fixer_util


iter_exempt = fixer_util.consuming_calls | {"iter"}


class FixDict(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< head=any+
         trailer< '.' method=('keys'|'items'|'values'|
                              'iterkeys'|'iteritems'|'itervalues'|
                              'viewkeys'|'viewitems'|'viewvalues') >
         parens=trailer< '(' ')' >
         tail=any*
    >
    """

    def transform(self, node, results):
        head = results["head"]
        method = results["method"][0]  # Extract node for method name
        tail = results["tail"]
        syms = self.syms
        method_name = method.value
        isiter = method_name.startswith("iter")
        isview = method_name.startswith("view")
        if isiter or isview:
            method_name = method_name[4:]
        assert method_name in ("keys", "items", "values"), repr(method)
        head = [n.clone() for n in head]
        tail = [n.clone() for n in tail]
        special = not tail and self.in_special_context(node, isiter)
        args = head + [pytree.Node(syms.trailer,
                                   [Dot(),
                                    Name(method_name,
                                         prefix=method.prefix)]),
                       results["parens"].clone()]
        new = pytree.Node(syms.power, args)
        if not (special or isview):
            new.prefix = ""
            new = Call(Name("iter" if isiter else "list"), [new])
        if tail:
            new = pytree.Node(syms.power, [new] + tail)
        new.prefix = node.prefix
        return new

    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node, isiter):
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
               self.p1.match(node.parent.parent, results) and
               results["node"] is node):
            if isiter:
                # iter(d.iterkeys()) -> iter(d.keys()), etc.
                return results["func"].value in iter_exempt
            else:
                # list(d.keys()) -> list(d.keys()), etc.
                return results["func"].value in fixer_util.consuming_calls
        if not isiter:
            return False
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node

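Aside: a quick illustration of the context-sensitivity described in the fix_dict
docstring above, driven through lib2to3's RefactoringTool. A sketch only; the input
strings are illustrative:

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_dict"])
    # In a for-loop context the wrapping iter()/list() is dropped:
    print(rt.refactor_string("for k in d.iterkeys(): pass\n", "<ex>"))
    # expected: for k in d.keys(): pass
    # In an assignment it is not, so keys() gets wrapped in list():
    print(rt.refactor_string("ks = d.keys()\n", "<ex>"))
    # expected: ks = list(d.keys())
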
93  third_party/python/Lib/lib2to3/fixes/fix_except.py  vendored  Normal file
@@ -0,0 +1,93 @@
"""Fixer for except statements with named exceptions.

The following cases will be converted:

- "except E, T:" where T is a name:

    except E as T:

- "except E, T:" where T is not a name, tuple or list:

        except E as t:
            T = t

    This is done because the target of an "except" clause must be a
    name.

- "except E, T:" where T is a tuple or list literal:

        except E as t:
            T = t.args
"""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms

def find_excepts(nodes):
    for i, n in enumerate(nodes):
        if n.type == syms.except_clause:
            if n.children[0].value == 'except':
                yield (n, nodes[i+2])

class FixExcept(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    try_stmt< 'try' ':' (simple_stmt | suite)
                  cleanup=(except_clause ':' (simple_stmt | suite))+
                  tail=(['except' ':' (simple_stmt | suite)]
                        ['else' ':' (simple_stmt | suite)]
                        ['finally' ':' (simple_stmt | suite)]) >
    """

    def transform(self, node, results):
        syms = self.syms

        tail = [n.clone() for n in results["tail"]]

        try_cleanup = [ch.clone() for ch in results["cleanup"]]
        for except_clause, e_suite in find_excepts(try_cleanup):
            if len(except_clause.children) == 4:
                (E, comma, N) = except_clause.children[1:4]
                comma.replace(Name("as", prefix=" "))

                if N.type != token.NAME:
                    # Generate a new N for the except clause
                    new_N = Name(self.new_name(), prefix=" ")
                    target = N.clone()
                    target.prefix = ""
                    N.replace(new_N)
                    new_N = new_N.clone()

                    # Insert "old_N = new_N" as the first statement in
                    # the except body. This loop skips leading whitespace
                    # and indents
                    #TODO(cwinter) suite-cleanup
                    suite_stmts = e_suite.children
                    for i, stmt in enumerate(suite_stmts):
                        if isinstance(stmt, pytree.Node):
                            break

                    # The assignment is different if old_N is a tuple or list
                    # In that case, the assignment is old_N = new_N.args
                    if is_tuple(N) or is_list(N):
                        assign = Assign(target, Attr(new_N, Name('args')))
                    else:
                        assign = Assign(target, new_N)

                    #TODO(cwinter) stopgap until children becomes a smart list
                    for child in reversed(suite_stmts[:i]):
                        e_suite.insert_child(0, child)
                    e_suite.insert_child(i, assign)
                elif N.prefix == "":
                    # No space after a comma is legal; no space after "as",
                    # not so much.
                    N.prefix = " "

        #TODO(cwinter) fix this when children becomes a smart list
        children = [c.clone() for c in node.children[:3]] + try_cleanup + tail
        return pytree.Node(node.type, children)

39  third_party/python/Lib/lib2to3/fixes/fix_exec.py  vendored  Normal file
@@ -0,0 +1,39 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for exec.

This converts usages of the exec statement into calls to a built-in
exec() function.

exec code in ns1, ns2 -> exec(code, ns1, ns2)
"""

# Local imports
from .. import fixer_base
from ..fixer_util import Comma, Name, Call


class FixExec(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
    |
    exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any >
    """

    def transform(self, node, results):
        assert results
        syms = self.syms
        a = results["a"]
        b = results.get("b")
        c = results.get("c")
        args = [a.clone()]
        args[0].prefix = ""
        if b is not None:
            args.extend([Comma(), b.clone()])
        if c is not None:
            args.extend([Comma(), c.clone()])

        return Call(Name("exec"), args, prefix=node.prefix)

53  third_party/python/Lib/lib2to3/fixes/fix_execfile.py  vendored  Normal file
@@ -0,0 +1,53 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for execfile.

This converts usages of the execfile function into calls to the built-in
exec() function.
"""

from .. import fixer_base
from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
                          ArgList, String, syms)


class FixExecfile(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
    |
    power< 'execfile' trailer< '(' filename=any ')' > >
    """

    def transform(self, node, results):
        assert results
        filename = results["filename"]
        globals = results.get("globals")
        locals = results.get("locals")

        # Copy over the prefix from the right parentheses end of the execfile
        # call.
        execfile_paren = node.children[-1].children[-1].clone()
        # Construct open().read().
        open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')],
                            rparen=execfile_paren)
        open_call = Node(syms.power, [Name("open"), open_args])
        read = [Node(syms.trailer, [Dot(), Name('read')]),
                Node(syms.trailer, [LParen(), RParen()])]
        open_expr = [open_call] + read
        # Wrap the open call in a compile call. This is so the filename will be
        # preserved in the execed code.
        filename_arg = filename.clone()
        filename_arg.prefix = " "
        exec_str = String("'exec'", " ")
        compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
        compile_call = Call(Name("compile"), compile_args, "")
        # Finally, replace the execfile call with an exec call.
        args = [compile_call]
        if globals is not None:
            args.extend([Comma(), globals.clone()])
        if locals is not None:
            args.extend([Comma(), locals.clone()])
        return Call(Name("exec"), args, prefix=node.prefix)

72  third_party/python/Lib/lib2to3/fixes/fix_exitfunc.py  vendored  Normal file
@@ -0,0 +1,72 @@
"""
Convert use of sys.exitfunc to use the atexit module.
"""

# Author: Benjamin Peterson

from lib2to3 import pytree, fixer_base
from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms


class FixExitfunc(fixer_base.BaseFix):
    keep_line_order = True
    BM_compatible = True

    PATTERN = """
              (
                  sys_import=import_name<'import'
                      ('sys'
                      |
                      dotted_as_names< (any ',')* 'sys' (',' any)* >
                      )
                  >
              |
                  expr_stmt<
                      power< 'sys' trailer< '.' 'exitfunc' > >
                  '=' func=any >
              )
              """

    def __init__(self, *args):
        super(FixExitfunc, self).__init__(*args)

    def start_tree(self, tree, filename):
        super(FixExitfunc, self).start_tree(tree, filename)
        self.sys_import = None

    def transform(self, node, results):
        # First, find the sys import. We'll just hope it's global scope.
        if "sys_import" in results:
            if self.sys_import is None:
                self.sys_import = results["sys_import"]
            return

        func = results["func"].clone()
        func.prefix = ""
        register = pytree.Node(syms.power,
                               Attr(Name("atexit"), Name("register"))
                               )
        call = Call(register, [func], node.prefix)
        node.replace(call)

        if self.sys_import is None:
            # That's interesting.
            self.warning(node, "Can't find sys import; Please add an atexit "
                               "import at the top of your file.")
            return

        # Now add an atexit import after the sys import.
        names = self.sys_import.children[1]
        if names.type == syms.dotted_as_names:
            names.append_child(Comma())
            names.append_child(Name("atexit", " "))
        else:
            containing_stmt = self.sys_import.parent
            position = containing_stmt.children.index(self.sys_import)
            stmt_container = containing_stmt.parent
            new_import = pytree.Node(syms.import_name,
                              [Name("import"), Name("atexit", " ")]
                              )
            new = pytree.Node(syms.simple_stmt, [new_import])
            containing_stmt.insert_child(position + 1, Newline())
            containing_stmt.insert_child(position + 2, new)

90  third_party/python/Lib/lib2to3/fixes/fix_filter.py  vendored  Normal file
@@ -0,0 +1,90 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes filter(F, X) into list(filter(F, X)).

We avoid the transformation if the filter() call is directly contained
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
for V in <>:.

NOTE: This is still not correct if the original code was depending on
filter(F, X) to return a string if X is a string and a tuple if X is a
tuple. That would require type inference, which we don't do. Let
Python 2.6 figure it out.
"""

# Local imports
from .. import fixer_base
from ..pytree import Node
from ..pygram import python_symbols as syms
from ..fixer_util import Name, ArgList, ListComp, in_special_context


class FixFilter(fixer_base.ConditionalFix):
    BM_compatible = True

    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        if self.should_skip(node):
            return

        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if "filter_lambda" in results:
            new = ListComp(results.get("fp").clone(),
                           results.get("fp").clone(),
                           results.get("it").clone(),
                           results.get("xp").clone())
            new = Node(syms.power, [new] + trailers, prefix="")

        elif "none" in results:
            new = ListComp(Name("_f"),
                           Name("_f"),
                           results["seq"].clone(),
                           Name("_f"))
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if in_special_context(node):
                return None

            args = results['args'].clone()
            new = Node(syms.power, [Name("filter"), args], prefix="")
            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""
        new.prefix = node.prefix
        return new

21  third_party/python/Lib/lib2to3/fixes/fix_funcattrs.py  vendored  Normal file
@@ -0,0 +1,21 @@
"""Fix function attribute names (f.func_x -> f.__x__)."""
# Author: Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixFuncattrs(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
                                  | 'func_name' | 'func_defaults' | 'func_code'
                                  | 'func_dict') > any* >
    """

    def transform(self, node, results):
        attr = results["attr"][0]
        attr.replace(Name(("__%s__" % attr.value[5:]),
                          prefix=attr.prefix))

22  third_party/python/Lib/lib2to3/fixes/fix_future.py  vendored  Normal file
@@ -0,0 +1,22 @@
"""Remove __future__ imports

from __future__ import foo is replaced with an empty line.
"""
# Author: Christian Heimes

# Local imports
from .. import fixer_base
from ..fixer_util import BlankLine

class FixFuture(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""

    # This should be run last -- some things check for the import
    run_order = 10

    def transform(self, node, results):
        new = BlankLine()
        new.prefix = node.prefix
        return new

19  third_party/python/Lib/lib2to3/fixes/fix_getcwdu.py  vendored  Normal file
@@ -0,0 +1,19 @@
"""
Fixer that changes os.getcwdu() to os.getcwd().
"""
# Author: Victor Stinner

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixGetcwdu(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
              power< 'os' trailer< dot='.' name='getcwdu' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        name.replace(Name("getcwd", prefix=name.prefix))

109  third_party/python/Lib/lib2to3/fixes/fix_has_key.py  vendored  Normal file
@@ -0,0 +1,109 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for has_key().

Calls to .has_key() methods are expressed in terms of the 'in'
operator:

    d.has_key(k) -> k in d

CAVEATS:
1) While the primary target of this fixer is dict.has_key(), the
   fixer will change any has_key() method call, regardless of its
   class.

2) Cases like this will not be converted:

    m = d.has_key
    if m(k):
        ...

   Only *calls* to has_key() are converted. While it is possible to
   convert the above to something like

    m = d.__contains__
    if m(k):
        ...

   this is currently not done.
"""

# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Name, parenthesize


class FixHasKey(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    anchor=power<
        before=any+
        trailer< '.' 'has_key' >
        trailer<
            '('
            ( not(arglist | argument<any '=' any>) arg=any
            | arglist<(not argument<any '=' any>) arg=any ','>
            )
            ')'
        >
        after=any*
    >
    |
    negation=not_test<
        'not'
        anchor=power<
            before=any+
            trailer< '.' 'has_key' >
            trailer<
                '('
                ( not(arglist | argument<any '=' any>) arg=any
                | arglist<(not argument<any '=' any>) arg=any ','>
                )
                ')'
            >
        >
    >
    """

    def transform(self, node, results):
        assert results
        syms = self.syms
        if (node.parent.type == syms.not_test and
            self.pattern.match(node.parent)):
            # Don't transform a node matching the first alternative of the
            # pattern when its parent matches the second alternative
            return None
        negation = results.get("negation")
        anchor = results["anchor"]
        prefix = node.prefix
        before = [n.clone() for n in results["before"]]
        arg = results["arg"].clone()
        after = results.get("after")
        if after:
            after = [n.clone() for n in after]
        if arg.type in (syms.comparison, syms.not_test, syms.and_test,
                        syms.or_test, syms.test, syms.lambdef, syms.argument):
            arg = parenthesize(arg)
        if len(before) == 1:
            before = before[0]
        else:
            before = pytree.Node(syms.power, before)
        before.prefix = " "
        n_op = Name("in", prefix=" ")
        if negation:
            n_not = Name("not", prefix=" ")
            n_op = pytree.Node(syms.comp_op, (n_not, n_op))
        new = pytree.Node(syms.comparison, (arg, n_op, before))
        if after:
            new = parenthesize(new)
            new = pytree.Node(syms.power, (new,) + tuple(after))
        if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr,
                                syms.and_expr, syms.shift_expr,
                                syms.arith_expr, syms.term,
                                syms.factor, syms.power):
            new = parenthesize(new)
        new.prefix = prefix
        return new

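Aside: the two pattern alternatives above map directly onto 'in' and 'not in'. A
short sketch of both, with illustrative input only:

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_has_key"])
    print(rt.refactor_string("if d.has_key(k): pass\n", "<ex>"))
    # expected: if k in d: pass
    print(rt.refactor_string("if not d.has_key(k): pass\n", "<ex>"))
    # expected: if k not in d: pass
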
152  third_party/python/Lib/lib2to3/fixes/fix_idioms.py  vendored  Normal file
@@ -0,0 +1,152 @@
"""Adjust some old Python 2 idioms to their modern counterparts.

* Change some type comparisons to isinstance() calls:
    type(x) == T -> isinstance(x, T)
    type(x) is T -> isinstance(x, T)
    type(x) != T -> not isinstance(x, T)
    type(x) is not T -> not isinstance(x, T)

* Change "while 1:" into "while True:".

* Change both

    v = list(EXPR)
    v.sort()
    foo(v)

and the more general

    v = EXPR
    v.sort()
    foo(v)

into

    v = sorted(EXPR)
    foo(v)
"""
# Author: Jacques Frechet, Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms

CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >"

class FixIdioms(fixer_base.BaseFix):
    explicit = True  # The user must ask for this fixer

    PATTERN = r"""
        isinstance=comparison< %s %s T=any >
        |
        isinstance=comparison< T=any %s %s >
        |
        while_stmt< 'while' while='1' ':' any+ >
        |
        sorted=any<
            any*
            simple_stmt<
              expr_stmt< id1=any '='
                         power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
              >
              '\n'
            >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
        |
        sorted=any<
            any*
            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
    """ % (TYPE, CMP, CMP, TYPE)

    def match(self, node):
        r = super(FixIdioms, self).match(node)
        # If we've matched one of the sort/sorted subpatterns above, we
        # want to reject matches where the initial assignment and the
        # subsequent .sort() call involve different identifiers.
        if r and "sorted" in r:
            if r["id1"] == r["id2"]:
                return r
            return None
        return r

    def transform(self, node, results):
        if "isinstance" in results:
            return self.transform_isinstance(node, results)
        elif "while" in results:
            return self.transform_while(node, results)
        elif "sorted" in results:
            return self.transform_sort(node, results)
        else:
            raise RuntimeError("Invalid match")

    def transform_isinstance(self, node, results):
        x = results["x"].clone() # The thing inside of type()
        T = results["T"].clone() # The type being compared against
        x.prefix = ""
        T.prefix = " "
        test = Call(Name("isinstance"), [x, Comma(), T])
        if "n" in results:
            test.prefix = " "
            test = Node(syms.not_test, [Name("not"), test])
        test.prefix = node.prefix
        return test

    def transform_while(self, node, results):
        one = results["while"]
        one.replace(Name("True", prefix=one.prefix))

    def transform_sort(self, node, results):
        sort_stmt = results["sort"]
        next_stmt = results["next"]
        list_call = results.get("list")
        simple_expr = results.get("expr")

        if list_call:
            list_call.replace(Name("sorted", prefix=list_call.prefix))
        elif simple_expr:
            new = simple_expr.clone()
            new.prefix = ""
            simple_expr.replace(Call(Name("sorted"), [new],
                                     prefix=simple_expr.prefix))
        else:
            raise RuntimeError("should not have reached here")
        sort_stmt.remove()

        btwn = sort_stmt.prefix
        # Keep any prefix lines between the sort_stmt and the list_call and
        # shove them right after the sorted() call.
        if "\n" in btwn:
            if next_stmt:
                # The new prefix should be everything from the sort_stmt's
                # prefix up to the last newline, then the old prefix after a new
                # line.
                prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix)
                next_stmt[0].prefix = "\n".join(prefix_lines)
            else:
                assert list_call.parent
                assert list_call.next_sibling is None
                # Put a blank line after list_call and set its prefix.
                end_line = BlankLine()
                list_call.parent.append_child(end_line)
                assert list_call.next_sibling is end_line
                # The new prefix should be everything up to the first new line
                # of sort_stmt's prefix.
                end_line.prefix = btwn.rpartition("\n")[0]

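Aside: because FixIdioms sets explicit = True, it is skipped unless asked for by
name. A sketch of opting in through RefactoringTool's explicit= parameter (this
wiring and the input are illustrative, not part of the vendored sources):

    from lib2to3.refactor import RefactoringTool

    fixer = "lib2to3.fixes.fix_idioms"
    rt = RefactoringTool([fixer], explicit=[fixer])
    src = "v = list(t)\nv.sort()\nfoo(v)\n"
    print(rt.refactor_string(src, "<ex>"))
    # expected: v = sorted(t)
    #           foo(v)
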
99  third_party/python/Lib/lib2to3/fixes/fix_import.py  vendored  Normal file
@@ -0,0 +1,99 @@
"""Fixer for import statements.
If spam is being imported from the local directory, this import:
    from spam import eggs
Becomes:
    from .spam import eggs

And this import:
    import spam
Becomes:
    from . import spam
"""

# Local imports
from .. import fixer_base
from os.path import dirname, join, exists, sep
from ..fixer_util import FromImport, syms, token


def traverse_imports(names):
    """
    Walks over all the names imported in a dotted_as_names node.
    """
    pending = [names]
    while pending:
        node = pending.pop()
        if node.type == token.NAME:
            yield node.value
        elif node.type == syms.dotted_name:
            yield "".join([ch.value for ch in node.children])
        elif node.type == syms.dotted_as_name:
            pending.append(node.children[0])
        elif node.type == syms.dotted_as_names:
            pending.extend(node.children[::-2])
        else:
            raise AssertionError("unknown node type")


class FixImport(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    import_from< 'from' imp=any 'import' ['('] any [')'] >
    |
    import_name< 'import' imp=any >
    """

    def start_tree(self, tree, name):
        super(FixImport, self).start_tree(tree, name)
        self.skip = "absolute_import" in tree.future_features

    def transform(self, node, results):
        if self.skip:
            return
        imp = results['imp']

        if node.type == syms.import_from:
            # Some imps are top-level (eg: 'import ham')
            # some are first level (eg: 'import ham.eggs')
            # some are third level (eg: 'import ham.eggs as spam')
            # Hence, the loop
            while not hasattr(imp, 'value'):
                imp = imp.children[0]
            if self.probably_a_local_import(imp.value):
                imp.value = "." + imp.value
                imp.changed()
        else:
            have_local = False
            have_absolute = False
            for mod_name in traverse_imports(imp):
                if self.probably_a_local_import(mod_name):
                    have_local = True
                else:
                    have_absolute = True
            if have_absolute:
                if have_local:
                    # We won't handle both sibling and absolute imports in the
                    # same statement at the moment.
                    self.warning(node, "absolute and local imports together")
                return

            new = FromImport(".", [imp])
            new.prefix = node.prefix
            return new

    def probably_a_local_import(self, imp_name):
        if imp_name.startswith("."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file its not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
            if exists(base_path + ext):
                return True
        return False

145  third_party/python/Lib/lib2to3/fixes/fix_imports.py  vendored  Normal file
@@ -0,0 +1,145 @@
"""Fix incompatible imports and module references."""
# Authors: Collin Winter, Nick Edds

# Local imports
from .. import fixer_base
from ..fixer_util import Name, attr_chain

MAPPING = {'StringIO': 'io',
           'cStringIO': 'io',
           'cPickle': 'pickle',
           '__builtin__' : 'builtins',
           'copy_reg': 'copyreg',
           'Queue': 'queue',
           'SocketServer': 'socketserver',
           'ConfigParser': 'configparser',
           'repr': 'reprlib',
           'FileDialog': 'tkinter.filedialog',
           'tkFileDialog': 'tkinter.filedialog',
           'SimpleDialog': 'tkinter.simpledialog',
           'tkSimpleDialog': 'tkinter.simpledialog',
           'tkColorChooser': 'tkinter.colorchooser',
           'tkCommonDialog': 'tkinter.commondialog',
           'Dialog': 'tkinter.dialog',
           'Tkdnd': 'tkinter.dnd',
           'tkFont': 'tkinter.font',
           'tkMessageBox': 'tkinter.messagebox',
           'ScrolledText': 'tkinter.scrolledtext',
           'Tkconstants': 'tkinter.constants',
           'Tix': 'tkinter.tix',
           'ttk': 'tkinter.ttk',
           'Tkinter': 'tkinter',
           'markupbase': '_markupbase',
           '_winreg': 'winreg',
           'thread': '_thread',
           'dummy_thread': '_dummy_thread',
           # anydbm and whichdb are handled by fix_imports2
           'dbhash': 'dbm.bsd',
           'dumbdbm': 'dbm.dumb',
           'dbm': 'dbm.ndbm',
           'gdbm': 'dbm.gnu',
           'xmlrpclib': 'xmlrpc.client',
           'DocXMLRPCServer': 'xmlrpc.server',
           'SimpleXMLRPCServer': 'xmlrpc.server',
           'httplib': 'http.client',
           'htmlentitydefs' : 'html.entities',
           'HTMLParser' : 'html.parser',
           'Cookie': 'http.cookies',
           'cookielib': 'http.cookiejar',
           'BaseHTTPServer': 'http.server',
           'SimpleHTTPServer': 'http.server',
           'CGIHTTPServer': 'http.server',
           #'test.test_support': 'test.support',
           'commands': 'subprocess',
           'UserString' : 'collections',
           'UserList' : 'collections',
           'urlparse' : 'urllib.parse',
           'robotparser' : 'urllib.robotparser',
}


def alternates(members):
    return "(" + "|".join(map(repr, members)) + ")"


def build_pattern(mapping=MAPPING):
    mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
    bare_names = alternates(mapping.keys())

    yield """name_import=import_name< 'import' ((%s) |
               multiple_imports=dotted_as_names< any* (%s) any* >) >
          """ % (mod_list, mod_list)
    yield """import_from< 'from' (%s) 'import' ['(']
              ( any | import_as_name< any 'as' any > |
                import_as_names< any* >) [')'] >
          """ % mod_list
    yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
               multiple_imports=dotted_as_names<
                 any* dotted_as_name< (%s) 'as' any > any* >) >
          """ % (mod_list, mod_list)

    # Find usages of module members in code e.g. thread.foo(bar)
    yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names


class FixImports(fixer_base.BaseFix):

    BM_compatible = True
    keep_line_order = True
    # This is overridden in fix_imports2.
    mapping = MAPPING

    # We want to run this fixer late, so fix_import doesn't try to make stdlib
    # renames into relative imports.
    run_order = 6

    def build_pattern(self):
        return "|".join(build_pattern(self.mapping))

    def compile_pattern(self):
        # We override this, so MAPPING can be pragmatically altered and the
        # changes will be reflected in PATTERN.
        self.PATTERN = self.build_pattern()
        super(FixImports, self).compile_pattern()

    # Don't match the node if it's within another match.
    def match(self, node):
        match = super(FixImports, self).match
        results = match(node)
        if results:
            # Module usage could be in the trailer of an attribute lookup, so we
            # might have nested matches when "bare_with_attr" is present.
            if "bare_with_attr" not in results and \
                    any(match(obj) for obj in attr_chain(node, "parent")):
                return False
            return results
        return False

    def start_tree(self, tree, filename):
        super(FixImports, self).start_tree(tree, filename)
        self.replace = {}

    def transform(self, node, results):
        import_mod = results.get("module_name")
        if import_mod:
            mod_name = import_mod.value
            new_name = self.mapping[mod_name]
            import_mod.replace(Name(new_name, prefix=import_mod.prefix))
            if "name_import" in results:
                # If it's not a "from x import x, y" or "import x as y" import,
                # marked its usage to be replaced.
                self.replace[mod_name] = new_name
            if "multiple_imports" in results:
                # This is a nasty hack to fix multiple imports on a line (e.g.,
                # "import StringIO, urlparse"). The problem is that I can't
                # figure out an easy way to make a pattern recognize the keys of
                # MAPPING randomly sprinkled in an import statement.
                results = self.match(node)
                if results:
                    self.transform(node, results)
        else:
            # Replace usage of the module.
            bare_name = results["bare_with_attr"][0]
            new_name = self.replace.get(bare_name.value)
            if new_name:
                bare_name.replace(Name(new_name, prefix=bare_name.prefix))

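Aside: a sketch of the module-rename mapping in action, covering both the import
statement and a bare module reference on the following line (illustrative input):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_imports"])
    src = "import urlparse\nu = urlparse.urljoin(a, b)\n"
    print(rt.refactor_string(src, "<ex>"))
    # expected: import urllib.parse
    #           u = urllib.parse.urljoin(a, b)
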
16  third_party/python/Lib/lib2to3/fixes/fix_imports2.py  vendored  Normal file
@@ -0,0 +1,16 @@
"""Fix incompatible imports and module references that must be fixed after
fix_imports."""
from . import fix_imports


MAPPING = {
            'whichdb': 'dbm',
            'anydbm': 'dbm',
          }


class FixImports2(fix_imports.FixImports):

    run_order = 7

    mapping = MAPPING

26  third_party/python/Lib/lib2to3/fixes/fix_input.py  vendored  Normal file
@@ -0,0 +1,26 @@
"""Fixer that changes input(...) into eval(input(...))."""
# Author: Andre Roberge

# Local imports
from .. import fixer_base
from ..fixer_util import Call, Name
from .. import patcomp


context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")


class FixInput(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              power< 'input' args=trailer< '(' [any] ')' > >
              """

    def transform(self, node, results):
        # If we're already wrapped in an eval() call, we're done.
        if context.match(node.parent.parent):
            return

        new = node.clone()
        new.prefix = ""
        return Call(Name("eval"), [new], prefix=node.prefix)

41  third_party/python/Lib/lib2to3/fixes/fix_intern.py  vendored  Normal file
@@ -0,0 +1,41 @@
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for intern().

intern(s) -> sys.intern(s)"""

# Local imports
from .. import fixer_base
from ..fixer_util import ImportAndCall, touch_import


class FixIntern(fixer_base.BaseFix):
    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'intern'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        if results:
            # I feel like we should be able to express this logic in the
            # PATTERN above but I don't know how to do it so...
            obj = results['obj']
            if obj:
                if obj.type == self.syms.star_expr:
                    return  # Make no change.
                if (obj.type == self.syms.argument and
                    obj.children[0].value == '**'):
                    return  # Make no change.
        names = ('sys', 'intern')
        new = ImportAndCall(node, results, names)
        touch_import(None, 'sys', node)
        return new

52  third_party/python/Lib/lib2to3/fixes/fix_isinstance.py  vendored  Normal file
@@ -0,0 +1,52 @@
# Copyright 2008 Armin Ronacher.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that cleans up a tuple argument to isinstance after the tokens
in it were fixed. This is mainly used to remove double occurrences of
tokens as a leftover of the long -> int / unicode -> str conversion.

eg. isinstance(x, (int, long)) -> isinstance(x, (int, int))
      -> isinstance(x, int)
"""

from .. import fixer_base
from ..fixer_util import token


class FixIsinstance(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power<
        'isinstance'
        trailer< '(' arglist< any ',' atom< '('
            args=testlist_gexp< any+ >
        ')' > > ')' >
    >
    """

    run_order = 6

    def transform(self, node, results):
        names_inserted = set()
        testlist = results["args"]
        args = testlist.children
        new_args = []
        iterator = enumerate(args)
        for idx, arg in iterator:
            if arg.type == token.NAME and arg.value in names_inserted:
                if idx < len(args) - 1 and args[idx + 1].type == token.COMMA:
                    next(iterator)
                    continue
            else:
                new_args.append(arg)
                if arg.type == token.NAME:
                    names_inserted.add(arg.value)
        if new_args and new_args[-1].type == token.COMMA:
            del new_args[-1]
        if len(new_args) == 1:
            atom = testlist.parent
            new_args[0].prefix = atom.prefix
            atom.replace(new_args[0])
        else:
            args[:] = new_args
            node.changed()

43  third_party/python/Lib/lib2to3/fixes/fix_itertools.py  vendored  Normal file
@@ -0,0 +1,43 @@
""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
    itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)

    imports from itertools are fixed in fix_itertools_import.py

    If itertools is imported as something else (ie: import itertools as it;
    it.izip(spam, eggs)) method calls will not get fixed.
    """

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixItertools(fixer_base.BaseFix):
    BM_compatible = True
    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    PATTERN = """
              power< it='itertools'
                  trailer<
                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
              |
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())

    # Needs to be run after fix_(map|zip|filter)
    run_order = 6

    def transform(self, node, results):
        prefix = None
        func = results['func'][0]
        if ('it' in results and
            func.value not in ('ifilterfalse', 'izip_longest')):
            dot, it = (results['dot'], results['it'])
            # Remove the 'itertools'
            prefix = it.prefix
            it.remove()
            # Replace the node which contains ('.', 'function') with the
            # function (to be consistent with the second part of the pattern)
            dot.remove()
            func.parent.replace(func)

        prefix = prefix or func.prefix
        func.replace(Name(func.value[1:], prefix=prefix))

57  third_party/python/Lib/lib2to3/fixes/fix_itertools_imports.py  vendored  Normal file
@@ -0,0 +1,57 @@
""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import BlankLine, syms, token


class FixItertoolsImports(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              import_from< 'from' 'itertools' 'import' imports=any >
              """ %(locals())

    def transform(self, node, results):
        imports = results['imports']
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        for child in children[::2]:
            if child.type == token.NAME:
                member = child.value
                name_node = child
            elif child.type == token.STAR:
                # Just leave the import as is.
                return
            else:
                assert child.type == syms.import_as_name
                name_node = child.children[0]
            member_name = name_node.value
            if member_name in ('imap', 'izip', 'ifilter'):
                child.value = None
                child.remove()
            elif member_name in ('ifilterfalse', 'izip_longest'):
                node.changed()
                name_node.value = ('filterfalse' if member_name[1] == 'f'
                                   else 'zip_longest')

        # Make sure the import statement is still sane
        children = imports.children[:] or [imports]
        remove_comma = True
        for child in children:
            if remove_comma and child.type == token.COMMA:
                child.remove()
            else:
                remove_comma ^= True

        while children and children[-1].type == token.COMMA:
            children.pop().remove()

        # If there are no imports left, just get rid of the entire statement
        if (not (imports.children or getattr(imports, 'value', None)) or
            imports.parent is None):
            p = node.prefix
            node = BlankLine()
            node.prefix = p
            return node

19  third_party/python/Lib/lib2to3/fixes/fix_long.py  vendored  Normal file
@@ -0,0 +1,19 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that turns 'long' into 'int' everywhere.
"""

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import is_probably_builtin


class FixLong(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "'long'"

    def transform(self, node, results):
        if is_probably_builtin(node):
            node.value = "int"
            node.changed()

110  third_party/python/Lib/lib2to3/fixes/fix_map.py  vendored  Normal file
@@ -0,0 +1,110 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
exists a 'from future_builtins import map' statement in the top-level
namespace.

As a special case, map(None, X) is changed into list(X). (This is
necessary because the semantics are changed in this case -- the new
map(None, X) is equivalent to [(x,) for x in X].)

We avoid the transformation (except for the special case mentioned
above) if the map() call is directly contained in iter(<>), list(<>),
tuple(<>), sorted(<>), ...join(<>), or for V in <>:.

NOTE: This is still not correct if the original code was depending on
map(F, X, Y, ...) to go on until the longest argument is exhausted,
substituting None for missing values -- like zip(), it now stops as
soon as the shortest argument is exhausted.
"""

# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, ArgList, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms
from ..pytree import Node


class FixMap(fixer_base.ConditionalFix):
    BM_compatible = True

    PATTERN = """
    map_none=power<
        'map'
        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
        [extra_trailers=trailer*]
    >
    |
    map_lambda=power<
        'map'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'map' args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    skip_on = 'future_builtins.map'

    def transform(self, node, results):
        if self.should_skip(node):
            return

        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if node.parent.type == syms.simple_stmt:
            self.warning(node, "You should use a for loop here")
            new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        elif "map_lambda" in results:
            new = ListComp(results["xp"].clone(),
                           results["fp"].clone(),
                           results["it"].clone())
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if "map_none" in results:
                new = results["arg"].clone()
                new.prefix = ""
            else:
                if "args" in results:
                    args = results["args"]
                    if args.type == syms.trailer and \
                       args.children[1].type == syms.arglist and \
                       args.children[1].children[0].type == token.NAME and \
                       args.children[1].children[0].value == "None":
                        self.warning(node, "cannot convert map(None, ...) "
                                     "with multiple arguments because map() "
                                     "now truncates to the shortest sequence")
                        return

                    new = Node(syms.power, [Name("map"), args.clone()])
                    new.prefix = ""

                if in_special_context(node):
                    return None

            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""

        new.prefix = node.prefix
        return new

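Aside: a sketch of the two behaviours described in the fix_map docstring, the
general list() wrapping and the map(None, X) special case (illustrative input only):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_map"])
    print(rt.refactor_string("ys = map(f, xs)\n", "<ex>"))
    # expected: ys = list(map(f, xs))
    print(rt.refactor_string("ys = map(None, xs)\n", "<ex>"))
    # expected: ys = list(xs)
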
228
third_party/python/Lib/lib2to3/fixes/fix_metaclass.py
vendored
Normal file
@@ -0,0 +1,228 @@
"""Fixer for __metaclass__ = X -> (metaclass=X) methods.

   The various forms of classdef (inherits nothing, inherits once, inherits
   many) don't parse the same in the CST so we look at ALL classes for
   a __metaclass__ and if we find one normalize the inherits to all be
   an arglist.

   For one-liner classes ('class X: pass') there is no indent/dedent so
   we normalize those into having a suite.

   Moving the __metaclass__ into the classdef can also cause the class
   body to be empty so there is some special casing for that as well.

   This fixer also tries very hard to keep original indenting and spacing
   in all those corner cases.

"""
# Author: Jack Diederich

# Local imports
from .. import fixer_base
from ..pygram import token
from ..fixer_util import syms, Node, Leaf


def has_metaclass(parent):
    """ we have to check the cls_node without changing it.
        There are two possibilities:
          1)  clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
          2)  clsdef => simple_stmt => expr_stmt => Leaf('__meta')
    """
    for node in parent.children:
        if node.type == syms.suite:
            return has_metaclass(node)
        elif node.type == syms.simple_stmt and node.children:
            expr_node = node.children[0]
            if expr_node.type == syms.expr_stmt and expr_node.children:
                left_side = expr_node.children[0]
                if isinstance(left_side, Leaf) and \
                        left_side.value == '__metaclass__':
                    return True
    return False


def fixup_parse_tree(cls_node):
    """ one-line classes don't get a suite in the parse tree so we add
        one to normalize the tree
    """
    for node in cls_node.children:
        if node.type == syms.suite:
            # already in the preferred format, do nothing
            return

    # !%@#! oneliners have no suite node, we have to fake one up
    for i, node in enumerate(cls_node.children):
        if node.type == token.COLON:
            break
    else:
        raise ValueError("No class suite and no ':'!")

    # move everything into a suite node
    suite = Node(syms.suite, [])
    while cls_node.children[i+1:]:
        move_node = cls_node.children[i+1]
        suite.append_child(move_node.clone())
        move_node.remove()
    cls_node.append_child(suite)
    node = suite


def fixup_simple_stmt(parent, i, stmt_node):
    """ if there is a semi-colon all the parts count as part of the same
        simple_stmt.  We just want the __metaclass__ part so we move
        everything after the semi-colon into its own simple_stmt node
    """
    for semi_ind, node in enumerate(stmt_node.children):
        if node.type == token.SEMI: # *sigh*
            break
    else:
        return

    node.remove() # kill the semicolon
    new_expr = Node(syms.expr_stmt, [])
    new_stmt = Node(syms.simple_stmt, [new_expr])
    while stmt_node.children[semi_ind:]:
        move_node = stmt_node.children[semi_ind]
        new_expr.append_child(move_node.clone())
        move_node.remove()
    parent.insert_child(i, new_stmt)
    new_leaf1 = new_stmt.children[0].children[0]
    old_leaf1 = stmt_node.children[0].children[0]
    new_leaf1.prefix = old_leaf1.prefix


def remove_trailing_newline(node):
    if node.children and node.children[-1].type == token.NEWLINE:
        node.children[-1].remove()


def find_metas(cls_node):
    # find the suite node (Mmm, sweet nodes)
    for node in cls_node.children:
        if node.type == syms.suite:
            break
    else:
        raise ValueError("No class suite!")

    # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ]
    for i, simple_node in list(enumerate(node.children)):
        if simple_node.type == syms.simple_stmt and simple_node.children:
            expr_node = simple_node.children[0]
            if expr_node.type == syms.expr_stmt and expr_node.children:
                # Check if the expr_node is a simple assignment.
                left_node = expr_node.children[0]
                if isinstance(left_node, Leaf) and \
                        left_node.value == '__metaclass__':
                    # We found an assignment to __metaclass__.
                    fixup_simple_stmt(node, i, simple_node)
                    remove_trailing_newline(simple_node)
                    yield (node, i, simple_node)


def fixup_indent(suite):
    """ If an INDENT is followed by a thing with a prefix then nuke the prefix
        Otherwise we get in trouble when removing __metaclass__ at suite start
    """
    kids = suite.children[::-1]
    # find the first indent
    while kids:
        node = kids.pop()
        if node.type == token.INDENT:
            break

    # find the first Leaf
    while kids:
        node = kids.pop()
        if isinstance(node, Leaf) and node.type != token.DEDENT:
            if node.prefix:
                node.prefix = ''
            return
        else:
            kids.extend(node.children[::-1])


class FixMetaclass(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    classdef<any*>
    """

    def transform(self, node, results):
        if not has_metaclass(node):
            return

        fixup_parse_tree(node)

        # find metaclasses, keep the last one
        last_metaclass = None
        for suite, i, stmt in find_metas(node):
            last_metaclass = stmt
            stmt.remove()

        text_type = node.children[0].type # always Leaf(nnn, 'class')

        # figure out what kind of classdef we have
        if len(node.children) == 7:
            # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
            #                 0        1       2    3        4    5    6
            if node.children[3].type == syms.arglist:
                arglist = node.children[3]
            # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
            else:
                parent = node.children[3].clone()
                arglist = Node(syms.arglist, [parent])
                node.set_child(3, arglist)
        elif len(node.children) == 6:
            # Node(classdef, ['class', 'name', '(',  ')', ':', suite])
            #                 0        1       2     3    4    5
            arglist = Node(syms.arglist, [])
            node.insert_child(3, arglist)
        elif len(node.children) == 4:
            # Node(classdef, ['class', 'name', ':', suite])
            #                 0        1       2    3
            arglist = Node(syms.arglist, [])
            node.insert_child(2, Leaf(token.RPAR, ')'))
            node.insert_child(2, arglist)
            node.insert_child(2, Leaf(token.LPAR, '('))
        else:
            raise ValueError("Unexpected class definition")

        # now stick the metaclass in the arglist
        meta_txt = last_metaclass.children[0].children[0]
        meta_txt.value = 'metaclass'
        orig_meta_prefix = meta_txt.prefix

        if arglist.children:
            arglist.append_child(Leaf(token.COMMA, ','))
            meta_txt.prefix = ' '
        else:
            meta_txt.prefix = ''

        # compact the expression "metaclass = Meta" -> "metaclass=Meta"
        expr_stmt = last_metaclass.children[0]
        assert expr_stmt.type == syms.expr_stmt
        expr_stmt.children[1].prefix = ''
        expr_stmt.children[2].prefix = ''

        arglist.append_child(last_metaclass)

        fixup_indent(suite)

        # check for empty suite
        if not suite.children:
            # one-liner that was just __metaclass__
            suite.remove()
            pass_leaf = Leaf(text_type, 'pass')
            pass_leaf.prefix = orig_meta_prefix
            node.append_child(pass_leaf)
            node.append_child(Leaf(token.NEWLINE, '\n'))

        elif len(suite.children) > 1 and \
                 (suite.children[-2].type == token.INDENT and
                  suite.children[-1].type == token.DEDENT):
            # there was only one line in the class body and it was __metaclass__
            pass_leaf = Leaf(text_type, 'pass')
            suite.insert_child(-1, pass_leaf)
            suite.insert_child(-1, Leaf(token.NEWLINE, '\n'))

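Not part of the vendored diff: a minimal sketch of exercising fix_metaclass through lib2to3's RefactoringTool, assuming an interpreter that still ships lib2to3; Base and Meta are placeholder names.

# Run only fix_metaclass over a small Python 2 class body.
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_metaclass"])
src = "class C(Base):\n    __metaclass__ = Meta\n    x = 1\n"
print(tool.refactor_string(src, "<example>"))
# roughly: class C(Base, metaclass=Meta):
#              x = 1
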
24
third_party/python/Lib/lib2to3/fixes/fix_methodattrs.py
vendored
Normal file
@@ -0,0 +1,24 @@
"""Fix bound method attributes (method.im_? -> method.__?__).
"""
# Author: Christian Heimes

# Local imports
from .. import fixer_base
from ..fixer_util import Name

MAP = {
    "im_func" : "__func__",
    "im_self" : "__self__",
    "im_class" : "__self__.__class__"
    }

class FixMethodattrs(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
    """

    def transform(self, node, results):
        attr = results["attr"][0]
        new = MAP[attr.value]
        attr.replace(Name(new, prefix=attr.prefix))

23
third_party/python/Lib/lib2to3/fixes/fix_ne.py
vendored
Normal file
@@ -0,0 +1,23 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that turns <> into !=."""

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base


class FixNe(fixer_base.BaseFix):
    # This is so simple that we don't need the pattern compiler.

    _accept_type = token.NOTEQUAL

    def match(self, node):
        # Override
        return node.value == "<>"

    def transform(self, node, results):
        new = pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix)
        return new

103
third_party/python/Lib/lib2to3/fixes/fix_next.py
vendored
Normal file
@@ -0,0 +1,103 @@
"""Fixer for it.next() -> next(it), per PEP 3114."""
# Author: Collin Winter

# Things that currently aren't covered:
#   - listcomp "next" names aren't warned
#   - "with" statement targets aren't checked

# Local imports
from ..pgen2 import token
from ..pygram import python_symbols as syms
from .. import fixer_base
from ..fixer_util import Name, Call, find_binding

bind_warning = "Calls to builtin next() possibly shadowed by global binding"


class FixNext(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
    |
    power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
    |
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def'
                              name='next'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    |
    global=global_stmt< 'global' any* 'next' any* >
    """

    order = "pre" # Pre-order tree traversal

    def start_tree(self, tree, filename):
        super(FixNext, self).start_tree(tree, filename)

        n = find_binding('next', tree)
        if n:
            self.warning(n, bind_warning)
            self.shadowed_next = True
        else:
            self.shadowed_next = False

    def transform(self, node, results):
        assert results

        base = results.get("base")
        attr = results.get("attr")
        name = results.get("name")

        if base:
            if self.shadowed_next:
                attr.replace(Name("__next__", prefix=attr.prefix))
            else:
                base = [n.clone() for n in base]
                base[0].prefix = ""
                node.replace(Call(Name("next", prefix=node.prefix), base))
        elif name:
            n = Name("__next__", prefix=name.prefix)
            name.replace(n)
        elif attr:
            # We don't do this transformation if we're assigning to "x.next".
            # Unfortunately, it doesn't seem possible to do this in PATTERN,
            # so it's being done here.
            if is_assign_target(node):
                head = results["head"]
                if "".join([str(n) for n in head]).strip() == '__builtin__':
                    self.warning(node, bind_warning)
                return
            attr.replace(Name("__next__"))
        elif "global" in results:
            self.warning(node, bind_warning)
            self.shadowed_next = True


### The following functions help test if node is part of an assignment
###  target.

def is_assign_target(node):
    assign = find_assign(node)
    if assign is None:
        return False

    for child in assign.children:
        if child.type == token.EQUAL:
            return False
        elif is_subtree(child, node):
            return True
    return False

def find_assign(node):
    if node.type == syms.expr_stmt:
        return node
    if node.type == syms.simple_stmt or node.parent is None:
        return None
    return find_assign(node.parent)

def is_subtree(root, node):
    if root == node:
        return True
    return any(is_subtree(c, node) for c in root.children)

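A hedged usage sketch (not part of the diff): the it.next() form from the docstring above becomes next(it) when the fixer is applied via RefactoringTool.

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_next"])
print(tool.refactor_string("item = it.next()\n", "<example>"))
# roughly: item = next(it)
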
21
third_party/python/Lib/lib2to3/fixes/fix_nonzero.py
vendored
Normal file
@@ -0,0 +1,21 @@
"""Fixer for __nonzero__ -> __bool__ methods."""
# Author: Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixNonzero(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def' name='__nonzero__'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    """

    def transform(self, node, results):
        name = results["name"]
        new = Name("__bool__", prefix=name.prefix)
        name.replace(new)

28
third_party/python/Lib/lib2to3/fixes/fix_numliterals.py
vendored
Normal file
@@ -0,0 +1,28 @@
"""Fixer that turns 1L into 1, 0755 into 0o755.
"""
# Copyright 2007 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.

# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Number


class FixNumliterals(fixer_base.BaseFix):
    # This is so simple that we don't need the pattern compiler.

    _accept_type = token.NUMBER

    def match(self, node):
        # Override
        return (node.value.startswith("0") or node.value[-1] in "Ll")

    def transform(self, node, results):
        val = node.value
        if val[-1] in 'Ll':
            val = val[:-1]
        elif val.startswith('0') and val.isdigit() and len(set(val)) > 1:
            val = "0o" + val[1:]

        return Number(val, prefix=node.prefix)

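Quick sketch (not from the vendored file): octal and long literals are rewritten exactly as the docstring above describes.

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_numliterals"])
print(tool.refactor_string("x = 0755\ny = 10L\n", "<example>"))
# roughly: x = 0o755
#          y = 10
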
98
third_party/python/Lib/lib2to3/fixes/fix_operator.py
vendored
Normal file
@@ -0,0 +1,98 @@
"""Fixer for operator functions.

operator.isCallable(obj)       -> hasattr(obj, '__call__')
operator.sequenceIncludes(obj) -> operator.contains(obj)
operator.isSequenceType(obj)   -> isinstance(obj, collections.Sequence)
operator.isMappingType(obj)    -> isinstance(obj, collections.Mapping)
operator.isNumberType(obj)     -> isinstance(obj, numbers.Number)
operator.repeat(obj, n)        -> operator.mul(obj, n)
operator.irepeat(obj, n)       -> operator.imul(obj, n)
"""

import collections

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, String, touch_import


def invocation(s):
    def dec(f):
        f.invocation = s
        return f
    return dec


class FixOperator(fixer_base.BaseFix):
    BM_compatible = True
    order = "pre"

    methods = """
              method=('isCallable'|'sequenceIncludes'
                     |'isSequenceType'|'isMappingType'|'isNumberType'
                     |'repeat'|'irepeat')
              """
    obj = "'(' obj=any ')'"
    PATTERN = """
              power< module='operator'
                trailer< '.' %(methods)s > trailer< %(obj)s > >
              |
              power< %(methods)s trailer< %(obj)s > >
              """ % dict(methods=methods, obj=obj)

    def transform(self, node, results):
        method = self._check_method(node, results)
        if method is not None:
            return method(node, results)

    @invocation("operator.contains(%s)")
    def _sequenceIncludes(self, node, results):
        return self._handle_rename(node, results, "contains")

    @invocation("hasattr(%s, '__call__')")
    def _isCallable(self, node, results):
        obj = results["obj"]
        args = [obj.clone(), String(", "), String("'__call__'")]
        return Call(Name("hasattr"), args, prefix=node.prefix)

    @invocation("operator.mul(%s)")
    def _repeat(self, node, results):
        return self._handle_rename(node, results, "mul")

    @invocation("operator.imul(%s)")
    def _irepeat(self, node, results):
        return self._handle_rename(node, results, "imul")

    @invocation("isinstance(%s, collections.Sequence)")
    def _isSequenceType(self, node, results):
        return self._handle_type2abc(node, results, "collections", "Sequence")

    @invocation("isinstance(%s, collections.Mapping)")
    def _isMappingType(self, node, results):
        return self._handle_type2abc(node, results, "collections", "Mapping")

    @invocation("isinstance(%s, numbers.Number)")
    def _isNumberType(self, node, results):
        return self._handle_type2abc(node, results, "numbers", "Number")

    def _handle_rename(self, node, results, name):
        method = results["method"][0]
        method.value = name
        method.changed()

    def _handle_type2abc(self, node, results, module, abc):
        touch_import(None, module, node)
        obj = results["obj"]
        args = [obj.clone(), String(", " + ".".join([module, abc]))]
        return Call(Name("isinstance"), args, prefix=node.prefix)

    def _check_method(self, node, results):
        method = getattr(self, "_" + results["method"][0].value)
        if isinstance(method, collections.Callable):
            if "module" in results:
                return method
            else:
                sub = (str(results["obj"]),)
                invocation_str = method.invocation % sub
                self.warning(node, "You should use '%s' here." % invocation_str)
        return None

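Illustrative only (not part of the vendored file): operator.isCallable() is turned into a hasattr() check; the name f is a placeholder.

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_operator"])
print(tool.refactor_string("x = operator.isCallable(f)\n", "<example>"))
# roughly: x = hasattr(f, '__call__')
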
44
third_party/python/Lib/lib2to3/fixes/fix_paren.py
vendored
Normal file
@@ -0,0 +1,44 @@
"""Fixer that adds parentheses where they are required

This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``."""

# By Taek Joo Kim and Benjamin Peterson

# Local imports
from .. import fixer_base
from ..fixer_util import LParen, RParen

# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
class FixParen(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
        atom< ('[' | '(')
            (listmaker< any
                comp_for<
                    'for' NAME 'in'
                    target=testlist_safe< any (',' any)+ [',']
                     >
                    [any]
                >
            >
            |
            testlist_gexp< any
                comp_for<
                    'for' NAME 'in'
                    target=testlist_safe< any (',' any)+ [',']
                     >
                    [any]
                >
            >)
        (']' | ')') >
    """

    def transform(self, node, results):
        target = results["target"]

        lparen = LParen()
        lparen.prefix = target.prefix
        target.prefix = "" # Make it hug the parentheses
        target.insert_child(0, lparen)
        target.append_child(RParen())

87
third_party/python/Lib/lib2to3/fixes/fix_print.py
vendored
Normal file
@@ -0,0 +1,87 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for print.

Change:
    'print'          into 'print()'
    'print ...'      into 'print(...)'
    'print ... ,'    into 'print(..., end=" ")'
    'print >>x, ...' into 'print(..., file=x)'

No changes are applied if print_function is imported from __future__

"""

# Local imports
from .. import patcomp
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Comma, String


parend_expr = patcomp.compile_pattern(
              """atom< '(' [atom|STRING|NAME] ')' >"""
              )


class FixPrint(fixer_base.BaseFix):

    BM_compatible = True

    PATTERN = """
              simple_stmt< any* bare='print' any* > | print_stmt
              """

    def transform(self, node, results):
        assert results

        bare_print = results.get("bare")

        if bare_print:
            # Special-case print all by itself
            bare_print.replace(Call(Name("print"), [],
                               prefix=bare_print.prefix))
            return
        assert node.children[0] == Name("print")
        args = node.children[1:]
        if len(args) == 1 and parend_expr.match(args[0]):
            # We don't want to keep sticking parens around an
            # already-parenthesised expression.
            return

        sep = end = file = None
        if args and args[-1] == Comma():
            args = args[:-1]
            end = " "
        if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, ">>"):
            assert len(args) >= 2
            file = args[1].clone()
            args = args[3:] # Strip a possible comma after the file expression
        # Now synthesize a print(args, sep=..., end=..., file=...) node.
        l_args = [arg.clone() for arg in args]
        if l_args:
            l_args[0].prefix = ""
        if sep is not None or end is not None or file is not None:
            if sep is not None:
                self.add_kwarg(l_args, "sep", String(repr(sep)))
            if end is not None:
                self.add_kwarg(l_args, "end", String(repr(end)))
            if file is not None:
                self.add_kwarg(l_args, "file", file)
        n_stmt = Call(Name("print"), l_args)
        n_stmt.prefix = node.prefix
        return n_stmt

    def add_kwarg(self, l_nodes, s_kwd, n_expr):
        # XXX All this prefix-setting may lose comments (though rarely)
        n_expr.prefix = ""
        n_argument = pytree.Node(self.syms.argument,
                                 (Name(s_kwd),
                                  pytree.Leaf(token.EQUAL, "="),
                                  n_expr))
        if l_nodes:
            l_nodes.append(Comma())
            n_argument.prefix = " "
        l_nodes.append(n_argument)

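Sketch, not part of the diff: a Python 2 print statement with a stream redirect becomes a print() call with a file= keyword, as listed in the docstring above.

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_print"])
print(tool.refactor_string("print >>sys.stderr, 'oops'\n", "<example>"))
# roughly: print('oops', file=sys.stderr)
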
90
third_party/python/Lib/lib2to3/fixes/fix_raise.py
vendored
Normal file
@@ -0,0 +1,90 @@
"""Fixer for 'raise E, V, T'

raise         -> raise
raise E       -> raise E
raise E, V    -> raise E(V)
raise E, V, T -> raise E(V).with_traceback(T)
raise E, None, T -> raise E.with_traceback(T)

raise (((E, E'), E''), E'''), V -> raise E(V)
raise "foo", V, T               -> warns about string exceptions


CAVEATS:
1) "raise E, V" will be incorrectly translated if V is an exception
   instance. The correct Python 3 idiom is

        raise E from V

   but since we can't detect instance-hood by syntax alone and since
   any client code would have to be changed as well, we don't automate
   this.
"""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Attr, ArgList, is_tuple

class FixRaise(fixer_base.BaseFix):

    BM_compatible = True
    PATTERN = """
    raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
    """

    def transform(self, node, results):
        syms = self.syms

        exc = results["exc"].clone()
        if exc.type == token.STRING:
            msg = "Python 3 does not support string exceptions"
            self.cannot_convert(node, msg)
            return

        # Python 2 supports
        #  raise ((((E1, E2), E3), E4), E5), V
        # as a synonym for
        #  raise E1, V
        # Since Python 3 will not support this, we recurse down any tuple
        # literals, always taking the first element.
        if is_tuple(exc):
            while is_tuple(exc):
                # exc.children[1:-1] is the unparenthesized tuple
                # exc.children[1].children[0] is the first element of the tuple
                exc = exc.children[1].children[0].clone()
            exc.prefix = " "

        if "val" not in results:
            # One-argument raise
            new = pytree.Node(syms.raise_stmt, [Name("raise"), exc])
            new.prefix = node.prefix
            return new

        val = results["val"].clone()
        if is_tuple(val):
            args = [c.clone() for c in val.children[1:-1]]
        else:
            val.prefix = ""
            args = [val]

        if "tb" in results:
            tb = results["tb"].clone()
            tb.prefix = ""

            e = exc
            # If there's a traceback and None is passed as the value, then don't
            # add a call, since the user probably just wants to add a
            # traceback. See issue #9661.
            if val.type != token.NAME or val.value != "None":
                e = Call(exc, args)
            with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
            new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb)
            new.prefix = node.prefix
            return new
        else:
            return pytree.Node(syms.raise_stmt,
                               [Name("raise"), Call(exc, args)],
                               prefix=node.prefix)

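Hedged sketch of the two-argument form described in the docstring above (not part of the vendored file):

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_raise"])
print(tool.refactor_string("raise ValueError, 'bad value'\n", "<example>"))
# roughly: raise ValueError('bad value')
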
17
third_party/python/Lib/lib2to3/fixes/fix_raw_input.py
vendored
Normal file
@@ -0,0 +1,17 @@
"""Fixer that changes raw_input(...) into input(...)."""
# Author: Andre Roberge

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixRawInput(fixer_base.BaseFix):

    BM_compatible = True
    PATTERN = """
              power< name='raw_input' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        name.replace(Name("input", prefix=name.prefix))

35
third_party/python/Lib/lib2to3/fixes/fix_reduce.py
vendored
Normal file
@@ -0,0 +1,35 @@
# Copyright 2008 Armin Ronacher.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for reduce().

Makes sure reduce() is imported from the functools module if reduce is
used in that module.
"""

from lib2to3 import fixer_base
from lib2to3.fixer_util import touch_import



class FixReduce(fixer_base.BaseFix):

    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'reduce'
        trailer< '('
            arglist< (
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any) |
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any)
            ) >
        ')' >
    >
    """

    def transform(self, node, results):
        touch_import('functools', 'reduce', node)

38
third_party/python/Lib/lib2to3/fixes/fix_reload.py
vendored
Normal file
@@ -0,0 +1,38 @@
"""Fixer for reload().

reload(s) -> imp.reload(s)"""

# Local imports
from .. import fixer_base
from ..fixer_util import ImportAndCall, touch_import


class FixReload(fixer_base.BaseFix):
    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'reload'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        if results:
            # I feel like we should be able to express this logic in the
            # PATTERN above but I don't know how to do it so...
            obj = results['obj']
            if obj:
                if obj.type == self.syms.star_expr:
                    return  # Make no change.
                if (obj.type == self.syms.argument and
                    obj.children[0].value == '**'):
                    return  # Make no change.
        names = ('imp', 'reload')
        new = ImportAndCall(node, results, names)
        touch_import(None, 'imp', node)
        return new

70
third_party/python/Lib/lib2to3/fixes/fix_renames.py
vendored
Normal file
@@ -0,0 +1,70 @@
"""Fix incompatible renames

Fixes:
  * sys.maxint -> sys.maxsize
"""
# Author: Christian Heimes
# based on Collin Winter's fix_import

# Local imports
from .. import fixer_base
from ..fixer_util import Name, attr_chain

MAPPING = {"sys":  {"maxint" : "maxsize"},
          }
LOOKUP = {}

def alternates(members):
    return "(" + "|".join(map(repr, members)) + ")"


def build_pattern():
    #bare = set()
    for module, replace in list(MAPPING.items()):
        for old_attr, new_attr in list(replace.items()):
            LOOKUP[(module, old_attr)] = new_attr
            #bare.add(module)
            #bare.add(old_attr)
            #yield """
            #      import_name< 'import' (module=%r
            #          | dotted_as_names< any* module=%r any* >) >
            #      """ % (module, module)
            yield """
                  import_from< 'from' module_name=%r 'import'
                      ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
                  """ % (module, old_attr, old_attr)
            yield """
                  power< module_name=%r trailer< '.' attr_name=%r > any* >
                  """ % (module, old_attr)
    #yield """bare_name=%s""" % alternates(bare)


class FixRenames(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "|".join(build_pattern())

    order = "pre" # Pre-order tree traversal

    # Don't match the node if it's within another match
    def match(self, node):
        match = super(FixRenames, self).match
        results = match(node)
        if results:
            if any(match(obj) for obj in attr_chain(node, "parent")):
                return False
            return results
        return False

    #def start_tree(self, tree, filename):
    #    super(FixRenames, self).start_tree(tree, filename)
    #    self.replace = {}

    def transform(self, node, results):
        mod_name = results.get("module_name")
        attr_name = results.get("attr_name")
        #bare_name = results.get("bare_name")
        #import_mod = results.get("module")

        if mod_name and attr_name:
            new_attr = LOOKUP[(mod_name.value, attr_name.value)]
            attr_name.replace(Name(new_attr, prefix=attr_name.prefix))

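Sketch (not in the diff): sys.maxint is renamed to sys.maxsize, the one rename currently present in MAPPING.

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_renames"])
print(tool.refactor_string("import sys\nn = sys.maxint\n", "<example>"))
# roughly: import sys
#          n = sys.maxsize
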
23
third_party/python/Lib/lib2to3/fixes/fix_repr.py
vendored
Normal file
@@ -0,0 +1,23 @@
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that transforms `xyzzy` into repr(xyzzy)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Call, Name, parenthesize


class FixRepr(fixer_base.BaseFix):

    BM_compatible = True
    PATTERN = """
              atom < '`' expr=any '`' >
              """

    def transform(self, node, results):
        expr = results["expr"].clone()

        if expr.type == self.syms.testlist1:
            expr = parenthesize(expr)
        return Call(Name("repr"), [expr], prefix=node.prefix)

53
third_party/python/Lib/lib2to3/fixes/fix_set_literal.py
vendored
Normal file
@@ -0,0 +1,53 @@
"""
Optional fixer to transform set() calls to set literals.
"""

# Author: Benjamin Peterson

from lib2to3 import fixer_base, pytree
from lib2to3.fixer_util import token, syms



class FixSetLiteral(fixer_base.BaseFix):

    BM_compatible = True
    explicit = True

    PATTERN = """power< 'set' trailer< '('
                     (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) >
                                |
                                single=any) ']' >
                     |
                     atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' >
                     )
                     ')' > >
              """

    def transform(self, node, results):
        single = results.get("single")
        if single:
            # Make a fake listmaker
            fake = pytree.Node(syms.listmaker, [single.clone()])
            single.replace(fake)
            items = fake
        else:
            items = results["items"]

        # Build the contents of the literal
        literal = [pytree.Leaf(token.LBRACE, "{")]
        literal.extend(n.clone() for n in items.children)
        literal.append(pytree.Leaf(token.RBRACE, "}"))
        # Set the prefix of the right brace to that of the ')' or ']'
        literal[-1].prefix = items.next_sibling.prefix
        maker = pytree.Node(syms.dictsetmaker, literal)
        maker.prefix = node.prefix

        # If the original was a one tuple, we need to remove the extra comma.
        if len(maker.children) == 4:
            n = maker.children[2]
            n.remove()
            maker.children[-1].prefix = n.prefix

        # Finally, replace the set call with our shiny new literal.
        return maker

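Because this fixer is marked explicit, it has to be requested by name; a hedged sketch (not part of the diff):

from lib2to3.refactor import RefactoringTool

fixer = "lib2to3.fixes.fix_set_literal"
tool = RefactoringTool([fixer], explicit=[fixer])
print(tool.refactor_string("s = set([1, 2, 3])\n", "<example>"))
# roughly: s = {1, 2, 3}
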
18
third_party/python/Lib/lib2to3/fixes/fix_standarderror.py
vendored
Normal file
@@ -0,0 +1,18 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for StandardError -> Exception."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixStandarderror(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              'StandardError'
              """

    def transform(self, node, results):
        return Name("Exception", prefix=node.prefix)

30
third_party/python/Lib/lib2to3/fixes/fix_sys_exc.py
vendored
Normal file
@@ -0,0 +1,30 @@
"""Fixer for sys.exc_{type, value, traceback}

sys.exc_type -> sys.exc_info()[0]
sys.exc_value -> sys.exc_info()[1]
sys.exc_traceback -> sys.exc_info()[2]
"""

# By Jeff Balogh and Benjamin Peterson

# Local imports
from .. import fixer_base
from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms

class FixSysExc(fixer_base.BaseFix):
    # This order matches the ordering of sys.exc_info().
    exc_info = ["exc_type", "exc_value", "exc_traceback"]
    BM_compatible = True
    PATTERN = """
              power< 'sys' trailer< dot='.' attribute=(%s) > >
              """ % '|'.join("'%s'" % e for e in exc_info)

    def transform(self, node, results):
        sys_attr = results["attribute"][0]
        index = Number(self.exc_info.index(sys_attr.value))

        call = Call(Name("exc_info"), prefix=sys_attr.prefix)
        attr = Attr(Name("sys"), call)
        attr[1].children[0].prefix = results["dot"].prefix
        attr.append(Subscript(index))
        return Node(syms.power, attr, prefix=node.prefix)

56
third_party/python/Lib/lib2to3/fixes/fix_throw.py
vendored
Normal file
@@ -0,0 +1,56 @@
"""Fixer for generator.throw(E, V, T).

g.throw(E)       -> g.throw(E)
g.throw(E, V)    -> g.throw(E(V))
g.throw(E, V, T) -> g.throw(E(V).with_traceback(T))

g.throw("foo"[, V[, T]]) will warn about string exceptions."""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ArgList, Attr, is_tuple

class FixThrow(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< any trailer< '.' 'throw' >
           trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' >
    >
    |
    power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > >
    """

    def transform(self, node, results):
        syms = self.syms

        exc = results["exc"].clone()
        if exc.type is token.STRING:
            self.cannot_convert(node, "Python 3 does not support string exceptions")
            return

        # Leave "g.throw(E)" alone
        val = results.get("val")
        if val is None:
            return

        val = val.clone()
        if is_tuple(val):
            args = [c.clone() for c in val.children[1:-1]]
        else:
            val.prefix = ""
            args = [val]

        throw_args = results["args"]

        if "tb" in results:
            tb = results["tb"].clone()
            tb.prefix = ""

            e = Call(exc, args)
            with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
            throw_args.replace(pytree.Node(syms.power, with_tb))
        else:
            throw_args.replace(Call(exc, args))

175
third_party/python/Lib/lib2to3/fixes/fix_tuple_params.py
vendored
Normal file
@@ -0,0 +1,175 @@
"""Fixer for function definitions with tuple parameters.

def func(((a, b), c), d):
    ...

    ->

def func(x, d):
    ((a, b), c) = x
    ...

It will also support lambdas:

    lambda (x, y): x + y -> lambda t: t[0] + t[1]

    # The parens are a syntax error in Python 3
    lambda (x): x + y -> lambda x: x + y
"""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms

def is_docstring(stmt):
    return isinstance(stmt, pytree.Node) and \
           stmt.children[0].type == token.STRING

class FixTupleParams(fixer_base.BaseFix):
    run_order = 4 #use a lower order since lambda is part of other
                  #patterns
    BM_compatible = True

    PATTERN = """
              funcdef< 'def' any parameters< '(' args=any ')' >
                       ['->' any] ':' suite=any+ >
              |
              lambda=
              lambdef< 'lambda' args=vfpdef< '(' inner=any ')' >
                       ':' body=any
              >
              """

    def transform(self, node, results):
        if "lambda" in results:
            return self.transform_lambda(node, results)

        new_lines = []
        suite = results["suite"]
        args = results["args"]
        # This crap is so "def foo(...): x = 5; y = 7" is handled correctly.
        # TODO(cwinter): suite-cleanup
        if suite[0].children[1].type == token.INDENT:
            start = 2
            indent = suite[0].children[1].value
            end = Newline()
        else:
            start = 0
            indent = "; "
            end = pytree.Leaf(token.INDENT, "")

        # We need access to self for new_name(), and making this a method
        #  doesn't feel right. Closing over self and new_lines makes the
        #  code below cleaner.
        def handle_tuple(tuple_arg, add_prefix=False):
            n = Name(self.new_name())
            arg = tuple_arg.clone()
            arg.prefix = ""
            stmt = Assign(arg, n.clone())
            if add_prefix:
                n.prefix = " "
            tuple_arg.replace(n)
            new_lines.append(pytree.Node(syms.simple_stmt,
                                         [stmt, end.clone()]))

        if args.type == syms.tfpdef:
            handle_tuple(args)
        elif args.type == syms.typedargslist:
            for i, arg in enumerate(args.children):
                if arg.type == syms.tfpdef:
                    # Without add_prefix, the emitted code is correct,
                    #  just ugly.
                    handle_tuple(arg, add_prefix=(i > 0))

        if not new_lines:
            return

        # This isn't strictly necessary, but it plays nicely with other fixers.
        # TODO(cwinter) get rid of this when children becomes a smart list
        for line in new_lines:
            line.parent = suite[0]

        # TODO(cwinter) suite-cleanup
        after = start
        if start == 0:
            new_lines[0].prefix = " "
        elif is_docstring(suite[0].children[start]):
            new_lines[0].prefix = indent
            after = start + 1

        for line in new_lines:
            line.parent = suite[0]
        suite[0].children[after:after] = new_lines
        for i in range(after+1, after+len(new_lines)+1):
            suite[0].children[i].prefix = indent
        suite[0].changed()

    def transform_lambda(self, node, results):
        args = results["args"]
        body = results["body"]
        inner = simplify_args(results["inner"])

        # Replace lambda ((((x)))): x  with lambda x: x
        if inner.type == token.NAME:
            inner = inner.clone()
            inner.prefix = " "
            args.replace(inner)
            return

        params = find_params(args)
        to_index = map_to_index(params)
        tup_name = self.new_name(tuple_name(params))

        new_param = Name(tup_name, prefix=" ")
        args.replace(new_param.clone())
        for n in body.post_order():
            if n.type == token.NAME and n.value in to_index:
                subscripts = [c.clone() for c in to_index[n.value]]
                new = pytree.Node(syms.power,
                                  [new_param.clone()] + subscripts)
                new.prefix = n.prefix
                n.replace(new)


### Helper functions for transform_lambda()

def simplify_args(node):
    if node.type in (syms.vfplist, token.NAME):
        return node
    elif node.type == syms.vfpdef:
        # These look like vfpdef< '(' x ')' > where x is NAME
        # or another vfpdef instance (leading to recursion).
        while node.type == syms.vfpdef:
            node = node.children[1]
        return node
    raise RuntimeError("Received unexpected node %s" % node)

def find_params(node):
    if node.type == syms.vfpdef:
        return find_params(node.children[1])
    elif node.type == token.NAME:
        return node.value
    return [find_params(c) for c in node.children if c.type != token.COMMA]

def map_to_index(param_list, prefix=[], d=None):
    if d is None:
        d = {}
    for i, obj in enumerate(param_list):
        trailer = [Subscript(Number(str(i)))]
        if isinstance(obj, list):
            map_to_index(obj, trailer, d=d)
        else:
            d[obj] = prefix + trailer
    return d

def tuple_name(param_list):
    l = []
    for obj in param_list:
        if isinstance(obj, list):
            l.append(tuple_name(obj))
        else:
            l.append(obj)
    return "_".join(l)

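Sketch of the lambda case from the docstring above (not part of the vendored file); a and b are placeholder names:

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_tuple_params"])
print(tool.refactor_string("f = lambda (a, b): a + b\n", "<example>"))
# roughly: f = lambda a_b: a_b[0] + a_b[1]
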
61
third_party/python/Lib/lib2to3/fixes/fix_types.py
vendored
Normal file
@@ -0,0 +1,61 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for removing uses of the types module.

These work only for the known names in the types module.  The names can
appear with or without the types. prefix, i.e. it is assumed the module is
imported either as:

    import types
    from types import ... # either * or specific types

The import statements are not modified.

There should be another fixer that handles at least the following constants:

   type([]) -> list
   type(()) -> tuple
   type('') -> str

"""

# Local imports
from .. import fixer_base
from ..fixer_util import Name

_TYPE_MAPPING = {
        'BooleanType' : 'bool',
        'BufferType' : 'memoryview',
        'ClassType' : 'type',
        'ComplexType' : 'complex',
        'DictType': 'dict',
        'DictionaryType' : 'dict',
        'EllipsisType' : 'type(Ellipsis)',
        #'FileType' : 'io.IOBase',
        'FloatType': 'float',
        'IntType': 'int',
        'ListType': 'list',
        'LongType': 'int',
        'ObjectType' : 'object',
        'NoneType': 'type(None)',
        'NotImplementedType' : 'type(NotImplemented)',
        'SliceType' : 'slice',
        'StringType': 'bytes', # XXX ?
        'StringTypes' : '(str,)', # XXX ?
        'TupleType': 'tuple',
        'TypeType' : 'type',
        'UnicodeType': 'str',
        'XRangeType' : 'range',
    }

_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]

class FixTypes(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = '|'.join(_pats)

    def transform(self, node, results):
        new_value = _TYPE_MAPPING.get(results["name"].value)
        if new_value:
            return Name(new_value, prefix=node.prefix)
        return None

42
third_party/python/Lib/lib2to3/fixes/fix_unicode.py
vendored
Normal file
@@ -0,0 +1,42 @@
r"""Fixer for unicode.

* Changes unicode to str and unichr to chr.

* If "...\u..." is not a unicode literal, change it into "...\\u...".

* Change u"..." into "...".

"""

from ..pgen2 import token
from .. import fixer_base

_mapping = {"unichr" : "chr", "unicode" : "str"}

class FixUnicode(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "STRING | 'unicode' | 'unichr'"

    def start_tree(self, tree, filename):
        super(FixUnicode, self).start_tree(tree, filename)
        self.unicode_literals = 'unicode_literals' in tree.future_features

    def transform(self, node, results):
        if node.type == token.NAME:
            new = node.clone()
            new.value = _mapping[node.value]
            return new
        elif node.type == token.STRING:
            val = node.value
            if not self.unicode_literals and val[0] in '\'"' and '\\' in val:
                val = r'\\'.join([
                    v.replace('\\u', r'\\u').replace('\\U', r'\\U')
                    for v in val.split(r'\\')
                ])
            if val[0] in 'uU':
                val = val[1:]
            if val == node.value:
                return node
            new = node.clone()
            new.value = val
            return new

196
third_party/python/Lib/lib2to3/fixes/fix_urllib.py
vendored
Normal file
@@ -0,0 +1,196 @@
"""Fix changes imports of urllib which are now incompatible.
   This is rather similar to fix_imports, but because of the more
   complex nature of the fixing for urllib, it has its own fixer.
"""
# Author: Nick Edds

# Local imports
from lib2to3.fixes.fix_imports import alternates, FixImports
from lib2to3.fixer_util import (Name, Comma, FromImport, Newline,
                                find_indentation, Node, syms)

MAPPING = {"urllib":  [
                ("urllib.request",
                    ["URLopener", "FancyURLopener", "urlretrieve",
                     "_urlopener", "urlopen", "urlcleanup",
                     "pathname2url", "url2pathname"]),
                ("urllib.parse",
                    ["quote", "quote_plus", "unquote", "unquote_plus",
                     "urlencode", "splitattr", "splithost", "splitnport",
                     "splitpasswd", "splitport", "splitquery", "splittag",
                     "splittype", "splituser", "splitvalue", ]),
                ("urllib.error",
                    ["ContentTooShortError"])],
           "urllib2" : [
                ("urllib.request",
                    ["urlopen", "install_opener", "build_opener",
                     "Request", "OpenerDirector", "BaseHandler",
                     "HTTPDefaultErrorHandler", "HTTPRedirectHandler",
                     "HTTPCookieProcessor", "ProxyHandler",
                     "HTTPPasswordMgr",
                     "HTTPPasswordMgrWithDefaultRealm",
                     "AbstractBasicAuthHandler",
                     "HTTPBasicAuthHandler", "ProxyBasicAuthHandler",
                     "AbstractDigestAuthHandler",
                     "HTTPDigestAuthHandler", "ProxyDigestAuthHandler",
                     "HTTPHandler", "HTTPSHandler", "FileHandler",
                     "FTPHandler", "CacheFTPHandler",
                     "UnknownHandler"]),
                ("urllib.error",
                    ["URLError", "HTTPError"]),
           ]
}

# Duplicate the url parsing functions for urllib2.
MAPPING["urllib2"].append(MAPPING["urllib"][1])


def build_pattern():
    bare = set()
    for old_module, changes in MAPPING.items():
        for change in changes:
            new_module, members = change
            members = alternates(members)
            yield """import_name< 'import' (module=%r
                                  | dotted_as_names< any* module=%r any* >) >
                  """ % (old_module, old_module)
            yield """import_from< 'from' mod_member=%r 'import'
                       ( member=%s | import_as_name< member=%s 'as' any > |
                         import_as_names< members=any*  >) >
                  """ % (old_module, members, members)
            yield """import_from< 'from' module_star=%r 'import' star='*' >
                  """ % old_module
            yield """import_name< 'import'
                                  dotted_as_name< module_as=%r 'as' any > >
                  """ % old_module
            # bare_with_attr has a special significance for FixImports.match().
            yield """power< bare_with_attr=%r trailer< '.' member=%s > any* >
                  """ % (old_module, members)


class FixUrllib(FixImports):

    def build_pattern(self):
        return "|".join(build_pattern())

    def transform_import(self, node, results):
        """Transform for the basic import case. Replaces the old
           import name with a comma separated list of its
           replacements.
        """
        import_mod = results.get("module")
        pref = import_mod.prefix

        names = []

        # create a Node list of the replacement modules
        for name in MAPPING[import_mod.value][:-1]:
            names.extend([Name(name[0], prefix=pref), Comma()])
        names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref))
        import_mod.replace(names)

    def transform_member(self, node, results):
        """Transform for imports of specific module elements. Replaces
           the module to be imported from with the appropriate new
           module.
        """
        mod_member = results.get("mod_member")
        pref = mod_member.prefix
        member = results.get("member")

        # Simple case with only a single member being imported
        if member:
            # this may be a list of length one, or just a node
            if isinstance(member, list):
                member = member[0]
            new_name = None
            for change in MAPPING[mod_member.value]:
                if member.value in change[1]:
                    new_name = change[0]
                    break
            if new_name:
                mod_member.replace(Name(new_name, prefix=pref))
            else:
                self.cannot_convert(node, "This is an invalid module element")

        # Multiple members being imported
        else:
            # a dictionary for replacements, order matters
            modules = []
            mod_dict = {}
            members = results["members"]
            for member in members:
                # we only care about the actual members
                if member.type == syms.import_as_name:
                    as_name = member.children[2].value
                    member_name = member.children[0].value
                else:
                    member_name = member.value
                    as_name = None
                if member_name != ",":
                    for change in MAPPING[mod_member.value]:
                        if member_name in change[1]:
                            if change[0] not in mod_dict:
                                modules.append(change[0])
                            mod_dict.setdefault(change[0], []).append(member)

            new_nodes = []
            indentation = find_indentation(node)
            first = True
            def handle_name(name, prefix):
                if name.type == syms.import_as_name:
                    kids = [Name(name.children[0].value, prefix=prefix),
                            name.children[1].clone(),
                            name.children[2].clone()]
                    return [Node(syms.import_as_name, kids)]
                return [Name(name.value, prefix=prefix)]
            for module in modules:
                elts = mod_dict[module]
                names = []
                for elt in elts[:-1]:
                    names.extend(handle_name(elt, pref))
                    names.append(Comma())
                names.extend(handle_name(elts[-1], pref))
                new = FromImport(module, names)
                if not first or node.parent.prefix.endswith(indentation):
                    new.prefix = indentation
                new_nodes.append(new)
                first = False
            if new_nodes:
                nodes = []
                for new_node in new_nodes[:-1]:
                    nodes.extend([new_node, Newline()])
                nodes.append(new_nodes[-1])
                node.replace(nodes)
            else:
                self.cannot_convert(node, "All module elements are invalid")

    def transform_dot(self, node, results):
        """Transform for calls to module members in code."""
        module_dot = results.get("bare_with_attr")
        member = results.get("member")
        new_name = None
        if isinstance(member, list):
            member = member[0]
        for change in MAPPING[module_dot.value]:
            if member.value in change[1]:
                new_name = change[0]
                break
        if new_name:
            module_dot.replace(Name(new_name,
                                    prefix=module_dot.prefix))
        else:
            self.cannot_convert(node, "This is an invalid module element")

    def transform(self, node, results):
        if results.get("module"):
            self.transform_import(node, results)
        elif results.get("mod_member"):
            self.transform_member(node, results)
        elif results.get("bare_with_attr"):
            self.transform_dot(node, results)
        # Renaming and star imports are not supported for these modules.
        elif results.get("module_star"):
            self.cannot_convert(node, "Cannot handle star imports.")
        elif results.get("module_as"):
            self.cannot_convert(node, "This module is now multiple modules")

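Hedged sketch (not in the diff); the exact replacement list comes from MAPPING above, so the rewritten import may name several urllib submodules:

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_urllib"])
print(tool.refactor_string("import urllib2\nr = urllib2.urlopen(url)\n", "<example>"))
# roughly: import urllib.request, urllib.error, urllib.parse
#          r = urllib.request.urlopen(url)
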
39
third_party/python/Lib/lib2to3/fixes/fix_ws_comma.py
vendored
Normal file
@@ -0,0 +1,39 @@
"""Fixer that changes 'a ,b' into 'a, b'.

This also changes '{a :b}' into '{a: b}', but does not touch other
uses of colons.  It does not touch other uses of whitespace.

"""

from .. import pytree
from ..pgen2 import token
from .. import fixer_base

class FixWsComma(fixer_base.BaseFix):

    explicit = True # The user must ask for this fixer

    PATTERN = """
    any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]>
    """

    COMMA = pytree.Leaf(token.COMMA, ",")
    COLON = pytree.Leaf(token.COLON, ":")
    SEPS = (COMMA, COLON)

    def transform(self, node, results):
        new = node.clone()
        comma = False
        for child in new.children:
            if child in self.SEPS:
                prefix = child.prefix
                if prefix.isspace() and "\n" not in prefix:
                    child.prefix = ""
                comma = True
            else:
                if comma:
                    prefix = child.prefix
                    if not prefix:
                        child.prefix = " "
                comma = False
        return new

73
third_party/python/Lib/lib2to3/fixes/fix_xrange.py
vendored
Normal file
@@ -0,0 +1,73 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes xrange(...) into range(...)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name, Call, consuming_calls
from .. import patcomp


class FixXrange(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              power<
                 (name='range'|name='xrange') trailer< '(' args=any ')' >
              rest=any* >
              """

    def start_tree(self, tree, filename):
        super(FixXrange, self).start_tree(tree, filename)
        self.transformed_xranges = set()

    def finish_tree(self, tree, filename):
        self.transformed_xranges = None

    def transform(self, node, results):
        name = results["name"]
        if name.value == "xrange":
            return self.transform_xrange(node, results)
        elif name.value == "range":
            return self.transform_range(node, results)
        else:
            raise ValueError(repr(name))

    def transform_xrange(self, node, results):
        name = results["name"]
        name.replace(Name("range", prefix=name.prefix))
        # This prevents the new range call from being wrapped in a list later.
        self.transformed_xranges.add(id(node))

    def transform_range(self, node, results):
        if (id(node) not in self.transformed_xranges and
            not self.in_special_context(node)):
            range_call = Call(Name("range"), [results["args"].clone()])
            # Encase the range call in list().
            list_call = Call(Name("list"), [range_call],
                             prefix=node.prefix)
            # Put things that were after the range() call after the list call.
            for n in results["rest"]:
                list_call.append_child(n)
            return list_call

    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
            | comparison< any 'in' node=any any*>
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node):
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
               self.p1.match(node.parent.parent, results) and
               results["node"] is node):
            # list(d.keys()) -> list(d.keys()), etc.
            return results["func"].value in consuming_calls
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node

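Sketch (not part of the diff): xrange() becomes range(), while a pre-existing bare range() call outside the special contexts is wrapped in list().

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_xrange"])
src = "xs = range(5)\nfor i in xrange(3):\n    pass\n"
print(tool.refactor_string(src, "<example>"))
# roughly: xs = list(range(5))
#          for i in range(3):
#              pass
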
25
third_party/python/Lib/lib2to3/fixes/fix_xreadlines.py
vendored
Normal file
@@ -0,0 +1,25 @@
"""Fix "for x in f.xreadlines()" -> "for x in f".

This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
# Author: Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixXreadlines(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
    |
    power< any+ trailer< '.' no_call='xreadlines' > >
    """

    def transform(self, node, results):
        no_call = results.get("no_call")

        if no_call:
            no_call.replace(Name("__iter__", prefix=no_call.prefix))
        else:
            node.replace([x.clone() for x in results["call"]])

46
third_party/python/Lib/lib2to3/fixes/fix_zip.py
vendored
Normal file
@@ -0,0 +1,46 @@
"""
Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...))
unless there exists a 'from future_builtins import zip' statement in the
top-level namespace.

We avoid the transformation if the zip() call is directly contained in
iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
"""

# Local imports
from .. import fixer_base
from ..pytree import Node
from ..pygram import python_symbols as syms
from ..fixer_util import Name, ArgList, in_special_context


class FixZip(fixer_base.ConditionalFix):

    BM_compatible = True
    PATTERN = """
    power< 'zip' args=trailer< '(' [any] ')' > [trailers=trailer*]
    >
    """

    skip_on = "future_builtins.zip"

    def transform(self, node, results):
        if self.should_skip(node):
            return

        if in_special_context(node):
            return None

        args = results['args'].clone()
        args.prefix = ""

        trailers = []
        if 'trailers' in results:
            trailers = [n.clone() for n in results['trailers']]
            for n in trailers:
                n.prefix = ""

        new = Node(syms.power, [Name("zip"), args], prefix="")
        new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
        new.prefix = node.prefix
        return new
