first commit
Commit 417e54da96
5696 changed files with 900003 additions and 0 deletions
@@ -0,0 +1,142 @@
"""
Compile a Python script into an executable that embeds CPython and run it.
Requires CPython to be built as a shared library ('libpythonX.Y').

Basic usage:

    python cythonrun somefile.py [ARGS]
"""

from __future__ import absolute_import

DEBUG = True

import sys
import os
from distutils import sysconfig


def get_config_var(name, default=''):
    return sysconfig.get_config_var(name) or default

INCDIR = sysconfig.get_python_inc()
LIBDIR1 = get_config_var('LIBDIR')
LIBDIR2 = get_config_var('LIBPL')
PYLIB = get_config_var('LIBRARY')
PYLIB_DYN = get_config_var('LDLIBRARY')
if PYLIB_DYN == PYLIB:
    # no shared library
    PYLIB_DYN = ''
else:
    PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0]  # 'lib(XYZ).so' -> XYZ

CC = get_config_var('CC', os.environ.get('CC', ''))
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC))
LINKFORSHARED = get_config_var('LINKFORSHARED')
LIBS = get_config_var('LIBS')
SYSLIBS = get_config_var('SYSLIBS')
EXE_EXT = sysconfig.get_config_var('EXE')


def _debug(msg, *args):
    if DEBUG:
        if args:
            msg = msg % args
        sys.stderr.write(msg + '\n')


def dump_config():
    _debug('INCDIR: %s', INCDIR)
    _debug('LIBDIR1: %s', LIBDIR1)
    _debug('LIBDIR2: %s', LIBDIR2)
    _debug('PYLIB: %s', PYLIB)
    _debug('PYLIB_DYN: %s', PYLIB_DYN)
    _debug('CC: %s', CC)
    _debug('CFLAGS: %s', CFLAGS)
    _debug('LINKCC: %s', LINKCC)
    _debug('LINKFORSHARED: %s', LINKFORSHARED)
    _debug('LIBS: %s', LIBS)
    _debug('SYSLIBS: %s', SYSLIBS)
    _debug('EXE_EXT: %s', EXE_EXT)


def runcmd(cmd, shell=True):
    if shell:
        cmd = ' '.join(cmd)
        _debug(cmd)
    else:
        _debug(' '.join(cmd))

    try:
        import subprocess
    except ImportError:  # Python 2.3 ...
        returncode = os.system(cmd)
    else:
        returncode = subprocess.call(cmd, shell=shell)

    if returncode:
        sys.exit(returncode)


def clink(basename):
    runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
           + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
           + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())


def ccompile(basename):
    runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())


def cycompile(input_file, options=()):
    from ..Compiler import Version, CmdLine, Main
    options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
    _debug('Using Cython %s to compile %s', Version.version, input_file)
    result = Main.compile(sources, options)
    if result.num_errors > 0:
        sys.exit(1)


def exec_file(program_name, args=()):
    runcmd([os.path.abspath(program_name)] + list(args), shell=False)


def build(input_file, compiler_args=(), force=False):
    """
    Build an executable program from a Cython module.

    Returns the name of the executable file.
    """
    basename = os.path.splitext(input_file)[0]
    exe_file = basename + EXE_EXT
    if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
        raise ValueError("Input and output file names are the same, refusing to overwrite")
    if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
            and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
        _debug("File is up to date, not regenerating %s", exe_file)
        return exe_file
    cycompile(input_file, compiler_args)
    ccompile(basename)
    clink(basename)
    return exe_file


def build_and_run(args):
    """
    Build an executable program from a Cython module and run it.

    Arguments after the module name will be passed verbatim to the
    program.
    """
    cy_args = []
    last_arg = None
    for i, arg in enumerate(args):
        if arg.startswith('-'):
            cy_args.append(arg)
        elif last_arg in ('-X', '--directive'):
            cy_args.append(arg)
        else:
            input_file = arg
            args = args[i+1:]
            break
        last_arg = arg
    else:
        raise ValueError('no input file provided')

    program_name = build(input_file, cy_args)
    exec_file(program_name, args)


if __name__ == '__main__':
    build_and_run(sys.argv[1:])
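
A minimal usage sketch for the builder above, driven programmatically rather
than through the command line. The script name 'hello.py' is an illustrative
assumption, and a shared-library CPython build is required, as the module
docstring notes:

    # Hedged sketch; 'hello.py' is a placeholder script.
    from Cython.Build.BuildExecutable import build, build_and_run

    exe_path = build('hello.py')                 # cythonize --embed, compile, link
    build_and_run(['hello.py', 'arg1', 'arg2'])  # rebuild if stale, then exec with args
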
229 kivy_venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py Normal file

@@ -0,0 +1,229 @@
#!/usr/bin/env python

from __future__ import absolute_import

import os
import shutil
import tempfile
from distutils.core import setup

from .Dependencies import cythonize, extended_iglob
from ..Utils import is_package_dir
from ..Compiler import Options

try:
    import multiprocessing
    parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
except ImportError:
    multiprocessing = None
    parallel_compiles = 0


class _FakePool(object):
    def map_async(self, func, args):
        try:
            from itertools import imap
        except ImportError:
            imap = map
        for _ in imap(func, args):
            pass

    def close(self):
        pass

    def terminate(self):
        pass

    def join(self):
        pass


def parse_directives(option, name, value, parser):
    dest = option.dest
    old_directives = dict(getattr(parser.values, dest,
                                  Options.get_directive_defaults()))
    directives = Options.parse_directive_list(
        value, relaxed_bool=True, current_settings=old_directives)
    setattr(parser.values, dest, directives)


def parse_options(option, name, value, parser):
    dest = option.dest
    options = dict(getattr(parser.values, dest, {}))
    for opt in value.split(','):
        if '=' in opt:
            n, v = opt.split('=', 1)
            v = v.lower() not in ('false', 'f', '0', 'no')
        else:
            n, v = opt, True
        options[n] = v
    setattr(parser.values, dest, options)


def parse_compile_time_env(option, name, value, parser):
    dest = option.dest
    old_env = dict(getattr(parser.values, dest, {}))
    new_env = Options.parse_compile_time_env(value, current_settings=old_env)
    setattr(parser.values, dest, new_env)


def find_package_base(path):
    base_dir, package_path = os.path.split(path)
    while os.path.isfile(os.path.join(base_dir, '__init__.py')):
        base_dir, parent = os.path.split(base_dir)
        package_path = '%s/%s' % (parent, package_path)
    return base_dir, package_path


def cython_compile(path_pattern, options):
    pool = None
    all_paths = map(os.path.abspath, extended_iglob(path_pattern))
    try:
        for path in all_paths:
            if options.build_inplace:
                base_dir = path
                while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                    base_dir = os.path.dirname(base_dir)
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.{py,pyx}')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            ext_modules = cythonize(
                paths,
                nthreads=options.parallel,
                exclude_failures=options.keep_going,
                exclude=options.excludes,
                compiler_directives=options.directives,
                compile_time_env=options.compile_time_env,
                force=options.force,
                quiet=options.quiet,
                depfile=options.depfile,
                **options.options)

            if ext_modules and options.build:
                if len(ext_modules) > 1 and options.parallel > 1:
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        if pool is not None:
            pool.terminate()
        raise
    else:
        if pool is not None:
            pool.close()
            pool.join()


def run_distutils(args):
    base_dir, ext_modules = args
    script_args = ['build_ext', '-i']
    cwd = os.getcwd()
    temp_dir = None
    try:
        if base_dir:
            os.chdir(base_dir)
            temp_dir = tempfile.mkdtemp(dir=base_dir)
            script_args.extend(['--build-temp', temp_dir])
        setup(
            script_name='setup.py',
            script_args=script_args,
            ext_modules=ext_modules,
        )
    finally:
        if base_dir:
            os.chdir(cwd)
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)


def parse_args(args):
    from optparse import OptionParser
    parser = OptionParser(usage='%prog [options] [sources and packages]+')

    parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
                      dest='directives', default={}, type="str",
                      action='callback', callback=parse_directives,
                      help='set a compiler directive')
    parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                      dest='compile_time_env', default={}, type="str",
                      action='callback', callback=parse_compile_time_env,
                      help='set a compile time environment variable')
    parser.add_option('-s', '--option', metavar='NAME=VALUE',
                      dest='options', default={}, type="str",
                      action='callback', callback=parse_options,
                      help='set a cythonize option')
    parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None,
                      help='use Python 2 syntax mode by default')
    parser.add_option('-3', dest='language_level', action='store_const', const=3,
                      help='use Python 3 syntax mode by default')
    parser.add_option('--3str', dest='language_level', action='store_const', const='3str',
                      help='use Python 3 syntax mode by default')
    parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
                      help='generate annotated HTML page for source files')

    parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes',
                      action='append', default=[],
                      help='exclude certain file patterns from the compilation')

    parser.add_option('-b', '--build', dest='build', action='store_true',
                      help='build extension modules using distutils')
    parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true',
                      help='build extension modules in place using distutils (implies -b)')
    parser.add_option('-j', '--parallel', dest='parallel', metavar='N',
                      type=int, default=parallel_compiles,
                      help=('run builds in N parallel jobs (default: %d)' %
                            (parallel_compiles or 1)))
    parser.add_option('-f', '--force', dest='force', action='store_true',
                      help='force recompilation')
    parser.add_option('-q', '--quiet', dest='quiet', action='store_true',
                      help='be less verbose during compilation')

    parser.add_option('--lenient', dest='lenient', action='store_true',
                      help='increase Python compatibility by ignoring some compile time errors')
    parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true',
                      help='compile as much as possible, ignore compilation failures')
    parser.add_option('-M', '--depfile', action='store_true', help='produce depfiles for the sources')

    options, args = parser.parse_args(args)
    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        options.build = True
    if multiprocessing is None:
        options.parallel = 0
    if options.language_level:
        assert options.language_level in (2, 3, '3str')
        options.options['language_level'] = options.language_level
    return options, args


def main(args=None):
    options, paths = parse_args(args)

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    if options.annotate:
        Options.annotate = True

    for path in paths:
        cython_compile(path, options)


if __name__ == '__main__':
    main()
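
The option parser above also backs the installed `cythonize` script; a hedged
sketch of driving it in-process (the package directory 'mypkg' is an
illustrative assumption):

    # Roughly equivalent to the shell command: cythonize -i -3 -j 2 mypkg
    from Cython.Build.Cythonize import main

    main(['-i', '-3', '-j', '2', 'mypkg'])  # 'mypkg' is a placeholder path
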
1293 kivy_venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py Normal file
File diff suppressed because it is too large
@@ -0,0 +1 @@
from Cython.Distutils.build_ext import build_ext
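
This one-line module (the diff view omits its file name, though the content
matches Cython/Build/Distutils.py upstream) only re-exports the distutils
build command; a hedged sketch of the import path it is presumed to enable:

    # Assumed import path for the re-export above.
    from Cython.Build.Distutils import build_ext  # same class as Cython.Distutils.build_ext
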
376 kivy_venv/lib/python3.11/site-packages/Cython/Build/Inline.py Normal file

@@ -0,0 +1,376 @@
from __future__ import absolute_import

import hashlib
import inspect
import os
import re
import sys

from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext

import Cython
from ..Compiler.Main import Context, default_options

from ..Compiler.Visitor import CythonTransform, EnvTransform
from ..Compiler.ParseTreeTransforms import SkipDeclarations
from ..Compiler.TreeFragment import parse_from_strings
from ..Compiler.StringEncoding import _unicode
from .Dependencies import strip_string_literals, cythonize, cached_function
from ..Compiler import Pipeline
from ..Utils import get_cython_cache_dir
import cython as cython_module


IS_PY3 = sys.version_info >= (3,)

# A utility function to convert user-supplied ASCII strings to unicode.
if not IS_PY3:
    def to_unicode(s):
        if isinstance(s, bytes):
            return s.decode('ascii')
        else:
            return s
else:
    to_unicode = lambda x: x

if sys.version_info < (3, 5):
    import imp
    def load_dynamic(name, module_path):
        return imp.load_dynamic(name, module_path)
else:
    import importlib.util as _importlib_util
    def load_dynamic(name, module_path):
        spec = _importlib_util.spec_from_file_location(name, module_path)
        module = _importlib_util.module_from_spec(spec)
        # sys.modules[name] = module
        spec.loader.exec_module(module)
        return module


class UnboundSymbols(EnvTransform, SkipDeclarations):
    def __init__(self):
        CythonTransform.__init__(self, None)
        self.unbound = set()
    def visit_NameNode(self, node):
        if not self.current_env().lookup(node.name):
            self.unbound.add(node.name)
        return node
    def __call__(self, node):
        super(UnboundSymbols, self).__call__(node)
        return self.unbound


@cached_function
def unbound_symbols(code, context=None):
    code = to_unicode(code)
    if context is None:
        context = Context([], default_options)
    from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is None:
            continue
        tree = phase(tree)
        if isinstance(phase, AnalyseDeclarationsTransform):
            break
    try:
        import builtins
    except ImportError:
        import __builtin__ as builtins
    return tuple(UnboundSymbols()(tree) - set(dir(builtins)))


def unsafe_type(arg, context=None):
    py_type = type(arg)
    if py_type is int:
        return 'long'
    else:
        return safe_type(arg, context)


def safe_type(arg, context=None):
    py_type = type(arg)
    if py_type in (list, tuple, dict, str):
        return py_type.__name__
    elif py_type is complex:
        return 'double complex'
    elif py_type is float:
        return 'double'
    elif py_type is bool:
        return 'bint'
    elif 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
        return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
    else:
        for base_type in py_type.__mro__:
            if base_type.__module__ in ('__builtin__', 'builtins'):
                return 'object'
            module = context.find_module(base_type.__module__, need_pxd=False)
            if module:
                entry = module.lookup(base_type.__name__)
                if entry.is_type:
                    return '%s.%s' % (base_type.__module__, base_type.__name__)
        return 'object'


def _get_build_extension():
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing
    # the configuration files
    config_files = dist.find_config_files()
    dist.parse_config_files(config_files)
    build_extension = build_ext(dist)
    build_extension.finalize_options()
    return build_extension


@cached_function
def _create_context(cython_include_dirs):
    return Context(list(cython_include_dirs), default_options)


_cython_inline_cache = {}
_cython_inline_default_context = _create_context(('.',))


def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
    for symbol in unbound_symbols:
        if symbol not in kwds:
            if locals is None or globals is None:
                calling_frame = inspect.currentframe().f_back.f_back.f_back
                if locals is None:
                    locals = calling_frame.f_locals
                if globals is None:
                    globals = calling_frame.f_globals
            if symbol in locals:
                kwds[symbol] = locals[symbol]
            elif symbol in globals:
                kwds[symbol] = globals[symbol]
            else:
                print("Couldn't find %r" % symbol)


def _inline_key(orig_code, arg_sigs, language_level):
    key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__
    return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest()


def cython_inline(code, get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=None, cython_compiler_directives=None,
                  force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):

    if get_type is None:
        get_type = lambda x: 'object'
    ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context

    cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
    if language_level is None and 'language_level' not in cython_compiler_directives:
        language_level = '3str'
    if language_level is not None:
        cython_compiler_directives['language_level'] = language_level

    # Fast path if this has been called in this session.
    _unbound_symbols = _cython_inline_cache.get(code)
    if _unbound_symbols is not None:
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
        args = sorted(kwds.items())
        arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
        key_hash = _inline_key(code, arg_sigs, language_level)
        invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
        if invoke is not None:
            arg_list = [arg[1] for arg in args]
            return invoke(*arg_list)

    orig_code = code
    code = to_unicode(code)
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")

    cimports = []
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    key_hash = _inline_key(orig_code, arg_sigs, language_level)
    module_name = "_cython_inline_" + key_hash

    if module_name in sys.modules:
        module = sys.modules[module_name]

    else:
        build_extension = None
        if cython_inline.so_ext is None:
            # Figure out and cache current extension suffix
            build_extension = _get_build_extension()
            cython_inline.so_ext = build_extension.get_ext_filename('')

        module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if force or not os.path.isfile(module_path):
            cflags = []
            c_include_dirs = []
            qualified = re.compile(r'([.\w]+)[.]')
            for type, _ in arg_sigs:
                m = qualified.match(type)
                if m:
                    cimports.append('\ncimport %s' % m.groups()[0])
                    # one special case
                    if m.groups()[0] == 'numpy':
                        import numpy
                        c_include_dirs.append(numpy.get_include())
                        # cflags.append('-Wno-unused')
            module_body, func_body = extract_func_code(code)
            params = ', '.join(['%s %s' % a for a in arg_sigs])
            module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
    return locals()
""" % {'cimports': '\n'.join(cimports),
       'module_body': module_body,
       'params': params,
       'func_body': func_body }
            for key, value in literals.items():
                module_code = module_code.replace(key, value)
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            fh = open(pyx_file, 'w')
            try:
                fh.write(module_code)
            finally:
                fh.close()
            extension = Extension(
                name = module_name,
                sources = [pyx_file],
                include_dirs = c_include_dirs,
                extra_compile_args = cflags)
            if build_extension is None:
                build_extension = _get_build_extension()
            build_extension.extensions = cythonize(
                [extension],
                include_path=cython_include_dirs or ['.'],
                compiler_directives=cython_compiler_directives,
                quiet=quiet)
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()

        module = load_dynamic(module_name, module_path)

    _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)


# Cached suffix used by cython_inline above.  None should get
# overridden with actual value upon the first cython_inline invocation
cython_inline.so_ext = None

_find_non_space = re.compile('[^ ]').search


def strip_common_indent(code):
    min_indent = None
    lines = code.splitlines()
    for line in lines:
        match = _find_non_space(line)
        if not match:
            continue  # blank
        indent = match.start()
        if line[indent] == '#':
            continue  # comment
        if min_indent is None or min_indent > indent:
            min_indent = indent
    for ix, line in enumerate(lines):
        match = _find_non_space(line)
        # use this line's own first column; the original reused the stale
        # 'indent' value left over from the previous loop
        if not match or line[match.start()] == '#':
            continue
        lines[ix] = line[min_indent:]
    return '\n'.join(lines)


module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')
def extract_func_code(code):
    module = []
    function = []
    current = function
    code = code.replace('\t', ' ')
    lines = code.split('\n')
    for line in lines:
        if not line.startswith(' '):
            if module_statement.match(line):
                current = module
            else:
                current = function
        current.append(line)
    return '\n'.join(module), '    ' + '\n    '.join(function)


try:
    from inspect import getcallargs
except ImportError:
    def getcallargs(func, *arg_values, **kwd_values):
        all = {}
        args, varargs, kwds, defaults = inspect.getargspec(func)
        if varargs is not None:
            all[varargs] = arg_values[len(args):]
        for name, value in zip(args, arg_values):
            all[name] = value
        for name, value in list(kwd_values.items()):
            if name in args:
                if name in all:
                    raise TypeError("Duplicate argument %s" % name)
                all[name] = kwd_values.pop(name)
        if kwds is not None:
            all[kwds] = kwd_values
        elif kwd_values:
            raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values))
        if defaults is None:
            defaults = ()
        first_default = len(args) - len(defaults)
        for ix, name in enumerate(args):
            if name not in all:
                if ix >= first_default:
                    all[name] = defaults[ix - first_default]
                else:
                    raise TypeError("Missing argument: %s" % name)
        return all


def get_body(source):
    ix = source.index(':')
    if source[:5] == 'lambda':
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]


# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):

    def __init__(self, f):
        self._f = f
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        all = getcallargs(self._f, *args, **kwds)
        if IS_PY3:
            return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
        else:
            return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
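
A hedged usage sketch for cython_inline above. Arguments passed as keywords
are used directly; free variables can instead be resolved from the calling
frame, which is how the `cython.inline` wrapper (tested later in this diff)
is normally used:

    from Cython.Build.Inline import cython_inline

    # The values are illustrative; passing a and b as keywords avoids any
    # dependence on frame introspection.
    result = cython_inline("return a + b", a=10, b=20, quiet=True)
    assert result == 30
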
@@ -0,0 +1,565 @@
# -*- coding: utf-8 -*-
"""
=====================
Cython related magics
=====================

Magic command interface for interactive work with Cython

.. note::

  The ``Cython`` package needs to be installed separately. It
  can be obtained using ``easy_install`` or ``pip``.

Usage
=====

To enable the magics below, execute ``%load_ext cython``.

``%%cython``

{CYTHON_DOC}

``%%cython_inline``

{CYTHON_INLINE_DOC}

``%%cython_pyximport``

{CYTHON_PYXIMPORT_DOC}

Author:
* Brian Granger

Code moved from IPython and adapted by:
* Martín Gaitán

Parts of this code were taken from Cython.inline.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file ipython-COPYING.rst, distributed with this software.
#-----------------------------------------------------------------------------

from __future__ import absolute_import, print_function

import imp
import io
import os
import re
import sys
import time
import copy
import distutils.log
import textwrap

IO_ENCODING = sys.getfilesystemencoding()
IS_PY2 = sys.version_info[0] < 3

try:
    reload
except NameError:   # Python 3
    from imp import reload

try:
    import hashlib
except ImportError:
    import md5 as hashlib

from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext

from IPython.core import display
from IPython.core import magic_arguments
from IPython.core.magic import Magics, magics_class, cell_magic
try:
    from IPython.paths import get_ipython_cache_dir
except ImportError:
    # older IPython version
    from IPython.utils.path import get_ipython_cache_dir
from IPython.utils.text import dedent

from ..Shadow import __version__ as cython_version
from ..Compiler.Errors import CompileError
from .Inline import cython_inline
from .Dependencies import cythonize


PGO_CONFIG = {
    'gcc': {
        'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
        'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
    },
    # blind copy from 'configure' script in CPython 3.7
    'icc': {
        'gen': ['-prof-gen'],
        'use': ['-prof-use'],
    }
}
PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']


if IS_PY2:
    def encode_fs(name):
        return name if isinstance(name, bytes) else name.encode(IO_ENCODING)
else:
    def encode_fs(name):
        return name


@magics_class
class CythonMagics(Magics):

    def __init__(self, shell):
        super(CythonMagics, self).__init__(shell)
        self._reloads = {}
        self._code_cache = {}
        self._pyximport_installed = False

    def _import_all(self, module):
        mdict = module.__dict__
        if '__all__' in mdict:
            keys = mdict['__all__']
        else:
            keys = [k for k in mdict if not k.startswith('_')]

        for k in keys:
            try:
                self.shell.push({k: mdict[k]})
            except KeyError:
                msg = "'module' object has no attribute '%s'" % k
                raise AttributeError(msg)

    @cell_magic
    def cython_inline(self, line, cell):
        """Compile and run a Cython code cell using Cython.inline.

        This magic simply passes the body of the cell to Cython.inline
        and returns the result. If the variables `a` and `b` are defined
        in the user's namespace, here is a simple example that returns
        their sum::

            %%cython_inline
            return a+b

        For most purposes, we recommend the usage of the `%%cython` magic.
        """
        locs = self.shell.user_global_ns
        globs = self.shell.user_ns
        return cython_inline(cell, locals=locs, globals=globs)

    @cell_magic
    def cython_pyximport(self, line, cell):
        """Compile and import a Cython code cell using pyximport.

        The contents of the cell are written to a `.pyx` file in the current
        working directory, which is then imported using `pyximport`. This
        magic requires a module name to be passed::

            %%cython_pyximport modulename
            def f(x):
                return 2.0*x

        The compiled module is then imported and all of its symbols are
        injected into the user's namespace. For most purposes, we recommend
        the usage of the `%%cython` magic.
        """
        module_name = line.strip()
        if not module_name:
            raise ValueError('module name must be given')
        fname = module_name + '.pyx'
        with io.open(fname, 'w', encoding='utf-8') as f:
            f.write(cell)
        if 'pyximport' not in sys.modules or not self._pyximport_installed:
            import pyximport
            pyximport.install()
            self._pyximport_installed = True
        if module_name in self._reloads:
            module = self._reloads[module_name]
            # Note: reloading extension modules is not actually supported
            # (requires PEP-489 reinitialisation support).
            # Don't know why this should ever have worked as it reads here.
            # All we really need to do is to update the globals below.
            #reload(module)
        else:
            __import__(module_name)
            module = sys.modules[module_name]
            self._reloads[module_name] = module
        self._import_all(module)

    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        '-a', '--annotate', action='store_true', default=False,
        help="Produce a colorized HTML version of the source."
    )
    @magic_arguments.argument(
        '-+', '--cplus', action='store_true', default=False,
        help="Output a C++ rather than C file."
    )
    @magic_arguments.argument(
        '-3', dest='language_level', action='store_const', const=3, default=None,
        help="Select Python 3 syntax."
    )
    @magic_arguments.argument(
        '-2', dest='language_level', action='store_const', const=2, default=None,
        help="Select Python 2 syntax."
    )
    @magic_arguments.argument(
        '-f', '--force', action='store_true', default=False,
        help="Force the compilation of a new module, even if the source has been "
             "previously compiled."
    )
    @magic_arguments.argument(
        '-c', '--compile-args', action='append', default=[],
        help="Extra flags to pass to compiler via the `extra_compile_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '--link-args', action='append', default=[],
        help="Extra flags to pass to linker via the `extra_link_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '-l', '--lib', action='append', default=[],
        help="Add a library to link the extension against (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-n', '--name',
        help="Specify a name for the Cython module."
    )
    @magic_arguments.argument(
        '-L', dest='library_dirs', metavar='dir', action='append', default=[],
        help="Add a path to the list of library directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-I', '--include', action='append', default=[],
        help="Add a path to the list of include directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-S', '--src', action='append', default=[],
        help="Add a path to the list of src files (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '--pgo', dest='pgo', action='store_true', default=False,
        help=("Enable profile guided optimisation in the C compiler. "
              "Compiles the cell twice and executes it in between to generate a runtime profile.")
    )
    @magic_arguments.argument(
        '--verbose', dest='quiet', action='store_false', default=True,
        help=("Print debug information like generated .c/.cpp file location "
              "and exact gcc/g++ command invoked.")
    )
    @cell_magic
    def cython(self, line, cell):
        """Compile and import everything from a Cython code cell.

        The contents of the cell are written to a `.pyx` file in the
        directory `IPYTHONDIR/cython` using a filename with the hash of the
        code. This file is then cythonized and compiled. The resulting module
        is imported and all of its symbols are injected into the user's
        namespace. The usage is similar to that of `%%cython_pyximport` but
        you don't have to pass a module name::

            %%cython
            def f(x):
                return 2.0*x

        To compile OpenMP codes, pass the required `--compile-args`
        and `--link-args`. For example with gcc::

            %%cython --compile-args=-fopenmp --link-args=-fopenmp
            ...

        To enable profile guided optimisation, pass the ``--pgo`` option.
        Note that the cell itself needs to take care of establishing a suitable
        profile when executed. This can be done by implementing the functions to
        optimise, and then calling them directly in the same cell on some realistic
        training data like this::

            %%cython --pgo
            def critical_function(data):
                for item in data:
                    ...

            # execute function several times to build profile
            from somewhere import some_typical_data
            for _ in range(100):
                critical_function(some_typical_data)

        In Python 3.5 and later, you can distinguish between the profile and
        non-profile runs as follows::

            if "_pgo_" in __name__:
                ...  # execute critical code here
        """
        args = magic_arguments.parse_argstring(self.cython, line)
        code = cell if cell.endswith('\n') else cell + '\n'
        lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
        key = (code, line, sys.version_info, sys.executable, cython_version)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)

        if args.pgo:
            key += ('pgo',)
        if args.force:
            # Force a new module name by adding the current time to the
            # key which is hashed to determine the module name.
            key += (time.time(),)

        if args.name:
            module_name = str(args.name)  # no-op in Py3
        else:
            module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
        html_file = os.path.join(lib_dir, module_name + '.html')
        module_path = os.path.join(lib_dir, module_name + self.so_ext)

        have_module = os.path.isfile(module_path)
        need_cythonize = args.pgo or not have_module

        if args.annotate:
            if not os.path.isfile(html_file):
                need_cythonize = True

        extension = None
        if need_cythonize:
            extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
            if extensions is None:
                # Compilation failed and printed error message
                return None
            assert len(extensions) == 1
            extension = extensions[0]
            self._code_cache[key] = module_name

            if args.pgo:
                self._profile_pgo_wrapper(extension, lib_dir)

        try:
            self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None,
                                  quiet=args.quiet)
        except distutils.errors.CompileError:
            # Build failed and printed error message
            return None

        module = imp.load_dynamic(module_name, module_path)
        self._import_all(module)

        if args.annotate:
            try:
                with io.open(html_file, encoding='utf-8') as f:
                    annotated_html = f.read()
            except IOError as e:
                # File could not be opened. Most likely the user has a version
                # of Cython before 0.15.1 (when `cythonize` learned the
                # `force` keyword argument) and has already compiled this
                # exact source without annotation.
                print('Cython completed successfully but the annotated '
                      'source could not be read.', file=sys.stderr)
                print(e, file=sys.stderr)
            else:
                return display.HTML(self.clean_annotated_html(annotated_html))

    def _profile_pgo_wrapper(self, extension, lib_dir):
        """
        Generate a .c file for a separate extension module that calls the
        module init function of the original module. This makes sure that the
        PGO profiler sees the correct .o file of the final module, but it still
        allows us to import the module under a different name for profiling,
        before recompiling it into the PGO optimised module. Overwriting and
        reimporting the same shared library is not portable.
        """
        extension = copy.copy(extension)  # shallow copy, do not modify sources in place!
        module_name = extension.name
        pgo_module_name = '_pgo_' + module_name
        pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
        with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
            f.write(textwrap.dedent(u"""
            #include "Python.h"
            #if PY_MAJOR_VERSION < 3
            extern PyMODINIT_FUNC init%(module_name)s(void);
            PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC init%(pgo_module_name)s(void) {
                PyObject *sys_modules;
                init%(module_name)s();  if (PyErr_Occurred()) return;
                sys_modules = PyImport_GetModuleDict();  /* borrowed, no exception, "never" fails */
                if (sys_modules) {
                    PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s");  if (!module) return;
                    PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
                    Py_DECREF(module);
                }
            }
            #else
            extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
                return PyInit_%(module_name)s();
            }
            #endif
            """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))

        extension.sources = extension.sources + [pgo_wrapper_c_file]  # do not modify in place!
        extension.name = pgo_module_name

        self._build_extension(extension, lib_dir, pgo_step_name='gen')

        # import and execute module code to generate profile
        so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
        imp.load_dynamic(pgo_module_name, so_module_path)

    def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        pyx_file = encode_fs(pyx_file)

        c_include_dirs = args.include
        c_src_files = list(map(str, args.src))
        if 'numpy' in code:
            import numpy
            c_include_dirs.append(numpy.get_include())
        with io.open(pyx_file, 'w', encoding='utf-8') as f:
            f.write(code)
        extension = Extension(
            name=module_name,
            sources=[pyx_file] + c_src_files,
            include_dirs=c_include_dirs,
            library_dirs=args.library_dirs,
            extra_compile_args=args.compile_args,
            extra_link_args=args.link_args,
            libraries=args.lib,
            language='c++' if args.cplus else 'c',
        )
        try:
            opts = dict(
                quiet=quiet,
                annotate=args.annotate,
                force=True,
            )
            if args.language_level is not None:
                assert args.language_level in (2, 3)
                opts['language_level'] = args.language_level
            elif sys.version_info[0] >= 3:
                opts['language_level'] = 3
            return cythonize([extension], **opts)
        except CompileError:
            return None

    def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
        build_extension = self._get_build_extension(
            extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
        old_threshold = None
        try:
            if not quiet:
                old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
            build_extension.run()
        finally:
            if not quiet and old_threshold is not None:
                distutils.log.set_threshold(old_threshold)

    def _add_pgo_flags(self, build_extension, step_name, temp_dir):
        compiler_type = build_extension.compiler.compiler_type
        if compiler_type == 'unix':
            compiler_cmd = build_extension.compiler.compiler_so
            # TODO: we could try to call "[cmd] --version" for better insights
            if not compiler_cmd:
                pass
            elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
                compiler_type = 'clang'
            elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
                compiler_type = 'icc'
            elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
                compiler_type = 'gcc'
            elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
                compiler_type = 'gcc'
        config = PGO_CONFIG.get(compiler_type)
        orig_flags = []
        if config and step_name in config:
            flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
            for extension in build_extension.extensions:
                orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
                extension.extra_compile_args = extension.extra_compile_args + flags
                extension.extra_link_args = extension.extra_link_args + flags
        else:
            print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
                  file=sys.stderr)
        return orig_flags

    @property
    def so_ext(self):
        """The extension suffix for compiled modules."""
        try:
            return self._so_ext
        except AttributeError:
            self._so_ext = self._get_build_extension().get_ext_filename('')
            return self._so_ext

    def _clear_distutils_mkpath_cache(self):
        """clear distutils mkpath cache

        prevents distutils from skipping re-creation of dirs that have been removed
        """
        try:
            from distutils.dir_util import _path_created
        except ImportError:
            pass
        else:
            _path_created.clear()

    def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
                             pgo_step_name=None, _build_ext=build_ext):
        self._clear_distutils_mkpath_cache()
        dist = Distribution()
        config_files = dist.find_config_files()
        try:
            config_files.remove('setup.cfg')
        except ValueError:
            pass
        dist.parse_config_files(config_files)

        if not temp_dir:
            temp_dir = lib_dir
        add_pgo_flags = self._add_pgo_flags

        if pgo_step_name:
            base_build_ext = _build_ext
            class _build_ext(_build_ext):
                def build_extensions(self):
                    add_pgo_flags(self, pgo_step_name, temp_dir)
                    base_build_ext.build_extensions(self)

        build_extension = _build_ext(dist)
        build_extension.finalize_options()
        if temp_dir:
            temp_dir = encode_fs(temp_dir)
            build_extension.build_temp = temp_dir
        if lib_dir:
            lib_dir = encode_fs(lib_dir)
            build_extension.build_lib = lib_dir
        if extension is not None:
            build_extension.extensions = [extension]
        return build_extension

    @staticmethod
    def clean_annotated_html(html):
        """Clean up the annotated HTML source.

        Strips the link to the generated C or C++ file, which we do not
        present to the user.
        """
        r = re.compile('<p>Raw output: <a href="(.*)">(.*)</a>')
        html = '\n'.join(l for l in html.splitlines() if not r.match(l))
        return html

__doc__ = __doc__.format(
    # rST doesn't see the -+ flag as part of an option list, so we
    # hide it from the module-level docstring.
    CYTHON_DOC=dedent(CythonMagics.cython.__doc__\
        .replace('-+, --cplus', '--cplus ')),
    CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
    CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
)
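
A hedged sketch of exercising the magics above programmatically, mirroring
the IPython test helpers used later in this diff (the cell body is
illustrative):

    from IPython.testing.globalipapp import get_ipython

    ip = get_ipython()
    ip.extension_manager.load_extension('cython')  # same effect as %load_ext cython
    ip.run_cell_magic('cython', '', 'def f(x):\n    return 2.0*x\n')
    ip.ex('g = f(10)')  # f was injected into the user namespace; g == 20.0
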
@@ -0,0 +1,106 @@
import difflib
import glob
import gzip
import os
import tempfile

import Cython.Build.Dependencies
import Cython.Utils
from Cython.TestUtils import CythonTest


class TestCyCache(CythonTest):

    def setUp(self):
        CythonTest.setUp(self)
        self.temp_dir = tempfile.mkdtemp(
            prefix='cycache-test',
            dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
        self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
        self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)

    def cache_files(self, file_glob):
        return glob.glob(os.path.join(self.cache_dir, file_glob))

    def fresh_cythonize(self, *args, **kwargs):
        Cython.Utils.clear_function_caches()
        Cython.Build.Dependencies._dep_tree = None  # discard method caches
        Cython.Build.Dependencies.cythonize(*args, **kwargs)

    def test_cycache_switch(self):
        content1 = 'value = 1\n'
        content2 = 'value = 2\n'
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'

        open(a_pyx, 'w').write(content1)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.assertEqual(1, len(self.cache_files('a.c*')))
        a_contents1 = open(a_c).read()
        os.unlink(a_c)

        open(a_pyx, 'w').write(content2)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_contents2 = open(a_c).read()
        os.unlink(a_c)

        self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
        self.assertEqual(2, len(self.cache_files('a.c*')))

        open(a_pyx, 'w').write(content1)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.assertEqual(2, len(self.cache_files('a.c*')))
        a_contents = open(a_c).read()
        self.assertEqual(
            a_contents, a_contents1,
            msg='\n'.join(list(difflib.unified_diff(
                a_contents.split('\n'), a_contents1.split('\n')))[:10]))

    def test_cycache_uses_cache(self):
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'
        open(a_pyx, 'w').write('pass')
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
        gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii'))
        os.unlink(a_c)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_contents = open(a_c).read()
        self.assertEqual(a_contents, 'fake stuff',
                         'Unexpected contents: %s...' % a_contents[:100])

    def test_multi_file_output(self):
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'
        a_h = a_pyx[:-4] + '.h'
        a_api_h = a_pyx[:-4] + '_api.h'
        open(a_pyx, 'w').write('cdef public api int foo(int x): return x\n')
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        expected = [a_c, a_h, a_api_h]
        for output in expected:
            self.assertTrue(os.path.exists(output), output)
            os.unlink(output)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        for output in expected:
            self.assertTrue(os.path.exists(output), output)

    def test_options_invalidation(self):
        hash_pyx = os.path.join(self.src_dir, 'options.pyx')
        hash_c = hash_pyx[:-len('.pyx')] + '.c'

        open(hash_pyx, 'w').write('pass')
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
        self.assertEqual(1, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
        self.assertEqual(2, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
        self.assertEqual(2, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
        self.assertEqual(2, len(self.cache_files('options.c*')))
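
The tests above exercise the compilation cache of cythonize(); a hedged
sketch of enabling it directly (source and cache paths are illustrative):

    from Cython.Build.Dependencies import cythonize

    # Reuses previously generated C output when source and options match.
    cythonize(['a.pyx'], cache='./.cycache')
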
@@ -0,0 +1,96 @@
import os, tempfile
from Cython.Shadow import inline
from Cython.Build.Inline import safe_type
from Cython.TestUtils import CythonTest

try:
    import numpy
    has_numpy = True
except:
    has_numpy = False

test_kwds = dict(force=True, quiet=True)

global_value = 100

class TestInline(CythonTest):
    def setUp(self):
        CythonTest.setUp(self)
        self.test_kwds = dict(test_kwds)
        if os.path.isdir('TEST_TMP'):
            lib_dir = os.path.join('TEST_TMP', 'inline')
        else:
            lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
        self.test_kwds['lib_dir'] = lib_dir

    def test_simple(self):
        self.assertEqual(inline("return 1+2", **self.test_kwds), 3)

    def test_types(self):
        self.assertEqual(inline("""
            cimport cython
            return cython.typeof(a), cython.typeof(b)
        """, a=1.0, b=[], **self.test_kwds), ('double', 'list object'))

    def test_locals(self):
        a = 1
        b = 2
        self.assertEqual(inline("return a+b", **self.test_kwds), 3)

    def test_globals(self):
        self.assertEqual(inline("return global_value + 1", **self.test_kwds), global_value + 1)

    def test_no_return(self):
        self.assertEqual(inline("""
            a = 1
            cdef double b = 2
            cdef c = []
        """, **self.test_kwds), dict(a=1, b=2.0, c=[]))

    def test_def_node(self):
        foo = inline("def foo(x): return x * x", **self.test_kwds)['foo']
        self.assertEqual(foo(7), 49)

    def test_class_ref(self):
        class Type(object):
            pass
        tp = inline("Type")['Type']
        self.assertEqual(tp, Type)

    def test_pure(self):
        import cython as cy
        b = inline("""
            b = cy.declare(float, a)
            c = cy.declare(cy.pointer(cy.float), &b)
            return b
        """, a=3, **self.test_kwds)
        self.assertEqual(type(b), float)

    def test_compiler_directives(self):
        self.assertEqual(
            inline('return sum(x)',
                   x=[1, 2, 3],
                   cython_compiler_directives={'boundscheck': False}),
            6
        )

    def test_lang_version(self):
        # GH-3419. Caching for inline code didn't always respect compiler directives.
        inline_divcode = "def f(int a, int b): return a/b"
        self.assertEqual(
            inline(inline_divcode, language_level=2)['f'](5, 2),
            2
        )
        self.assertEqual(
            inline(inline_divcode, language_level=3)['f'](5, 2),
            2.5
        )

    if has_numpy:

        def test_numpy(self):
            import numpy
            a = numpy.ndarray((10, 20))
            a[0, 0] = 10
            self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
            self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0)
@@ -0,0 +1,205 @@
# -*- coding: utf-8 -*-
# tag: ipython

"""Tests for the Cython magics extension."""

from __future__ import absolute_import

import os
import sys
from contextlib import contextmanager
from Cython.Build import IpythonMagic
from Cython.TestUtils import CythonTest

try:
    import IPython.testing.globalipapp
except ImportError:
    # Disable tests and fake helpers for initialisation below.
    def skip_if_not_installed(_):
        return None
else:
    def skip_if_not_installed(c):
        return c

try:
    # disable IPython history thread before it gets started to avoid having to clean it up
    from IPython.core.history import HistoryManager
    HistoryManager.enabled = False
except ImportError:
    pass

code = u"""\
def f(x):
    return 2*x
"""

cython3_code = u"""\
def f(int x):
    return 2 / x

def call(x):
    return f(*(x,))
"""

pgo_cython3_code = cython3_code + u"""\
def main():
    for _ in range(100): call(5)
main()
"""


if sys.platform == 'win32':
    # not using IPython's decorators here because they depend on "nose"
    try:
        from unittest import skip as skip_win32
    except ImportError:
        # poor dev's silent @unittest.skip()
        def skip_win32(dummy):
            def _skip_win32(func):
                return None
            return _skip_win32
else:
    def skip_win32(dummy):
        def _skip_win32(func):
            def wrapper(*args, **kwargs):
                func(*args, **kwargs)
            return wrapper
        return _skip_win32


@skip_if_not_installed
class TestIPythonMagic(CythonTest):

    @classmethod
    def setUpClass(cls):
        CythonTest.setUpClass()
        cls._ip = IPython.testing.globalipapp.get_ipython()

    def setUp(self):
        CythonTest.setUp(self)
        self._ip.extension_manager.load_extension('cython')

    def test_cython_inline(self):
        ip = self._ip
        ip.ex('a=10; b=20')
        result = ip.run_cell_magic('cython_inline', '', 'return a+b')
        self.assertEqual(result, 30)

    @skip_win32('Skip on Windows')
    def test_cython_pyximport(self):
        ip = self._ip
        module_name = '_test_cython_pyximport'
        ip.run_cell_magic('cython_pyximport', module_name, code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)
        ip.run_cell_magic('cython_pyximport', module_name, code)
        ip.ex('h = f(-10)')
        self.assertEqual(ip.user_ns['h'], -20.0)
        try:
            os.remove(module_name + '.pyx')
        except OSError:
            pass

    def test_cython(self):
        ip = self._ip
        ip.run_cell_magic('cython', '', code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_name(self):
        # The Cython module named 'mymodule' defines the function f.
        ip = self._ip
        ip.run_cell_magic('cython', '--name=mymodule', code)
        # This module can now be imported in the interactive namespace.
        ip.ex('import mymodule; g = mymodule.f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_language_level(self):
        # The Cython cell defines the functions f() and call().
        ip = self._ip
        ip.run_cell_magic('cython', '', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        if sys.version_info[0] < 3:
            self.assertEqual(ip.user_ns['g'], 2 // 10)
            self.assertEqual(ip.user_ns['h'], 2 // 10)
        else:
            self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
            self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    def test_cython3(self):
        # The Cython cell defines the functions f() and call().
        ip = self._ip
        ip.run_cell_magic('cython', '-3', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    def test_cython2(self):
        # The Cython cell defines the functions f() and call().
        ip = self._ip
        ip.run_cell_magic('cython', '-2', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        self.assertEqual(ip.user_ns['g'], 2 // 10)
        self.assertEqual(ip.user_ns['h'], 2 // 10)

    @skip_win32('Skip on Windows')
    def test_cython3_pgo(self):
        # The Cython cell defines the functions f() and call().
        ip = self._ip
        ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
        ip.ex('g = f(10); h = call(10); main()')
        self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    @skip_win32('Skip on Windows')
    def test_extlibs(self):
        ip = self._ip
        code = u"""
from libc.math cimport sin
x = sin(0.0)
        """
        ip.user_ns['x'] = 1
        ip.run_cell_magic('cython', '-l m', code)
        self.assertEqual(ip.user_ns['x'], 0)


    def test_cython_verbose(self):
|
||||
ip = self._ip
|
||||
ip.run_cell_magic('cython', '--verbose', code)
|
||||
ip.ex('g = f(10)')
|
||||
self.assertEqual(ip.user_ns['g'], 20.0)
|
||||
|
||||
def test_cython_verbose_thresholds(self):
|
||||
@contextmanager
|
||||
def mock_distutils():
|
||||
class MockLog:
|
||||
DEBUG = 1
|
||||
INFO = 2
|
||||
thresholds = [INFO]
|
||||
|
||||
def set_threshold(self, val):
|
||||
self.thresholds.append(val)
|
||||
return self.thresholds[-2]
|
||||
|
||||
|
||||
new_log = MockLog()
|
||||
old_log = IpythonMagic.distutils.log
|
||||
try:
|
||||
IpythonMagic.distutils.log = new_log
|
||||
yield new_log
|
||||
finally:
|
||||
IpythonMagic.distutils.log = old_log
|
||||
|
||||
ip = self._ip
|
||||
with mock_distutils() as verbose_log:
|
||||
ip.run_cell_magic('cython', '--verbose', code)
|
||||
ip.ex('g = f(10)')
|
||||
self.assertEqual(ip.user_ns['g'], 20.0)
|
||||
self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
|
||||
verbose_log.thresholds)
|
||||
|
||||
with mock_distutils() as normal_log:
|
||||
ip.run_cell_magic('cython', '', code)
|
||||
ip.ex('g = f(10)')
|
||||
self.assertEqual(ip.user_ns['g'], 20.0)
|
||||
self.assertEqual([normal_log.INFO], normal_log.thresholds)
|
|
@ -0,0 +1,57 @@
from Cython.Build.Dependencies import strip_string_literals

from Cython.TestUtils import CythonTest


class TestStripLiterals(CythonTest):

    def t(self, before, expected):
        actual, literals = strip_string_literals(before, prefix="_L")
        self.assertEqual(expected, actual)
        for key, value in literals.items():
            actual = actual.replace(key, value)
        self.assertEqual(before, actual)

    def test_empty(self):
        self.t("", "")

    def test_single_quote(self):
        self.t("'x'", "'_L1_'")

    def test_double_quote(self):
        self.t('"x"', '"_L1_"')

    def test_nested_quotes(self):
        self.t(""" '"' "'" """, """ '_L1_' "_L2_" """)

    def test_triple_quote(self):
        self.t(" '''a\n''' ", " '''_L1_''' ")

    def test_backslash(self):
        self.t(r"'a\'b'", "'_L1_'")
        self.t(r"'a\\'", "'_L1_'")
        self.t(r"'a\\\'b'", "'_L1_'")

    def test_unicode(self):
        self.t("u'abc'", "u'_L1_'")

    def test_raw(self):
        self.t(r"r'abc\\'", "r'_L1_'")

    def test_raw_unicode(self):
        self.t(r"ru'abc\\'", "ru'_L1_'")

    def test_comment(self):
        self.t("abc # foo", "abc #_L1_")

    def test_comment_and_quote(self):
        self.t("abc # 'x'", "abc #_L1_")
        self.t("'abc#'", "'_L1_'")

    def test_include(self):
        self.t("include 'a.pxi' # something here",
               "include '_L1_' #_L2_")

    def test_extern(self):
        self.t("cdef extern from 'a.h': # comment",
               "cdef extern from '_L1_': #_L2_")

@ -0,0 +1 @@
# empty file
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.

@ -0,0 +1,2 @@
from .Dependencies import cythonize
from .Distutils import build_ext
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
816
kivy_venv/lib/python3.11/site-packages/Cython/CodeWriter.py
Normal file
|
@ -0,0 +1,816 @@
|
|||
"""
|
||||
Serializes a Cython code tree to Cython code. This is primarily useful for
|
||||
debugging and testing purposes.
|
||||
|
||||
The output is in a strict format, no whitespace or comments from the input
|
||||
is preserved (and it could not be as it is not present in the code tree).
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .Compiler.Visitor import TreeVisitor
|
||||
from .Compiler.ExprNodes import *
|
||||
|
||||
|
||||
class LinesResult(object):
|
||||
def __init__(self):
|
||||
self.lines = []
|
||||
self.s = u""
|
||||
|
||||
def put(self, s):
|
||||
self.s += s
|
||||
|
||||
def newline(self):
|
||||
self.lines.append(self.s)
|
||||
self.s = u""
|
||||
|
||||
def putline(self, s):
|
||||
self.put(s)
|
||||
self.newline()
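# Example: put() buffers text until newline() flushes it, so
#   r = LinesResult(); r.put(u"cdef int "); r.putline(u"x")
# leaves r.lines == [u'cdef int x'].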
|
||||
|
||||
class DeclarationWriter(TreeVisitor):
|
||||
|
||||
indent_string = u" "
|
||||
|
||||
def __init__(self, result=None):
|
||||
super(DeclarationWriter, self).__init__()
|
||||
if result is None:
|
||||
result = LinesResult()
|
||||
self.result = result
|
||||
self.numindents = 0
|
||||
self.tempnames = {}
|
||||
self.tempblockindex = 0
|
||||
|
||||
def write(self, tree):
|
||||
self.visit(tree)
|
||||
return self.result
|
||||
|
||||
def indent(self):
|
||||
self.numindents += 1
|
||||
|
||||
def dedent(self):
|
||||
self.numindents -= 1
|
||||
|
||||
def startline(self, s=u""):
|
||||
self.result.put(self.indent_string * self.numindents + s)
|
||||
|
||||
def put(self, s):
|
||||
self.result.put(s)
|
||||
|
||||
def putline(self, s):
|
||||
self.result.putline(self.indent_string * self.numindents + s)
|
||||
|
||||
def endline(self, s=u""):
|
||||
self.result.putline(s)
|
||||
|
||||
def line(self, s):
|
||||
self.startline(s)
|
||||
self.endline()
|
||||
|
||||
def comma_separated_list(self, items, output_rhs=False):
|
||||
if len(items) > 0:
|
||||
for item in items[:-1]:
|
||||
self.visit(item)
|
||||
if output_rhs and item.default is not None:
|
||||
self.put(u" = ")
|
||||
self.visit(item.default)
|
||||
self.put(u", ")
|
||||
self.visit(items[-1])
|
||||
|
||||
def visit_Node(self, node):
|
||||
raise AssertionError("Node not handled by serializer: %r" % node)
|
||||
|
||||
def visit_ModuleNode(self, node):
|
||||
self.visitchildren(node)
|
||||
|
||||
def visit_StatListNode(self, node):
|
||||
self.visitchildren(node)
|
||||
|
||||
def visit_CDefExternNode(self, node):
|
||||
if node.include_file is None:
|
||||
file = u'*'
|
||||
else:
|
||||
file = u'"%s"' % node.include_file
|
||||
self.putline(u"cdef extern from %s:" % file)
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
|
||||
def visit_CPtrDeclaratorNode(self, node):
|
||||
self.put('*')
|
||||
self.visit(node.base)
|
||||
|
||||
def visit_CReferenceDeclaratorNode(self, node):
|
||||
self.put('&')
|
||||
self.visit(node.base)
|
||||
|
||||
def visit_CArrayDeclaratorNode(self, node):
|
||||
self.visit(node.base)
|
||||
self.put(u'[')
|
||||
if node.dimension is not None:
|
||||
self.visit(node.dimension)
|
||||
self.put(u']')
|
||||
|
||||
def visit_CFuncDeclaratorNode(self, node):
|
||||
# TODO: except, gil, etc.
|
||||
self.visit(node.base)
|
||||
self.put(u'(')
|
||||
self.comma_separated_list(node.args)
|
||||
self.endline(u')')
|
||||
|
||||
def visit_CNameDeclaratorNode(self, node):
|
||||
self.put(node.name)
|
||||
|
||||
def visit_CSimpleBaseTypeNode(self, node):
|
||||
# See Parsing.p_sign_and_longness
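# node.signed indexes the tuple below:
#   0 -> "unsigned ", 1 -> default (no prefix), 2 -> explicit "signed "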
|
||||
if node.is_basic_c_type:
|
||||
self.put(("unsigned ", "", "signed ")[node.signed])
|
||||
if node.longness < 0:
|
||||
self.put("short " * -node.longness)
|
||||
elif node.longness > 0:
|
||||
self.put("long " * node.longness)
|
||||
self.put(node.name)
|
||||
|
||||
def visit_CComplexBaseTypeNode(self, node):
|
||||
self.put(u'(')
|
||||
self.visit(node.base_type)
|
||||
self.visit(node.declarator)
|
||||
self.put(u')')
|
||||
|
||||
def visit_CNestedBaseTypeNode(self, node):
|
||||
self.visit(node.base_type)
|
||||
self.put(u'.')
|
||||
self.put(node.name)
|
||||
|
||||
def visit_TemplatedTypeNode(self, node):
|
||||
self.visit(node.base_type_node)
|
||||
self.put(u'[')
|
||||
self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs)
|
||||
self.put(u']')
|
||||
|
||||
def visit_CVarDefNode(self, node):
|
||||
self.startline(u"cdef ")
|
||||
self.visit(node.base_type)
|
||||
self.put(u" ")
|
||||
self.comma_separated_list(node.declarators, output_rhs=True)
|
||||
self.endline()
|
||||
|
||||
def visit_container_node(self, node, decl, extras, attributes):
|
||||
# TODO: visibility
|
||||
self.startline(decl)
|
||||
if node.name:
|
||||
self.put(u' ')
|
||||
self.put(node.name)
|
||||
if node.cname is not None:
|
||||
self.put(u' "%s"' % node.cname)
|
||||
if extras:
|
||||
self.put(extras)
|
||||
self.endline(':')
|
||||
self.indent()
|
||||
if not attributes:
|
||||
self.putline('pass')
|
||||
else:
|
||||
for attribute in attributes:
|
||||
self.visit(attribute)
|
||||
self.dedent()
|
||||
|
||||
def visit_CStructOrUnionDefNode(self, node):
|
||||
if node.typedef_flag:
|
||||
decl = u'ctypedef '
|
||||
else:
|
||||
decl = u'cdef '
|
||||
if node.visibility == 'public':
|
||||
decl += u'public '
|
||||
if node.packed:
|
||||
decl += u'packed '
|
||||
decl += node.kind
|
||||
self.visit_container_node(node, decl, None, node.attributes)
|
||||
|
||||
def visit_CppClassNode(self, node):
|
||||
extras = ""
|
||||
if node.templates:
|
||||
extras = u"[%s]" % ", ".join(node.templates)
|
||||
if node.base_classes:
|
||||
extras += "(%s)" % ", ".join(node.base_classes)
|
||||
self.visit_container_node(node, u"cdef cppclass", extras, node.attributes)
|
||||
|
||||
def visit_CEnumDefNode(self, node):
|
||||
self.visit_container_node(node, u"cdef enum", None, node.items)
|
||||
|
||||
def visit_CEnumDefItemNode(self, node):
|
||||
self.startline(node.name)
|
||||
if node.cname:
|
||||
self.put(u' "%s"' % node.cname)
|
||||
if node.value:
|
||||
self.put(u" = ")
|
||||
self.visit(node.value)
|
||||
self.endline()
|
||||
|
||||
def visit_CClassDefNode(self, node):
|
||||
assert not node.module_name
|
||||
if node.decorators:
|
||||
for decorator in node.decorators:
|
||||
self.visit(decorator)
|
||||
self.startline(u"cdef class ")
|
||||
self.put(node.class_name)
|
||||
if node.base_class_name:
|
||||
self.put(u"(")
|
||||
if node.base_class_module:
|
||||
self.put(node.base_class_module)
|
||||
self.put(u".")
|
||||
self.put(node.base_class_name)
|
||||
self.put(u")")
|
||||
self.endline(u":")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
|
||||
def visit_CTypeDefNode(self, node):
|
||||
self.startline(u"ctypedef ")
|
||||
self.visit(node.base_type)
|
||||
self.put(u" ")
|
||||
self.visit(node.declarator)
|
||||
self.endline()
|
||||
|
||||
def visit_FuncDefNode(self, node):
|
||||
self.startline(u"def %s(" % node.name)
|
||||
self.comma_separated_list(node.args)
|
||||
self.endline(u"):")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
|
||||
def visit_CArgDeclNode(self, node):
|
||||
if node.base_type.name is not None:
|
||||
self.visit(node.base_type)
|
||||
self.put(u" ")
|
||||
self.visit(node.declarator)
|
||||
if node.default is not None:
|
||||
self.put(u" = ")
|
||||
self.visit(node.default)
|
||||
|
||||
def visit_CImportStatNode(self, node):
|
||||
self.startline(u"cimport ")
|
||||
self.put(node.module_name)
|
||||
if node.as_name:
|
||||
self.put(u" as ")
|
||||
self.put(node.as_name)
|
||||
self.endline()
|
||||
|
||||
def visit_FromCImportStatNode(self, node):
|
||||
self.startline(u"from ")
|
||||
self.put(node.module_name)
|
||||
self.put(u" cimport ")
|
||||
first = True
|
||||
for pos, name, as_name, kind in node.imported_names:
|
||||
assert kind is None
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
self.put(u", ")
|
||||
self.put(name)
|
||||
if as_name:
|
||||
self.put(u" as ")
|
||||
self.put(as_name)
|
||||
self.endline()
|
||||
|
||||
def visit_NameNode(self, node):
|
||||
self.put(node.name)
|
||||
|
||||
def visit_IntNode(self, node):
|
||||
self.put(node.value)
|
||||
|
||||
def visit_NoneNode(self, node):
|
||||
self.put(u"None")
|
||||
|
||||
def visit_NotNode(self, node):
|
||||
self.put(u"(not ")
|
||||
self.visit(node.operand)
|
||||
self.put(u")")
|
||||
|
||||
def visit_DecoratorNode(self, node):
|
||||
self.startline("@")
|
||||
self.visit(node.decorator)
|
||||
self.endline()
|
||||
|
||||
def visit_BinopNode(self, node):
|
||||
self.visit(node.operand1)
|
||||
self.put(u" %s " % node.operator)
|
||||
self.visit(node.operand2)
|
||||
|
||||
def visit_AttributeNode(self, node):
|
||||
self.visit(node.obj)
|
||||
self.put(u".%s" % node.attribute)
|
||||
|
||||
def visit_BoolNode(self, node):
|
||||
self.put(str(node.value))
|
||||
|
||||
# FIXME: represent string nodes correctly
|
||||
def visit_StringNode(self, node):
|
||||
value = node.value
|
||||
if value.encoding is not None:
|
||||
value = value.encode(value.encoding)
|
||||
self.put(repr(value))
|
||||
|
||||
def visit_PassStatNode(self, node):
|
||||
self.startline(u"pass")
|
||||
self.endline()
|
||||
|
||||
class CodeWriter(DeclarationWriter):
|
||||
|
||||
def visit_SingleAssignmentNode(self, node):
|
||||
self.startline()
|
||||
self.visit(node.lhs)
|
||||
self.put(u" = ")
|
||||
self.visit(node.rhs)
|
||||
self.endline()
|
||||
|
||||
def visit_CascadedAssignmentNode(self, node):
|
||||
self.startline()
|
||||
for lhs in node.lhs_list:
|
||||
self.visit(lhs)
|
||||
self.put(u" = ")
|
||||
self.visit(node.rhs)
|
||||
self.endline()
|
||||
|
||||
def visit_PrintStatNode(self, node):
|
||||
self.startline(u"print ")
|
||||
self.comma_separated_list(node.arg_tuple.args)
|
||||
if not node.append_newline:
|
||||
self.put(u",")
|
||||
self.endline()
|
||||
|
||||
def visit_ForInStatNode(self, node):
|
||||
self.startline(u"for ")
|
||||
self.visit(node.target)
|
||||
self.put(u" in ")
|
||||
self.visit(node.iterator.sequence)
|
||||
self.endline(u":")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
if node.else_clause is not None:
|
||||
self.line(u"else:")
|
||||
self.indent()
|
||||
self.visit(node.else_clause)
|
||||
self.dedent()
|
||||
|
||||
def visit_IfStatNode(self, node):
|
||||
# The IfClauseNode is handled directly without a separate match
|
||||
# for clarity.
|
||||
self.startline(u"if ")
|
||||
self.visit(node.if_clauses[0].condition)
|
||||
self.endline(":")
|
||||
self.indent()
|
||||
self.visit(node.if_clauses[0].body)
|
||||
self.dedent()
|
||||
for clause in node.if_clauses[1:]:
|
||||
self.startline("elif ")
|
||||
self.visit(clause.condition)
|
||||
self.endline(":")
|
||||
self.indent()
|
||||
self.visit(clause.body)
|
||||
self.dedent()
|
||||
if node.else_clause is not None:
|
||||
self.line("else:")
|
||||
self.indent()
|
||||
self.visit(node.else_clause)
|
||||
self.dedent()
|
||||
|
||||
def visit_SequenceNode(self, node):
|
||||
self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
|
||||
|
||||
def visit_SimpleCallNode(self, node):
|
||||
self.visit(node.function)
|
||||
self.put(u"(")
|
||||
self.comma_separated_list(node.args)
|
||||
self.put(")")
|
||||
|
||||
def visit_GeneralCallNode(self, node):
|
||||
self.visit(node.function)
|
||||
self.put(u"(")
|
||||
posarg = node.positional_args
|
||||
if isinstance(posarg, AsTupleNode):
|
||||
self.visit(posarg.arg)
|
||||
else:
|
||||
self.comma_separated_list(posarg.args) # TupleNode.args
|
||||
if node.keyword_args:
|
||||
if isinstance(node.keyword_args, DictNode):
|
||||
for i, (name, value) in enumerate(node.keyword_args.key_value_pairs):
|
||||
if i > 0:
|
||||
self.put(', ')
|
||||
self.visit(name)
|
||||
self.put('=')
|
||||
self.visit(value)
|
||||
else:
|
||||
raise Exception("Not implemented yet")
|
||||
self.put(u")")
|
||||
|
||||
def visit_ExprStatNode(self, node):
|
||||
self.startline()
|
||||
self.visit(node.expr)
|
||||
self.endline()
|
||||
|
||||
def visit_InPlaceAssignmentNode(self, node):
|
||||
self.startline()
|
||||
self.visit(node.lhs)
|
||||
self.put(u" %s= " % node.operator)
|
||||
self.visit(node.rhs)
|
||||
self.endline()
|
||||
|
||||
def visit_WithStatNode(self, node):
|
||||
self.startline()
|
||||
self.put(u"with ")
|
||||
self.visit(node.manager)
|
||||
if node.target is not None:
|
||||
self.put(u" as ")
|
||||
self.visit(node.target)
|
||||
self.endline(u":")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
|
||||
def visit_TryFinallyStatNode(self, node):
|
||||
self.line(u"try:")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
self.line(u"finally:")
|
||||
self.indent()
|
||||
self.visit(node.finally_clause)
|
||||
self.dedent()
|
||||
|
||||
def visit_TryExceptStatNode(self, node):
|
||||
self.line(u"try:")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
for x in node.except_clauses:
|
||||
self.visit(x)
|
||||
if node.else_clause is not None:
|
||||
self.visit(node.else_clause)
|
||||
|
||||
def visit_ExceptClauseNode(self, node):
|
||||
self.startline(u"except")
|
||||
if node.pattern is not None:
|
||||
self.put(u" ")
|
||||
self.visit(node.pattern)
|
||||
if node.target is not None:
|
||||
self.put(u", ")
|
||||
self.visit(node.target)
|
||||
self.endline(":")
|
||||
self.indent()
|
||||
self.visit(node.body)
|
||||
self.dedent()
|
||||
|
||||
def visit_ReturnStatNode(self, node):
|
||||
self.startline("return ")
|
||||
self.visit(node.value)
|
||||
self.endline()
|
||||
|
||||
def visit_ReraiseStatNode(self, node):
|
||||
self.line("raise")
|
||||
|
||||
def visit_ImportNode(self, node):
|
||||
self.put(u"(import %s)" % node.module_name.value)
|
||||
|
||||
def visit_TempsBlockNode(self, node):
|
||||
"""
|
||||
Temporaries are output like $1_1, where the first number is
|
||||
an index of the TempsBlockNode and the second number is an index
|
||||
of the temporary which that block allocates.
|
||||
"""
|
||||
idx = 0
|
||||
for handle in node.temps:
|
||||
self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx)
|
||||
idx += 1
|
||||
self.tempblockindex += 1
|
||||
self.visit(node.body)
|
||||
|
||||
def visit_TempRefNode(self, node):
|
||||
self.put(self.tempnames[node.handle])
|
||||
|
||||
|
||||
class PxdWriter(DeclarationWriter):
|
||||
def __call__(self, node):
|
||||
print(u'\n'.join(self.write(node).lines))
|
||||
return node
|
||||
|
||||
def visit_CFuncDefNode(self, node):
|
||||
if 'inline' in node.modifiers:
|
||||
return
|
||||
if node.overridable:
|
||||
self.startline(u'cpdef ')
|
||||
else:
|
||||
self.startline(u'cdef ')
|
||||
if node.visibility != 'private':
|
||||
self.put(node.visibility)
|
||||
self.put(u' ')
|
||||
if node.api:
|
||||
self.put(u'api ')
|
||||
self.visit(node.declarator)
|
||||
|
||||
def visit_StatNode(self, node):
|
||||
pass
|
||||
|
||||
|
||||
class ExpressionWriter(TreeVisitor):
|
||||
|
||||
def __init__(self, result=None):
|
||||
super(ExpressionWriter, self).__init__()
|
||||
if result is None:
|
||||
result = u""
|
||||
self.result = result
|
||||
self.precedence = [0]
|
||||
|
||||
def write(self, tree):
|
||||
self.visit(tree)
|
||||
return self.result
|
||||
|
||||
def put(self, s):
|
||||
self.result += s
|
||||
|
||||
def remove(self, s):
|
||||
if self.result.endswith(s):
|
||||
self.result = self.result[:-len(s)]
|
||||
|
||||
def comma_separated_list(self, items):
|
||||
if len(items) > 0:
|
||||
for item in items[:-1]:
|
||||
self.visit(item)
|
||||
self.put(u", ")
|
||||
self.visit(items[-1])
|
||||
|
||||
def visit_Node(self, node):
|
||||
raise AssertionError("Node not handled by serializer: %r" % node)
|
||||
|
||||
def visit_NameNode(self, node):
|
||||
self.put(node.name)
|
||||
|
||||
def visit_NoneNode(self, node):
|
||||
self.put(u"None")
|
||||
|
||||
def visit_EllipsisNode(self, node):
|
||||
self.put(u"...")
|
||||
|
||||
def visit_BoolNode(self, node):
|
||||
self.put(str(node.value))
|
||||
|
||||
def visit_ConstNode(self, node):
|
||||
self.put(str(node.value))
|
||||
|
||||
def visit_ImagNode(self, node):
|
||||
self.put(node.value)
|
||||
self.put(u"j")
|
||||
|
||||
def emit_string(self, node, prefix=u""):
|
||||
repr_val = repr(node.value)
|
||||
if repr_val[0] in 'ub':
|
||||
repr_val = repr_val[1:]
|
||||
self.put(u"%s%s" % (prefix, repr_val))
|
||||
|
||||
def visit_BytesNode(self, node):
|
||||
self.emit_string(node, u"b")
|
||||
|
||||
def visit_StringNode(self, node):
|
||||
self.emit_string(node)
|
||||
|
||||
def visit_UnicodeNode(self, node):
|
||||
self.emit_string(node, u"u")
|
||||
|
||||
def emit_sequence(self, node, parens=(u"", u"")):
|
||||
open_paren, close_paren = parens
|
||||
items = node.subexpr_nodes()
|
||||
self.put(open_paren)
|
||||
self.comma_separated_list(items)
|
||||
self.put(close_paren)
|
||||
|
||||
def visit_ListNode(self, node):
|
||||
self.emit_sequence(node, u"[]")
|
||||
|
||||
def visit_TupleNode(self, node):
|
||||
self.emit_sequence(node, u"()")
|
||||
|
||||
def visit_SetNode(self, node):
|
||||
if len(node.subexpr_nodes()) > 0:
|
||||
self.emit_sequence(node, u"{}")
|
||||
else:
|
||||
self.put(u"set()")
|
||||
|
||||
def visit_DictNode(self, node):
|
||||
self.emit_sequence(node, u"{}")
|
||||
|
||||
def visit_DictItemNode(self, node):
|
||||
self.visit(node.key)
|
||||
self.put(u": ")
|
||||
self.visit(node.value)
|
||||
|
||||
unop_precedence = {
|
||||
'not': 3, '!': 3,
|
||||
'+': 11, '-': 11, '~': 11,
|
||||
}
|
||||
binop_precedence = {
|
||||
'or': 1,
|
||||
'and': 2,
|
||||
# unary: 'not': 3, '!': 3,
|
||||
'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
|
||||
'|': 5,
|
||||
'^': 6,
|
||||
'&': 7,
|
||||
'<<': 8, '>>': 8,
|
||||
'+': 9, '-': 9,
|
||||
'*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
|
||||
# unary: '+': 11, '-': 11, '~': 11
|
||||
'**': 12,
|
||||
}
|
||||
|
||||
def operator_enter(self, new_prec):
|
||||
old_prec = self.precedence[-1]
|
||||
if old_prec > new_prec:
|
||||
self.put(u"(")
|
||||
self.precedence.append(new_prec)
|
||||
|
||||
def operator_exit(self):
|
||||
old_prec, new_prec = self.precedence[-2:]
|
||||
if old_prec > new_prec:
|
||||
self.put(u")")
|
||||
self.precedence.pop()
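# Example: serializing (a + b) * c enters Mul (precedence 10) and then Add
# (precedence 9); since 10 > 9 the Add operands are parenthesised, giving
# "(a + b) * c" rather than the incorrect "a + b * c".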
|
||||
|
||||
def visit_NotNode(self, node):
|
||||
op = 'not'
|
||||
prec = self.unop_precedence[op]
|
||||
self.operator_enter(prec)
|
||||
self.put(u"not ")
|
||||
self.visit(node.operand)
|
||||
self.operator_exit()
|
||||
|
||||
def visit_UnopNode(self, node):
|
||||
op = node.operator
|
||||
prec = self.unop_precedence[op]
|
||||
self.operator_enter(prec)
|
||||
self.put(u"%s" % node.operator)
|
||||
self.visit(node.operand)
|
||||
self.operator_exit()
|
||||
|
||||
def visit_BinopNode(self, node):
|
||||
op = node.operator
|
||||
prec = self.binop_precedence.get(op, 0)
|
||||
self.operator_enter(prec)
|
||||
self.visit(node.operand1)
|
||||
self.put(u" %s " % op.replace('_', ' '))
|
||||
self.visit(node.operand2)
|
||||
self.operator_exit()
|
||||
|
||||
def visit_BoolBinopNode(self, node):
|
||||
self.visit_BinopNode(node)
|
||||
|
||||
def visit_PrimaryCmpNode(self, node):
|
||||
self.visit_BinopNode(node)
|
||||
|
||||
def visit_IndexNode(self, node):
|
||||
self.visit(node.base)
|
||||
self.put(u"[")
|
||||
if isinstance(node.index, TupleNode):
|
||||
self.emit_sequence(node.index)
|
||||
else:
|
||||
self.visit(node.index)
|
||||
self.put(u"]")
|
||||
|
||||
def visit_SliceIndexNode(self, node):
|
||||
self.visit(node.base)
|
||||
self.put(u"[")
|
||||
if node.start:
|
||||
self.visit(node.start)
|
||||
self.put(u":")
|
||||
if node.stop:
|
||||
self.visit(node.stop)
|
||||
if node.slice:
|
||||
self.put(u":")
|
||||
self.visit(node.slice)
|
||||
self.put(u"]")
|
||||
|
||||
def visit_SliceNode(self, node):
|
||||
if not node.start.is_none:
|
||||
self.visit(node.start)
|
||||
self.put(u":")
|
||||
if not node.stop.is_none:
|
||||
self.visit(node.stop)
|
||||
if not node.step.is_none:
|
||||
self.put(u":")
|
||||
self.visit(node.step)
|
||||
|
||||
def visit_CondExprNode(self, node):
|
||||
self.visit(node.true_val)
|
||||
self.put(u" if ")
|
||||
self.visit(node.test)
|
||||
self.put(u" else ")
|
||||
self.visit(node.false_val)
|
||||
|
||||
def visit_AttributeNode(self, node):
|
||||
self.visit(node.obj)
|
||||
self.put(u".%s" % node.attribute)
|
||||
|
||||
def visit_SimpleCallNode(self, node):
|
||||
self.visit(node.function)
|
||||
self.put(u"(")
|
||||
self.comma_separated_list(node.args)
|
||||
self.put(")")
|
||||
|
||||
def emit_pos_args(self, node):
|
||||
if node is None:
|
||||
return
|
||||
if isinstance(node, AddNode):
|
||||
self.emit_pos_args(node.operand1)
|
||||
self.emit_pos_args(node.operand2)
|
||||
elif isinstance(node, TupleNode):
|
||||
for expr in node.subexpr_nodes():
|
||||
self.visit(expr)
|
||||
self.put(u", ")
|
||||
elif isinstance(node, AsTupleNode):
|
||||
self.put("*")
|
||||
self.visit(node.arg)
|
||||
self.put(u", ")
|
||||
else:
|
||||
self.visit(node)
|
||||
self.put(u", ")
|
||||
|
||||
def emit_kwd_args(self, node):
|
||||
if node is None:
|
||||
return
|
||||
if isinstance(node, MergedDictNode):
|
||||
for expr in node.subexpr_nodes():
|
||||
self.emit_kwd_args(expr)
|
||||
elif isinstance(node, DictNode):
|
||||
for expr in node.subexpr_nodes():
|
||||
self.put(u"%s=" % expr.key.value)
|
||||
self.visit(expr.value)
|
||||
self.put(u", ")
|
||||
else:
|
||||
self.put(u"**")
|
||||
self.visit(node)
|
||||
self.put(u", ")
|
||||
|
||||
def visit_GeneralCallNode(self, node):
|
||||
self.visit(node.function)
|
||||
self.put(u"(")
|
||||
self.emit_pos_args(node.positional_args)
|
||||
self.emit_kwd_args(node.keyword_args)
|
||||
self.remove(u", ")
|
||||
self.put(")")
|
||||
|
||||
def emit_comprehension(self, body, target,
|
||||
sequence, condition,
|
||||
parens=(u"", u"")):
|
||||
open_paren, close_paren = parens
|
||||
self.put(open_paren)
|
||||
self.visit(body)
|
||||
self.put(u" for ")
|
||||
self.visit(target)
|
||||
self.put(u" in ")
|
||||
self.visit(sequence)
|
||||
if condition:
|
||||
self.put(u" if ")
|
||||
self.visit(condition)
|
||||
self.put(close_paren)
|
||||
|
||||
def visit_ComprehensionAppendNode(self, node):
|
||||
self.visit(node.expr)
|
||||
|
||||
def visit_DictComprehensionAppendNode(self, node):
|
||||
self.visit(node.key_expr)
|
||||
self.put(u": ")
|
||||
self.visit(node.value_expr)
|
||||
|
||||
def visit_ComprehensionNode(self, node):
|
||||
tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
|
||||
parens = tpmap[node.type.py_type_name()]
|
||||
body = node.loop.body
|
||||
target = node.loop.target
|
||||
sequence = node.loop.iterator.sequence
|
||||
condition = None
|
||||
if hasattr(body, 'if_clauses'):
|
||||
# type(body) is Nodes.IfStatNode
|
||||
condition = body.if_clauses[0].condition
|
||||
body = body.if_clauses[0].body
|
||||
self.emit_comprehension(body, target, sequence, condition, parens)
|
||||
|
||||
def visit_GeneratorExpressionNode(self, node):
|
||||
body = node.loop.body
|
||||
target = node.loop.target
|
||||
sequence = node.loop.iterator.sequence
|
||||
condition = None
|
||||
if hasattr(body, 'if_clauses'):
|
||||
# type(body) is Nodes.IfStatNode
|
||||
condition = body.if_clauses[0].condition
|
||||
body = body.if_clauses[0].body.expr.arg
|
||||
elif hasattr(body, 'expr'):
|
||||
# type(body) is Nodes.ExprStatNode
|
||||
body = body.expr.arg
|
||||
self.emit_comprehension(body, target, sequence, condition, u"()")
|
|
@ -0,0 +1,99 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .Visitor import ScopeTrackingTransform
|
||||
from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode
|
||||
from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode
|
||||
from .PyrexTypes import py_object_type
|
||||
from .StringEncoding import EncodedString
|
||||
from . import Symtab
|
||||
|
||||
class AutoTestDictTransform(ScopeTrackingTransform):
|
||||
# Handles autotestdict directive
|
||||
|
||||
blacklist = ['__cinit__', '__dealloc__', '__richcmp__',
|
||||
'__nonzero__', '__bool__',
|
||||
'__len__', '__contains__']
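# Illustrative effect: under "# cython: autotestdict=True", a def whose
# docstring contains '>>>' examples is collected into the module-level
# __test__ dict built below, where the doctest module can discover it.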
|
||||
|
||||
def visit_ModuleNode(self, node):
|
||||
if node.is_pxd:
|
||||
return node
|
||||
self.scope_type = 'module'
|
||||
self.scope_node = node
|
||||
|
||||
if not self.current_directives['autotestdict']:
|
||||
return node
|
||||
self.all_docstrings = self.current_directives['autotestdict.all']
|
||||
self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef']
|
||||
|
||||
assert isinstance(node.body, StatListNode)
|
||||
|
||||
# First see if __test__ is already created
|
||||
if u'__test__' in node.scope.entries:
|
||||
# Do nothing
|
||||
return node
|
||||
|
||||
pos = node.pos
|
||||
|
||||
self.tests = []
|
||||
self.testspos = node.pos
|
||||
|
||||
test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'),
|
||||
py_object_type,
|
||||
pos,
|
||||
visibility='public')
|
||||
create_test_dict_assignment = SingleAssignmentNode(pos,
|
||||
lhs=NameNode(pos, name=EncodedString(u'__test__'),
|
||||
entry=test_dict_entry),
|
||||
rhs=DictNode(pos, key_value_pairs=self.tests))
|
||||
self.visitchildren(node)
|
||||
node.body.stats.append(create_test_dict_assignment)
|
||||
return node
|
||||
|
||||
def add_test(self, testpos, path, doctest):
|
||||
pos = self.testspos
|
||||
keystr = u'%s (line %d)' % (path, testpos[1])
|
||||
key = UnicodeNode(pos, value=EncodedString(keystr))
|
||||
value = UnicodeNode(pos, value=doctest)
|
||||
self.tests.append(DictItemNode(pos, key=key, value=value))
|
||||
|
||||
def visit_ExprNode(self, node):
|
||||
# expressions cannot contain functions and lambda expressions
|
||||
# do not have a docstring
|
||||
return node
|
||||
|
||||
def visit_FuncDefNode(self, node):
|
||||
if not node.doc or (isinstance(node, DefNode) and node.fused_py_func):
|
||||
return node
|
||||
if not self.cdef_docstrings:
|
||||
if isinstance(node, CFuncDefNode) and not node.py_func:
|
||||
return node
|
||||
if not self.all_docstrings and '>>>' not in node.doc:
|
||||
return node
|
||||
|
||||
pos = self.testspos
|
||||
if self.scope_type == 'module':
|
||||
path = node.entry.name
|
||||
elif self.scope_type in ('pyclass', 'cclass'):
|
||||
if isinstance(node, CFuncDefNode):
|
||||
if node.py_func is not None:
|
||||
name = node.py_func.name
|
||||
else:
|
||||
name = node.entry.name
|
||||
else:
|
||||
name = node.name
|
||||
if self.scope_type == 'cclass' and name in self.blacklist:
|
||||
return node
|
||||
if self.scope_type == 'pyclass':
|
||||
class_name = self.scope_node.name
|
||||
else:
|
||||
class_name = self.scope_node.class_name
|
||||
if isinstance(node.entry.scope, Symtab.PropertyScope):
|
||||
property_method_name = node.entry.scope.name
|
||||
path = "%s.%s.%s" % (class_name, node.entry.scope.name,
|
||||
node.entry.name)
|
||||
else:
|
||||
path = "%s.%s" % (class_name, node.entry.name)
|
||||
else:
|
||||
assert False
|
||||
self.add_test(node.pos, path, node.doc)
|
||||
return node
|
|
@ -0,0 +1,317 @@
|
|||
# Note: Work in progress
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import codecs
|
||||
import textwrap
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from collections import defaultdict
|
||||
from xml.sax.saxutils import escape as html_escape
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO # does not support writing 'str' in Py2
|
||||
|
||||
from . import Version
|
||||
from .Code import CCodeWriter
|
||||
from .. import Utils
|
||||
|
||||
|
||||
class AnnotationCCodeWriter(CCodeWriter):
|
||||
|
||||
def __init__(self, create_from=None, buffer=None, copy_formatting=True):
|
||||
CCodeWriter.__init__(self, create_from, buffer, copy_formatting=copy_formatting)
|
||||
if create_from is None:
|
||||
self.annotation_buffer = StringIO()
|
||||
self.last_annotated_pos = None
|
||||
# annotations[filename][line] -> [(column, AnnotationItem)*]
|
||||
self.annotations = defaultdict(partial(defaultdict, list))
|
||||
# code[filename][line] -> str
|
||||
self.code = defaultdict(partial(defaultdict, str))
|
||||
# scopes[filename][line] -> set(scopes)
|
||||
self.scopes = defaultdict(partial(defaultdict, set))
|
||||
else:
|
||||
# When creating an insertion point, keep references to the same database
|
||||
self.annotation_buffer = create_from.annotation_buffer
|
||||
self.annotations = create_from.annotations
|
||||
self.code = create_from.code
|
||||
self.scopes = create_from.scopes
|
||||
self.last_annotated_pos = create_from.last_annotated_pos
|
||||
|
||||
def create_new(self, create_from, buffer, copy_formatting):
|
||||
return AnnotationCCodeWriter(create_from, buffer, copy_formatting)
|
||||
|
||||
def write(self, s):
|
||||
CCodeWriter.write(self, s)
|
||||
self.annotation_buffer.write(s)
|
||||
|
||||
def mark_pos(self, pos, trace=True):
|
||||
if pos is not None:
|
||||
CCodeWriter.mark_pos(self, pos, trace)
|
||||
if self.funcstate and self.funcstate.scope:
|
||||
# lambdas and genexprs can result in multiple scopes per line => keep them in a set
|
||||
self.scopes[pos[0].filename][pos[1]].add(self.funcstate.scope)
|
||||
if self.last_annotated_pos:
|
||||
source_desc, line, _ = self.last_annotated_pos
|
||||
pos_code = self.code[source_desc.filename]
|
||||
pos_code[line] += self.annotation_buffer.getvalue()
|
||||
self.annotation_buffer = StringIO()
|
||||
self.last_annotated_pos = pos
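# mark_pos is the flush point: output buffered since the previous position
# is credited to that source line in self.code.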
|
||||
|
||||
def annotate(self, pos, item):
|
||||
self.annotations[pos[0].filename][pos[1]].append((pos[2], item))
|
||||
|
||||
def _css(self):
|
||||
"""css template will later allow to choose a colormap"""
|
||||
css = [self._css_template]
|
||||
for i in range(255):
|
||||
color = u"FFFF%02x" % int(255/(1+i/10.0))
|
||||
css.append('.cython.score-%d {background-color: #%s;}' % (i, color))
|
||||
try:
|
||||
from pygments.formatters import HtmlFormatter
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
css.append(HtmlFormatter().get_style_defs('.cython'))
|
||||
return '\n'.join(css)
|
||||
|
||||
_css_template = textwrap.dedent("""
|
||||
body.cython { font-family: courier; font-size: 12; }
|
||||
|
||||
.cython.tag { }
|
||||
.cython.line { margin: 0em }
|
||||
.cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; }
|
||||
|
||||
.cython.line .run { background-color: #B0FFB0; }
|
||||
.cython.line .mis { background-color: #FFB0B0; }
|
||||
.cython.code.run { border-left: 8px solid #B0FFB0; }
|
||||
.cython.code.mis { border-left: 8px solid #FFB0B0; }
|
||||
|
||||
.cython.code .py_c_api { color: red; }
|
||||
.cython.code .py_macro_api { color: #FF7000; }
|
||||
.cython.code .pyx_c_api { color: #FF3000; }
|
||||
.cython.code .pyx_macro_api { color: #FF7000; }
|
||||
.cython.code .refnanny { color: #FFA000; }
|
||||
.cython.code .trace { color: #FFA000; }
|
||||
.cython.code .error_goto { color: #FFA000; }
|
||||
|
||||
.cython.code .coerce { color: #008000; border: 1px dotted #008000 }
|
||||
.cython.code .py_attr { color: #FF0000; font-weight: bold; }
|
||||
.cython.code .c_attr { color: #0000FF; }
|
||||
.cython.code .py_call { color: #FF0000; font-weight: bold; }
|
||||
.cython.code .c_call { color: #0000FF; }
|
||||
""")
|
||||
|
||||
# on-click toggle function to show/hide C source code
|
||||
_onclick_attr = ' onclick="{0}"'.format((
|
||||
"(function(s){"
|
||||
" s.display = s.display === 'block' ? 'none' : 'block'"
|
||||
"})(this.nextElementSibling.style)"
|
||||
).replace(' ', '') # poor dev's JS minification
|
||||
)
|
||||
|
||||
def save_annotation(self, source_filename, target_filename, coverage_xml=None):
|
||||
with Utils.open_source_file(source_filename) as f:
|
||||
code = f.read()
|
||||
generated_code = self.code.get(source_filename, {})
|
||||
c_file = Utils.decode_filename(os.path.basename(target_filename))
|
||||
html_filename = os.path.splitext(target_filename)[0] + ".html"
|
||||
|
||||
with codecs.open(html_filename, "w", encoding="UTF-8") as out_buffer:
|
||||
out_buffer.write(self._save_annotation(code, generated_code, c_file, source_filename, coverage_xml))
|
||||
|
||||
def _save_annotation_header(self, c_file, source_filename, coverage_timestamp=None):
|
||||
coverage_info = ''
|
||||
if coverage_timestamp:
|
||||
coverage_info = u' with coverage data from {timestamp}'.format(
|
||||
timestamp=datetime.fromtimestamp(int(coverage_timestamp) // 1000))
|
||||
|
||||
outlist = [
|
||||
textwrap.dedent(u'''\
|
||||
<!DOCTYPE html>
|
||||
<!-- Generated by Cython {watermark} -->
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>Cython: {filename}</title>
|
||||
<style type="text/css">
|
||||
{css}
|
||||
</style>
|
||||
</head>
|
||||
<body class="cython">
|
||||
<p><span style="border-bottom: solid 1px grey;">Generated by Cython {watermark}</span>{more_info}</p>
|
||||
<p>
|
||||
<span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
|
||||
Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
|
||||
</p>
|
||||
''').format(css=self._css(), watermark=Version.watermark,
|
||||
filename=os.path.basename(source_filename) if source_filename else '',
|
||||
more_info=coverage_info)
|
||||
]
|
||||
if c_file:
|
||||
outlist.append(u'<p>Raw output: <a href="%s">%s</a></p>\n' % (c_file, c_file))
|
||||
return outlist
|
||||
|
||||
def _save_annotation_footer(self):
|
||||
return (u'</body></html>\n',)
|
||||
|
||||
def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None):
|
||||
"""
|
||||
code : original Cython source code (a single string)
generated_code : generated C code keyed by line number in the original file
c_file : filename in which the C code has been written
|
||||
"""
|
||||
if coverage_xml is not None and source_filename:
|
||||
coverage_timestamp = coverage_xml.get('timestamp', '').strip()
|
||||
covered_lines = self._get_line_coverage(coverage_xml, source_filename)
|
||||
else:
|
||||
coverage_timestamp = covered_lines = None
|
||||
annotation_items = dict(self.annotations[source_filename])
|
||||
scopes = dict(self.scopes[source_filename])
|
||||
|
||||
outlist = []
|
||||
outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp))
|
||||
outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines))
|
||||
outlist.extend(self._save_annotation_footer())
|
||||
return ''.join(outlist)
|
||||
|
||||
def _get_line_coverage(self, coverage_xml, source_filename):
|
||||
coverage_data = None
|
||||
for entry in coverage_xml.iterfind('.//class'):
|
||||
if not entry.get('filename'):
|
||||
continue
|
||||
if (entry.get('filename') == source_filename or
|
||||
os.path.abspath(entry.get('filename')) == source_filename):
|
||||
coverage_data = entry
|
||||
break
|
||||
elif source_filename.endswith(entry.get('filename')):
|
||||
coverage_data = entry # but we might still find a better match...
|
||||
if coverage_data is None:
|
||||
return None
|
||||
return dict(
|
||||
(int(line.get('number')), int(line.get('hits')))
|
||||
for line in coverage_data.iterfind('lines/line')
|
||||
)
|
||||
|
||||
def _htmlify_code(self, code):
|
||||
try:
|
||||
from pygments import highlight
|
||||
from pygments.lexers import CythonLexer
|
||||
from pygments.formatters import HtmlFormatter
|
||||
except ImportError:
|
||||
# no Pygments, just escape the code
|
||||
return html_escape(code)
|
||||
|
||||
html_code = highlight(
|
||||
code, CythonLexer(stripnl=False, stripall=False),
|
||||
HtmlFormatter(nowrap=True))
|
||||
return html_code
|
||||
|
||||
def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None):
|
||||
outlist = [u'<div class="cython">']
|
||||
pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
|
||||
new_calls_map = dict(
|
||||
(name, 0) for name in
|
||||
'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split()
|
||||
).copy
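# new_calls_map is the bound dict.copy, so each call returns a fresh
# per-line counter dict for the regex categories matched below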
|
||||
|
||||
self.mark_pos(None)
|
||||
|
||||
def annotate(match):
|
||||
group_name = match.lastgroup
|
||||
calls[group_name] += 1
|
||||
return u"<span class='%s'>%s</span>" % (
|
||||
group_name, match.group(group_name))
|
||||
|
||||
lines = self._htmlify_code(cython_code).splitlines()
|
||||
lineno_width = len(str(len(lines)))
|
||||
if not covered_lines:
|
||||
covered_lines = None
|
||||
|
||||
for k, line in enumerate(lines, 1):
|
||||
try:
|
||||
c_code = generated_code[k]
|
||||
except KeyError:
|
||||
c_code = ''
|
||||
else:
|
||||
c_code = _replace_pos_comment(pos_comment_marker, c_code)
|
||||
if c_code.startswith(pos_comment_marker):
|
||||
c_code = c_code[len(pos_comment_marker):]
|
||||
c_code = html_escape(c_code)
|
||||
|
||||
calls = new_calls_map()
|
||||
c_code = _parse_code(annotate, c_code)
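# weighted "Python interaction" score: full C-API calls count five-fold,
# Cython helper calls double, macro-level API single; it picks the
# score-N colour class for the line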
|
||||
score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
|
||||
calls['py_macro_api'] + calls['pyx_macro_api'])
|
||||
|
||||
if c_code:
|
||||
onclick = self._onclick_attr
|
||||
expandsymbol = '+'
|
||||
else:
|
||||
onclick = ''
|
||||
expandsymbol = ' '
|
||||
|
||||
covered = ''
|
||||
if covered_lines is not None and k in covered_lines:
|
||||
hits = covered_lines[k]
|
||||
if hits is not None:
|
||||
covered = 'run' if hits else 'mis'
|
||||
|
||||
outlist.append(
|
||||
u'<pre class="cython line score-{score}"{onclick}>'
|
||||
# generate line number with expand symbol in front,
|
||||
# and the right number of digits
|
||||
u'{expandsymbol}<span class="{covered}">{line:0{lineno_width}d}</span>: {code}</pre>\n'.format(
|
||||
score=score,
|
||||
expandsymbol=expandsymbol,
|
||||
covered=covered,
|
||||
lineno_width=lineno_width,
|
||||
line=k,
|
||||
code=line.rstrip(),
|
||||
onclick=onclick,
|
||||
))
|
||||
if c_code:
|
||||
outlist.append(u"<pre class='cython code score-{score} {covered}'>{code}</pre>".format(
|
||||
score=score, covered=covered, code=c_code))
|
||||
outlist.append(u"</div>")
|
||||
return outlist
|
||||
|
||||
|
||||
_parse_code = re.compile((
|
||||
br'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
|
||||
br'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
|
||||
br'(?:'
|
||||
br'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
|
||||
br'(?P<pyx_c_api>(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|'
|
||||
br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
|
||||
br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
|
||||
br')(?=\()|' # look-ahead to exclude subsequent '(' from replacement
|
||||
br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
|
||||
).decode('ascii')).sub
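# e.g. the annotate callback wraps "PyObject_GetAttr(" as
# "<span class='py_c_api'>PyObject_GetAttr</span>("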
|
||||
|
||||
|
||||
_replace_pos_comment = re.compile(
|
||||
# this matches what Cython generates as code line marker comment
|
||||
br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'),
|
||||
re.M
|
||||
).sub
|
||||
|
||||
|
||||
class AnnotationItem(object):
|
||||
|
||||
def __init__(self, style, text, tag="", size=0):
|
||||
self.style = style
|
||||
self.text = text
|
||||
self.tag = tag
|
||||
self.size = size
|
||||
|
||||
def start(self):
|
||||
return u"<span class='cython tag %s' title='%s'>%s" % (self.style, self.text, self.tag)
|
||||
|
||||
def end(self):
|
||||
return self.size, u"</span>"
|
|
@ -0,0 +1,214 @@
|
|||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .Visitor import CythonTransform
|
||||
from .StringEncoding import EncodedString
|
||||
from . import Options
|
||||
from . import PyrexTypes, ExprNodes
|
||||
from ..CodeWriter import ExpressionWriter
|
||||
|
||||
|
||||
class AnnotationWriter(ExpressionWriter):
|
||||
|
||||
def visit_Node(self, node):
|
||||
self.put(u"<???>")
|
||||
|
||||
def visit_LambdaNode(self, node):
|
||||
# XXX Should we do better?
|
||||
self.put("<lambda>")
|
||||
|
||||
|
||||
class EmbedSignature(CythonTransform):
|
||||
|
||||
def __init__(self, context):
|
||||
super(EmbedSignature, self).__init__(context)
|
||||
self.class_name = None
|
||||
self.class_node = None
|
||||
|
||||
def _fmt_expr(self, node):
|
||||
writer = AnnotationWriter()
|
||||
result = writer.write(node)
|
||||
# print(type(node).__name__, '-->', result)
|
||||
return result
|
||||
|
||||
def _fmt_arg(self, arg):
|
||||
if arg.type is PyrexTypes.py_object_type or arg.is_self_arg:
|
||||
doc = arg.name
|
||||
else:
|
||||
doc = arg.type.declaration_code(arg.name, for_display=1)
|
||||
|
||||
if arg.annotation:
|
||||
annotation = self._fmt_expr(arg.annotation)
|
||||
doc = doc + (': %s' % annotation)
|
||||
if arg.default:
|
||||
default = self._fmt_expr(arg.default)
|
||||
doc = doc + (' = %s' % default)
|
||||
elif arg.default:
|
||||
default = self._fmt_expr(arg.default)
|
||||
doc = doc + ('=%s' % default)
|
||||
return doc
|
||||
|
||||
def _fmt_star_arg(self, arg):
|
||||
arg_doc = arg.name
|
||||
if arg.annotation:
|
||||
annotation = self._fmt_expr(arg.annotation)
|
||||
arg_doc = arg_doc + (': %s' % annotation)
|
||||
return arg_doc
|
||||
|
||||
def _fmt_arglist(self, args,
|
||||
npargs=0, pargs=None,
|
||||
nkargs=0, kargs=None,
|
||||
hide_self=False):
|
||||
arglist = []
|
||||
for arg in args:
|
||||
if not hide_self or not arg.entry.is_self_arg:
|
||||
arg_doc = self._fmt_arg(arg)
|
||||
arglist.append(arg_doc)
|
||||
if pargs:
|
||||
arg_doc = self._fmt_star_arg(pargs)
|
||||
arglist.insert(npargs, '*%s' % arg_doc)
|
||||
elif nkargs:
|
||||
arglist.insert(npargs, '*')
|
||||
if kargs:
|
||||
arg_doc = self._fmt_star_arg(kargs)
|
||||
arglist.append('**%s' % arg_doc)
|
||||
return arglist
|
||||
|
||||
def _fmt_ret_type(self, ret):
|
||||
if ret is PyrexTypes.py_object_type:
|
||||
return None
|
||||
else:
|
||||
return ret.declaration_code("", for_display=1)
|
||||
|
||||
def _fmt_signature(self, cls_name, func_name, args,
|
||||
npargs=0, pargs=None,
|
||||
nkargs=0, kargs=None,
|
||||
return_expr=None,
|
||||
return_type=None, hide_self=False):
|
||||
arglist = self._fmt_arglist(args,
|
||||
npargs, pargs,
|
||||
nkargs, kargs,
|
||||
hide_self=hide_self)
|
||||
arglist_doc = ', '.join(arglist)
|
||||
func_doc = '%s(%s)' % (func_name, arglist_doc)
|
||||
if cls_name:
|
||||
func_doc = '%s.%s' % (cls_name, func_doc)
|
||||
ret_doc = None
|
||||
if return_expr:
|
||||
ret_doc = self._fmt_expr(return_expr)
|
||||
elif return_type:
|
||||
ret_doc = self._fmt_ret_type(return_type)
|
||||
if ret_doc:
|
||||
func_doc = '%s -> %s' % (func_doc, ret_doc)
|
||||
return func_doc
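# Illustrative (hypothetical names): for "def get(self, int key,
# default=None)" on class Cache this yields
# "Cache.get(self, int key, default=None)"; a return annotation or
# C return type appends " -> <type>".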
|
||||
|
||||
def _embed_signature(self, signature, node_doc):
|
||||
if node_doc:
|
||||
return "%s\n%s" % (signature, node_doc)
|
||||
else:
|
||||
return signature
|
||||
|
||||
def __call__(self, node):
|
||||
if not Options.docstrings:
|
||||
return node
|
||||
else:
|
||||
return super(EmbedSignature, self).__call__(node)
|
||||
|
||||
def visit_ClassDefNode(self, node):
|
||||
oldname = self.class_name
|
||||
oldclass = self.class_node
|
||||
self.class_node = node
|
||||
try:
|
||||
# PyClassDefNode
|
||||
self.class_name = node.name
|
||||
except AttributeError:
|
||||
# CClassDefNode
|
||||
self.class_name = node.class_name
|
||||
self.visitchildren(node)
|
||||
self.class_name = oldname
|
||||
self.class_node = oldclass
|
||||
return node
|
||||
|
||||
def visit_LambdaNode(self, node):
|
||||
# lambda expressions do not have a signature or inner functions
|
||||
return node
|
||||
|
||||
def visit_DefNode(self, node):
|
||||
if not self.current_directives['embedsignature']:
|
||||
return node
|
||||
|
||||
is_constructor = False
|
||||
hide_self = False
|
||||
if node.entry.is_special:
|
||||
is_constructor = self.class_node and node.name == '__init__'
|
||||
if not is_constructor:
|
||||
return node
|
||||
class_name, func_name = None, self.class_name
|
||||
hide_self = True
|
||||
else:
|
||||
class_name, func_name = self.class_name, node.name
|
||||
|
||||
nkargs = getattr(node, 'num_kwonly_args', 0)
|
||||
npargs = len(node.args) - nkargs
|
||||
signature = self._fmt_signature(
|
||||
class_name, func_name, node.args,
|
||||
npargs, node.star_arg,
|
||||
nkargs, node.starstar_arg,
|
||||
return_expr=node.return_type_annotation,
|
||||
return_type=None, hide_self=hide_self)
|
||||
if signature:
|
||||
if is_constructor:
|
||||
doc_holder = self.class_node.entry.type.scope
|
||||
else:
|
||||
doc_holder = node.entry
|
||||
|
||||
if doc_holder.doc is not None:
|
||||
old_doc = doc_holder.doc
|
||||
elif not is_constructor and getattr(node, 'py_func', None) is not None:
|
||||
old_doc = node.py_func.entry.doc
|
||||
else:
|
||||
old_doc = None
|
||||
new_doc = self._embed_signature(signature, old_doc)
|
||||
doc_holder.doc = EncodedString(new_doc)
|
||||
if not is_constructor and getattr(node, 'py_func', None) is not None:
|
||||
node.py_func.entry.doc = EncodedString(new_doc)
|
||||
return node
|
||||
|
||||
def visit_CFuncDefNode(self, node):
|
||||
if not self.current_directives['embedsignature']:
|
||||
return node
|
||||
if not node.overridable: # not cpdef FOO(...):
|
||||
return node
|
||||
|
||||
signature = self._fmt_signature(
|
||||
self.class_name, node.declarator.base.name,
|
||||
node.declarator.args,
|
||||
return_type=node.return_type)
|
||||
if signature:
|
||||
if node.entry.doc is not None:
|
||||
old_doc = node.entry.doc
|
||||
elif getattr(node, 'py_func', None) is not None:
|
||||
old_doc = node.py_func.entry.doc
|
||||
else:
|
||||
old_doc = None
|
||||
new_doc = self._embed_signature(signature, old_doc)
|
||||
node.entry.doc = EncodedString(new_doc)
|
||||
if hasattr(node, 'py_func') and node.py_func is not None:
|
||||
node.py_func.entry.doc = EncodedString(new_doc)
|
||||
return node
|
||||
|
||||
def visit_PropertyNode(self, node):
|
||||
if not self.current_directives['embedsignature']:
|
||||
return node
|
||||
|
||||
entry = node.entry
|
||||
if entry.visibility == 'public':
|
||||
# property synthesised from a cdef public attribute
|
||||
type_name = entry.type.declaration_code("", for_display=1)
|
||||
if not entry.type.is_pyobject:
|
||||
type_name = "'%s'" % type_name
|
||||
elif entry.type.is_extension_type:
|
||||
type_name = entry.type.module_name + '.' + type_name
|
||||
signature = '%s: %s' % (entry.name, type_name)
|
||||
new_doc = self._embed_signature(signature, entry.doc)
|
||||
entry.doc = EncodedString(new_doc)
|
||||
return node
|
740
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Buffer.py
Normal file
|
@ -0,0 +1,740 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .Visitor import CythonTransform
|
||||
from .ModuleNode import ModuleNode
|
||||
from .Errors import CompileError
|
||||
from .UtilityCode import CythonUtilityCode
|
||||
from .Code import UtilityCode, TempitaUtilityCode
|
||||
|
||||
from . import Options
|
||||
from . import Interpreter
|
||||
from . import PyrexTypes
|
||||
from . import Naming
|
||||
from . import Symtab
|
||||
|
||||
def dedent(text, reindent=0):
|
||||
from textwrap import dedent
|
||||
text = dedent(text)
|
||||
if reindent > 0:
|
||||
indent = " " * reindent
|
||||
text = '\n'.join([indent + x for x in text.split('\n')])
|
||||
return text
|
||||
|
||||
class IntroduceBufferAuxiliaryVars(CythonTransform):

    #
    # Entry point
    #

    buffers_exists = False
    using_memoryview = False

    def __call__(self, node):
        assert isinstance(node, ModuleNode)
        self.max_ndim = 0
        result = super(IntroduceBufferAuxiliaryVars, self).__call__(node)
        if self.buffers_exists:
            use_bufstruct_declare_code(node.scope)
            use_py2_buffer_functions(node.scope)

        return result

    #
    # Basic operations for transforms
    #
    def handle_scope(self, node, scope):
        # For all buffers, insert extra variables in the scope.
        # The variables are also accessible from the buffer_info
        # on the buffer entry.
        scope_items = scope.entries.items()
        bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
        if len(bufvars) > 0:
            bufvars.sort(key=lambda entry: entry.name)
            self.buffers_exists = True

        memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
        if len(memviewslicevars) > 0:
            self.buffers_exists = True

        for (name, entry) in scope_items:
            if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
                self.using_memoryview = True
                break
        del scope_items

        if isinstance(node, ModuleNode) and len(bufvars) > 0:
            # for now...note that pos is wrong
            raise CompileError(node.pos, "Buffer vars not allowed in module scope")
        for entry in bufvars:
            if entry.type.dtype.is_ptr:
                raise CompileError(node.pos, "Buffers with pointer types not yet supported.")

            name = entry.name
            buftype = entry.type
            if buftype.ndim > Options.buffer_max_dims:
                raise CompileError(node.pos,
                                   "Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims)
            if buftype.ndim > self.max_ndim:
                self.max_ndim = buftype.ndim

            # Declare auxiliary vars
            def decvar(type, prefix):
                cname = scope.mangle(prefix, name)
                aux_var = scope.declare_var(name=None, cname=cname,
                                            type=type, pos=node.pos)
                if entry.is_arg:
                    aux_var.used = True  # otherwise, NameNode will mark whether it is used

                return aux_var

            auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix),
                       (PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix))
            pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars]

            entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer)

        scope.buffer_entries = bufvars
        self.scope = scope

    def visit_ModuleNode(self, node):
        self.handle_scope(node, node.scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        self.handle_scope(node, node.local_scope)
        self.visitchildren(node)
        return node

#
# Analysis
#
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast")  # ordered!
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
buffer_positional_options_count = 1  # anything beyond this needs keyword argument

ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
ERR_BUF_TOO_MANY = 'Too many buffer options'
ERR_BUF_DUP = '"%s" buffer option already supplied'
ERR_BUF_MISSING = '"%s" missing'
ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)'
ERR_BUF_NDIM = 'ndim must be a non-negative integer'
ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct'
ERR_BUF_BOOL = '"%s" must be a boolean'


def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True):
    """
    Must be called during type analysis, as analyse is called
    on the dtype argument.

    posargs and dictargs should consist of a list and a dict
    of tuples (value, pos). Defaults should be a dict of values.

    Returns a dict containing all the options a buffer can have
    and their values (with the positions stripped).
    """
    if defaults is None:
        defaults = buffer_defaults

    posargs, dictargs = Interpreter.interpret_compiletime_options(
        posargs, dictargs, type_env=env, type_args=(0, 'dtype'))

    if len(posargs) > buffer_positional_options_count:
        raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)

    options = {}
    for name, (value, pos) in dictargs.items():
        if name not in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        options[name] = value

    for name, (value, pos) in zip(buffer_options, posargs):
        if name not in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        if name in options:
            raise CompileError(pos, ERR_BUF_DUP % name)
        options[name] = value

    # Check that they are all there and copy defaults
    for name in buffer_options:
        if name not in options:
            try:
                options[name] = defaults[name]
            except KeyError:
                if need_complete:
                    raise CompileError(globalpos, ERR_BUF_MISSING % name)

    dtype = options.get("dtype")
    if dtype and dtype.is_extension_type:
        raise CompileError(globalpos, ERR_BUF_DTYPE)

    ndim = options.get("ndim")
    if ndim and (not isinstance(ndim, int) or ndim < 0):
        raise CompileError(globalpos, ERR_BUF_NDIM)

    mode = options.get("mode")
    if mode and not (mode in ('full', 'strided', 'c', 'fortran')):
        raise CompileError(globalpos, ERR_BUF_MODE)

    def assert_bool(name):
        x = options.get(name)
        if not isinstance(x, bool):
            raise CompileError(globalpos, ERR_BUF_BOOL % name)

    assert_bool('negative_indices')
    assert_bool('cast')

    return options

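To make the precedence above concrete, here is a small self-contained sketch (plain Python, hypothetical helper name) of the merge order: keyword options are collected first, positionals are matched against the ordered option names, and defaults fill whatever is left, with duplicates rejected.

# Standalone sketch of the option-merging order in analyse_buffer_options().
# Not the real implementation: positions and dtype analysis are omitted.
OPTIONS = ("dtype", "ndim", "mode", "negative_indices", "cast")
DEFAULTS = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}

def merge(posargs, dictargs):
    opts = dict(dictargs)                      # keyword options first
    for name, value in zip(OPTIONS, posargs):  # then positionals, in declared order
        if name in opts:
            raise ValueError('"%s" buffer option already supplied' % name)
        opts[name] = value
    for name in OPTIONS:                       # finally, fill in defaults
        opts.setdefault(name, DEFAULTS.get(name))
    return opts

print(merge(["float"], {"ndim": 2}))
# {'ndim': 2, 'dtype': 'float', 'mode': 'full', 'negative_indices': True, 'cast': False}
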
#
# Code generation
#


class BufferEntry(object):
    def __init__(self, entry):
        self.entry = entry
        self.type = entry.type
        self.cname = entry.buffer_aux.buflocal_nd_var.cname
        self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname
        self.buf_ptr_type = entry.type.buffer_ptr_type
        self.init_attributes()

    def init_attributes(self):
        self.shape = self.get_buf_shapevars()
        self.strides = self.get_buf_stridevars()
        self.suboffsets = self.get_buf_suboffsetvars()

    def get_buf_suboffsetvars(self):
        return self._for_all_ndim("%s.diminfo[%d].suboffsets")

    def get_buf_stridevars(self):
        return self._for_all_ndim("%s.diminfo[%d].strides")

    def get_buf_shapevars(self):
        return self._for_all_ndim("%s.diminfo[%d].shape")

    def _for_all_ndim(self, s):
        return [s % (self.cname, i) for i in range(self.type.ndim)]

    def generate_buffer_lookup_code(self, code, index_cnames):
        # Create the buffer lookup and return it.
        # This is done via utility macros/inline functions, which vary
        # according to the access mode used.
        params = []
        nd = self.type.ndim
        mode = self.type.mode
        if mode == 'full':
            for i, s, o in zip(index_cnames,
                               self.get_buf_stridevars(),
                               self.get_buf_suboffsetvars()):
                params.append(i)
                params.append(s)
                params.append(o)
            funcname = "__Pyx_BufPtrFull%dd" % nd
            funcgen = buf_lookup_full_code
        else:
            if mode == 'strided':
                funcname = "__Pyx_BufPtrStrided%dd" % nd
                funcgen = buf_lookup_strided_code
            elif mode == 'c':
                funcname = "__Pyx_BufPtrCContig%dd" % nd
                funcgen = buf_lookup_c_code
            elif mode == 'fortran':
                funcname = "__Pyx_BufPtrFortranContig%dd" % nd
                funcgen = buf_lookup_fortran_code
            else:
                assert False
            for i, s in zip(index_cnames, self.get_buf_stridevars()):
                params.append(i)
                params.append(s)

        # Make sure the utility code is available
        if funcname not in code.globalstate.utility_codes:
            code.globalstate.utility_codes.add(funcname)
            protocode = code.globalstate['utility_code_proto']
            defcode = code.globalstate['utility_code_def']
            funcgen(protocode, defcode, name=funcname, nd=nd)

        buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code()
        ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr,
                                      ", ".join(params))
        return ptrcode

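For orientation, a sketch of the kind of expression generate_buffer_lookup_code() returns for a 2-D 'strided' float buffer; all cnames here are made up for illustration.

# Illustrative only: the assembled C lookup expression for nd=2, mode='strided'.
funcname = "__Pyx_BufPtrStrided2d"
buf_ptr = "__pyx_pybuffernd_x.rcbuffer->pybuffer.buf"      # hypothetical cname
params = ["__pyx_t_1", "__pyx_pybuffernd_x.diminfo[0].strides",
          "__pyx_t_2", "__pyx_pybuffernd_x.diminfo[1].strides"]
print("%s(%s, %s, %s)" % (funcname, "float *", buf_ptr, ", ".join(params)))
# -> __Pyx_BufPtrStrided2d(float *, <buf>, i0, s0, i1, s1), spelled with the cnames above
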
def get_flags(buffer_aux, buffer_type):
    flags = 'PyBUF_FORMAT'
    mode = buffer_type.mode
    if mode == 'full':
        flags += '| PyBUF_INDIRECT'
    elif mode == 'strided':
        flags += '| PyBUF_STRIDES'
    elif mode == 'c':
        flags += '| PyBUF_C_CONTIGUOUS'
    elif mode == 'fortran':
        flags += '| PyBUF_F_CONTIGUOUS'
    else:
        assert False
    if buffer_aux.writable_needed:
        flags += "| PyBUF_WRITABLE"
    return flags

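A minimal sketch exercising get_flags() with stand-in objects (the namedtuples are hypothetical; real callers pass Symtab/PyrexTypes objects):

# Sketch: get_flags() only reads .mode and .writable_needed, so stubs suffice.
from collections import namedtuple
_Aux = namedtuple('_Aux', 'writable_needed')
_Buf = namedtuple('_Buf', 'mode')
print(get_flags(_Aux(writable_needed=True), _Buf(mode='c')))
# PyBUF_FORMAT| PyBUF_C_CONTIGUOUS| PyBUF_WRITABLE
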
def used_buffer_aux_vars(entry):
    buffer_aux = entry.buffer_aux
    buffer_aux.buflocal_nd_var.used = True
    buffer_aux.rcbuf_var.used = True


def put_unpack_buffer_aux_into_scope(buf_entry, code):
    # Generate code to copy the needed struct info into local
    # variables.
    buffer_aux, mode = buf_entry.buffer_aux, buf_entry.type.mode
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname

    fldnames = ['strides', 'shape']
    if mode == 'full':
        fldnames.append('suboffsets')

    ln = []
    for i in range(buf_entry.type.ndim):
        for fldname in fldnames:
            ln.append("%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % (
                pybuffernd_struct, i, fldname,
                pybuffernd_struct, fldname, i))
    code.putln(' '.join(ln))

def put_init_vars(entry, code):
    bufaux = entry.buffer_aux
    pybuffernd_struct = bufaux.buflocal_nd_var.cname
    pybuffer_struct = bufaux.rcbuf_var.cname
    # init pybuffer_struct
    code.putln("%s.pybuffer.buf = NULL;" % pybuffer_struct)
    code.putln("%s.refcount = 0;" % pybuffer_struct)
    # init the buffer object
    # code.put_init_var_to_py_none(entry)
    # init the pybuffernd_struct
    code.putln("%s.data = NULL;" % pybuffernd_struct)
    code.putln("%s.rcbuffer = &%s;" % (pybuffernd_struct, pybuffer_struct))

def put_acquire_arg_buffer(entry, code, pos):
    buffer_aux = entry.buffer_aux
    getbuffer = get_getbuffer_call(code, entry.cname, buffer_aux, entry.type)

    # Acquire any new buffer
    code.putln("{")
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth())
    code.putln(code.error_goto_if("%s == -1" % getbuffer, pos))
    code.putln("}")
    # An exception raised in arg parsing cannot be caught, so no
    # need to care about the buffer then.
    put_unpack_buffer_aux_into_scope(entry, code)


def put_release_buffer_code(code, entry):
    code.globalstate.use_utility_code(acquire_utility_code)
    code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % entry.buffer_aux.buflocal_nd_var.cname)

def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
    ndim = buffer_type.ndim
    cast = int(buffer_type.cast)
    flags = get_flags(buffer_aux, buffer_type)
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname

    dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)

    code.globalstate.use_utility_code(acquire_utility_code)
    return ("__Pyx_GetBufferAndValidate(&%(pybuffernd_struct)s.rcbuffer->pybuffer, "
            "(PyObject*)%(obj_cname)s, &%(dtype_typeinfo)s, %(flags)s, %(ndim)d, "
            "%(cast)d, __pyx_stack)" % locals())

def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
                         is_initialized, pos, code):
    """
    Generate code for reassigning a buffer variable. This only deals with getting
    the buffer auxiliary structure and variables set up correctly; the assignment
    itself and refcounting are the responsibility of the caller.

    However, the assignment operation may throw an exception so that the reassignment
    never happens.

    Depending on the circumstances there are two possible outcomes:
    - Old buffer released, new acquired, rhs assigned to lhs
    - Old buffer released, new acquired which fails, reacquire old lhs buffer
      (which may or may not succeed).
    """

    buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
    flags = get_flags(buffer_aux, buffer_type)

    code.putln("{")  # Set up necessary stack for getbuffer
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())

    getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type)  # fill in object below

    if is_initialized:
        # Release any existing buffer
        code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct)
        # Acquire
        retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname))
        code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname)))
        # If acquisition failed, attempt to reacquire the old buffer
        # before raising the exception. A failure of reacquisition
        # will cause the reacquisition exception to be reported, one
        # can consider working around this later.
        exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
                          for _ in range(3))
        code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps)
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
        code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps)  # Do not refnanny these!
        code.globalstate.use_utility_code(raise_buffer_fallback_code)
        code.putln('__Pyx_RaiseBufferFallbackError();')
        code.putln('} else {')
        code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps)
        code.putln('}')
        code.putln('%s = %s = %s = 0;' % exc_temps)
        for t in exc_temps:
            code.funcstate.release_temp(t)
        code.putln('}')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln(code.error_goto_if_neg(retcode_cname, pos))
        code.funcstate.release_temp(retcode_cname)
    else:
        # Our entry had no previous value, so set to None when acquisition fails.
        # In this case, auxiliary vars should be set up right in initialization to a zero-buffer,
        # so it suffices to set the buf field to NULL.
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname)))
        code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' %
                   (lhs_cname,
                    PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"),
                    pybuffernd_struct))
        code.putln(code.error_goto(pos))
        code.put('} else {')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln('}')

    code.putln("}")  # Release stack

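The docstring's two outcomes can be restated in plain Python; this sketch (hypothetical callables, no temps or refnanny bookkeeping) mirrors the control flow generated for the is_initialized branch:

# Plain-Python sketch of the generated acquire-with-fallback logic.
def reassign_buffer(lhs_obj, rhs_obj, acquire, release):
    release(lhs_obj)
    try:
        acquire(rhs_obj)          # may fail...
    except Exception:
        try:
            acquire(lhs_obj)      # ...then try to reacquire the old buffer
        except Exception:
            raise BufferError("fallback reacquisition failed, too")
        raise                     # report the original failure
    return rhs_obj
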
def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
                           pos, code, negative_indices, in_nogil_context):
    """
    Generates code to process indices and calculate an offset into
    a buffer. Returns a C string which gives a pointer which can be
    read from or written to at will (it is an expression, so the caller should
    store it in a temporary if it is used more than once).

    As the bounds checking can have any number of combinations of unsigned
    arguments, smart optimizations etc., we insert it directly in the function
    body. The lookup however is delegated to an inline function that is instantiated
    once per ndim (lookups with suboffsets tend to get quite complicated).

    entry is a BufferEntry
    """
    negative_indices = directives['wraparound'] and negative_indices

    if directives['boundscheck']:
        # Check bounds and fix negative indices.
        # We allocate a temporary which is initialized to -1, meaning OK (!).
        # If an error occurs, the temp is set to the index dimension the
        # error is occurring at.
        failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = -1;" % failed_dim_temp)
        for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())):
            if signed != 0:
                # not unsigned, deal with negative index
                code.putln("if (%s < 0) {" % cname)
                if negative_indices:
                    code.putln("%s += %s;" % (cname, shape))
                    code.putln("if (%s) %s = %d;" % (
                        code.unlikely("%s < 0" % cname),
                        failed_dim_temp, dim))
                else:
                    code.putln("%s = %d;" % (failed_dim_temp, dim))
                code.put("} else ")
            # check bounds in positive direction
            if signed != 0:
                cast = ""
            else:
                cast = "(size_t)"
            code.putln("if (%s) %s = %d;" % (
                code.unlikely("%s >= %s%s" % (cname, cast, shape)),
                failed_dim_temp, dim))

        if in_nogil_context:
            code.globalstate.use_utility_code(raise_indexerror_nogil)
            func = '__Pyx_RaiseBufferIndexErrorNogil'
        else:
            code.globalstate.use_utility_code(raise_indexerror_code)
            func = '__Pyx_RaiseBufferIndexError'

        code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp))
        code.putln('%s(%s);' % (func, failed_dim_temp))
        code.putln(code.error_goto(pos))
        code.putln('}')
        code.funcstate.release_temp(failed_dim_temp)
    elif negative_indices:
        # Only fix negative indices.
        for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()):
            if signed != 0:
                code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape))

    return entry.generate_buffer_lookup_code(code, index_cnames)

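For readers tracing the generated C, here is the same index-fixing logic restated as Python for one dimension (error text paraphrases the runtime helper; an assumption, not a verbatim quote):

# Sketch of the run-time semantics with wraparound=True, boundscheck=True.
def checked_index(i, shape_i, dim):
    if i < 0:
        i += shape_i              # wraparound for signed indices
        if i < 0:
            raise IndexError("out of bounds on buffer access (axis %d)" % dim)
    if i >= shape_i:              # the C code uses an unsigned compare here
        raise IndexError("out of bounds on buffer access (axis %d)" % dim)
    return i

print(checked_index(-1, 5, 0))    # -> 4
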
def use_bufstruct_declare_code(env):
    env.use_utility_code(buffer_struct_declare_code)

def buf_lookup_full_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # _i_ndex, _s_tride, sub_o_ffset
    macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)])
    proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))

    funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
    proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
    defin.putln(dedent("""
        static CYTHON_INLINE void* %s_imp(void* buf, %s) {
          char* ptr = (char*)buf;
        """) % (name, funcargs) + "".join([dedent("""\
           ptr += s%d * i%d;
           if (o%d >= 0) ptr = *((char**)ptr) + o%d;
        """) % (i, i, i, i) for i in range(nd)]
        ) + "\nreturn ptr;\n}")

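To see what this generator actually emits, a sketch with a throwaway writer whose only method is putln (the real proto/def writers are CCodeWriter instances):

# Sketch: driving buf_lookup_full_code() with a stand-in writer.
class _W(object):
    def putln(self, s):
        print(s)

buf_lookup_full_code(_W(), _W(), name="__Pyx_BufPtrFull1d", nd=1)
# Emits the macro, the _imp prototype, and an _imp body that follows one
# suboffset indirection per dimension.
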
def buf_lookup_strided_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # _i_ndex, _s_tride
    args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
    offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd)])
    proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (name, args, offset))

def buf_lookup_c_code(proto, defin, name, nd):
    """
    Similar to strided lookup, but can assume that the last dimension
    doesn't need a multiplication, since its items are contiguous
    (the last stride is one element). Still we keep the same signature
    for now.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
    else:
        args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
        offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd - 1)])
        proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, nd - 1))

def buf_lookup_fortran_code(proto, defin, name, nd):
    """
    Like C lookup, but the first index is optimized instead.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
    else:
        args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
        offset = " + ".join(["i%d * s%d" % (i, i) for i in range(1, nd)])
        proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, 0))

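Comparing the two contiguous modes side by side for nd=2 (same stand-in writer idea as above) shows which index skips its stride multiplication:

class _W(object):
    def putln(self, s):
        print(s)

buf_lookup_c_code(_W(), _W(), name="__Pyx_BufPtrCContig2d", nd=2)
# #define __Pyx_BufPtrCContig2d(type, buf, i0, s0, i1, s1) ((type)((char*)buf + i0 * s0) + i1)
buf_lookup_fortran_code(_W(), _W(), name="__Pyx_BufPtrFortranContig2d", nd=2)
# #define __Pyx_BufPtrFortranContig2d(type, buf, i0, s0, i1, s1) ((type)((char*)buf + i1 * s1) + i0)
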
def use_py2_buffer_functions(env):
    env.use_utility_code(GetAndReleaseBufferUtilityCode())

class GetAndReleaseBufferUtilityCode(object):
    # Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2.
    # For >= 2.6 we do double mode -- use the new buffer interface on objects
    # which have the right tp_flags set, but emulation otherwise.

    requires = None
    is_cython_utility = False

    def __init__(self):
        pass

    def __eq__(self, other):
        return isinstance(other, GetAndReleaseBufferUtilityCode)

    def __hash__(self):
        return 24342342

    def get_tree(self, **kwargs):
        pass

    def put_code(self, output):
        code = output['utility_code_def']
        proto_code = output['utility_code_proto']
        env = output.module_node.scope
        cython_scope = env.context.cython_scope

        # Search all types for __getbuffer__ overloads
        types = []
        visited_scopes = set()
        def find_buffer_types(scope):
            if scope in visited_scopes:
                return
            visited_scopes.add(scope)
            for m in scope.cimported_modules:
                find_buffer_types(m)
            for e in scope.type_entries:
                if isinstance(e.utility_code_definition, CythonUtilityCode):
                    continue
                t = e.type
                if t.is_extension_type:
                    if scope is cython_scope and not e.used:
                        continue
                    release = get = None
                    for x in t.scope.pyfunc_entries:
                        if x.name == u"__getbuffer__":
                            get = x.func_cname
                        elif x.name == u"__releasebuffer__":
                            release = x.func_cname
                    if get:
                        types.append((t.typeptr_cname, get, release))

        find_buffer_types(env)

        util_code = TempitaUtilityCode.load(
            "GetAndReleaseBuffer", from_file="Buffer.c",
            context=dict(types=types))

        proto = util_code.format_code(util_code.proto)
        impl = util_code.format_code(
            util_code.inject_string_constants(util_code.impl, output)[1])

        proto_code.putln(proto)
        code.putln(impl)

def mangle_dtype_name(dtype):
    # Use prefixes to separate user defined types from builtins
    # (consider "typedef float unsigned_int")
    if dtype.is_pyobject:
        return "object"
    elif dtype.is_ptr:
        return "ptr"
    else:
        if dtype.is_typedef or dtype.is_struct_or_union:
            prefix = "nn_"
        else:
            prefix = ""
        return prefix + dtype.specialization_name()

def get_type_information_cname(code, dtype, maxdepth=None):
    """
    Output the run-time type information (__Pyx_TypeInfo) for given dtype,
    and return the name of the type info struct.

    Structs with two floats of the same size are encoded as complex numbers.
    One can distinguish between complex numbers declared as a struct and
    those with native encoding by checking whether the 'fields' field of
    the type info is filled in.
    """
    namesuffix = mangle_dtype_name(dtype)
    name = "__Pyx_TypeInfo_%s" % namesuffix
    structinfo_name = "__Pyx_StructFields_%s" % namesuffix

    if dtype.is_error:
        return "<error>"

    # It's critical that walking the type info doesn't use more stack
    # depth than dtype.struct_nesting_depth() returns, so use an assertion for this
    if maxdepth is None:
        maxdepth = dtype.struct_nesting_depth()
    if maxdepth <= 0:
        assert False

    if name not in code.globalstate.utility_codes:
        code.globalstate.utility_codes.add(name)
        typecode = code.globalstate['typeinfo']

        arraysizes = []
        if dtype.is_array:
            while dtype.is_array:
                arraysizes.append(dtype.size)
                dtype = dtype.base_type

        complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()

        declcode = dtype.empty_declaration_code()
        if dtype.is_simple_buffer_dtype():
            structinfo_name = "NULL"
        elif dtype.is_struct:
            struct_scope = dtype.scope
            if dtype.is_const:
                struct_scope = struct_scope.const_base_type_scope
            # Must pre-call all used types in order not to recurse during utility code writing.
            fields = struct_scope.var_entries
            assert len(fields) > 0
            types = [get_type_information_cname(code, f.type, maxdepth - 1)
                     for f in fields]
            typecode.putln("static __Pyx_StructField %s[] = {" % structinfo_name, safe=True)
            for f, typeinfo in zip(fields, types):
                typecode.putln('  {&%s, "%s", offsetof(%s, %s)},' %
                               (typeinfo, f.name, dtype.empty_declaration_code(), f.cname), safe=True)
            typecode.putln('  {NULL, NULL, 0}', safe=True)
            typecode.putln("};", safe=True)
        else:
            assert False

        rep = str(dtype)

        flags = "0"
        is_unsigned = "0"
        if dtype is PyrexTypes.c_char_type:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "'H'"
        elif dtype.is_int:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "%s ? 'U' : 'I'" % is_unsigned
        elif complex_possible or dtype.is_complex:
            typegroup = "'C'"
        elif dtype.is_float:
            typegroup = "'R'"
        elif dtype.is_struct:
            typegroup = "'S'"
            if dtype.packed:
                flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
        elif dtype.is_pyobject:
            typegroup = "'O'"
        else:
            assert False, dtype

        typeinfo = ('static __Pyx_TypeInfo %s = '
                    '{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };')
        tup = (name, rep, structinfo_name, declcode,
               ', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes),
               typegroup, is_unsigned, flags)
        typecode.putln(typeinfo % tup, safe=True)

    return name

def load_buffer_utility(util_code_name, context=None, **kwargs):
    if context is None:
        return UtilityCode.load(util_code_name, "Buffer.c", **kwargs)
    else:
        return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)

context = dict(max_dims=Options.buffer_max_dims)
buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context)
buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs")

# Utility function to set the right exception
# The caller should immediately goto_error
raise_indexerror_code = load_buffer_utility("BufferIndexError")
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")
raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")

acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context)
buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context)

# See utility code BufferFormatFromTypeInfo
_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat")

444  kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Builtin.py  (new file)
@@ -0,0 +1,444 @@
#
#   Builtin Definitions
#

from __future__ import absolute_import

from .Symtab import BuiltinScope, StructOrUnionScope
from .Code import UtilityCode
from .TypeSlots import Signature
from . import PyrexTypes
from . import Options


# C-level implementations of builtin types, functions and methods

iter_next_utility_code = UtilityCode.load("IterNext", "ObjectHandling.c")
getattr_utility_code = UtilityCode.load("GetAttr", "ObjectHandling.c")
getattr3_utility_code = UtilityCode.load("GetAttr3", "Builtins.c")
pyexec_utility_code = UtilityCode.load("PyExec", "Builtins.c")
pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c")
globals_utility_code = UtilityCode.load("Globals", "Builtins.c")

builtin_utility_code = {
    'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"),
}


# mapping from builtins to their C-level equivalents

class _BuiltinOverride(object):
    def __init__(self, py_name, args, ret_type, cname, py_equiv="*",
                 utility_code=None, sig=None, func_type=None,
                 is_strict_signature=False, builtin_return_type=None):
        self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv
        self.args, self.ret_type = args, ret_type
        self.func_type, self.sig = func_type, sig
        self.builtin_return_type = builtin_return_type
        self.is_strict_signature = is_strict_signature
        self.utility_code = utility_code

    def build_func_type(self, sig=None, self_arg=None):
        if sig is None:
            sig = Signature(self.args, self.ret_type)
            sig.exception_check = False  # not needed for the current builtins
        func_type = sig.function_type(self_arg)
        if self.is_strict_signature:
            func_type.is_strict_signature = True
        if self.builtin_return_type:
            func_type.return_type = builtin_types[self.builtin_return_type]
        return func_type


class BuiltinAttribute(object):
    def __init__(self, py_name, cname=None, field_type=None, field_type_name=None):
        self.py_name = py_name
        self.cname = cname or py_name
        self.field_type_name = field_type_name  # can't do the lookup before the type is declared!
        self.field_type = field_type

    def declare_in_type(self, self_type):
        if self.field_type_name is not None:
            # lazy type lookup
            field_type = builtin_scope.lookup(self.field_type_name).type
        else:
            field_type = self.field_type or PyrexTypes.py_object_type
        entry = self_type.scope.declare(self.py_name, self.cname, field_type, None, 'private')
        entry.is_variable = True


class BuiltinFunction(_BuiltinOverride):
    def declare_in_scope(self, scope):
        func_type, sig = self.func_type, self.sig
        if func_type is None:
            func_type = self.build_func_type(sig)
        scope.declare_builtin_cfunction(self.py_name, func_type, self.cname,
                                        self.py_equiv, self.utility_code)


class BuiltinMethod(_BuiltinOverride):
    def declare_in_type(self, self_type):
        method_type, sig = self.func_type, self.sig
        if method_type is None:
            # override 'self' type (first argument)
            self_arg = PyrexTypes.CFuncTypeArg("", self_type, None)
            self_arg.not_none = True
            self_arg.accept_builtin_subtypes = True
            method_type = self.build_func_type(sig, self_arg)
        self_type.scope.declare_builtin_cfunction(
            self.py_name, method_type, self.cname, utility_code=self.utility_code)

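The one-letter codes in the tables below are TypeSlots.Signature format characters. As a reading aid, a sketch of the ones used here (descriptions paraphrased, not an exhaustive list):

# Reading aid only; not part of the module.
SIGNATURE_CODES = {
    "O": "arbitrary Python object",
    "T": "object of the same type as 'self' (methods)",
    "b": "bint",
    "r": "C int used only for error signalling (-1 on error)",
    "z": "Py_ssize_t",
    "h": "Py_hash_t",
    "d": "double", "f": "float", "i": "int", "l": "long",
}
# e.g. BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance")
# wraps a (object, object) -> bint call.
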
builtin_function_table = [
    # name, args, return, C API func, py equiv = "*"
    BuiltinFunction('abs', "d", "d", "fabs",
                    is_strict_signature=True),
    BuiltinFunction('abs', "f", "f", "fabsf",
                    is_strict_signature=True),
    BuiltinFunction('abs', "i", "i", "abs",
                    is_strict_signature=True),
    BuiltinFunction('abs', "l", "l", "labs",
                    is_strict_signature=True),
    BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
                    utility_code=UtilityCode.load("abs_longlong", "Builtins.c"),
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_longlong_type, [
                            PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None)
                        ],
                        is_strict_signature=True, nogil=True)),
] + list(
    BuiltinFunction('abs', None, None, "/*abs_{0}*/".format(t.specialization_name()),
                    func_type=PyrexTypes.CFuncType(
                        t,
                        [PyrexTypes.CFuncTypeArg("arg", t, None)],
                        is_strict_signature=True, nogil=True))
    for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type)
) + list(
    BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix),
                    func_type=PyrexTypes.CFuncType(
                        t.real_type, [
                            PyrexTypes.CFuncTypeArg("arg", t, None)
                        ],
                        is_strict_signature=True, nogil=True))
    for t in (PyrexTypes.c_float_complex_type,
              PyrexTypes.c_double_complex_type,
              PyrexTypes.c_longdouble_complex_type)
) + [
    BuiltinFunction('abs', "O", "O", "__Pyx_PyNumber_Absolute",
                    utility_code=UtilityCode.load("py_abs", "Builtins.c")),
    #('all',       "",     "",      ""),
    #('any',       "",     "",      ""),
    #('ascii',     "",     "",      ""),
    #('bin',       "",     "",      ""),
    BuiltinFunction('callable', "O", "b", "__Pyx_PyCallable_Check",
                    utility_code=UtilityCode.load("CallableCheck", "ObjectHandling.c")),
    #('chr',       "",     "",      ""),
    #('cmp', "",   "",     "",      ""),  # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
    #('compile',   "",     "",      ""),  # PyObject* Py_CompileString(char *str, char *filename, int start)
    BuiltinFunction('delattr', "OO", "r", "PyObject_DelAttr"),
    BuiltinFunction('dir', "O", "O", "PyObject_Dir"),
    BuiltinFunction('divmod', "OO", "O", "PyNumber_Divmod"),
    BuiltinFunction('exec', "O", "O", "__Pyx_PyExecGlobals",
                    utility_code=pyexec_globals_utility_code),
    BuiltinFunction('exec', "OO", "O", "__Pyx_PyExec2",
                    utility_code=pyexec_utility_code),
    BuiltinFunction('exec', "OOO", "O", "__Pyx_PyExec3",
                    utility_code=pyexec_utility_code),
    #('eval',      "",     "",      ""),
    #('execfile',  "",     "",      ""),
    #('filter',    "",     "",      ""),
    BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr",
                    utility_code=getattr3_utility_code),  # Pyrex legacy
    BuiltinFunction('getattr', "OOO", "O", "__Pyx_GetAttr3",
                    utility_code=getattr3_utility_code),
    BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr",
                    utility_code=getattr_utility_code),
    BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
                    utility_code=UtilityCode.load("HasAttr", "Builtins.c")),
    BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
    #('hex',       "",     "",      ""),
    #('id',        "",     "",      ""),
    #('input',     "",     "",      ""),
    BuiltinFunction('intern', "O", "O", "__Pyx_Intern",
                    utility_code=UtilityCode.load("Intern", "Builtins.c")),
    BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance"),
    BuiltinFunction('issubclass', "OO", "b", "PyObject_IsSubclass"),
    BuiltinFunction('iter', "OO", "O", "PyCallIter_New"),
    BuiltinFunction('iter', "O", "O", "PyObject_GetIter"),
    BuiltinFunction('len', "O", "z", "PyObject_Length"),
    BuiltinFunction('locals', "", "O", "__pyx_locals"),
    #('map',       "",     "",      ""),
    #('max',       "",     "",      ""),
    #('min',       "",     "",      ""),
    BuiltinFunction('next', "O", "O", "__Pyx_PyIter_Next",
                    utility_code=iter_next_utility_code),  # not available in Py2 => implemented here
    BuiltinFunction('next', "OO", "O", "__Pyx_PyIter_Next2",
                    utility_code=iter_next_utility_code),  # not available in Py2 => implemented here
    #('oct',       "",     "",      ""),
    #('open',      "ss",   "O",     "PyFile_FromString"),  # not in Py3
] + [
    BuiltinFunction('ord', None, None, "__Pyx_long_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_py_ucs4_type, PyrexTypes.c_py_unicode_type]
] + [
    BuiltinFunction('ord', None, None, "__Pyx_uchar_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_uchar_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_char_type, PyrexTypes.c_schar_type, PyrexTypes.c_uchar_type]
] + [
    BuiltinFunction('ord', None, None, "__Pyx_PyObject_Ord",
                    utility_code=UtilityCode.load_cached("object_ord", "Builtins.c"),
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [
                            PyrexTypes.CFuncTypeArg("c", PyrexTypes.py_object_type, None)
                        ],
                        exception_value="(long)(Py_UCS4)-1")),
    BuiltinFunction('pow', "OOO", "O", "PyNumber_Power"),
    BuiltinFunction('pow', "OO", "O", "__Pyx_PyNumber_Power2",
                    utility_code=UtilityCode.load("pow2", "Builtins.c")),
    #('range',     "",     "",      ""),
    #('raw_input', "",     "",      ""),
    #('reduce',    "",     "",      ""),
    BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"),
    BuiltinFunction('repr', "O", "O", "PyObject_Repr"),  # , builtin_return_type='str'),  # add in Cython 3.1
    #('round',     "",     "",      ""),
    BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"),
    #('sum',       "",     "",      ""),
    #('sorted',    "",     "",      ""),
    #('type',      "O",    "O",     "PyObject_Type"),
    #('unichr',    "",     "",      ""),
    #('unicode',   "",     "",      ""),
    #('vars',      "",     "",      ""),
    #('zip',       "",     "",      ""),
    #  Can't do these easily until we have builtin type entries.
    #('typecheck', "OO",   "i",     "PyObject_TypeCheck", False),
    #('issubtype', "OO",   "i",     "PyType_IsSubtype",   False),

    # Put in namespace append optimization.
    BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),

    # This is conditionally looked up based on a compiler directive.
    BuiltinFunction('__Pyx_Globals', "", "O", "__Pyx_Globals",
                    utility_code=globals_utility_code),
]

# Builtin types
#  bool
#  buffer
#  classmethod
#  dict
#  enumerate
#  file
#  float
#  int
#  list
#  long
#  object
#  property
#  slice
#  staticmethod
#  super
#  str
#  tuple
#  type
#  xrange

builtin_types_table = [

    ("type", "PyType_Type", []),

    # This conflicts with the C++ bool type, and unfortunately
    # C++ is too liberal about PyObject* <-> bool conversions,
    # resulting in unintuitive runtime behavior and segfaults.
    # ("bool", "PyBool_Type", []),

    ("int", "PyInt_Type", []),
    ("long", "PyLong_Type", []),
    ("float", "PyFloat_Type", []),

    ("complex", "PyComplex_Type", [
        BuiltinAttribute('cval', field_type_name='Py_complex'),
        BuiltinAttribute('real', 'cval.real', field_type=PyrexTypes.c_double_type),
        BuiltinAttribute('imag', 'cval.imag', field_type=PyrexTypes.c_double_type),
    ]),

    ("basestring", "PyBaseString_Type", [
        BuiltinMethod("join", "TO", "T", "__Pyx_PyBaseString_Join",
                      utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
    ]),
    ("bytearray", "PyByteArray_Type", []),
    ("bytes", "PyBytes_Type", [
        BuiltinMethod("join", "TO", "O", "__Pyx_PyBytes_Join",
                      utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
    ]),
    ("str", "PyString_Type", [
        BuiltinMethod("join", "TO", "O", "__Pyx_PyString_Join",
                      builtin_return_type='basestring',
                      utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
    ]),
    ("unicode", "PyUnicode_Type", [
        BuiltinMethod("__contains__", "TO", "b", "PyUnicode_Contains"),
        BuiltinMethod("join", "TO", "T", "PyUnicode_Join"),
    ]),

    ("tuple", "PyTuple_Type", []),

    ("list", "PyList_Type", [
        BuiltinMethod("insert", "TzO", "r", "PyList_Insert"),
        BuiltinMethod("reverse", "T", "r", "PyList_Reverse"),
        BuiltinMethod("append", "TO", "r", "__Pyx_PyList_Append",
                      utility_code=UtilityCode.load("ListAppend", "Optimize.c")),
        BuiltinMethod("extend", "TO", "r", "__Pyx_PyList_Extend",
                      utility_code=UtilityCode.load("ListExtend", "Optimize.c")),
    ]),

    ("dict", "PyDict_Type", [
        BuiltinMethod("__contains__", "TO", "b", "PyDict_Contains"),
        BuiltinMethod("has_key", "TO", "b", "PyDict_Contains"),
        BuiltinMethod("items", "T", "O", "__Pyx_PyDict_Items",
                      utility_code=UtilityCode.load("py_dict_items", "Builtins.c")),
        BuiltinMethod("keys", "T", "O", "__Pyx_PyDict_Keys",
                      utility_code=UtilityCode.load("py_dict_keys", "Builtins.c")),
        BuiltinMethod("values", "T", "O", "__Pyx_PyDict_Values",
                      utility_code=UtilityCode.load("py_dict_values", "Builtins.c")),
        BuiltinMethod("iteritems", "T", "O", "__Pyx_PyDict_IterItems",
                      utility_code=UtilityCode.load("py_dict_iteritems", "Builtins.c")),
        BuiltinMethod("iterkeys", "T", "O", "__Pyx_PyDict_IterKeys",
                      utility_code=UtilityCode.load("py_dict_iterkeys", "Builtins.c")),
        BuiltinMethod("itervalues", "T", "O", "__Pyx_PyDict_IterValues",
                      utility_code=UtilityCode.load("py_dict_itervalues", "Builtins.c")),
        BuiltinMethod("viewitems", "T", "O", "__Pyx_PyDict_ViewItems",
                      utility_code=UtilityCode.load("py_dict_viewitems", "Builtins.c")),
        BuiltinMethod("viewkeys", "T", "O", "__Pyx_PyDict_ViewKeys",
                      utility_code=UtilityCode.load("py_dict_viewkeys", "Builtins.c")),
        BuiltinMethod("viewvalues", "T", "O", "__Pyx_PyDict_ViewValues",
                      utility_code=UtilityCode.load("py_dict_viewvalues", "Builtins.c")),
        BuiltinMethod("clear", "T", "r", "__Pyx_PyDict_Clear",
                      utility_code=UtilityCode.load("py_dict_clear", "Optimize.c")),
        BuiltinMethod("copy", "T", "T", "PyDict_Copy"),
    ]),

    ("slice", "PySlice_Type", [
        BuiltinAttribute('start'),
        BuiltinAttribute('stop'),
        BuiltinAttribute('step'),
    ]),
    # ("file", "PyFile_Type", []),  # not in Py3

    ("set", "PySet_Type", [
        BuiltinMethod("clear", "T", "r", "PySet_Clear"),
        # discard() and remove() have a special treatment for unhashable values
        BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
                      utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
        BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
                      utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
        # update is actually variadic (see Github issue #1645)
        # BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
        #               utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
        BuiltinMethod("add", "TO", "r", "PySet_Add"),
        BuiltinMethod("pop", "T", "O", "PySet_Pop"),
    ]),
    ("frozenset", "PyFrozenSet_Type", []),
    ("Exception", "((PyTypeObject*)PyExc_Exception)[0]", []),
    ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []),
]

types_that_construct_their_instance = set([
    # some builtin types do not always return an instance of
    # themselves - these do:
    'type', 'bool', 'long', 'float', 'complex',
    'bytes', 'unicode', 'bytearray',
    'tuple', 'list', 'dict', 'set', 'frozenset'
    # 'str',   # only in Py3.x
    # 'file',  # only in Py2.x
])

builtin_structs_table = [
    ('Py_buffer', 'Py_buffer',
     [("buf",        PyrexTypes.c_void_ptr_type),
      ("obj",        PyrexTypes.py_object_type),
      ("len",        PyrexTypes.c_py_ssize_t_type),
      ("itemsize",   PyrexTypes.c_py_ssize_t_type),
      ("readonly",   PyrexTypes.c_bint_type),
      ("ndim",       PyrexTypes.c_int_type),
      ("format",     PyrexTypes.c_char_ptr_type),
      ("shape",      PyrexTypes.c_py_ssize_t_ptr_type),
      ("strides",    PyrexTypes.c_py_ssize_t_ptr_type),
      ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type),
      ("smalltable", PyrexTypes.CArrayType(PyrexTypes.c_py_ssize_t_type, 2)),
      ("internal",   PyrexTypes.c_void_ptr_type),
      ]),
    ('Py_complex', 'Py_complex',
     [('real', PyrexTypes.c_double_type),
      ('imag', PyrexTypes.c_double_type),
      ])
]

# set up builtin scope

builtin_scope = BuiltinScope()


def init_builtin_funcs():
    for bf in builtin_function_table:
        bf.declare_in_scope(builtin_scope)


builtin_types = {}


def init_builtin_types():
    global builtin_types
    for name, cname, methods in builtin_types_table:
        utility = builtin_utility_code.get(name)
        if name == 'frozenset':
            objstruct_cname = 'PySetObject'
        elif name == 'bytearray':
            objstruct_cname = 'PyByteArrayObject'
        elif name == 'bool':
            objstruct_cname = None
        elif name == 'Exception':
            objstruct_cname = "PyBaseExceptionObject"
        elif name == 'StopAsyncIteration':
            objstruct_cname = "PyBaseExceptionObject"
        else:
            objstruct_cname = 'Py%sObject' % name.capitalize()
        the_type = builtin_scope.declare_builtin_type(name, cname, utility, objstruct_cname)
        builtin_types[name] = the_type
        for method in methods:
            method.declare_in_type(the_type)


def init_builtin_structs():
    for name, cname, attribute_types in builtin_structs_table:
        scope = StructOrUnionScope(name)
        for attribute_name, attribute_type in attribute_types:
            scope.declare_var(attribute_name, attribute_type, None,
                              attribute_name, allow_pyobject=True)
        builtin_scope.declare_struct_or_union(
            name, "struct", scope, 1, None, cname=cname)


def init_builtins():
    init_builtin_structs()
    init_builtin_types()
    init_builtin_funcs()

    builtin_scope.declare_var(
        '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type),
        pos=None, cname='(!Py_OptimizeFlag)', is_cdef=True)

    global list_type, tuple_type, dict_type, set_type, frozenset_type
    global bytes_type, str_type, unicode_type, basestring_type, slice_type
    global float_type, bool_type, type_type, complex_type, bytearray_type
    type_type = builtin_scope.lookup('type').type
    list_type = builtin_scope.lookup('list').type
    tuple_type = builtin_scope.lookup('tuple').type
    dict_type = builtin_scope.lookup('dict').type
    set_type = builtin_scope.lookup('set').type
    frozenset_type = builtin_scope.lookup('frozenset').type
    slice_type = builtin_scope.lookup('slice').type
    bytes_type = builtin_scope.lookup('bytes').type
    str_type = builtin_scope.lookup('str').type
    unicode_type = builtin_scope.lookup('unicode').type
    basestring_type = builtin_scope.lookup('basestring').type
    bytearray_type = builtin_scope.lookup('bytearray').type
    float_type = builtin_scope.lookup('float').type
    bool_type = builtin_scope.lookup('bool').type
    complex_type = builtin_scope.lookup('complex').type


init_builtins()
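
Note the ordering inside init_builtins(): structs are declared first so that BuiltinAttribute entries using field_type_name (like complex.cval above) can resolve their lazy lookup, and types precede functions because build_func_type() resolves builtin_return_type through the builtin_types dict.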
240  kivy_venv/lib/python3.11/site-packages/Cython/Compiler/CmdLine.py  (new file)
@@ -0,0 +1,240 @@
#
#   Cython - Command Line Parsing
#

from __future__ import absolute_import

import os
import sys
from . import Options

usage = """\
Cython (http://cython.org) is a compiler for code written in the
Cython language.  Cython is based on Pyrex by Greg Ewing.

Usage: cython [options] sourcefile.{pyx,py} ...

Options:
  -V, --version                  Display version number of cython compiler
  -l, --create-listing           Write error messages to a listing file
  -I, --include-dir <directory>  Search for include files in named directory
                                 (multiple include directories are allowed).
  -o, --output-file <filename>   Specify name of generated C file
  -t, --timestamps               Only compile newer source files
  -f, --force                    Compile all source files (overrides implied -t)
  -v, --verbose                  Be verbose, print file names on multiple compilation
  -p, --embed-positions          If specified, the positions in Cython files of each
                                 function definition are embedded in its docstring.
  --cleanup <level>              Release interned objects on python exit, for memory debugging.
                                 Level indicates aggressiveness, default 0 releases nothing.
  -w, --working <directory>     Sets the working directory for Cython (the directory modules
                                 are searched from)
  --gdb                          Output debug information for cygdb
  --gdb-outdir <directory>       Specify gdb debug information output directory. Implies --gdb.

  -D, --no-docstrings            Strip docstrings from the compiled module.
  -a, --annotate                 Produce a colorized HTML version of the source.
  --annotate-coverage <cov.xml>  Annotate and include coverage information from cov.xml.
  --line-directives              Produce #line directives pointing to the .pyx source
  --cplus                        Output a C++ rather than C file.
  --embed[=<method_name>]        Generate a main() function that embeds the Python interpreter.
  -2                             Compile based on Python-2 syntax and code semantics.
  -3                             Compile based on Python-3 syntax and code semantics.
  --3str                         Compile based on Python-3 syntax and code semantics without
                                 assuming unicode by default for string literals under Python 2.
  --lenient                      Change some compile time errors to runtime errors to
                                 improve Python compatibility
  --capi-reexport-cincludes      Add cincluded headers to any auto-generated header files.
  --fast-fail                    Abort the compilation on the first error
  --warning-errors, -Werror      Make all warnings into errors
  --warning-extra, -Wextra       Enable extra warnings
  -X, --directive <name>=<value>[,<name>=<value>,...]   Overrides a compiler directive
  -E, --compile-time-env name=value[,name=value,...]    Provides compile time env like DEF would do.
  --module-name                  Fully qualified module name. If not given, it is deduced from the
                                 import path if source file is in a package, or equals the
                                 filename otherwise.
  -M, --depfile                  Produce depfiles for the sources
"""


# The following experimental options are supported only on MacOSX:
#  -C, --compile    Compile generated .c file to .o file
#  --link           Link .o file to produce extension module (implies -C)
#  -+, --cplus      Use C++ compiler for compiling and linking
#  Additional .o files to link may be supplied when using -X."""

def bad_usage():
    sys.stderr.write(usage)
    sys.exit(1)


def parse_command_line(args):
    from .Main import CompilationOptions, default_options

    pending_arg = []

    def pop_arg():
        if not args or pending_arg:
            bad_usage()
        if '=' in args[0] and args[0].startswith('--'):  # allow "--long-option=xyz"
            name, value = args.pop(0).split('=', 1)
            pending_arg.append(value)
            return name
        return args.pop(0)

    def pop_value(default=None):
        if pending_arg:
            return pending_arg.pop()
        elif default is not None:
            return default
        elif not args:
            bad_usage()
        return args.pop(0)

    def get_param(option):
        tail = option[2:]
        if tail:
            return tail
        else:
            return pop_arg()

    options = CompilationOptions(default_options)
    sources = []
    while args:
        if args[0].startswith("-"):
            option = pop_arg()
            if option in ("-V", "--version"):
                options.show_version = 1
            elif option in ("-l", "--create-listing"):
                options.use_listing_file = 1
            elif option in ("-+", "--cplus"):
                options.cplus = 1
            elif option == "--embed":
                Options.embed = pop_value("main")
            elif option.startswith("-I"):
                options.include_path.append(get_param(option))
            elif option == "--include-dir":
                options.include_path.append(pop_value())
            elif option in ("-w", "--working"):
                options.working_path = pop_value()
            elif option in ("-o", "--output-file"):
                options.output_file = pop_value()
            elif option in ("-t", "--timestamps"):
                options.timestamps = 1
            elif option in ("-f", "--force"):
                options.timestamps = 0
            elif option in ("-v", "--verbose"):
                options.verbose += 1
            elif option in ("-p", "--embed-positions"):
                Options.embed_pos_in_docstring = 1
            elif option in ("-z", "--pre-import"):
                Options.pre_import = pop_value()
            elif option == "--cleanup":
                Options.generate_cleanup_code = int(pop_value())
            elif option in ("-D", "--no-docstrings"):
                Options.docstrings = False
            elif option in ("-a", "--annotate"):
                Options.annotate = True
            elif option == "--annotate-coverage":
                Options.annotate = True
                Options.annotate_coverage_xml = pop_value()
            elif option == "--convert-range":
                Options.convert_range = True
            elif option == "--line-directives":
                options.emit_linenums = True
            elif option == "--no-c-in-traceback":
                options.c_line_in_traceback = False
            elif option == "--gdb":
                options.gdb_debug = True
                options.output_dir = os.curdir
            elif option == "--gdb-outdir":
                options.gdb_debug = True
                options.output_dir = pop_value()
            elif option == "--lenient":
                Options.error_on_unknown_names = False
                Options.error_on_uninitialized = False
            elif option == '-2':
                options.language_level = 2
            elif option == '-3':
                options.language_level = 3
            elif option == '--3str':
                options.language_level = '3str'
            elif option == "--capi-reexport-cincludes":
                options.capi_reexport_cincludes = True
            elif option == "--fast-fail":
                Options.fast_fail = True
            elif option == "--cimport-from-pyx":
                Options.cimport_from_pyx = True
            elif option in ('-Werror', '--warning-errors'):
                Options.warning_errors = True
            elif option in ('-Wextra', '--warning-extra'):
                options.compiler_directives.update(Options.extra_warnings)
            elif option == "--old-style-globals":
                Options.old_style_globals = True
            elif option == "--directive" or option.startswith('-X'):
                if option.startswith('-X') and option[2:].strip():
                    x_args = option[2:]
                else:
                    x_args = pop_value()
                try:
                    options.compiler_directives = Options.parse_directive_list(
                        x_args, relaxed_bool=True,
                        current_settings=options.compiler_directives)
                except ValueError as e:
                    sys.stderr.write("Error in compiler directive: %s\n" % e.args[0])
                    sys.exit(1)
            elif option == "--compile-time-env" or option.startswith('-E'):
                if option.startswith('-E') and option[2:].strip():
                    x_args = option[2:]
                else:
                    x_args = pop_value()
                try:
                    options.compile_time_env = Options.parse_compile_time_env(
                        x_args, current_settings=options.compile_time_env)
                except ValueError as e:
                    sys.stderr.write("Error in compile-time-env: %s\n" % e.args[0])
                    sys.exit(1)
            elif option == "--module-name":
                options.module_name = pop_value()
            elif option in ('-M', '--depfile'):
                options.depfile = True
            elif option.startswith('--debug'):
                option = option[2:].replace('-', '_')
                from . import DebugFlags
                if option in dir(DebugFlags):
                    setattr(DebugFlags, option, True)
                else:
                    sys.stderr.write("Unknown debug flag: %s\n" % option)
                    bad_usage()
            elif option in ('-h', '--help'):
                sys.stdout.write(usage)
                sys.exit(0)
            else:
                sys.stderr.write(usage)
                sys.stderr.write("Unknown compiler flag: %s\n" % option)
                sys.exit(1)
        else:
            sources.append(pop_arg())

    if pending_arg:
        bad_usage()

    if options.use_listing_file and len(sources) > 1:
        sys.stderr.write(
            "cython: Only one source file allowed when using -l/--create-listing\n")
        sys.exit(1)
    if len(sources) == 0 and not options.show_version:
        bad_usage()
    if Options.embed and len(sources) > 1:
        sys.stderr.write(
            "cython: Only one source file allowed when using --embed\n")
        sys.exit(1)
    if options.module_name:
        if options.timestamps:
            sys.stderr.write(
                "cython: Cannot use --module-name with --timestamps\n")
            sys.exit(1)
        if len(sources) > 1:
            sys.stderr.write(
                "cython: Only one source file allowed when using --module-name\n")
            sys.exit(1)
    return options, sources

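A hypothetical driver for the parser above (assumes Cython's compiler package is importable); note how pop_arg() transparently splits the "--long-option=value" spelling:

# Hypothetical usage sketch; not part of CmdLine.py.
from Cython.Compiler.CmdLine import parse_command_line

options, sources = parse_command_line(
    ['-3', '--gdb-outdir=/tmp/dbg', 'example.pyx'])
print(options.language_level, options.gdb_debug, options.output_dir, sources)
# 3 True /tmp/dbg ['example.pyx']
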
124  kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Code.pxd  (new file)
@@ -0,0 +1,124 @@
from __future__ import absolute_import

cimport cython
from ..StringIOTree cimport StringIOTree


cdef class UtilityCodeBase(object):
    cpdef format_code(self, code_string, replace_empty_lines=*)


cdef class UtilityCode(UtilityCodeBase):
    cdef public object name
    cdef public object proto
    cdef public object impl
    cdef public object init
    cdef public object cleanup
    cdef public object proto_block
    cdef public object requires
    cdef public dict _cache
    cdef public list specialize_list
    cdef public object file

    cpdef none_or_sub(self, s, context)


cdef class FunctionState:
    cdef public set names_taken
    cdef public object owner
    cdef public object scope

    cdef public object error_label
    cdef public size_t label_counter
    cdef public set labels_used
    cdef public object return_label
    cdef public object continue_label
    cdef public object break_label
    cdef public list yield_labels

    cdef public object return_from_error_cleanup_label  # not used in __init__ ?

    cdef public object exc_vars
    cdef public object current_except
    cdef public bint in_try_finally
    cdef public bint can_trace
    cdef public bint gil_owned

    cdef public list temps_allocated
    cdef public dict temps_free
    cdef public dict temps_used_type
    cdef public set zombie_temps
    cdef public size_t temp_counter
    cdef public list collect_temps_stack

    cdef public object closure_temps
    cdef public bint should_declare_error_indicator
    cdef public bint uses_error_indicator

    @cython.locals(n=size_t)
    cpdef new_label(self, name=*)
    cpdef tuple get_loop_labels(self)
    cpdef set_loop_labels(self, labels)
    cpdef tuple get_all_labels(self)
    cpdef set_all_labels(self, labels)
    cpdef start_collecting_temps(self)
    cpdef stop_collecting_temps(self)

    cpdef list temps_in_use(self)

cdef class IntConst:
    cdef public object cname
    cdef public object value
    cdef public bint is_long

cdef class PyObjectConst:
    cdef public object cname
    cdef public object type

cdef class StringConst:
    cdef public object cname
    cdef public object text
    cdef public object escaped_value
    cdef public dict py_strings
    cdef public list py_versions

    @cython.locals(intern=bint, is_str=bint, is_unicode=bint)
    cpdef get_py_string_const(self, encoding, identifier=*, is_str=*, py3str_cstring=*)

## cdef class PyStringConst:
##     cdef public object cname
##     cdef public object encoding
##     cdef public bint is_str
##     cdef public bint is_unicode
##     cdef public bint intern

#class GlobalState(object):

#def funccontext_property(name):

cdef class CCodeWriter(object):
    cdef readonly StringIOTree buffer
    cdef readonly list pyclass_stack
    cdef readonly object globalstate
    cdef readonly object funcstate
    cdef object code_config
    cdef object last_pos
    cdef object last_marked_pos
    cdef Py_ssize_t level
    cdef public Py_ssize_t call_level  # debug-only, see Nodes.py
    cdef bint bol

    cpdef write(self, s)
    cpdef put(self, code)
    cpdef put_safe(self, code)
    cpdef putln(self, code=*, bint safe=*)
    @cython.final
    cdef increase_indent(self)
    @cython.final
    cdef decrease_indent(self)


cdef class PyrexCodeWriter:
    cdef public object f
    cdef public Py_ssize_t level
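The FunctionState declarations above describe, among other things, the C label bookkeeping used while generating function bodies. A pure-Python analogue (an illustration under assumed semantics, not Cython's implementation) of the new_label()/loop-label protocol:

class LabelState:
    def __init__(self):
        self.label_counter = 0
        self.continue_label = self.break_label = None

    def new_label(self, name=None):
        # hand out a unique C label name, optionally suffixed for readability
        self.label_counter += 1
        label = "__pyx_L%d" % self.label_counter
        return label + ("_" + name if name else "")

    def get_loop_labels(self):
        return self.continue_label, self.break_label

    def set_loop_labels(self, labels):
        self.continue_label, self.break_label = labels

state = LabelState()
outer = state.get_loop_labels()                     # save enclosing loop labels
state.set_loop_labels((state.new_label('continue'),
                       state.new_label('break')))   # labels for an inner loop
state.set_loop_labels(outer)                        # restore on loop exit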
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Code.py (new file, 2597 lines)
(diff suppressed because it is too large)

@@ -0,0 +1,35 @@
from __future__ import absolute_import

from .Visitor import VisitorTransform
from .Nodes import StatListNode


class ExtractPxdCode(VisitorTransform):
    """
    Finds nodes in a pxd file that should generate code, and
    returns them in a StatListNode.

    The result is a tuple (StatListNode, ModuleScope), i.e.
    everything that is needed from the pxd after it is processed.

    A purer approach would be to separately compile the pxd code,
    but the result would have to be slightly more sophisticated
    than pure strings (functions + wanted interned strings +
    wanted utility code + wanted cached objects) so for now this
    approach is taken.
    """

    def __call__(self, root):
        self.funcs = []
        self.visitchildren(root)
        return (StatListNode(root.pos, stats=self.funcs), root.scope)

    def visit_FuncDefNode(self, node):
        self.funcs.append(node)
        # Do not visit children, nested funcdefnodes will
        # also be moved by this action...
        return node

    def visit_Node(self, node):
        self.visitchildren(node)
        return node
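The same visitor idiom, shown standalone on Python's own ast module instead of Cython's parse tree: collect function definitions and deliberately skip their bodies, so nested defs travel with their parent exactly as in visit_FuncDefNode above.

import ast

class FuncCollector(ast.NodeVisitor):
    def __init__(self):
        self.funcs = []

    def visit_FunctionDef(self, node):
        self.funcs.append(node.name)
        # no generic_visit(): nested defs stay attached to their parent

tree = ast.parse("def f():\n    def g(): pass\ndef h(): pass\n")
collector = FuncCollector()
collector.visit(tree)
print(collector.funcs)  # ['f', 'h']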
@@ -0,0 +1,164 @@
from __future__ import absolute_import

from .Symtab import ModuleScope
from .PyrexTypes import *
from .UtilityCode import CythonUtilityCode
from .Errors import error
from .Scanning import StringSourceDescriptor
from . import MemoryView


class CythonScope(ModuleScope):
    is_cython_builtin = 1
    _cythonscope_initialized = False

    def __init__(self, context):
        ModuleScope.__init__(self, u'cython', None, None)
        self.pxd_file_loaded = True
        self.populate_cython_scope()
        # The Main.Context object
        self.context = context

        for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type):
            entry = self.declare_typedef(fused_type.name,
                                         fused_type,
                                         None,
                                         cname='<error>')
            entry.in_cinclude = True

    def is_cpp(self):
        # Allow C++ utility code in C++ contexts.
        return self.context.cpp

    def lookup_type(self, name):
        # This function should go away when types are all first-level objects.
        type = parse_basic_type(name)
        if type:
            return type

        return super(CythonScope, self).lookup_type(name)

    def lookup(self, name):
        entry = super(CythonScope, self).lookup(name)

        if entry is None and not self._cythonscope_initialized:
            self.load_cythonscope()
            entry = super(CythonScope, self).lookup(name)

        return entry

    def find_module(self, module_name, pos):
        error("cython.%s is not available" % module_name, pos)

    def find_submodule(self, module_name):
        entry = self.entries.get(module_name, None)
        if not entry:
            self.load_cythonscope()
            entry = self.entries.get(module_name, None)

        if entry and entry.as_module:
            return entry.as_module
        else:
            # TODO: fix find_submodule control flow so that we're not
            # expected to create a submodule here (to protect CythonScope's
            # possible immutability). Hack ourselves out of the situation
            # for now.
            raise error((StringSourceDescriptor(u"cython", u""), 0, 0),
                        "cython.%s is not available" % module_name)

    def lookup_qualified_name(self, qname):
        # ExprNode.as_cython_attribute generates qnames and we untangle it here...
        name_path = qname.split(u'.')
        scope = self
        while len(name_path) > 1:
            scope = scope.lookup_here(name_path[0])
            if scope:
                scope = scope.as_module
            del name_path[0]
        if scope is None:
            return None
        else:
            return scope.lookup_here(name_path[0])

    def populate_cython_scope(self):
        # These are used to optimize isinstance in FinalOptimizePhase
        type_object = self.declare_typedef(
            'PyTypeObject',
            base_type = c_void_type,
            pos = None,
            cname = 'PyTypeObject')
        type_object.is_void = True
        type_object_type = type_object.type

        self.declare_cfunction(
            'PyObject_TypeCheck',
            CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None),
                                    CFuncTypeArg("t", c_ptr_type(type_object_type), None)]),
            pos = None,
            defining = 1,
            cname = 'PyObject_TypeCheck')

    def load_cythonscope(self):
        """
        Creates some entries for testing purposes and entries for
        cython.array() and for cython.view.*.
        """
        if self._cythonscope_initialized:
            return

        self._cythonscope_initialized = True
        cython_testscope_utility_code.declare_in_scope(
            self, cython_scope=self)
        cython_test_extclass_utility_code.declare_in_scope(
            self, cython_scope=self)

        #
        # The view sub-scope
        #
        self.viewscope = viewscope = ModuleScope(u'view', self, None)
        self.declare_module('view', viewscope, None).as_module = viewscope
        viewscope.is_cython_builtin = True
        viewscope.pxd_file_loaded = True

        cythonview_testscope_utility_code.declare_in_scope(
            viewscope, cython_scope=self)

        view_utility_scope = MemoryView.view_utility_code.declare_in_scope(
            self.viewscope, cython_scope=self,
            whitelist=MemoryView.view_utility_whitelist)

        # self.entries["array"] = view_utility_scope.entries.pop("array")


def create_cython_scope(context):
    # One could in fact probably make it a singleton,
    # but not sure yet whether any code mutates it (which would kill reusing
    # it across different contexts)
    return CythonScope(context)

# Load test utilities for the cython scope

def load_testscope_utility(cy_util_name, **kwargs):
    return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs)


undecorated_methods_protos = UtilityCode(proto=u"""
    /* These methods are undecorated and have therefore no prototype */
    static PyObject *__pyx_TestClass_cdef_method(
        struct __pyx_TestClass_obj *self, int value);
    static PyObject *__pyx_TestClass_cpdef_method(
        struct __pyx_TestClass_obj *self, int value, int skip_dispatch);
    static PyObject *__pyx_TestClass_def_method(
        PyObject *self, PyObject *value);
""")

cython_testscope_utility_code = load_testscope_utility("TestScope")

test_cython_utility_dep = load_testscope_utility("TestDep")

cython_test_extclass_utility_code = \
    load_testscope_utility("TestClass", name="TestClass",
                           requires=[undecorated_methods_protos,
                                     test_cython_utility_dep])

cythonview_testscope_utility_code = load_testscope_utility("View.TestScope")
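The dotted-name walk in lookup_qualified_name() above reduces to a simple loop: every component except the last must resolve to a sub-scope. An illustrative sketch with plain dicts standing in for Cython scopes:

def lookup_qualified(scope, qname):
    parts = qname.split('.')
    while len(parts) > 1:
        scope = scope.get(parts.pop(0))   # descend into the sub-scope
        if scope is None:
            return None
    return scope.get(parts[0])

scopes = {'view': {'array': '<array entry>'}}
print(lookup_qualified(scopes, 'view.array'))    # <array entry>
print(lookup_qualified(scopes, 'missing.attr'))  # None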
@@ -0,0 +1,21 @@
# Can be enabled at the command line with --debug-xxx.

debug_disposal_code = 0
debug_temp_alloc = 0
debug_coercion = 0

# Write comments into the C code that show where temporary variables
# are allocated and released.
debug_temp_code_comments = 0

# Write a call trace of the code generation phase into the C code.
debug_trace_code_generation = 0

# Do not replace exceptions with user-friendly error messages.
debug_no_exception_intercept = 0

# Print a message each time a new stage in the pipeline is entered.
debug_verbose_pipeline = 0

# Raise an exception when an error is encountered.
debug_exception_on_error = 0
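These flags connect to the "--debug" branch of the command-line parser shown earlier: "--debug-temp-alloc" is rewritten to the attribute name "debug_temp_alloc" and set on this module. A runnable sketch of that mapping, using a stand-in namespace rather than the real module:

import types

DebugFlags = types.SimpleNamespace(debug_temp_alloc=0, debug_coercion=0)

def apply_debug_option(option):
    # "--debug-temp-alloc" -> "debug_temp_alloc", mirroring
    # option[2:].replace('-', '_') in the parser
    name = option[2:].replace('-', '_')
    if hasattr(DebugFlags, name):
        setattr(DebugFlags, name, True)
        return True
    return False

print(apply_debug_option("--debug-temp-alloc"), DebugFlags.debug_temp_alloc)
# True True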
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Errors.py (new file, 265 lines)
@@ -0,0 +1,265 @@
#
#   Errors
#

from __future__ import absolute_import

try:
    from __builtin__ import basestring as any_string_type
except ImportError:
    any_string_type = (bytes, str)

import sys
from contextlib import contextmanager

from ..Utils import open_new_file
from . import DebugFlags
from . import Options


class PyrexError(Exception):
    pass


class PyrexWarning(Exception):
    pass


def context(position):
    source = position[0]
    assert not (isinstance(source, any_string_type)), (
        "Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
    try:
        F = source.get_lines()
    except UnicodeDecodeError:
        # file has an encoding problem
        s = u"[unprintable code]\n"
    else:
        s = u''.join(F[max(0, position[1]-6):position[1]])
        s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1))
    s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60)
    return s

def format_position(position):
    if position:
        return u"%s:%d:%d: " % (position[0].get_error_description(),
                                position[1], position[2])
    return u''

def format_error(message, position):
    if position:
        pos_str = format_position(position)
        cont = context(position)
        message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'')
    return message

class CompileError(PyrexError):

    def __init__(self, position = None, message = u""):
        self.position = position
        self.message_only = message
        self.formatted_message = format_error(message, position)
        self.reported = False
        # Deprecated and withdrawn in 2.6:
        #   self.message = message
        Exception.__init__(self, self.formatted_message)
        # Python Exception subclass pickling is broken,
        # see http://bugs.python.org/issue1692335
        self.args = (position, message)

    def __str__(self):
        return self.formatted_message

class CompileWarning(PyrexWarning):

    def __init__(self, position = None, message = ""):
        self.position = position
        # Deprecated and withdrawn in 2.6:
        #   self.message = message
        Exception.__init__(self, format_position(position) + message)

class InternalError(Exception):
    # If this is ever raised, there is a bug in the compiler.

    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Internal compiler error: %s"
            % message)

class AbortError(Exception):
    # Throw this to stop the compilation immediately.

    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Abort error: %s" % message)

class CompilerCrash(CompileError):
    # raised when an unexpected exception occurs in a transform
    def __init__(self, pos, context, message, cause, stacktrace=None):
        if message:
            message = u'\n' + message
        else:
            message = u'\n'
        self.message_only = message
        if context:
            message = u"Compiler crash in %s%s" % (context, message)
        if stacktrace:
            import traceback
            message += (
                u'\n\nCompiler crash traceback from this point on:\n' +
                u''.join(traceback.format_tb(stacktrace)))
        if cause:
            if not stacktrace:
                message += u'\n'
            message += u'%s: %s' % (cause.__class__.__name__, cause)
        CompileError.__init__(self, pos, message)
        # Python Exception subclass pickling is broken,
        # see http://bugs.python.org/issue1692335
        self.args = (pos, context, message, cause, stacktrace)

class NoElementTreeInstalledException(PyrexError):
    """raised when the user enabled options.gdb_debug but no ElementTree
    implementation was found
    """

listing_file = None
num_errors = 0
echo_file = None

def open_listing_file(path, echo_to_stderr = 1):
    # Begin a new error listing. If path is None, no file
    # is opened, the error counter is just reset.
    global listing_file, num_errors, echo_file
    if path is not None:
        listing_file = open_new_file(path)
    else:
        listing_file = None
    if echo_to_stderr:
        echo_file = sys.stderr
    else:
        echo_file = None
    num_errors = 0

def close_listing_file():
    global listing_file
    if listing_file:
        listing_file.close()
        listing_file = None

def report_error(err, use_stack=True):
    if error_stack and use_stack:
        error_stack[-1].append(err)
    else:
        global num_errors
        # See Main.py for why dual reporting occurs. Quick fix for now.
        if err.reported: return
        err.reported = True
        try: line = u"%s\n" % err
        except UnicodeEncodeError:
            # Python <= 2.5 does this for non-ASCII Unicode exceptions
            line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
                                getattr(err, 'position', None)) + u'\n'
        if listing_file:
            try: listing_file.write(line)
            except UnicodeEncodeError:
                listing_file.write(line.encode('ASCII', 'replace'))
        if echo_file:
            try: echo_file.write(line)
            except UnicodeEncodeError:
                echo_file.write(line.encode('ASCII', 'replace'))
        num_errors += 1
        if Options.fast_fail:
            raise AbortError("fatal errors")


def error(position, message):
    #print("Errors.error:", repr(position), repr(message)) ###
    if position is None:
        raise InternalError(message)
    err = CompileError(position, message)
    if DebugFlags.debug_exception_on_error: raise Exception(err)  # debug
    report_error(err)
    return err


LEVEL = 1  # warn about all errors level 1 or higher


def message(position, message, level=1):
    if level < LEVEL:
        return
    warn = CompileWarning(position, message)
    line = "note: %s\n" % warn
    if listing_file:
        listing_file.write(line)
    if echo_file:
        echo_file.write(line)
    return warn


def warning(position, message, level=0):
    if level < LEVEL:
        return
    if Options.warning_errors and position:
        return error(position, message)
    warn = CompileWarning(position, message)
    line = "warning: %s\n" % warn
    if listing_file:
        listing_file.write(line)
    if echo_file:
        echo_file.write(line)
    return warn


_warn_once_seen = {}
def warn_once(position, message, level=0):
    if level < LEVEL or message in _warn_once_seen:
        return
    warn = CompileWarning(position, message)
    line = "warning: %s\n" % warn
    if listing_file:
        listing_file.write(line)
    if echo_file:
        echo_file.write(line)
    _warn_once_seen[message] = True
    return warn


# These functions can be used to momentarily suppress errors.

error_stack = []


def hold_errors():
    error_stack.append([])


def release_errors(ignore=False):
    held_errors = error_stack.pop()
    if not ignore:
        for err in held_errors:
            report_error(err)


def held_errors():
    return error_stack[-1]


# same as context manager:

@contextmanager
def local_errors(ignore=False):
    errors = []
    error_stack.append(errors)
    try:
        yield errors
    finally:
        release_errors(ignore=ignore)


# this module needs a redesign to support parallel cythonisation, but
# for now, the following works at least in sequential compiler runs

def reset():
    _warn_once_seen.clear()
    del error_stack[:]
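The hold/release machinery above lets the compiler try something speculatively and discard any errors it produced. A self-contained sketch of the same stack discipline (plain strings instead of CompileError objects), showing why release happens after the pop so nested holders re-buffer into the enclosing frame:

from contextlib import contextmanager

error_stack = []
reported = []

def report_error(err):
    if error_stack:
        error_stack[-1].append(err)   # buffered, not yet reported
    else:
        reported.append(err)

@contextmanager
def local_errors(ignore=False):
    errors = []
    error_stack.append(errors)
    try:
        yield errors
    finally:
        error_stack.pop()
        if not ignore:
            for err in errors:
                report_error(err)     # may land in an enclosing holder

with local_errors(ignore=True) as errs:
    report_error("speculative failure")
print(reported, errs)  # [] ['speculative failure']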
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/ExprNodes.py (new file, 13717 lines)
(diff suppressed because it is too large)
(binary file not shown)

@@ -0,0 +1,111 @@
from __future__ import absolute_import

cimport cython

from .Visitor cimport CythonTransform, TreeVisitor

cdef class ControlBlock:
    cdef public set children
    cdef public set parents
    cdef public set positions
    cdef public list stats
    cdef public dict gen
    cdef public set bounded

    # Big integer bitsets
    cdef public object i_input
    cdef public object i_output
    cdef public object i_gen
    cdef public object i_kill
    cdef public object i_state

    cpdef bint empty(self)
    cpdef detach(self)
    cpdef add_child(self, block)

cdef class ExitBlock(ControlBlock):
    cpdef bint empty(self)

cdef class NameAssignment:
    cdef public bint is_arg
    cdef public bint is_deletion
    cdef public object lhs
    cdef public object rhs
    cdef public object entry
    cdef public object pos
    cdef public set refs
    cdef public object bit
    cdef public object inferred_type

cdef class AssignmentList:
    cdef public object bit
    cdef public object mask
    cdef public list stats

cdef class AssignmentCollector(TreeVisitor):
    cdef list assignments

@cython.final
cdef class ControlFlow:
    cdef public set blocks
    cdef public set entries
    cdef public list loops
    cdef public list exceptions

    cdef public ControlBlock entry_point
    cdef public ExitBlock exit_point
    cdef public ControlBlock block

    cdef public dict assmts

    cpdef newblock(self, ControlBlock parent=*)
    cpdef nextblock(self, ControlBlock parent=*)
    cpdef bint is_tracked(self, entry)
    cpdef bint is_statically_assigned(self, entry)
    cpdef mark_position(self, node)
    cpdef mark_assignment(self, lhs, rhs, entry)
    cpdef mark_argument(self, lhs, rhs, entry)
    cpdef mark_deletion(self, node, entry)
    cpdef mark_reference(self, node, entry)

    @cython.locals(block=ControlBlock, parent=ControlBlock, unreachable=set)
    cpdef normalize(self)

    @cython.locals(bit=object, assmts=AssignmentList,
                   block=ControlBlock)
    cpdef initialize(self)

    @cython.locals(assmts=AssignmentList, assmt=NameAssignment)
    cpdef set map_one(self, istate, entry)

    @cython.locals(block=ControlBlock, parent=ControlBlock)
    cdef reaching_definitions(self)

cdef class Uninitialized:
    pass

cdef class Unknown:
    pass


cdef class MessageCollection:
    cdef set messages


@cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock,
               assmt=NameAssignment)
cdef check_definitions(ControlFlow flow, dict compiler_directives)

@cython.final
cdef class ControlFlowAnalysis(CythonTransform):
    cdef object gv_ctx
    cdef object constant_folder
    cdef set reductions
    cdef list env_stack
    cdef list stack
    cdef object env
    cdef ControlFlow flow
    cdef bint in_inplace_assignment

    cpdef mark_assignment(self, lhs, rhs=*)
    cpdef mark_position(self, node)
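The ControlBlock declarations above describe a basic-block graph with parent/child edges and a detach() operation for pruning. A minimal illustration of that graph shape (not the real ControlFlow class, just the assumed edge semantics):

class Block:
    def __init__(self):
        self.children = set()
        self.parents = set()
        self.stats = []

    def add_child(self, block):
        self.children.add(block)
        block.parents.add(self)

    def detach(self):
        # unlink this block from the graph in both directions
        for parent in self.parents:
            parent.children.discard(self)
        for child in self.children:
            child.parents.discard(self)
        self.parents.clear()
        self.children.clear()

entry, body, exit_ = Block(), Block(), Block()
entry.add_child(body)
body.add_child(exit_)
body.detach()                                    # e.g. an empty block pruned
print(len(entry.children), len(exit_.parents))   # 0 0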
(diff suppressed because it is too large)
(binary file not shown)

@@ -0,0 +1,901 @@
from __future__ import absolute_import

import copy

from . import (ExprNodes, PyrexTypes, MemoryView,
               ParseTreeTransforms, StringEncoding, Errors)
from .ExprNodes import CloneNode, ProxyNode, TupleNode
from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode
from ..Utils import OrderedSet


class FusedCFuncDefNode(StatListNode):
    """
    This node replaces a function with fused arguments. It deep-copies the
    function for every permutation of fused types, and allocates a new local
    scope for it. It keeps track of the original function in self.node, and
    the entry of the original function in the symbol table is given the
    'fused_cfunction' attribute which points back to us.
    Then when a function lookup occurs (to e.g. call it), the call can be
    dispatched to the right function.

    node    FuncDefNode    the original function
    nodes   [FuncDefNode]  list of copies of node with different specific types
    py_func DefNode        the fused python function subscriptable from
                           Python space
    __signatures__         A DictNode mapping signature specialization strings
                           to PyCFunction nodes
    resulting_fused_function  PyCFunction for the fused DefNode that delegates
                              to specializations
    fused_func_assignment   Assignment of the fused function to the function name
    defaults_tuple          TupleNode of defaults (letting PyCFunctionNode build
                            defaults would result in many different tuples)
    specialized_pycfuncs    List of synthesized pycfunction nodes for the
                            specializations
    code_object             CodeObjectNode shared by all specializations and the
                            fused function

    fused_compound_types    All fused (compound) types (e.g. floating[:])
    """

    __signatures__ = None
    resulting_fused_function = None
    fused_func_assignment = None
    defaults_tuple = None
    decorators = None

    child_attrs = StatListNode.child_attrs + [
        '__signatures__', 'resulting_fused_function', 'fused_func_assignment']

    def __init__(self, node, env):
        super(FusedCFuncDefNode, self).__init__(node.pos)

        self.nodes = []
        self.node = node

        is_def = isinstance(self.node, DefNode)
        if is_def:
            # self.node.decorators = []
            self.copy_def(env)
        else:
            self.copy_cdef(env)

        # Perform some sanity checks. If anything fails, it's a bug
        for n in self.nodes:
            assert not n.entry.type.is_fused
            assert not n.local_scope.return_type.is_fused
            if node.return_type.is_fused:
                assert not n.return_type.is_fused

            if not is_def and n.cfunc_declarator.optional_arg_count:
                assert n.type.op_arg_struct

        node.entry.fused_cfunction = self
        # Copy the nodes as AnalyseDeclarationsTransform will prepend
        # self.py_func to self.stats, as we only want specialized
        # CFuncDefNodes in self.nodes
        self.stats = self.nodes[:]

    def copy_def(self, env):
        """
        Create a copy of the original def or lambda function for specialized
        versions.
        """
        fused_compound_types = PyrexTypes.unique(
            [arg.type for arg in self.node.args if arg.type.is_fused])
        fused_types = self._get_fused_base_types(fused_compound_types)
        permutations = PyrexTypes.get_all_specialized_permutations(fused_types)

        self.fused_compound_types = fused_compound_types

        if self.node.entry in env.pyfunc_entries:
            env.pyfunc_entries.remove(self.node.entry)

        for cname, fused_to_specific in permutations:
            copied_node = copy.deepcopy(self.node)
            # keep signature object identity for special casing in DefNode.analyse_declarations()
            copied_node.entry.signature = self.node.entry.signature

            self._specialize_function_args(copied_node.args, fused_to_specific)
            copied_node.return_type = self.node.return_type.specialize(
                fused_to_specific)

            copied_node.analyse_declarations(env)
            # copied_node.is_staticmethod = self.node.is_staticmethod
            # copied_node.is_classmethod = self.node.is_classmethod
            self.create_new_local_scope(copied_node, env, fused_to_specific)
            self.specialize_copied_def(copied_node, cname, self.node.entry,
                                       fused_to_specific, fused_compound_types)

            PyrexTypes.specialize_entry(copied_node.entry, cname)
            copied_node.entry.used = True
            env.entries[copied_node.entry.name] = copied_node.entry

            if not self.replace_fused_typechecks(copied_node):
                break

        self.orig_py_func = self.node
        self.py_func = self.make_fused_cpdef(self.node, env, is_def=True)

    def copy_cdef(self, env):
        """
        Create a copy of the original c(p)def function for all specialized
        versions.
        """
        permutations = self.node.type.get_all_specialized_permutations()
        # print 'Node %s has %d specializations:' % (self.node.entry.name,
        #                                            len(permutations))
        # import pprint; pprint.pprint([d for cname, d in permutations])

        # Prevent copying of the python function
        self.orig_py_func = orig_py_func = self.node.py_func
        self.node.py_func = None
        if orig_py_func:
            env.pyfunc_entries.remove(orig_py_func.entry)

        fused_types = self.node.type.get_fused_types()
        self.fused_compound_types = fused_types

        new_cfunc_entries = []
        for cname, fused_to_specific in permutations:
            copied_node = copy.deepcopy(self.node)

            # Make the types in our CFuncType specific.
            type = copied_node.type.specialize(fused_to_specific)
            entry = copied_node.entry
            type.specialize_entry(entry, cname)

            # Reuse existing Entries (e.g. from .pxd files).
            for i, orig_entry in enumerate(env.cfunc_entries):
                if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
                    copied_node.entry = env.cfunc_entries[i]
                    if not copied_node.entry.func_cname:
                        copied_node.entry.func_cname = entry.func_cname
                    entry = copied_node.entry
                    type = entry.type
                    break
            else:
                new_cfunc_entries.append(entry)

            copied_node.type = type
            entry.type, type.entry = type, entry

            entry.used = (entry.used or
                          self.node.entry.defined_in_pxd or
                          env.is_c_class_scope or
                          entry.is_cmethod)

            if self.node.cfunc_declarator.optional_arg_count:
                self.node.cfunc_declarator.declare_optional_arg_struct(
                    type, env, fused_cname=cname)

            copied_node.return_type = type.return_type
            self.create_new_local_scope(copied_node, env, fused_to_specific)

            # Make the argument types in the CFuncDeclarator specific
            self._specialize_function_args(copied_node.cfunc_declarator.args,
                                           fused_to_specific)

            # If a cpdef, declare all specialized cpdefs (this
            # also calls analyse_declarations)
            copied_node.declare_cpdef_wrapper(env)
            if copied_node.py_func:
                env.pyfunc_entries.remove(copied_node.py_func.entry)

                self.specialize_copied_def(
                    copied_node.py_func, cname, self.node.entry.as_variable,
                    fused_to_specific, fused_types)

            if not self.replace_fused_typechecks(copied_node):
                break

        # replace old entry with new entries
        try:
            cindex = env.cfunc_entries.index(self.node.entry)
        except ValueError:
            env.cfunc_entries.extend(new_cfunc_entries)
        else:
            env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries

        if orig_py_func:
            self.py_func = self.make_fused_cpdef(orig_py_func, env,
                                                 is_def=False)
        else:
            self.py_func = orig_py_func

    def _get_fused_base_types(self, fused_compound_types):
        """
        Get a list of unique basic fused types, from a list of
        (possibly) compound fused types.
        """
        base_types = []
        seen = set()
        for fused_type in fused_compound_types:
            fused_type.get_fused_types(result=base_types, seen=seen)
        return base_types

    def _specialize_function_args(self, args, fused_to_specific):
        for arg in args:
            if arg.type.is_fused:
                arg.type = arg.type.specialize(fused_to_specific)
                if arg.type.is_memoryviewslice:
                    arg.type.validate_memslice_dtype(arg.pos)

    def create_new_local_scope(self, node, env, f2s):
        """
        Create a new local scope for the copied node and append it to
        self.nodes. A new local scope is needed because the arguments with the
        fused types are already in the local scope, and we need the specialized
        entries created after analyse_declarations on each specialized version
        of the (CFunc)DefNode.
        f2s is a dict mapping each fused type to its specialized version
        """
        node.create_local_scope(env)
        node.local_scope.fused_to_specific = f2s

        # This is copied from the original function, set it to false to
        # stop recursion
        node.has_fused_arguments = False
        self.nodes.append(node)

    def specialize_copied_def(self, node, cname, py_entry, f2s, fused_compound_types):
        """Specialize the copy of a DefNode given the copied node,
        the specialization cname and the original DefNode entry"""
        fused_types = self._get_fused_base_types(fused_compound_types)
        type_strings = [
            PyrexTypes.specialization_signature_string(fused_type, f2s)
            for fused_type in fused_types
        ]

        node.specialized_signature_string = '|'.join(type_strings)

        node.entry.pymethdef_cname = PyrexTypes.get_fused_cname(
            cname, node.entry.pymethdef_cname)
        node.entry.doc = py_entry.doc
        node.entry.doc_cname = py_entry.doc_cname

    def replace_fused_typechecks(self, copied_node):
        """
        Branch-prune fused type checks like

            if fused_t is int:
                ...

        Returns whether an error was issued and whether we should stop
        in order to prevent a flood of errors.
        """
        num_errors = Errors.num_errors
        transform = ParseTreeTransforms.ReplaceFusedTypeChecks(
            copied_node.local_scope)
        transform(copied_node)

        if Errors.num_errors > num_errors:
            return False

        return True

    def _fused_instance_checks(self, normal_types, pyx_code, env):
        """
        Generate Cython code for instance checks, matching an object to
        specialized types.
        """
        for specialized_type in normal_types:
            # all_numeric = all_numeric and specialized_type.is_numeric
            pyx_code.context.update(
                py_type_name=specialized_type.py_type_name(),
                specialized_type_name=specialized_type.specialization_string,
            )
            pyx_code.put_chunk(
                u"""
                    if isinstance(arg, {{py_type_name}}):
                        dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'; break
                """)

    def _dtype_name(self, dtype):
        if dtype.is_typedef:
            return '___pyx_%s' % dtype
        return str(dtype).replace(' ', '_')

    def _dtype_type(self, dtype):
        if dtype.is_typedef:
            return self._dtype_name(dtype)
        return str(dtype)

    def _sizeof_dtype(self, dtype):
        if dtype.is_pyobject:
            return 'sizeof(void *)'
        else:
            return "sizeof(%s)" % self._dtype_type(dtype)

    def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
        "Setup some common cases to match dtypes against specializations"
        if pyx_code.indenter("if kind in b'iu':"):
            pyx_code.putln("pass")
            pyx_code.named_insertion_point("dtype_int")
            pyx_code.dedent()

        if pyx_code.indenter("elif kind == b'f':"):
            pyx_code.putln("pass")
            pyx_code.named_insertion_point("dtype_float")
            pyx_code.dedent()

        if pyx_code.indenter("elif kind == b'c':"):
            pyx_code.putln("pass")
            pyx_code.named_insertion_point("dtype_complex")
            pyx_code.dedent()

        if pyx_code.indenter("elif kind == b'O':"):
            pyx_code.putln("pass")
            pyx_code.named_insertion_point("dtype_object")
            pyx_code.dedent()

    match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
    no_match = "dest_sig[{{dest_sig_idx}}] = None"

    def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types):
        """
        Match a numpy dtype object to the individual specializations.
        """
        self._buffer_check_numpy_dtype_setup_cases(pyx_code)

        for specialized_type in pythran_types+specialized_buffer_types:
            final_type = specialized_type
            if specialized_type.is_pythran_expr:
                specialized_type = specialized_type.org_buffer
            dtype = specialized_type.dtype
            pyx_code.context.update(
                itemsize_match=self._sizeof_dtype(dtype) + " == itemsize",
                signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype),
                dtype=dtype,
                specialized_type_name=final_type.specialization_string)

            dtypes = [
                (dtype.is_int, pyx_code.dtype_int),
                (dtype.is_float, pyx_code.dtype_float),
                (dtype.is_complex, pyx_code.dtype_complex)
            ]

            for dtype_category, codewriter in dtypes:
                if dtype_category:
                    cond = '{{itemsize_match}} and (<Py_ssize_t>arg.ndim) == %d' % (
                        specialized_type.ndim,)
                    if dtype.is_int:
                        cond += ' and {{signed_match}}'

                    if final_type.is_pythran_expr:
                        cond += ' and arg_is_pythran_compatible'

                    if codewriter.indenter("if %s:" % cond):
                        #codewriter.putln("print 'buffer match found based on numpy dtype'")
                        codewriter.putln(self.match)
                        codewriter.putln("break")
                        codewriter.dedent()

    def _buffer_parse_format_string_check(self, pyx_code, decl_code,
                                          specialized_type, env):
        """
        For each specialized type, try to coerce the object to a memoryview
        slice of that type. This means obtaining a buffer and parsing the
        format string.
        TODO: separate buffer acquisition from format parsing
        """
        dtype = specialized_type.dtype
        if specialized_type.is_buffer:
            axes = [('direct', 'strided')] * specialized_type.ndim
        else:
            axes = specialized_type.axes

        memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes)
        memslice_type.create_from_py_utility_code(env)
        pyx_code.context.update(
            coerce_from_py_func=memslice_type.from_py_function,
            dtype=dtype)
        decl_code.putln(
            "{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)")

        pyx_code.context.update(
            specialized_type_name=specialized_type.specialization_string,
            sizeof_dtype=self._sizeof_dtype(dtype))

        pyx_code.put_chunk(
            u"""
                # try {{dtype}}
                if itemsize == -1 or itemsize == {{sizeof_dtype}}:
                    memslice = {{coerce_from_py_func}}(arg, 0)
                    if memslice.memview:
                        __PYX_XDEC_MEMVIEW(&memslice, 1)
                        # print 'found a match for the buffer through format parsing'
                        %s
                        break
                    else:
                        __pyx_PyErr_Clear()
            """ % self.match)

    def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, env):
        """
        Generate Cython code to match objects to buffer specializations.
        First try to get a numpy dtype object and match it against the individual
        specializations. If that fails, try naively to coerce the object
        to each specialization, which obtains the buffer each time and tries
        to match the format string.
        """
        # The first thing to find a match in this loop breaks out of the loop
        pyx_code.put_chunk(
            u"""
                """ + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u"""
                if ndarray is not None:
                    if isinstance(arg, ndarray):
                        dtype = arg.dtype
                        """ + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u"""
                    elif __pyx_memoryview_check(arg):
                        arg_base = arg.base
                        if isinstance(arg_base, ndarray):
                            dtype = arg_base.dtype
                        else:
                            dtype = None
                    else:
                        dtype = None

                    itemsize = -1
                    if dtype is not None:
                        itemsize = dtype.itemsize
                        kind = ord(dtype.kind)
                        dtype_signed = kind == 'i'
            """)
        pyx_code.indent(2)
        if pythran_types:
            pyx_code.put_chunk(
                u"""
                    # Pythran only supports the endianness of the current compiler
                    byteorder = dtype.byteorder
                    if byteorder == "<" and not __Pyx_Is_Little_Endian():
                        arg_is_pythran_compatible = False
                    elif byteorder == ">" and __Pyx_Is_Little_Endian():
                        arg_is_pythran_compatible = False
                    if arg_is_pythran_compatible:
                        cur_stride = itemsize
                        shape = arg.shape
                        strides = arg.strides
                        for i in range(arg.ndim-1, -1, -1):
                            if (<Py_ssize_t>strides[i]) != cur_stride:
                                arg_is_pythran_compatible = False
                                break
                            cur_stride *= <Py_ssize_t> shape[i]
                        else:
                            arg_is_pythran_compatible = not (arg.flags.f_contiguous and (<Py_ssize_t>arg.ndim) > 1)
                """)
        pyx_code.named_insertion_point("numpy_dtype_checks")
        self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types)
        pyx_code.dedent(2)

        for specialized_type in buffer_types:
            self._buffer_parse_format_string_check(
                pyx_code, decl_code, specialized_type, env)

    def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types):
        """
        If we have any buffer specializations, write out some variable
        declarations and imports.
        """
        decl_code.put_chunk(
            u"""
                ctypedef struct {{memviewslice_cname}}:
                    void *memview

                void __PYX_XDEC_MEMVIEW({{memviewslice_cname}} *, int have_gil)
                bint __pyx_memoryview_check(object)
            """)

        pyx_code.local_variable_declarations.put_chunk(
            u"""
                cdef {{memviewslice_cname}} memslice
                cdef Py_ssize_t itemsize
                cdef bint dtype_signed
                cdef char kind

                itemsize = -1
            """)

        if pythran_types:
            pyx_code.local_variable_declarations.put_chunk(u"""
                cdef bint arg_is_pythran_compatible
                cdef Py_ssize_t cur_stride
            """)

        pyx_code.imports.put_chunk(
            u"""
                cdef type ndarray
                ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable()
            """)

        seen_typedefs = set()
        seen_int_dtypes = set()
        for buffer_type in all_buffer_types:
            dtype = buffer_type.dtype
            dtype_name = self._dtype_name(dtype)
            if dtype.is_typedef:
                if dtype_name not in seen_typedefs:
                    seen_typedefs.add(dtype_name)
                    decl_code.putln(
                        'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name,
                                                 dtype.empty_declaration_code()))

            if buffer_type.dtype.is_int:
                if str(dtype) not in seen_int_dtypes:
                    seen_int_dtypes.add(str(dtype))
                    pyx_code.context.update(dtype_name=dtype_name,
                                            dtype_type=self._dtype_type(dtype))
                    pyx_code.local_variable_declarations.put_chunk(
                        u"""
                            cdef bint {{dtype_name}}_is_signed
                            {{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)
                        """)

    def _split_fused_types(self, arg):
        """
        Specialize fused types and split into normal types and buffer types.
        """
        specialized_types = PyrexTypes.get_specialized_types(arg.type)

        # Prefer long over int, etc by sorting (see type classes in PyrexTypes.py)
        specialized_types.sort()

        seen_py_type_names = set()
        normal_types, buffer_types, pythran_types = [], [], []
        has_object_fallback = False
        for specialized_type in specialized_types:
            py_type_name = specialized_type.py_type_name()
            if py_type_name:
                if py_type_name in seen_py_type_names:
                    continue
                seen_py_type_names.add(py_type_name)
                if py_type_name == 'object':
                    has_object_fallback = True
                else:
                    normal_types.append(specialized_type)
            elif specialized_type.is_pythran_expr:
                pythran_types.append(specialized_type)
            elif specialized_type.is_buffer or specialized_type.is_memoryviewslice:
                buffer_types.append(specialized_type)

        return normal_types, buffer_types, pythran_types, has_object_fallback

    def _unpack_argument(self, pyx_code):
        pyx_code.put_chunk(
            u"""
                # PROCESSING ARGUMENT {{arg_tuple_idx}}
                if {{arg_tuple_idx}} < len(<tuple>args):
                    arg = (<tuple>args)[{{arg_tuple_idx}}]
                elif kwargs is not None and '{{arg.name}}' in <dict>kwargs:
                    arg = (<dict>kwargs)['{{arg.name}}']
                else:
                {{if arg.default}}
                    arg = (<tuple>defaults)[{{default_idx}}]
                {{else}}
                    {{if arg_tuple_idx < min_positional_args}}
                    raise TypeError("Expected at least %d argument%s, got %d" % (
                        {{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(<tuple>args)))
                    {{else}}
                    raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}")
                    {{endif}}
                {{endif}}
            """)

    def make_fused_cpdef(self, orig_py_func, env, is_def):
        """
        This creates the function that is indexable from Python and does
        runtime dispatch based on the argument types. The function gets the
        arg tuple and kwargs dict (or None) and the defaults tuple
        as arguments from the Binding Fused Function's tp_call.
        """
        from . import TreeFragment, Code, UtilityCode

        fused_types = self._get_fused_base_types([
            arg.type for arg in self.node.args if arg.type.is_fused])

        context = {
            'memviewslice_cname': MemoryView.memviewslice_cname,
            'func_args': self.node.args,
            'n_fused': len(fused_types),
            'min_positional_args':
                self.node.num_required_args - self.node.num_required_kw_args
                if is_def else
                sum(1 for arg in self.node.args if arg.default is None),
            'name': orig_py_func.entry.name,
        }

        pyx_code = Code.PyxCodeWriter(context=context)
        decl_code = Code.PyxCodeWriter(context=context)
        decl_code.put_chunk(
            u"""
                cdef extern from *:
                    void __pyx_PyErr_Clear "PyErr_Clear" ()
                    type __Pyx_ImportNumPyArrayTypeIfAvailable()
                    int __Pyx_Is_Little_Endian()
            """)
        decl_code.indent()

        pyx_code.put_chunk(
            u"""
                def __pyx_fused_cpdef(signatures, args, kwargs, defaults):
                    # FIXME: use a typed signature - currently fails badly because
                    #        default arguments inherit the types we specify here!

                    dest_sig = [None] * {{n_fused}}

                    if kwargs is not None and not kwargs:
                        kwargs = None

                    cdef Py_ssize_t i

                    # instance check body
            """)

        pyx_code.indent()  # indent following code to function body
        pyx_code.named_insertion_point("imports")
        pyx_code.named_insertion_point("func_defs")
        pyx_code.named_insertion_point("local_variable_declarations")

        fused_index = 0
        default_idx = 0
        all_buffer_types = OrderedSet()
        seen_fused_types = set()
        for i, arg in enumerate(self.node.args):
            if arg.type.is_fused:
                arg_fused_types = arg.type.get_fused_types()
                if len(arg_fused_types) > 1:
                    raise NotImplementedError("Determination of more than one fused base "
                                              "type per argument is not implemented.")
                fused_type = arg_fused_types[0]

            if arg.type.is_fused and fused_type not in seen_fused_types:
                seen_fused_types.add(fused_type)

                context.update(
                    arg_tuple_idx=i,
                    arg=arg,
                    dest_sig_idx=fused_index,
                    default_idx=default_idx,
                )

                normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg)
                self._unpack_argument(pyx_code)

                # 'unrolled' loop, first match breaks out of it
                if pyx_code.indenter("while 1:"):
                    if normal_types:
                        self._fused_instance_checks(normal_types, pyx_code, env)
                    if buffer_types or pythran_types:
                        env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c"))
                        self._buffer_checks(buffer_types, pythran_types, pyx_code, decl_code, env)
                    if has_object_fallback:
                        pyx_code.context.update(specialized_type_name='object')
                        pyx_code.putln(self.match)
                    else:
                        pyx_code.putln(self.no_match)
                    pyx_code.putln("break")
                    pyx_code.dedent()

                fused_index += 1
                all_buffer_types.update(buffer_types)
                all_buffer_types.update(ty.org_buffer for ty in pythran_types)

            if arg.default:
                default_idx += 1

        if all_buffer_types:
            self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types)
            env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c"))
            env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c"))

        pyx_code.put_chunk(
            u"""
                candidates = []
                for sig in <dict>signatures:
                    match_found = False
                    src_sig = sig.strip('()').split('|')
                    for i in range(len(dest_sig)):
                        dst_type = dest_sig[i]
                        if dst_type is not None:
                            if src_sig[i] == dst_type:
                                match_found = True
                            else:
                                match_found = False
                                break

                    if match_found:
                        candidates.append(sig)

                if not candidates:
                    raise TypeError("No matching signature found")
                elif len(candidates) > 1:
                    raise TypeError("Function call with ambiguous argument types")
                else:
                    return (<dict>signatures)[candidates[0]]
            """)

        fragment_code = pyx_code.getvalue()
        # print decl_code.getvalue()
        # print fragment_code
        from .Optimize import ConstantFolding
        fragment = TreeFragment.TreeFragment(
            fragment_code, level='module', pipeline=[ConstantFolding()])
        ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root)
        UtilityCode.declare_declarations_in_scope(
            decl_code.getvalue(), env.global_scope())
        ast.scope = env
        # FIXME: for static methods of cdef classes, we build the wrong signature here: first arg becomes 'self'
        ast.analyse_declarations(env)
        py_func = ast.stats[-1]  # the DefNode
        self.fragment_scope = ast.scope

        if isinstance(self.node, DefNode):
            py_func.specialized_cpdefs = self.nodes[:]
        else:
            py_func.specialized_cpdefs = [n.py_func for n in self.nodes]

        return py_func

    def update_fused_defnode_entry(self, env):
        copy_attributes = (
            'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname',
            'pymethdef_cname', 'doc', 'doc_cname', 'is_member',
            'scope'
        )

        entry = self.py_func.entry

        for attr in copy_attributes:
            setattr(entry, attr,
                    getattr(self.orig_py_func.entry, attr))

        self.py_func.name = self.orig_py_func.name
        self.py_func.doc = self.orig_py_func.doc

        env.entries.pop('__pyx_fused_cpdef', None)
        if isinstance(self.node, DefNode):
            env.entries[entry.name] = entry
        else:
            env.entries[entry.name].as_variable = entry

        env.pyfunc_entries.append(entry)

        self.py_func.entry.fused_cfunction = self
        for node in self.nodes:
            if isinstance(self.node, DefNode):
                node.fused_py_func = self.py_func
            else:
                node.py_func.fused_py_func = self.py_func
                node.entry.as_variable = entry

        self.synthesize_defnodes()
        self.stats.append(self.__signatures__)

    def analyse_expressions(self, env):
        """
        Analyse the expressions. Take care to only evaluate default arguments
        once and clone the result for all specializations
        """
        for fused_compound_type in self.fused_compound_types:
            for fused_type in fused_compound_type.get_fused_types():
                for specialization_type in fused_type.types:
                    if specialization_type.is_complex:
                        specialization_type.create_declaration_utility_code(env)

        if self.py_func:
            self.__signatures__ = self.__signatures__.analyse_expressions(env)
            self.py_func = self.py_func.analyse_expressions(env)
            self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env)
            self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env)

        self.defaults = defaults = []

        for arg in self.node.args:
            if arg.default:
                arg.default = arg.default.analyse_expressions(env)
                defaults.append(ProxyNode(arg.default))
            else:
                defaults.append(None)

        for i, stat in enumerate(self.stats):
            stat = self.stats[i] = stat.analyse_expressions(env)
            if isinstance(stat, FuncDefNode):
                for arg, default in zip(stat.args, defaults):
                    if default is not None:
                        arg.default = CloneNode(default).coerce_to(arg.type, env)

        if self.py_func:
            args = [CloneNode(default) for default in defaults if default]
            self.defaults_tuple = TupleNode(self.pos, args=args)
            self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True).coerce_to_pyobject(env)
            self.defaults_tuple = ProxyNode(self.defaults_tuple)
            self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object)

            fused_func = self.resulting_fused_function.arg
            fused_func.defaults_tuple = CloneNode(self.defaults_tuple)
            fused_func.code_object = CloneNode(self.code_object)

            for i, pycfunc in enumerate(self.specialized_pycfuncs):
                pycfunc.code_object = CloneNode(self.code_object)
                pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env)
                pycfunc.defaults_tuple = CloneNode(self.defaults_tuple)
        return self

    def synthesize_defnodes(self):
        """
        Create the __signatures__ dict of PyCFunctionNode specializations.
        """
        if isinstance(self.nodes[0], CFuncDefNode):
            nodes = [node.py_func for node in self.nodes]
        else:
            nodes = self.nodes

        signatures = [StringEncoding.EncodedString(node.specialized_signature_string)
                      for node in nodes]
        keys = [ExprNodes.StringNode(node.pos, value=sig)
                for node, sig in zip(nodes, signatures)]
        values = [ExprNodes.PyCFunctionNode.from_defnode(node, binding=True)
                  for node in nodes]

        self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos, zip(keys, values))

        self.specialized_pycfuncs = values
        for pycfuncnode in values:
            pycfuncnode.is_specialization = True

    def generate_function_definitions(self, env, code):
        if self.py_func:
            self.py_func.pymethdef_required = True
            self.fused_func_assignment.generate_function_definitions(env, code)

        for stat in self.stats:
            if isinstance(stat, FuncDefNode) and stat.entry.used:
                code.mark_pos(stat.pos)
                stat.generate_function_definitions(env, code)

    def generate_execution_code(self, code):
        # Note: all def function specialization are wrapped in PyCFunction
        # nodes in the self.__signatures__ dictnode.
        for default in self.defaults:
            if default is not None:
                default.generate_evaluation_code(code)

        if self.py_func:
            self.defaults_tuple.generate_evaluation_code(code)
            self.code_object.generate_evaluation_code(code)

        for stat in self.stats:
            code.mark_pos(stat.pos)
            if isinstance(stat, ExprNodes.ExprNode):
                stat.generate_evaluation_code(code)
            else:
                stat.generate_execution_code(code)

        if self.__signatures__:
            self.resulting_fused_function.generate_evaluation_code(code)

            code.putln(
                "((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" %
                (self.resulting_fused_function.result(),
                 self.__signatures__.result()))
            code.put_giveref(self.__signatures__.result())
            self.__signatures__.generate_post_assignment_code(code)
            self.__signatures__.free_temps(code)

            self.fused_func_assignment.generate_execution_code(code)

            # Dispose of results
            self.resulting_fused_function.generate_disposal_code(code)
            self.resulting_fused_function.free_temps(code)
            self.defaults_tuple.generate_disposal_code(code)
            self.defaults_tuple.free_temps(code)
            self.code_object.generate_disposal_code(code)
            self.code_object.free_temps(code)

        for default in self.defaults:
            if default is not None:
                default.generate_disposal_code(code)
                default.free_temps(code)

    def annotate(self, code):
        for stat in self.stats:
            stat.annotate(code)
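The runtime dispatch that make_fused_cpdef() emits boils down to the candidate loop in its final put_chunk: compare the destination signature inferred from the argument types against each specialized signature string. A plain-Python rendering of that same loop:

def choose_specialization(signatures, dest_sig):
    candidates = []
    for sig in signatures:
        match_found = False
        src_sig = sig.strip('()').split('|')
        for i, dst_type in enumerate(dest_sig):
            if dst_type is not None:
                if src_sig[i] == dst_type:
                    match_found = True
                else:
                    match_found = False
                    break
        if match_found:
            candidates.append(sig)
    if not candidates:
        raise TypeError("No matching signature found")
    if len(candidates) > 1:
        raise TypeError("Function call with ambiguous argument types")
    return candidates[0]

print(choose_specialization({'int': '<int impl>', 'float': '<float impl>'},
                            ['int']))  # 'int'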
@@ -0,0 +1,15 @@
def _get_feature(name):
    import __future__
    # fall back to a unique fake object for earlier Python versions or Python 3
    return getattr(__future__, name, object())

unicode_literals = _get_feature("unicode_literals")
with_statement = _get_feature("with_statement")  # dummy
division = _get_feature("division")
print_function = _get_feature("print_function")
absolute_import = _get_feature("absolute_import")
nested_scopes = _get_feature("nested_scopes")  # dummy
generators = _get_feature("generators")  # dummy
generator_stop = _get_feature("generator_stop")

del _get_feature
|
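# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): how these sentinels
# are consumed. Parsed "from __future__ import X" statements end up in a
# set that is tested for membership, and the object() fallback above
# keeps those tests working even when the running Python lacks a flag.
def has_future_feature(enabled_features, feature):
    # 'feature' is one of the module-level sentinels defined above
    return feature in enabled_features

assert has_future_feature({division, print_function}, division)
assert not has_future_feature(set(), unicode_literals)
# ----------------------------------------------------------------------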
@ -0,0 +1,64 @@
|
|||
"""
|
||||
This module deals with interpreting the parse tree as Python
|
||||
would have interpreted it, inside the compiler.
|
||||
|
||||
For now this only covers parse tree to value conversion of
|
||||
compile-time values.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import

import sys  # used by the version check below; relying on the star imports for it is fragile
|
||||
|
||||
from .Nodes import *
|
||||
from .ExprNodes import *
|
||||
from .Errors import CompileError
|
||||
|
||||
|
||||
class EmptyScope(object):
|
||||
def lookup(self, name):
|
||||
return None
|
||||
|
||||
empty_scope = EmptyScope()
|
||||
|
||||
def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
|
||||
"""
|
||||
Tries to interpret a list of compile time option nodes.
|
||||
The result will be a tuple (optlist, optdict) but where
|
||||
all expression nodes have been interpreted. Each entry is
|
||||
returned as a (value, pos) tuple.
|
||||
|
||||
optlist is a list of nodes, while optdict is a DictNode (the
|
||||
result optdict is a dict)
|
||||
|
||||
If type_env is set, all type nodes will be analysed and the resulting
|
||||
type set. Otherwise only interpretable ExprNodes
|
||||
are allowed; other nodes raise errors.
|
||||
|
||||
A CompileError will be raised if there are problems.
|
||||
"""
|
||||
|
||||
def interpret(node, ix):
|
||||
if ix in type_args:
|
||||
if type_env:
|
||||
type = node.analyse_as_type(type_env)
|
||||
if not type:
|
||||
raise CompileError(node.pos, "Invalid type.")
|
||||
return (type, node.pos)
|
||||
else:
|
||||
raise CompileError(node.pos, "Type not allowed here.")
|
||||
else:
|
||||
if (sys.version_info[0] >= 3 and
|
||||
isinstance(node, StringNode) and
|
||||
node.unicode_value is not None):
|
||||
return (node.unicode_value, node.pos)
|
||||
return (node.compile_time_value(empty_scope), node.pos)
|
||||
|
||||
if optlist:
|
||||
optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
|
||||
if optdict:
|
||||
assert isinstance(optdict, DictNode)
|
||||
new_optdict = {}
|
||||
for item in optdict.key_value_pairs:
|
||||
new_key, dummy = interpret(item.key, None)
|
||||
new_optdict[new_key] = interpret(item.value, item.key.value)
|
||||
optdict = new_optdict
|
||||
return (optlist, optdict)  # 'optdict' holds the interpreted dict; 'new_optdict' may be unbound here
|
|
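# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): a value-level model
# of interpret_compiletime_options(). Plain values stand in for parse
# tree nodes; every interpreted option is paired with a source position,
# and keyword options become a real dict. All names here are
# hypothetical illustrations.
def interpret_options_model(optlist, optdict, pos="<pos>"):
    interpreted_list = [(value, pos) for value in optlist]
    interpreted_dict = {key: (value, pos) for key, value in optdict.items()}
    return interpreted_list, interpreted_dict

assert interpret_options_model([1, "two"], {"mode": "c"}) == (
    [(1, "<pos>"), ("two", "<pos>")], {"mode": ("c", "<pos>")})
# ----------------------------------------------------------------------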
@ -0,0 +1,138 @@
|
|||
# cython: language_level=3, py2_import=True
|
||||
#
|
||||
# Cython Scanner - Lexical Definitions
|
||||
#
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
raw_prefixes = "rR"
|
||||
bytes_prefixes = "bB"
|
||||
string_prefixes = "fFuU" + bytes_prefixes
|
||||
char_prefixes = "cC"
|
||||
any_string_prefix = raw_prefixes + string_prefixes + char_prefixes
|
||||
IDENT = 'IDENT'
|
||||
|
||||
|
||||
def make_lexicon():
|
||||
from ..Plex import \
|
||||
Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
|
||||
TEXT, IGNORE, State, Lexicon
|
||||
from .Scanning import Method
|
||||
|
||||
letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
|
||||
digit = Any("0123456789")
|
||||
bindigit = Any("01")
|
||||
octdigit = Any("01234567")
|
||||
hexdigit = Any("0123456789ABCDEFabcdef")
|
||||
indentation = Bol + Rep(Any(" \t"))
|
||||
|
||||
def underscore_digits(d):
|
||||
return Rep1(d) + Rep(Str("_") + Rep1(d))
|
||||
|
||||
decimal = underscore_digits(digit)
|
||||
dot = Str(".")
|
||||
exponent = Any("Ee") + Opt(Any("+-")) + decimal
|
||||
decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)
|
||||
|
||||
name = letter + Rep(letter | digit)
|
||||
intconst = decimal | (Str("0") + ((Any("Xx") + underscore_digits(hexdigit)) |
|
||||
(Any("Oo") + underscore_digits(octdigit)) |
|
||||
(Any("Bb") + underscore_digits(bindigit)) ))
|
||||
intsuffix = (Opt(Any("Uu")) + Opt(Any("Ll")) + Opt(Any("Ll"))) | (Opt(Any("Ll")) + Opt(Any("Ll")) + Opt(Any("Uu")))
|
||||
intliteral = intconst + intsuffix
|
||||
fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
|
||||
imagconst = (intconst | fltconst) + Any("jJ")
|
||||
|
||||
# invalid combinations of prefixes are caught in p_string_literal
|
||||
beginstring = Opt(Rep(Any(string_prefixes + raw_prefixes)) |
|
||||
Any(char_prefixes)
|
||||
) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
|
||||
two_oct = octdigit + octdigit
|
||||
three_oct = octdigit + octdigit + octdigit
|
||||
two_hex = hexdigit + hexdigit
|
||||
four_hex = two_hex + two_hex
|
||||
escapeseq = Str("\\") + (two_oct | three_oct |
|
||||
Str('N{') + Rep(AnyBut('}')) + Str('}') |
|
||||
Str('u') + four_hex | Str('x') + two_hex |
|
||||
Str('U') + four_hex + four_hex | AnyChar)
|
||||
|
||||
bra = Any("([{")
|
||||
ket = Any(")]}")
|
||||
punct = Any(":,;+-*/|&<>=.%`~^?!@")
|
||||
diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
|
||||
"+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
|
||||
"<<=", ">>=", "**=", "//=", "->", "@=")
|
||||
spaces = Rep1(Any(" \t\f"))
|
||||
escaped_newline = Str("\\\n")
|
||||
lineterm = Eol + Opt(Str("\n"))
|
||||
|
||||
comment = Str("#") + Rep(AnyBut("\n"))
|
||||
|
||||
return Lexicon([
|
||||
(name, IDENT),
|
||||
(intliteral, Method('strip_underscores', symbol='INT')),
|
||||
(fltconst, Method('strip_underscores', symbol='FLOAT')),
|
||||
(imagconst, Method('strip_underscores', symbol='IMAG')),
|
||||
(punct | diphthong, TEXT),
|
||||
|
||||
(bra, Method('open_bracket_action')),
|
||||
(ket, Method('close_bracket_action')),
|
||||
(lineterm, Method('newline_action')),
|
||||
|
||||
(beginstring, Method('begin_string_action')),
|
||||
|
||||
(comment, IGNORE),
|
||||
(spaces, IGNORE),
|
||||
(escaped_newline, IGNORE),
|
||||
|
||||
State('INDENT', [
|
||||
(comment + lineterm, Method('commentline')),
|
||||
(Opt(spaces) + Opt(comment) + lineterm, IGNORE),
|
||||
(indentation, Method('indentation_action')),
|
||||
(Eof, Method('eof_action'))
|
||||
]),
|
||||
|
||||
State('SQ_STRING', [
|
||||
(escapeseq, 'ESCAPE'),
|
||||
(Rep1(AnyBut("'\"\n\\")), 'CHARS'),
|
||||
(Str('"'), 'CHARS'),
|
||||
(Str("\n"), Method('unclosed_string_action')),
|
||||
(Str("'"), Method('end_string_action')),
|
||||
(Eof, 'EOF')
|
||||
]),
|
||||
|
||||
State('DQ_STRING', [
|
||||
(escapeseq, 'ESCAPE'),
|
||||
(Rep1(AnyBut('"\n\\')), 'CHARS'),
|
||||
(Str("'"), 'CHARS'),
|
||||
(Str("\n"), Method('unclosed_string_action')),
|
||||
(Str('"'), Method('end_string_action')),
|
||||
(Eof, 'EOF')
|
||||
]),
|
||||
|
||||
State('TSQ_STRING', [
|
||||
(escapeseq, 'ESCAPE'),
|
||||
(Rep1(AnyBut("'\"\n\\")), 'CHARS'),
|
||||
(Any("'\""), 'CHARS'),
|
||||
(Str("\n"), 'NEWLINE'),
|
||||
(Str("'''"), Method('end_string_action')),
|
||||
(Eof, 'EOF')
|
||||
]),
|
||||
|
||||
State('TDQ_STRING', [
|
||||
(escapeseq, 'ESCAPE'),
|
||||
(Rep1(AnyBut('"\'\n\\')), 'CHARS'),
|
||||
(Any("'\""), 'CHARS'),
|
||||
(Str("\n"), 'NEWLINE'),
|
||||
(Str('"""'), Method('end_string_action')),
|
||||
(Eof, 'EOF')
|
||||
]),
|
||||
|
||||
(Eof, Method('eof_action'))
|
||||
],
|
||||
|
||||
# FIXME: Plex 1.9 needs different args here from Plex 1.1.4
|
||||
#debug_flags = scanner_debug_flags,
|
||||
#debug_file = scanner_dump_file
|
||||
)
|
||||
|
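# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): the Plex pattern
# underscore_digits(d) above accepts digit groups separated by single
# underscores (PEP 515 style). An equivalent check with the stdlib 're'
# module, for decimal digits only:
import re

DECIMAL_WITH_UNDERSCORES = re.compile(r"[0-9]+(?:_[0-9]+)*$")

assert DECIMAL_WITH_UNDERSCORES.match("1_000_000")
assert not DECIMAL_WITH_UNDERSCORES.match("1__000")  # doubled underscore
assert not DECIMAL_WITH_UNDERSCORES.match("_100")    # leading underscore
assert not DECIMAL_WITH_UNDERSCORES.match("100_")    # trailing underscore
# ----------------------------------------------------------------------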
914
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Main.py
Normal file
|
@ -0,0 +1,914 @@
|
|||
#
|
||||
# Cython Top Level
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import io
|
||||
|
||||
if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 3):
|
||||
sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.3+, found %d.%d\n" % tuple(sys.version_info[:2]))
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
from __builtin__ import basestring
|
||||
except ImportError:
|
||||
basestring = str
|
||||
|
||||
# Do not import Parsing here, import it when needed, because Parsing imports
|
||||
# Nodes, which globally needs debug command line options initialized to set a
|
||||
# conditional metaclass. These options are processed by CmdLine called from
|
||||
# main() in this file.
|
||||
# import Parsing
|
||||
from . import Errors
|
||||
from .StringEncoding import EncodedString
|
||||
from .Scanning import PyrexScanner, FileSourceDescriptor
|
||||
from .Errors import PyrexError, CompileError, error, warning
|
||||
from .Symtab import ModuleScope
|
||||
from .. import Utils
|
||||
from . import Options
|
||||
|
||||
from . import Version # legacy import needed by old PyTables versions
|
||||
version = Version.version # legacy attribute - use "Cython.__version__" instead
|
||||
|
||||
module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$")
|
||||
|
||||
verbose = 0
|
||||
|
||||
standard_include_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
|
||||
os.path.pardir, 'Includes'))
|
||||
|
||||
class CompilationData(object):
|
||||
# Bundles the information that is passed from transform to transform.
|
||||
# (For now, this class is only a placeholder.)
|
||||
|
||||
# While Context contains every pxd ever loaded, path information etc.,
|
||||
# this only contains the data related to a single compilation pass
|
||||
#
|
||||
# pyx ModuleNode Main code tree of this compilation.
|
||||
# pxds {string : ModuleNode} Trees for the pxds used in the pyx.
|
||||
# codewriter CCodeWriter Where to output final code.
|
||||
# options CompilationOptions
|
||||
# result CompilationResult
|
||||
pass
|
||||
|
||||
|
||||
class Context(object):
|
||||
# This class encapsulates the context needed for compiling
|
||||
# one or more Cython implementation files along with their
|
||||
# associated and imported declaration files. It includes
|
||||
# the root of the module import namespace and the list
|
||||
# of directories to search for include files.
|
||||
#
|
||||
# modules {string : ModuleScope}
|
||||
# include_directories [string]
|
||||
# future_directives [object]
|
||||
# language_level int currently 2 or 3 for Python 2/3
|
||||
|
||||
cython_scope = None
|
||||
language_level = None # warn when not set but default to Py2
|
||||
|
||||
def __init__(self, include_directories, compiler_directives, cpp=False,
|
||||
language_level=None, options=None):
|
||||
# cython_scope is a hack, set to False by subclasses, in order to break
|
||||
# an infinite loop.
|
||||
# Better code organization would fix it.
|
||||
|
||||
from . import Builtin, CythonScope
|
||||
self.modules = {"__builtin__" : Builtin.builtin_scope}
|
||||
self.cython_scope = CythonScope.create_cython_scope(self)
|
||||
self.modules["cython"] = self.cython_scope
|
||||
self.include_directories = include_directories
|
||||
self.future_directives = set()
|
||||
self.compiler_directives = compiler_directives
|
||||
self.cpp = cpp
|
||||
self.options = options
|
||||
|
||||
self.pxds = {} # full name -> node tree
|
||||
self._interned = {} # (type(value), value, *key_args) -> interned_value
|
||||
|
||||
if language_level is not None:
|
||||
self.set_language_level(language_level)
|
||||
|
||||
self.gdb_debug_outputwriter = None
|
||||
|
||||
def set_language_level(self, level):
|
||||
from .Future import print_function, unicode_literals, absolute_import, division
|
||||
future_directives = set()
|
||||
if level == '3str':
|
||||
level = 3
|
||||
else:
|
||||
level = int(level)
|
||||
if level >= 3:
|
||||
future_directives.add(unicode_literals)
|
||||
if level >= 3:
|
||||
future_directives.update([print_function, absolute_import, division])
|
||||
self.language_level = level
|
||||
self.future_directives = future_directives
|
||||
if level >= 3:
|
||||
self.modules['builtins'] = self.modules['__builtin__']
|
||||
|
||||
def intern_ustring(self, value, encoding=None):
|
||||
key = (EncodedString, value, encoding)
|
||||
try:
|
||||
return self._interned[key]
|
||||
except KeyError:
|
||||
pass
|
||||
value = EncodedString(value)
|
||||
if encoding:
|
||||
value.encoding = encoding
|
||||
self._interned[key] = value
|
||||
return value
|
||||
|
||||
def intern_value(self, value, *key):
|
||||
key = (type(value), value) + key
|
||||
try:
|
||||
return self._interned[key]
|
||||
except KeyError:
|
||||
pass
|
||||
self._interned[key] = value
|
||||
return value
|
||||
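# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): the memoization
# pattern behind intern_value()/intern_ustring() above. Keying on
# (type(value), value, *extras) keeps equal-but-distinct values such as
# 1 and 1.0 from colliding in the shared cache.
_interned_demo = {}

def intern_value_demo(value, *key_args):
    key = (type(value), value) + key_args
    return _interned_demo.setdefault(key, value)

a = intern_value_demo((1, 2))
b = intern_value_demo((1, 2))
assert a is b                                  # same object reused
assert type(intern_value_demo(1.0)) is float   # no clash with int 1
# ----------------------------------------------------------------------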
|
||||
# pipeline creation functions can now be found in Pipeline.py
|
||||
|
||||
def process_pxd(self, source_desc, scope, module_name):
|
||||
from . import Pipeline
|
||||
if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
|
||||
source = CompilationSource(source_desc, module_name, os.getcwd())
|
||||
result_sink = create_default_resultobj(source, self.options)
|
||||
pipeline = Pipeline.create_pyx_as_pxd_pipeline(self, result_sink)
|
||||
result = Pipeline.run_pipeline(pipeline, source)
|
||||
else:
|
||||
pipeline = Pipeline.create_pxd_pipeline(self, scope, module_name)
|
||||
result = Pipeline.run_pipeline(pipeline, source_desc)
|
||||
return result
|
||||
|
||||
def nonfatal_error(self, exc):
|
||||
return Errors.report_error(exc)
|
||||
|
||||
def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1,
|
||||
absolute_fallback=True):
|
||||
# Finds and returns the module scope corresponding to
|
||||
# the given relative or absolute module name. If this
|
||||
# is the first time the module has been requested, finds
|
||||
# the corresponding .pxd file and process it.
|
||||
# If relative_to is not None, it must be a module scope,
|
||||
# and the module will first be searched for relative to
|
||||
# that module, provided its name is not a dotted name.
|
||||
debug_find_module = 0
|
||||
if debug_find_module:
|
||||
print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % (
|
||||
module_name, relative_to, pos, need_pxd))
|
||||
|
||||
scope = None
|
||||
pxd_pathname = None
|
||||
if relative_to:
|
||||
if module_name:
|
||||
# from .module import ...
|
||||
qualified_name = relative_to.qualify_name(module_name)
|
||||
else:
|
||||
# from . import ...
|
||||
qualified_name = relative_to.qualified_name
|
||||
scope = relative_to
|
||||
relative_to = None
|
||||
else:
|
||||
qualified_name = module_name
|
||||
|
||||
if not module_name_pattern.match(qualified_name):
|
||||
raise CompileError(pos or (module_name, 0, 0),
|
||||
"'%s' is not a valid module name" % module_name)
|
||||
|
||||
if relative_to:
|
||||
if debug_find_module:
|
||||
print("...trying relative import")
|
||||
scope = relative_to.lookup_submodule(module_name)
|
||||
if not scope:
|
||||
pxd_pathname = self.find_pxd_file(qualified_name, pos)
|
||||
if pxd_pathname:
|
||||
scope = relative_to.find_submodule(module_name)
|
||||
if not scope:
|
||||
if debug_find_module:
|
||||
print("...trying absolute import")
|
||||
if absolute_fallback:
|
||||
qualified_name = module_name
|
||||
scope = self
|
||||
for name in qualified_name.split("."):
|
||||
scope = scope.find_submodule(name)
|
||||
|
||||
if debug_find_module:
|
||||
print("...scope = %s" % scope)
|
||||
if not scope.pxd_file_loaded:
|
||||
if debug_find_module:
|
||||
print("...pxd not loaded")
|
||||
if not pxd_pathname:
|
||||
if debug_find_module:
|
||||
print("...looking for pxd file")
|
||||
# Only look in sys.path if we are explicitly looking
|
||||
# for a .pxd file.
|
||||
pxd_pathname = self.find_pxd_file(qualified_name, pos, sys_path=need_pxd)
|
||||
if debug_find_module:
|
||||
print("......found %s" % pxd_pathname)
|
||||
if not pxd_pathname and need_pxd:
|
||||
# Set pxd_file_loaded such that we don't need to
|
||||
# look for the non-existing pxd file next time.
|
||||
scope.pxd_file_loaded = True
|
||||
package_pathname = self.search_include_directories(qualified_name, ".py", pos)
|
||||
if package_pathname and package_pathname.endswith('__init__.py'):
|
||||
pass
|
||||
else:
|
||||
error(pos, "'%s.pxd' not found" % qualified_name.replace('.', os.sep))
|
||||
if pxd_pathname:
|
||||
scope.pxd_file_loaded = True
|
||||
try:
|
||||
if debug_find_module:
|
||||
print("Context.find_module: Parsing %s" % pxd_pathname)
|
||||
rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
|
||||
if not pxd_pathname.endswith(rel_path):
|
||||
rel_path = pxd_pathname # safety measure to prevent printing incorrect paths
|
||||
source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
|
||||
err, result = self.process_pxd(source_desc, scope, qualified_name)
|
||||
if err:
|
||||
raise err
|
||||
(pxd_codenodes, pxd_scope) = result
|
||||
self.pxds[module_name] = (pxd_codenodes, pxd_scope)
|
||||
except CompileError:
|
||||
pass
|
||||
return scope
|
||||
|
||||
def find_pxd_file(self, qualified_name, pos, sys_path=True):
|
||||
# Search include path (and sys.path if sys_path is True) for
|
||||
# the .pxd file corresponding to the given fully-qualified
|
||||
# module name.
|
||||
# Will find either a dotted filename or a file in a
|
||||
# package directory. If a source file position is given,
|
||||
# the directory containing the source file is searched first
|
||||
# for a dotted filename, and its containing package root
|
||||
# directory is searched first for a non-dotted filename.
|
||||
pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path)
|
||||
if pxd is None: # XXX Keep this until Includes/Deprecated is removed
|
||||
if (qualified_name.startswith('python') or
|
||||
qualified_name in ('stdlib', 'stdio', 'stl')):
|
||||
standard_include_path = os.path.abspath(os.path.normpath(
|
||||
os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
|
||||
deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
|
||||
self.include_directories.append(deprecated_include_path)
|
||||
try:
|
||||
pxd = self.search_include_directories(qualified_name, ".pxd", pos)
|
||||
finally:
|
||||
self.include_directories.pop()
|
||||
if pxd:
|
||||
name = qualified_name
|
||||
if name.startswith('python'):
|
||||
warning(pos, "'%s' is deprecated, use 'cpython'" % name, 1)
|
||||
elif name in ('stdlib', 'stdio'):
|
||||
warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
|
||||
elif name == 'stl':  # note: ('stl') is a plain string, not a tuple, so 'in' was a substring test
|
||||
warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
|
||||
if pxd is None and Options.cimport_from_pyx:
|
||||
return self.find_pyx_file(qualified_name, pos)
|
||||
return pxd
|
||||
|
||||
def find_pyx_file(self, qualified_name, pos):
|
||||
# Search include path for the .pyx file corresponding to the
|
||||
# given fully-qualified module name, as for find_pxd_file().
|
||||
return self.search_include_directories(qualified_name, ".pyx", pos)
|
||||
|
||||
def find_include_file(self, filename, pos):
|
||||
# Search list of include directories for filename.
|
||||
# Reports an error and returns None if not found.
|
||||
path = self.search_include_directories(filename, "", pos,
|
||||
include=True)
|
||||
if not path:
|
||||
error(pos, "'%s' not found" % filename)
|
||||
return path
|
||||
|
||||
def search_include_directories(self, qualified_name, suffix, pos,
|
||||
include=False, sys_path=False):
|
||||
include_dirs = self.include_directories
|
||||
if sys_path:
|
||||
include_dirs = include_dirs + sys.path
|
||||
# include_dirs must be hashable for caching in @cached_function
|
||||
include_dirs = tuple(include_dirs + [standard_include_path])
|
||||
return search_include_directories(include_dirs, qualified_name,
|
||||
suffix, pos, include)
|
||||
|
||||
def find_root_package_dir(self, file_path):
|
||||
return Utils.find_root_package_dir(file_path)
|
||||
|
||||
def check_package_dir(self, dir, package_names):
|
||||
return Utils.check_package_dir(dir, tuple(package_names))
|
||||
|
||||
def c_file_out_of_date(self, source_path, output_path):
|
||||
if not os.path.exists(output_path):
|
||||
return 1
|
||||
c_time = Utils.modification_time(output_path)
|
||||
if Utils.file_newer_than(source_path, c_time):
|
||||
return 1
|
||||
pos = [source_path]
|
||||
pxd_path = Utils.replace_suffix(source_path, ".pxd")
|
||||
if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
|
||||
return 1
|
||||
for kind, name in self.read_dependency_file(source_path):
|
||||
if kind == "cimport":
|
||||
dep_path = self.find_pxd_file(name, pos)
|
||||
elif kind == "include":
|
||||
dep_path = self.search_include_directories(name, "", pos, include=True)
|
||||
else:
|
||||
continue
|
||||
if dep_path and Utils.file_newer_than(dep_path, c_time):
|
||||
return 1
|
||||
return 0
|
||||
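# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): the freshness rule
# implemented by c_file_out_of_date(), reduced to its core. A generated
# file is stale if it is missing or older than any of its inputs
# (source, matching .pxd, cimported/included dependencies).
import os

def is_stale(output_path, *input_paths):
    if not os.path.exists(output_path):
        return True
    out_time = os.path.getmtime(output_path)
    return any(os.path.exists(p) and os.path.getmtime(p) > out_time
               for p in input_paths)

# e.g. is_stale("module.c", "module.pyx", "module.pxd")
# ----------------------------------------------------------------------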
|
||||
def find_cimported_module_names(self, source_path):
|
||||
return [ name for kind, name in self.read_dependency_file(source_path)
|
||||
if kind == "cimport" ]
|
||||
|
||||
def is_package_dir(self, dir_path):
|
||||
return Utils.is_package_dir(dir_path)
|
||||
|
||||
def read_dependency_file(self, source_path):
|
||||
dep_path = Utils.replace_suffix(source_path, ".dep")
|
||||
if os.path.exists(dep_path):
|
||||
f = open(dep_path, "rU")
|
||||
chunks = [ line.strip().split(" ", 1)
|
||||
for line in f.readlines()
|
||||
if " " in line.strip() ]
|
||||
f.close()
|
||||
return chunks
|
||||
else:
|
||||
return ()
|
||||
|
||||
def lookup_submodule(self, name):
|
||||
# Look up a top-level module. Returns None if not found.
|
||||
return self.modules.get(name, None)
|
||||
|
||||
def find_submodule(self, name):
|
||||
# Find a top-level module, creating a new one if needed.
|
||||
scope = self.lookup_submodule(name)
|
||||
if not scope:
|
||||
scope = ModuleScope(name,
|
||||
parent_module = None, context = self)
|
||||
self.modules[name] = scope
|
||||
return scope
|
||||
|
||||
def parse(self, source_desc, scope, pxd, full_module_name):
|
||||
if not isinstance(source_desc, FileSourceDescriptor):
|
||||
raise RuntimeError("Only file sources for code supported")
|
||||
source_filename = source_desc.filename
|
||||
scope.cpp = self.cpp
|
||||
# Parse the given source file and return a parse tree.
|
||||
num_errors = Errors.num_errors
|
||||
try:
|
||||
with Utils.open_source_file(source_filename) as f:
|
||||
from . import Parsing
|
||||
s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
|
||||
scope = scope, context = self)
|
||||
tree = Parsing.p_module(s, pxd, full_module_name)
|
||||
if self.options.formal_grammar:
|
||||
try:
|
||||
from ..Parser import ConcreteSyntaxTree
|
||||
except ImportError:
|
||||
raise RuntimeError(
|
||||
"Formal grammar can only be used with compiled Cython with an available pgen.")
|
||||
ConcreteSyntaxTree.p_module(source_filename)
|
||||
except UnicodeDecodeError as e:
|
||||
#import traceback
|
||||
#traceback.print_exc()
|
||||
raise self._report_decode_error(source_desc, e)
|
||||
|
||||
if Errors.num_errors > num_errors:
|
||||
raise CompileError()
|
||||
return tree
|
||||
|
||||
def _report_decode_error(self, source_desc, exc):
|
||||
msg = exc.args[-1]
|
||||
position = exc.args[2]
|
||||
encoding = exc.args[0]
|
||||
|
||||
line = 1
|
||||
column = idx = 0
|
||||
with io.open(source_desc.filename, "r", encoding='iso8859-1', newline='') as f:
|
||||
for line, data in enumerate(f, 1):
|
||||
idx += len(data)
|
||||
if idx >= position:
|
||||
column = position - (idx - len(data)) + 1
|
||||
break
|
||||
|
||||
return error((source_desc, line, column),
|
||||
"Decoding error, missing or incorrect coding=<encoding-name> "
|
||||
"at top of source (cannot decode with encoding %r: %s)" % (encoding, msg))
|
||||
|
||||
def extract_module_name(self, path, options):
|
||||
# Find fully_qualified module name from the full pathname
|
||||
# of a source file.
|
||||
dir, filename = os.path.split(path)
|
||||
module_name, _ = os.path.splitext(filename)
|
||||
if "." in module_name:
|
||||
return module_name
|
||||
names = [module_name]
|
||||
while self.is_package_dir(dir):
|
||||
parent, package_name = os.path.split(dir)
|
||||
if parent == dir:
|
||||
break
|
||||
names.append(package_name)
|
||||
dir = parent
|
||||
names.reverse()
|
||||
return ".".join(names)
|
||||
|
||||
def setup_errors(self, options, result):
|
||||
Errors.reset() # clear any remaining error state
|
||||
if options.use_listing_file:
|
||||
path = result.listing_file = Utils.replace_suffix(result.main_source_file, ".lis")
|
||||
else:
|
||||
path = None
|
||||
Errors.open_listing_file(path=path,
|
||||
echo_to_stderr=options.errors_to_stderr)
|
||||
|
||||
def teardown_errors(self, err, options, result):
|
||||
source_desc = result.compilation_source.source_desc
|
||||
if not isinstance(source_desc, FileSourceDescriptor):
|
||||
raise RuntimeError("Only file sources for code supported")
|
||||
Errors.close_listing_file()
|
||||
result.num_errors = Errors.num_errors
|
||||
if result.num_errors > 0:
|
||||
err = True
|
||||
if err and result.c_file:
|
||||
try:
|
||||
Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
|
||||
except EnvironmentError:
|
||||
pass
|
||||
result.c_file = None
|
||||
|
||||
|
||||
def get_output_filename(source_filename, cwd, options):
|
||||
if options.cplus:
|
||||
c_suffix = ".cpp"
|
||||
else:
|
||||
c_suffix = ".c"
|
||||
suggested_file_name = Utils.replace_suffix(source_filename, c_suffix)
|
||||
if options.output_file:
|
||||
out_path = os.path.join(cwd, options.output_file)
|
||||
if os.path.isdir(out_path):
|
||||
return os.path.join(out_path, os.path.basename(suggested_file_name))
|
||||
else:
|
||||
return out_path
|
||||
else:
|
||||
return suggested_file_name
|
||||
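# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): the output-name rule
# of get_output_filename() demonstrated on plain strings. An explicit
# output file wins; if it names an existing directory, the suggested
# name is placed inside it; otherwise the source suffix is replaced.
import os

def pick_output(source, cwd, output_file=None, cplus=False):
    suggested = os.path.splitext(source)[0] + (".cpp" if cplus else ".c")
    if not output_file:
        return suggested
    out_path = os.path.join(cwd, output_file)
    if os.path.isdir(out_path):
        return os.path.join(out_path, os.path.basename(suggested))
    return out_path

assert pick_output("pkg/mod.pyx", "/tmp") == "pkg/mod.c"
assert pick_output("pkg/mod.pyx", "/tmp", cplus=True) == "pkg/mod.cpp"
# ----------------------------------------------------------------------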
|
||||
|
||||
def create_default_resultobj(compilation_source, options):
|
||||
result = CompilationResult()
|
||||
result.main_source_file = compilation_source.source_desc.filename
|
||||
result.compilation_source = compilation_source
|
||||
source_desc = compilation_source.source_desc
|
||||
result.c_file = get_output_filename(source_desc.filename,
|
||||
compilation_source.cwd, options)
|
||||
result.embedded_metadata = options.embedded_metadata
|
||||
return result
|
||||
|
||||
|
||||
def run_pipeline(source, options, full_module_name=None, context=None):
|
||||
from . import Pipeline
|
||||
|
||||
source_ext = os.path.splitext(source)[1]
|
||||
options.configure_language_defaults(source_ext[1:]) # py/pyx
|
||||
if context is None:
|
||||
context = options.create_context()
|
||||
|
||||
# Set up source object
|
||||
cwd = os.getcwd()
|
||||
abs_path = os.path.abspath(source)
|
||||
full_module_name = full_module_name or context.extract_module_name(source, options)
|
||||
|
||||
Utils.raise_error_if_module_name_forbidden(full_module_name)
|
||||
|
||||
if options.relative_path_in_code_position_comments:
|
||||
rel_path = full_module_name.replace('.', os.sep) + source_ext
|
||||
if not abs_path.endswith(rel_path):
|
||||
rel_path = source # safety measure to prevent printing incorrect paths
|
||||
else:
|
||||
rel_path = abs_path
|
||||
source_desc = FileSourceDescriptor(abs_path, rel_path)
|
||||
source = CompilationSource(source_desc, full_module_name, cwd)
|
||||
|
||||
# Set up result object
|
||||
result = create_default_resultobj(source, options)
|
||||
|
||||
if options.annotate is None:
|
||||
# By default, decide based on whether an html file already exists.
|
||||
html_filename = os.path.splitext(result.c_file)[0] + ".html"
|
||||
if os.path.exists(html_filename):
|
||||
with io.open(html_filename, "r", encoding="UTF-8") as html_file:
|
||||
if u'<!-- Generated by Cython' in html_file.read(100):
|
||||
options.annotate = True
|
||||
|
||||
# Get pipeline
|
||||
if source_ext.lower() == '.py' or not source_ext:
|
||||
pipeline = Pipeline.create_py_pipeline(context, options, result)
|
||||
else:
|
||||
pipeline = Pipeline.create_pyx_pipeline(context, options, result)
|
||||
|
||||
context.setup_errors(options, result)
|
||||
err, enddata = Pipeline.run_pipeline(pipeline, source)
|
||||
context.teardown_errors(err, options, result)
|
||||
if options.depfile:
|
||||
from ..Build.Dependencies import create_dependency_tree
|
||||
dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file)
|
||||
Utils.write_depfile(result.c_file, result.main_source_file, dependencies)
|
||||
return result
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
#
|
||||
# Main Python entry points
|
||||
#
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
class CompilationSource(object):
|
||||
"""
|
||||
Contains the data necessary to start up a compilation pipeline for
|
||||
a single compilation unit.
|
||||
"""
|
||||
def __init__(self, source_desc, full_module_name, cwd):
|
||||
self.source_desc = source_desc
|
||||
self.full_module_name = full_module_name
|
||||
self.cwd = cwd
|
||||
|
||||
|
||||
class CompilationOptions(object):
|
||||
r"""
|
||||
See default_options at the end of this module for a list of all possible
|
||||
options and CmdLine.usage and CmdLine.parse_command_line() for their
|
||||
meaning.
|
||||
"""
|
||||
def __init__(self, defaults=None, **kw):
|
||||
self.include_path = []
|
||||
if defaults:
|
||||
if isinstance(defaults, CompilationOptions):
|
||||
defaults = defaults.__dict__
|
||||
else:
|
||||
defaults = default_options
|
||||
|
||||
options = dict(defaults)
|
||||
options.update(kw)
|
||||
|
||||
# let's assume 'default_options' contains a value for most known compiler options
|
||||
# and validate against them
|
||||
unknown_options = set(options) - set(default_options)
|
||||
# ignore valid options that are not in the defaults
|
||||
unknown_options.difference_update(['include_path'])
|
||||
if unknown_options:
|
||||
message = "got unknown compilation option%s, please remove: %s" % (
|
||||
's' if len(unknown_options) > 1 else '',
|
||||
', '.join(unknown_options))
|
||||
raise ValueError(message)
|
||||
|
||||
directive_defaults = Options.get_directive_defaults()
|
||||
directives = dict(options['compiler_directives']) # copy mutable field
|
||||
# check for invalid directives
|
||||
unknown_directives = set(directives) - set(directive_defaults)
|
||||
if unknown_directives:
|
||||
message = "got unknown compiler directive%s: %s" % (
|
||||
's' if len(unknown_directives) > 1 else '',
|
||||
', '.join(unknown_directives))
|
||||
raise ValueError(message)
|
||||
options['compiler_directives'] = directives
|
||||
if directives.get('np_pythran', False) and not options['cplus']:
|
||||
import warnings
|
||||
warnings.warn("C++ mode forced when in Pythran mode!")
|
||||
options['cplus'] = True
|
||||
if 'language_level' in directives and 'language_level' not in kw:
|
||||
options['language_level'] = directives['language_level']
|
||||
elif not options.get('language_level'):
|
||||
options['language_level'] = directive_defaults.get('language_level')
|
||||
if 'formal_grammar' in directives and 'formal_grammar' not in kw:
|
||||
options['formal_grammar'] = directives['formal_grammar']
|
||||
if options['cache'] is True:
|
||||
options['cache'] = os.path.join(Utils.get_cython_cache_dir(), 'compiler')
|
||||
|
||||
self.__dict__.update(options)
|
||||
|
||||
def configure_language_defaults(self, source_extension):
|
||||
if source_extension == 'py':
|
||||
if self.compiler_directives.get('binding') is None:
|
||||
self.compiler_directives['binding'] = True
|
||||
|
||||
def create_context(self):
|
||||
return Context(self.include_path, self.compiler_directives,
|
||||
self.cplus, self.language_level, options=self)
|
||||
|
||||
def get_fingerprint(self):
|
||||
r"""
|
||||
Return a string that contains all the options that are relevant for cache invalidation.
|
||||
"""
|
||||
# Collect only the data that can affect the generated file(s).
|
||||
data = {}
|
||||
|
||||
for key, value in self.__dict__.items():
|
||||
if key in ['show_version', 'errors_to_stderr', 'verbose', 'quiet']:
|
||||
# verbosity flags have no influence on the compilation result
|
||||
continue
|
||||
elif key in ['output_file', 'output_dir']:
|
||||
# ignore the exact name of the output file
|
||||
continue
|
||||
elif key in ['timestamps']:
|
||||
# the cache cares about the content of files, not about the timestamps of sources
|
||||
continue
|
||||
elif key in ['cache']:
|
||||
# hopefully caching has no influence on the compilation result
|
||||
continue
|
||||
elif key in ['compiler_directives']:
|
||||
# directives passed on to the C compiler do not influence the generated C code
|
||||
continue
|
||||
elif key in ['include_path']:
|
||||
# this path changes which headers are tracked as dependencies,
|
||||
# it has no influence on the generated C code
|
||||
continue
|
||||
elif key in ['working_path']:
|
||||
# this path changes where modules and pxd files are found;
|
||||
# their content is part of the fingerprint anyway, their
|
||||
# absolute path does not matter
|
||||
continue
|
||||
elif key in ['create_extension']:
|
||||
# create_extension() has already mangled the options, e.g.,
|
||||
# embedded_metadata, when the fingerprint is computed so we
|
||||
# ignore it here.
|
||||
continue
|
||||
elif key in ['build_dir']:
|
||||
# the (temporary) directory where we collect dependencies
|
||||
# has no influence on the C output
|
||||
continue
|
||||
elif key in ['use_listing_file', 'generate_pxi', 'annotate', 'annotate_coverage_xml']:
|
||||
# all output files are contained in the cache so the types of
|
||||
# files generated must be part of the fingerprint
|
||||
data[key] = value
|
||||
elif key in ['formal_grammar', 'evaluate_tree_assertions']:
|
||||
# these bits can change whether compilation to C passes/fails
|
||||
data[key] = value
|
||||
elif key in ['embedded_metadata', 'emit_linenums', 'c_line_in_traceback', 'gdb_debug', 'relative_path_in_code_position_comments']:
|
||||
# the generated code contains additional bits when these are set
|
||||
data[key] = value
|
||||
elif key in ['cplus', 'language_level', 'compile_time_env', 'np_pythran']:
|
||||
# assorted bits that, e.g., influence the parser
|
||||
data[key] = value
|
||||
elif key == 'capi_reexport_cincludes':
|
||||
if self.capi_reexport_cincludes:
|
||||
# our caching implementation does not yet include fingerprints of all the header files
|
||||
raise NotImplementedError('capi_reexport_cincludes is not compatible with Cython caching')
|
||||
elif key == 'common_utility_include_dir':
|
||||
if self.common_utility_include_dir:
|
||||
raise NotImplementedError('common_utility_include_dir is not compatible with Cython caching yet')
|
||||
else:
|
||||
# any unexpected option should go into the fingerprint; it's better
|
||||
# to recompile than to return incorrect results from the cache.
|
||||
data[key] = value
|
||||
|
||||
def to_fingerprint(item):
|
||||
r"""
|
||||
Recursively turn item into a string, turning dicts into lists with
|
||||
deterministic ordering.
|
||||
"""
|
||||
if isinstance(item, dict):
|
||||
item = sorted([(repr(key), to_fingerprint(value)) for key, value in item.items()])
|
||||
return repr(item)
|
||||
|
||||
return to_fingerprint(data)
|
||||
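# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): to_fingerprint()
# above canonicalizes dicts by sorting before repr(), so equal option
# sets always yield the same string. Hashing that string into a compact
# cache key (hashlib here is an editorial assumption, not something this
# module does itself):
import hashlib

def options_cache_key(data):
    def to_fingerprint(item):
        if isinstance(item, dict):
            item = sorted((repr(k), to_fingerprint(v)) for k, v in item.items())
        return repr(item)
    return hashlib.sha1(to_fingerprint(data).encode("utf-8")).hexdigest()

assert options_cache_key({"a": 1, "b": 2}) == options_cache_key({"b": 2, "a": 1})
# ----------------------------------------------------------------------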
|
||||
|
||||
class CompilationResult(object):
|
||||
"""
|
||||
Results from the Cython compiler:
|
||||
|
||||
c_file string or None The generated C source file
|
||||
h_file string or None The generated C header file
|
||||
i_file string or None The generated .pxi file
|
||||
api_file string or None The generated C API .h file
|
||||
listing_file string or None File of error messages
|
||||
object_file string or None Result of compiling the C file
|
||||
extension_file string or None Result of linking the object file
|
||||
num_errors integer Number of compilation errors
|
||||
compilation_source CompilationSource
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.c_file = None
|
||||
self.h_file = None
|
||||
self.i_file = None
|
||||
self.api_file = None
|
||||
self.listing_file = None
|
||||
self.object_file = None
|
||||
self.extension_file = None
|
||||
self.main_source_file = None
|
||||
|
||||
|
||||
class CompilationResultSet(dict):
|
||||
"""
|
||||
Results from compiling multiple Pyrex source files. A mapping
|
||||
from source file paths to CompilationResult instances. Also
|
||||
has the following attributes:
|
||||
|
||||
num_errors integer Total number of compilation errors
|
||||
"""
|
||||
|
||||
num_errors = 0
|
||||
|
||||
def add(self, source, result):
|
||||
self[source] = result
|
||||
self.num_errors += result.num_errors
|
||||
|
||||
|
||||
def compile_single(source, options, full_module_name = None):
|
||||
"""
|
||||
compile_single(source, options, full_module_name)
|
||||
|
||||
Compile the given Pyrex implementation file and return a CompilationResult.
|
||||
Always compiles a single file; does not perform timestamp checking or
|
||||
recursion.
|
||||
"""
|
||||
return run_pipeline(source, options, full_module_name)
|
||||
|
||||
|
||||
def compile_multiple(sources, options):
|
||||
"""
|
||||
compile_multiple(sources, options)
|
||||
|
||||
Compiles the given sequence of Pyrex implementation files and returns
|
||||
a CompilationResultSet. Performs timestamp checking and/or recursion
|
||||
if these are specified in the options.
|
||||
"""
|
||||
if options.module_name and len(sources) > 1:
|
||||
raise RuntimeError('Full module name can only be set '
|
||||
'for single source compilation')
|
||||
# run_pipeline creates the context
|
||||
# context = options.create_context()
|
||||
sources = [os.path.abspath(source) for source in sources]
|
||||
processed = set()
|
||||
results = CompilationResultSet()
|
||||
timestamps = options.timestamps
|
||||
verbose = options.verbose
|
||||
context = None
|
||||
cwd = os.getcwd()
|
||||
for source in sources:
|
||||
if source not in processed:
|
||||
if context is None:
|
||||
context = options.create_context()
|
||||
output_filename = get_output_filename(source, cwd, options)
|
||||
out_of_date = context.c_file_out_of_date(source, output_filename)
|
||||
if (not timestamps) or out_of_date:
|
||||
if verbose:
|
||||
sys.stderr.write("Compiling %s\n" % source)
|
||||
result = run_pipeline(source, options,
|
||||
full_module_name=options.module_name,
|
||||
context=context)
|
||||
results.add(source, result)
|
||||
# Compiling multiple sources in one context doesn't quite
|
||||
# work properly yet.
|
||||
context = None
|
||||
processed.add(source)
|
||||
return results
|
||||
|
||||
|
||||
def compile(source, options = None, full_module_name = None, **kwds):
|
||||
"""
|
||||
compile(source [, options], [, <option> = <value>]...)
|
||||
|
||||
Compile one or more Pyrex implementation files, with optional timestamp
|
||||
checking and recursing on dependencies. The source argument may be a string
|
||||
or a sequence of strings. If it is a string and no recursion or timestamp
|
||||
checking is requested, a CompilationResult is returned, otherwise a
|
||||
CompilationResultSet is returned.
|
||||
"""
|
||||
options = CompilationOptions(defaults = options, **kwds)
|
||||
if isinstance(source, basestring) and not options.timestamps:
|
||||
return compile_single(source, options, full_module_name)
|
||||
else:
|
||||
return compile_multiple(source, options)
|
||||
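# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): driving this entry
# point programmatically. Hedged usage only; for real builds the
# higher-level cythonize() API in Cython.Build is the usual route.
#
#   from Cython.Compiler.Main import CompilationOptions, compile, default_options
#
#   options = CompilationOptions(default_options, language_level=3)
#   result = compile("example.pyx", options)  # one file -> CompilationResult
#   if result.num_errors == 0:
#       print("wrote", result.c_file)
# ----------------------------------------------------------------------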
|
||||
|
||||
@Utils.cached_function
|
||||
def search_include_directories(dirs, qualified_name, suffix, pos, include=False):
|
||||
"""
|
||||
Search the list of include directories for the given file name.
|
||||
|
||||
If a source file position is given, first searches the directory
|
||||
containing that file. Returns None if not found, but does not
|
||||
report an error.
|
||||
|
||||
The 'include' option will disable package dereferencing.
|
||||
"""
|
||||
|
||||
if pos:
|
||||
file_desc = pos[0]
|
||||
if not isinstance(file_desc, FileSourceDescriptor):
|
||||
raise RuntimeError("Only file sources for code supported")
|
||||
if include:
|
||||
dirs = (os.path.dirname(file_desc.filename),) + dirs
|
||||
else:
|
||||
dirs = (Utils.find_root_package_dir(file_desc.filename),) + dirs
|
||||
|
||||
dotted_filename = qualified_name
|
||||
if suffix:
|
||||
dotted_filename += suffix
|
||||
|
||||
if not include:
|
||||
names = qualified_name.split('.')
|
||||
package_names = tuple(names[:-1])
|
||||
module_name = names[-1]
|
||||
module_filename = module_name + suffix
|
||||
package_filename = "__init__" + suffix
|
||||
|
||||
for dirname in dirs:
|
||||
path = os.path.join(dirname, dotted_filename)
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
|
||||
if not include:
|
||||
package_dir = Utils.check_package_dir(dirname, package_names)
|
||||
if package_dir is not None:
|
||||
path = os.path.join(package_dir, module_filename)
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
path = os.path.join(package_dir, module_name,
|
||||
package_filename)
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
return None
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
#
|
||||
# Main command-line entry point
|
||||
#
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
def setuptools_main():
|
||||
return main(command_line = 1)
|
||||
|
||||
|
||||
def main(command_line = 0):
|
||||
args = sys.argv[1:]
|
||||
any_failures = 0
|
||||
if command_line:
|
||||
from .CmdLine import parse_command_line
|
||||
options, sources = parse_command_line(args)
|
||||
else:
|
||||
options = CompilationOptions(default_options)
|
||||
sources = args
|
||||
|
||||
if options.show_version:
|
||||
sys.stderr.write("Cython version %s\n" % version)
|
||||
if options.working_path!="":
|
||||
os.chdir(options.working_path)
|
||||
try:
|
||||
result = compile(sources, options)
|
||||
if result.num_errors > 0:
|
||||
any_failures = 1
|
||||
except (EnvironmentError, PyrexError) as e:
|
||||
sys.stderr.write(str(e) + '\n')
|
||||
any_failures = 1
|
||||
if any_failures:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
#
|
||||
# Set the default options depending on the platform
|
||||
#
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
default_options = dict(
|
||||
show_version = 0,
|
||||
use_listing_file = 0,
|
||||
errors_to_stderr = 1,
|
||||
cplus = 0,
|
||||
output_file = None,
|
||||
depfile = None,
|
||||
annotate = None,
|
||||
annotate_coverage_xml = None,
|
||||
generate_pxi = 0,
|
||||
capi_reexport_cincludes = 0,
|
||||
working_path = "",
|
||||
timestamps = None,
|
||||
verbose = 0,
|
||||
quiet = 0,
|
||||
compiler_directives = {},
|
||||
embedded_metadata = {},
|
||||
evaluate_tree_assertions = False,
|
||||
emit_linenums = False,
|
||||
relative_path_in_code_position_comments = True,
|
||||
c_line_in_traceback = True,
|
||||
language_level = None, # warn but default to 2
|
||||
formal_grammar = False,
|
||||
gdb_debug = False,
|
||||
compile_time_env = None,
|
||||
common_utility_include_dir = None,
|
||||
output_dir=None,
|
||||
build_dir=None,
|
||||
cache=None,
|
||||
create_extension=None,
|
||||
module_name=None,
|
||||
np_pythran=False
|
||||
)
|
|
@ -0,0 +1,858 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .Errors import CompileError, error
|
||||
from . import ExprNodes
|
||||
from .ExprNodes import IntNode, NameNode, AttributeNode
|
||||
from . import Options
|
||||
from .Code import UtilityCode, TempitaUtilityCode
|
||||
from .UtilityCode import CythonUtilityCode
|
||||
from . import Buffer
|
||||
from . import PyrexTypes
|
||||
from . import ModuleNode
|
||||
|
||||
START_ERR = "Start must not be given."
|
||||
STOP_ERR = "Axis specification only allowed in the 'step' slot."
|
||||
STEP_ERR = "Step must be omitted, 1, or a valid specifier."
|
||||
BOTH_CF_ERR = "Cannot specify an array that is both C and Fortran contiguous."
|
||||
INVALID_ERR = "Invalid axis specification."
|
||||
NOT_CIMPORTED_ERR = "Variable was not cimported from cython.view"
|
||||
EXPR_ERR = "no expressions allowed in axis spec, only names and literals."
|
||||
CF_ERR = "Invalid axis specification for a C/Fortran contiguous array."
|
||||
ERR_UNINITIALIZED = ("Cannot check if memoryview %s is initialized without the "
|
||||
"GIL, consider using initializedcheck(False)")
|
||||
|
||||
|
||||
def concat_flags(*flags):
|
||||
return "(%s)" % "|".join(flags)
|
||||
|
||||
|
||||
format_flag = "PyBUF_FORMAT"
|
||||
|
||||
memview_c_contiguous = "(PyBUF_C_CONTIGUOUS | PyBUF_FORMAT)"
|
||||
memview_f_contiguous = "(PyBUF_F_CONTIGUOUS | PyBUF_FORMAT)"
|
||||
memview_any_contiguous = "(PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT)"
|
||||
memview_full_access = "PyBUF_FULL_RO"
|
||||
#memview_strided_access = "PyBUF_STRIDED_RO"
|
||||
memview_strided_access = "PyBUF_RECORDS_RO"
|
||||
|
||||
MEMVIEW_DIRECT = '__Pyx_MEMVIEW_DIRECT'
|
||||
MEMVIEW_PTR = '__Pyx_MEMVIEW_PTR'
|
||||
MEMVIEW_FULL = '__Pyx_MEMVIEW_FULL'
|
||||
MEMVIEW_CONTIG = '__Pyx_MEMVIEW_CONTIG'
|
||||
MEMVIEW_STRIDED = '__Pyx_MEMVIEW_STRIDED'
|
||||
MEMVIEW_FOLLOW = '__Pyx_MEMVIEW_FOLLOW'
|
||||
|
||||
_spec_to_const = {
|
||||
'direct' : MEMVIEW_DIRECT,
|
||||
'ptr' : MEMVIEW_PTR,
|
||||
'full' : MEMVIEW_FULL,
|
||||
'contig' : MEMVIEW_CONTIG,
|
||||
'strided': MEMVIEW_STRIDED,
|
||||
'follow' : MEMVIEW_FOLLOW,
|
||||
}
|
||||
|
||||
_spec_to_abbrev = {
|
||||
'direct' : 'd',
|
||||
'ptr' : 'p',
|
||||
'full' : 'f',
|
||||
'contig' : 'c',
|
||||
'strided' : 's',
|
||||
'follow' : '_',
|
||||
}
|
||||
|
||||
memslice_entry_init = "{ 0, 0, { 0 }, { 0 }, { 0 } }"
|
||||
|
||||
memview_name = u'memoryview'
|
||||
memview_typeptr_cname = '__pyx_memoryview_type'
|
||||
memview_objstruct_cname = '__pyx_memoryview_obj'
|
||||
memviewslice_cname = u'__Pyx_memviewslice'
|
||||
|
||||
|
||||
def put_init_entry(mv_cname, code):
|
||||
code.putln("%s.data = NULL;" % mv_cname)
|
||||
code.putln("%s.memview = NULL;" % mv_cname)
|
||||
|
||||
|
||||
#def axes_to_str(axes):
|
||||
# return "".join([access[0].upper()+packing[0] for (access, packing) in axes])
|
||||
|
||||
|
||||
def put_acquire_memoryviewslice(lhs_cname, lhs_type, lhs_pos, rhs, code,
|
||||
have_gil=False, first_assignment=True):
|
||||
"We can avoid decreffing the lhs if we know it is the first assignment"
|
||||
assert rhs.type.is_memoryviewslice
|
||||
|
||||
pretty_rhs = rhs.result_in_temp() or rhs.is_simple()
|
||||
if pretty_rhs:
|
||||
rhstmp = rhs.result()
|
||||
else:
|
||||
rhstmp = code.funcstate.allocate_temp(lhs_type, manage_ref=False)
|
||||
code.putln("%s = %s;" % (rhstmp, rhs.result_as(lhs_type)))
|
||||
|
||||
# Allow uninitialized assignment
|
||||
#code.putln(code.put_error_if_unbound(lhs_pos, rhs.entry))
|
||||
put_assign_to_memviewslice(lhs_cname, rhs, rhstmp, lhs_type, code,
|
||||
have_gil=have_gil, first_assignment=first_assignment)
|
||||
|
||||
if not pretty_rhs:
|
||||
code.funcstate.release_temp(rhstmp)
|
||||
|
||||
|
||||
def put_assign_to_memviewslice(lhs_cname, rhs, rhs_cname, memviewslicetype, code,
|
||||
have_gil=False, first_assignment=False):
|
||||
if not first_assignment:
|
||||
code.put_xdecref_memoryviewslice(lhs_cname, have_gil=have_gil)
|
||||
|
||||
if not rhs.result_in_temp():
|
||||
rhs.make_owned_memoryviewslice(code)
|
||||
|
||||
code.putln("%s = %s;" % (lhs_cname, rhs_cname))
|
||||
|
||||
|
||||
def get_buf_flags(specs):
|
||||
is_c_contig, is_f_contig = is_cf_contig(specs)
|
||||
|
||||
if is_c_contig:
|
||||
return memview_c_contiguous
|
||||
elif is_f_contig:
|
||||
return memview_f_contiguous
|
||||
|
||||
access, packing = zip(*specs)
|
||||
|
||||
if 'full' in access or 'ptr' in access:
|
||||
return memview_full_access
|
||||
else:
|
||||
return memview_strided_access
|
||||
|
||||
|
||||
def insert_newaxes(memoryviewtype, n):
|
||||
axes = [('direct', 'strided')] * n
|
||||
axes.extend(memoryviewtype.axes)
|
||||
return PyrexTypes.MemoryViewSliceType(memoryviewtype.dtype, axes)
|
||||
|
||||
|
||||
def broadcast_types(src, dst):
|
||||
n = abs(src.ndim - dst.ndim)
|
||||
if src.ndim < dst.ndim:
|
||||
return insert_newaxes(src, n), dst
|
||||
else:
|
||||
return src, insert_newaxes(dst, n)
|
||||
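# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): what insert_newaxes()
# and broadcast_types() model. The lower-dimensional operand gains
# leading length-1 axes until both sides have the same ndim, as in
# NumPy-style broadcasting. A shape-level restatement:
def broadcast_ndims(src_ndim, dst_ndim):
    n = abs(src_ndim - dst_ndim)
    if src_ndim < dst_ndim:
        return src_ndim + n, dst_ndim
    return src_ndim, dst_ndim + n

assert broadcast_ndims(1, 3) == (3, 3)
assert broadcast_ndims(4, 2) == (4, 4)
# ----------------------------------------------------------------------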
|
||||
|
||||
def valid_memslice_dtype(dtype, i=0):
|
||||
"""
|
||||
Return whether type dtype can be used as the base type of a
|
||||
memoryview slice.
|
||||
|
||||
We support structs, numeric types and objects
|
||||
"""
|
||||
if dtype.is_complex and dtype.real_type.is_int:
|
||||
return False
|
||||
|
||||
if dtype is PyrexTypes.c_bint_type:
|
||||
return False
|
||||
|
||||
if dtype.is_struct and dtype.kind == 'struct':
|
||||
for member in dtype.scope.var_entries:
|
||||
if not valid_memslice_dtype(member.type):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
return (
|
||||
dtype.is_error or
|
||||
# Pointers are not valid (yet)
|
||||
# (dtype.is_ptr and valid_memslice_dtype(dtype.base_type)) or
|
||||
(dtype.is_array and i < 8 and
|
||||
valid_memslice_dtype(dtype.base_type, i + 1)) or
|
||||
dtype.is_numeric or
|
||||
dtype.is_pyobject or
|
||||
dtype.is_fused or # accept this as it will be replaced by specializations later
|
||||
(dtype.is_typedef and valid_memslice_dtype(dtype.typedef_base_type))
|
||||
)
|
||||
|
||||
|
||||
class MemoryViewSliceBufferEntry(Buffer.BufferEntry):
|
||||
"""
|
||||
May be used during code generation time to be queried for
|
||||
shape/strides/suboffsets attributes, or to perform indexing or slicing.
|
||||
"""
|
||||
def __init__(self, entry):
|
||||
self.entry = entry
|
||||
self.type = entry.type
|
||||
self.cname = entry.cname
|
||||
|
||||
self.buf_ptr = "%s.data" % self.cname
|
||||
|
||||
dtype = self.entry.type.dtype
|
||||
self.buf_ptr_type = PyrexTypes.CPtrType(dtype)
|
||||
self.init_attributes()
|
||||
|
||||
def get_buf_suboffsetvars(self):
|
||||
return self._for_all_ndim("%s.suboffsets[%d]")
|
||||
|
||||
def get_buf_stridevars(self):
|
||||
return self._for_all_ndim("%s.strides[%d]")
|
||||
|
||||
def get_buf_shapevars(self):
|
||||
return self._for_all_ndim("%s.shape[%d]")
|
||||
|
||||
def generate_buffer_lookup_code(self, code, index_cnames):
|
||||
axes = [(dim, index_cnames[dim], access, packing)
|
||||
for dim, (access, packing) in enumerate(self.type.axes)]
|
||||
return self._generate_buffer_lookup_code(code, axes)
|
||||
|
||||
def _generate_buffer_lookup_code(self, code, axes, cast_result=True):
|
||||
"""
|
||||
Generate a single expression that indexes the memory view slice
|
||||
in each dimension.
|
||||
"""
|
||||
bufp = self.buf_ptr
|
||||
type_decl = self.type.dtype.empty_declaration_code()
|
||||
|
||||
for dim, index, access, packing in axes:
|
||||
shape = "%s.shape[%d]" % (self.cname, dim)
|
||||
stride = "%s.strides[%d]" % (self.cname, dim)
|
||||
suboffset = "%s.suboffsets[%d]" % (self.cname, dim)
|
||||
|
||||
flag = get_memoryview_flag(access, packing)
|
||||
|
||||
if flag in ("generic", "generic_contiguous"):
|
||||
# Note: we cannot do cast tricks to avoid stride multiplication
|
||||
# for generic_contiguous, as we may have to do (dtype *)
|
||||
# or (dtype **) arithmetic; we won't know which unless
|
||||
# we check suboffsets
|
||||
code.globalstate.use_utility_code(memviewslice_index_helpers)
|
||||
bufp = ('__pyx_memviewslice_index_full(%s, %s, %s, %s)' %
|
||||
(bufp, index, stride, suboffset))
|
||||
|
||||
elif flag == "indirect":
|
||||
bufp = "(%s + %s * %s)" % (bufp, index, stride)
|
||||
bufp = ("(*((char **) %s) + %s)" % (bufp, suboffset))
|
||||
|
||||
elif flag == "indirect_contiguous":
|
||||
# Note: we do char ** arithmetic
|
||||
bufp = "(*((char **) %s + %s) + %s)" % (bufp, index, suboffset)
|
||||
|
||||
elif flag == "strided":
|
||||
bufp = "(%s + %s * %s)" % (bufp, index, stride)
|
||||
|
||||
else:
|
||||
assert flag == 'contiguous', flag
|
||||
bufp = '((char *) (((%s *) %s) + %s))' % (type_decl, bufp, index)
|
||||
|
||||
bufp = '( /* dim=%d */ %s )' % (dim, bufp)
|
||||
|
||||
if cast_result:
|
||||
return "((%s *) %s)" % (type_decl, bufp)
|
||||
|
||||
return bufp
|
||||
|
||||
def generate_buffer_slice_code(self, code, indices, dst, have_gil,
|
||||
have_slices, directives):
|
||||
"""
|
||||
Slice a memoryviewslice.
|
||||
|
||||
indices - list of index nodes. If not a SliceNode or NoneNode,
|
||||
then it must be coercible to Py_ssize_t
|
||||
|
||||
Simply call __pyx_memoryview_slice_memviewslice with the right
|
||||
arguments, unless the dimension is omitted or a bare ':', in which
|
||||
case we copy over the shape/strides/suboffsets attributes directly
|
||||
for that dimension.
|
||||
"""
|
||||
src = self.cname
|
||||
|
||||
code.putln("%(dst)s.data = %(src)s.data;" % locals())
|
||||
code.putln("%(dst)s.memview = %(src)s.memview;" % locals())
|
||||
code.put_incref_memoryviewslice(dst)
|
||||
|
||||
all_dimensions_direct = all(access == 'direct' for access, packing in self.type.axes)
|
||||
suboffset_dim_temp = []
|
||||
|
||||
def get_suboffset_dim():
|
||||
# create global temp variable at request
|
||||
if not suboffset_dim_temp:
|
||||
suboffset_dim = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
|
||||
code.putln("%s = -1;" % suboffset_dim)
|
||||
suboffset_dim_temp.append(suboffset_dim)
|
||||
return suboffset_dim_temp[0]
|
||||
|
||||
dim = -1
|
||||
new_ndim = 0
|
||||
for index in indices:
|
||||
if index.is_none:
|
||||
# newaxis
|
||||
for attrib, value in [('shape', 1), ('strides', 0), ('suboffsets', -1)]:
|
||||
code.putln("%s.%s[%d] = %d;" % (dst, attrib, new_ndim, value))
|
||||
|
||||
new_ndim += 1
|
||||
continue
|
||||
|
||||
dim += 1
|
||||
access, packing = self.type.axes[dim]
|
||||
|
||||
if isinstance(index, ExprNodes.SliceNode):
|
||||
# slice, unspecified dimension, or part of ellipsis
|
||||
d = dict(locals())
|
||||
for s in "start stop step".split():
|
||||
idx = getattr(index, s)
|
||||
have_idx = d['have_' + s] = not idx.is_none
|
||||
d[s] = idx.result() if have_idx else "0"
|
||||
|
||||
if not (d['have_start'] or d['have_stop'] or d['have_step']):
|
||||
# full slice (:), simply copy over the extent, stride
|
||||
# and suboffset. Also update suboffset_dim if needed
|
||||
d['access'] = access
|
||||
util_name = "SimpleSlice"
|
||||
else:
|
||||
util_name = "ToughSlice"
|
||||
d['error_goto'] = code.error_goto(index.pos)
|
||||
|
||||
new_ndim += 1
|
||||
else:
|
||||
# normal index
|
||||
idx = index.result()
|
||||
|
||||
indirect = access != 'direct'
|
||||
if indirect:
|
||||
generic = access == 'full'
|
||||
if new_ndim != 0:
|
||||
return error(index.pos,
|
||||
"All preceding dimensions must be "
|
||||
"indexed and not sliced")
|
||||
|
||||
d = dict(
|
||||
locals(),
|
||||
wraparound=int(directives['wraparound']),
|
||||
boundscheck=int(directives['boundscheck']),
|
||||
)
|
||||
if d['boundscheck']:
|
||||
d['error_goto'] = code.error_goto(index.pos)
|
||||
util_name = "SliceIndex"
|
||||
|
||||
_, impl = TempitaUtilityCode.load_as_string(util_name, "MemoryView_C.c", context=d)
|
||||
code.put(impl)
|
||||
|
||||
if suboffset_dim_temp:
|
||||
code.funcstate.release_temp(suboffset_dim_temp[0])
|
||||
|
||||
|
||||
def empty_slice(pos):
|
||||
none = ExprNodes.NoneNode(pos)
|
||||
return ExprNodes.SliceNode(pos, start=none,
|
||||
stop=none, step=none)
|
||||
|
||||
|
||||
def unellipsify(indices, ndim):
|
||||
result = []
|
||||
seen_ellipsis = False
|
||||
have_slices = False
|
||||
|
||||
newaxes = [newaxis for newaxis in indices if newaxis.is_none]
|
||||
n_indices = len(indices) - len(newaxes)
|
||||
|
||||
for index in indices:
|
||||
if isinstance(index, ExprNodes.EllipsisNode):
|
||||
have_slices = True
|
||||
full_slice = empty_slice(index.pos)
|
||||
|
||||
if seen_ellipsis:
|
||||
result.append(full_slice)
|
||||
else:
|
||||
nslices = ndim - n_indices + 1
|
||||
result.extend([full_slice] * nslices)
|
||||
seen_ellipsis = True
|
||||
else:
|
||||
have_slices = have_slices or index.is_slice or index.is_none
|
||||
result.append(index)
|
||||
|
||||
result_length = len(result) - len(newaxes)
|
||||
if result_length < ndim:
|
||||
have_slices = True
|
||||
nslices = ndim - result_length
|
||||
result.extend([empty_slice(indices[-1].pos)] * nslices)
|
||||
|
||||
return have_slices, result, newaxes
|
||||
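# ----------------------------------------------------------------------
# Editor's sketch (not part of the committed file): unellipsify() mirrors
# NumPy indexing rules. An Ellipsis expands to as many full slices as are
# needed to cover every dimension, and None adds an axis without
# consuming one. A value-level model (assumes a single "..." entry, which
# is what NumPy permits):
def expand_ellipsis(indices, ndim):
    n_consuming = sum(1 for i in indices if i not in ("...", None))
    out = []
    for i in indices:
        if i == "...":
            out.extend([":"] * (ndim - n_consuming))
        else:
            out.append(i)
    return out

assert expand_ellipsis(["...", 0], ndim=3) == [":", ":", 0]
assert expand_ellipsis([0, None, "..."], ndim=3) == [0, None, ":", ":"]
# ----------------------------------------------------------------------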
|
||||
|
||||
def get_memoryview_flag(access, packing):
|
||||
if access == 'full' and packing in ('strided', 'follow'):
|
||||
return 'generic'
|
||||
elif access == 'full' and packing == 'contig':
|
||||
return 'generic_contiguous'
|
||||
elif access == 'ptr' and packing in ('strided', 'follow'):
|
||||
return 'indirect'
|
||||
elif access == 'ptr' and packing == 'contig':
|
||||
return 'indirect_contiguous'
|
||||
elif access == 'direct' and packing in ('strided', 'follow'):
|
||||
return 'strided'
|
||||
else:
|
||||
assert (access, packing) == ('direct', 'contig'), (access, packing)
|
||||
return 'contiguous'
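
# For example, mirroring the branches above:
#     get_memoryview_flag('direct', 'contig')  -> 'contiguous'
#     get_memoryview_flag('direct', 'strided') -> 'strided'
#     get_memoryview_flag('full', 'follow')    -> 'generic'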


def get_is_contig_func_name(contig_type, ndim):
    assert contig_type in ('C', 'F')
    return "__pyx_memviewslice_is_contig_%s%d" % (contig_type, ndim)


def get_is_contig_utility(contig_type, ndim):
    assert contig_type in ('C', 'F')
    C = dict(context, ndim=ndim, contig_type=contig_type)
    utility = load_memview_c_utility("MemviewSliceCheckContig", C, requires=[is_contig_utility])
    return utility


def slice_iter(slice_type, slice_result, ndim, code):
    if slice_type.is_c_contig or slice_type.is_f_contig:
        return ContigSliceIter(slice_type, slice_result, ndim, code)
    else:
        return StridedSliceIter(slice_type, slice_result, ndim, code)


class SliceIter(object):
    def __init__(self, slice_type, slice_result, ndim, code):
        self.slice_type = slice_type
        self.slice_result = slice_result
        self.code = code
        self.ndim = ndim


class ContigSliceIter(SliceIter):
    def start_loops(self):
        code = self.code
        code.begin_block()

        type_decl = self.slice_type.dtype.empty_declaration_code()

        total_size = ' * '.join("%s.shape[%d]" % (self.slice_result, i)
                                for i in range(self.ndim))
        code.putln("Py_ssize_t __pyx_temp_extent = %s;" % total_size)
        code.putln("Py_ssize_t __pyx_temp_idx;")
        code.putln("%s *__pyx_temp_pointer = (%s *) %s.data;" % (
            type_decl, type_decl, self.slice_result))
        code.putln("for (__pyx_temp_idx = 0; "
                   "__pyx_temp_idx < __pyx_temp_extent; "
                   "__pyx_temp_idx++) {")

        return "__pyx_temp_pointer"

    def end_loops(self):
        self.code.putln("__pyx_temp_pointer += 1;")
        self.code.putln("}")
        self.code.end_block()


class StridedSliceIter(SliceIter):
    def start_loops(self):
        code = self.code
        code.begin_block()

        for i in range(self.ndim):
            t = i, self.slice_result, i
            code.putln("Py_ssize_t __pyx_temp_extent_%d = %s.shape[%d];" % t)
            code.putln("Py_ssize_t __pyx_temp_stride_%d = %s.strides[%d];" % t)
            code.putln("char *__pyx_temp_pointer_%d;" % i)
            code.putln("Py_ssize_t __pyx_temp_idx_%d;" % i)

        code.putln("__pyx_temp_pointer_0 = %s.data;" % self.slice_result)

        for i in range(self.ndim):
            if i > 0:
                code.putln("__pyx_temp_pointer_%d = __pyx_temp_pointer_%d;" % (i, i - 1))

            code.putln("for (__pyx_temp_idx_%d = 0; "
                       "__pyx_temp_idx_%d < __pyx_temp_extent_%d; "
                       "__pyx_temp_idx_%d++) {" % (i, i, i, i))

        return "__pyx_temp_pointer_%d" % (self.ndim - 1)

    def end_loops(self):
        code = self.code
        for i in range(self.ndim - 1, -1, -1):
            code.putln("__pyx_temp_pointer_%d += __pyx_temp_stride_%d;" % (i, i))
            code.putln("}")

        code.end_block()
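
# For reference, StridedSliceIter emits C of roughly this shape for ndim=2
# (a sketch reconstructed from the putln calls above; 'src' stands in for the
# slice result expression):
#
#     {
#         Py_ssize_t __pyx_temp_extent_0 = src.shape[0];
#         Py_ssize_t __pyx_temp_stride_0 = src.strides[0];
#         char *__pyx_temp_pointer_0;
#         Py_ssize_t __pyx_temp_idx_0;
#         /* ... the same four declarations for dimension 1 ... */
#         __pyx_temp_pointer_0 = src.data;
#         for (__pyx_temp_idx_0 = 0; __pyx_temp_idx_0 < __pyx_temp_extent_0; __pyx_temp_idx_0++) {
#             __pyx_temp_pointer_1 = __pyx_temp_pointer_0;
#             for (__pyx_temp_idx_1 = 0; __pyx_temp_idx_1 < __pyx_temp_extent_1; __pyx_temp_idx_1++) {
#                 /* loop body dereferences __pyx_temp_pointer_1 */
#                 __pyx_temp_pointer_1 += __pyx_temp_stride_1;
#             }
#             __pyx_temp_pointer_0 += __pyx_temp_stride_0;
#         }
#     }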


def copy_c_or_fortran_cname(memview):
    if memview.is_c_contig:
        c_or_f = 'c'
    else:
        c_or_f = 'f'

    return "__pyx_memoryview_copy_slice_%s_%s" % (
        memview.specialization_suffix(), c_or_f)


def get_copy_new_utility(pos, from_memview, to_memview):
    if (from_memview.dtype != to_memview.dtype and
            not (from_memview.dtype.is_const and from_memview.dtype.const_base_type == to_memview.dtype)):
        error(pos, "dtypes must be the same!")
        return
    if len(from_memview.axes) != len(to_memview.axes):
        error(pos, "number of dimensions must be same")
        return
    if not (to_memview.is_c_contig or to_memview.is_f_contig):
        error(pos, "to_memview must be c or f contiguous.")
        return

    for (access, packing) in from_memview.axes:
        if access != 'direct':
            error(pos, "cannot handle 'full' or 'ptr' access at this time.")
            return

    if to_memview.is_c_contig:
        mode = 'c'
        contig_flag = memview_c_contiguous
    elif to_memview.is_f_contig:
        mode = 'fortran'
        contig_flag = memview_f_contiguous

    return load_memview_c_utility(
        "CopyContentsUtility",
        context=dict(
            context,
            mode=mode,
            dtype_decl=to_memview.dtype.empty_declaration_code(),
            contig_flag=contig_flag,
            ndim=to_memview.ndim,
            func_cname=copy_c_or_fortran_cname(to_memview),
            dtype_is_object=int(to_memview.dtype.is_pyobject)),
        requires=[copy_contents_new_utility])


def get_axes_specs(env, axes):
    '''
    get_axes_specs(env, axes) -> list of (access, packing) specs for each axis.
    access is one of 'full', 'ptr' or 'direct'
    packing is one of 'contig', 'strided' or 'follow'
    '''

    cythonscope = env.global_scope().context.cython_scope
    cythonscope.load_cythonscope()
    viewscope = cythonscope.viewscope

    access_specs = tuple([viewscope.lookup(name)
                          for name in ('full', 'direct', 'ptr')])
    packing_specs = tuple([viewscope.lookup(name)
                           for name in ('contig', 'strided', 'follow')])

    is_f_contig, is_c_contig = False, False
    default_access, default_packing = 'direct', 'strided'
    cf_access, cf_packing = default_access, 'follow'

    axes_specs = []
    # analyse all axes.
    for idx, axis in enumerate(axes):
        if not axis.start.is_none:
            raise CompileError(axis.start.pos, START_ERR)

        if not axis.stop.is_none:
            raise CompileError(axis.stop.pos, STOP_ERR)

        if axis.step.is_none:
            axes_specs.append((default_access, default_packing))

        elif isinstance(axis.step, IntNode):
            # the packing for the ::1 axis is contiguous,
            # all others are cf_packing.
            if axis.step.compile_time_value(env) != 1:
                raise CompileError(axis.step.pos, STEP_ERR)

            axes_specs.append((cf_access, 'cfcontig'))

        elif isinstance(axis.step, (NameNode, AttributeNode)):
            entry = _get_resolved_spec(env, axis.step)
            if entry.name in view_constant_to_access_packing:
                axes_specs.append(view_constant_to_access_packing[entry.name])
            else:
                raise CompileError(axis.step.pos, INVALID_ERR)

        else:
            raise CompileError(axis.step.pos, INVALID_ERR)

    # First, find out if we have a ::1 somewhere
    contig_dim = 0
    is_contig = False
    for idx, (access, packing) in enumerate(axes_specs):
        if packing == 'cfcontig':
            if is_contig:
                raise CompileError(axis.step.pos, BOTH_CF_ERR)

            contig_dim = idx
            axes_specs[idx] = (access, 'contig')
            is_contig = True

    if is_contig:
        # We have a ::1 somewhere, see if we're C or Fortran contiguous
        if contig_dim == len(axes) - 1:
            is_c_contig = True
        else:
            is_f_contig = True

            if contig_dim and not axes_specs[contig_dim - 1][0] in ('full', 'ptr'):
                raise CompileError(axes[contig_dim].pos,
                                   "Fortran contiguous specifier must follow an indirect dimension")

        if is_c_contig:
            # Contiguous in the last dimension, find the last indirect dimension
            contig_dim = -1
            for idx, (access, packing) in enumerate(reversed(axes_specs)):
                if access in ('ptr', 'full'):
                    contig_dim = len(axes) - idx - 1

        # Replace 'strided' with 'follow' for any dimension following the last
        # indirect dimension, the first dimension or the dimension following
        # the ::1.
        #               int[::indirect, ::1, :, :]
        #                                    ^  ^
        #               int[::indirect, :, :, ::1]
        #                               ^  ^
        start = contig_dim + 1
        stop = len(axes) - is_c_contig
        for idx, (access, packing) in enumerate(axes_specs[start:stop]):
            idx = contig_dim + 1 + idx
            if access != 'direct':
                raise CompileError(axes[idx].pos,
                                   "Indirect dimension may not follow "
                                   "Fortran contiguous dimension")
            if packing == 'contig':
                raise CompileError(axes[idx].pos,
                                   "Dimension may not be contiguous")
            axes_specs[idx] = (access, cf_packing)

        if is_c_contig:
            # For C contiguity, we need to fix the 'contig' dimension
            # after the loop
            a, p = axes_specs[-1]
            axes_specs[-1] = a, 'contig'

    validate_axes_specs([axis.start.pos for axis in axes],
                        axes_specs,
                        is_c_contig,
                        is_f_contig)

    return axes_specs
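
# Illustrative results, obtained by tracing the code above (':' maps to the
# defaults, '::1' to the contiguous axis):
#     int[:, :]   -> [('direct', 'strided'), ('direct', 'strided')]
#     int[:, ::1] -> [('direct', 'follow'), ('direct', 'contig')]    # C contiguous
#     int[::1, :] -> [('direct', 'contig'), ('direct', 'follow')]    # Fortran contiguous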


def validate_axes(pos, axes):
    if len(axes) >= Options.buffer_max_dims:
        error(pos, "More dimensions than the maximum number"
                   " of buffer dimensions were used.")
        return False

    return True


def is_cf_contig(specs):
    is_c_contig = is_f_contig = False

    if len(specs) == 1 and specs == [('direct', 'contig')]:
        is_c_contig = True

    elif (specs[-1] == ('direct', 'contig') and
          all(axis == ('direct', 'follow') for axis in specs[:-1])):
        # c_contiguous: 'follow', 'follow', ..., 'follow', 'contig'
        is_c_contig = True

    elif (len(specs) > 1 and
          specs[0] == ('direct', 'contig') and
          all(axis == ('direct', 'follow') for axis in specs[1:])):
        # f_contiguous: 'contig', 'follow', 'follow', ..., 'follow'
        is_f_contig = True

    return is_c_contig, is_f_contig
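
# e.g.:
#     is_cf_contig([('direct', 'contig')])                        -> (True, False)
#     is_cf_contig([('direct', 'follow'), ('direct', 'contig')])  -> (True, False)
#     is_cf_contig([('direct', 'contig'), ('direct', 'follow')])  -> (False, True)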


def get_mode(specs):
    is_c_contig, is_f_contig = is_cf_contig(specs)

    if is_c_contig:
        return 'c'
    elif is_f_contig:
        return 'fortran'

    for access, packing in specs:
        if access in ('ptr', 'full'):
            return 'full'

    return 'strided'
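
# e.g.:
#     get_mode([('direct', 'follow'), ('direct', 'contig')])   -> 'c'
#     get_mode([('ptr', 'strided'), ('direct', 'strided')])    -> 'full'
#     get_mode([('direct', 'strided'), ('direct', 'strided')]) -> 'strided'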


view_constant_to_access_packing = {
    'generic': ('full', 'strided'),
    'strided': ('direct', 'strided'),
    'indirect': ('ptr', 'strided'),
    'generic_contiguous': ('full', 'contig'),
    'contiguous': ('direct', 'contig'),
    'indirect_contiguous': ('ptr', 'contig'),
}

def validate_axes_specs(positions, specs, is_c_contig, is_f_contig):

    packing_specs = ('contig', 'strided', 'follow')
    access_specs = ('direct', 'ptr', 'full')

    # is_c_contig, is_f_contig = is_cf_contig(specs)

    has_contig = has_follow = has_strided = has_generic_contig = False

    last_indirect_dimension = -1
    for idx, (access, packing) in enumerate(specs):
        if access == 'ptr':
            last_indirect_dimension = idx

    for idx, (pos, (access, packing)) in enumerate(zip(positions, specs)):

        if not (access in access_specs and
                packing in packing_specs):
            raise CompileError(pos, "Invalid axes specification.")

        if packing == 'strided':
            has_strided = True
        elif packing == 'contig':
            if has_contig:
                raise CompileError(pos, "Only one direct contiguous "
                                        "axis may be specified.")

            valid_contig_dims = last_indirect_dimension + 1, len(specs) - 1
            if idx not in valid_contig_dims and access != 'ptr':
                if last_indirect_dimension + 1 != len(specs) - 1:
                    dims = "dimensions %d and %d" % valid_contig_dims
                else:
                    dims = "dimension %d" % valid_contig_dims[0]

                raise CompileError(pos, "Only %s may be contiguous and direct" % dims)

            has_contig = access != 'ptr'
        elif packing == 'follow':
            if has_strided:
                raise CompileError(pos, "A memoryview cannot have both follow and strided axis specifiers.")
            if not (is_c_contig or is_f_contig):
                raise CompileError(pos, "Invalid use of the follow specifier.")

        if access in ('ptr', 'full'):
            has_strided = False

def _get_resolved_spec(env, spec):
    # spec must be a NameNode or an AttributeNode
    if isinstance(spec, NameNode):
        return _resolve_NameNode(env, spec)
    elif isinstance(spec, AttributeNode):
        return _resolve_AttributeNode(env, spec)
    else:
        raise CompileError(spec.pos, INVALID_ERR)

def _resolve_NameNode(env, node):
    try:
        resolved_name = env.lookup(node.name).name
    except AttributeError:
        raise CompileError(node.pos, INVALID_ERR)

    viewscope = env.global_scope().context.cython_scope.viewscope
    entry = viewscope.lookup(resolved_name)
    if entry is None:
        raise CompileError(node.pos, NOT_CIMPORTED_ERR)

    return entry

def _resolve_AttributeNode(env, node):
    path = []
    while isinstance(node, AttributeNode):
        path.insert(0, node.attribute)
        node = node.obj
    if isinstance(node, NameNode):
        path.insert(0, node.name)
    else:
        raise CompileError(node.pos, EXPR_ERR)
    modnames = path[:-1]
    # must be at least 1 module name, o/w not an AttributeNode.
    assert modnames

    scope = env
    for modname in modnames:
        mod = scope.lookup(modname)
        if not mod or not mod.as_module:
            raise CompileError(
                node.pos, "undeclared name not builtin: %s" % modname)
        scope = mod.as_module

    entry = scope.lookup(path[-1])
    if not entry:
        raise CompileError(node.pos, "No such attribute '%s'" % path[-1])

    return entry
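
# e.g. for an axis spec written as ``cython.view.contiguous``, the walk above
# collects path = ['cython', 'view', 'contiguous'], resolves the module scope
# for each of 'cython' and 'view', and returns the scope entry for 'contiguous'.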

#
### Utility loading
#

def load_memview_cy_utility(util_code_name, context=None, **kwargs):
    return CythonUtilityCode.load(util_code_name, "MemoryView.pyx",
                                  context=context, **kwargs)

def load_memview_c_utility(util_code_name, context=None, **kwargs):
    if context is None:
        return UtilityCode.load(util_code_name, "MemoryView_C.c", **kwargs)
    else:
        return TempitaUtilityCode.load(util_code_name, "MemoryView_C.c",
                                       context=context, **kwargs)

def use_cython_array_utility_code(env):
    cython_scope = env.global_scope().context.cython_scope
    cython_scope.load_cythonscope()
    cython_scope.viewscope.lookup('array_cwrapper').used = True

context = {
    'memview_struct_name': memview_objstruct_cname,
    'max_dims': Options.buffer_max_dims,
    'memviewslice_name': memviewslice_cname,
    'memslice_init': memslice_entry_init,
}
memviewslice_declare_code = load_memview_c_utility(
    "MemviewSliceStruct",
    context=context,
    requires=[])

atomic_utility = load_memview_c_utility("Atomics", context)

memviewslice_init_code = load_memview_c_utility(
    "MemviewSliceInit",
    context=dict(context, BUF_MAX_NDIMS=Options.buffer_max_dims),
    requires=[memviewslice_declare_code,
              atomic_utility],
)

memviewslice_index_helpers = load_memview_c_utility("MemviewSliceIndex")

typeinfo_to_format_code = load_memview_cy_utility(
    "BufferFormatFromTypeInfo", requires=[Buffer._typeinfo_to_format_code])

is_contig_utility = load_memview_c_utility("MemviewSliceIsContig", context)
overlapping_utility = load_memview_c_utility("OverlappingSlices", context)
copy_contents_new_utility = load_memview_c_utility(
    "MemviewSliceCopyTemplate",
    context,
    requires=[],  # require cython_array_utility_code
)

view_utility_code = load_memview_cy_utility(
    "View.MemoryView",
    context=context,
    requires=[Buffer.GetAndReleaseBufferUtilityCode(),
              Buffer.buffer_struct_declare_code,
              Buffer.buffer_formats_declare_code,
              memviewslice_init_code,
              is_contig_utility,
              overlapping_utility,
              copy_contents_new_utility,
              ModuleNode.capsule_utility_code],
)
view_utility_whitelist = ('array', 'memoryview', 'array_cwrapper',
                          'generic', 'strided', 'indirect', 'contiguous',
                          'indirect_contiguous')

memviewslice_declare_code.requires.append(view_utility_code)
copy_contents_new_utility.requires.append(view_utility_code)
3216 kivy_venv/lib/python3.11/site-packages/Cython/Compiler/ModuleNode.py Normal file
File diff suppressed because it is too large
162 kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Naming.py Normal file
@ -0,0 +1,162 @
#
#   C naming conventions
#
#
#   Prefixes for generating C names.
#   Collected here to facilitate ensuring uniqueness.
#

pyrex_prefix = "__pyx_"


codewriter_temp_prefix = pyrex_prefix + "t_"

temp_prefix = u"__cyt_"

builtin_prefix = pyrex_prefix + "builtin_"
arg_prefix = pyrex_prefix + "arg_"
funcdoc_prefix = pyrex_prefix + "doc_"
enum_prefix = pyrex_prefix + "e_"
func_prefix = pyrex_prefix + "f_"
func_prefix_api = pyrex_prefix + "api_f_"
pyfunc_prefix = pyrex_prefix + "pf_"
pywrap_prefix = pyrex_prefix + "pw_"
genbody_prefix = pyrex_prefix + "gb_"
gstab_prefix = pyrex_prefix + "getsets_"
prop_get_prefix = pyrex_prefix + "getprop_"
const_prefix = pyrex_prefix + "k_"
py_const_prefix = pyrex_prefix + "kp_"
label_prefix = pyrex_prefix + "L"
pymethdef_prefix = pyrex_prefix + "mdef_"
method_wrapper_prefix = pyrex_prefix + "specialmethod_"
methtab_prefix = pyrex_prefix + "methods_"
memtab_prefix = pyrex_prefix + "members_"
objstruct_prefix = pyrex_prefix + "obj_"
typeptr_prefix = pyrex_prefix + "ptype_"
prop_set_prefix = pyrex_prefix + "setprop_"
type_prefix = pyrex_prefix + "t_"
typeobj_prefix = pyrex_prefix + "type_"
var_prefix = pyrex_prefix + "v_"
varptr_prefix = pyrex_prefix + "vp_"
varptr_prefix_api = pyrex_prefix + "api_vp_"
wrapperbase_prefix = pyrex_prefix + "wrapperbase_"
pybuffernd_prefix = pyrex_prefix + "pybuffernd_"
pybufferstruct_prefix = pyrex_prefix + "pybuffer_"
vtable_prefix = pyrex_prefix + "vtable_"
vtabptr_prefix = pyrex_prefix + "vtabptr_"
vtabstruct_prefix = pyrex_prefix + "vtabstruct_"
opt_arg_prefix = pyrex_prefix + "opt_args_"
convert_func_prefix = pyrex_prefix + "convert_"
closure_scope_prefix = pyrex_prefix + "scope_"
closure_class_prefix = pyrex_prefix + "scope_struct_"
lambda_func_prefix = pyrex_prefix + "lambda_"
module_is_main = pyrex_prefix + "module_is_main_"
defaults_struct_prefix = pyrex_prefix + "defaults"
dynamic_args_cname = pyrex_prefix + "dynamic_args"

interned_prefixes = {
    'str': pyrex_prefix + "n_",
    'int': pyrex_prefix + "int_",
    'float': pyrex_prefix + "float_",
    'tuple': pyrex_prefix + "tuple_",
    'codeobj': pyrex_prefix + "codeobj_",
    'slice': pyrex_prefix + "slice_",
    'ustring': pyrex_prefix + "ustring_",
    'umethod': pyrex_prefix + "umethod_",
}

ctuple_type_prefix = pyrex_prefix + "ctuple_"
args_cname = pyrex_prefix + "args"
generator_cname = pyrex_prefix + "generator"
sent_value_cname = pyrex_prefix + "sent_value"
pykwdlist_cname = pyrex_prefix + "pyargnames"
obj_base_cname = pyrex_prefix + "base"
builtins_cname = pyrex_prefix + "b"
preimport_cname = pyrex_prefix + "i"
moddict_cname = pyrex_prefix + "d"
dummy_cname = pyrex_prefix + "dummy"
filename_cname = pyrex_prefix + "filename"
modulename_cname = pyrex_prefix + "modulename"
filetable_cname = pyrex_prefix + "f"
intern_tab_cname = pyrex_prefix + "intern_tab"
kwds_cname = pyrex_prefix + "kwds"
lineno_cname = pyrex_prefix + "lineno"
clineno_cname = pyrex_prefix + "clineno"
cfilenm_cname = pyrex_prefix + "cfilenm"
local_tstate_cname = pyrex_prefix + "tstate"
module_cname = pyrex_prefix + "m"
moddoc_cname = pyrex_prefix + "mdoc"
methtable_cname = pyrex_prefix + "methods"
retval_cname = pyrex_prefix + "r"
reqd_kwds_cname = pyrex_prefix + "reqd_kwds"
self_cname = pyrex_prefix + "self"
stringtab_cname = pyrex_prefix + "string_tab"
vtabslot_cname = pyrex_prefix + "vtab"
c_api_tab_cname = pyrex_prefix + "c_api_tab"
gilstate_cname = pyrex_prefix + "state"
skip_dispatch_cname = pyrex_prefix + "skip_dispatch"
empty_tuple = pyrex_prefix + "empty_tuple"
empty_bytes = pyrex_prefix + "empty_bytes"
empty_unicode = pyrex_prefix + "empty_unicode"
print_function = pyrex_prefix + "print"
print_function_kwargs = pyrex_prefix + "print_kwargs"
cleanup_cname = pyrex_prefix + "module_cleanup"
pymoduledef_cname = pyrex_prefix + "moduledef"
pymoduledef_slots_cname = pyrex_prefix + "moduledef_slots"
pymodinit_module_arg = pyrex_prefix + "pyinit_module"
pymodule_create_func_cname = pyrex_prefix + "pymod_create"
pymodule_exec_func_cname = pyrex_prefix + "pymod_exec"
optional_args_cname = pyrex_prefix + "optional_args"
import_star = pyrex_prefix + "import_star"
import_star_set = pyrex_prefix + "import_star_set"
outer_scope_cname = pyrex_prefix + "outer_scope"
cur_scope_cname = pyrex_prefix + "cur_scope"
enc_scope_cname = pyrex_prefix + "enc_scope"
frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp"  # temp variable for quick'n'dirty temping
tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
type_dict_guard_temp = pyrex_prefix + "type_dict_guard"
cython_runtime_cname = pyrex_prefix + "cython_runtime"

global_code_object_cache_find = pyrex_prefix + 'find_code_object'
global_code_object_cache_insert = pyrex_prefix + 'insert_code_object'

genexpr_id_ref = 'genexpr'
freelist_name = 'freelist'
freecount_name = 'freecount'

line_c_macro = "__LINE__"

file_c_macro = "__FILE__"

extern_c_macro = pyrex_prefix.upper() + "EXTERN_C"

exc_type_name = pyrex_prefix + "exc_type"
exc_value_name = pyrex_prefix + "exc_value"
exc_tb_name = pyrex_prefix + "exc_tb"
exc_lineno_name = pyrex_prefix + "exc_lineno"

parallel_exc_type = pyrex_prefix + "parallel_exc_type"
parallel_exc_value = pyrex_prefix + "parallel_exc_value"
parallel_exc_tb = pyrex_prefix + "parallel_exc_tb"
parallel_filename = pyrex_prefix + "parallel_filename"
parallel_lineno = pyrex_prefix + "parallel_lineno"
parallel_clineno = pyrex_prefix + "parallel_clineno"
parallel_why = pyrex_prefix + "parallel_why"

exc_vars = (exc_type_name, exc_value_name, exc_tb_name)

api_name = pyrex_prefix + "capi__"

h_guard_prefix = "__PYX_HAVE__"
api_guard_prefix = "__PYX_HAVE_API__"
api_func_guard = "__PYX_HAVE_API_FUNC_"

PYX_NAN = "__PYX_NAN()"

def py_version_hex(major, minor=0, micro=0, release_level=0, release_serial=0):
    return (major << 24) | (minor << 16) | (micro << 8) | (release_level << 4) | (release_serial)
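
# e.g. hex(py_version_hex(3, 11)) == '0x30b0000', using the same field layout
# as CPython's PY_VERSION_HEX.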
9450 kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Nodes.py Normal file
File diff suppressed because it is too large
4857 kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Optimize.py Normal file
File diff suppressed because it is too large
@ -0,0 +1,552 
#
#  Cython - Compilation-wide options and pragma declarations
#

from __future__ import absolute_import


class ShouldBeFromDirective(object):

    known_directives = []

    def __init__(self, options_name, directive_name=None, disallow=False):
        self.options_name = options_name
        self.directive_name = directive_name or options_name
        self.disallow = disallow
        self.known_directives.append(self)

    def __nonzero__(self):
        self._bad_access()

    def __int__(self):
        self._bad_access()

    def _bad_access(self):
        raise RuntimeError(repr(self))

    def __repr__(self):
        return (
            "Illegal access of '%s' from Options module rather than directive '%s'"
            % (self.options_name, self.directive_name))


"""
The members of this module are documented using autodata in
Cython/docs/src/reference/compilation.rst.
See http://www.sphinx-doc.org/en/master/ext/autodoc.html#directive-autoattribute
for how autodata works.
Descriptions of those members should start with a #:
Don't forget to keep the docs in sync by removing and adding
the members in both this file and the .rst file.
"""

#: Whether or not to include docstrings in the Python extension. If False, the binary size
#: will be smaller, but the ``__doc__`` attribute of any class or function will be an
#: empty string.
docstrings = True

#: Embed the source code position in the docstrings of functions and classes.
embed_pos_in_docstring = False

#: Copy the original source code line by line into C code comments
#: in the generated code file to help with understanding the output.
#: This is also required for coverage analysis.
emit_code_comments = True

# undocumented
pre_import = None

#: Decref global variables in each module on exit for garbage collection.
#: 0: None, 1+: interned objects, 2+: cdef globals, 3+: types objects
#: Mostly for reducing noise in Valgrind as it typically executes at process exit
#: (when all memory will be reclaimed anyway).
#: Note that directly or indirectly executed cleanup code that makes use of global
#: variables or types may no longer be safe when enabling the respective level since
#: there is no guaranteed order in which the (reference counted) objects will
#: be cleaned up. The order can change due to live references and reference cycles.
generate_cleanup_code = False

#: Should tp_clear() set object fields to None instead of clearing them to NULL?
clear_to_none = True

#: Generate an annotated HTML version of the input source files for debugging and optimisation purposes.
#: This has the same effect as the ``annotate`` argument in :func:`cythonize`.
annotate = False

# When annotating source files in HTML, include coverage information from
# this file.
annotate_coverage_xml = None

#: This will abort the compilation on the first error encountered rather than trying
#: to keep going and printing further error messages.
fast_fail = False

#: Turn all warnings into errors.
warning_errors = False

#: Make unknown names an error. Python raises a NameError when
#: encountering unknown names at runtime, whereas this option makes
#: them a compile time error. If you want full Python compatibility,
#: you should disable this option and also 'cache_builtins'.
error_on_unknown_names = True

#: Make uninitialized local variable reference a compile time error.
#: Python raises UnboundLocalError at runtime, whereas this option makes
#: them a compile time error. Note that this option affects only variables
#: of "python object" type.
error_on_uninitialized = True

#: This will convert statements of the form ``for i in range(...)``
#: to ``for i from ...`` when ``i`` is a C integer type, and the direction
#: (i.e. sign of step) can be determined.
#: WARNING: This may change the semantics if the range causes assignment to
#: i to overflow. Specifically, if this option is set, an error will be
#: raised before the loop is entered, whereas without this option the loop
#: will execute until an overflowing value is encountered.
convert_range = True

#: Perform lookups on builtin names only once, at module initialisation
#: time. This will prevent the module from getting imported if a
#: builtin name that it uses cannot be found during initialisation.
#: Default is True.
#: Note that some legacy builtins are automatically remapped
#: from their Python 2 names to their Python 3 names by Cython
#: when building in Python 3.x,
#: so that they do not get in the way even if this option is enabled.
cache_builtins = True

#: Generate branch prediction hints to speed up error handling etc.
gcc_branch_hints = True

#: Enable this to allow one to write ``your_module.foo = ...`` to overwrite the
#: definition of the cpdef function foo, at the cost of an extra dictionary
#: lookup on every call.
#: If this is false it generates only the Python wrapper and no override check.
lookup_module_cpdef = False

#: Whether or not to embed the Python interpreter, for use in making a
#: standalone executable or calling from external libraries.
#: This will provide a C function which initialises the interpreter and
#: executes the body of this module.
#: See `this demo <https://github.com/cython/cython/tree/master/Demos/embed>`_
#: for a concrete example.
#: If true, the initialisation function is the C main() function, but
#: this option can also be set to a non-empty string to provide a function name explicitly.
#: Default is False.
embed = None

# In previous iterations of Cython, globals() gave the first non-Cython module
# globals in the call stack.  Sage relies on this behavior for variable injection.
old_style_globals = ShouldBeFromDirective('old_style_globals')

#: Allows cimporting from a pyx file without a pxd file.
cimport_from_pyx = False

#: Maximum number of dimensions for buffers -- set lower than the number of
#: dimensions in numpy, as slices are passed by value and involve a lot of copying.
buffer_max_dims = 8

#: Number of function closure instances to keep in a freelist (0: no freelists)
closure_freelist_size = 8


def get_directive_defaults():
    # To add an item to this list, all accesses should be changed to use the new
    # directive, and the global option itself should be set to an instance of
    # ShouldBeFromDirective.
    for old_option in ShouldBeFromDirective.known_directives:
        value = globals().get(old_option.options_name)
        assert old_option.directive_name in _directive_defaults
        if not isinstance(value, ShouldBeFromDirective):
            if old_option.disallow:
                raise RuntimeError(
                    "Option '%s' must be set from directive '%s'" % (
                        old_option.options_name, old_option.directive_name))
            else:
                # Warn?
                _directive_defaults[old_option.directive_name] = value
    return _directive_defaults

# Declare compiler directives
_directive_defaults = {
    'boundscheck' : True,
    'nonecheck' : False,
    'initializedcheck' : True,
    'embedsignature' : False,
    'auto_cpdef': False,
    'auto_pickle': None,
    'cdivision': False,  # was True before 0.12
    'cdivision_warnings': False,
    'c_api_binop_methods': True,
    'cpow': True,
    'overflowcheck': False,
    'overflowcheck.fold': True,
    'always_allow_keywords': False,
    'allow_none_for_extension_args': True,
    'wraparound' : True,
    'ccomplex' : False,  # use C99/C++ for complex types and arith
    'callspec' : "",
    'nogil' : False,
    'profile': False,
    'linetrace': False,
    'emit_code_comments': True,  # copy original source code into C code comments
    'annotation_typing': True,  # read type declarations from Python function annotations
    'infer_types': None,
    'infer_types.verbose': False,
    'autotestdict': True,
    'autotestdict.cdef': False,
    'autotestdict.all': False,
    'language_level': None,
    'fast_getattr': False,  # Undocumented until we come up with a better way to handle this everywhere.
    'py2_import': False,  # For backward compatibility of Cython's source code in Py3 source mode
    'preliminary_late_includes_cy28': False,  # Temporary directive in 0.28, to be removed in a later version (see GH#2079).
    'iterable_coroutine': False,  # Make async coroutines backwards compatible with the old asyncio yield-from syntax.
    'c_string_type': 'bytes',
    'c_string_encoding': '',
    'type_version_tag': True,  # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types
    'unraisable_tracebacks': True,
    'old_style_globals': False,
    'np_pythran': False,
    'fast_gil': False,

    # set __file__ and/or __path__ to known source/target path at import time (instead of not having them available)
    'set_initial_path' : None,  # SOURCEFILE or "/full/path/to/module"

    'warn': None,
    'warn.undeclared': False,
    'warn.unreachable': True,
    'warn.maybe_uninitialized': False,
    'warn.unused': False,
    'warn.unused_arg': False,
    'warn.unused_result': False,
    'warn.multiple_declarators': True,

    # optimizations
    'optimize.inline_defnode_calls': True,
    'optimize.unpack_method_calls': True,  # increases code size when True
    'optimize.unpack_method_calls_in_pyinit': False,  # uselessly increases code size when True
    'optimize.use_switch': True,

    # remove unreachable code
    'remove_unreachable': True,

    # control flow debug directives
    'control_flow.dot_output': "",  # Graphviz output filename
    'control_flow.dot_annotate_defs': False,  # Annotate definitions

    # test support
    'test_assert_path_exists' : [],
    'test_fail_if_path_exists' : [],

    # experimental, subject to change
    'binding': None,

    'formal_grammar': False,
}

# Extra warning directives
extra_warnings = {
    'warn.maybe_uninitialized': True,
    'warn.unreachable': True,
    'warn.unused': True,
}


def one_of(*args):
    def validate(name, value):
        if value not in args:
            raise ValueError("%s directive must be one of %s, got '%s'" % (
                name, args, value))
        else:
            return value
    return validate
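
# e.g. one_of('bytes', 'str')('c_string_type', 'str') returns 'str', while
# one_of('bytes', 'str')('c_string_type', 'int') raises the ValueError above.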


def normalise_encoding_name(option_name, encoding):
    """
    >>> normalise_encoding_name('c_string_encoding', 'ascii')
    'ascii'
    >>> normalise_encoding_name('c_string_encoding', 'AsCIi')
    'ascii'
    >>> normalise_encoding_name('c_string_encoding', 'us-ascii')
    'ascii'
    >>> normalise_encoding_name('c_string_encoding', 'utF8')
    'utf8'
    >>> normalise_encoding_name('c_string_encoding', 'utF-8')
    'utf8'
    >>> normalise_encoding_name('c_string_encoding', 'deFAuLT')
    'default'
    >>> normalise_encoding_name('c_string_encoding', 'default')
    'default'
    >>> normalise_encoding_name('c_string_encoding', 'SeriousLyNoSuch--Encoding')
    'SeriousLyNoSuch--Encoding'
    """
    if not encoding:
        return ''
    if encoding.lower() in ('default', 'ascii', 'utf8'):
        return encoding.lower()
    import codecs
    try:
        decoder = codecs.getdecoder(encoding)
    except LookupError:
        return encoding  # may exist at runtime ...
    for name in ('ascii', 'utf8'):
        if codecs.getdecoder(name) == decoder:
            return name
    return encoding


# Override types possibilities above, if needed
directive_types = {
    'language_level': str,  # values can be None/2/3/'3str', where None == 2+warning
    'auto_pickle': bool,
    'locals': dict,
    'final' : bool,  # final cdef classes and methods
    'nogil' : bool,
    'internal' : bool,  # cdef class visibility in the module dict
    'infer_types' : bool,  # values can be True/None/False
    'binding' : bool,
    'cfunc' : None,  # decorators do not take directive value
    'ccall' : None,
    'inline' : None,
    'staticmethod' : None,
    'cclass' : None,
    'no_gc_clear' : bool,
    'no_gc' : bool,
    'returns' : type,
    'exceptval': type,  # actually (type, check=True/False), but has its own parser
    'set_initial_path': str,
    'freelist': int,
    'c_string_type': one_of('bytes', 'bytearray', 'str', 'unicode'),
    'c_string_encoding': normalise_encoding_name,
    'cpow': bool,
}

for key, val in _directive_defaults.items():
    if key not in directive_types:
        directive_types[key] = type(val)

directive_scopes = {  # defaults to available everywhere
    # 'module', 'function', 'class', 'with statement'
    'auto_pickle': ('module', 'cclass'),
    'final' : ('cclass', 'function'),
    'nogil' : ('function', 'with statement'),
    'inline' : ('function',),
    'cfunc' : ('function', 'with statement'),
    'ccall' : ('function', 'with statement'),
    'returns' : ('function',),
    'exceptval' : ('function',),
    'locals' : ('function',),
    'staticmethod' : ('function',),  # FIXME: analysis currently lacks more specific function scope
    'no_gc_clear' : ('cclass',),
    'no_gc' : ('cclass',),
    'internal' : ('cclass',),
    'cclass' : ('class', 'cclass', 'with statement'),
    'autotestdict' : ('module',),
    'autotestdict.all' : ('module',),
    'autotestdict.cdef' : ('module',),
    'set_initial_path' : ('module',),
    'test_assert_path_exists' : ('function', 'class', 'cclass'),
    'test_fail_if_path_exists' : ('function', 'class', 'cclass'),
    'freelist': ('cclass',),
    'emit_code_comments': ('module',),
    'annotation_typing': ('module',),  # FIXME: analysis currently lacks more specific function scope
    # Avoid scope-specific to/from_py_functions for c_string.
    'c_string_type': ('module',),
    'c_string_encoding': ('module',),
    'type_version_tag': ('module', 'cclass'),
    'language_level': ('module',),
    # globals() could conceivably be controlled at a finer granularity,
    # but that would complicate the implementation
    'old_style_globals': ('module',),
    'np_pythran': ('module',),
    'fast_gil': ('module',),
    'iterable_coroutine': ('module', 'function'),
}


def parse_directive_value(name, value, relaxed_bool=False):
    """
    Parses value as an option value for the given name and returns
    the interpreted value. None is returned if the option does not exist.

    >>> print(parse_directive_value('nonexisting', 'asdf asdfd'))
    None
    >>> parse_directive_value('boundscheck', 'True')
    True
    >>> parse_directive_value('boundscheck', 'true')
    Traceback (most recent call last):
       ...
    ValueError: boundscheck directive must be set to True or False, got 'true'

    >>> parse_directive_value('c_string_encoding', 'us-ascii')
    'ascii'
    >>> parse_directive_value('c_string_type', 'str')
    'str'
    >>> parse_directive_value('c_string_type', 'bytes')
    'bytes'
    >>> parse_directive_value('c_string_type', 'bytearray')
    'bytearray'
    >>> parse_directive_value('c_string_type', 'unicode')
    'unicode'
    >>> parse_directive_value('c_string_type', 'unnicode')
    Traceback (most recent call last):
    ValueError: c_string_type directive must be one of ('bytes', 'bytearray', 'str', 'unicode'), got 'unnicode'
    """
    type = directive_types.get(name)
    if not type:
        return None
    orig_value = value
    if type is bool:
        value = str(value)
        if value == 'True':
            return True
        if value == 'False':
            return False
        if relaxed_bool:
            value = value.lower()
            if value in ("true", "yes"):
                return True
            elif value in ("false", "no"):
                return False
        raise ValueError("%s directive must be set to True or False, got '%s'" % (
            name, orig_value))
    elif type is int:
        try:
            return int(value)
        except ValueError:
            raise ValueError("%s directive must be set to an integer, got '%s'" % (
                name, orig_value))
    elif type is str:
        return str(value)
    elif callable(type):
        return type(name, value)
    else:
        assert False


def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
                         current_settings=None):
    """
    Parses a comma-separated list of pragma options. Whitespace
    is not considered.

    >>> parse_directive_list(' ')
    {}
    >>> (parse_directive_list('boundscheck=True') ==
    ... {'boundscheck': True})
    True
    >>> parse_directive_list(' asdf')
    Traceback (most recent call last):
       ...
    ValueError: Expected "=" in option "asdf"
    >>> parse_directive_list('boundscheck=hey')
    Traceback (most recent call last):
       ...
    ValueError: boundscheck directive must be set to True or False, got 'hey'
    >>> parse_directive_list('unknown=True')
    Traceback (most recent call last):
       ...
    ValueError: Unknown option: "unknown"
    >>> warnings = parse_directive_list('warn.all=True')
    >>> len(warnings) > 1
    True
    >>> sum(warnings.values()) == len(warnings)  # all true.
    True
    """
    if current_settings is None:
        result = {}
    else:
        result = current_settings
    for item in s.split(','):
        item = item.strip()
        if not item:
            continue
        if '=' not in item:
            raise ValueError('Expected "=" in option "%s"' % item)
        name, value = [s.strip() for s in item.strip().split('=', 1)]
        if name not in _directive_defaults:
            found = False
            if name.endswith('.all'):
                prefix = name[:-3]
                for directive in _directive_defaults:
                    if directive.startswith(prefix):
                        found = True
                        parsed_value = parse_directive_value(directive, value, relaxed_bool=relaxed_bool)
                        result[directive] = parsed_value
            if not found and not ignore_unknown:
                raise ValueError('Unknown option: "%s"' % name)
        else:
            parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
            result[name] = parsed_value
    return result


def parse_variable_value(value):
    """
    Parses value as an option value for the given name and returns
    the interpreted value.

    >>> parse_variable_value('True')
    True
    >>> parse_variable_value('true')
    'true'
    >>> parse_variable_value('us-ascii')
    'us-ascii'
    >>> parse_variable_value('str')
    'str'
    >>> parse_variable_value('123')
    123
    >>> parse_variable_value('1.23')
    1.23

    """
    if value == "True":
        return True
    elif value == "False":
        return False
    elif value == "None":
        return None
    elif value.isdigit():
        return int(value)
    else:
        try:
            value = float(value)
        except Exception:
            # Not a float
            pass
        return value


def parse_compile_time_env(s, current_settings=None):
    """
    Parses a comma-separated list of pragma options. Whitespace
    is not considered.

    >>> parse_compile_time_env(' ')
    {}
    >>> (parse_compile_time_env('HAVE_OPENMP=True') ==
    ... {'HAVE_OPENMP': True})
    True
    >>> parse_compile_time_env(' asdf')
    Traceback (most recent call last):
       ...
    ValueError: Expected "=" in option "asdf"
    >>> parse_compile_time_env('NUM_THREADS=4') == {'NUM_THREADS': 4}
    True
    >>> parse_compile_time_env('unknown=anything') == {'unknown': 'anything'}
    True
    """
    if current_settings is None:
        result = {}
    else:
        result = current_settings
    for item in s.split(','):
        item = item.strip()
        if not item:
            continue
        if '=' not in item:
            raise ValueError('Expected "=" in option "%s"' % item)
        name, value = [s.strip() for s in item.split('=', 1)]
        result[name] = parse_variable_value(value)
    return result
@ -0,0 +1,82 

from __future__ import absolute_import

cimport cython

from .Visitor cimport (
    CythonTransform, VisitorTransform, TreeVisitor,
    ScopeTrackingTransform, EnvTransform)

cdef class SkipDeclarations:  # (object):
    pass

cdef class NormalizeTree(CythonTransform):
    cdef bint is_in_statlist
    cdef bint is_in_expr
    cpdef visit_StatNode(self, node, is_listcontainer=*)

cdef class PostParse(ScopeTrackingTransform):
    cdef dict specialattribute_handlers
    cdef size_t lambda_counter
    cdef size_t genexpr_counter
    cdef _visit_assignment_node(self, node, list expr_list)


#def eliminate_rhs_duplicates(list expr_list_list, list ref_node_sequence)
#def sort_common_subsequences(list items)
@cython.locals(starred_targets=Py_ssize_t, lhs_size=Py_ssize_t, rhs_size=Py_ssize_t)
cdef flatten_parallel_assignments(list input, list output)
cdef map_starred_assignment(list lhs_targets, list starred_assignments, list lhs_args, list rhs_args)

#class PxdPostParse(CythonTransform, SkipDeclarations):
#class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
#class WithTransform(CythonTransform, SkipDeclarations):
#class DecoratorTransform(CythonTransform, SkipDeclarations):

#class AnalyseDeclarationsTransform(EnvTransform):

cdef class AnalyseExpressionsTransform(CythonTransform):
    pass

cdef class ExpandInplaceOperators(EnvTransform):
    pass

cdef class AlignFunctionDefinitions(CythonTransform):
    cdef dict directives
    cdef set imported_names
    cdef object scope

@cython.final
cdef class YieldNodeCollector(TreeVisitor):
    cdef public list yields
    cdef public list returns
    cdef public list finallys
    cdef public list excepts
    cdef public bint has_return_value
    cdef public bint has_yield
    cdef public bint has_await

@cython.final
cdef class MarkClosureVisitor(CythonTransform):
    cdef bint needs_closure

@cython.final
cdef class CreateClosureClasses(CythonTransform):
    cdef list path
    cdef bint in_lambda
    cdef module_scope
    cdef generator_class

    cdef create_class_from_scope(self, node, target_module_scope, inner_node=*)
    cdef find_entries_used_in_closures(self, node)

#cdef class InjectGilHandling(VisitorTransform, SkipDeclarations):
#    cdef bint nogil

cdef class GilCheck(VisitorTransform):
    cdef list env_stack
    cdef bint nogil
    cdef bint nogil_declarator_only

cdef class TransformBuiltinMethods(EnvTransform):
    cdef visit_cython_attribute(self, node)
File diff suppressed because it is too large
@ -0,0 +1,199 
# We declare all of these here to type the first argument.

from __future__ import absolute_import

cimport cython
from .Scanning cimport PyrexScanner

ctypedef object (*p_sub_expr_func)(PyrexScanner obj)

# entry points

cpdef p_module(PyrexScanner s, pxd, full_module_name, ctx=*)
cpdef p_code(PyrexScanner s, level=*, ctx=*)

# internal parser states

cdef p_ident(PyrexScanner s, message=*)
cdef p_ident_list(PyrexScanner s)

cdef tuple p_binop_operator(PyrexScanner s)
cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr)
cdef p_lambdef(PyrexScanner s, bint allow_conditional=*)
cdef p_lambdef_nocond(PyrexScanner s)
cdef p_test(PyrexScanner s)
cdef p_test_nocond(PyrexScanner s)
cdef p_or_test(PyrexScanner s)
cdef p_rassoc_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_subexpr)
cdef p_and_test(PyrexScanner s)
cdef p_not_test(PyrexScanner s)
cdef p_comparison(PyrexScanner s)
cdef p_test_or_starred_expr(PyrexScanner s)
cdef p_starred_expr(PyrexScanner s)
cdef p_cascaded_cmp(PyrexScanner s)
cdef p_cmp_op(PyrexScanner s)
cdef p_bit_expr(PyrexScanner s)
cdef p_xor_expr(PyrexScanner s)
cdef p_and_expr(PyrexScanner s)
cdef p_shift_expr(PyrexScanner s)
cdef p_arith_expr(PyrexScanner s)
cdef p_term(PyrexScanner s)
cdef p_factor(PyrexScanner s)
cdef _p_factor(PyrexScanner s)
cdef p_typecast(PyrexScanner s)
cdef p_sizeof(PyrexScanner s)
cdef p_yield_expression(PyrexScanner s)
cdef p_yield_statement(PyrexScanner s)
cdef p_async_statement(PyrexScanner s, ctx, decorators)
cdef p_power(PyrexScanner s)
cdef p_new_expr(PyrexScanner s)
cdef p_trailer(PyrexScanner s, node1)
cdef p_call_parse_args(PyrexScanner s, bint allow_genexp=*)
cdef p_call_build_packed_args(pos, positional_args, keyword_args)
cdef p_call(PyrexScanner s, function)
cdef p_index(PyrexScanner s, base)
cdef tuple p_subscript_list(PyrexScanner s)
cdef p_subscript(PyrexScanner s)
cdef p_slice_element(PyrexScanner s, follow_set)
cdef expect_ellipsis(PyrexScanner s)
cdef make_slice_nodes(pos, subscripts)
cpdef make_slice_node(pos, start, stop=*, step=*)
cdef p_atom(PyrexScanner s)
@cython.locals(value=unicode)
cdef p_int_literal(PyrexScanner s)
cdef p_name(PyrexScanner s, name)
cdef wrap_compile_time_constant(pos, value)
cdef p_cat_string_literal(PyrexScanner s)
cdef p_opt_string_literal(PyrexScanner s, required_type=*)
cdef bint check_for_non_ascii_characters(unicode string)
@cython.locals(systr=unicode, is_python3_source=bint, is_raw=bint)
cdef p_string_literal(PyrexScanner s, kind_override=*)
cdef _append_escape_sequence(kind, builder, unicode escape_sequence, PyrexScanner s)
cdef tuple _f_string_error_pos(pos, string, Py_ssize_t i)
@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, next_start=Py_ssize_t)
cdef list p_f_string(PyrexScanner s, unicode_value, pos, bint is_raw)
@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, quote_char=Py_UCS4, NO_CHAR=Py_UCS4)
cdef tuple p_f_string_expr(PyrexScanner s, unicode_value, pos, Py_ssize_t starting_index, bint is_raw)
cdef p_list_maker(PyrexScanner s)
cdef p_comp_iter(PyrexScanner s, body)
cdef p_comp_for(PyrexScanner s, body)
cdef p_comp_if(PyrexScanner s, body)
cdef p_dict_or_set_maker(PyrexScanner s)
cdef p_backquote_expr(PyrexScanner s)
cdef p_simple_expr_list(PyrexScanner s, expr=*)
cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*)
cdef p_testlist(PyrexScanner s)
cdef p_testlist_star_expr(PyrexScanner s)
cdef p_testlist_comp(PyrexScanner s)
cdef p_genexp(PyrexScanner s, expr)

#-------------------------------------------------------
#
#   Statements
#
#-------------------------------------------------------

cdef p_global_statement(PyrexScanner s)
cdef p_nonlocal_statement(PyrexScanner s)
cdef p_expression_or_assignment(PyrexScanner s)
cdef p_print_statement(PyrexScanner s)
cdef p_exec_statement(PyrexScanner s)
cdef p_del_statement(PyrexScanner s)
cdef p_pass_statement(PyrexScanner s, bint with_newline=*)
cdef p_break_statement(PyrexScanner s)
cdef p_continue_statement(PyrexScanner s)
cdef p_return_statement(PyrexScanner s)
cdef p_raise_statement(PyrexScanner s)
cdef p_import_statement(PyrexScanner s)
cdef p_from_import_statement(PyrexScanner s, bint first_statement=*)
cdef p_imported_name(PyrexScanner s, bint is_cimport)
cdef p_dotted_name(PyrexScanner s, bint as_allowed)
cdef p_as_name(PyrexScanner s)
cdef p_assert_statement(PyrexScanner s)
cdef p_if_statement(PyrexScanner s)
cdef p_if_clause(PyrexScanner s)
cdef p_else_clause(PyrexScanner s)
cdef p_while_statement(PyrexScanner s)
cdef p_for_statement(PyrexScanner s, bint is_async=*)
cdef dict p_for_bounds(PyrexScanner s, bint allow_testlist=*, bint is_async=*)
cdef p_for_from_relation(PyrexScanner s)
cdef p_for_from_step(PyrexScanner s)
cdef p_target(PyrexScanner s, terminator)
cdef p_for_target(PyrexScanner s)
cdef p_for_iterator(PyrexScanner s, bint allow_testlist=*, bint is_async=*)
cdef p_try_statement(PyrexScanner s)
cdef p_except_clause(PyrexScanner s)
cdef p_include_statement(PyrexScanner s, ctx)
cdef p_with_statement(PyrexScanner s)
cdef p_with_items(PyrexScanner s, bint is_async=*)
cdef p_with_template(PyrexScanner s)
cdef p_simple_statement(PyrexScanner s, bint first_statement=*)
cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement=*)
cdef p_compile_time_expr(PyrexScanner s)
cdef p_DEF_statement(PyrexScanner s)
cdef p_IF_statement(PyrexScanner s, ctx)
cdef p_statement(PyrexScanner s, ctx, bint first_statement=*)
cdef p_statement_list(PyrexScanner s, ctx, bint first_statement=*)
cdef p_suite(PyrexScanner s, ctx=*)
cdef tuple p_suite_with_docstring(PyrexScanner s, ctx, bint with_doc_only=*)
cdef tuple _extract_docstring(node)
cdef p_positional_and_keyword_args(PyrexScanner s, end_sy_set, templates=*)

cpdef p_c_base_type(PyrexScanner s, bint self_flag=*, bint nonempty=*, templates=*)
cdef p_calling_convention(PyrexScanner s)
cdef p_c_complex_base_type(PyrexScanner s, templates=*)
cdef p_c_simple_base_type(PyrexScanner s, bint self_flag, bint nonempty, templates=*)
cdef p_buffer_or_template(PyrexScanner s, base_type_node, templates)
cdef p_bracketed_base_type(PyrexScanner s, base_type_node, nonempty, empty)
cdef is_memoryviewslice_access(PyrexScanner s)
cdef p_memoryviewslice_access(PyrexScanner s, base_type_node)
cdef bint looking_at_name(PyrexScanner s) except -2
cdef object looking_at_expr(PyrexScanner s)  # except -2
cdef bint looking_at_base_type(PyrexScanner s) except -2
cdef bint looking_at_dotted_name(PyrexScanner s) except -2
cdef bint looking_at_call(PyrexScanner s) except -2
cdef p_sign_and_longness(PyrexScanner s)
cdef p_opt_cname(PyrexScanner s)
cpdef p_c_declarator(PyrexScanner s, ctx=*, bint empty=*, bint is_type=*, bint cmethod_flag=*,
                     bint assignable=*, bint nonempty=*,
                     bint calling_convention_allowed=*)
cdef p_c_array_declarator(PyrexScanner s, base)
cdef p_c_func_declarator(PyrexScanner s, pos, ctx, base, bint cmethod_flag)
cdef p_c_simple_declarator(PyrexScanner s, ctx, bint empty, bint is_type, bint cmethod_flag,
                           bint assignable, bint nonempty)
cdef p_nogil(PyrexScanner s)
cdef p_with_gil(PyrexScanner s)
cdef p_exception_value_clause(PyrexScanner s)
cpdef p_c_arg_list(PyrexScanner s, ctx=*, bint in_pyfunc=*, bint cmethod_flag=*,
                   bint nonempty_declarators=*, bint kw_only=*, bint annotated=*)
cdef p_optional_ellipsis(PyrexScanner s)
cdef p_c_arg_decl(PyrexScanner s, ctx, in_pyfunc, bint cmethod_flag=*, bint nonempty=*, bint kw_only=*, bint annotated=*)
cdef p_api(PyrexScanner s)
cdef p_cdef_statement(PyrexScanner s, ctx)
cdef p_cdef_block(PyrexScanner s, ctx)
cdef p_cdef_extern_block(PyrexScanner s, pos, ctx)
cdef p_c_enum_definition(PyrexScanner s, pos, ctx)
cdef p_c_enum_line(PyrexScanner s, ctx, list items)
cdef p_c_enum_item(PyrexScanner s, ctx, list items)
cdef p_c_struct_or_union_definition(PyrexScanner s, pos, ctx)
cdef p_fused_definition(PyrexScanner s, pos, ctx)
cdef p_struct_enum(PyrexScanner s, pos, ctx)
cdef p_visibility(PyrexScanner s, prev_visibility)
cdef p_c_modifiers(PyrexScanner s)
cdef p_c_func_or_var_declaration(PyrexScanner s, pos, ctx)
cdef p_ctypedef_statement(PyrexScanner s, ctx)
cdef p_decorators(PyrexScanner s)
cdef _reject_cdef_modifier_in_py(PyrexScanner s, name)
cdef p_def_statement(PyrexScanner s, list decorators=*, bint is_async_def=*)
cdef p_varargslist(PyrexScanner s, terminator=*, bint annotated=*)
cdef p_py_arg_decl(PyrexScanner s, bint annotated=*)
cdef p_class_statement(PyrexScanner s, decorators)
cdef p_c_class_definition(PyrexScanner s, pos, ctx)
cdef tuple p_c_class_options(PyrexScanner s)
cdef p_property_decl(PyrexScanner s)
cdef p_doc_string(PyrexScanner s)
cdef p_ignorable_statement(PyrexScanner s)
cdef dict p_compiler_directive_comments(PyrexScanner s)
cdef p_template_definition(PyrexScanner s)
cdef p_cpp_class_definition(PyrexScanner s, pos, ctx)
cdef p_cpp_class_attribute(PyrexScanner s, ctx)
3852
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Parsing.py
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,369 @@
from __future__ import absolute_import

import itertools
from time import time

from . import Errors
from . import DebugFlags
from . import Options
from .Errors import CompileError, InternalError, AbortError
from . import Naming


#
# Really small pipeline stages
#
def dumptree(t):
    # For quick debugging in pipelines
    print(t.dump())
    return t


def abort_on_errors(node):
    # Stop the pipeline if there are any errors.
    if Errors.num_errors != 0:
        raise AbortError("pipeline break")
    return node


def parse_stage_factory(context):
    def parse(compsrc):
        source_desc = compsrc.source_desc
        full_module_name = compsrc.full_module_name
        initial_pos = (source_desc, 1, 0)
        saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
        scope = context.find_module(full_module_name, pos=initial_pos, need_pxd=0)
        Options.cimport_from_pyx = saved_cimport_from_pyx
        tree = context.parse(source_desc, scope, pxd=0, full_module_name=full_module_name)
        tree.compilation_source = compsrc
        tree.scope = scope
        tree.is_pxd = False
        return tree
    return parse


def parse_pxd_stage_factory(context, scope, module_name):
    def parse(source_desc):
        tree = context.parse(source_desc, scope, pxd=True,
                             full_module_name=module_name)
        tree.scope = scope
        tree.is_pxd = True
        return tree
    return parse


def generate_pyx_code_stage_factory(options, result):
    def generate_pyx_code_stage(module_node):
        module_node.process_implementation(options, result)
        result.compilation_source = module_node.compilation_source
        return result
    return generate_pyx_code_stage


def inject_pxd_code_stage_factory(context):
    def inject_pxd_code_stage(module_node):
        for name, (statlistnode, scope) in context.pxds.items():
            module_node.merge_in(statlistnode, scope)
        return module_node
    return inject_pxd_code_stage


def use_utility_code_definitions(scope, target, seen=None):
    if seen is None:
        seen = set()

    for entry in scope.entries.values():
        if entry in seen:
            continue

        seen.add(entry)
        if entry.used and entry.utility_code_definition:
            target.use_utility_code(entry.utility_code_definition)
            for required_utility in entry.utility_code_definition.requires:
                target.use_utility_code(required_utility)
        elif entry.as_module:
            use_utility_code_definitions(entry.as_module, target, seen)


def sort_utility_codes(utilcodes):
    ranks = {}
    def get_rank(utilcode):
        if utilcode not in ranks:
            ranks[utilcode] = 0  # prevent infinite recursion on circular dependencies
            original_order = len(ranks)
            ranks[utilcode] = 1 + min([get_rank(dep) for dep in utilcode.requires or ()] or [-1]) + original_order * 1e-8
        return ranks[utilcode]
    for utilcode in utilcodes:
        get_rank(utilcode)
    return [utilcode for utilcode, _ in sorted(ranks.items(), key=lambda kv: kv[1])]
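

# Illustrative note (added commentary, not part of the original module): a
# utility code with no dependencies gets rank 1 + min([-1]) == 0 plus a tiny
# original-order tie-breaker, and anything that requires it ranks at least one
# higher.  So if A.requires == [B], then rank(B) ~= 0 and rank(A) ~= 1, and
# sorting by rank yields [B, A]: dependencies always come first.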


def normalize_deps(utilcodes):
    deps = {}
    for utilcode in utilcodes:
        deps[utilcode] = utilcode

    def unify_dep(dep):
        if dep in deps:
            return deps[dep]
        else:
            deps[dep] = dep
            return dep

    for utilcode in utilcodes:
        utilcode.requires = [unify_dep(dep) for dep in utilcode.requires or ()]


def inject_utility_code_stage_factory(context):
    def inject_utility_code_stage(module_node):
        module_node.prepare_utility_code()
        use_utility_code_definitions(context.cython_scope, module_node.scope)
        module_node.scope.utility_code_list = sort_utility_codes(module_node.scope.utility_code_list)
        normalize_deps(module_node.scope.utility_code_list)
        added = []
        # Note: the list might be extended inside the loop (if some utility code
        # pulls in other utility code, explicitly or implicitly)
        for utilcode in module_node.scope.utility_code_list:
            if utilcode in added:
                continue
            added.append(utilcode)
            if utilcode.requires:
                for dep in utilcode.requires:
                    if dep not in added and dep not in module_node.scope.utility_code_list:
                        module_node.scope.utility_code_list.append(dep)
            tree = utilcode.get_tree(cython_scope=context.cython_scope)
            if tree:
                module_node.merge_in(tree.body, tree.scope, merge_scope=True)
        return module_node
    return inject_utility_code_stage


#
# Pipeline factories
#

def create_pipeline(context, mode, exclude_classes=()):
    assert mode in ('pyx', 'py', 'pxd')
    from .Visitor import PrintTree
    from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
    from .ParseTreeTransforms import ForwardDeclareTypes, InjectGilHandling, AnalyseDeclarationsTransform
    from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
    from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
    from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
    from .ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
    from .ParseTreeTransforms import CalculateQualifiedNamesTransform
    from .TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic
    from .ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
    from .ParseTreeTransforms import RemoveUnreachableCode, GilCheck
    from .FlowControl import ControlFlowAnalysis
    from .AnalysedTreeTransforms import AutoTestDictTransform
    from .AutoDocTransforms import EmbedSignature
    from .Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
    from .Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
    from .Optimize import InlineDefNodeCalls
    from .Optimize import ConstantFolding, FinalOptimizePhase
    from .Optimize import DropRefcountingTransform
    from .Optimize import ConsolidateOverflowCheck
    from .Buffer import IntroduceBufferAuxiliaryVars
    from .ModuleNode import check_c_declarations, check_c_declarations_pxd

    if mode == 'pxd':
        _check_c_declarations = check_c_declarations_pxd
        _specific_post_parse = PxdPostParse(context)
    else:
        _check_c_declarations = check_c_declarations
        _specific_post_parse = None

    if mode == 'py':
        _align_function_definitions = AlignFunctionDefinitions(context)
    else:
        _align_function_definitions = None

    # NOTE: This is the "common" parts of the pipeline, which is also
    # code in pxd files. So it will be run multiple times in a
    # compilation stage.
    stages = [
        NormalizeTree(context),
        PostParse(context),
        _specific_post_parse,
        TrackNumpyAttributes(),
        InterpretCompilerDirectives(context, context.compiler_directives),
        ParallelRangeTransform(context),
        AdjustDefByDirectives(context),
        WithTransform(context),
        MarkClosureVisitor(context),
        _align_function_definitions,
        RemoveUnreachableCode(context),
        ConstantFolding(),
        FlattenInListTransform(),
        DecoratorTransform(context),
        ForwardDeclareTypes(context),
        InjectGilHandling(),
        AnalyseDeclarationsTransform(context),
        AutoTestDictTransform(context),
        EmbedSignature(context),
        EarlyReplaceBuiltinCalls(context),  ## Necessary?
        TransformBuiltinMethods(context),
        MarkParallelAssignments(context),
        ControlFlowAnalysis(context),
        RemoveUnreachableCode(context),
        # MarkParallelAssignments(context),
        MarkOverflowingArithmetic(context),
        IntroduceBufferAuxiliaryVars(context),
        _check_c_declarations,
        InlineDefNodeCalls(context),
        AnalyseExpressionsTransform(context),
        FindInvalidUseOfFusedTypes(context),
        ExpandInplaceOperators(context),
        IterationTransform(context),
        SwitchTransform(context),
        OptimizeBuiltinCalls(context),  ## Necessary?
        CreateClosureClasses(context),  ## After all lookups and type inference
        CalculateQualifiedNamesTransform(context),
        ConsolidateOverflowCheck(context),
        DropRefcountingTransform(),
        FinalOptimizePhase(context),
        GilCheck(),
    ]
    filtered_stages = []
    for s in stages:
        if s.__class__ not in exclude_classes:
            filtered_stages.append(s)
    return filtered_stages


def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
    if py:
        mode = 'py'
    else:
        mode = 'pyx'
    test_support = []
    if options.evaluate_tree_assertions:
        from ..TestUtils import TreeAssertVisitor
        test_support.append(TreeAssertVisitor())

    if options.gdb_debug:
        from ..Debugger import DebugWriter  # requires Py2.5+
        from .ParseTreeTransforms import DebugTransform
        context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
            options.output_dir)
        debug_transform = [DebugTransform(context, options, result)]
    else:
        debug_transform = []

    return list(itertools.chain(
        [parse_stage_factory(context)],
        create_pipeline(context, mode, exclude_classes=exclude_classes),
        test_support,
        [inject_pxd_code_stage_factory(context),
         inject_utility_code_stage_factory(context),
         abort_on_errors],
        debug_transform,
        [generate_pyx_code_stage_factory(options, result)]))


def create_pxd_pipeline(context, scope, module_name):
    from .CodeGeneration import ExtractPxdCode

    # The pxd pipeline ends up with a CCodeWriter containing the
    # code of the pxd, as well as a pxd scope.
    return [
        parse_pxd_stage_factory(context, scope, module_name)
    ] + create_pipeline(context, 'pxd') + [
        ExtractPxdCode()
    ]


def create_py_pipeline(context, options, result):
    return create_pyx_pipeline(context, options, result, py=True)


def create_pyx_as_pxd_pipeline(context, result):
    from .ParseTreeTransforms import AlignFunctionDefinitions, \
        MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform
    from .Optimize import ConstantFolding, FlattenInListTransform
    from .Nodes import StatListNode
    pipeline = []
    pyx_pipeline = create_pyx_pipeline(context, context.options, result,
                                       exclude_classes=[
                                           AlignFunctionDefinitions,
                                           MarkClosureVisitor,
                                           ConstantFolding,
                                           FlattenInListTransform,
                                           WithTransform
                                       ])
    for stage in pyx_pipeline:
        pipeline.append(stage)
        if isinstance(stage, AnalyseDeclarationsTransform):
            # This is the last stage we need.
            break
    def fake_pxd(root):
        for entry in root.scope.entries.values():
            if not entry.in_cinclude:
                entry.defined_in_pxd = 1
                if entry.name == entry.cname and entry.visibility != 'extern':
                    # Always mangle non-extern cimported entries.
                    entry.cname = entry.scope.mangle(Naming.func_prefix, entry.name)
        return StatListNode(root.pos, stats=[]), root.scope
    pipeline.append(fake_pxd)
    return pipeline


def insert_into_pipeline(pipeline, transform, before=None, after=None):
    """
    Insert a new transform into the pipeline after or before an instance of
    the given class. e.g.

        pipeline = insert_into_pipeline(pipeline, transform,
                                        after=AnalyseDeclarationsTransform)
    """
    assert before or after

    cls = before or after
    for i, t in enumerate(pipeline):
        if isinstance(t, cls):
            break

    if after:
        i += 1

    return pipeline[:i] + [transform] + pipeline[i:]


#
# Running a pipeline
#

_pipeline_entry_points = {}


def run_pipeline(pipeline, source, printtree=True):
    from .Visitor import PrintTree
    exec_ns = globals().copy() if DebugFlags.debug_verbose_pipeline else None

    def run(phase, data):
        return phase(data)

    error = None
    data = source
    try:
        try:
            for phase in pipeline:
                if phase is not None:
                    if not printtree and isinstance(phase, PrintTree):
                        continue
                    if DebugFlags.debug_verbose_pipeline:
                        t = time()
                        print("Entering pipeline phase %r" % phase)
                        # create a new wrapper for each step to show the name in profiles
                        phase_name = getattr(phase, '__name__', type(phase).__name__)
                        try:
                            run = _pipeline_entry_points[phase_name]
                        except KeyError:
                            exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
                            run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
                    data = run(phase, data)
                    if DebugFlags.debug_verbose_pipeline:
                        print("    %.3f seconds" % (time() - t))
        except CompileError as err:
            # err is set
            Errors.report_error(err, use_stack=False)
            error = err
    except InternalError as err:
        # Only raise if there was not an earlier error
        if Errors.num_errors == 0:
            raise
        error = err
    except AbortError as err:
        error = err
    return (error, data)
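

# Illustrative sketch (added commentary, not part of the original module):
# a typical end-to-end use of the factories above; the exact context/options/
# result objects come from the compiler driver and are assumptions here.
#
#     pipeline = create_pyx_pipeline(context, options, result)
#     err, enddata = run_pipeline(pipeline, compilation_source)
#     if err is not None:
#         pass  # compilation failed; errors were already reported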
4736
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/PyrexTypes.py
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,227 @@
# cython: language_level=3

from __future__ import absolute_import

from .PyrexTypes import CType, CTypedefType, CStructOrUnionType

import cython

try:
    import pythran
    pythran_is_pre_0_9 = tuple(map(int, pythran.__version__.split('.')[0:2])) < (0, 9)
    pythran_is_pre_0_9_6 = tuple(map(int, pythran.__version__.split('.')[0:3])) < (0, 9, 6)
except ImportError:
    pythran = None
    pythran_is_pre_0_9 = True
    pythran_is_pre_0_9_6 = True

if pythran_is_pre_0_9_6:
    pythran_builtins = '__builtin__'
else:
    pythran_builtins = 'builtins'


# Pythran/Numpy specific operations

def has_np_pythran(env):
    if env is None:
        return False
    directives = getattr(env, 'directives', None)
    return (directives and directives.get('np_pythran', False))


@cython.ccall
def is_pythran_supported_dtype(type_):
    if isinstance(type_, CTypedefType):
        return is_pythran_supported_type(type_.typedef_base_type)
    return type_.is_numeric


def pythran_type(Ty, ptype="ndarray"):
    if Ty.is_buffer:
        ndim, dtype = Ty.ndim, Ty.dtype
        if isinstance(dtype, CStructOrUnionType):
            ctype = dtype.cname
        elif isinstance(dtype, CType):
            ctype = dtype.sign_and_name()
        elif isinstance(dtype, CTypedefType):
            ctype = dtype.typedef_cname
        else:
            raise ValueError("unsupported type %s!" % dtype)
        if pythran_is_pre_0_9:
            return "pythonic::types::%s<%s,%d>" % (ptype, ctype, ndim)
        else:
            return "pythonic::types::%s<%s,pythonic::types::pshape<%s>>" % (ptype, ctype, ",".join(("long",) * ndim))
    if Ty.is_pythran_expr:
        return Ty.pythran_type
    #if Ty.is_none:
    #    return "decltype(pythonic::builtins::None)"
    if Ty.is_numeric:
        return Ty.sign_and_name()
    raise ValueError("unsupported pythran type %s (%s)" % (Ty, type(Ty)))

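# Illustrative note (added commentary, not part of the original module): for
# a 2-dimensional buffer of C doubles, pythran_type() would return
#     "pythonic::types::ndarray<double,pythonic::types::pshape<long,long>>"
# on Pythran >= 0.9, and "pythonic::types::ndarray<double,2>" on older
# versions (the exact dtype spelling comes from sign_and_name()).
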
@cython.cfunc
def type_remove_ref(ty):
    return "typename std::remove_reference<%s>::type" % ty


def pythran_binop_type(op, tA, tB):
    if op == '**':
        return 'decltype(pythonic::numpy::functor::power{}(std::declval<%s>(), std::declval<%s>()))' % (
            pythran_type(tA), pythran_type(tB))
    else:
        return "decltype(std::declval<%s>() %s std::declval<%s>())" % (
            pythran_type(tA), op, pythran_type(tB))


def pythran_unaryop_type(op, type_):
    return "decltype(%sstd::declval<%s>())" % (
        op, pythran_type(type_))


@cython.cfunc
def _index_access(index_code, indices):
    indexing = ",".join([index_code(idx) for idx in indices])
    return ('[%s]' if len(indices) == 1 else '(%s)') % indexing


def _index_type_code(index_with_type):
    idx, index_type = index_with_type
    if idx.is_slice:
        n = 2 + int(not idx.step.is_none)
        return "pythonic::%s::functor::slice{}(%s)" % (
            pythran_builtins,
            ",".join(["0"] * n))
    elif index_type.is_int:
        return "std::declval<%s>()" % index_type.sign_and_name()
    elif index_type.is_pythran_expr:
        return "std::declval<%s>()" % index_type.pythran_type
    raise ValueError("unsupported indexing type %s!" % index_type)


def _index_code(idx):
    if idx.is_slice:
        values = idx.start, idx.stop, idx.step
        if idx.step.is_none:
            func = "contiguous_slice"
            values = values[:2]
        else:
            func = "slice"
        return "pythonic::types::%s(%s)" % (
            func, ",".join(v.pythran_result() for v in values))
    elif idx.type.is_int:
        return to_pythran(idx)
    elif idx.type.is_pythran_expr:
        return idx.pythran_result()
    raise ValueError("unsupported indexing type %s" % idx.type)


def pythran_indexing_type(type_, indices):
    return type_remove_ref("decltype(std::declval<%s>()%s)" % (
        pythran_type(type_),
        _index_access(_index_type_code, indices),
    ))


def pythran_indexing_code(indices):
    return _index_access(_index_code, indices)


def np_func_to_list(func):
    if not func.is_numpy_attribute:
        return []
    return np_func_to_list(func.obj) + [func.attribute]


if pythran is None:
    def pythran_is_numpy_func_supported(name):
        return False
else:
    def pythran_is_numpy_func_supported(func):
        CurF = pythran.tables.MODULES['numpy']
        FL = np_func_to_list(func)
        for F in FL:
            CurF = CurF.get(F, None)
            if CurF is None:
                return False
        return True


def pythran_functor(func):
    func = np_func_to_list(func)
    submodules = "::".join(func[:-1] + ["functor"])
    return "pythonic::numpy::%s::%s" % (submodules, func[-1])


def pythran_func_type(func, args):
    args = ",".join("std::declval<%s>()" % pythran_type(a.type) for a in args)
    return "decltype(%s{}(%s))" % (pythran_functor(func), args)


@cython.ccall
def to_pythran(op, ptype=None):
    op_type = op.type
    if op_type.is_int:
        # Make sure that integer literals always have exactly the type that the templates expect.
        return op_type.cast_code(op.result())
    if is_type(op_type, ["is_pythran_expr", "is_numeric", "is_float", "is_complex"]):
        return op.result()
    if op.is_none:
        return "pythonic::%s::None" % pythran_builtins
    if ptype is None:
        ptype = pythran_type(op_type)

    assert op.type.is_pyobject
    return "from_python<%s>(%s)" % (ptype, op.py_result())


@cython.cfunc
def is_type(type_, types):
    for attr in types:
        if getattr(type_, attr, False):
            return True
    return False


def is_pythran_supported_node_or_none(node):
    return node.is_none or is_pythran_supported_type(node.type)


@cython.ccall
def is_pythran_supported_type(type_):
    pythran_supported = (
        "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_none", "is_complex")
    return is_type(type_, pythran_supported) or is_pythran_expr(type_)


def is_pythran_supported_operation_type(type_):
    pythran_supported = (
        "is_pythran_expr", "is_int", "is_numeric", "is_float", "is_complex")
    return is_type(type_, pythran_supported) or is_pythran_expr(type_)


@cython.ccall
def is_pythran_expr(type_):
    return type_.is_pythran_expr


def is_pythran_buffer(type_):
    return (type_.is_numpy_buffer and is_pythran_supported_dtype(type_.dtype) and
            type_.mode in ("c", "strided") and not type_.cast)


def pythran_get_func_include_file(func):
    func = np_func_to_list(func)
    return "pythonic/numpy/%s.hpp" % "/".join(func)


def include_pythran_generic(env):
    # Generic files
    env.add_include_file("pythonic/core.hpp")
    env.add_include_file("pythonic/python/core.hpp")
    env.add_include_file("pythonic/types/bool.hpp")
    env.add_include_file("pythonic/types/ndarray.hpp")
    env.add_include_file("pythonic/numpy/power.hpp")
    env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins)
    env.add_include_file("<new>")  # for placement new

    for i in (8, 16, 32, 64):
        env.add_include_file("pythonic/types/uint%d.hpp" % i)
        env.add_include_file("pythonic/types/int%d.hpp" % i)
    for t in ("float", "float32", "float64", "set", "slice", "tuple", "int",
              "complex", "complex64", "complex128"):
        env.add_include_file("pythonic/types/%s.hpp" % t)
Binary file not shown.
@@ -0,0 +1,67 @@
from __future__ import absolute_import

import cython

from ..Plex.Scanners cimport Scanner

cdef unicode any_string_prefix, IDENT

cdef get_lexicon()
cdef initial_compile_time_env()

cdef class Method:
    cdef object name
    cdef dict kwargs
    cdef readonly object __name__  # for tracing the scanner

## methods commented out with '##' are used by Parsing.py when compiled.

@cython.final
cdef class CompileTimeScope:
    cdef public dict entries
    cdef public CompileTimeScope outer
    ##cdef declare(self, name, value)
    ##cdef lookup_here(self, name)
    ##cpdef lookup(self, name)

@cython.final
cdef class PyrexScanner(Scanner):
    cdef public context
    cdef public list included_files
    cdef public CompileTimeScope compile_time_env
    cdef public bint compile_time_eval
    cdef public bint compile_time_expr
    cdef public bint parse_comments
    cdef public bint in_python_file
    cdef public source_encoding
    cdef set keywords
    cdef public list indentation_stack
    cdef public indentation_char
    cdef public int bracket_nesting_level
    cdef readonly bint async_enabled
    cdef public sy
    cdef public systring

    cdef long current_level(self)
    #cpdef commentline(self, text)
    #cpdef open_bracket_action(self, text)
    #cpdef close_bracket_action(self, text)
    #cpdef newline_action(self, text)
    #cpdef begin_string_action(self, text)
    #cpdef end_string_action(self, text)
    #cpdef unclosed_string_action(self, text)
    @cython.locals(current_level=cython.long, new_level=cython.long)
    cpdef indentation_action(self, text)
    #cpdef eof_action(self, text)
    ##cdef next(self)
    ##cdef peek(self)
    #cpdef put_back(self, sy, systring)
    #cdef unread(self, token, value)
    ##cdef bint expect(self, what, message = *) except -2
    ##cdef expect_keyword(self, what, message = *)
    ##cdef expected(self, what, message = *)
    ##cdef expect_indent(self)
    ##cdef expect_dedent(self)
    ##cdef expect_newline(self, message=*, bint ignore_semicolon=*)
    ##cdef int enter_async(self) except -1
    ##cdef int exit_async(self) except -1
@@ -0,0 +1,551 @@
# cython: infer_types=True, language_level=3, py2_import=True, auto_pickle=False
#
#   Cython Scanner
#

from __future__ import absolute_import

import cython
cython.declare(make_lexicon=object, lexicon=object,
               print_function=object, error=object, warning=object,
               os=object, platform=object)

import os
import platform

from .. import Utils
from ..Plex.Scanners import Scanner
from ..Plex.Errors import UnrecognizedInput
from .Errors import error, warning
from .Lexicon import any_string_prefix, make_lexicon, IDENT
from .Future import print_function

debug_scanner = 0
trace_scanner = 0
scanner_debug_flags = 0
scanner_dump_file = None

lexicon = None


def get_lexicon():
    global lexicon
    if not lexicon:
        lexicon = make_lexicon()
    return lexicon


#------------------------------------------------------------------

py_reserved_words = [
    "global", "nonlocal", "def", "class", "print", "del", "pass", "break",
    "continue", "return", "raise", "import", "exec", "try",
    "except", "finally", "while", "if", "elif", "else", "for",
    "in", "assert", "and", "or", "not", "is", "lambda",
    "from", "yield", "with",
]

pyx_reserved_words = py_reserved_words + [
    "include", "ctypedef", "cdef", "cpdef",
    "cimport", "DEF", "IF", "ELIF", "ELSE"
]


class Method(object):

    def __init__(self, name, **kwargs):
        self.name = name
        self.kwargs = kwargs or None
        self.__name__ = name  # for Plex tracing

    def __call__(self, stream, text):
        method = getattr(stream, self.name)
        # self.kwargs is almost always unused => avoid call overhead
        return method(text, **self.kwargs) if self.kwargs is not None else method(text)

    def __copy__(self):
        return self  # immutable, no need to copy

    def __deepcopy__(self, memo):
        return self  # immutable, no need to copy


#------------------------------------------------------------------

class CompileTimeScope(object):

    def __init__(self, outer=None):
        self.entries = {}
        self.outer = outer

    def declare(self, name, value):
        self.entries[name] = value

    def update(self, other):
        self.entries.update(other)

    def lookup_here(self, name):
        return self.entries[name]

    def __contains__(self, name):
        return name in self.entries

    def lookup(self, name):
        try:
            return self.lookup_here(name)
        except KeyError:
            outer = self.outer
            if outer:
                return outer.lookup(name)
            else:
                raise


def initial_compile_time_env():
    benv = CompileTimeScope()
    names = ('UNAME_SYSNAME', 'UNAME_NODENAME', 'UNAME_RELEASE', 'UNAME_VERSION', 'UNAME_MACHINE')
    for name, value in zip(names, platform.uname()):
        benv.declare(name, value)
    try:
        import __builtin__ as builtins
    except ImportError:
        import builtins

    names = (
        'False', 'True',
        'abs', 'all', 'any', 'ascii', 'bin', 'bool', 'bytearray', 'bytes',
        'chr', 'cmp', 'complex', 'dict', 'divmod', 'enumerate', 'filter',
        'float', 'format', 'frozenset', 'hash', 'hex', 'int', 'len',
        'list', 'map', 'max', 'min', 'oct', 'ord', 'pow', 'range',
        'repr', 'reversed', 'round', 'set', 'slice', 'sorted', 'str',
        'sum', 'tuple', 'zip',
        ### defined below in a platform independent way
        # 'long', 'unicode', 'reduce', 'xrange'
    )

    for name in names:
        try:
            benv.declare(name, getattr(builtins, name))
        except AttributeError:
            # ignore, likely Py3
            pass

    # Py2/3 adaptations
    from functools import reduce
    benv.declare('reduce', reduce)
    benv.declare('unicode', getattr(builtins, 'unicode', getattr(builtins, 'str')))
    benv.declare('long', getattr(builtins, 'long', getattr(builtins, 'int')))
    benv.declare('xrange', getattr(builtins, 'xrange', getattr(builtins, 'range')))

    denv = CompileTimeScope(benv)
    return denv
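

# Illustrative sketch (added commentary, not part of the original module):
# the scope built above backs Cython's DEF/IF conditional compilation, e.g.
#
#     DEF BUFSIZE = 1024              # stored via CompileTimeScope.declare()
#     IF UNAME_SYSNAME == "Linux":    # looked up via CompileTimeScope.lookup()
#         cdef char buf[BUFSIZE]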


#------------------------------------------------------------------

class SourceDescriptor(object):
    """
    A SourceDescriptor should be considered immutable.
    """
    filename = None

    _file_type = 'pyx'

    _escaped_description = None
    _cmp_name = ''
    def __str__(self):
        assert False  # To catch all places where a descriptor is used directly as a filename

    def set_file_type_from_name(self, filename):
        name, ext = os.path.splitext(filename)
        self._file_type = ext in ('.pyx', '.pxd', '.py') and ext[1:] or 'pyx'

    def is_cython_file(self):
        return self._file_type in ('pyx', 'pxd')

    def is_python_file(self):
        return self._file_type == 'py'

    def get_escaped_description(self):
        if self._escaped_description is None:
            esc_desc = \
                self.get_description().encode('ASCII', 'replace').decode("ASCII")
            # Use forward slashes on Windows since these paths
            # will be used in the #line directives in the C/C++ files.
            self._escaped_description = esc_desc.replace('\\', '/')
        return self._escaped_description

    def __gt__(self, other):
        # this is only used to provide some sort of order
        try:
            return self._cmp_name > other._cmp_name
        except AttributeError:
            return False

    def __lt__(self, other):
        # this is only used to provide some sort of order
        try:
            return self._cmp_name < other._cmp_name
        except AttributeError:
            return False

    def __le__(self, other):
        # this is only used to provide some sort of order
        try:
            return self._cmp_name <= other._cmp_name
        except AttributeError:
            return False

    def __copy__(self):
        return self  # immutable, no need to copy

    def __deepcopy__(self, memo):
        return self  # immutable, no need to copy


class FileSourceDescriptor(SourceDescriptor):
    """
    Represents a code source. A code source is a more generic abstraction
    for a "filename" (as sometimes the code doesn't come from a file).
    Instances of code sources are passed to Scanner.__init__ as the
    optional name argument and will be passed back when asking for
    the position()-tuple.
    """
    def __init__(self, filename, path_description=None):
        filename = Utils.decode_filename(filename)
        self.path_description = path_description or filename
        self.filename = filename
        # Prefer relative paths to current directory (which is most likely the project root) over absolute paths.
        workdir = os.path.abspath('.') + os.sep
        self.file_path = filename[len(workdir):] if filename.startswith(workdir) else filename
        self.set_file_type_from_name(filename)
        self._cmp_name = filename
        self._lines = {}

    def get_lines(self, encoding=None, error_handling=None):
        # we cache the lines only the second time this is called, in
        # order to save memory when they are only used once
        key = (encoding, error_handling)
        try:
            lines = self._lines[key]
            if lines is not None:
                return lines
        except KeyError:
            pass

        with Utils.open_source_file(self.filename, encoding=encoding, error_handling=error_handling) as f:
            lines = list(f)

        if key in self._lines:
            self._lines[key] = lines
        else:
            # do not cache the first access, but remember that we
            # already read it once
            self._lines[key] = None
        return lines

    def get_description(self):
        try:
            return os.path.relpath(self.path_description)
        except ValueError:
            # path not under current directory => use complete file path
            return self.path_description

    def get_error_description(self):
        path = self.filename
        cwd = Utils.decode_filename(os.getcwd() + os.path.sep)
        if path.startswith(cwd):
            return path[len(cwd):]
        return path

    def get_filenametable_entry(self):
        return self.file_path

    def __eq__(self, other):
        return isinstance(other, FileSourceDescriptor) and self.filename == other.filename

    def __hash__(self):
        return hash(self.filename)

    def __repr__(self):
        return "<FileSourceDescriptor:%s>" % self.filename


class StringSourceDescriptor(SourceDescriptor):
    """
    Instances of this class can be used instead of a filenames if the
    code originates from a string object.
    """
    def __init__(self, name, code):
        self.name = name
        #self.set_file_type_from_name(name)
        self.codelines = [x + "\n" for x in code.split("\n")]
        self._cmp_name = name

    def get_lines(self, encoding=None, error_handling=None):
        if not encoding:
            return self.codelines
        else:
            return [line.encode(encoding, error_handling).decode(encoding)
                    for line in self.codelines]

    def get_description(self):
        return self.name

    get_error_description = get_description

    def get_filenametable_entry(self):
        return "stringsource"

    def __hash__(self):
        return id(self)
        # Do not hash on the name, an identical string source should be the
        # same object (name is often defaulted in other places)
        # return hash(self.name)

    def __eq__(self, other):
        return isinstance(other, StringSourceDescriptor) and self.name == other.name

    def __repr__(self):
        return "<StringSourceDescriptor:%s>" % self.name


#------------------------------------------------------------------

class PyrexScanner(Scanner):
    #  context            Context  Compilation context
    #  included_files     [string] Files included with 'include' statement
    #  compile_time_env   dict     Environment for conditional compilation
    #  compile_time_eval  boolean  In a true conditional compilation context
    #  compile_time_expr  boolean  In a compile-time expression context

    def __init__(self, file, filename, parent_scanner=None,
                 scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None):
        Scanner.__init__(self, get_lexicon(), file, filename, initial_pos)

        if filename.is_python_file():
            self.in_python_file = True
            self.keywords = set(py_reserved_words)
        else:
            self.in_python_file = False
            self.keywords = set(pyx_reserved_words)

        self.async_enabled = 0

        if parent_scanner:
            self.context = parent_scanner.context
            self.included_files = parent_scanner.included_files
            self.compile_time_env = parent_scanner.compile_time_env
            self.compile_time_eval = parent_scanner.compile_time_eval
            self.compile_time_expr = parent_scanner.compile_time_expr

            if parent_scanner.async_enabled:
                self.enter_async()
        else:
            self.context = context
            self.included_files = scope.included_files
            self.compile_time_env = initial_compile_time_env()
            self.compile_time_eval = 1
            self.compile_time_expr = 0
            if getattr(context.options, 'compile_time_env', None):
                self.compile_time_env.update(context.options.compile_time_env)
        self.parse_comments = parse_comments
        self.source_encoding = source_encoding
        self.trace = trace_scanner
        self.indentation_stack = [0]
        self.indentation_char = None
        self.bracket_nesting_level = 0

        self.begin('INDENT')
        self.sy = ''
        self.next()

    def commentline(self, text):
        if self.parse_comments:
            self.produce('commentline', text)

    def strip_underscores(self, text, symbol):
        self.produce(symbol, text.replace('_', ''))

    def current_level(self):
        return self.indentation_stack[-1]

    def open_bracket_action(self, text):
        self.bracket_nesting_level += 1
        return text

    def close_bracket_action(self, text):
        self.bracket_nesting_level -= 1
        return text

    def newline_action(self, text):
        if self.bracket_nesting_level == 0:
            self.begin('INDENT')
            self.produce('NEWLINE', '')

    string_states = {
        "'":   'SQ_STRING',
        '"':   'DQ_STRING',
        "'''": 'TSQ_STRING',
        '"""': 'TDQ_STRING'
    }

    def begin_string_action(self, text):
        while text[:1] in any_string_prefix:
            text = text[1:]
        self.begin(self.string_states[text])
        self.produce('BEGIN_STRING')

    def end_string_action(self, text):
        self.begin('')
        self.produce('END_STRING')

    def unclosed_string_action(self, text):
        self.end_string_action(text)
        self.error("Unclosed string literal")

    def indentation_action(self, text):
        self.begin('')
        # Indentation within brackets should be ignored.
        #if self.bracket_nesting_level > 0:
        #    return
        # Check that tabs and spaces are being used consistently.
        if text:
            c = text[0]
            #print "Scanner.indentation_action: indent with", repr(c) ###
            if self.indentation_char is None:
                self.indentation_char = c
                #print "Scanner.indentation_action: setting indent_char to", repr(c)
            else:
                if self.indentation_char != c:
                    self.error("Mixed use of tabs and spaces")
            if text.replace(c, "") != "":
                self.error("Mixed use of tabs and spaces")
        # Figure out how many indents/dedents to do
        current_level = self.current_level()
        new_level = len(text)
        #print "Changing indent level from", current_level, "to", new_level ###
        if new_level == current_level:
            return
        elif new_level > current_level:
            #print "...pushing level", new_level ###
            self.indentation_stack.append(new_level)
            self.produce('INDENT', '')
        else:
            while new_level < self.current_level():
                #print "...popping level", self.indentation_stack[-1] ###
                self.indentation_stack.pop()
                self.produce('DEDENT', '')
            #print "...current level now", self.current_level() ###
            if new_level != self.current_level():
                self.error("Inconsistent indentation")
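
    # Illustrative note (added commentary, not part of the original class):
    # for a two-line source like "if x:\n    y = 1\n", indentation_action()
    # pushes 4 onto indentation_stack and produces one INDENT when the nested
    # block starts, and eof_action() below emits the matching DEDENT, so the
    # parser sees roughly: 'if', IDENT, ':', NEWLINE, INDENT, IDENT, '=', INT,
    # NEWLINE, DEDENT, EOF (token names approximated).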

    def eof_action(self, text):
        while len(self.indentation_stack) > 1:
            self.produce('DEDENT', '')
            self.indentation_stack.pop()
        self.produce('EOF', '')

    def next(self):
        try:
            sy, systring = self.read()
        except UnrecognizedInput:
            self.error("Unrecognized character")
            return  # just a marker, error() always raises
        if sy == IDENT:
            if systring in self.keywords:
                if systring == u'print' and print_function in self.context.future_directives:
                    self.keywords.discard('print')
                elif systring == u'exec' and self.context.language_level >= 3:
                    self.keywords.discard('exec')
                else:
                    sy = systring
            systring = self.context.intern_ustring(systring)
        self.sy = sy
        self.systring = systring
        if False:  # debug_scanner:
            _, line, col = self.position()
            if not self.systring or self.sy == self.systring:
                t = self.sy
            else:
                t = "%s %s" % (self.sy, self.systring)
            print("--- %3d %2d %s" % (line, col, t))

    def peek(self):
        saved = self.sy, self.systring
        self.next()
        next = self.sy, self.systring
        self.unread(*next)
        self.sy, self.systring = saved
        return next

    def put_back(self, sy, systring):
        self.unread(self.sy, self.systring)
        self.sy = sy
        self.systring = systring

    def unread(self, token, value):
        # This method should be added to Plex
        self.queue.insert(0, (token, value))

    def error(self, message, pos=None, fatal=True):
        if pos is None:
            pos = self.position()
        if self.sy == 'INDENT':
            error(pos, "Possible inconsistent indentation")
        err = error(pos, message)
        if fatal: raise err

    def expect(self, what, message=None):
        if self.sy == what:
            self.next()
        else:
            self.expected(what, message)

    def expect_keyword(self, what, message=None):
        if self.sy == IDENT and self.systring == what:
            self.next()
        else:
            self.expected(what, message)

    def expected(self, what, message=None):
        if message:
            self.error(message)
        else:
            if self.sy == IDENT:
                found = self.systring
            else:
                found = self.sy
            self.error("Expected '%s', found '%s'" % (what, found))

    def expect_indent(self):
        self.expect('INDENT', "Expected an increase in indentation level")

    def expect_dedent(self):
        self.expect('DEDENT', "Expected a decrease in indentation level")

    def expect_newline(self, message="Expected a newline", ignore_semicolon=False):
        # Expect either a newline or end of file
        useless_trailing_semicolon = None
        if ignore_semicolon and self.sy == ';':
            useless_trailing_semicolon = self.position()
            self.next()
        if self.sy != 'EOF':
            self.expect('NEWLINE', message)
        if useless_trailing_semicolon is not None:
            warning(useless_trailing_semicolon, "useless trailing semicolon")

    def enter_async(self):
        self.async_enabled += 1
        if self.async_enabled == 1:
            self.keywords.add('async')
            self.keywords.add('await')

    def exit_async(self):
        assert self.async_enabled > 0
        self.async_enabled -= 1
        if not self.async_enabled:
            self.keywords.discard('await')
            self.keywords.discard('async')
            if self.sy in ('async', 'await'):
                self.sy, self.systring = IDENT, self.context.intern_ustring(self.sy)
@@ -0,0 +1,363 @@
#
#   Cython -- encoding related tools
#

from __future__ import absolute_import

import re
import sys

if sys.version_info[0] >= 3:
    _unicode, _str, _bytes, _unichr = str, str, bytes, chr
    IS_PYTHON3 = True
else:
    _unicode, _str, _bytes, _unichr = unicode, str, str, unichr
    IS_PYTHON3 = False

empty_bytes = _bytes()
empty_unicode = _unicode()

join_bytes = empty_bytes.join


class UnicodeLiteralBuilder(object):
    """Assemble a unicode string.
    """
    def __init__(self):
        self.chars = []

    def append(self, characters):
        if isinstance(characters, _bytes):
            # this came from a Py2 string literal in the parser code
            characters = characters.decode("ASCII")
        assert isinstance(characters, _unicode), str(type(characters))
        self.chars.append(characters)

    if sys.maxunicode == 65535:
        def append_charval(self, char_number):
            if char_number > 65535:
                # wide Unicode character on narrow platform => replace
                # by surrogate pair
                char_number -= 0x10000
                self.chars.append( _unichr((char_number // 1024) + 0xD800) )
                self.chars.append( _unichr((char_number % 1024) + 0xDC00) )
            else:
                self.chars.append( _unichr(char_number) )
    else:
        def append_charval(self, char_number):
            self.chars.append( _unichr(char_number) )

    def append_uescape(self, char_number, escape_string):
        self.append_charval(char_number)

    def getstring(self):
        return EncodedString(u''.join(self.chars))

    def getstrings(self):
        return (None, self.getstring())


class BytesLiteralBuilder(object):
    """Assemble a byte string or char value.
    """
    def __init__(self, target_encoding):
        self.chars = []
        self.target_encoding = target_encoding

    def append(self, characters):
        if isinstance(characters, _unicode):
            characters = characters.encode(self.target_encoding)
        assert isinstance(characters, _bytes), str(type(characters))
        self.chars.append(characters)

    def append_charval(self, char_number):
        self.chars.append( _unichr(char_number).encode('ISO-8859-1') )

    def append_uescape(self, char_number, escape_string):
        self.append(escape_string)

    def getstring(self):
        # this *must* return a byte string!
        return bytes_literal(join_bytes(self.chars), self.target_encoding)

    def getchar(self):
        # this *must* return a byte string!
        return self.getstring()

    def getstrings(self):
        return (self.getstring(), None)


class StrLiteralBuilder(object):
    """Assemble both a bytes and a unicode representation of a string.
    """
    def __init__(self, target_encoding):
        self._bytes = BytesLiteralBuilder(target_encoding)
        self._unicode = UnicodeLiteralBuilder()

    def append(self, characters):
        self._bytes.append(characters)
        self._unicode.append(characters)

    def append_charval(self, char_number):
        self._bytes.append_charval(char_number)
        self._unicode.append_charval(char_number)

    def append_uescape(self, char_number, escape_string):
        self._bytes.append(escape_string)
        self._unicode.append_charval(char_number)

    def getstrings(self):
        return (self._bytes.getstring(), self._unicode.getstring())


class EncodedString(_unicode):
    # unicode string subclass to keep track of the original encoding.
    # 'encoding' is None for unicode strings and the source encoding
    # otherwise
    encoding = None

    def __deepcopy__(self, memo):
        return self

    def byteencode(self):
        assert self.encoding is not None
        return self.encode(self.encoding)

    def utf8encode(self):
        assert self.encoding is None
        return self.encode("UTF-8")

    @property
    def is_unicode(self):
        return self.encoding is None

    def contains_surrogates(self):
        return string_contains_surrogates(self)

    def as_utf8_string(self):
        return bytes_literal(self.utf8encode(), 'utf8')


def string_contains_surrogates(ustring):
    """
    Check if the unicode string contains surrogate code points
    on a CPython platform with wide (UCS-4) or narrow (UTF-16)
    Unicode, i.e. characters that would be spelled as two
    separate code units on a narrow platform.
    """
    for c in map(ord, ustring):
        if c > 65535:  # can only happen on wide platforms
            return True
        if 0xD800 <= c <= 0xDFFF:
            return True
    return False


def string_contains_lone_surrogates(ustring):
    """
    Check if the unicode string contains lone surrogate code points
    on a CPython platform with wide (UCS-4) or narrow (UTF-16)
    Unicode, i.e. characters that would be spelled as two
    separate code units on a narrow platform, but that do not form a pair.
    """
    last_was_start = False
    unicode_uses_surrogate_encoding = sys.maxunicode == 65535
    for c in map(ord, ustring):
        # surrogates tend to be rare
        if c < 0xD800 or c > 0xDFFF:
            if last_was_start:
                return True
        elif not unicode_uses_surrogate_encoding:
            # on 32bit Unicode platforms, there is never a pair
            return True
        elif c <= 0xDBFF:
            if last_was_start:
                return True  # lone start
            last_was_start = True
        else:
            if not last_was_start:
                return True  # lone end
            last_was_start = False
    return last_was_start


class BytesLiteral(_bytes):
    # bytes subclass that is compatible with EncodedString
    encoding = None

    def __deepcopy__(self, memo):
        return self

    def byteencode(self):
        if IS_PYTHON3:
            return _bytes(self)
        else:
            # fake-recode the string to make it a plain bytes object
            return self.decode('ISO-8859-1').encode('ISO-8859-1')

    def utf8encode(self):
        assert False, "this is not a unicode string: %r" % self

    def __str__(self):
        """Fake-decode the byte string to unicode to support %
        formatting of unicode strings.
        """
        return self.decode('ISO-8859-1')

    is_unicode = False

    def as_c_string_literal(self):
        value = split_string_literal(escape_byte_string(self))
        return '"%s"' % value


def bytes_literal(s, encoding):
    assert isinstance(s, bytes)
    s = BytesLiteral(s)
    s.encoding = encoding
    return s


def encoded_string(s, encoding):
    assert isinstance(s, (_unicode, bytes))
    s = EncodedString(s)
    if encoding is not None:
        s.encoding = encoding
    return s


char_from_escape_sequence = {
    r'\a' : u'\a',
    r'\b' : u'\b',
    r'\f' : u'\f',
    r'\n' : u'\n',
    r'\r' : u'\r',
    r'\t' : u'\t',
    r'\v' : u'\v',
}.get

_c_special = ('\\', '??', '"') + tuple(map(chr, range(32)))


def _to_escape_sequence(s):
    if s in '\n\r\t':
        return repr(s)[1:-1]
    elif s == '"':
        return r'\"'
    elif s == '\\':
        return r'\\'
    else:
        # within a character sequence, oct passes much better than hex
        return ''.join(['\\%03o' % ord(c) for c in s])


def _build_specials_replacer():
    subexps = []
    replacements = {}
    for special in _c_special:
        regexp = ''.join(['[%s]' % c.replace('\\', '\\\\') for c in special])
        subexps.append(regexp)
        replacements[special.encode('ASCII')] = _to_escape_sequence(special).encode('ASCII')
    sub = re.compile(('(%s)' % '|'.join(subexps)).encode('ASCII')).sub
    def replace_specials(m):
        return replacements[m.group(1)]
    def replace(s):
        return sub(replace_specials, s)
    return replace

_replace_specials = _build_specials_replacer()


def escape_char(c):
    if IS_PYTHON3:
        c = c.decode('ISO-8859-1')
    if c in '\n\r\t\\':
        return repr(c)[1:-1]
    elif c == "'":
        return "\\'"
    n = ord(c)
    if n < 32 or n > 127:
        # hex works well for characters
        return "\\x%02X" % n
    else:
        return c


def escape_byte_string(s):
    """Escape a byte string so that it can be written into C code.
    Note that this returns a Unicode string instead which, when
    encoded as ISO-8859-1, will result in the correct byte sequence
    being written.
    """
    s = _replace_specials(s)
    try:
        return s.decode("ASCII")  # trial decoding: plain ASCII => done
    except UnicodeDecodeError:
        pass
    if IS_PYTHON3:
        s_new = bytearray()
        append, extend = s_new.append, s_new.extend
        for b in s:
            if b >= 128:
                extend(('\\%3o' % b).encode('ASCII'))
            else:
                append(b)
        return s_new.decode('ISO-8859-1')
    else:
        l = []
        append = l.append
        for c in s:
            o = ord(c)
            if o >= 128:
                append('\\%3o' % o)
            else:
                append(c)
        return join_bytes(l).decode('ISO-8859-1')
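
# Illustrative note (added commentary, not part of the original module):
#     escape_byte_string(b'\x00hi"\xff')  ->  u'\\000hi\\"\\377'
# i.e. C-special bytes become octal escapes, '"' is backslash-escaped, and
# non-ASCII bytes are octal-escaped, so the result written out as ISO-8859-1
# reproduces the original byte sequence.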

def split_string_literal(s, limit=2000):
    # MSVC can't handle long string literals.
    if len(s) < limit:
        return s
    else:
        start = 0
        chunks = []
        while start < len(s):
            end = start + limit
            if len(s) > end-4 and '\\' in s[end-4:end]:
                end -= 4 - s[end-4:end].find('\\')  # just before the backslash
                while s[end-1] == '\\':
                    end -= 1
                    if end == start:
                        # must have been a long line of backslashes
                        end = start + limit - (limit % 2) - 4
                        break
            chunks.append(s[start:end])
            start = end
        return '""'.join(chunks)
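
# Illustrative note (added commentary, not part of the original module):
#     split_string_literal('a' * 4001, limit=2000)
# yields 'a'*2000 + '""' + 'a'*2000 + '""' + 'a', adjacent C string literals
# that the compiler concatenates, sidestepping MSVC's length limit; the
# backslash scan above avoids splitting in the middle of an escape sequence.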
|
||||
def encode_pyunicode_string(s):
    """Create Py_UNICODE[] representation of a given unicode string.
    """
    s = list(map(ord, s)) + [0]

    if sys.maxunicode >= 0x10000:  # Wide build or Py3.3
        utf16, utf32 = [], s
        for code_point in s:
            if code_point >= 0x10000:  # outside of BMP
                high, low = divmod(code_point - 0x10000, 1024)
                utf16.append(high + 0xD800)
                utf16.append(low + 0xDC00)
            else:
                utf16.append(code_point)
    else:
        utf16, utf32 = s, []
        for code_unit in s:
            if 0xDC00 <= code_unit <= 0xDFFF and utf32 and 0xD800 <= utf32[-1] <= 0xDBFF:
                high, low = utf32[-1], code_unit
                utf32[-1] = ((high & 0x3FF) << 10) + (low & 0x3FF) + 0x10000
            else:
                utf32.append(code_unit)

    if utf16 == utf32:
        utf16 = []
    return ",".join(map(_unicode, utf16)), ",".join(map(_unicode, utf32))
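# Illustrative sketch (not part of the original module): the surrogate-pair
# arithmetic above, applied to U+1F600 (a code point outside the BMP).
#
#   high, low = divmod(0x1F600 - 0x10000, 1024)   # -> (0x3D, 0x200)
#   (high + 0xD800, low + 0xDC00)                 # -> (0xD83D, 0xDE00)
#
# and the reverse combination used in the narrow-build branch:
#
#   ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF) + 0x10000 == 0x1F600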
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Symtab.py: new file, 2552 lines (diff suppressed because it is too large)
@ -0,0 +1,105 @@
from Cython.TestUtils import CythonTest
import Cython.Compiler.Errors as Errors
from Cython.Compiler.Nodes import *
from Cython.Compiler.ParseTreeTransforms import *
from Cython.Compiler.Buffer import *


class TestBufferParsing(CythonTest):
    # First, we only test the raw parser, i.e.
    # the number and contents of arguments are NOT checked.
    # However "dtype"/the first positional argument is special-cased
    # to parse a type argument rather than an expression

    def parse(self, s):
        return self.should_not_fail(lambda: self.fragment(s)).root

    def not_parseable(self, expected_error, s):
        e = self.should_fail(lambda: self.fragment(s), Errors.CompileError)
        self.assertEqual(expected_error, e.message_only)

    def test_basic(self):
        t = self.parse(u"cdef object[float, 4, ndim=2, foo=foo] x")
        bufnode = t.stats[0].base_type
        self.assertTrue(isinstance(bufnode, TemplatedTypeNode))
        self.assertEqual(2, len(bufnode.positional_args))
        # print bufnode.dump()
        # should put more here...

    def test_type_pos(self):
        self.parse(u"cdef object[short unsigned int, 3] x")

    def test_type_keyword(self):
        self.parse(u"cdef object[foo=foo, dtype=short unsigned int] x")

    def test_pos_after_key(self):
        self.not_parseable("Non-keyword arg following keyword arg",
                           u"cdef object[foo=1, 2] x")


# See also tests/error/e_bufaccess.pyx and tests/run/bufaccess.pyx
# THESE TESTS ARE NOW DISABLED, the code they test was pretty much
# refactored away
class TestBufferOptions(CythonTest):
    # Tests the full parsing of the options within the brackets

    def nonfatal_error(self, error):
        # We're passing self as context to transform to trap this
        self.error = error
        self.assertTrue(self.expect_error)

    def parse_opts(self, opts, expect_error=False):
        assert opts != ""
        s = u"def f():\n cdef object[%s] x" % opts
        self.expect_error = expect_error
        root = self.fragment(s, pipeline=[NormalizeTree(self), PostParse(self)]).root
        if not expect_error:
            vardef = root.stats[0].body.stats[0]
            assert isinstance(vardef, CVarDefNode)  # use normal assert as this is to validate the test code
            buftype = vardef.base_type
            self.assertTrue(isinstance(buftype, TemplatedTypeNode))
            self.assertTrue(isinstance(buftype.base_type_node, CSimpleBaseTypeNode))
            self.assertEqual(u"object", buftype.base_type_node.name)
            return buftype
        else:
            self.assertTrue(len(root.stats[0].body.stats) == 0)

    def non_parse(self, expected_err, opts):
        self.parse_opts(opts, expect_error=True)
        # e = self.should_fail(lambda: self.parse_opts(opts))
        self.assertEqual(expected_err, self.error.message_only)

    def __test_basic(self):
        buf = self.parse_opts(u"unsigned short int, 3")
        self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode))
        self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1)
        self.assertEqual(3, buf.ndim)

    def __test_dict(self):
        buf = self.parse_opts(u"ndim=3, dtype=unsigned short int")
        self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode))
        self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1)
        self.assertEqual(3, buf.ndim)

    def __test_ndim(self):
        self.parse_opts(u"int, 2")
        self.non_parse(ERR_BUF_NDIM, u"int, 'a'")
        self.non_parse(ERR_BUF_NDIM, u"int, -34")

    def __test_use_DEF(self):
        t = self.fragment(u"""
        DEF ndim = 3
        def f():
            cdef object[int, ndim] x
            cdef object[ndim=ndim, dtype=int] y
        """, pipeline=[NormalizeTree(self), PostParse(self)]).root
        stats = t.stats[0].body.stats
        self.assertTrue(stats[0].base_type.ndim == 3)
        self.assertTrue(stats[1].base_type.ndim == 3)

    # add exotic and impossible combinations as they come along...

if __name__ == '__main__':
    import unittest
    unittest.main()
@ -0,0 +1,170 @@

import sys
import re
from unittest import TestCase
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO  # doesn't accept 'str' in Py2

from .. import Options
from ..CmdLine import parse_command_line


def check_global_options(expected_options, white_list=[]):
    """
    Returns an error message, or "" if the check passed OK.
    """
    no_value = object()
    for name, orig_value in expected_options.items():
        if name not in white_list:
            if getattr(Options, name, no_value) != orig_value:
                return "error in option " + name
    return ""


class CmdLineParserTest(TestCase):
    def setUp(self):
        backup = {}
        for name, value in vars(Options).items():
            backup[name] = value
        self._options_backup = backup

    def tearDown(self):
        no_value = object()
        for name, orig_value in self._options_backup.items():
            if getattr(Options, name, no_value) != orig_value:
                setattr(Options, name, orig_value)

    def check_default_global_options(self, white_list=[]):
        self.assertEqual(check_global_options(self._options_backup, white_list), "")

    def check_default_options(self, options, white_list=[]):
        from ..Main import CompilationOptions, default_options
        default_options = CompilationOptions(default_options)
        no_value = object()
        for name in default_options.__dict__.keys():
            if name not in white_list:
                self.assertEqual(getattr(options, name, no_value), getattr(default_options, name), msg="error in option " + name)

    def test_short_options(self):
        options, sources = parse_command_line([
            '-V', '-l', '-+', '-t', '-v', '-v', '-v', '-p', '-D', '-a', '-3',
        ])
        self.assertFalse(sources)
        self.assertTrue(options.show_version)
        self.assertTrue(options.use_listing_file)
        self.assertTrue(options.cplus)
        self.assertTrue(options.timestamps)
        self.assertTrue(options.verbose >= 3)
        self.assertTrue(Options.embed_pos_in_docstring)
        self.assertFalse(Options.docstrings)
        self.assertTrue(Options.annotate)
        self.assertEqual(options.language_level, 3)

        options, sources = parse_command_line([
            '-f', '-2', 'source.pyx',
        ])
        self.assertTrue(sources)
        self.assertTrue(len(sources) == 1)
        self.assertFalse(options.timestamps)
        self.assertEqual(options.language_level, 2)

    def test_long_options(self):
        options, sources = parse_command_line([
            '--version', '--create-listing', '--cplus', '--embed', '--timestamps',
            '--verbose', '--verbose', '--verbose',
            '--embed-positions', '--no-docstrings', '--annotate', '--lenient',
        ])
        self.assertFalse(sources)
        self.assertTrue(options.show_version)
        self.assertTrue(options.use_listing_file)
        self.assertTrue(options.cplus)
        self.assertEqual(Options.embed, 'main')
        self.assertTrue(options.timestamps)
        self.assertTrue(options.verbose >= 3)
        self.assertTrue(Options.embed_pos_in_docstring)
        self.assertFalse(Options.docstrings)
        self.assertTrue(Options.annotate)
        self.assertFalse(Options.error_on_unknown_names)
        self.assertFalse(Options.error_on_uninitialized)

        options, sources = parse_command_line([
            '--force', 'source.pyx',
        ])
        self.assertTrue(sources)
        self.assertTrue(len(sources) == 1)
        self.assertFalse(options.timestamps)

    def test_options_with_values(self):
        options, sources = parse_command_line([
            '--embed=huhu',
            '-I/test/include/dir1', '--include-dir=/test/include/dir2',
            '--include-dir', '/test/include/dir3',
            '--working=/work/dir',
            'source.pyx',
            '--output-file=/output/dir',
            '--pre-import=/pre/import',
            '--cleanup=3',
            '--annotate-coverage=cov.xml',
            '--gdb-outdir=/gdb/outdir',
            '--directive=wraparound=false',
        ])
        self.assertEqual(sources, ['source.pyx'])
        self.assertEqual(Options.embed, 'huhu')
        self.assertEqual(options.include_path, ['/test/include/dir1', '/test/include/dir2', '/test/include/dir3'])
        self.assertEqual(options.working_path, '/work/dir')
        self.assertEqual(options.output_file, '/output/dir')
        self.assertEqual(Options.pre_import, '/pre/import')
        self.assertEqual(Options.generate_cleanup_code, 3)
        self.assertTrue(Options.annotate)
        self.assertEqual(Options.annotate_coverage_xml, 'cov.xml')
        self.assertTrue(options.gdb_debug)
        self.assertEqual(options.output_dir, '/gdb/outdir')

    def test_module_name(self):
        options, sources = parse_command_line([
            'source.pyx'
        ])
        self.assertEqual(options.module_name, None)
        self.check_default_global_options()
        self.check_default_options(options)
        options, sources = parse_command_line([
            '--module-name', 'foo.bar',
            'source.pyx'
        ])
        self.assertEqual(options.module_name, 'foo.bar')
        self.check_default_global_options()
        self.check_default_options(options, ['module_name'])

    def test_errors(self):
        def error(args, regex=None):
            old_stderr = sys.stderr
            stderr = sys.stderr = StringIO()
            try:
                self.assertRaises(SystemExit, parse_command_line, list(args))
            finally:
                sys.stderr = old_stderr
            msg = stderr.getvalue().strip()
            self.assertTrue(msg)
            if regex:
                self.assertTrue(re.search(regex, msg),
                                '"%s" does not match search "%s"' %
                                (msg, regex))

        error(['-1'],
              'Unknown compiler flag: -1')
        error(['-I'])
        error(['--version=-a'])
        error(['--version=--annotate=true'])
        error(['--working'])
        error(['--verbose=1'])
        error(['--cleanup'])
        error(['--debug-disposal-code-wrong-name', 'file3.pyx'],
              "Unknown debug flag: debug_disposal_code_wrong_name")
        error(['--module-name', 'foo.pyx'])
        error(['--module-name', 'foo.bar'])
        error(['--module-name', 'foo.bar', 'foo.pyx', 'bar.pyx'],
              "Only one source file allowed when using --module-name")
        error(['--module-name', 'foo.bar', '--timestamps', 'foo.pyx'],
              "Cannot use --module-name with --timestamps")
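# Illustrative sketch (not part of the original test file): the behaviour
# the tests above pin down, in a single call.
#
#   options, sources = parse_command_line(['--embed', '-3', 'source.pyx'])
#   # sources == ['source.pyx'], options.language_level == 3, and the
#   # module-global Options.embed is set to 'main' as a side effect;
#   # that is why setUp()/tearDown() snapshot and restore vars(Options).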
@ -0,0 +1,68 @@

from __future__ import absolute_import

from copy import deepcopy
from unittest import TestCase

from Cython.Compiler.FlowControl import (
    NameAssignment, StaticAssignment, Argument, NameDeletion)


class FakeType(object):
    is_pyobject = True


class FakeNode(object):
    pos = ('filename.pyx', 1, 2)
    cf_state = None
    type = FakeType()

    def infer_type(self, scope):
        return self.type


class FakeEntry(object):
    type = FakeType()


class TestGraph(TestCase):
    def test_deepcopy(self):
        lhs, rhs = FakeNode(), FakeNode()
        entry = FakeEntry()
        entry.pos = lhs.pos

        name_ass = NameAssignment(lhs, rhs, entry)
        ass = deepcopy(name_ass)
        self.assertTrue(ass.lhs)
        self.assertTrue(ass.rhs)
        self.assertTrue(ass.entry)
        self.assertEqual(ass.pos, name_ass.pos)
        self.assertFalse(ass.is_arg)
        self.assertFalse(ass.is_deletion)

        static_ass = StaticAssignment(entry)
        ass = deepcopy(static_ass)
        self.assertTrue(ass.lhs)
        self.assertTrue(ass.rhs)
        self.assertTrue(ass.entry)
        self.assertEqual(ass.pos, static_ass.pos)
        self.assertFalse(ass.is_arg)
        self.assertFalse(ass.is_deletion)

        arg_ass = Argument(lhs, rhs, entry)
        ass = deepcopy(arg_ass)
        self.assertTrue(ass.lhs)
        self.assertTrue(ass.rhs)
        self.assertTrue(ass.entry)
        self.assertEqual(ass.pos, arg_ass.pos)
        self.assertTrue(ass.is_arg)
        self.assertFalse(ass.is_deletion)

        name_del = NameDeletion(lhs, entry)
        ass = deepcopy(name_del)
        self.assertTrue(ass.lhs)
        self.assertTrue(ass.rhs)
        self.assertTrue(ass.entry)
        self.assertEqual(ass.pos, name_del.pos)
        self.assertFalse(ass.is_arg)
        self.assertTrue(ass.is_deletion)
@ -0,0 +1,129 @@
# mode: run
# tag: syntax

"""
Uses TreeFragment to test invalid syntax.
"""

from __future__ import absolute_import

from ...TestUtils import CythonTest
from ..Errors import CompileError
from .. import ExprNodes

# Copied from CPython's test_grammar.py
VALID_UNDERSCORE_LITERALS = [
    '0_0_0',
    '4_2',
    '1_0000_0000',
    '0b1001_0100',
    '0xffff_ffff',
    '0o5_7_7',
    '1_00_00.5',
    '1_00_00.5j',
    '1_00_00.5e5',
    '1_00_00j',
    '1_00_00e5_1',
    '1e1_0',
    '.1_4',
    '.1_4e1',
    '.1_4j',
]

# Copied from CPython's test_grammar.py
INVALID_UNDERSCORE_LITERALS = [
    # Trailing underscores:
    '0_',
    '42_',
    '1.4j_',
    '0b1_',
    '0xf_',
    '0o5_',
    # Underscores in the base selector:
    '0_b0',
    '0_xf',
    '0_o5',
    # Underscore right after the base selector:
    '0b_0',
    '0x_f',
    '0o_5',
    # Old-style octal, still disallowed:
    #'0_7',
    #'09_99',
    # Special case with exponent:
    '0 if 1_Else 1',
    # Underscore right before a dot:
    '1_.4',
    '1_.4j',
    # Underscore right after a dot:
    '1._4',
    '1._4j',
    '._5',
    # Underscore right after a sign:
    '1.0e+_1',
    # Multiple consecutive underscores:
    '4_______2',
    '0.1__4',
    '0b1001__0100',
    '0xffff__ffff',
    '0o5__77',
    '1e1__0',
    # Underscore right before j:
    '1.4_j',
    '1.4e5_j',
    # Underscore right before e:
    '1_e1',
    '1.4_e1',
    # Underscore right after e:
    '1e_1',
    '1.4e_1',
    # Whitespace in literals
    '1_ 2',
    '1 _2',
    '1_2.2_ 1',
    '1_2.2 _1',
    '1_2e _1',
    '1_2e2 _1',
    '1_2e 2_1',
]


class TestGrammar(CythonTest):

    def test_invalid_number_literals(self):
        for literal in INVALID_UNDERSCORE_LITERALS:
            for expression in ['%s', '1 + %s', '%s + 1', '2 * %s', '%s * 2']:
                code = 'x = ' + expression % literal
                try:
                    self.fragment(u'''\
                    # cython: language_level=3
                    ''' + code)
                except CompileError as exc:
                    assert code in [s.strip() for s in str(exc).splitlines()], str(exc)
                else:
                    assert False, "Invalid Cython code '%s' failed to raise an exception" % code

    def test_valid_number_literals(self):
        for literal in VALID_UNDERSCORE_LITERALS:
            for i, expression in enumerate(['%s', '1 + %s', '%s + 1', '2 * %s', '%s * 2']):
                code = 'x = ' + expression % literal
                node = self.fragment(u'''\
                    # cython: language_level=3
                    ''' + code).root
                assert node is not None

                literal_node = node.stats[0].rhs  # StatListNode([SingleAssignmentNode('x', expr)])
                if i > 0:
                    # Add/MulNode() -> literal is first or second operand
                    literal_node = literal_node.operand2 if i % 2 else literal_node.operand1
                if 'j' in literal or 'J' in literal:
                    assert isinstance(literal_node, ExprNodes.ImagNode)
                elif '.' in literal or 'e' in literal or 'E' in literal and not ('0x' in literal or '0X' in literal):
                    assert isinstance(literal_node, ExprNodes.FloatNode)
                else:
                    assert isinstance(literal_node, ExprNodes.IntNode)


if __name__ == "__main__":
    import unittest
    unittest.main()
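# Illustrative note (not part of the original test file): how the operand
# selection in test_valid_number_literals lines up with the expressions.
#
#   i=1 -> '1 + %s'  -> literal is operand2 (i % 2 == 1)
#   i=2 -> '%s + 1'  -> literal is operand1 (i % 2 == 0)
#   i=3 -> '2 * %s'  -> literal is operand2
#   i=4 -> '%s * 2'  -> literal is operand1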
@ -0,0 +1,71 @@
from Cython.TestUtils import CythonTest
import Cython.Compiler.Errors as Errors
from Cython.Compiler.Nodes import *
from Cython.Compiler.ParseTreeTransforms import *
from Cython.Compiler.Buffer import *


class TestMemviewParsing(CythonTest):

    def parse(self, s):
        return self.should_not_fail(lambda: self.fragment(s)).root

    def not_parseable(self, expected_error, s):
        e = self.should_fail(lambda: self.fragment(s), Errors.CompileError)
        self.assertEqual(expected_error, e.message_only)

    def test_default_1dim(self):
        self.parse(u"cdef int[:] x")
        self.parse(u"cdef short int[:] x")

    def test_default_ndim(self):
        self.parse(u"cdef int[:,:,:,:,:] x")
        self.parse(u"cdef unsigned long int[:,:,:,:,:] x")
        self.parse(u"cdef unsigned int[:,:,:,:,:] x")

    def test_zero_offset(self):
        self.parse(u"cdef long double[0:] x")
        self.parse(u"cdef int[0:] x")

    def test_zero_offset_ndim(self):
        self.parse(u"cdef int[0:,0:,0:,0:] x")

    def test_def_arg(self):
        self.parse(u"def foo(int[:,:] x): pass")

    def test_cdef_arg(self):
        self.parse(u"cdef foo(int[:,:] x): pass")

    def test_general_slice(self):
        self.parse(u'cdef float[::ptr, ::direct & contig, 0::full & strided] x')

    def test_non_slice_memview(self):
        self.not_parseable(u"An axis specification in memoryview declaration does not have a ':'.",
                           u"cdef double[:foo, bar] x")
        self.not_parseable(u"An axis specification in memoryview declaration does not have a ':'.",
                           u"cdef double[0:foo, bar] x")

    def test_basic(self):
        t = self.parse(u"cdef int[:] x")
        memv_node = t.stats[0].base_type
        self.assertTrue(isinstance(memv_node, MemoryViewSliceTypeNode))

    # we also test other similar declarations (buffers, anonymous C arrays)
    # since the parsing has to distinguish between them.

    def disable_test_no_buf_arg(self):  # TODO
        self.not_parseable(u"Expected ']'",
                           u"cdef extern foo(object[int, ndim=2])")

    def disable_test_parse_sizeof(self):  # TODO
        self.parse(u"sizeof(int[NN])")
        self.parse(u"sizeof(int[])")
        self.parse(u"sizeof(int[][NN])")
        self.not_parseable(u"Expected an identifier or literal",
                           u"sizeof(int[:NN])")
        self.not_parseable(u"Expected ']'",
                           u"sizeof(foo[dtype=bar]")

if __name__ == '__main__':
    import unittest
    unittest.main()
@ -0,0 +1,289 @@
import os.path
import unittest

from Cython.TestUtils import TransformTest
from Cython.Compiler.ParseTreeTransforms import *
from Cython.Compiler.ParseTreeTransforms import _calculate_pickle_checksums
from Cython.Compiler.Nodes import *
from Cython.Compiler import Main, Symtab
from Cython.Compiler import DebugFlags  # explicit import; used in setUp/tearDown below


class TestNormalizeTree(TransformTest):
    def test_parserbehaviour_is_what_we_coded_for(self):
        t = self.fragment(u"if x: y").root
        self.assertLines(u"""
(root): StatListNode
  stats[0]: IfStatNode
    if_clauses[0]: IfClauseNode
      condition: NameNode
      body: ExprStatNode
        expr: NameNode
""", self.treetypes(t))

    def test_wrap_singlestat(self):
        t = self.run_pipeline([NormalizeTree(None)], u"if x: y")
        self.assertLines(u"""
(root): StatListNode
  stats[0]: IfStatNode
    if_clauses[0]: IfClauseNode
      condition: NameNode
      body: StatListNode
        stats[0]: ExprStatNode
          expr: NameNode
""", self.treetypes(t))

    def test_wrap_multistat(self):
        t = self.run_pipeline([NormalizeTree(None)], u"""
            if z:
                x
                y
        """)
        self.assertLines(u"""
(root): StatListNode
  stats[0]: IfStatNode
    if_clauses[0]: IfClauseNode
      condition: NameNode
      body: StatListNode
        stats[0]: ExprStatNode
          expr: NameNode
        stats[1]: ExprStatNode
          expr: NameNode
""", self.treetypes(t))

    def test_statinexpr(self):
        t = self.run_pipeline([NormalizeTree(None)], u"""
            a, b = x, y
        """)
        self.assertLines(u"""
(root): StatListNode
  stats[0]: SingleAssignmentNode
    lhs: TupleNode
      args[0]: NameNode
      args[1]: NameNode
    rhs: TupleNode
      args[0]: NameNode
      args[1]: NameNode
""", self.treetypes(t))

    def test_wrap_offagain(self):
        t = self.run_pipeline([NormalizeTree(None)], u"""
            x
            y
            if z:
                x
        """)
        self.assertLines(u"""
(root): StatListNode
  stats[0]: ExprStatNode
    expr: NameNode
  stats[1]: ExprStatNode
    expr: NameNode
  stats[2]: IfStatNode
    if_clauses[0]: IfClauseNode
      condition: NameNode
      body: StatListNode
        stats[0]: ExprStatNode
          expr: NameNode
""", self.treetypes(t))


    def test_pass_eliminated(self):
        t = self.run_pipeline([NormalizeTree(None)], u"pass")
        self.assertTrue(len(t.stats) == 0)

class TestWithTransform(object):  # (TransformTest): # Disabled!

    def test_simplified(self):
        t = self.run_pipeline([WithTransform(None)], u"""
        with x:
            y = z ** 3
        """)

        self.assertCode(u"""

        $0_0 = x
        $0_2 = $0_0.__exit__
        $0_0.__enter__()
        $0_1 = True
        try:
            try:
                $1_0 = None
                y = z ** 3
            except:
                $0_1 = False
                if (not $0_2($1_0)):
                    raise
        finally:
            if $0_1:
                $0_2(None, None, None)

        """, t)

    def test_basic(self):
        t = self.run_pipeline([WithTransform(None)], u"""
        with x as y:
            y = z ** 3
        """)
        self.assertCode(u"""

        $0_0 = x
        $0_2 = $0_0.__exit__
        $0_3 = $0_0.__enter__()
        $0_1 = True
        try:
            try:
                $1_0 = None
                y = $0_3
                y = z ** 3
            except:
                $0_1 = False
                if (not $0_2($1_0)):
                    raise
        finally:
            if $0_1:
                $0_2(None, None, None)

        """, t)


class TestInterpretCompilerDirectives(TransformTest):
    """
    This class tests the parallel directives AST-rewriting and importing.
    """

    # Test the parallel directives (c)importing

    import_code = u"""
        cimport cython.parallel
        cimport cython.parallel as par
        from cython cimport parallel as par2
        from cython cimport parallel

        from cython.parallel cimport threadid as tid
        from cython.parallel cimport threadavailable as tavail
        from cython.parallel cimport prange
    """

    expected_directives_dict = {
        u'cython.parallel': u'cython.parallel',
        u'par': u'cython.parallel',
        u'par2': u'cython.parallel',
        u'parallel': u'cython.parallel',

        u"tid": u"cython.parallel.threadid",
        u"tavail": u"cython.parallel.threadavailable",
        u"prange": u"cython.parallel.prange",
    }


    def setUp(self):
        super(TestInterpretCompilerDirectives, self).setUp()

        compilation_options = Main.CompilationOptions(Main.default_options)
        ctx = compilation_options.create_context()

        transform = InterpretCompilerDirectives(ctx, ctx.compiler_directives)
        transform.module_scope = Symtab.ModuleScope('__main__', None, ctx)
        self.pipeline = [transform]

        self.debug_exception_on_error = DebugFlags.debug_exception_on_error

    def tearDown(self):
        DebugFlags.debug_exception_on_error = self.debug_exception_on_error

    def test_parallel_directives_cimports(self):
        self.run_pipeline(self.pipeline, self.import_code)
        parallel_directives = self.pipeline[0].parallel_directives
        self.assertEqual(parallel_directives, self.expected_directives_dict)

    def test_parallel_directives_imports(self):
        self.run_pipeline(self.pipeline,
                          self.import_code.replace(u'cimport', u'import'))
        parallel_directives = self.pipeline[0].parallel_directives
        self.assertEqual(parallel_directives, self.expected_directives_dict)


# TODO: Re-enable once they're more robust.
if False:
    from Cython.Debugger import DebugWriter
    from Cython.Debugger.Tests.TestLibCython import DebuggerTestCase
else:
    # skip test, don't let it inherit unittest.TestCase
    DebuggerTestCase = object


class TestDebugTransform(DebuggerTestCase):

    def elem_hasattrs(self, elem, attrs):
        return all(attr in elem.attrib for attr in attrs)

    def test_debug_info(self):
        try:
            assert os.path.exists(self.debug_dest)

            t = DebugWriter.etree.parse(self.debug_dest)
            # the xpath of the standard ElementTree is primitive, don't use
            # anything fancy
            L = list(t.find('/Module/Globals'))
            assert L
            xml_globals = dict((e.attrib['name'], e.attrib['type']) for e in L)
            self.assertEqual(len(L), len(xml_globals))

            L = list(t.find('/Module/Functions'))
            assert L
            xml_funcs = dict((e.attrib['qualified_name'], e) for e in L)
            self.assertEqual(len(L), len(xml_funcs))

            # test globals
            self.assertEqual('CObject', xml_globals.get('c_var'))
            self.assertEqual('PythonObject', xml_globals.get('python_var'))

            # test functions
            funcnames = ('codefile.spam', 'codefile.ham', 'codefile.eggs',
                         'codefile.closure', 'codefile.inner')
            required_xml_attrs = 'name', 'cname', 'qualified_name'
            assert all(f in xml_funcs for f in funcnames)
            spam, ham, eggs = [xml_funcs[funcname] for funcname in funcnames[:3]]

            self.assertEqual(spam.attrib['name'], 'spam')
            self.assertNotEqual('spam', spam.attrib['cname'])
            assert self.elem_hasattrs(spam, required_xml_attrs)

            # test locals of functions
            spam_locals = list(spam.find('Locals'))
            assert spam_locals
            spam_locals.sort(key=lambda e: e.attrib['name'])
            names = [e.attrib['name'] for e in spam_locals]
            self.assertEqual(list('abcd'), names)
            assert self.elem_hasattrs(spam_locals[0], required_xml_attrs)

            # test arguments of functions
            spam_arguments = list(spam.find('Arguments'))
            assert spam_arguments
            self.assertEqual(1, len(list(spam_arguments)))

            # test step-into functions
            step_into = spam.find('StepIntoFunctions')
            spam_stepinto = [x.attrib['name'] for x in step_into]
            assert spam_stepinto
            self.assertEqual(2, len(spam_stepinto))
            assert 'puts' in spam_stepinto
            assert 'some_c_function' in spam_stepinto
        except:
            f = open(self.debug_dest)
            try:
                print(f.read())
            finally:
                f.close()
            raise


class TestAnalyseDeclarationsTransform(unittest.TestCase):
    def test_calculate_pickle_checksums(self):
        checksums = _calculate_pickle_checksums(['member1', 'member2', 'member3'])
        assert 2 <= len(checksums) <= 3, checksums  # expecting ['0xc0af380' (MD5), '0x0c75bd4', '0xa7a7b94']


if __name__ == "__main__":
    import unittest
    unittest.main()
@ -0,0 +1,73 @@
import unittest

from Cython.Compiler import PyrexTypes as pt
from Cython.Compiler.ExprNodes import NameNode
from Cython.Compiler.PyrexTypes import CFuncTypeArg

def cfunctype(*arg_types):
    return pt.CFuncType(pt.c_int_type,
        [ CFuncTypeArg("name", arg_type, None) for arg_type in arg_types ])

def cppclasstype(name, base_classes):
    return pt.CppClassType(name, None, 'CPP_'+name, base_classes)

class SignatureMatcherTest(unittest.TestCase):
    """
    Test the signature matching algorithm for overloaded signatures.
    """
    def assertMatches(self, expected_type, arg_types, functions):
        match = pt.best_match(arg_types, functions)
        if expected_type is not None:
            self.assertNotEqual(None, match)
        self.assertEqual(expected_type, match.type)

    def test_cpp_reference_single_arg(self):
        function_types = [
            cfunctype(pt.CReferenceType(pt.c_int_type)),
            cfunctype(pt.CReferenceType(pt.c_long_type)),
            cfunctype(pt.CReferenceType(pt.c_double_type)),
        ]

        functions = [ NameNode(None, type=t) for t in function_types ]
        self.assertMatches(function_types[0], [pt.c_int_type], functions)
        self.assertMatches(function_types[1], [pt.c_long_type], functions)
        self.assertMatches(function_types[2], [pt.c_double_type], functions)

    def test_cpp_reference_two_args(self):
        function_types = [
            cfunctype(
                pt.CReferenceType(pt.c_int_type), pt.CReferenceType(pt.c_long_type)),
            cfunctype(
                pt.CReferenceType(pt.c_long_type), pt.CReferenceType(pt.c_long_type)),
        ]

        functions = [ NameNode(None, type=t) for t in function_types ]
        self.assertMatches(function_types[0], [pt.c_int_type, pt.c_long_type], functions)
        self.assertMatches(function_types[1], [pt.c_long_type, pt.c_long_type], functions)
        self.assertMatches(function_types[1], [pt.c_long_type, pt.c_int_type], functions)

    def test_cpp_reference_cpp_class(self):
        classes = [ cppclasstype("Test%d" % i, []) for i in range(2) ]
        function_types = [
            cfunctype(pt.CReferenceType(classes[0])),
            cfunctype(pt.CReferenceType(classes[1])),
        ]

        functions = [ NameNode(None, type=t) for t in function_types ]
        self.assertMatches(function_types[0], [classes[0]], functions)
        self.assertMatches(function_types[1], [classes[1]], functions)

    def test_cpp_reference_cpp_class_and_int(self):
        classes = [ cppclasstype("Test%d" % i, []) for i in range(2) ]
        function_types = [
            cfunctype(pt.CReferenceType(classes[0]), pt.c_int_type),
            cfunctype(pt.CReferenceType(classes[0]), pt.c_long_type),
            cfunctype(pt.CReferenceType(classes[1]), pt.c_int_type),
            cfunctype(pt.CReferenceType(classes[1]), pt.c_long_type),
        ]

        functions = [ NameNode(None, type=t) for t in function_types ]
        self.assertMatches(function_types[0], [classes[0], pt.c_int_type], functions)
        self.assertMatches(function_types[1], [classes[0], pt.c_long_type], functions)
        self.assertMatches(function_types[2], [classes[1], pt.c_int_type], functions)
        self.assertMatches(function_types[3], [classes[1], pt.c_long_type], functions)
@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-

import sys
import unittest

import Cython.Compiler.StringEncoding as StringEncoding


class StringEncodingTest(unittest.TestCase):
    """
    Test the StringEncoding module.
    """
    def test_string_contains_lone_surrogates(self):
        self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"abc"))
        self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\uABCD"))
        self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\N{SNOWMAN}"))

        # This behaves differently in Py2 when freshly parsed and read from a .pyc file,
        # but it seems to be a marshalling bug in Py2, which doesn't hurt us in Cython.
        if sys.version_info[0] != 2:
            self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800\uDFFF"))

        # In Py2 with 16-bit Unicode, the following is indistinguishable from the 32-bit character.
        obfuscated_surrogate_pair = (u"\uDFFF" + "\uD800")[::-1]
        if sys.version_info[0] == 2 and sys.maxunicode == 65535:
            self.assertFalse(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair))
        else:
            self.assertTrue(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair))

        self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800"))
        self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF"))
        self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF\uD800"))
        self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800x\uDFFF"))

    def test_string_contains_surrogates(self):
        self.assertFalse(StringEncoding.string_contains_surrogates(u"abc"))
        self.assertFalse(StringEncoding.string_contains_surrogates(u"\uABCD"))
        self.assertFalse(StringEncoding.string_contains_surrogates(u"\N{SNOWMAN}"))

        self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800"))
        self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF"))
        self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800\uDFFF"))
        self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF\uD800"))
        self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800x\uDFFF"))
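# Illustrative note (not part of the original test file): why the pair above
# is "obfuscated". Reversing u"\uDFFF\uD800" yields u"\uD800\uDFFF", a high
# surrogate followed by a low one. On a narrow (16-bit) Python 2 build this
# is exactly how the single code point U+103FF is stored, so it cannot be
# told apart from a real character; on wide builds it remains two lone
# surrogates and is reported as such.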
@ -0,0 +1,64 @@
from Cython.TestUtils import CythonTest
from Cython.Compiler.TreeFragment import *
from Cython.Compiler.Nodes import *
from Cython.Compiler.UtilNodes import *
import Cython.Compiler.Naming as Naming

class TestTreeFragments(CythonTest):

    def test_basic(self):
        F = self.fragment(u"x = 4")
        T = F.copy()
        self.assertCode(u"x = 4", T)

    def test_copy_is_taken(self):
        F = self.fragment(u"if True: x = 4")
        T1 = F.root
        T2 = F.copy()
        self.assertEqual("x", T2.stats[0].if_clauses[0].body.lhs.name)
        T2.stats[0].if_clauses[0].body.lhs.name = "other"
        self.assertEqual("x", T1.stats[0].if_clauses[0].body.lhs.name)

    def test_substitutions_are_copied(self):
        T = self.fragment(u"y + y").substitute({"y": NameNode(pos=None, name="x")})
        self.assertEqual("x", T.stats[0].expr.operand1.name)
        self.assertEqual("x", T.stats[0].expr.operand2.name)
        self.assertTrue(T.stats[0].expr.operand1 is not T.stats[0].expr.operand2)

    def test_substitution(self):
        F = self.fragment(u"x = 4")
        y = NameNode(pos=None, name=u"y")
        T = F.substitute({"x" : y})
        self.assertCode(u"y = 4", T)

    def test_exprstat(self):
        F = self.fragment(u"PASS")
        pass_stat = PassStatNode(pos=None)
        T = F.substitute({"PASS" : pass_stat})
        self.assertTrue(isinstance(T.stats[0], PassStatNode), T)

    def test_pos_is_transferred(self):
        F = self.fragment(u"""
        x = y
        x = u * v ** w
        """)
        T = F.substitute({"v" : NameNode(pos=None, name="a")})
        v = F.root.stats[1].rhs.operand2.operand1
        a = T.stats[1].rhs.operand2.operand1
        self.assertEqual(v.pos, a.pos)

    def test_temps(self):
        TemplateTransform.temp_name_counter = 0
        F = self.fragment(u"""
            TMP
            x = TMP
        """)
        T = F.substitute(temps=[u"TMP"])
        s = T.body.stats
        self.assertTrue(isinstance(s[0].expr, TempRefNode))
        self.assertTrue(isinstance(s[1].rhs, TempRefNode))
        self.assertTrue(s[0].expr.handle is s[1].rhs.handle)

if __name__ == "__main__":
    import unittest
    unittest.main()
@ -0,0 +1,94 @@
import unittest
from Cython.Compiler.Visitor import PrintTree
from Cython.TestUtils import TransformTest
from Cython.Compiler.TreePath import find_first, find_all
from Cython.Compiler import Nodes, ExprNodes

class TestTreePath(TransformTest):
    _tree = None

    def _build_tree(self):
        if self._tree is None:
            self._tree = self.run_pipeline([], u"""
            def decorator(fun):  # DefNode
                return fun       # ReturnStatNode, NameNode
            @decorator           # NameNode
            def decorated():     # DefNode
                pass
            """)
        return self._tree

    def test_node_path(self):
        t = self._build_tree()
        self.assertEqual(2, len(find_all(t, "//DefNode")))
        self.assertEqual(2, len(find_all(t, "//NameNode")))
        self.assertEqual(1, len(find_all(t, "//ReturnStatNode")))
        self.assertEqual(1, len(find_all(t, "//DefNode//ReturnStatNode")))

    def test_node_path_star(self):
        t = self._build_tree()
        self.assertEqual(10, len(find_all(t, "//*")))
        self.assertEqual(8, len(find_all(t, "//DefNode//*")))
        self.assertEqual(0, len(find_all(t, "//NameNode//*")))

    def test_node_path_attribute(self):
        t = self._build_tree()
        self.assertEqual(2, len(find_all(t, "//NameNode/@name")))
        self.assertEqual(['fun', 'decorator'], find_all(t, "//NameNode/@name"))

    def test_node_path_attribute_dotted(self):
        t = self._build_tree()
        self.assertEqual(1, len(find_all(t, "//ReturnStatNode/@value.name")))
        self.assertEqual(['fun'], find_all(t, "//ReturnStatNode/@value.name"))

    def test_node_path_child(self):
        t = self._build_tree()
        self.assertEqual(1, len(find_all(t, "//DefNode/ReturnStatNode/NameNode")))
        self.assertEqual(1, len(find_all(t, "//ReturnStatNode/NameNode")))

    def test_node_path_node_predicate(self):
        t = self._build_tree()
        self.assertEqual(0, len(find_all(t, "//DefNode[.//ForInStatNode]")))
        self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode]")))
        self.assertEqual(1, len(find_all(t, "//ReturnStatNode[./NameNode]")))
        self.assertEqual(Nodes.ReturnStatNode,
                         type(find_first(t, "//ReturnStatNode[./NameNode]")))

    def test_node_path_node_predicate_step(self):
        t = self._build_tree()
        self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode]")))
        self.assertEqual(8, len(find_all(t, "//DefNode[.//NameNode]//*")))
        self.assertEqual(1, len(find_all(t, "//DefNode[.//NameNode]//ReturnStatNode")))
        self.assertEqual(Nodes.ReturnStatNode,
                         type(find_first(t, "//DefNode[.//NameNode]//ReturnStatNode")))

    def test_node_path_attribute_exists(self):
        t = self._build_tree()
        self.assertEqual(2, len(find_all(t, "//NameNode[@name]")))
        self.assertEqual(ExprNodes.NameNode,
                         type(find_first(t, "//NameNode[@name]")))

    def test_node_path_attribute_exists_not(self):
        t = self._build_tree()
        self.assertEqual(0, len(find_all(t, "//NameNode[not(@name)]")))
        self.assertEqual(2, len(find_all(t, "//NameNode[not(@honking)]")))

    def test_node_path_and(self):
        t = self._build_tree()
        self.assertEqual(1, len(find_all(t, "//DefNode[.//ReturnStatNode and .//NameNode]")))
        self.assertEqual(0, len(find_all(t, "//NameNode[@honking and @name]")))
        self.assertEqual(0, len(find_all(t, "//NameNode[@name and @honking]")))
        self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode[@name] and @name]")))

    def test_node_path_attribute_string_predicate(self):
        t = self._build_tree()
        self.assertEqual(1, len(find_all(t, "//NameNode[@name = 'decorator']")))

    def test_node_path_recursive_predicate(self):
        t = self._build_tree()
        self.assertEqual(2, len(find_all(t, "//DefNode[.//NameNode[@name]]")))
        self.assertEqual(1, len(find_all(t, "//DefNode[.//NameNode[@name = 'decorator']]")))
        self.assertEqual(1, len(find_all(t, "//DefNode[.//ReturnStatNode[./NameNode[@name = 'fun']]/NameNode]")))

if __name__ == '__main__':
    unittest.main()
@ -0,0 +1,19 @@
from __future__ import absolute_import

import unittest

import Cython.Compiler.PyrexTypes as PT


class TestWidestNumericType(unittest.TestCase):

    def test_widest_numeric_type(self):
        def assert_widest(type1, type2, widest):
            self.assertEqual(widest, PT.widest_numeric_type(type1, type2))

        assert_widest(PT.c_int_type, PT.c_long_type, PT.c_long_type)
        assert_widest(PT.c_double_type, PT.c_long_type, PT.c_double_type)
        assert_widest(PT.c_longdouble_type, PT.c_long_type, PT.c_longdouble_type)

        cenum = PT.CEnumType("E", "cenum", typedef_flag=False)
        assert_widest(PT.c_int_type, cenum, PT.c_int_type)
@ -0,0 +1,101 @@
import unittest

from Cython.Compiler import Code, UtilityCode


def strip_2tup(tup):
    return tup[0] and tup[0].strip(), tup[1] and tup[1].strip()

class TestUtilityLoader(unittest.TestCase):
    """
    Test loading UtilityCodes
    """

    expected = "test {{loader}} prototype", "test {{loader}} impl"

    required = "req {{loader}} proto", "req {{loader}} impl"

    context = dict(loader='Loader')

    name = "TestUtilityLoader"
    filename = "TestUtilityLoader.c"
    cls = Code.UtilityCode

    def test_load_as_string(self):
        got = strip_2tup(self.cls.load_as_string(self.name))
        self.assertEqual(got, self.expected)

        got = strip_2tup(self.cls.load_as_string(self.name, self.filename))
        self.assertEqual(got, self.expected)

    def test_load(self):
        utility = self.cls.load(self.name)
        got = strip_2tup((utility.proto, utility.impl))
        self.assertEqual(got, self.expected)

        required, = utility.requires
        got = strip_2tup((required.proto, required.impl))
        self.assertEqual(got, self.required)

        utility = self.cls.load(self.name, from_file=self.filename)
        got = strip_2tup((utility.proto, utility.impl))
        self.assertEqual(got, self.expected)

        utility = self.cls.load_cached(self.name, from_file=self.filename)
        got = strip_2tup((utility.proto, utility.impl))
        self.assertEqual(got, self.expected)


class TestTempitaUtilityLoader(TestUtilityLoader):
    """
    Test loading UtilityCodes with Tempita substitution
    """
    expected_tempita = (TestUtilityLoader.expected[0].replace('{{loader}}', 'Loader'),
                        TestUtilityLoader.expected[1].replace('{{loader}}', 'Loader'))

    required_tempita = (TestUtilityLoader.required[0].replace('{{loader}}', 'Loader'),
                        TestUtilityLoader.required[1].replace('{{loader}}', 'Loader'))

    cls = Code.TempitaUtilityCode

    def test_load_as_string(self):
        got = strip_2tup(self.cls.load_as_string(self.name, context=self.context))
        self.assertEqual(got, self.expected_tempita)

    def test_load(self):
        utility = self.cls.load(self.name, context=self.context)
        got = strip_2tup((utility.proto, utility.impl))
        self.assertEqual(got, self.expected_tempita)

        required, = utility.requires
        got = strip_2tup((required.proto, required.impl))
        self.assertEqual(got, self.required_tempita)

        utility = self.cls.load(self.name, from_file=self.filename, context=self.context)
        got = strip_2tup((utility.proto, utility.impl))
        self.assertEqual(got, self.expected_tempita)


class TestCythonUtilityLoader(TestTempitaUtilityLoader):
    """
    Test loading CythonUtilityCodes
    """

    # Just change the attributes and run the same tests
    expected = None, "test {{cy_loader}} impl"
    expected_tempita = None, "test CyLoader impl"

    required = None, "req {{cy_loader}} impl"
    required_tempita = None, "req CyLoader impl"

    context = dict(cy_loader='CyLoader')

    name = "TestCyUtilityLoader"
    filename = "TestCyUtilityLoader.pyx"
    cls = UtilityCode.CythonUtilityCode

    # Small hack to pass our tests above
    cls.proto = None

    test_load = TestUtilityLoader.test_load
    test_load_tempita = TestTempitaUtilityLoader.test_load
@ -0,0 +1,61 @@
from Cython.Compiler.ModuleNode import ModuleNode
from Cython.Compiler.Symtab import ModuleScope
from Cython.TestUtils import TransformTest
from Cython.Compiler.Visitor import MethodDispatcherTransform
from Cython.Compiler.ParseTreeTransforms import (
    NormalizeTree, AnalyseDeclarationsTransform,
    AnalyseExpressionsTransform, InterpretCompilerDirectives)


class TestMethodDispatcherTransform(TransformTest):
    _tree = None

    def _build_tree(self):
        if self._tree is None:
            context = None

            def fake_module(node):
                scope = ModuleScope('test', None, None)
                return ModuleNode(node.pos, doc=None, body=node,
                                  scope=scope, full_module_name='test',
                                  directive_comments={})
            pipeline = [
                fake_module,
                NormalizeTree(context),
                InterpretCompilerDirectives(context, {}),
                AnalyseDeclarationsTransform(context),
                AnalyseExpressionsTransform(context),
            ]
            self._tree = self.run_pipeline(pipeline, u"""
                cdef bytes s = b'asdfg'
                cdef dict d = {1:2}
                x = s * 3
                d.get('test')
            """)
        return self._tree

    def test_builtin_method(self):
        calls = [0]
        class Test(MethodDispatcherTransform):
            def _handle_simple_method_dict_get(self, node, func, args, unbound):
                calls[0] += 1
                return node

        tree = self._build_tree()
        Test(None)(tree)
        self.assertEqual(1, calls[0])

    def test_binop_method(self):
        calls = {'bytes': 0, 'object': 0}
        class Test(MethodDispatcherTransform):
            def _handle_simple_method_bytes___mul__(self, node, func, args, unbound):
                calls['bytes'] += 1
                return node
            def _handle_simple_method_object___mul__(self, node, func, args, unbound):
                calls['object'] += 1
                return node

        tree = self._build_tree()
        Test(None)(tree)
        self.assertEqual(1, calls['bytes'])
        self.assertEqual(0, calls['object'])
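# Illustrative note (not part of the original test file):
# MethodDispatcherTransform dispatches on handler names of the form
# _handle_simple_method_<type>_<method>. In the tests above,
# _handle_simple_method_dict_get() fires for d.get(...) on a variable
# typed 'dict', and _handle_simple_method_bytes___mul__() fires for
# 's * 3' on a 'bytes' variable, after type analysis has run.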
@ -0,0 +1 @@
# empty file
(14 binary files not shown)
@ -0,0 +1,275 @@
#
# TreeFragments - parsing of strings to trees
#

"""
Support for parsing strings into code trees.
"""

from __future__ import absolute_import

import re
from io import StringIO

from .Scanning import PyrexScanner, StringSourceDescriptor
from .Symtab import ModuleScope
from . import PyrexTypes
from .Visitor import VisitorTransform
from .Nodes import Node, StatListNode
from .ExprNodes import NameNode
from .StringEncoding import _unicode
from . import Parsing
from . import Main
from . import UtilNodes


class StringParseContext(Main.Context):
    def __init__(self, name, include_directories=None, compiler_directives=None, cpp=False):
        if include_directories is None:
            include_directories = []
        if compiler_directives is None:
            compiler_directives = {}
        # TODO: see if "language_level=3" also works for our internal code here.
        Main.Context.__init__(self, include_directories, compiler_directives, cpp=cpp, language_level=2)
        self.module_name = name

    def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1, absolute_fallback=True):
        if module_name not in (self.module_name, 'cython'):
            raise AssertionError("Not yet supporting any cimports/includes from string code snippets")
        return ModuleScope(module_name, parent_module=None, context=self)


def parse_from_strings(name, code, pxds=None, level=None, initial_pos=None,
                       context=None, allow_struct_enum_decorator=False):
    """
    Utility method to parse a (unicode) string of code. This is mostly
    used for internal Cython compiler purposes (creating code snippets
    that transforms should emit, as well as unit testing).

    code - a unicode string containing Cython (module-level) code
    name - a descriptive name for the code source (to use in error messages etc.)

    RETURNS

    The tree, i.e. a ModuleNode. The ModuleNode's scope attribute is
    set to the scope used when parsing.
    """
    if context is None:
        context = StringParseContext(name)
    # Since source files carry an encoding, it makes sense in this context
    # to use a unicode string so that code fragments don't have to bother
    # with encoding. This means that test code passed in should not have an
    # encoding header.
    assert isinstance(code, _unicode), "unicode code snippets only please"
    encoding = "UTF-8"

    module_name = name
    if initial_pos is None:
        initial_pos = (name, 1, 0)
    code_source = StringSourceDescriptor(name, code)

    scope = context.find_module(module_name, pos=initial_pos, need_pxd=False)

    buf = StringIO(code)

    scanner = PyrexScanner(buf, code_source, source_encoding=encoding,
                           scope=scope, context=context, initial_pos=initial_pos)
    ctx = Parsing.Ctx(allow_struct_enum_decorator=allow_struct_enum_decorator)

    if level is None:
        tree = Parsing.p_module(scanner, 0, module_name, ctx=ctx)
        tree.scope = scope
        tree.is_pxd = False
    else:
        tree = Parsing.p_code(scanner, level=level, ctx=ctx)

    tree.scope = scope
    return tree


class TreeCopier(VisitorTransform):
    def visit_Node(self, node):
        if node is None:
            return node
        else:
            c = node.clone_node()
            self.visitchildren(c)
            return c


class ApplyPositionAndCopy(TreeCopier):
    def __init__(self, pos):
        super(ApplyPositionAndCopy, self).__init__()
        self.pos = pos

    def visit_Node(self, node):
        copy = super(ApplyPositionAndCopy, self).visit_Node(node)
        copy.pos = self.pos
        return copy


class TemplateTransform(VisitorTransform):
    """
    Makes a copy of a template tree while doing substitutions.

    A dictionary "substitutions" should be passed in when calling
    the transform; mapping names to replacement nodes. Then replacement
    happens like this:
     - If an ExprStatNode contains a single NameNode, whose name is
       a key in the substitutions dictionary, the ExprStatNode is
       replaced with a copy of the tree given in the dictionary.
       It is the responsibility of the caller that the replacement
       node is a valid statement.
     - If a single NameNode is otherwise encountered, it is replaced
       if its name is listed in the substitutions dictionary in the
       same way. It is the responsibility of the caller to make sure
       that the replacement node is a valid expression.

    Also a list "temps" should be passed. Any names listed will
    be transformed into anonymous, temporary names.

    Currently supported for temp names:
    NameNode
    (various function and class definition nodes etc. should be added to this)

    Each replacement node gets the position of the substituted node
    recursively applied to every member node.
    """

    temp_name_counter = 0

    def __call__(self, node, substitutions, temps, pos):
        self.substitutions = substitutions
        self.pos = pos
        tempmap = {}
        temphandles = []
        for temp in temps:
            TemplateTransform.temp_name_counter += 1
            handle = UtilNodes.TempHandle(PyrexTypes.py_object_type)
            tempmap[temp] = handle
            temphandles.append(handle)
        self.tempmap = tempmap
        result = super(TemplateTransform, self).__call__(node)
        if temps:
            result = UtilNodes.TempsBlockNode(self.get_pos(node),
                                              temps=temphandles,
                                              body=result)
        return result

    def get_pos(self, node):
        if self.pos:
            return self.pos
        else:
            return node.pos

    def visit_Node(self, node):
        if node is None:
            return None
        else:
            c = node.clone_node()
            if self.pos is not None:
                c.pos = self.pos
            self.visitchildren(c)
            return c

    def try_substitution(self, node, key):
        sub = self.substitutions.get(key)
        if sub is not None:
            pos = self.pos
            if pos is None: pos = node.pos
            return ApplyPositionAndCopy(pos)(sub)
        else:
            return self.visit_Node(node)  # make copy as usual

    def visit_NameNode(self, node):
        temphandle = self.tempmap.get(node.name)
        if temphandle:
            # Replace name with temporary
            return temphandle.ref(self.get_pos(node))
        else:
            return self.try_substitution(node, node.name)

    def visit_ExprStatNode(self, node):
        # If an expression-as-statement consists of only a replaceable
        # NameNode, we replace the entire statement, not only the NameNode
        if isinstance(node.expr, NameNode):
            return self.try_substitution(node, node.expr.name)
        else:
            return self.visit_Node(node)


def copy_code_tree(node):
    return TreeCopier()(node)


_match_indent = re.compile(u"^ *").match


def strip_common_indent(lines):
    """Strips empty lines and common indentation from the list of strings given in lines"""
    # TODO: use textwrap.dedent instead
    lines = [x for x in lines if x.strip() != u""]
    if lines:
        minindent = min([len(_match_indent(x).group(0)) for x in lines])
        lines = [x[minindent:] for x in lines]
    return lines

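# Illustrative sketch (not part of the original module): what
# strip_common_indent() does to an indented fragment.
#
#   strip_common_indent([u"    if x:", u"", u"        y"])
#   == [u"if x:", u"    y"]
#
# The empty line is dropped first, then the smallest leading-space run
# (here 4) is removed from every remaining line.
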
class TreeFragment(object):
|
||||
def __init__(self, code, name=None, pxds=None, temps=None, pipeline=None, level=None, initial_pos=None):
|
||||
if pxds is None:
|
||||
pxds = {}
|
||||
if temps is None:
|
||||
temps = []
|
||||
if pipeline is None:
|
||||
pipeline = []
|
||||
if not name:
|
||||
name = "(tree fragment)"
|
||||
|
||||
if isinstance(code, _unicode):
|
||||
def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))
|
||||
|
||||
fmt_code = fmt(code)
|
||||
fmt_pxds = {}
|
||||
for key, value in pxds.items():
|
||||
fmt_pxds[key] = fmt(value)
|
||||
mod = t = parse_from_strings(name, fmt_code, fmt_pxds, level=level, initial_pos=initial_pos)
|
||||
if level is None:
|
||||
t = t.body # Make sure a StatListNode is at the top
|
||||
if not isinstance(t, StatListNode):
|
||||
t = StatListNode(pos=mod.pos, stats=[t])
|
||||
for transform in pipeline:
|
||||
if transform is None:
|
||||
continue
|
||||
t = transform(t)
|
||||
self.root = t
|
||||
elif isinstance(code, Node):
|
||||
if pxds:
|
||||
raise NotImplementedError()
|
||||
self.root = code
|
||||
else:
|
||||
raise ValueError("Unrecognized code format (accepts unicode and Node)")
|
||||
self.temps = temps
|
||||
|
||||
def copy(self):
|
||||
return copy_code_tree(self.root)
|
||||
|
||||
def substitute(self, nodes=None, temps=None, pos = None):
|
||||
if nodes is None:
|
||||
nodes = {}
|
||||
if temps is None:
|
||||
temps = []
|
||||
return TemplateTransform()(self.root,
|
||||
substitutions = nodes,
|
||||
temps = self.temps + temps, pos = pos)
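
# Example usage (illustrative sketch; the node values are hypothetical):
# bare names in the template act as placeholders that substitute() replaces,
# either as expressions (visit_NameNode) or as whole statements
# (visit_ExprStatNode).
#
#     fragment = TreeFragment(u"""
#         if TEST:
#             BODY
#     """)
#     tree = fragment.substitute({'TEST': test_node, 'BODY': body_stat})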


class SetPosTransform(VisitorTransform):
    def __init__(self, pos):
        super(SetPosTransform, self).__init__()
        self.pos = pos

    def visit_Node(self, node):
        node.pos = self.pos
        self.visitchildren(node)
        return node
@ -0,0 +1,296 @@
"""
|
||||
A simple XPath-like language for tree traversal.
|
||||
|
||||
This works by creating a filter chain of generator functions. Each
|
||||
function selects a part of the expression, e.g. a child node, a
|
||||
specific descendant or a node that holds an attribute.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import operator
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
_unicode = str
|
||||
else:
|
||||
_unicode = unicode
|
||||
|
||||
path_tokenizer = re.compile(
|
||||
r"("
|
||||
r"'[^']*'|\"[^\"]*\"|"
|
||||
r"//?|"
|
||||
r"\(\)|"
|
||||
r"==?|"
|
||||
r"[/.*\[\]()@])|"
|
||||
r"([^/\[\]()@=\s]+)|"
|
||||
r"\s+"
|
||||
).findall
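
# The tokenizer yields (special, text) pairs from the two capture groups;
# whitespace matches produce empty ('', '') pairs that _build_path_iterator()
# below filters out. An illustrative run:
#
#     path_tokenizer(u"//NameNode[@name = 'x']")
#     -> [('//', ''), ('', 'NameNode'), ('[', ''), ('@', ''), ('', 'name'),
#         ('', ''), ('=', ''), ('', ''), ("'x'", ''), (']', '')]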

def iterchildren(node, attr_name):
    # returns an iterable of all child nodes of that name
    child = getattr(node, attr_name)
    if child is not None:
        if type(child) is list:
            return child
        else:
            return [child]
    else:
        return ()

def _get_first_or_none(it):
    try:
        try:
            _next = it.next
        except AttributeError:
            return next(it)
        else:
            return _next()
    except StopIteration:
        return None

def type_name(node):
    return node.__class__.__name__.split('.')[-1]

def parse_func(next, token):
    name = token[1]
    token = next()
    if token[0] != '(':
        raise ValueError("Expected '(' after function name '%s'" % name)
    predicate = handle_predicate(next, token)
    return name, predicate

def handle_func_not(next, token):
    """
    not(...)
    """
    name, predicate = parse_func(next, token)

    def select(result):
        for node in result:
            if _get_first_or_none(predicate([node])) is None:
                yield node
    return select

def handle_name(next, token):
    """
    /NodeName/
    or
    func(...)
    """
    name = token[1]
    if name in functions:
        return functions[name](next, token)
    def select(result):
        for node in result:
            for attr_name in node.child_attrs:
                for child in iterchildren(node, attr_name):
                    if type_name(child) == name:
                        yield child
    return select

def handle_star(next, token):
    """
    /*/
    """
    def select(result):
        for node in result:
            for name in node.child_attrs:
                for child in iterchildren(node, name):
                    yield child
    return select

def handle_dot(next, token):
    """
    /./
    """
    def select(result):
        return result
    return select

def handle_descendants(next, token):
    """
    //...
    """
    token = next()
    if token[0] == "*":
        def iter_recursive(node):
            for name in node.child_attrs:
                for child in iterchildren(node, name):
                    yield child
                    for c in iter_recursive(child):
                        yield c
    elif not token[0]:
        node_name = token[1]
        def iter_recursive(node):
            for name in node.child_attrs:
                for child in iterchildren(node, name):
                    if type_name(child) == node_name:
                        yield child
                    for c in iter_recursive(child):
                        yield c
    else:
        raise ValueError("Expected node name after '//'")

    def select(result):
        for node in result:
            for child in iter_recursive(node):
                yield child

    return select


def handle_attribute(next, token):
    token = next()
    if token[0]:
        raise ValueError("Expected attribute name")
    name = token[1]
    value = None
    try:
        token = next()
    except StopIteration:
        pass
    else:
        if token[0] == '=':
            value = parse_path_value(next)
    readattr = operator.attrgetter(name)
    if value is None:
        def select(result):
            for node in result:
                try:
                    attr_value = readattr(node)
                except AttributeError:
                    continue
                if attr_value is not None:
                    yield attr_value
    else:
        def select(result):
            for node in result:
                try:
                    attr_value = readattr(node)
                except AttributeError:
                    continue
                if attr_value == value:
                    yield attr_value
                elif (isinstance(attr_value, bytes) and isinstance(value, _unicode) and
                        attr_value == value.encode()):
                    # allow a bytes-to-string comparison too
                    yield attr_value

    return select


def parse_path_value(next):
    token = next()
    value = token[0]
    if value:
        if value[:1] == "'" or value[:1] == '"':
            return value[1:-1]
        try:
            return int(value)
        except ValueError:
            pass
    elif token[1].isdigit():
        return int(token[1])
    else:
        name = token[1].lower()
        if name == 'true':
            return True
        elif name == 'false':
            return False
    raise ValueError("Invalid attribute predicate: '%s'" % value)

def handle_predicate(next, token):
    token = next()
    selector = []
    while token[0] != ']':
        selector.append(operations[token[0]](next, token))
        try:
            token = next()
        except StopIteration:
            break
        else:
            if token[0] == "/":
                token = next()

    if not token[0] and token[1] == 'and':
        return logical_and(selector, handle_predicate(next, token))

    def select(result):
        for node in result:
            subresult = iter((node,))
            for select in selector:
                subresult = select(subresult)
            predicate_result = _get_first_or_none(subresult)
            if predicate_result is not None:
                yield node
    return select

def logical_and(lhs_selects, rhs_select):
    def select(result):
        for node in result:
            subresult = iter((node,))
            for select in lhs_selects:
                subresult = select(subresult)
            predicate_result = _get_first_or_none(subresult)
            subresult = iter((node,))
            if predicate_result is not None:
                for result_node in rhs_select(subresult):
                    yield node
    return select


operations = {
    "@": handle_attribute,
    "": handle_name,
    "*": handle_star,
    ".": handle_dot,
    "//": handle_descendants,
    "[": handle_predicate,
}

functions = {
    'not': handle_func_not
}

def _build_path_iterator(path):
    # parse pattern
    stream = iter([(special, text)
                   for (special, text) in path_tokenizer(path)
                   if special or text])
    try:
        _next = stream.next
    except AttributeError:
        # Python 3
        def _next():
            return next(stream)
    token = _next()
    selector = []
    while 1:
        try:
            selector.append(operations[token[0]](_next, token))
        except StopIteration:
            raise ValueError("invalid path")
        try:
            token = _next()
            if token[0] == "/":
                token = _next()
        except StopIteration:
            break
    return selector

# main module API

def iterfind(node, path):
    selector_chain = _build_path_iterator(path)
    result = iter((node,))
    for select in selector_chain:
        result = select(result)
    return result

def find_first(node, path):
    return _get_first_or_none(iterfind(node, path))

def find_all(node, path):
    return list(iterfind(node, path))
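
# Example queries (illustrative only; node type and attribute names depend
# on the tree being searched):
#
#     find_first(tree, "//ReturnStatNode")          # first return statement
#     find_all(tree, "//NameNode[@name = 'x']")     # all references to 'x'
#     find_first(tree, "//ForInStatNode/NameNode")  # a loop's name child, if any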
@ -0,0 +1,591 @@
from __future__ import absolute_import

from .Errors import error, message
from . import ExprNodes
from . import Nodes
from . import Builtin
from . import PyrexTypes
from .. import Utils
from .PyrexTypes import py_object_type, unspecified_type
from .Visitor import CythonTransform, EnvTransform

try:
    reduce
except NameError:
    from functools import reduce


class TypedExprNode(ExprNodes.ExprNode):
    # Used for declaring assignments of a specified type without a known entry.
    subexprs = []

    def __init__(self, type, pos=None):
        super(TypedExprNode, self).__init__(pos, type=type)

object_expr = TypedExprNode(py_object_type)


class MarkParallelAssignments(EnvTransform):
    # Collects assignments inside parallel blocks (prange, with parallel).
    # Perhaps it's better to move it to ControlFlowAnalysis.

    # tells us whether we're in a normal loop
    in_loop = False

    parallel_errors = False

    def __init__(self, context):
        # Track the parallel block scopes (with parallel, for i in prange())
        self.parallel_block_stack = []
        super(MarkParallelAssignments, self).__init__(context)

    def mark_assignment(self, lhs, rhs, inplace_op=None):
        if isinstance(lhs, (ExprNodes.NameNode, Nodes.PyArgDeclNode)):
            if lhs.entry is None:
                # TODO: This shouldn't happen...
                return

            if self.parallel_block_stack:
                parallel_node = self.parallel_block_stack[-1]
                previous_assignment = parallel_node.assignments.get(lhs.entry)

                # If there was a previous assignment to the variable, keep the
                # previous assignment position
                if previous_assignment:
                    pos, previous_inplace_op = previous_assignment

                    if (inplace_op and previous_inplace_op and
                            inplace_op != previous_inplace_op):
                        # x += y; x *= y
                        t = (inplace_op, previous_inplace_op)
                        error(lhs.pos,
                              "Reduction operator '%s' is inconsistent "
                              "with previous reduction operator '%s'" % t)
                else:
                    pos = lhs.pos

                parallel_node.assignments[lhs.entry] = (pos, inplace_op)
                parallel_node.assigned_nodes.append(lhs)

        elif isinstance(lhs, ExprNodes.SequenceNode):
            for i, arg in enumerate(lhs.args):
                if not rhs or arg.is_starred:
                    item_node = None
                else:
                    item_node = rhs.inferable_item_node(i)
                self.mark_assignment(arg, item_node)
        else:
            # Could use this info to infer cdef class attributes...
            pass

    def visit_WithTargetAssignmentStatNode(self, node):
        self.mark_assignment(node.lhs, node.with_node.enter_call)
        self.visitchildren(node)
        return node

    def visit_SingleAssignmentNode(self, node):
        self.mark_assignment(node.lhs, node.rhs)
        self.visitchildren(node)
        return node

    def visit_CascadedAssignmentNode(self, node):
        for lhs in node.lhs_list:
            self.mark_assignment(lhs, node.rhs)
        self.visitchildren(node)
        return node

    def visit_InPlaceAssignmentNode(self, node):
        self.mark_assignment(node.lhs, node.create_binop_node(), node.operator)
        self.visitchildren(node)
        return node

    def visit_ForInStatNode(self, node):
        # TODO: Remove redundancy with range optimization...
        is_special = False
        sequence = node.iterator.sequence
        target = node.target
        if isinstance(sequence, ExprNodes.SimpleCallNode):
            function = sequence.function
            if sequence.self is None and function.is_name:
                entry = self.current_env().lookup(function.name)
                if not entry or entry.is_builtin:
                    if function.name == 'reversed' and len(sequence.args) == 1:
                        sequence = sequence.args[0]
                    elif function.name == 'enumerate' and len(sequence.args) == 1:
                        if target.is_sequence_constructor and len(target.args) == 2:
                            iterator = sequence.args[0]
                            if iterator.is_name:
                                iterator_type = iterator.infer_type(self.current_env())
                                if iterator_type.is_builtin_type:
                                    # assume that builtin types have a length within Py_ssize_t
                                    self.mark_assignment(
                                        target.args[0],
                                        ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
                                                          type=PyrexTypes.c_py_ssize_t_type))
                                    target = target.args[1]
                                    sequence = sequence.args[0]
        if isinstance(sequence, ExprNodes.SimpleCallNode):
            function = sequence.function
            if sequence.self is None and function.is_name:
                entry = self.current_env().lookup(function.name)
                if not entry or entry.is_builtin:
                    if function.name in ('range', 'xrange'):
                        is_special = True
                        for arg in sequence.args[:2]:
                            self.mark_assignment(target, arg)
                        if len(sequence.args) > 2:
                            self.mark_assignment(
                                target,
                                ExprNodes.binop_node(node.pos,
                                                     '+',
                                                     sequence.args[0],
                                                     sequence.args[2]))

        if not is_special:
            # A for-loop basically translates to subsequent calls to
            # __getitem__(), so using an IndexNode here allows us to
            # naturally infer the base type of pointers, C arrays,
            # Python strings, etc., while correctly falling back to an
            # object type when the base type cannot be handled.
            self.mark_assignment(target, ExprNodes.IndexNode(
                node.pos,
                base=sequence,
                index=ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
                                        type=PyrexTypes.c_py_ssize_t_type)))

        self.visitchildren(node)
        return node

    def visit_ForFromStatNode(self, node):
        self.mark_assignment(node.target, node.bound1)
        if node.step is not None:
            self.mark_assignment(node.target,
                                 ExprNodes.binop_node(node.pos,
                                                      '+',
                                                      node.bound1,
                                                      node.step))
        self.visitchildren(node)
        return node

    def visit_WhileStatNode(self, node):
        self.visitchildren(node)
        return node

    def visit_ExceptClauseNode(self, node):
        if node.target is not None:
            self.mark_assignment(node.target, object_expr)
        self.visitchildren(node)
        return node

    def visit_FromCImportStatNode(self, node):
        pass  # Can't be assigned to...

    def visit_FromImportStatNode(self, node):
        for name, target in node.items:
            if name != "*":
                self.mark_assignment(target, object_expr)
        self.visitchildren(node)
        return node

    def visit_DefNode(self, node):
        # use fake expressions with the right result type
        if node.star_arg:
            self.mark_assignment(
                node.star_arg, TypedExprNode(Builtin.tuple_type, node.pos))
        if node.starstar_arg:
            self.mark_assignment(
                node.starstar_arg, TypedExprNode(Builtin.dict_type, node.pos))
        EnvTransform.visit_FuncDefNode(self, node)
        return node

    def visit_DelStatNode(self, node):
        for arg in node.args:
            self.mark_assignment(arg, arg)
        self.visitchildren(node)
        return node

    def visit_ParallelStatNode(self, node):
        if self.parallel_block_stack:
            node.parent = self.parallel_block_stack[-1]
        else:
            node.parent = None

        nested = False
        if node.is_prange:
            if not node.parent:
                node.is_parallel = True
            else:
                node.is_parallel = (node.parent.is_prange or not
                                    node.parent.is_parallel)
                nested = node.parent.is_prange
        else:
            node.is_parallel = True
            # Note: nested 'with parallel()' blocks are handled by
            # ParallelRangeTransform!
            # nested = node.parent
            nested = node.parent and node.parent.is_prange

        self.parallel_block_stack.append(node)

        nested = nested or len(self.parallel_block_stack) > 2
        if not self.parallel_errors and nested and not node.is_prange:
            error(node.pos, "Only prange() may be nested")
            self.parallel_errors = True

        if node.is_prange:
            child_attrs = node.child_attrs
            node.child_attrs = ['body', 'target', 'args']
            self.visitchildren(node)
            node.child_attrs = child_attrs

            self.parallel_block_stack.pop()
            if node.else_clause:
                node.else_clause = self.visit(node.else_clause)
        else:
            self.visitchildren(node)
            self.parallel_block_stack.pop()

        self.parallel_errors = False
        return node

    def visit_YieldExprNode(self, node):
        if self.parallel_block_stack:
            error(node.pos, "'%s' not allowed in parallel sections" % node.expr_keyword)
        return node

    def visit_ReturnStatNode(self, node):
        node.in_parallel = bool(self.parallel_block_stack)
        return node
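
# Illustrative sketch of what this transform records (Cython code, shown
# here only as a comment):
#
#     with nogil, cython.parallel.parallel():
#         for i in cython.parallel.prange(n):
#             s += x[i]      # recorded as an inplace '+' assignment to 's'
#
# mark_assignment() stores (pos, inplace_op) per entry on the innermost
# parallel node, and rejects a second, different reduction operator on
# the same variable.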


class MarkOverflowingArithmetic(CythonTransform):

    # It may be possible to integrate this with the above for
    # performance improvements (though likely not worth it).

    might_overflow = False

    def __call__(self, root):
        self.env_stack = []
        self.env = root.scope
        return super(MarkOverflowingArithmetic, self).__call__(root)

    def visit_safe_node(self, node):
        self.might_overflow, saved = False, self.might_overflow
        self.visitchildren(node)
        self.might_overflow = saved
        return node

    def visit_neutral_node(self, node):
        self.visitchildren(node)
        return node

    def visit_dangerous_node(self, node):
        self.might_overflow, saved = True, self.might_overflow
        self.visitchildren(node)
        self.might_overflow = saved
        return node

    def visit_FuncDefNode(self, node):
        self.env_stack.append(self.env)
        self.env = node.local_scope
        self.visit_safe_node(node)
        self.env = self.env_stack.pop()
        return node

    def visit_NameNode(self, node):
        if self.might_overflow:
            entry = node.entry or self.env.lookup(node.name)
            if entry:
                entry.might_overflow = True
        return node

    def visit_BinopNode(self, node):
        if node.operator in '&|^':
            return self.visit_neutral_node(node)
        else:
            return self.visit_dangerous_node(node)

    def visit_SimpleCallNode(self, node):
        if node.function.is_name and node.function.name == 'abs':
            # Overflows for the minimum value of fixed size ints.
            return self.visit_dangerous_node(node)
        else:
            return self.visit_neutral_node(node)

    visit_UnopNode = visit_neutral_node

    visit_UnaryMinusNode = visit_dangerous_node

    visit_InPlaceAssignmentNode = visit_dangerous_node

    visit_Node = visit_safe_node

    def visit_assignment(self, lhs, rhs):
        if (isinstance(rhs, ExprNodes.IntNode)
                and isinstance(lhs, ExprNodes.NameNode)
                and Utils.long_literal(rhs.value)):
            entry = lhs.entry or self.env.lookup(lhs.name)
            if entry:
                entry.might_overflow = True

    def visit_SingleAssignmentNode(self, node):
        self.visit_assignment(node.lhs, node.rhs)
        self.visitchildren(node)
        return node

    def visit_CascadedAssignmentNode(self, node):
        for lhs in node.lhs_list:
            self.visit_assignment(lhs, node.rhs)
        self.visitchildren(node)
        return node
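
# Illustrative sketch: in
#
#     x = 0
#     for i in range(n):
#         x = x * 3 + 1    # BinopNode -> visit_dangerous_node()
#
# every NameNode visited inside the arithmetic marks its entry with
# might_overflow, so safe_spanning_type() below refuses to give 'x' a
# plain fixed-size C integer type and falls back to py_object_type.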


class PyObjectTypeInferer(object):
    """
    If it's not declared, it's a PyObject.
    """
    def infer_types(self, scope):
        """
        Given a dict of entries, map all unspecified types to a specified type.
        """
        for name, entry in scope.entries.items():
            if entry.type is unspecified_type:
                entry.type = py_object_type


class SimpleAssignmentTypeInferer(object):
    """
    Very basic type inference.

    Note: in order to support cross-closure type inference, this must be
    applied to nested scopes in top-down order.
    """
    def set_entry_type(self, entry, entry_type):
        entry.type = entry_type
        for e in entry.all_entries():
            e.type = entry_type

    def infer_types(self, scope):
        enabled = scope.directives['infer_types']
        verbose = scope.directives['infer_types.verbose']

        if enabled == True:
            spanning_type = aggressive_spanning_type
        elif enabled is None:  # safe mode
            spanning_type = safe_spanning_type
        else:
            for entry in scope.entries.values():
                if entry.type is unspecified_type:
                    self.set_entry_type(entry, py_object_type)
            return

        # Set of assignments
        assignments = set()
        assmts_resolved = set()
        dependencies = {}
        assmt_to_names = {}

        for name, entry in scope.entries.items():
            for assmt in entry.cf_assignments:
                names = assmt.type_dependencies()
                assmt_to_names[assmt] = names
                assmts = set()
                for node in names:
                    assmts.update(node.cf_state)
                dependencies[assmt] = assmts
            if entry.type is unspecified_type:
                assignments.update(entry.cf_assignments)
            else:
                assmts_resolved.update(entry.cf_assignments)

        def infer_name_node_type(node):
            types = [assmt.inferred_type for assmt in node.cf_state]
            if not types:
                node_type = py_object_type
            else:
                entry = node.entry
                node_type = spanning_type(
                    types, entry.might_overflow, entry.pos, scope)
            node.inferred_type = node_type

        def infer_name_node_type_partial(node):
            types = [assmt.inferred_type for assmt in node.cf_state
                     if assmt.inferred_type is not None]
            if not types:
                return
            entry = node.entry
            return spanning_type(types, entry.might_overflow, entry.pos, scope)

        def inferred_types(entry):
            has_none = False
            has_pyobjects = False
            types = []
            for assmt in entry.cf_assignments:
                if assmt.rhs.is_none:
                    has_none = True
                else:
                    rhs_type = assmt.inferred_type
                    if rhs_type and rhs_type.is_pyobject:
                        has_pyobjects = True
                    types.append(rhs_type)
            # Ignore None assignments as long as there are concrete Python
            # type assignments, but include them if None is the only assigned
            # Python object.
            if has_none and not has_pyobjects:
                types.append(py_object_type)
            return types

        def resolve_assignments(assignments):
            resolved = set()
            for assmt in assignments:
                deps = dependencies[assmt]
                # All assignments are resolved
                if assmts_resolved.issuperset(deps):
                    for node in assmt_to_names[assmt]:
                        infer_name_node_type(node)
                    # Resolve assmt
                    inferred_type = assmt.infer_type()
                    assmts_resolved.add(assmt)
                    resolved.add(assmt)
            assignments.difference_update(resolved)
            return resolved

        def partial_infer(assmt):
            partial_types = []
            for node in assmt_to_names[assmt]:
                partial_type = infer_name_node_type_partial(node)
                if partial_type is None:
                    return False
                partial_types.append((node, partial_type))
            for node, partial_type in partial_types:
                node.inferred_type = partial_type
            assmt.infer_type()
            return True

        partial_assmts = set()
        def resolve_partial(assignments):
            # try to handle circular references
            partials = set()
            for assmt in assignments:
                if assmt in partial_assmts:
                    continue
                if partial_infer(assmt):
                    partials.add(assmt)
                    assmts_resolved.add(assmt)
            partial_assmts.update(partials)
            return partials

        # Infer assignments
        while True:
            if not resolve_assignments(assignments):
                if not resolve_partial(assignments):
                    break
        inferred = set()
        # First pass
        for entry in scope.entries.values():
            if entry.type is not unspecified_type:
                continue
            entry_type = py_object_type
            if assmts_resolved.issuperset(entry.cf_assignments):
                types = inferred_types(entry)
                if types and all(types):
                    entry_type = spanning_type(
                        types, entry.might_overflow, entry.pos, scope)
                    inferred.add(entry)
            self.set_entry_type(entry, entry_type)

        def reinfer():
            dirty = False
            for entry in inferred:
                for assmt in entry.cf_assignments:
                    assmt.infer_type()
                types = inferred_types(entry)
                new_type = spanning_type(types, entry.might_overflow, entry.pos, scope)
                if new_type != entry.type:
                    self.set_entry_type(entry, new_type)
                    dirty = True
            return dirty

        # types propagation
        while reinfer():
            pass

        if verbose:
            for entry in inferred:
                message(entry.pos, "inferred '%s' to be of type '%s'" % (
                    entry.name, entry.type))


def find_spanning_type(type1, type2):
    if type1 is type2:
        result_type = type1
    elif type1 is PyrexTypes.c_bint_type or type2 is PyrexTypes.c_bint_type:
        # type inference can break the coercion back to a Python bool
        # if it returns an arbitrary int type here
        return py_object_type
    else:
        result_type = PyrexTypes.spanning_type(type1, type2)
    if result_type in (PyrexTypes.c_double_type, PyrexTypes.c_float_type,
                       Builtin.float_type):
        # Python's float type is just a C double, so it's safe to
        # use the C type instead
        return PyrexTypes.c_double_type
    return result_type
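
# Illustrative behaviour (exact results depend on PyrexTypes.spanning_type):
#
#     find_spanning_type(c_int_type, c_int_type)      -> c_int_type
#     find_spanning_type(c_bint_type, c_int_type)     -> py_object_type
#     find_spanning_type(c_float_type, c_double_type) -> c_double_type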


def simply_type(result_type, pos):
    if result_type.is_reference:
        result_type = result_type.ref_base_type
    if result_type.is_const:
        result_type = result_type.const_base_type
    if result_type.is_cpp_class:
        result_type.check_nullary_constructor(pos)
    if result_type.is_array:
        result_type = PyrexTypes.c_ptr_type(result_type.base_type)
    return result_type


def aggressive_spanning_type(types, might_overflow, pos, scope):
    return simply_type(reduce(find_spanning_type, types), pos)


def safe_spanning_type(types, might_overflow, pos, scope):
    result_type = simply_type(reduce(find_spanning_type, types), pos)
    if result_type.is_pyobject:
        # In theory, any specific Python type is always safe to
        # infer. However, inferring str can cause some existing code
        # to break, since we are also now much more strict about
        # coercion from str to char *. See trac #553.
        if result_type.name == 'str':
            return py_object_type
        else:
            return result_type
    elif result_type is PyrexTypes.c_double_type:
        # Python's float type is just a C double, so it's safe to use
        # the C type instead
        return result_type
    elif result_type is PyrexTypes.c_bint_type:
        # find_spanning_type() only returns 'bint' for clean boolean
        # operations without other int types, so this is safe, too
        return result_type
    elif result_type.is_pythran_expr:
        return result_type
    elif result_type.is_ptr:
        # Any pointer except (signed|unsigned|) char* can't implicitly
        # become a PyObject, and inferring char* is now accepted, too.
        return result_type
    elif result_type.is_cpp_class:
        # These can't implicitly become Python objects either.
        return result_type
    elif result_type.is_struct:
        # Though we have struct -> object for some structs, this is uncommonly
        # used, won't arise in pure Python, and there shouldn't be side
        # effects, so I'm declaring this safe.
        return result_type
    # TODO: double complex should be OK as well, but we need
    # to make sure everything is supported.
    elif (result_type.is_int or result_type.is_enum) and not might_overflow:
        return result_type
    elif (not result_type.can_coerce_to_pyobject(scope)
            and not result_type.is_error):
        return result_type
    return py_object_type


def get_type_inferer():
    return SimpleAssignmentTypeInferer()
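
# Typical use (illustrative sketch): the compiler pipeline obtains an
# inferer and applies it to each scope, outermost first, as required by
# SimpleAssignmentTypeInferer's docstring:
#
#     inferer = get_type_inferer()
#     inferer.infer_types(module_scope)
#     # ...then each nested function scope, top-down.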
@ -0,0 +1,941 @@
#
# Tables describing slots in the CPython type object
# and associated know-how.
#

from __future__ import absolute_import

from . import Naming
from . import PyrexTypes
from .Errors import error

invisible = ['__cinit__', '__dealloc__', '__richcmp__',
             '__nonzero__', '__bool__']

richcmp_special_methods = ['__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__']


class Signature(object):
    # Method slot signature descriptor.
    #
    # has_dummy_arg      boolean
    # has_generic_args   boolean
    # fixed_arg_format   string
    # ret_format         string
    # error_value        string
    #
    # The formats are strings made up of the following
    # characters:
    #
    #   'O'  Python object
    #   'T'  Python object of the type of 'self'
    #   'v'  void
    #   'p'  void *
    #   'P'  void **
    #   'i'  int
    #   'b'  bint
    #   'I'  int *
    #   'l'  long
    #   'f'  float
    #   'd'  double
    #   'h'  Py_hash_t
    #   'z'  Py_ssize_t
    #   'Z'  Py_ssize_t *
    #   's'  char *
    #   'S'  char **
    #   'r'  int used only to signal exception
    #   'B'  Py_buffer *
    #   '-'  dummy 'self' argument (not used)
    #   '*'  rest of args passed as generic Python
    #        arg tuple and kw dict (must be last
    #        char in format string)

    format_map = {
        'O': PyrexTypes.py_object_type,
        'v': PyrexTypes.c_void_type,
        'p': PyrexTypes.c_void_ptr_type,
        'P': PyrexTypes.c_void_ptr_ptr_type,
        'i': PyrexTypes.c_int_type,
        'b': PyrexTypes.c_bint_type,
        'I': PyrexTypes.c_int_ptr_type,
        'l': PyrexTypes.c_long_type,
        'f': PyrexTypes.c_float_type,
        'd': PyrexTypes.c_double_type,
        'h': PyrexTypes.c_py_hash_t_type,
        'z': PyrexTypes.c_py_ssize_t_type,
        'Z': PyrexTypes.c_py_ssize_t_ptr_type,
        's': PyrexTypes.c_char_ptr_type,
        'S': PyrexTypes.c_char_ptr_ptr_type,
        'r': PyrexTypes.c_returncode_type,
        'B': PyrexTypes.c_py_buffer_ptr_type,
        # 'T', '-' and '*' are handled otherwise
        # and are not looked up in here
    }

    type_to_format_map = dict(
        (type_, format_) for format_, type_ in format_map.items())

    error_value_map = {
        'O': "NULL",
        'T': "NULL",
        'i': "-1",
        'b': "-1",
        'l': "-1",
        'r': "-1",
        'h': "-1",
        'z': "-1",
    }

    def __init__(self, arg_format, ret_format):
        self.has_dummy_arg = 0
        self.has_generic_args = 0
        if arg_format[:1] == '-':
            self.has_dummy_arg = 1
            arg_format = arg_format[1:]
        if arg_format[-1:] == '*':
            self.has_generic_args = 1
            arg_format = arg_format[:-1]
        self.fixed_arg_format = arg_format
        self.ret_format = ret_format
        self.error_value = self.error_value_map.get(ret_format, None)
        self.exception_check = ret_format != 'r' and self.error_value is not None
        self.is_staticmethod = False

    def __repr__(self):
        return '<Signature[%s(%s%s)]>' % (
            self.ret_format,
            ', '.join(self.fixed_arg_format),
            '*' if self.has_generic_args else '')

    def num_fixed_args(self):
        return len(self.fixed_arg_format)

    def is_self_arg(self, i):
        # argument is 'self' for methods or 'class' for classmethods
        return self.fixed_arg_format[i] == 'T'

    def returns_self_type(self):
        # return type is same as 'self' argument type
        return self.ret_format == 'T'

    def fixed_arg_type(self, i):
        return self.format_map[self.fixed_arg_format[i]]

    def return_type(self):
        return self.format_map[self.ret_format]

    def format_from_type(self, arg_type):
        if arg_type.is_pyobject:
            arg_type = PyrexTypes.py_object_type
        return self.type_to_format_map[arg_type]

    def exception_value(self):
        return self.error_value_map.get(self.ret_format)

    def function_type(self, self_arg_override=None):
        # Construct a C function type descriptor for this signature
        args = []
        for i in range(self.num_fixed_args()):
            if self_arg_override is not None and self.is_self_arg(i):
                assert isinstance(self_arg_override, PyrexTypes.CFuncTypeArg)
                args.append(self_arg_override)
            else:
                arg_type = self.fixed_arg_type(i)
                args.append(PyrexTypes.CFuncTypeArg("", arg_type, None))
        if self_arg_override is not None and self.returns_self_type():
            ret_type = self_arg_override.type
        else:
            ret_type = self.return_type()
        exc_value = self.exception_value()
        return PyrexTypes.CFuncType(
            ret_type, args, exception_value=exc_value,
            exception_check=self.exception_check)

    def method_flags(self):
        if self.ret_format == "O":
            full_args = self.fixed_arg_format
            if self.has_dummy_arg:
                full_args = "O" + full_args
            if full_args in ["O", "T"]:
                if self.has_generic_args:
                    return [method_varargs, method_keywords]
                else:
                    return [method_noargs]
            elif full_args in ["OO", "TO"] and not self.has_generic_args:
                return [method_onearg]

            if self.is_staticmethod:
                return [method_varargs, method_keywords]
        return None
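
# Illustrative reading of a signature (see the format table above):
# Signature("TO", "O") is a binary slot taking a typed 'self':
#
#     sig = Signature("TO", "O")
#     sig.num_fixed_args()    # -> 2
#     sig.is_self_arg(0)      # -> True ('T' is the 'self' argument)
#     sig.error_value         # -> "NULL" (error return for an 'O' result)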


class SlotDescriptor(object):
    # Abstract base class for type slot descriptors.
    #
    # slot_name    string           Member name of the slot in the type object
    # is_initialised_dynamically    Is initialised by code in the module init function
    # is_inherited                  Is inherited by subtypes (see PyType_Ready())
    # py3          Indicates presence of slot in Python 3
    # py2          Indicates presence of slot in Python 2
    # ifdef        Full #ifdef string that the slot is wrapped in. Using this
    #              causes py3, py2 and flags to be ignored.

    def __init__(self, slot_name, dynamic=False, inherited=False,
                 py3=True, py2=True, ifdef=None):
        self.slot_name = slot_name
        self.is_initialised_dynamically = dynamic
        self.is_inherited = inherited
        self.ifdef = ifdef
        self.py3 = py3
        self.py2 = py2

    def preprocessor_guard_code(self):
        ifdef = self.ifdef
        py2 = self.py2
        py3 = self.py3
        guard = None
        if ifdef:
            guard = "#if %s" % ifdef
        elif not py3 or py3 == '<RESERVED>':
            guard = "#if PY_MAJOR_VERSION < 3"
        elif not py2:
            guard = "#if PY_MAJOR_VERSION >= 3"
        return guard
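
    # Illustrative guard output ('tp_example' is a placeholder slot name):
    #
    #     SlotDescriptor("tp_example", py2=False).preprocessor_guard_code()
    #     -> "#if PY_MAJOR_VERSION >= 3"
    #     SlotDescriptor("tp_example", py3=False).preprocessor_guard_code()
    #     -> "#if PY_MAJOR_VERSION < 3"
    #     SlotDescriptor("tp_example", ifdef="HAVE_EXAMPLE").preprocessor_guard_code()
    #     -> "#if HAVE_EXAMPLE"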

    def generate(self, scope, code):
        preprocessor_guard = self.preprocessor_guard_code()
        if preprocessor_guard:
            code.putln(preprocessor_guard)

        end_pypy_guard = False
        if self.is_initialised_dynamically:
            value = "0"
        else:
            value = self.slot_code(scope)
            if value == "0" and self.is_inherited:
                # PyPy currently has a broken PyType_Ready() that fails to
                # inherit some slots. To work around this, we explicitly
                # set inherited slots here, but only in PyPy since CPython
                # handles this better than we do.
                inherited_value = value
                current_scope = scope
                while (inherited_value == "0"
                       and current_scope.parent_type
                       and current_scope.parent_type.base_type
                       and current_scope.parent_type.base_type.scope):
                    current_scope = current_scope.parent_type.base_type.scope
                    inherited_value = self.slot_code(current_scope)
                if inherited_value != "0":
                    code.putln("#if CYTHON_COMPILING_IN_PYPY")
                    code.putln("%s, /*%s*/" % (inherited_value, self.slot_name))
                    code.putln("#else")
                    end_pypy_guard = True

        code.putln("%s, /*%s*/" % (value, self.slot_name))

        if end_pypy_guard:
            code.putln("#endif")

        if self.py3 == '<RESERVED>':
            code.putln("#else")
            code.putln("0, /*reserved*/")
        if preprocessor_guard:
            code.putln("#endif")

    # Some C implementations have trouble statically
    # initialising a global with a pointer to an extern
    # function, so we initialise some of the type slots
    # in the module init function instead.

    def generate_dynamic_init_code(self, scope, code):
        if self.is_initialised_dynamically:
            value = self.slot_code(scope)
            if value != "0":
                code.putln("%s.%s = %s;" % (
                    scope.parent_type.typeobj_cname,
                    self.slot_name,
                    value))


class FixedSlot(SlotDescriptor):
    # Descriptor for a type slot with a fixed value.
    #
    # value        string

    def __init__(self, slot_name, value, py3=True, py2=True, ifdef=None):
        SlotDescriptor.__init__(self, slot_name, py3=py3, py2=py2, ifdef=ifdef)
        self.value = value

    def slot_code(self, scope):
        return self.value


class EmptySlot(FixedSlot):
    # Descriptor for a type slot whose value is always 0.

    def __init__(self, slot_name, py3=True, py2=True, ifdef=None):
        FixedSlot.__init__(self, slot_name, "0", py3=py3, py2=py2, ifdef=ifdef)


class MethodSlot(SlotDescriptor):
    # Type slot descriptor for a user-definable method.
    #
    # signature    Signature
    # method_name  string           The __xxx__ name of the method
    # alternatives [string]         Alternative list of __xxx__ names for the method

    def __init__(self, signature, slot_name, method_name, fallback=None,
                 py3=True, py2=True, ifdef=None, inherited=True):
        SlotDescriptor.__init__(self, slot_name, py3=py3, py2=py2,
                                ifdef=ifdef, inherited=inherited)
        self.signature = signature
        self.slot_name = slot_name
        self.method_name = method_name
        self.alternatives = []
        method_name_to_slot[method_name] = self

        if fallback:
            self.alternatives.append(fallback)
        for alt in (self.py2, self.py3):
            if isinstance(alt, (tuple, list)):
                slot_name, method_name = alt
                self.alternatives.append(method_name)
                method_name_to_slot[method_name] = self

    def slot_code(self, scope):
        entry = scope.lookup_here(self.method_name)
        if entry and entry.is_special and entry.func_cname:
            return entry.func_cname
        for method_name in self.alternatives:
            entry = scope.lookup_here(method_name)
            if entry and entry.is_special and entry.func_cname:
                return entry.func_cname
        return "0"


class InternalMethodSlot(SlotDescriptor):
    # Type slot descriptor for a method which is always
    # synthesized by Cython.
    #
    # slot_name    string           Member name of the slot in the type object

    def __init__(self, slot_name, **kargs):
        SlotDescriptor.__init__(self, slot_name, **kargs)

    def slot_code(self, scope):
        return scope.mangle_internal(self.slot_name)


class GCDependentSlot(InternalMethodSlot):
    # Descriptor for a slot whose value depends on whether
    # the type participates in GC.

    def __init__(self, slot_name, **kargs):
        InternalMethodSlot.__init__(self, slot_name, **kargs)

    def slot_code(self, scope):
        if not scope.needs_gc():
            return "0"
        if not scope.has_cyclic_pyobject_attrs:
            # if the type does not have GC relevant object attributes, it can
            # delegate GC methods to its parent - iff the parent functions
            # are defined in the same module
            parent_type_scope = scope.parent_type.base_type.scope
            if scope.parent_scope is parent_type_scope.parent_scope:
                entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
                if entry.visibility != 'extern':
                    return self.slot_code(parent_type_scope)
        return InternalMethodSlot.slot_code(self, scope)


class GCClearReferencesSlot(GCDependentSlot):

    def slot_code(self, scope):
        if scope.needs_tp_clear():
            return GCDependentSlot.slot_code(self, scope)
        return "0"


class ConstructorSlot(InternalMethodSlot):
    # Descriptor for tp_new and tp_dealloc.

    def __init__(self, slot_name, method, **kargs):
        InternalMethodSlot.__init__(self, slot_name, **kargs)
        self.method = method

    def slot_code(self, scope):
        entry = scope.lookup_here(self.method)
        if (self.slot_name != 'tp_new'
                and scope.parent_type.base_type
                and not scope.has_pyobject_attrs
                and not scope.has_memoryview_attrs
                and not scope.has_cpp_class_attrs
                and not (entry and entry.is_special)):
            # if the type does not have object attributes, it can
            # delegate the constructor/destructor slot to its parent - iff
            # the parent functions are defined in the same module
            parent_type_scope = scope.parent_type.base_type.scope
            if scope.parent_scope is parent_type_scope.parent_scope:
                entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
                if entry.visibility != 'extern':
                    return self.slot_code(parent_type_scope)
        if entry and not entry.is_special:
            return "0"
        return InternalMethodSlot.slot_code(self, scope)


class SyntheticSlot(InternalMethodSlot):
    # Type slot descriptor for a synthesized method which
    # dispatches to one or more user-defined methods depending
    # on its arguments. If none of the relevant methods are
    # defined, the method will not be synthesized and an
    # alternative default value will be placed in the type
    # slot.

    def __init__(self, slot_name, user_methods, default_value, **kargs):
        InternalMethodSlot.__init__(self, slot_name, **kargs)
        self.user_methods = user_methods
        self.default_value = default_value

    def slot_code(self, scope):
        if scope.defines_any_special(self.user_methods):
            return InternalMethodSlot.slot_code(self, scope)
        else:
            return self.default_value


class RichcmpSlot(MethodSlot):
    def slot_code(self, scope):
        entry = scope.lookup_here(self.method_name)
        if entry and entry.is_special and entry.func_cname:
            return entry.func_cname
        elif scope.defines_any_special(richcmp_special_methods):
            return scope.mangle_internal(self.slot_name)
        else:
            return "0"


class TypeFlagsSlot(SlotDescriptor):
    # Descriptor for the type flags slot.

    def slot_code(self, scope):
        value = "Py_TPFLAGS_DEFAULT"
        if scope.directives['type_version_tag']:
            # it's not in 'Py_TPFLAGS_DEFAULT' in Py2
            value += "|Py_TPFLAGS_HAVE_VERSION_TAG"
        else:
            # it's enabled in 'Py_TPFLAGS_DEFAULT' in Py3
            value = "(%s&~Py_TPFLAGS_HAVE_VERSION_TAG)" % value
        value += "|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER"
        if not scope.parent_type.is_final_type:
            value += "|Py_TPFLAGS_BASETYPE"
        if scope.needs_gc():
            value += "|Py_TPFLAGS_HAVE_GC"
        return value


class DocStringSlot(SlotDescriptor):
    # Descriptor for the docstring slot.

    def slot_code(self, scope):
        doc = scope.doc
        if doc is None:
            return "0"
        if doc.is_unicode:
            doc = doc.as_utf8_string()
        return doc.as_c_string_literal()


class SuiteSlot(SlotDescriptor):
    # Descriptor for a substructure of the type object.
    #
    # sub_slots   [SlotDescriptor]

    def __init__(self, sub_slots, slot_type, slot_name, ifdef=None):
        SlotDescriptor.__init__(self, slot_name, ifdef=ifdef)
        self.sub_slots = sub_slots
        self.slot_type = slot_type
        substructures.append(self)

    def is_empty(self, scope):
        for slot in self.sub_slots:
            if slot.slot_code(scope) != "0":
                return False
        return True

    def substructure_cname(self, scope):
        return "%s%s_%s" % (Naming.pyrex_prefix, self.slot_name, scope.class_name)

    def slot_code(self, scope):
        if not self.is_empty(scope):
            return "&%s" % self.substructure_cname(scope)
        return "0"

    def generate_substructure(self, scope, code):
        if not self.is_empty(scope):
            code.putln("")
            if self.ifdef:
                code.putln("#if %s" % self.ifdef)
            code.putln(
                "static %s %s = {" % (
                    self.slot_type,
                    self.substructure_cname(scope)))
            for slot in self.sub_slots:
                slot.generate(scope, code)
            code.putln("};")
            if self.ifdef:
                code.putln("#endif")

substructures = []  # List of all SuiteSlot instances

class MethodTableSlot(SlotDescriptor):
    # Slot descriptor for the method table.

    def slot_code(self, scope):
        if scope.pyfunc_entries:
            return scope.method_table_cname
        else:
            return "0"


class MemberTableSlot(SlotDescriptor):
    # Slot descriptor for the table of Python-accessible attributes.

    def slot_code(self, scope):
        return "0"


class GetSetSlot(SlotDescriptor):
    # Slot descriptor for the table of attribute get & set methods.

    def slot_code(self, scope):
        if scope.property_entries:
            return scope.getset_table_cname
        else:
            return "0"


class BaseClassSlot(SlotDescriptor):
    # Slot descriptor for the base class slot.

    def __init__(self, name):
        SlotDescriptor.__init__(self, name, dynamic=1)

    def generate_dynamic_init_code(self, scope, code):
        base_type = scope.parent_type.base_type
        if base_type:
            code.putln("%s.%s = %s;" % (
                scope.parent_type.typeobj_cname,
                self.slot_name,
                base_type.typeptr_cname))


class DictOffsetSlot(SlotDescriptor):
    # Slot descriptor for a class' dict offset, for dynamic attributes.

    def slot_code(self, scope):
        dict_entry = scope.lookup_here("__dict__") if not scope.is_closure_class_scope else None
        if dict_entry and dict_entry.is_variable:
            if getattr(dict_entry.type, 'cname', None) != 'PyDict_Type':
                error(dict_entry.pos, "__dict__ slot must be of type 'dict'")
                return "0"
            type = scope.parent_type
            if type.typedef_flag:
                objstruct = type.objstruct_cname
            else:
                objstruct = "struct %s" % type.objstruct_cname
            return ("offsetof(%s, %s)" % (
                objstruct,
                dict_entry.cname))
        else:
            return "0"


# The following dictionary maps __xxx__ method names to slot descriptors.

method_name_to_slot = {}

## The following slots are (or could be) initialised with an
## extern function pointer.
#
#slots_initialised_from_extern = (
#    "tp_free",
#)

#------------------------------------------------------------------------------------------
#
# Utility functions for accessing slot table data structures
#
#------------------------------------------------------------------------------------------

def get_special_method_signature(name):
    # Given a method name, if it is a special method,
    # return its signature, else return None.
    slot = method_name_to_slot.get(name)
    if slot:
        return slot.signature
    elif name in richcmp_special_methods:
        return ibinaryfunc
    else:
        return None


def get_property_accessor_signature(name):
    # Return signature of accessor for an extension type
    # property, else None.
    return property_accessor_signatures.get(name)


def get_base_slot_function(scope, slot):
    # Returns the function implementing this slot in the baseclass.
    # This is useful for enabling the compiler to optimize calls
    # that recursively climb the class hierarchy.
    base_type = scope.parent_type.base_type
    if scope.parent_scope is base_type.scope.parent_scope:
        parent_slot = slot.slot_code(base_type.scope)
        if parent_slot != '0':
            entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
            if entry.visibility != 'extern':
                return parent_slot
    return None


def get_slot_function(scope, slot):
    # Returns the function implementing this slot for the given scope's
    # own type, if it is defined in this module.
    slot_code = slot.slot_code(scope)
    if slot_code != '0':
        entry = scope.parent_scope.lookup_here(scope.parent_type.name)
        if entry.visibility != 'extern':
            return slot_code
    return None


def get_slot_by_name(slot_name):
    # For now, only search the type struct, no referenced sub-structs.
    for slot in slot_table:
        if slot.slot_name == slot_name:
            return slot
    assert False, "Slot not found: %s" % slot_name


def get_slot_code_by_name(scope, slot_name):
    slot = get_slot_by_name(slot_name)
    return slot.slot_code(scope)

def is_reverse_number_slot(name):
    """
    Tries to identify __radd__ and friends (so the METH_COEXIST flag can be applied).

    There's no great consequence if it inadvertently identifies a few other methods
    so just use a simple rule rather than an exact list.
    """
    if name.startswith("__r") and name.endswith("__"):
        forward_name = name.replace("r", "", 1)
        for meth in PyNumberMethods:
            if getattr(meth, "method_name", None) == forward_name:
                return True
    return False
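
# Illustrative behaviour: '__radd__' strips its first 'r' to give
# '__add__', which is a method name in PyNumberMethods below, so:
#
#     is_reverse_number_slot('__radd__')   # -> True
#     is_reverse_number_slot('__repr__')   # -> False ('__epr__' is no slot)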
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Signatures for generic Python functions and methods.
|
||||
#
|
||||
#------------------------------------------------------------------------------------------
|
||||
|
||||
pyfunction_signature = Signature("-*", "O")
|
||||
pymethod_signature = Signature("T*", "O")
|
||||
|
||||
#------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Signatures for simple Python functions.
|
||||
#
|
||||
#------------------------------------------------------------------------------------------
|
||||
|
||||
pyfunction_noargs = Signature("-", "O")
|
||||
pyfunction_onearg = Signature("-O", "O")
|
||||
|
||||
#------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Signatures for the various kinds of function that
|
||||
# can appear in the type object and its substructures.
|
||||
#
|
||||
#------------------------------------------------------------------------------------------
|
||||
|
||||
unaryfunc = Signature("T", "O") # typedef PyObject * (*unaryfunc)(PyObject *);
|
||||
binaryfunc = Signature("OO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
|
||||
ibinaryfunc = Signature("TO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
|
||||
ternaryfunc = Signature("OOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
|
||||
iternaryfunc = Signature("TOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
|
||||
callfunc = Signature("T*", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
|
||||
inquiry = Signature("T", "i") # typedef int (*inquiry)(PyObject *);
|
||||
lenfunc = Signature("T", "z") # typedef Py_ssize_t (*lenfunc)(PyObject *);
|
||||
|
||||
# typedef int (*coercion)(PyObject **, PyObject **);
|
||||
intargfunc = Signature("Ti", "O") # typedef PyObject *(*intargfunc)(PyObject *, int);
|
||||
ssizeargfunc = Signature("Tz", "O") # typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
|
||||
intintargfunc = Signature("Tii", "O") # typedef PyObject *(*intintargfunc)(PyObject *, int, int);
|
||||
ssizessizeargfunc = Signature("Tzz", "O") # typedef PyObject *(*ssizessizeargfunc)(PyObject *, Py_ssize_t, Py_ssize_t);
|
||||
intobjargproc = Signature("TiO", 'r') # typedef int(*intobjargproc)(PyObject *, int, PyObject *);
|
||||
ssizeobjargproc = Signature("TzO", 'r')             # typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *);
intintobjargproc = Signature("TiiO", 'r')           # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
ssizessizeobjargproc = Signature("TzzO", 'r')       # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);

intintargproc = Signature("Tii", 'r')
ssizessizeargproc = Signature("Tzz", 'r')
objargfunc = Signature("TO", "O")
objobjargproc = Signature("TOO", 'r')               # typedef int (*objobjargproc)(PyObject *, PyObject *, PyObject *);
readbufferproc = Signature("TzP", "z")              # typedef Py_ssize_t (*readbufferproc)(PyObject *, Py_ssize_t, void **);
writebufferproc = Signature("TzP", "z")             # typedef Py_ssize_t (*writebufferproc)(PyObject *, Py_ssize_t, void **);
segcountproc = Signature("TZ", "z")                 # typedef Py_ssize_t (*segcountproc)(PyObject *, Py_ssize_t *);
charbufferproc = Signature("TzS", "z")              # typedef Py_ssize_t (*charbufferproc)(PyObject *, Py_ssize_t, char **);
objargproc = Signature("TO", 'r')                   # typedef int (*objobjproc)(PyObject *, PyObject *);
                                                    # typedef int (*visitproc)(PyObject *, void *);
                                                    # typedef int (*traverseproc)(PyObject *, visitproc, void *);

destructor = Signature("T", "v")                    # typedef void (*destructor)(PyObject *);
# printfunc = Signature("TFi", 'r')                 # typedef int (*printfunc)(PyObject *, FILE *, int);
                                                    # typedef PyObject *(*getattrfunc)(PyObject *, char *);
getattrofunc = Signature("TO", "O")                 # typedef PyObject *(*getattrofunc)(PyObject *, PyObject *);
                                                    # typedef int (*setattrfunc)(PyObject *, char *, PyObject *);
setattrofunc = Signature("TOO", 'r')                # typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *);
delattrofunc = Signature("TO", 'r')
cmpfunc = Signature("TO", "i")                      # typedef int (*cmpfunc)(PyObject *, PyObject *);
reprfunc = Signature("T", "O")                      # typedef PyObject *(*reprfunc)(PyObject *);
hashfunc = Signature("T", "h")                      # typedef Py_hash_t (*hashfunc)(PyObject *);
richcmpfunc = Signature("TOi", "O")                 # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
getiterfunc = Signature("T", "O")                   # typedef PyObject *(*getiterfunc) (PyObject *);
iternextfunc = Signature("T", "O")                  # typedef PyObject *(*iternextfunc) (PyObject *);
descrgetfunc = Signature("TOO", "O")                # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
descrsetfunc = Signature("TOO", 'r')                # typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *);
descrdelfunc = Signature("TO", 'r')
initproc = Signature("T*", 'r')                     # typedef int (*initproc)(PyObject *, PyObject *, PyObject *);
                                                    # typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *);
                                                    # typedef PyObject *(*allocfunc)(struct _typeobject *, int);

getbufferproc = Signature("TBi", "r")               # typedef int (*getbufferproc)(PyObject *, Py_buffer *, int);
releasebufferproc = Signature("TB", "v")            # typedef void (*releasebufferproc)(PyObject *, Py_buffer *);


#------------------------------------------------------------------------------------------
#
# Signatures for accessor methods of properties.
#
#------------------------------------------------------------------------------------------

property_accessor_signatures = {
    '__get__': Signature("T", "O"),
    '__set__': Signature("TO", 'r'),
    '__del__': Signature("T", 'r')
}
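
# Illustrative sketch (not part of the original module): how the Signature
# format strings above map to C prototypes.  Each character of the first
# argument describes one fixed C argument ('T' is the self object) and the
# second argument describes the return value ('r' is an int that reports
# errors as -1).  The partial mapping below is an assumption for
# demonstration purposes only.
_DEMO_FORMAT_TO_CTYPE = {
    'T': "PyObject *",      # the object the slot is called on
    'O': "PyObject *",
    'i': "int",
    'z': "Py_ssize_t",
    'h': "Py_hash_t",
    'v': "void",
    'r': "int",             # error reported as -1
}

def _demo_c_prototype(sig_args, sig_ret, name="slot"):
    """Render Signature("TzO", 'r') as 'int slot(PyObject *, Py_ssize_t, PyObject *)'."""
    args = ", ".join(_DEMO_FORMAT_TO_CTYPE[c] for c in sig_args)
    return "%s %s(%s)" % (_DEMO_FORMAT_TO_CTYPE[sig_ret], name, args)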

#------------------------------------------------------------------------------------------
#
# Descriptor tables for the slots of the various type object
# substructures, in the order they appear in the structure.
#
#------------------------------------------------------------------------------------------

PyNumberMethods_Py3_GUARD = "PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000)"

PyNumberMethods = (
    MethodSlot(binaryfunc, "nb_add", "__add__"),
    MethodSlot(binaryfunc, "nb_subtract", "__sub__"),
    MethodSlot(binaryfunc, "nb_multiply", "__mul__"),
    MethodSlot(binaryfunc, "nb_divide", "__div__", ifdef = PyNumberMethods_Py3_GUARD),
    MethodSlot(binaryfunc, "nb_remainder", "__mod__"),
    MethodSlot(binaryfunc, "nb_divmod", "__divmod__"),
    MethodSlot(ternaryfunc, "nb_power", "__pow__"),
    MethodSlot(unaryfunc, "nb_negative", "__neg__"),
    MethodSlot(unaryfunc, "nb_positive", "__pos__"),
    MethodSlot(unaryfunc, "nb_absolute", "__abs__"),
    MethodSlot(inquiry, "nb_nonzero", "__nonzero__", py3 = ("nb_bool", "__bool__")),
    MethodSlot(unaryfunc, "nb_invert", "__invert__"),
    MethodSlot(binaryfunc, "nb_lshift", "__lshift__"),
    MethodSlot(binaryfunc, "nb_rshift", "__rshift__"),
    MethodSlot(binaryfunc, "nb_and", "__and__"),
    MethodSlot(binaryfunc, "nb_xor", "__xor__"),
    MethodSlot(binaryfunc, "nb_or", "__or__"),
    EmptySlot("nb_coerce", ifdef = PyNumberMethods_Py3_GUARD),
    MethodSlot(unaryfunc, "nb_int", "__int__", fallback="__long__"),
    MethodSlot(unaryfunc, "nb_long", "__long__", fallback="__int__", py3 = "<RESERVED>"),
    MethodSlot(unaryfunc, "nb_float", "__float__"),
    MethodSlot(unaryfunc, "nb_oct", "__oct__", ifdef = PyNumberMethods_Py3_GUARD),
    MethodSlot(unaryfunc, "nb_hex", "__hex__", ifdef = PyNumberMethods_Py3_GUARD),

    # Added in release 2.0
    MethodSlot(ibinaryfunc, "nb_inplace_add", "__iadd__"),
    MethodSlot(ibinaryfunc, "nb_inplace_subtract", "__isub__"),
    MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"),
    MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__", ifdef = PyNumberMethods_Py3_GUARD),
    MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"),
    MethodSlot(ibinaryfunc, "nb_inplace_power", "__ipow__"),  # actually ternaryfunc!!!
    MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"),
    MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"),
    MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"),
    MethodSlot(ibinaryfunc, "nb_inplace_xor", "__ixor__"),
    MethodSlot(ibinaryfunc, "nb_inplace_or", "__ior__"),

    # Added in release 2.2
    # The following require the Py_TPFLAGS_HAVE_CLASS flag
    MethodSlot(binaryfunc, "nb_floor_divide", "__floordiv__"),
    MethodSlot(binaryfunc, "nb_true_divide", "__truediv__"),
    MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__"),
    MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__"),

    # Added in release 2.5
    MethodSlot(unaryfunc, "nb_index", "__index__"),

    # Added in release 3.5
    MethodSlot(binaryfunc, "nb_matrix_multiply", "__matmul__", ifdef="PY_VERSION_HEX >= 0x03050000"),
    MethodSlot(ibinaryfunc, "nb_inplace_matrix_multiply", "__imatmul__", ifdef="PY_VERSION_HEX >= 0x03050000"),
)
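
# Illustrative sketch (not part of the original module): slots declared with
# an ifdef, such as nb_matrix_multiply above, are written out bracketed by a
# C preprocessor guard so that the initializer compiles on every supported
# Python version.  The emitter below is an assumption for demonstration; the
# real logic lives in the slot descriptor classes.
def _demo_emit_slot(lines, value, slot_name, ifdef=None):
    if ifdef:
        lines.append("#if %s" % ifdef)
    lines.append("  %s, /*%s*/" % (value, slot_name))
    if ifdef:
        lines.append("#endif")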

PySequenceMethods = (
    MethodSlot(lenfunc, "sq_length", "__len__"),
    EmptySlot("sq_concat"),  # nb_add used instead
    EmptySlot("sq_repeat"),  # nb_multiply used instead
    SyntheticSlot("sq_item", ["__getitem__"], "0"),  #EmptySlot("sq_item"),   # mp_subscript used instead
    MethodSlot(ssizessizeargfunc, "sq_slice", "__getslice__"),
    EmptySlot("sq_ass_item"),  # mp_ass_subscript used instead
    SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"),
    MethodSlot(cmpfunc, "sq_contains", "__contains__"),
    EmptySlot("sq_inplace_concat"),  # nb_inplace_add used instead
    EmptySlot("sq_inplace_repeat"),  # nb_inplace_multiply used instead
)

PyMappingMethods = (
    MethodSlot(lenfunc, "mp_length", "__len__"),
    MethodSlot(objargfunc, "mp_subscript", "__getitem__"),
    SyntheticSlot("mp_ass_subscript", ["__setitem__", "__delitem__"], "0"),
)

PyBufferProcs = (
    MethodSlot(readbufferproc, "bf_getreadbuffer", "__getreadbuffer__", py3 = False),
    MethodSlot(writebufferproc, "bf_getwritebuffer", "__getwritebuffer__", py3 = False),
    MethodSlot(segcountproc, "bf_getsegcount", "__getsegcount__", py3 = False),
    MethodSlot(charbufferproc, "bf_getcharbuffer", "__getcharbuffer__", py3 = False),

    MethodSlot(getbufferproc, "bf_getbuffer", "__getbuffer__"),
    MethodSlot(releasebufferproc, "bf_releasebuffer", "__releasebuffer__")
)

PyAsyncMethods = (
    MethodSlot(unaryfunc, "am_await", "__await__"),
    MethodSlot(unaryfunc, "am_aiter", "__aiter__"),
    MethodSlot(unaryfunc, "am_anext", "__anext__"),
    EmptySlot("am_send", ifdef="PY_VERSION_HEX >= 0x030A00A3"),
)

#------------------------------------------------------------------------------------------
#
# The main slot table. This table contains descriptors for all the
# top-level type slots, beginning with tp_dealloc, in the order they
# appear in the type object.
#
#------------------------------------------------------------------------------------------

slot_table = (
    ConstructorSlot("tp_dealloc", '__dealloc__'),
    EmptySlot("tp_print", ifdef="PY_VERSION_HEX < 0x030800b4"),
    EmptySlot("tp_vectorcall_offset", ifdef="PY_VERSION_HEX >= 0x030800b4"),
    EmptySlot("tp_getattr"),
    EmptySlot("tp_setattr"),

    # tp_compare (Py2) / tp_reserved (Py3<3.5) / tp_as_async (Py3.5+) is always used as tp_as_async in Py3
    MethodSlot(cmpfunc, "tp_compare", "__cmp__", ifdef="PY_MAJOR_VERSION < 3"),
    SuiteSlot(PyAsyncMethods, "__Pyx_PyAsyncMethodsStruct", "tp_as_async", ifdef="PY_MAJOR_VERSION >= 3"),

    MethodSlot(reprfunc, "tp_repr", "__repr__"),

    SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"),
    SuiteSlot(PySequenceMethods, "PySequenceMethods", "tp_as_sequence"),
    SuiteSlot(PyMappingMethods, "PyMappingMethods", "tp_as_mapping"),

    MethodSlot(hashfunc, "tp_hash", "__hash__", inherited=False),  # Py3 checks for __richcmp__
    MethodSlot(callfunc, "tp_call", "__call__"),
    MethodSlot(reprfunc, "tp_str", "__str__"),

    SyntheticSlot("tp_getattro", ["__getattr__", "__getattribute__"], "0"),  #"PyObject_GenericGetAttr"),
    SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"),  #"PyObject_GenericSetAttr"),

    SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"),

    TypeFlagsSlot("tp_flags"),
    DocStringSlot("tp_doc"),

    GCDependentSlot("tp_traverse"),
    GCClearReferencesSlot("tp_clear"),

    RichcmpSlot(richcmpfunc, "tp_richcompare", "__richcmp__", inherited=False),  # Py3 checks for __hash__

    EmptySlot("tp_weaklistoffset"),

    MethodSlot(getiterfunc, "tp_iter", "__iter__"),
    MethodSlot(iternextfunc, "tp_iternext", "__next__"),

    MethodTableSlot("tp_methods"),
    MemberTableSlot("tp_members"),
    GetSetSlot("tp_getset"),

    BaseClassSlot("tp_base"),  #EmptySlot("tp_base"),
    EmptySlot("tp_dict"),

    SyntheticSlot("tp_descr_get", ["__get__"], "0"),
    SyntheticSlot("tp_descr_set", ["__set__", "__delete__"], "0"),

    DictOffsetSlot("tp_dictoffset"),

    MethodSlot(initproc, "tp_init", "__init__"),
    EmptySlot("tp_alloc"),  #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
    InternalMethodSlot("tp_new"),
    EmptySlot("tp_free"),

    EmptySlot("tp_is_gc"),
    EmptySlot("tp_bases"),
    EmptySlot("tp_mro"),
    EmptySlot("tp_cache"),
    EmptySlot("tp_subclasses"),
    EmptySlot("tp_weaklist"),
    EmptySlot("tp_del"),
    EmptySlot("tp_version_tag"),
    EmptySlot("tp_finalize", ifdef="PY_VERSION_HEX >= 0x030400a1"),
    EmptySlot("tp_vectorcall", ifdef="PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)"),
    EmptySlot("tp_print", ifdef="PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000"),
    # PyPy specific extension - only here to avoid C compiler warnings.
    EmptySlot("tp_pypy_flags", ifdef="CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000"),
)
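
# Illustrative sketch (not part of the original module): other parts of the
# compiler find a slot descriptor by its C field name.  A minimal lookup over
# the table above, assuming each descriptor exposes a slot_name attribute:
def _demo_get_slot_by_name(slot_name, table=slot_table):
    for slot in table:
        if slot.slot_name == slot_name:
            return slot
    assert False, "Slot not found: %s" % slot_name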

#------------------------------------------------------------------------------------------
#
# Descriptors for special methods which don't appear directly
# in the type object or its substructures. These methods are
# called from slot functions synthesized by Cython.
#
#------------------------------------------------------------------------------------------

MethodSlot(initproc, "", "__cinit__")
MethodSlot(destructor, "", "__dealloc__")
MethodSlot(objobjargproc, "", "__setitem__")
MethodSlot(objargproc, "", "__delitem__")
MethodSlot(ssizessizeobjargproc, "", "__setslice__")
MethodSlot(ssizessizeargproc, "", "__delslice__")
MethodSlot(getattrofunc, "", "__getattr__")
MethodSlot(getattrofunc, "", "__getattribute__")
MethodSlot(setattrofunc, "", "__setattr__")
MethodSlot(delattrofunc, "", "__delattr__")
MethodSlot(descrgetfunc, "", "__get__")
MethodSlot(descrsetfunc, "", "__set__")
MethodSlot(descrdelfunc, "", "__delete__")


# Method flags for python-exposed methods.

method_noargs = "METH_NOARGS"
method_onearg = "METH_O"
method_varargs = "METH_VARARGS"
method_keywords = "METH_KEYWORDS"
method_coexist = "METH_COEXIST"
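
# Illustrative sketch (not part of the original module): these flag strings
# are OR'ed together in the generated PyMethodDef tables.  For example, a
# Python-visible function that accepts keyword arguments is declared with
# both METH_VARARGS and METH_KEYWORDS:
def _demo_method_flags(has_args, has_keywords):
    if not has_args:
        return method_noargs
    flags = method_varargs
    if has_keywords:
        flags += "|" + method_keywords
    return flags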

@ -0,0 +1,359 @@
#
# Nodes used as utilities and support for transforms etc.
# These often make up sets including both Nodes and ExprNodes
# so it is convenient to have them in a separate module.
#

from __future__ import absolute_import

from . import Nodes
from . import ExprNodes
from .Nodes import Node
from .ExprNodes import AtomicExprNode
from .PyrexTypes import c_ptr_type


class TempHandle(object):
    # THIS IS DEPRECATED, USE LetRefNode instead
    temp = None
    needs_xdecref = False
    def __init__(self, type, needs_cleanup=None):
        self.type = type
        if needs_cleanup is None:
            self.needs_cleanup = type.is_pyobject
        else:
            self.needs_cleanup = needs_cleanup

    def ref(self, pos):
        return TempRefNode(pos, handle=self, type=self.type)


class TempRefNode(AtomicExprNode):
    # THIS IS DEPRECATED, USE LetRefNode instead
    # handle   TempHandle

    def analyse_types(self, env):
        assert self.type == self.handle.type
        return self

    def analyse_target_types(self, env):
        assert self.type == self.handle.type
        return self

    def analyse_target_declaration(self, env):
        pass

    def calculate_result_code(self):
        result = self.handle.temp
        if result is None: result = "<error>"  # might be called and overwritten
        return result

    def generate_result_code(self, code):
        pass

    def generate_assignment_code(self, rhs, code, overloaded_assignment=False):
        if self.type.is_pyobject:
            rhs.make_owned_reference(code)
            # TODO: analyse control flow to see if this is necessary
            code.put_xdecref(self.result(), self.ctype())
        code.putln('%s = %s;' % (
            self.result(),
            rhs.result() if overloaded_assignment else rhs.result_as(self.ctype()),
        ))
        rhs.generate_post_assignment_code(code)
        rhs.free_temps(code)


class TempsBlockNode(Node):
    # THIS IS DEPRECATED, USE LetNode instead

    """
    Creates a block which allocates temporary variables.
    This is used by transforms to output constructs that need
    to make use of a temporary variable. Simply pass the types
    of the needed temporaries to the constructor.

    The variables can be referred to using a TempRefNode
    (which can be constructed by calling TempHandle.ref()).
    """

    # temps   [TempHandle]
    # body    StatNode

    child_attrs = ["body"]

    def generate_execution_code(self, code):
        for handle in self.temps:
            handle.temp = code.funcstate.allocate_temp(
                handle.type, manage_ref=handle.needs_cleanup)
        self.body.generate_execution_code(code)
        for handle in self.temps:
            if handle.needs_cleanup:
                if handle.needs_xdecref:
                    code.put_xdecref_clear(handle.temp, handle.type)
                else:
                    code.put_decref_clear(handle.temp, handle.type)
            code.funcstate.release_temp(handle.temp)

    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.body = self.body.analyse_expressions(env)
        return self

    def generate_function_definitions(self, env, code):
        self.body.generate_function_definitions(env, code)

    def annotate(self, code):
        self.body.annotate(code)
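
# Illustrative sketch (not part of the original module): how a transform
# would use the deprecated TempHandle/TempsBlockNode pair to evaluate a
# value once and refer to it repeatedly in generated code.  The arguments
# (a statement node, a source position and a type) come from the caller.
def _demo_wrap_in_temp_block(body, pos, temp_type):
    handle = TempHandle(temp_type)          # describes one temporary
    ref = handle.ref(pos)                   # ExprNode referring to the temp
    block = TempsBlockNode(pos, temps=[handle], body=body)
    return block, ref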


class ResultRefNode(AtomicExprNode):
    # A reference to the result of an expression.  The result_code
    # must be set externally (usually a temp name).

    subexprs = []
    lhs_of_first_assignment = False

    def __init__(self, expression=None, pos=None, type=None, may_hold_none=True, is_temp=False):
        self.expression = expression
        self.pos = None
        self.may_hold_none = may_hold_none
        if expression is not None:
            self.pos = expression.pos
            if hasattr(expression, "type"):
                self.type = expression.type
        if pos is not None:
            self.pos = pos
        if type is not None:
            self.type = type
        if is_temp:
            self.is_temp = True
        assert self.pos is not None

    def clone_node(self):
        # nothing to do here
        return self

    def type_dependencies(self, env):
        if self.expression:
            return self.expression.type_dependencies(env)
        else:
            return ()

    def update_expression(self, expression):
        self.expression = expression
        if hasattr(expression, "type"):
            self.type = expression.type

    def analyse_types(self, env):
        if self.expression is not None:
            if not self.expression.type:
                self.expression = self.expression.analyse_types(env)
            self.type = self.expression.type
        return self

    def infer_type(self, env):
        if self.type is not None:
            return self.type
        if self.expression is not None:
            if self.expression.type is not None:
                return self.expression.type
            return self.expression.infer_type(env)
        assert False, "cannot infer type of ResultRefNode"

    def may_be_none(self):
        if not self.type.is_pyobject:
            return False
        return self.may_hold_none

    def _DISABLED_may_be_none(self):
        # not sure if this is safe - the expression may not be the
        # only value that gets assigned
        if self.expression is not None:
            return self.expression.may_be_none()
        if self.type is not None:
            return self.type.is_pyobject
        return True  # play safe

    def is_simple(self):
        return True

    def result(self):
        try:
            return self.result_code
        except AttributeError:
            if self.expression is not None:
                self.result_code = self.expression.result()
            return self.result_code

    def generate_evaluation_code(self, code):
        pass

    def generate_result_code(self, code):
        pass

    def generate_disposal_code(self, code):
        pass

    def generate_assignment_code(self, rhs, code, overloaded_assignment=False):
        if self.type.is_pyobject:
            rhs.make_owned_reference(code)
            if not self.lhs_of_first_assignment:
                code.put_decref(self.result(), self.ctype())
        code.putln('%s = %s;' % (
            self.result(),
            rhs.result() if overloaded_assignment else rhs.result_as(self.ctype()),
        ))
        rhs.generate_post_assignment_code(code)
        rhs.free_temps(code)

    def allocate_temps(self, env):
        pass

    def release_temp(self, env):
        pass

    def free_temps(self, code):
        pass


class LetNodeMixin:
    def set_temp_expr(self, lazy_temp):
        self.lazy_temp = lazy_temp
        self.temp_expression = lazy_temp.expression

    def setup_temp_expr(self, code):
        self.temp_expression.generate_evaluation_code(code)
        self.temp_type = self.temp_expression.type
        if self.temp_type.is_array:
            self.temp_type = c_ptr_type(self.temp_type.base_type)
        self._result_in_temp = self.temp_expression.result_in_temp()
        if self._result_in_temp:
            self.temp = self.temp_expression.result()
        else:
            self.temp_expression.make_owned_reference(code)
            self.temp = code.funcstate.allocate_temp(
                self.temp_type, manage_ref=True)
            code.putln("%s = %s;" % (self.temp, self.temp_expression.result()))
            self.temp_expression.generate_disposal_code(code)
            self.temp_expression.free_temps(code)
        self.lazy_temp.result_code = self.temp

    def teardown_temp_expr(self, code):
        if self._result_in_temp:
            self.temp_expression.generate_disposal_code(code)
            self.temp_expression.free_temps(code)
        else:
            if self.temp_type.is_pyobject:
                code.put_decref_clear(self.temp, self.temp_type)
            code.funcstate.release_temp(self.temp)


class EvalWithTempExprNode(ExprNodes.ExprNode, LetNodeMixin):
    # A wrapper around a subexpression that moves an expression into a
    # temp variable and provides it to the subexpression.

    subexprs = ['temp_expression', 'subexpression']

    def __init__(self, lazy_temp, subexpression):
        self.set_temp_expr(lazy_temp)
        self.pos = subexpression.pos
        self.subexpression = subexpression
        # if called after type analysis, we already know the type here
        self.type = self.subexpression.type

    def infer_type(self, env):
        return self.subexpression.infer_type(env)

    def may_be_none(self):
        return self.subexpression.may_be_none()

    def result(self):
        return self.subexpression.result()

    def analyse_types(self, env):
        self.temp_expression = self.temp_expression.analyse_types(env)
        self.lazy_temp.update_expression(self.temp_expression)  # overwrite in case it changed
        self.subexpression = self.subexpression.analyse_types(env)
        self.type = self.subexpression.type
        return self

    def free_subexpr_temps(self, code):
        self.subexpression.free_temps(code)

    def generate_subexpr_disposal_code(self, code):
        self.subexpression.generate_disposal_code(code)

    def generate_evaluation_code(self, code):
        self.setup_temp_expr(code)
        self.subexpression.generate_evaluation_code(code)
        self.teardown_temp_expr(code)


LetRefNode = ResultRefNode
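
# Illustrative sketch (not part of the original module): the modern "let"
# pattern.  A LetRefNode wraps an expression; EvalWithTempExprNode evaluates
# it once into a temporary and keeps the reference valid while the
# subexpression runs.  build_subexpression is a caller-supplied callback
# (an assumption for this demo) that builds the expression tree around the
# reference.
def _demo_let(expression, build_subexpression):
    ref = LetRefNode(expression)
    return EvalWithTempExprNode(ref, build_subexpression(ref))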


class LetNode(Nodes.StatNode, LetNodeMixin):
    # Implements a local temporary variable scope.  Imagine this
    # syntax being present:
    #     let temp = VALUE:
    #         BLOCK (can modify temp)
    #     # if temp is an object, decref
    #
    # Usually used after the analysis phase, but forwards analysis methods
    # to its children.

    child_attrs = ['temp_expression', 'body']

    def __init__(self, lazy_temp, body):
        self.set_temp_expr(lazy_temp)
        self.pos = body.pos
        self.body = body

    def analyse_declarations(self, env):
        self.temp_expression.analyse_declarations(env)
        self.body.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.temp_expression = self.temp_expression.analyse_expressions(env)
        self.body = self.body.analyse_expressions(env)
        return self

    def generate_execution_code(self, code):
        self.setup_temp_expr(code)
        self.body.generate_execution_code(code)
        self.teardown_temp_expr(code)

    def generate_function_definitions(self, env, code):
        self.temp_expression.generate_function_definitions(env, code)
        self.body.generate_function_definitions(env, code)


class TempResultFromStatNode(ExprNodes.ExprNode):
    # An ExprNode wrapper around a StatNode that executes the StatNode
    # body.  Requires a ResultRefNode that it sets up to refer to its
    # own temp result.  The StatNode must assign a value to the result
    # node, which then becomes the result of this node.

    subexprs = []
    child_attrs = ['body']

    def __init__(self, result_ref, body):
        self.result_ref = result_ref
        self.pos = body.pos
        self.body = body
        self.type = result_ref.type
        self.is_temp = 1

    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)

    def analyse_types(self, env):
        self.body = self.body.analyse_expressions(env)
        return self

    def generate_result_code(self, code):
        self.result_ref.result_code = self.result()
        self.body.generate_execution_code(code)
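
# Illustrative sketch (not part of the original module): turning a statement
# into an expression.  The statement body must assign to the ResultRefNode,
# whose result then becomes the value of the wrapper node.
def _demo_stat_as_expr(stat_node, result_type):
    ref = ResultRefNode(pos=stat_node.pos, type=result_type)
    return TempResultFromStatNode(ref, stat_node)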

@ -0,0 +1,237 @@
from __future__ import absolute_import

from .TreeFragment import parse_from_strings, StringParseContext
from . import Symtab
from . import Naming
from . import Code


class NonManglingModuleScope(Symtab.ModuleScope):

    def __init__(self, prefix, *args, **kw):
        self.prefix = prefix
        self.cython_scope = None
        self.cpp = kw.pop('cpp', False)
        Symtab.ModuleScope.__init__(self, *args, **kw)

    def add_imported_entry(self, name, entry, pos):
        entry.used = True
        return super(NonManglingModuleScope, self).add_imported_entry(name, entry, pos)

    def mangle(self, prefix, name=None):
        if name:
            if prefix in (Naming.typeobj_prefix, Naming.func_prefix, Naming.var_prefix, Naming.pyfunc_prefix):
                # Functions, classes, etc. get the manually defined prefix
                # (the one passed to CythonUtilityCode) so that they are
                # easily callable by hand.
                prefix = self.prefix
            return "%s%s" % (prefix, name)
        else:
            return Symtab.ModuleScope.mangle(self, prefix)
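
# Illustrative sketch (not part of the original module): the effect of the
# mangling override above.
def _demo_mangle(scope):
    # For the recognised prefixes the scope substitutes its own prefix:
    #   scope.mangle(Naming.func_prefix, "spam") == scope.prefix + "spam"
    # Anything else falls back to the normal ModuleScope mangling.
    return scope.mangle(Naming.func_prefix, "spam")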


class CythonUtilityCodeContext(StringParseContext):
    scope = None

    def find_module(self, module_name, relative_to=None, pos=None, need_pxd=True, absolute_fallback=True):
        if relative_to:
            raise AssertionError("Relative imports not supported in utility code.")
        if module_name != self.module_name:
            if module_name not in self.modules:
                raise AssertionError("Only the cython cimport is supported.")
            else:
                return self.modules[module_name]

        if self.scope is None:
            self.scope = NonManglingModuleScope(
                self.prefix, module_name, parent_module=None, context=self, cpp=self.cpp)

        return self.scope


class CythonUtilityCode(Code.UtilityCodeBase):
    """
    Utility code written in the Cython language itself.

    The @cname decorator can set the cname for a function, method or cdef class.
    Functions decorated with @cname('c_func_name') get the given cname.

    For cdef classes the rules are as follows:
        obj struct      -> <cname>_obj
        obj type ptr    -> <cname>_type
        methods         -> <class_cname>_<method_cname>

    For methods the cname decorator is optional, but without the decorator the
    methods will not be prototyped. See Cython.Compiler.CythonScope and
    tests/run/cythonscope.pyx for examples.
    """

    is_cython_utility = True

    def __init__(self, impl, name="__pyxutil", prefix="", requires=None,
                 file=None, from_scope=None, context=None, compiler_directives=None,
                 outer_module_scope=None):
        # 1) We need to delay the parsing/processing, so that all modules can be
        #    imported without import loops.
        # 2) The same utility code object can be used for multiple source files,
        #    while the generated node trees can be altered in the compilation of
        #    a single file.
        # Hence, delay any processing until later.
        context_types = {}
        if context is not None:
            from .PyrexTypes import BaseType
            for key, value in context.items():
                if isinstance(value, BaseType):
                    context[key] = key
                    context_types[key] = value
            impl = Code.sub_tempita(impl, context, file, name)
        self.impl = impl
        self.name = name
        self.file = file
        self.prefix = prefix
        self.requires = requires or []
        self.from_scope = from_scope
        self.outer_module_scope = outer_module_scope
        self.compiler_directives = compiler_directives
        self.context_types = context_types

    def __eq__(self, other):
        if isinstance(other, CythonUtilityCode):
            return self._equality_params() == other._equality_params()
        else:
            return False

    def _equality_params(self):
        outer_scope = self.outer_module_scope
        while isinstance(outer_scope, NonManglingModuleScope):
            outer_scope = outer_scope.outer_scope
        return self.impl, outer_scope, self.compiler_directives

    def __hash__(self):
        return hash(self.impl)

    def get_tree(self, entries_only=False, cython_scope=None):
        from .AnalysedTreeTransforms import AutoTestDictTransform
        # The AutoTestDictTransform creates the statement "__test__ = {}",
        # which when copied into the main ModuleNode overwrites
        # any __test__ in user code; not desired.
        excludes = [AutoTestDictTransform]

        from . import Pipeline, ParseTreeTransforms
        context = CythonUtilityCodeContext(
            self.name, compiler_directives=self.compiler_directives,
            cpp=cython_scope.is_cpp() if cython_scope else False)
        context.prefix = self.prefix
        context.cython_scope = cython_scope
        #context = StringParseContext(self.name)
        tree = parse_from_strings(
            self.name, self.impl, context=context, allow_struct_enum_decorator=True)
        pipeline = Pipeline.create_pipeline(context, 'pyx', exclude_classes=excludes)

        if entries_only:
            p = []
            for t in pipeline:
                p.append(t)
                if isinstance(t, ParseTreeTransforms.AnalyseDeclarationsTransform):
                    break

            pipeline = p

        transform = ParseTreeTransforms.CnameDirectivesTransform(context)
        # InterpretCompilerDirectives already does a cdef declarator check
        #before = ParseTreeTransforms.DecoratorTransform
        before = ParseTreeTransforms.InterpretCompilerDirectives
        pipeline = Pipeline.insert_into_pipeline(pipeline, transform,
                                                 before=before)

        def merge_scope(scope):
            def merge_scope_transform(module_node):
                module_node.scope.merge_in(scope)
                return module_node
            return merge_scope_transform

        if self.from_scope:
            pipeline = Pipeline.insert_into_pipeline(
                pipeline, merge_scope(self.from_scope),
                before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        for dep in self.requires:
            if isinstance(dep, CythonUtilityCode) and hasattr(dep, 'tree') and not cython_scope:
                pipeline = Pipeline.insert_into_pipeline(
                    pipeline, merge_scope(dep.tree.scope),
                    before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        if self.outer_module_scope:
            # inject outer module between utility code module and builtin module
            def scope_transform(module_node):
                module_node.scope.outer_scope = self.outer_module_scope
                return module_node

            pipeline = Pipeline.insert_into_pipeline(
                pipeline, scope_transform,
                before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        if self.context_types:
            # inject types into module scope
            def scope_transform(module_node):
                for name, type in self.context_types.items():
                    entry = module_node.scope.declare_type(name, type, None, visibility='extern')
                    entry.in_cinclude = True
                return module_node

            pipeline = Pipeline.insert_into_pipeline(
                pipeline, scope_transform,
                before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        (err, tree) = Pipeline.run_pipeline(pipeline, tree, printtree=False)
        assert not err, err
        self.tree = tree
        return tree

    def put_code(self, output):
        pass

    @classmethod
    def load_as_string(cls, util_code_name, from_file=None, **kwargs):
        """
        Load a utility code as a string. Returns (proto, implementation).
        """
        util = cls.load(util_code_name, from_file, **kwargs)
        return util.proto, util.impl  # keep line numbers => no lstrip()

    def declare_in_scope(self, dest_scope, used=False, cython_scope=None,
                         whitelist=None):
        """
        Declare all entries from the utility code in dest_scope. Code will
        only be included for used entries.
        """
        tree = self.get_tree(entries_only=True, cython_scope=cython_scope)

        entries = tree.scope.entries
        entries.pop('__name__')
        entries.pop('__file__')
        entries.pop('__builtins__')
        entries.pop('__doc__')

        for entry in entries.values():
            entry.utility_code_definition = self
            entry.used = used

        original_scope = tree.scope
        dest_scope.merge_in(original_scope, merge_unused=True, whitelist=whitelist)
        tree.scope = dest_scope

        for dep in self.requires:
            if dep.is_cython_utility:
                dep.declare_in_scope(dest_scope, cython_scope=cython_scope)

        return original_scope


def declare_declarations_in_scope(declaration_string, env, private_type=True,
                                  *args, **kwargs):
    """
    Declare the declarations given as Cython code in declaration_string
    in scope env.
    """
    CythonUtilityCode(declaration_string, *args, **kwargs).declare_in_scope(env)
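
# Illustrative sketch (not part of the original module): the typical flow.
# The utility-code name and file below are hypothetical; load() comes from
# UtilityCodeBase and parses the named snippet from a utility file.
def _demo_use_utility_code(env):
    util = CythonUtilityCode.load("SomeHelper", "SomeFile.pyx")  # hypothetical names
    util.declare_in_scope(env)   # make its entries visible in env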

@ -0,0 +1,9 @@
# for backwards compatibility

from __future__ import absolute_import

from .. import __version__ as version

# For the 'generated by' header line in C files.

watermark = str(version)