Commit 468051bc authored by Dr Alex Meakins, committed by GitHub

Merge pull request #1 from cython/master

Resync with Cython repo
parents 6437de44 cd2a42e2
[run]
branch = True
parallel = True
concurrency = multiprocessing,thread
include = Cython/*
source = Cython
omit = Test*
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
max_line_length = 120
# 4 space indentation
[*.{py,pyx,pxi,pxd,c,cpp,h,hpp}]
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
# Tab indentation (no size specified)
[Makefile]
indent_style = tab
# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2
......@@ -6,14 +6,20 @@ __pycache__
*.egg
*.egg-info
.*cache*/
*venv*/
Cython/Compiler/*.c
Cython/Plex/*.c
Cython/Runtime/refnanny.c
Cython/Tempita/*.c
Cython/*.c
Cython/*.html
Cython/*/*.html
Tools/*.elc
Demos/*.html
Demos/*/*.html
/TEST_TMP/
/build/
......@@ -22,11 +28,16 @@ Tools/*.elc
/dist/
.gitrev
.coverage
*.patch
*.diff
*.orig
*.prof
*.rej
*.log
*.dep
*.swp
*~
callgrind.out.*
.ipynb_checkpoints
docs/build
......@@ -36,3 +47,8 @@ TAGS
MANIFEST
.tox
# Jetbrains IDE project files
/.idea
/*.iml
os: linux
dist: trusty
language: python
# 'sudo' is enabled automatically by the 'apt' addon below.
#sudo: false
......@@ -18,33 +19,21 @@ cache:
directories:
- $HOME/.ccache
language: python
python:
- 2.7
- 3.6
- 2.6
- 3.4
- 3.5
- pypy
- pypy3
env:
global:
- USE_CCACHE=1
- CCACHE_SLOPPINESS=pch_defines,time_macros
- CCACHE_COMPRESS=1
- CCACHE_MAXSIZE=150M
- CCACHE_MAXSIZE=250M
- PATH="/usr/lib/ccache:$HOME/miniconda/bin:$PATH"
matrix:
- BACKEND=c
- BACKEND=cpp
- BACKEND=c,cpp
matrix:
include:
- python: 3.7
dist: xenial # Required for Python 3.7
sudo: required # travis-ci/travis-ci#9069
env: TEST_CODE_STYLE=1
- python: 2.7
env: BACKEND=c
- python: 2.7
env: BACKEND=cpp
- python: 3.7
dist: xenial # Required for Python 3.7
sudo: required # travis-ci/travis-ci#9069
......@@ -53,57 +42,58 @@ matrix:
dist: xenial # Required for Python 3.7
sudo: required # travis-ci/travis-ci#9069
env: BACKEND=cpp
- python: 3.8-dev
# Disabled: coverage analysis takes excessively long, several times longer than without.
# - python: 3.7
# dist: xenial # Required for Python 3.7
# sudo: required # travis-ci/travis-ci#9069
# env: COVERAGE=1
- python: 3.7
dist: xenial # Required for Python 3.7
sudo: required # travis-ci/travis-ci#9069
env: TEST_CODE_STYLE=1
- python: 3.4
env: BACKEND=c
- python: 3.4
env: BACKEND=cpp
- python: 3.5
env: BACKEND=c
- python: 3.5
env: BACKEND=cpp
- python: 3.6
env: BACKEND=c
- python: 3.6
env: BACKEND=cpp
- python: 3.8-dev
dist: xenial # Required for Python 3.7
sudo: required # travis-ci/travis-ci#9069
env: BACKEND=cpp
- os: osx
osx_image: xcode6.4
env: BACKEND=c PY=2
env: PY=2
python: 2
language: c
compiler: clang
cache: false
- os: osx
osx_image: xcode6.4
env: BACKEND=cpp PY=2
python: 2
language: cpp
compiler: clang
cache: false
- os: osx
osx_image: xcode6.4
env: BACKEND=c PY=3
env: PY=3
python: 3
language: c
compiler: clang
cache: false
- os: osx
osx_image: xcode6.4
env: BACKEND=cpp PY=3
python: 3
language: cpp
compiler: clang
cache: false
- env: STACKLESS=true BACKEND=c PY=2
python: 2.7
- env: STACKLESS=true BACKEND=c PY=3
python: 3.6
allow_failures:
- python: pypy
env: BACKEND=c
- python: pypy3
- python: 3.8-dev
- env: STACKLESS=true BACKEND=c PY=2
- env: STACKLESS=true BACKEND=c PY=3
exclude:
env: BACKEND=c
allow_failures:
- python: pypy
env: BACKEND=cpp
- python: pypy3
env: BACKEND=cpp
- python: 3.8-dev
#- env: STACKLESS=true BACKEND=c PY=2
#- env: STACKLESS=true BACKEND=c PY=3
branches:
only:
......@@ -114,18 +104,18 @@ before_install:
- |
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
# adding apt repos in travis is really fragile => retry a couple of times.
for i in {1..10}; do sudo apt-add-repository --yes 'ppa:ubuntu-toolchain-r/test' && break; sleep 2; done
for i in {1..10}; do sudo apt-get update && sudo apt-get install --yes gcc-8 $(if [ "$BACKEND" = cpp ]; then echo -n "g++-8"; fi ) && break; sleep 2; done
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 60 $(if [ "$BACKEND" = cpp ]; then echo " --slave /usr/bin/g++ g++ /usr/bin/g++-8"; fi)
for i in {1..10}; do travis_retry sudo apt-add-repository --yes 'ppa:ubuntu-toolchain-r/test' && break; sleep 2; done
for i in {1..10}; do travis_retry sudo apt-get update && travis_retry sudo apt-get install --yes gcc-8 $(if [ -z "${BACKEND##*cpp*}" ]; then echo -n "g++-8"; fi ) && break; sleep 2; done
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 60 $(if [ -z "${BACKEND##*cpp*}" ]; then echo " --slave /usr/bin/g++ g++ /usr/bin/g++-8"; fi)
sudo update-alternatives --set gcc /usr/bin/gcc-8
export CC=gcc
if [ "$BACKEND" = cpp ]; then sudo update-alternatives --set g++ /usr/bin/g++-8; export CXX=g++; fi
if [ -z "${BACKEND##*cpp*}" ]; then sudo update-alternatives --set g++ /usr/bin/g++-8; export CXX=g++; fi
fi
- |
if [[ "$TRAVIS_OS_NAME" == "osx" ]] || [[ "$STACKLESS" == "true" ]]; then # Install Miniconda
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then CONDA_PLATFORM=MacOSX; else CONDA_PLATFORM=Linux; fi
curl -s -o miniconda.sh https://repo.continuum.io/miniconda/Miniconda$PY-latest-${CONDA_PLATFORM}-x86_64.sh
travis_retry curl -s -o miniconda.sh https://repo.continuum.io/miniconda/Miniconda$PY-latest-${CONDA_PLATFORM}-x86_64.sh
bash miniconda.sh -b -p $HOME/miniconda && rm miniconda.sh
#conda install --quiet --yes nomkl --file=test-requirements.txt --file=test-requirements-cpython.txt
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
......@@ -139,25 +129,26 @@ before_install:
- if [[ "$STACKLESS" == "true" ]]; then
conda config --add channels stackless;
conda install --quiet --yes stackless;
travis_retry conda install --quiet --yes stackless;
fi
install:
- python -c 'import sys; print("Python %s" % (sys.version,))'
- if [ -n "${TRAVIS_PYTHON_VERSION##*-dev}" -a -n "${TRAVIS_PYTHON_VERSION##2.6*}" ]; then pip install -r test-requirements.txt $( [ -z "${TRAVIS_PYTHON_VERSION##pypy*}" -o -z "${TRAVIS_PYTHON_VERSION##3.7*}" ] || echo " -r test-requirements-cpython.txt" ) ; fi
- CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build
- if [ -n "${TRAVIS_PYTHON_VERSION##*-dev}" ]; then pip install -r test-requirements.txt $( [ -z "${TRAVIS_PYTHON_VERSION##pypy*}" -o -z "${TRAVIS_PYTHON_VERSION##3.7*}" ] || echo " -r test-requirements-cpython.txt" ) ; fi
# - CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build
before_script: ccache -s || true
script:
- PYTHON_DBG="python$( python -c 'import sys; print("%d.%d" % sys.version_info[:2])' )-dbg"
- if [ "$TEST_CODE_STYLE" = "1" ]; then
STYLE_ARGS="--no-unit --no-doctest --no-file --no-pyregr --no-examples";
else
STYLE_ARGS=--no-code-style;
if $PYTHON_DBG -V >&2; then CFLAGS="-O0 -ggdb" $PYTHON_DBG runtests.py -vv --no-code-style Debugger --backends=$BACKEND; fi;
if [ -z "${BACKEND##*cpp*}" -a -n "${TRAVIS_PYTHON_VERSION##*-dev}" ]; then pip install pythran; fi;
if [ "$BACKEND" != "cpp" -a -n "${TRAVIS_PYTHON_VERSION##2*}" -a -n "${TRAVIS_PYTHON_VERSION##*-dev}" ]; then pip install mypy; fi;
fi
- PYTHON_DBG="python$( python -c 'import sys; print("%d.%d" % sys.version_info[:2])' )-dbg"
- if $PYTHON_DBG -V >&2; then CFLAGS="-O0 -ggdb" $PYTHON_DBG runtests.py -vv $STYLE_ARGS Debugger --backends=$BACKEND; fi
- if [ "$BACKEND" = "cpp" -a -n "${TRAVIS_PYTHON_VERSION##2.6*}" ]; then pip install pythran; fi
- if [ "$BACKEND" = "c" -a -n "${TRAVIS_PYTHON_VERSION##2*}" ]; then pip install mypy; fi
- CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build_ext -i
- CFLAGS="-O0 -ggdb -Wall -Wextra" python runtests.py -vv $STYLE_ARGS -x Debugger --backends=$BACKEND $(if [ -z "$TEST_CODE_STYLE"; then echo " -j7 "; fi)
- if [ "$COVERAGE" != "1" ]; then CFLAGS="-O2 -ggdb -Wall -Wextra $(python -c 'import sys; print("-fno-strict-aliasing" if sys.version_info[0] == 2 else "")')" python setup.py build_ext -i; fi
- CFLAGS="-O0 -ggdb -Wall -Wextra" python runtests.py -vv $STYLE_ARGS -x Debugger --backends=$BACKEND $(if [ "$COVERAGE" == "1" ]; then echo " --coverage"; fi) $(if [ -z "$TEST_CODE_STYLE" ]; then echo " -j7 "; fi)
- ccache -s || true
This diff is collapsed.
......@@ -21,7 +21,10 @@ except ImportError:
class _FakePool(object):
def map_async(self, func, args):
from itertools import imap
try:
from itertools import imap
except ImportError:
imap=map
for _ in imap(func, args):
pass
......@@ -66,7 +69,7 @@ def parse_compile_time_env(option, name, value, parser):
def find_package_base(path):
base_dir, package_path = os.path.split(path)
while os.path.isfile(os.path.join(base_dir, '__init__.py')):
while is_package_dir(base_dir):
base_dir, parent = os.path.split(base_dir)
package_path = '%s/%s' % (parent, package_path)
return base_dir, package_path
......@@ -161,7 +164,11 @@ def parse_args(args):
dest='options', default={}, type="str",
action='callback', callback=parse_options,
help='set a cythonize option')
parser.add_option('-3', dest='python3_mode', action='store_true',
parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None,
help='use Python 2 syntax mode by default')
parser.add_option('-3', dest='language_level', action='store_const', const=3,
help='use Python 3 syntax mode by default')
parser.add_option('--3str', dest='language_level', action='store_const', const='3str',
help='use Python 3 syntax mode by default')
parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
help='generate annotated HTML page for source files')
......@@ -187,6 +194,8 @@ def parse_args(args):
help='increase Python compatibility by ignoring some compile time errors')
parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true',
help='compile as much as possible, ignore compilation failures')
parser.add_option('--no-docstrings', dest='no_docstrings', action='store_true',
help='strip docstrings')
options, args = parser.parse_args(args)
if not args:
......@@ -195,8 +204,9 @@ def parse_args(args):
options.build = True
if multiprocessing is None:
options.parallel = 0
if options.python3_mode:
options.options['language_level'] = 3
if options.language_level:
assert options.language_level in (2, 3, '3str')
options.options['language_level'] = options.language_level
return options, args
......@@ -211,6 +221,9 @@ def main(args=None):
if options.annotate:
Options.annotate = True
if options.no_docstrings:
Options.docstrings = False
for path in paths:
cython_compile(path, options)
......
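A hedged usage sketch of the options wired up above: the cythonize entry point can also be driven programmatically with the new -3 and --no-docstrings flags. The input file name below is hypothetical.

    # Hedged sketch; 'example.pyx' is a hypothetical input file.
    from Cython.Build.Cythonize import main

    # Equivalent to the command line: cythonize -3 --no-docstrings example.pyx
    main(['-3', '--no-docstrings', 'example.pyx'])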
This diff is collapsed.
......@@ -3,16 +3,13 @@ from __future__ import absolute_import
import sys, os, re, inspect
import imp
try:
import hashlib
except ImportError:
import md5 as hashlib
import hashlib
from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext
import Cython
from ..Compiler.Main import Context, CompilationOptions, default_options
from ..Compiler.Main import Context
from ..Compiler.Options import CompilationOptions, default_options
from ..Compiler.ParseTreeTransforms import (CythonTransform,
SkipDeclarations, AnalyseDeclarationsTransform, EnvTransform)
......@@ -136,8 +133,10 @@ def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
else:
print("Couldn't find %r" % symbol)
def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
cython_include_dirs=None, force=False, quiet=False, locals=None, globals=None, **kwds):
def cython_inline(code, get_type=unsafe_type,
lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
cython_include_dirs=None, cython_compiler_directives=None,
force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
if get_type is None:
get_type = lambda x: 'object'
......@@ -169,6 +168,11 @@ def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_ca
if not quiet:
# Parsing from strings not fully supported (e.g. cimports).
print("Could not parse code as a string (to extract unbound symbols).")
cython_compiler_directives = dict(cython_compiler_directives or {})
if language_level is not None:
cython_compiler_directives['language_level'] = language_level
cimports = []
for name, arg in list(kwds.items()):
if arg is cython_module:
......@@ -176,8 +180,8 @@ def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_ca
del kwds[name]
arg_names = sorted(kwds)
arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
key = orig_code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
module_name = "_cython_inline_" + hashlib.md5(_unicode(key).encode('utf-8')).hexdigest()
key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__
module_name = "_cython_inline_" + hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest()
if module_name in sys.modules:
module = sys.modules[module_name]
......@@ -233,7 +237,11 @@ def __invoke(%(params)s):
extra_compile_args = cflags)
if build_extension is None:
build_extension = _get_build_extension()
build_extension.extensions = cythonize([extension], include_path=cython_include_dirs or ['.'], quiet=quiet)
build_extension.extensions = cythonize(
[extension],
include_path=cython_include_dirs or ['.'],
compiler_directives=cython_compiler_directives,
quiet=quiet)
build_extension.build_temp = os.path.dirname(pyx_file)
build_extension.build_lib = lib_dir
build_extension.run()
......
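A hedged usage sketch of the extended cython_inline() signature shown above, exercising the new cython_compiler_directives and language_level keywords.

    from Cython.Build.Inline import cython_inline

    result = cython_inline(
        "return a + b",
        a=1, b=2,
        cython_compiler_directives={'boundscheck': False},  # new keyword
        language_level=3,                                    # new keyword
    )
    print(result)  # -> 3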
......@@ -62,11 +62,7 @@ try:
except NameError: # Python 3
from imp import reload
try:
import hashlib
except ImportError:
import md5 as hashlib
import hashlib
from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext
......@@ -308,7 +304,7 @@ class CythonMagics(Magics):
if args.name:
module_name = py3compat.unicode_to_str(args.name)
else:
module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
module_name = "_cython_magic_" + hashlib.sha1(str(key).encode('utf-8')).hexdigest()
html_file = os.path.join(lib_dir, module_name + '.html')
module_path = os.path.join(lib_dir, module_name + self.so_ext)
......@@ -423,12 +419,11 @@ class CythonMagics(Magics):
quiet=quiet,
annotate=args.annotate,
force=True,
language_level=min(3, sys.version_info[0]),
)
if args.language_level is not None:
assert args.language_level in (2, 3)
opts['language_level'] = args.language_level
elif sys.version_info[0] >= 3:
opts['language_level'] = 3
return cythonize([extension], **opts)
except CompileError:
return None
......
......@@ -33,25 +33,31 @@ class TestCyCache(CythonTest):
a_pyx = os.path.join(self.src_dir, 'a.pyx')
a_c = a_pyx[:-4] + '.c'
open(a_pyx, 'w').write(content1)
with open(a_pyx, 'w') as f:
f.write(content1)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
self.assertEqual(1, len(self.cache_files('a.c*')))
a_contents1 = open(a_c).read()
with open(a_c) as f:
a_contents1 = f.read()
os.unlink(a_c)
open(a_pyx, 'w').write(content2)
with open(a_pyx, 'w') as f:
f.write(content2)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
a_contents2 = open(a_c).read()
with open(a_c) as f:
a_contents2 = f.read()
os.unlink(a_c)
self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
self.assertEqual(2, len(self.cache_files('a.c*')))
open(a_pyx, 'w').write(content1)
with open(a_pyx, 'w') as f:
f.write(content1)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
self.assertEqual(2, len(self.cache_files('a.c*')))
a_contents = open(a_c).read()
with open(a_c) as f:
a_contents = f.read()
self.assertEqual(
a_contents, a_contents1,
msg='\n'.join(list(difflib.unified_diff(
......@@ -60,13 +66,15 @@ class TestCyCache(CythonTest):
def test_cycache_uses_cache(self):
a_pyx = os.path.join(self.src_dir, 'a.pyx')
a_c = a_pyx[:-4] + '.c'
open(a_pyx, 'w').write('pass')
with open(a_pyx, 'w') as f:
f.write('pass')
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii'))
os.unlink(a_c)
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
a_contents = open(a_c).read()
with open(a_c) as f:
a_contents = f.read()
self.assertEqual(a_contents, 'fake stuff',
'Unexpected contents: %s...' % a_contents[:100])
......@@ -75,7 +83,8 @@ class TestCyCache(CythonTest):
a_c = a_pyx[:-4] + '.c'
a_h = a_pyx[:-4] + '.h'
a_api_h = a_pyx[:-4] + '_api.h'
open(a_pyx, 'w').write('cdef public api int foo(int x): return x\n')
with open(a_pyx, 'w') as f:
f.write('cdef public api int foo(int x): return x\n')
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
expected = [a_c, a_h, a_api_h]
for output in expected:
......@@ -89,7 +98,8 @@ class TestCyCache(CythonTest):
hash_pyx = os.path.join(self.src_dir, 'options.pyx')
hash_c = hash_pyx[:-len('.pyx')] + '.c'
open(hash_pyx, 'w').write('pass')
with open(hash_pyx, 'w') as f:
f.write('pass')
self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
self.assertEqual(1, len(self.cache_files('options.c*')))
......
......@@ -24,10 +24,10 @@ class TestInline(CythonTest):
self.test_kwds['lib_dir'] = lib_dir
def test_simple(self):
self.assertEquals(inline("return 1+2", **self.test_kwds), 3)
self.assertEqual(inline("return 1+2", **self.test_kwds), 3)
def test_types(self):
self.assertEquals(inline("""
self.assertEqual(inline("""
cimport cython
return cython.typeof(a), cython.typeof(b)
""", a=1.0, b=[], **self.test_kwds), ('double', 'list object'))
......@@ -35,13 +35,13 @@ class TestInline(CythonTest):
def test_locals(self):
a = 1
b = 2
self.assertEquals(inline("return a+b", **self.test_kwds), 3)
self.assertEqual(inline("return a+b", **self.test_kwds), 3)
def test_globals(self):
self.assertEquals(inline("return global_value + 1", **self.test_kwds), global_value + 1)
self.assertEqual(inline("return global_value + 1", **self.test_kwds), global_value + 1)
def test_no_return(self):
self.assertEquals(inline("""
self.assertEqual(inline("""
a = 1
cdef double b = 2
cdef c = []
......@@ -49,7 +49,7 @@ class TestInline(CythonTest):
def test_def_node(self):
foo = inline("def foo(x): return x * x", **self.test_kwds)['foo']
self.assertEquals(foo(7), 49)
self.assertEqual(foo(7), 49)
def test_pure(self):
import cython as cy
......@@ -58,7 +58,15 @@ class TestInline(CythonTest):
c = cy.declare(cy.pointer(cy.float), &b)
return b
""", a=3, **self.test_kwds)
self.assertEquals(type(b), float)
self.assertEqual(type(b), float)
def test_compiler_directives(self):
self.assertEqual(
inline('return sum(x)',
x=[1, 2, 3],
cython_compiler_directives={'boundscheck': False}),
6
)
if has_numpy:
......@@ -66,5 +74,5 @@ class TestInline(CythonTest):
import numpy
a = numpy.ndarray((10, 20))
a[0,0] = 10
self.assertEquals(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
self.assertEquals(inline("return a[0,0]", a=a, **self.test_kwds), 10.0)
self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0)
......@@ -202,11 +202,11 @@ x = sin(0.0)
ip.run_cell_magic('cython', '--verbose', code)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEquals([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
verbose_log.thresholds)
with mock_distutils() as normal_log:
ip.run_cell_magic('cython', '', code)
ip.ex('g = f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
self.assertEquals([normal_log.INFO], normal_log.thresholds)
self.assertEqual([normal_log.INFO], normal_log.thresholds)
......@@ -6,10 +6,10 @@ class TestStripLiterals(CythonTest):
def t(self, before, expected):
actual, literals = strip_string_literals(before, prefix="_L")
self.assertEquals(expected, actual)
self.assertEqual(expected, actual)
for key, value in literals.items():
actual = actual.replace(key, value)
self.assertEquals(before, actual)
self.assertEqual(before, actual)
def test_empty(self):
self.t("", "")
......
......@@ -30,17 +30,19 @@ builtin_utility_code = {
class _BuiltinOverride(object):
def __init__(self, py_name, args, ret_type, cname, py_equiv="*",
utility_code=None, sig=None, func_type=None,
is_strict_signature=False, builtin_return_type=None):
is_strict_signature=False, builtin_return_type=None,
nogil=None):
self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv
self.args, self.ret_type = args, ret_type
self.func_type, self.sig = func_type, sig
self.builtin_return_type = builtin_return_type
self.is_strict_signature = is_strict_signature
self.utility_code = utility_code
self.nogil = nogil
def build_func_type(self, sig=None, self_arg=None):
if sig is None:
sig = Signature(self.args, self.ret_type)
sig = Signature(self.args, self.ret_type, nogil=self.nogil)
sig.exception_check = False # not needed for the current builtins
func_type = sig.function_type(self_arg)
if self.is_strict_signature:
......@@ -92,13 +94,13 @@ class BuiltinMethod(_BuiltinOverride):
builtin_function_table = [
# name, args, return, C API func, py equiv = "*"
BuiltinFunction('abs', "d", "d", "fabs",
is_strict_signature = True),
is_strict_signature=True, nogil=True),
BuiltinFunction('abs', "f", "f", "fabsf",
is_strict_signature = True),
is_strict_signature=True, nogil=True),
BuiltinFunction('abs', "i", "i", "abs",
is_strict_signature = True),
is_strict_signature=True, nogil=True),
BuiltinFunction('abs', "l", "l", "labs",
is_strict_signature = True),
is_strict_signature=True, nogil=True),
BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
utility_code = UtilityCode.load("abs_longlong", "Builtins.c"),
func_type = PyrexTypes.CFuncType(
......
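A hedged Cython sketch of what the nogil=True signatures above enable: abs() on C floating point and integer arguments maps to fabs/fabsf/abs/labs and can be used without holding the GIL.

    cdef double magnitude(double x) nogil:
        return abs(x)   # dispatches to C fabs(), no GIL needed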
......@@ -9,7 +9,7 @@ import sys
from . import Options
usage = """\
Cython (http://cython.org) is a compiler for code written in the
Cython (https://cython.org/) is a compiler for code written in the
Cython language. Cython is based on Pyrex by Greg Ewing.
Usage: cython [options] sourcefile.{pyx,py} ...
......@@ -40,6 +40,8 @@ Options:
--embed[=<method_name>] Generate a main() function that embeds the Python interpreter.
-2 Compile based on Python-2 syntax and code semantics.
-3 Compile based on Python-3 syntax and code semantics.
--3str Compile based on Python-3 syntax and code semantics without
assuming unicode by default for string literals under Python 2.
--lenient Change some compile time errors to runtime errors to
improve Python compatibility
--capi-reexport-cincludes Add cincluded headers to any auto-generated header files.
......@@ -63,8 +65,6 @@ def bad_usage():
def parse_command_line(args):
from .Main import CompilationOptions, default_options
pending_arg = []
def pop_arg():
......@@ -92,7 +92,7 @@ def parse_command_line(args):
else:
return pop_arg()
options = CompilationOptions(default_options)
options = Options.CompilationOptions(Options.default_options)
sources = []
while args:
if args[0].startswith("-"):
......@@ -151,6 +151,8 @@ def parse_command_line(args):
options.language_level = 2
elif option == '-3':
options.language_level = 3
elif option == '--3str':
options.language_level = '3str'
elif option == "--capi-reexport-cincludes":
options.capi_reexport_cincludes = True
elif option == "--fast-fail":
......
# cython: language_level = 2
# cython: language_level=3str
# cython: auto_pickle=False
#
# Code output module
......@@ -13,22 +13,17 @@ cython.declare(os=object, re=object, operator=object, textwrap=object,
DebugFlags=object, basestring=object, defaultdict=object,
closing=object, partial=object)
import hashlib
import operator
import os
import re
import shutil
import sys
import operator
import textwrap
from string import Template
from functools import partial
from contextlib import closing
from collections import defaultdict
try:
import hashlib
except ImportError:
import md5 as hashlib
from . import Naming
from . import Options
from . import DebugFlags
......@@ -43,8 +38,6 @@ try:
except ImportError:
from builtins import str as basestring
KEYWORDS_MUST_BE_BYTES = sys.version_info < (2, 7)
non_portable_builtins_map = {
# builtins that have different names in different Python versions
......@@ -259,15 +252,11 @@ class UtilityCodeBase(object):
utility[1] = code
else:
all_tags = utility[2]
if KEYWORDS_MUST_BE_BYTES:
type = type.encode('ASCII')
all_tags[type] = code
if tags:
all_tags = utility[2]
for name, values in tags.items():
if KEYWORDS_MUST_BE_BYTES:
name = name.encode('ASCII')
all_tags.setdefault(name, set()).update(values)
@classmethod
......@@ -290,7 +279,7 @@ class UtilityCodeBase(object):
(r'^%(C)s{5,30}\s*(?P<name>(?:\w|\.)+)\s*%(C)s{5,30}|'
r'^%(C)s+@(?P<tag>\w+)\s*:\s*(?P<value>(?:\w|[.:])+)') %
{'C': comment}).match
match_type = re.compile('(.+)[.](proto(?:[.]\S+)?|impl|init|cleanup)$').match
match_type = re.compile(r'(.+)[.](proto(?:[.]\S+)?|impl|init|cleanup)$').match
with closing(Utils.open_source_file(filename, encoding='UTF-8')) as f:
all_lines = f.readlines()
......@@ -828,8 +817,8 @@ class FunctionState(object):
A C string referring to the variable is returned.
"""
if type.is_const and not type.is_reference:
type = type.const_base_type
if type.is_cv_qualified and not type.is_reference:
type = type.cv_base_type
elif type.is_reference and not type.is_fake_reference:
type = type.ref_base_type
if not type.is_pyobject and not type.is_memoryviewslice:
......@@ -906,9 +895,11 @@ class FunctionState(object):
try-except and try-finally blocks to clean up temps in the
error case.
"""
return [(cname, type)
for (type, manage_ref), freelist in self.temps_free.items() if manage_ref
for cname in freelist[0]]
return sorted([ # Enforce deterministic order.
(cname, type)
for (type, manage_ref), freelist in self.temps_free.items() if manage_ref
for cname in freelist[0]
])
def start_collecting_temps(self):
"""
......@@ -1134,10 +1125,12 @@ class GlobalState(object):
self.const_cnames_used = {}
self.string_const_index = {}
self.dedup_const_index = {}
self.pyunicode_ptr_const_index = {}
self.num_const_index = {}
self.py_constants = []
self.cached_cmethods = {}
self.initialised_constants = set()
writer.set_global_state(self)
self.rootwriter = writer
......@@ -1247,7 +1240,12 @@ class GlobalState(object):
# constant handling at code generation time
def get_cached_constants_writer(self):
def get_cached_constants_writer(self, target=None):
if target is not None:
if target in self.initialised_constants:
# Return None on second/later calls to prevent duplicate creation code.
return None
self.initialised_constants.add(target)
return self.parts['cached_constants']
def get_int_const(self, str_value, longness=False):
......@@ -1265,13 +1263,19 @@ class GlobalState(object):
c = self.new_num_const(str_value, 'float', value_code)
return c
def get_py_const(self, type, prefix='', cleanup_level=None):
def get_py_const(self, type, prefix='', cleanup_level=None, dedup_key=None):
if dedup_key is not None:
const = self.dedup_const_index.get(dedup_key)
if const is not None:
return const
# create a new Python object constant
const = self.new_py_const(type, prefix)
if cleanup_level is not None \
and cleanup_level <= Options.generate_cleanup_code:
cleanup_writer = self.parts['cleanup_globals']
cleanup_writer.putln('Py_CLEAR(%s);' % const.cname)
if dedup_key is not None:
self.dedup_const_index[dedup_key] = const
return const
def get_string_const(self, text, py_version=None):
......@@ -1792,8 +1796,8 @@ class CCodeWriter(object):
def get_py_float(self, str_value, value_code):
return self.globalstate.get_float_const(str_value, value_code).cname
def get_py_const(self, type, prefix='', cleanup_level=None):
return self.globalstate.get_py_const(type, prefix, cleanup_level).cname
def get_py_const(self, type, prefix='', cleanup_level=None, dedup_key=None):
return self.globalstate.get_py_const(type, prefix, cleanup_level, dedup_key).cname
def get_string_const(self, text):
return self.globalstate.get_string_const(text).cname
......@@ -1815,8 +1819,8 @@ class CCodeWriter(object):
def intern_identifier(self, text):
return self.get_py_string_const(text, identifier=True)
def get_cached_constants_writer(self):
return self.globalstate.get_cached_constants_writer()
def get_cached_constants_writer(self, target=None):
return self.globalstate.get_cached_constants_writer(target)
# code generation
......@@ -1872,7 +1876,7 @@ class CCodeWriter(object):
include_dir = self.globalstate.common_utility_include_dir
if include_dir and len(code) > 1024:
include_file = "%s_%s.h" % (
name, hashlib.md5(code.encode('utf8')).hexdigest())
name, hashlib.sha1(code.encode('utf8')).hexdigest())
path = os.path.join(include_dir, include_file)
if not os.path.exists(path):
tmp_path = '%s.tmp%s' % (path, os.getpid())
......@@ -2224,6 +2228,12 @@ class CCodeWriter(object):
# GIL methods
def use_fast_gil_utility_code(self):
if self.globalstate.directives['fast_gil']:
self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
else:
self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
def put_ensure_gil(self, declare_gilstate=True, variable=None):
"""
Acquire the GIL. The generated code is safe even when no PyThreadState
......@@ -2233,10 +2243,7 @@ class CCodeWriter(object):
"""
self.globalstate.use_utility_code(
UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c"))
if self.globalstate.directives['fast_gil']:
self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
else:
self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.use_fast_gil_utility_code()
self.putln("#ifdef WITH_THREAD")
if not variable:
variable = '__pyx_gilstate_save'
......@@ -2249,10 +2256,7 @@ class CCodeWriter(object):
"""
Releases the GIL, corresponds to `put_ensure_gil`.
"""
if self.globalstate.directives['fast_gil']:
self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
else:
self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.use_fast_gil_utility_code()
if not variable:
variable = '__pyx_gilstate_save'
self.putln("#ifdef WITH_THREAD")
......@@ -2264,10 +2268,7 @@ class CCodeWriter(object):
Acquire the GIL. The thread's thread state must have been initialized
by a previous `put_release_gil`
"""
if self.globalstate.directives['fast_gil']:
self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
else:
self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.use_fast_gil_utility_code()
self.putln("#ifdef WITH_THREAD")
self.putln("__Pyx_FastGIL_Forget();")
if variable:
......@@ -2277,10 +2278,7 @@ class CCodeWriter(object):
def put_release_gil(self, variable=None):
"Release the GIL, corresponds to `put_acquire_gil`."
if self.globalstate.directives['fast_gil']:
self.globalstate.use_utility_code(UtilityCode.load_cached("FastGil", "ModuleSetupCode.c"))
else:
self.globalstate.use_utility_code(UtilityCode.load_cached("NoFastGil", "ModuleSetupCode.c"))
self.use_fast_gil_utility_code()
self.putln("#ifdef WITH_THREAD")
self.putln("PyThreadState *_save;")
self.putln("Py_UNBLOCK_THREADS")
......
......@@ -60,11 +60,9 @@ class CompileError(PyrexError):
self.message_only = message
self.formatted_message = format_error(message, position)
self.reported = False
# Deprecated and withdrawn in 2.6:
# self.message = message
Exception.__init__(self, self.formatted_message)
# Python Exception subclass pickling is broken,
# see http://bugs.python.org/issue1692335
# see https://bugs.python.org/issue1692335
self.args = (position, message)
def __str__(self):
......@@ -74,8 +72,6 @@ class CompileWarning(PyrexWarning):
def __init__(self, position = None, message = ""):
self.position = position
# Deprecated and withdrawn in 2.6:
# self.message = message
Exception.__init__(self, format_position(position) + message)
class InternalError(Exception):
......@@ -114,7 +110,7 @@ class CompilerCrash(CompileError):
message += u'%s: %s' % (cause.__class__.__name__, cause)
CompileError.__init__(self, pos, message)
# Python Exception subclass pickling is broken,
# see http://bugs.python.org/issue1692335
# see https://bugs.python.org/issue1692335
self.args = (pos, context, message, cause, stacktrace)
class NoElementTreeInstalledException(PyrexError):
......
This diff is collapsed.
......@@ -11,10 +11,8 @@ cdef class ControlBlock:
cdef public list stats
cdef public dict gen
cdef public set bounded
cdef public dict input
cdef public dict output
# Big integer it bitsets
# Big integer bitsets
cdef public object i_input
cdef public object i_output
cdef public object i_gen
......@@ -107,6 +105,7 @@ cdef class ControlFlowAnalysis(CythonTransform):
cdef list stack
cdef object env
cdef ControlFlow flow
cdef object object_expr
cdef bint in_inplace_assignment
cpdef mark_assignment(self, lhs, rhs=*)
......
# cython: language_level=3str
from __future__ import absolute_import
import cython
cython.declare(PyrexTypes=object, ExprNodes=object, Nodes=object,
Builtin=object, InternalError=object, error=object, warning=object,
py_object_type=object, unspecified_type=object,
object_expr=object, fake_rhs_expr=object, TypedExprNode=object)
fake_rhs_expr=object, TypedExprNode=object)
from . import Builtin
from . import ExprNodes
from . import Nodes
from . import Options
from .PyrexTypes import py_object_type, unspecified_type
from . import PyrexTypes
from .Visitor import TreeVisitor, CythonTransform
......@@ -28,9 +28,8 @@ class TypedExprNode(ExprNodes.ExprNode):
def may_be_none(self):
return self._may_be_none != False
object_expr = TypedExprNode(py_object_type, may_be_none=True)
# Fake rhs to silence "unused variable" warning
fake_rhs_expr = TypedExprNode(unspecified_type)
fake_rhs_expr = TypedExprNode(PyrexTypes.unspecified_type)
class ControlBlock(object):
......@@ -373,9 +372,9 @@ class NameDeletion(NameAssignment):
def infer_type(self):
inferred_type = self.rhs.infer_type(self.entry.scope)
if (not inferred_type.is_pyobject and
inferred_type.can_coerce_to_pyobject(self.entry.scope)):
return py_object_type
if (not inferred_type.is_pyobject
and inferred_type.can_coerce_to_pyobject(self.entry.scope)):
return PyrexTypes.py_object_type
self.inferred_type = inferred_type
return inferred_type
......@@ -674,7 +673,8 @@ class AssignmentCollector(TreeVisitor):
class ControlFlowAnalysis(CythonTransform):
def visit_ModuleNode(self, node):
self.gv_ctx = GVContext()
dot_output = self.current_directives['control_flow.dot_output']
self.gv_ctx = GVContext() if dot_output else None
self.constant_folder = ConstantFolding()
# Set of NameNode reductions
......@@ -685,18 +685,15 @@ class ControlFlowAnalysis(CythonTransform):
self.env = node.scope
self.stack = []
self.flow = ControlFlow()
self.object_expr = TypedExprNode(PyrexTypes.py_object_type, may_be_none=True)
self.visitchildren(node)
check_definitions(self.flow, self.current_directives)
dot_output = self.current_directives['control_flow.dot_output']
if dot_output:
annotate_defs = self.current_directives['control_flow.dot_annotate_defs']
fp = open(dot_output, 'wt')
try:
with open(dot_output, 'wt') as fp:
self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs)
finally:
fp.close()
return node
def visit_FuncDefNode(self, node):
......@@ -744,7 +741,8 @@ class ControlFlowAnalysis(CythonTransform):
check_definitions(self.flow, self.current_directives)
self.flow.blocks.add(self.flow.entry_point)
self.gv_ctx.add(GV(node.local_scope.name, self.flow))
if self.gv_ctx is not None:
self.gv_ctx.add(GV(node.local_scope.name, self.flow))
self.flow = self.stack.pop()
self.env = self.env_stack.pop()
......@@ -769,7 +767,7 @@ class ControlFlowAnalysis(CythonTransform):
self.flow.nextblock()
if not rhs:
rhs = object_expr
rhs = self.object_expr
if lhs.is_name:
if lhs.entry is not None:
entry = lhs.entry
......@@ -1300,6 +1298,8 @@ class ControlFlowAnalysis(CythonTransform):
self.env_stack.append(self.env)
self.env = node.scope
self.flow.nextblock()
if node.doc_node:
self.flow.mark_assignment(node.doc_node, fake_rhs_expr, node.doc_node.entry)
self.visitchildren(node, ('body',))
self.flow.nextblock()
self.env = self.env_stack.pop()
......
......@@ -4,7 +4,7 @@ def _get_feature(name):
return getattr(__future__, name, object())
unicode_literals = _get_feature("unicode_literals")
with_statement = _get_feature("with_statement")
with_statement = _get_feature("with_statement") # dummy
division = _get_feature("division")
print_function = _get_feature("print_function")
absolute_import = _get_feature("absolute_import")
......
......@@ -16,10 +16,10 @@ IDENT = 'IDENT'
def make_lexicon():
from ..Plex import \
Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
TEXT, IGNORE, State, Lexicon
from .Scanning import Method
TEXT, IGNORE, Method, State, Lexicon
letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
nonzero_digit = Any("123456789")
digit = Any("0123456789")
bindigit = Any("01")
octdigit = Any("01234567")
......@@ -29,15 +29,22 @@ def make_lexicon():
def underscore_digits(d):
return Rep1(d) + Rep(Str("_") + Rep1(d))
def prefixed_digits(prefix, digits):
return prefix + Opt(Str("_")) + underscore_digits(digits)
decimal = underscore_digits(digit)
dot = Str(".")
exponent = Any("Ee") + Opt(Any("+-")) + decimal
decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)
name = letter + Rep(letter | digit)
intconst = decimal | (Str("0") + ((Any("Xx") + underscore_digits(hexdigit)) |
(Any("Oo") + underscore_digits(octdigit)) |
(Any("Bb") + underscore_digits(bindigit)) ))
intconst = (prefixed_digits(nonzero_digit, digit) | # decimal literals with underscores must not start with '0'
(Str("0") + (prefixed_digits(Any("Xx"), hexdigit) |
prefixed_digits(Any("Oo"), octdigit) |
prefixed_digits(Any("Bb"), bindigit) )) |
underscore_digits(Str('0')) # 0_0_0_0... is allowed as a decimal literal
| Rep1(digit) # FIXME: remove these Py2 style decimal/octal literals (PY_VERSION_HEX < 3)
)
intsuffix = (Opt(Any("Uu")) + Opt(Any("Ll")) + Opt(Any("Ll"))) | (Opt(Any("Ll")) + Opt(Any("Ll")) + Opt(Any("Uu")))
intliteral = intconst + intsuffix
fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
......
This diff is collapsed.
......@@ -487,7 +487,7 @@ def copy_c_or_fortran_cname(memview):
def get_copy_new_utility(pos, from_memview, to_memview):
if (from_memview.dtype != to_memview.dtype and
not (from_memview.dtype.is_const and from_memview.dtype.const_base_type == to_memview.dtype)):
not (from_memview.dtype.is_cv_qualified and from_memview.dtype.cv_base_type == to_memview.dtype)):
error(pos, "dtypes must be the same!")
return
if len(from_memview.axes) != len(to_memview.axes):
......
This diff is collapsed.
......@@ -117,6 +117,9 @@ frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
type_dict_guard_temp = pyrex_prefix + "type_dict_guard"
cython_runtime_cname = pyrex_prefix + "cython_runtime"
global_code_object_cache_find = pyrex_prefix + 'find_code_object'
......
This diff is collapsed.
......@@ -2777,11 +2777,9 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
return node
type_arg = args[0]
if not obj.is_name or not type_arg.is_name:
# play safe
return node
return node # not a simple case
if obj.type != Builtin.type_type or type_arg.type != Builtin.type_type:
# not a known type, play safe
return node
return node # not a known type
if not type_arg.type_entry or not obj.type_entry:
if obj.name != type_arg.name:
return node
......@@ -3166,6 +3164,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
PyrexTypes.CFuncTypeArg("op2", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("cval", ctype, None),
PyrexTypes.CFuncTypeArg("inplace", PyrexTypes.c_bint_type, None),
PyrexTypes.CFuncTypeArg("zerodiv_check", PyrexTypes.c_bint_type, None),
], exception_value=None if ret_type.is_pyobject else ret_type.exception_value))
for ctype in (PyrexTypes.c_long_type, PyrexTypes.c_double_type)
for ret_type in (PyrexTypes.py_object_type, PyrexTypes.c_bint_type)
......@@ -3177,6 +3176,9 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
def _handle_simple_method_object___sub__(self, node, function, args, is_unbound_method):
return self._optimise_num_binop('Subtract', node, function, args, is_unbound_method)
def _handle_simple_method_object___mul__(self, node, function, args, is_unbound_method):
return self._optimise_num_binop('Multiply', node, function, args, is_unbound_method)
def _handle_simple_method_object___eq__(self, node, function, args, is_unbound_method):
return self._optimise_num_binop('Eq', node, function, args, is_unbound_method)
......@@ -3297,12 +3299,22 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
# Cut off at an integer border that is still safe for all operations.
return node
if operator in ('TrueDivide', 'FloorDivide', 'Divide', 'Remainder'):
if args[1].constant_result == 0:
# Don't optimise division by 0. :)
return node
args = list(args)
args.append((ExprNodes.FloatNode if is_float else ExprNodes.IntNode)(
numval.pos, value=numval.value, constant_result=numval.constant_result,
type=num_type))
inplace = node.inplace if isinstance(node, ExprNodes.NumBinopNode) else False
args.append(ExprNodes.BoolNode(node.pos, value=inplace, constant_result=inplace))
if is_float or operator not in ('Eq', 'Ne'):
# "PyFloatBinop" and "PyIntBinop" take an additional "check for zero division" argument.
zerodivision_check = arg_order == 'CObj' and (
not node.cdivision if isinstance(node, ExprNodes.DivNode) else False)
args.append(ExprNodes.BoolNode(node.pos, value=zerodivision_check, constant_result=zerodivision_check))
utility_code = TempitaUtilityCode.load_cached(
"PyFloatBinop" if is_float else "PyIntCompare" if operator in ('Eq', 'Ne') else "PyIntBinop",
......@@ -3374,6 +3386,8 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
PyrexTypes.CFuncTypeArg("uchar", PyrexTypes.c_py_ucs4_type, None),
])
# DISABLED: Return value can only be one character, which is not correct.
'''
def _inject_unicode_character_conversion(self, node, function, args, is_unbound_method):
if is_unbound_method or len(args) != 1:
return node
......@@ -3392,9 +3406,10 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
func_call = func_call.coerce_to_pyobject(self.current_env)
return func_call
_handle_simple_method_unicode_lower = _inject_unicode_character_conversion
_handle_simple_method_unicode_upper = _inject_unicode_character_conversion
_handle_simple_method_unicode_title = _inject_unicode_character_conversion
#_handle_simple_method_unicode_lower = _inject_unicode_character_conversion
#_handle_simple_method_unicode_upper = _inject_unicode_character_conversion
#_handle_simple_method_unicode_title = _inject_unicode_character_conversion
'''
PyUnicode_Splitlines_func_type = PyrexTypes.CFuncType(
Builtin.list_type, [
......@@ -4247,7 +4262,7 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
string_node.bytes_value.encoding)
else:
assert False, "unknown string node type: %s" % type(string_node)
string_node.value = build_string(
string_node.constant_result = string_node.value = build_string(
string_node.value * multiplier,
string_node.value.encoding)
return string_node
......@@ -4311,16 +4326,16 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
warning(pos, "Too few arguments for format placeholders", level=1)
can_be_optimised = False
break
if format_type in u'srfdoxX':
if format_type in u'asrfdoxX':
format_spec = s[1:]
if format_type in u'doxX' and u'.' in format_spec:
# Precision is not allowed for integers in format(), but ok in %-formatting.
can_be_optimised = False
elif format_type in u'rs':
elif format_type in u'ars':
format_spec = format_spec[:-1]
substrings.append(ExprNodes.FormattedValueNode(
arg.pos, value=arg,
conversion_char=format_type if format_type in u'rs' else None,
conversion_char=format_type if format_type in u'ars' else None,
format_spec=ExprNodes.UnicodeNode(
pos, value=EncodedString(format_spec), constant_result=format_spec)
if format_spec else None,
......@@ -4510,22 +4525,20 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
cascades = [[node.operand1]]
final_false_result = []
def split_cascades(cmp_node):
cmp_node = node
while cmp_node is not None:
if cmp_node.has_constant_result():
if not cmp_node.constant_result:
# False => short-circuit
final_false_result.append(self._bool_node(cmp_node, False))
return
break
else:
# True => discard and start new cascade
cascades.append([cmp_node.operand2])
else:
# not constant => append to current cascade
cascades[-1].append(cmp_node)
if cmp_node.cascade:
split_cascades(cmp_node.cascade)
split_cascades(node)
cmp_node = cmp_node.cascade
cmp_nodes = []
for cascade in cascades:
......@@ -4671,6 +4684,30 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
return None
return node
def visit_GILStatNode(self, node):
self.visitchildren(node)
if node.condition is None:
return node
if node.condition.has_constant_result():
# Condition is True - Modify node to be a normal
# GILStatNode with condition=None
if node.condition.constant_result:
node.condition = None
# Condition is False - the body of the GILStatNode
# should run without changing the state of the gil
# return the body of the GILStatNode
else:
return node.body
# If condition is not constant we keep the GILStatNode as it is.
# Either it will later become constant (e.g. a `numeric is int`
# expression in a fused type function) and then when ConstantFolding
# runs again it will be handled or a later transform (i.e. GilCheck)
# will raise an error
return node
# in the future, other nodes can have their own handler method here
# that can replace them with a constant result node
......
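A hedged Cython sketch of the conditional GIL blocks this handler folds; as the comments above note, the condition must resolve to a compile-time constant (for example a fused-type test), otherwise GilCheck reports an error later.

    ctypedef fused numeric:
        int
        object

    cdef numeric twice(numeric x):
        with nogil(numeric is int):   # GIL released only in the C-int specialization
            x = x + x
        return x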
This diff is collapsed.
This diff is collapsed.
......@@ -188,7 +188,7 @@ cdef p_varargslist(PyrexScanner s, terminator=*, bint annotated = *)
cdef p_py_arg_decl(PyrexScanner s, bint annotated = *)
cdef p_class_statement(PyrexScanner s, decorators)
cdef p_c_class_definition(PyrexScanner s, pos, ctx)
cdef p_c_class_options(PyrexScanner s)
cdef tuple p_c_class_options(PyrexScanner s)
cdef p_property_decl(PyrexScanner s)
cdef p_doc_string(PyrexScanner s)
cdef p_ignorable_statement(PyrexScanner s)
......
......@@ -65,7 +65,7 @@ class Ctx(object):
def p_ident(s, message="Expected an identifier"):
if s.sy == 'IDENT':
name = s.systring
name = s.context.intern_ustring(s.systring)
s.next()
return name
else:
......@@ -74,7 +74,7 @@ def p_ident(s, message="Expected an identifier"):
def p_ident_list(s):
names = []
while s.sy == 'IDENT':
names.append(s.systring)
names.append(s.context.intern_ustring(s.systring))
s.next()
if s.sy != ',':
break
......@@ -317,9 +317,8 @@ def p_typecast(s):
base_type = p_c_base_type(s)
is_memslice = isinstance(base_type, Nodes.MemoryViewSliceTypeNode)
is_template = isinstance(base_type, Nodes.TemplatedTypeNode)
is_const = isinstance(base_type, Nodes.CConstTypeNode)
if (not is_memslice and not is_template and not is_const
and base_type.name is None):
is_const_volatile = isinstance(base_type, Nodes.CConstOrVolatileTypeNode)
if not is_memslice and not is_template and not is_const_volatile and base_type.name is None:
s.error("Unknown type")
declarator = p_c_declarator(s, empty = 1)
if s.sy == '?':
......@@ -330,8 +329,7 @@ def p_typecast(s):
s.expect(">")
operand = p_factor(s)
if is_memslice:
return ExprNodes.CythonArrayNode(pos, base_type_node=base_type,
operand=operand)
return ExprNodes.CythonArrayNode(pos, base_type_node=base_type, operand=operand)
return ExprNodes.TypecastNode(pos,
base_type = base_type,
......@@ -960,7 +958,7 @@ def p_string_literal(s, kind_override=None):
bytes_value, unicode_value = chars.getstrings()
if is_python3_source and has_non_ascii_literal_characters:
# Python 3 forbids literal non-ASCII characters in byte strings
if kind not in ('u', 'f'):
if kind == 'b':
s.error("bytes can only contain ASCII literal characters.", pos=pos)
bytes_value = None
if kind == 'f':
......@@ -1677,11 +1675,6 @@ def p_import_statement(s):
as_name=as_name,
is_absolute=is_absolute)
else:
if as_name and "." in dotted_name:
name_list = ExprNodes.ListNode(pos, args=[
ExprNodes.IdentifierStringNode(pos, value=s.context.intern_ustring("*"))])
else:
name_list = None
stat = Nodes.SingleAssignmentNode(
pos,
lhs=ExprNodes.NameNode(pos, name=as_name or target_name),
......@@ -1689,7 +1682,8 @@ def p_import_statement(s):
pos,
module_name=ExprNodes.IdentifierStringNode(pos, value=dotted_name),
level=0 if is_absolute else None,
name_list=name_list))
get_top_level_module='.' in dotted_name and as_name is None,
name_list=None))
stats.append(stat)
return Nodes.StatListNode(pos, stats=stats)
......@@ -2057,12 +2051,20 @@ def p_with_items(s, is_async=False):
s.error("with gil/nogil cannot be async")
state = s.systring
s.next()
# support conditional gil/nogil
condition = None
if s.sy == '(':
s.next()
condition = p_test(s)
s.expect(')')
if s.sy == ',':
s.next()
body = p_with_items(s)
else:
body = p_suite(s)
return Nodes.GILStatNode(pos, state=state, body=body)
return Nodes.GILStatNode(pos, state=state, body=body, condition=condition)
else:
manager = p_test(s)
target = None
......@@ -2479,16 +2481,31 @@ def p_c_simple_base_type(s, self_flag, nonempty, templates = None):
complex = 0
module_path = []
pos = s.position()
if not s.sy == 'IDENT':
error(pos, "Expected an identifier, found '%s'" % s.sy)
if s.systring == 'const':
# Handle const/volatile
is_const = is_volatile = 0
while s.sy == 'IDENT':
if s.systring == 'const':
if is_const: error(pos, "Duplicate 'const'")
is_const = 1
elif s.systring == 'volatile':
if is_volatile: error(pos, "Duplicate 'volatile'")
is_volatile = 1
else:
break
s.next()
if is_const or is_volatile:
base_type = p_c_base_type(s, self_flag=self_flag, nonempty=nonempty, templates=templates)
if isinstance(base_type, Nodes.MemoryViewSliceTypeNode):
# reverse order to avoid having to write "(const int)[:]"
base_type.base_type_node = Nodes.CConstTypeNode(pos, base_type=base_type.base_type_node)
base_type.base_type_node = Nodes.CConstOrVolatileTypeNode(pos,
base_type=base_type.base_type_node, is_const=is_const, is_volatile=is_volatile)
return base_type
return Nodes.CConstTypeNode(pos, base_type=base_type)
return Nodes.CConstOrVolatileTypeNode(pos,
base_type=base_type, is_const=is_const, is_volatile=is_volatile)
if s.sy != 'IDENT':
error(pos, "Expected an identifier, found '%s'" % s.sy)
if looking_at_base_type(s):
#print "p_c_simple_base_type: looking_at_base_type at", s.position()
is_basic = 1
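A hedged example of declarations the reworked const/volatile handling accepts; the qualifiers may be combined in either order and duplicates are rejected. The header name is hypothetical.

    cdef extern from "registers.h":
        volatile int status_register
        const volatile unsigned char *rx_buffer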
......@@ -2937,6 +2954,9 @@ def p_exception_value_clause(s):
name = s.systring
s.next()
exc_val = p_name(s, name)
elif s.sy == '*':
exc_val = ExprNodes.CharNode(s.position(), value=u'*')
s.next()
else:
if s.sy == '?':
exc_check = 1
......@@ -3168,18 +3188,22 @@ def p_c_struct_or_union_definition(s, pos, ctx):
attributes = None
if s.sy == ':':
s.next()
s.expect('NEWLINE')
s.expect_indent()
attributes = []
body_ctx = Ctx()
while s.sy != 'DEDENT':
if s.sy != 'pass':
attributes.append(
p_c_func_or_var_declaration(s, s.position(), body_ctx))
else:
s.next()
s.expect_newline("Expected a newline")
s.expect_dedent()
if s.sy == 'pass':
s.next()
s.expect_newline("Expected a newline", ignore_semicolon=True)
else:
s.expect('NEWLINE')
s.expect_indent()
body_ctx = Ctx()
while s.sy != 'DEDENT':
if s.sy != 'pass':
attributes.append(
p_c_func_or_var_declaration(s, s.position(), body_ctx))
else:
s.next()
s.expect_newline("Expected a newline")
s.expect_dedent()
else:
s.expect_newline("Syntax error in struct or union definition")
return Nodes.CStructOrUnionDefNode(pos,
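A hedged example of what the restructured body parsing above allows: an empty (opaque) struct declared with pass on the same line as the colon. The header and type names are hypothetical.

    cdef extern from "impl.h":
        cdef struct OpaqueHandle: pass     # one-line opaque declaration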
......@@ -3364,7 +3388,7 @@ def _reject_cdef_modifier_in_py(s, name):
def p_def_statement(s, decorators=None, is_async_def=False):
# s.sy == 'def'
pos = s.position()
pos = decorators[0].pos if decorators else s.position()
# PEP 492 switches the async/await keywords on in "async def" functions
if is_async_def:
s.enter_async()
......@@ -3468,6 +3492,7 @@ def p_c_class_definition(s, pos, ctx):
objstruct_name = None
typeobj_name = None
bases = None
check_size = None
if s.sy == '(':
positional_args, keyword_args = p_call_parse_args(s, allow_genexp=False)
if keyword_args:
......@@ -3479,7 +3504,7 @@ def p_c_class_definition(s, pos, ctx):
if s.sy == '[':
if ctx.visibility not in ('public', 'extern') and not ctx.api:
error(s.position(), "Name options only allowed for 'public', 'api', or 'extern' C class")
objstruct_name, typeobj_name = p_c_class_options(s)
objstruct_name, typeobj_name, check_size = p_c_class_options(s)
if s.sy == ':':
if ctx.level == 'module_pxd':
body_level = 'c_class_pxd'
......@@ -3518,13 +3543,16 @@ def p_c_class_definition(s, pos, ctx):
bases = bases,
objstruct_name = objstruct_name,
typeobj_name = typeobj_name,
check_size = check_size,
in_pxd = ctx.level == 'module_pxd',
doc = doc,
body = body)
def p_c_class_options(s):
objstruct_name = None
typeobj_name = None
check_size = None
s.expect('[')
while 1:
if s.sy != 'IDENT':
......@@ -3535,11 +3563,16 @@ def p_c_class_options(s):
elif s.systring == 'type':
s.next()
typeobj_name = p_ident(s)
elif s.systring == 'check_size':
s.next()
check_size = p_ident(s)
if check_size not in ('ignore', 'warn', 'error'):
s.error("Expected one of ignore, warn or error, found %r" % check_size)
if s.sy != ',':
break
s.next()
s.expect(']', "Expected 'object' or 'type'")
return objstruct_name, typeobj_name
s.expect(']', "Expected 'object', 'type' or 'check_size'")
return objstruct_name, typeobj_name, check_size
def p_property_decl(s):
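A hedged illustration of the new check_size name option parsed above (accepted values: ignore, warn, error), written in the style of an extern class declaration; the module and names are hypothetical.

    cdef extern from "thirdparty.h":
        ctypedef class thirdparty_module.Thing [object ThingObject, check_size warn]:
            pass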
......@@ -3662,6 +3695,17 @@ def p_module(s, pxd, full_module_name, ctx=Ctx):
directive_comments = p_compiler_directive_comments(s)
s.parse_comments = False
if s.context.language_level is None:
s.context.set_language_level('3str')
if pos[0].filename:
import warnings
warnings.warn(
"Cython directive 'language_level' not set, using '3str' for now (Py3). "
"This has changed from earlier releases! File: %s" % pos[0].filename,
FutureWarning,
stacklevel=1 if cython.compiled else 2,
)
doc = p_doc_string(s)
if pxd:
level = 'module_pxd'
......
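A hedged example of silencing the FutureWarning introduced above by pinning the language level explicitly, either with a directive comment on the first line of the .pyx file or via the -2/-3/--3str command line options.

    # cython: language_level=3str

    def greet(name):
        print("hello", name)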
......@@ -146,7 +146,7 @@ def create_pipeline(context, mode, exclude_classes=()):
from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
from .ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
from .ParseTreeTransforms import CalculateQualifiedNamesTransform
from .ParseTreeTransforms import CalculateQualifiedNamesTransform, ReplacePropertyNode
from .TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic
from .ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
from .ParseTreeTransforms import RemoveUnreachableCode, GilCheck
......@@ -198,6 +198,7 @@ def create_pipeline(context, mode, exclude_classes=()):
AnalyseDeclarationsTransform(context),
AutoTestDictTransform(context),
EmbedSignature(context),
ReplacePropertyNode(context),
EarlyReplaceBuiltinCalls(context), ## Necessary?
TransformBuiltinMethods(context),
MarkParallelAssignments(context),
......
This diff is collapsed.
This diff is collapsed.
......@@ -9,11 +9,6 @@ cdef unicode any_string_prefix, IDENT
cdef get_lexicon()
cdef initial_compile_time_env()
cdef class Method:
cdef object name
cdef dict kwargs
cdef readonly object __name__ # for tracing the scanner
## methods commented with '##' out are used by Parsing.py when compiled.
@cython.final
......
This diff is collapsed.
......@@ -48,7 +48,7 @@ class TestMemviewParsing(CythonTest):
def test_basic(self):
t = self.parse(u"cdef int[:] x")
memv_node = t.stats[0].base_type
self.assert_(isinstance(memv_node, MemoryViewSliceTypeNode))
self.assertTrue(isinstance(memv_node, MemoryViewSliceTypeNode))
# we also test other similar declarations (buffers, anonymous C arrays)
# since the parsing has to distinguish between them.
......
This diff is collapsed.
# Present for backwards compatibility
from cpython cimport *
# Present for backwards compatibility
from cpython.bool cimport *
# Present for backwards compatibility
from cpython.buffer cimport *
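These star-imports exist only for backwards compatibility; new code is expected to cimport exactly what it needs from the specific cpython submodules (cpython.ref for Py_INCREF and Py_DECREF, cpython.buffer for the buffer protocol, and so on). A minimal sketch of the narrower form, using the buffer API as an example:
# Prefer targeted cimports over the compatibility star-imports above.
from cpython.buffer cimport PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE

cdef Py_ssize_t buffer_length(obj) except -1:
    # Query an object's length in bytes through the C buffer protocol.
    cdef Py_buffer view
    PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE)
    try:
        return view.len
    finally:
        PyBuffer_Release(&view)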