Commit b5505987 authored by Jason Madden

Merge branch 'source-reorg'

parents 391c4525 b1a87842
...@@ -3,20 +3,20 @@ build/ ...@@ -3,20 +3,20 @@ build/
.runtimes .runtimes
.tox/ .tox/
*.so *.so
*.o
*.egg-info *.egg-info
gevent.*.[ch] gevent.*.[ch]
gevent/corecext.pyx src/gevent/libev/corecext.pyx
gevent/__pycache__ src/gevent/__pycache__
gevent/libev src/gevent/libev/_corecffi.c
gevent/_corecffi.c src/gevent/libev/_corecffi.o
gevent/_corecffi.o
Makefile.ext Makefile.ext
MANIFEST MANIFEST
*_flymake.py *_flymake.py
greentest/.coverage\.* src/greentest/.coverage\.*
greentest/htmlcov src/greentest/htmlcov
greentest/.coverage src/greentest/.coverage
doc/changelog.rst doc/changelog.rst
doc/_build doc/_build
...@@ -33,40 +33,40 @@ doc/gevent.*.rst ...@@ -33,40 +33,40 @@ doc/gevent.*.rst
!doc/gevent.wsgi.rst !doc/gevent.wsgi.rst
# Artifacts of configuring in place # Artifacts of configuring in place
c-ares/config.log deps/c-ares/config.log
c-ares/config.status deps/c-ares/config.status
c-ares/stamp-h1 deps/c-ares/stamp-h1
c-ares/stamp-h2 deps/c-ares/stamp-h2
c-ares/ares_build.h.orig deps/c-ares/ares_build.h.orig
c-ares/ares_config.h deps/c-ares/ares_config.h
c-ares/.libs deps/c-ares/.libs
c-ares/*.o deps/c-ares/*.o
c-ares/*.lo deps/c-ares/*.lo
c-ares/*.la deps/c-ares/*.la
c-ares/.deps deps/c-ares/.deps
c-ares/acountry deps/c-ares/acountry
c-ares/adig deps/c-ares/adig
c-ares/ahost deps/c-ares/ahost
c-ares/Makefile deps/c-ares/Makefile
c-ares/libtool deps/c-ares/libtool
c-ares/libcares.pc deps/c-ares/libcares.pc
c-ares/test/.deps deps/c-ares/test/.deps
c-ares/test/Makefile deps/c-ares/test/Makefile
c-ares/test/config.log deps/c-ares/test/config.log
c-ares/test/config.status deps/c-ares/test/config.status
c-ares/test/libtool deps/c-ares/test/libtool
c-ares/test/stamp-h1 deps/c-ares/test/stamp-h1
libev/.deps deps/libev/.deps
libev/Makefile deps/libev/Makefile
libev/config.log deps/libev/config.log
libev/config.h deps/libev/config.h
libev/config.status deps/libev/config.status
libev/libtool deps/libev/libtool
libev/stamp-h1 deps/libev/stamp-h1
libev/.libs deps/libev/.libs
libev/*.lo deps/libev/*.lo
libev/*.la deps/libev/*.la
libev/*.o deps/libev/*.o
# running setup.py on PyPy # running setup.py on PyPy
config.h config.h
......
...@@ -24,29 +24,29 @@ ignore-paths: ...@@ -24,29 +24,29 @@ ignore-paths:
- scripts/ - scripts/
# This file has invalid syntax for Python 3, which is how # This file has invalid syntax for Python 3, which is how
# landscape.io runs things... # landscape.io runs things...
- gevent/_util_py2.py - src/gevent/_util_py2.py
# ...and this file has invalid syntax for Python 2, which is how # ...and this file has invalid syntax for Python 2, which is how
# travis currently runs things. sigh. # travis currently runs things. sigh.
- gevent/_socket3.py - src/gevent/_socket3.py
# This is vendored with minimal changes # This is vendored with minimal changes
- gevent/_tblib.py - src/gevent/_tblib.py
# likewise # likewise
- greentest/six.py - src/greentest/six.py
# This triggers https://github.com/PyCQA/pylint/issues/846 on Travis, # This triggers https://github.com/PyCQA/pylint/issues/846 on Travis,
# but the file is really small, so it's better to skip this one # but the file is really small, so it's better to skip this one
# file than disable that whole check. # file than disable that whole check.
- gevent/core.py - src/gevent/core.py
# sadly, this one is complicated # sadly, this one is complicated
- setup.py - setup.py
- greentest/getaddrinfo_module.py - src/greentest/getaddrinfo_module.py
ignore-patterns: ignore-patterns:
# disabled code # disabled code
- ^greentest/xtest_.*py - ^src/greentest/xtest_.*py
# standard library code # standard library code
- ^greentest/2.* - ^src/greentest/2.*
- ^greentest/3.* - ^src/greentest/3.*
# benchmarks that aren't used/changed much # benchmarks that aren't used/changed much
- ^greentest/bench_.*py - ^src/greentest/bench_.*py
pyroma: pyroma:
run: true run: true
......
recursive-include greentest * recursive-include src/greentest *
recursive-include examples * recursive-include examples *
recursive-include gevent * recursive-include src/gevent *
recursive-include doc * recursive-include doc *
recursive-include libev * recursive-include deps *
recursive-include c-ares *
recursive-include util * recursive-include util *
include LICENSE include LICENSE
...@@ -18,6 +17,7 @@ include Makefile.ext ...@@ -18,6 +17,7 @@ include Makefile.ext
include known_failures.py include known_failures.py
include *.yml include *.yml
include *.txt include *.txt
include _setup*.py
include tox.ini include tox.ini
include .pep8 include .pep8
...@@ -40,14 +40,14 @@ global-exclude *.la ...@@ -40,14 +40,14 @@ global-exclude *.la
global-exclude config.log config.status global-exclude config.log config.status
prune doc/_build prune doc/_build
global-exclude *.pyc global-exclude *.pyc
recursive-exclude greentest .coverage recursive-exclude src/greentest .coverage
prune greentest/htmlcov prune src/greentest/htmlcov
recursive-exclude c-ares stamp-h? ares_build.h.orig recursive-exclude deps/c-ares stamp-h? ares_build.h.orig
prune libev/.deps prune deps/libev/.deps
recursive-exclude libev Makefile libtool stamp-h? config.h recursive-exclude deps/libev Makefile libtool stamp-h? config.h
# This is the output of _corecffi_build.py and may be particular # This is the output of _corecffi_build.py and may be particular
# to each CFFI version/platform # to each CFFI version/platform
recursive-exclude gevent _corecffi.c recursive-exclude src/gevent _corecffi.c
# See comments in Makefile; this is renamed to Makefile.ext # See comments in Makefile; this is renamed to Makefile.ext
# this exclude keeps check-manifest from complaining # this exclude keeps check-manifest from complaining
exclude Makefile exclude Makefile
...@@ -12,48 +12,56 @@ export PATH:=$(BUILD_RUNTIMES)/snakepit:$(TOOLS):$(PATH) ...@@ -12,48 +12,56 @@ export PATH:=$(BUILD_RUNTIMES)/snakepit:$(TOOLS):$(PATH)
export LC_ALL=C.UTF-8 export LC_ALL=C.UTF-8
all: gevent/gevent.corecext.c gevent/gevent.ares.c gevent/gevent._semaphore.c all: src/gevent/libev/gevent.corecext.c src/gevent/gevent.ares.c src/gevent/gevent._semaphore.c
gevent/gevent.corecext.c: gevent/corecext.ppyx gevent/libev.pxd util/cythonpp.py src/gevent/libev/gevent.corecext.c: src/gevent/libev/corecext.ppyx src/gevent/libev/libev.pxd util/cythonpp.py
$(PYTHON) util/cythonpp.py -o gevent.corecext.c gevent/corecext.ppyx $(PYTHON) util/cythonpp.py -o gevent.corecext.c --module-name gevent.libev.corecext.pyx src/gevent/libev/corecext.ppyx
echo '#include "callbacks.c"' >> gevent.corecext.c echo '#include "callbacks.c"' >> gevent.corecext.c
mv gevent.corecext.* gevent/ mv gevent.corecext.* src/gevent/libev/
gevent/gevent.ares.c: gevent/ares.pyx gevent/*.pxd src/gevent/gevent.ares.c: src/gevent/ares.pyx src/gevent/*.pxd
$(CYTHON) -o gevent.ares.c gevent/ares.pyx $(CYTHON) -o gevent.ares.c src/gevent/ares.pyx
mv gevent.ares.* gevent/ mv gevent.ares.* src/gevent/
gevent/gevent._semaphore.c: gevent/_semaphore.py gevent/_semaphore.pxd src/gevent/gevent._semaphore.c: src/gevent/_semaphore.py src/gevent/_semaphore.pxd
# On PyPy, if we wanted to use Cython to compile _semaphore.py, we'd # On PyPy, if we wanted to use Cython to compile _semaphore.py, we'd
# need to have _semaphore named as a .pyx file so it doesn't get # need to have _semaphore named as a .pyx file so it doesn't get
# loaded in preference to the .so. (We want to keep the definitions # loaded in preference to the .so. (We want to keep the definitions
# separate in a .pxd file for ease of reading, and that only works # separate in a .pxd file for ease of reading, and that only works
# with .py files, so we'd have to copy them back and forth.) # with .py files, so we'd have to copy them back and forth.)
# cp gevent/_semaphore.pyx gevent/_semaphore.py # cp src/gevent/_semaphore.pyx src/gevent/_semaphore.py
$(CYTHON) -o gevent._semaphore.c gevent/_semaphore.py $(CYTHON) -o gevent._semaphore.c src/gevent/_semaphore.py
mv gevent._semaphore.* gevent/ mv gevent._semaphore.* src/gevent/
# rm gevent/_semaphore.py # rm src/gevent/_semaphore.py
clean: clean:
rm -f corecext.pyx gevent/corecext.pyx rm -f corecext.pyx src/gevent/libev/corecext.pyx
rm -f gevent.corecext.c gevent.corecext.h gevent/gevent.corecext.c gevent/gevent.corecext.h rm -f gevent.corecext.c gevent.corecext.h src/gevent/libev/gevent.corecext.c src/gevent/libev/gevent.corecext.h
rm -f gevent.ares.c gevent.ares.h gevent/gevent.ares.c gevent/gevent.ares.h rm -f gevent.ares.c gevent.ares.h src/gevent/gevent.ares.c src/gevent/gevent.ares.h
rm -f gevent._semaphore.c gevent._semaphore.h gevent/gevent._semaphore.c gevent/gevent._semaphore.h rm -f gevent._semaphore.c gevent._semaphore.h src/gevent/gevent._semaphore.c src/gevent/gevent._semaphore.h
rm -f gevent/*.so rm -f src/gevent/*.so src/gevent/libev/*.so
rm -rf gevent/__pycache__ rm -rf src/gevent/libev/*.o src/gevent/*.o
rm -rf gevent/*.pyc rm -rf src/gevent/__pycache__ src/greentest/__pycache__ src/gevent/libev/__pycache__
rm -rf src/gevent/*.pyc src/greentest/*.pyc src/gevent/libev/*.pyc
rm -rf src/greentest/htmlcov src/greentest/.coverage
rm -rf build
distclean: clean
rm -rf dist
rm -rf deps/libev/config.h deps/libev/config.log deps/libev/config.status deps/libev/.deps deps/libev/.libs
rm -rf deps/c-ares/config.h deps/c-ares/config.log deps/c-ares/config.status deps/c-ares/.deps deps/c-ares/.libs
doc: doc:
cd doc && PYTHONPATH=.. make html cd doc && PYTHONPATH=.. make html
whitespace: whitespace:
! find . -not -path "*.pem" -not -path "./.eggs/*" -not -path "./greentest/htmlcov/*" -not -path "./greentest/.coverage.*" -not -path "./.tox/*" -not -path "*/__pycache__/*" -not -path "*.so" -not -path "*.pyc" -not -path "./.git/*" -not -path "./build/*" -not -path "./libev/*" -not -path "./gevent/libev/*" -not -path "./gevent.egg-info/*" -not -path "./dist/*" -not -path "./.DS_Store" -not -path "./c-ares/*" -not -path "./gevent/gevent.*.[ch]" -not -path "./gevent/corecext.pyx" -not -path "./doc/_build/*" -not -path "./doc/mytheme/static/*" -type f | xargs egrep -l " $$" ! find . -not -path "*.pem" -not -path "./.eggs/*" -not -path "./src/greentest/htmlcov/*" -not -path "./src/greentest/.coverage.*" -not -path "./.tox/*" -not -path "*/__pycache__/*" -not -path "*.so" -not -path "*.pyc" -not -path "./.git/*" -not -path "./build/*" -not -path "./src/gevent/libev/*" -not -path "./src/gevent.egg-info/*" -not -path "./dist/*" -not -path "./.DS_Store" -not -path "./deps/*" -not -path "./src/gevent/gevent.*.[ch]" -not -path "./src/gevent/corecext.pyx" -not -path "./doc/_build/*" -not -path "./doc/mytheme/static/*" -type f | xargs egrep -l " $$"
prospector: prospector:
which prospector which prospector
which pylint which pylint
# debugging # debugging
# pylint --rcfile=.pylintrc --init-hook="import sys, code; sys.excepthook = lambda exc, exc_type, tb: print(tb.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_frame.f_locals['self'])" gevent greentest/* || true # pylint --rcfile=.pylintrc --init-hook="import sys, code; sys.excepthook = lambda exc, exc_type, tb: print(tb.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_next.tb_frame.f_locals['self'])" gevent src/greentest/* || true
${PYTHON} scripts/gprospector.py -X ${PYTHON} scripts/gprospector.py -X
lint: whitespace prospector lint: whitespace prospector
...@@ -66,18 +74,18 @@ test_prelim: ...@@ -66,18 +74,18 @@ test_prelim:
make bench make bench
toxtest: test_prelim toxtest: test_prelim
cd greentest && GEVENT_RESOLVER=thread ${PYTHON} testrunner.py --config ../known_failures.py cd src/greentest && GEVENT_RESOLVER=thread ${PYTHON} testrunner.py --config known_failures.py
fulltoxtest: test_prelim fulltoxtest: test_prelim
cd greentest && GEVENT_RESOLVER=thread ${PYTHON} testrunner.py --config ../known_failures.py cd src/greentest && GEVENT_RESOLVER=thread ${PYTHON} testrunner.py --config known_failures.py
cd greentest && GEVENT_RESOLVER=ares GEVENTARES_SERVERS=8.8.8.8 ${PYTHON} testrunner.py --config ../known_failures.py --ignore tests_that_dont_use_resolver.txt cd src/greentest && GEVENT_RESOLVER=ares GEVENTARES_SERVERS=8.8.8.8 ${PYTHON} testrunner.py --config known_failures.py --ignore tests_that_dont_use_resolver.txt
cd greentest && GEVENT_FILE=thread ${PYTHON} testrunner.py --config ../known_failures.py `grep -l subprocess test_*.py` cd src/greentest && GEVENT_FILE=thread ${PYTHON} testrunner.py --config known_failures.py `grep -l subprocess test_*.py`
leaktest: leaktest:
GEVENTSETUP_EV_VERIFY=3 GEVENTTEST_LEAKCHECK=1 make fulltoxtest GEVENTSETUP_EV_VERIFY=3 GEVENTTEST_LEAKCHECK=1 make fulltoxtest
bench: bench:
${PYTHON} greentest/bench_sendall.py ${PYTHON} src/greentest/bench_sendall.py
travis_test_linters: travis_test_linters:
...@@ -85,9 +93,9 @@ travis_test_linters: ...@@ -85,9 +93,9 @@ travis_test_linters:
GEVENTTEST_COVERAGE=1 make leaktest GEVENTTEST_COVERAGE=1 make leaktest
# because we set parallel=true, each run produces new and different coverage files; they all need # because we set parallel=true, each run produces new and different coverage files; they all need
# to be combined # to be combined
coverage combine . greentest/ coverage combine . src/greentest/
coveralls --rcfile=greentest/.coveragerc coveralls --rcfile=src/greentest/.coveragerc
.PHONY: clean all doc prospector whitespace lint travistest travis .PHONY: clean all doc prospector whitespace lint travistest travis
...@@ -171,6 +179,6 @@ test-py27-cffi: $(PY27) ...@@ -171,6 +179,6 @@ test-py27-cffi: $(PY27)
GEVENT_CORE_CFFI_ONLY=1 PYTHON=python2.7 PATH=$(BUILD_RUNTIMES)/versions/python2.7/bin:$(PATH) make develop toxtest GEVENT_CORE_CFFI_ONLY=1 PYTHON=python2.7 PATH=$(BUILD_RUNTIMES)/versions/python2.7/bin:$(PATH) make develop toxtest
test-py27-noembed: $(PY27) test-py27-noembed: $(PY27)
cd libev && ./configure --disable-dependency-tracking && make cd deps/libev && ./configure --disable-dependency-tracking && make
cd c-ares && ./configure --disable-dependency-tracking && make cd deps/c-ares && ./configure --disable-dependency-tracking && make
CPPFLAGS="-Ilibev -Ic-ares" LDFLAGS="-Llibev/.libs -Lc-ares/.libs" LD_LIBRARY_PATH="$(PWD)/libev/.libs:$(PWD)/c-ares/.libs" EMBED=0 GEVENT_CORE_CEXT_ONLY=1 PYTHON=python2.7 PATH=$(BUILD_RUNTIMES)/versions/python2.7/bin:$(PATH) make develop toxtest CPPFLAGS="-Ideps/libev -Ideps/c-ares" LDFLAGS="-Ldeps/libev/.libs -Ldeps/c-ares/.libs" LD_LIBRARY_PATH="$(PWD)/deps/libev/.libs:$(PWD)/deps/c-ares/.libs" EMBED=0 GEVENT_CORE_CEXT_ONLY=1 PYTHON=python2.7 PATH=$(BUILD_RUNTIMES)/versions/python2.7/bin:$(PATH) make develop toxtest
...@@ -77,8 +77,8 @@ There are a few different ways to run the tests. To simply run the ...@@ -77,8 +77,8 @@ There are a few different ways to run the tests. To simply run the
tests on one version of Python during development, try this:: tests on one version of Python during development, try this::
python setup.py develop python setup.py develop
cd greentest cd src/greentest
PYTHONPATH=.. python testrunner.py --config ../known_failures.py PYTHONPATH=.. python testrunner.py --config known_failures.py
Before submitting a pull request, it's a good idea to run the tests Before submitting a pull request, it's a good idea to run the tests
across all supported versions of Python, and to check the code quality across all supported versions of Python, and to check the code quality
...@@ -93,8 +93,8 @@ The testrunner accepts a ``--coverage`` argument to enable code ...@@ -93,8 +93,8 @@ The testrunner accepts a ``--coverage`` argument to enable code
coverage metrics through the `coverage.py`_ package. That would go coverage metrics through the `coverage.py`_ package. That would go
something like this:: something like this::
cd greentest cd src/greentest
PYTHONPATH=.. python testrunner.py --config ../known_failures.py --coverage PYTHONPATH=.. python testrunner.py --config known_failures.py --coverage
coverage combine coverage combine
coverage html -i coverage html -i
<open htmlcov/index.html> <open htmlcov/index.html>
......
# -*- coding: utf-8 -*-
"""
setup helpers for c-ares.
"""
from __future__ import print_function, absolute_import, division
import os
import os.path
import shutil
import sys
from _setuputils import Extension
import distutils.sysconfig # to get CFLAGS to pass into c-ares configure script pylint:disable=import-error
from _setuputils import WIN
from _setuputils import quoted_dep_abspath
from _setuputils import system
from _setuputils import make_universal_header
from _setuputils import should_embed
from _setuputils import LIBRARIES
from _setuputils import DEFINE_MACROS
from _setuputils import glob_many
from _setuputils import dep_abspath
CARES_EMBED = should_embed('c-ares')
# See #616, trouble building for a 32-bit python against a 64-bit platform
_config_vars = distutils.sysconfig.get_config_var("CFLAGS")
if _config_vars and "m32" in _config_vars:
_m32 = 'CFLAGS="' + os.getenv('CFLAGS', '') + ' -m32" '
else:
_m32 = ''
# Use -r, not -e, for support of old solaris. See
# https://github.com/gevent/gevent/issues/777
ares_configure_command = ' '.join([
"(cd ", quoted_dep_abspath('c-ares'),
" && if [ -r ares_build.h ]; then cp ares_build.h ares_build.h.orig; fi ",
" && /bin/sh ./configure --disable-dependency-tracking " + _m32 + "CONFIG_COMMANDS= ",
" && cp ares_config.h ares_build.h \"$OLDPWD\" ",
" && mv ares_build.h.orig ares_build.h)",
"> configure-output.txt"])
def configure_ares(bext, ext):
bdir = os.path.join(bext.build_temp, 'c-ares')
ext.include_dirs.insert(0, bdir)
if not os.path.isdir(bdir):
os.makedirs(bdir)
if WIN:
shutil.copy("deps\\c-ares\\ares_build.h.dist", os.path.join(bdir, "ares_build.h"))
return
cwd = os.getcwd()
os.chdir(bdir)
try:
if os.path.exists('ares_config.h') and os.path.exists('ares_build.h'):
return
try:
system(ares_configure_command)
except:
with open('configure-output.txt', 'r') as t:
print(t.read(), file=sys.stderr)
raise
if sys.platform == 'darwin':
make_universal_header('ares_build.h', 'CARES_SIZEOF_LONG')
make_universal_header('ares_config.h', 'SIZEOF_LONG', 'SIZEOF_SIZE_T', 'SIZEOF_TIME_T')
finally:
os.chdir(cwd)
ARES = Extension(name='gevent.ares',
sources=['src/gevent/gevent.ares.c'],
include_dirs=[dep_abspath('c-ares')] if CARES_EMBED else [],
libraries=list(LIBRARIES),
define_macros=list(DEFINE_MACROS),
depends=glob_many('src/gevent/dnshelper.c',
'src/gevent/cares_*.[ch]'))
ARES.optional = True
if CARES_EMBED:
ARES.sources += glob_many('deps/c-ares/*.c')
# Strip the standalone binaries that would otherwise
# cause linking issues
for bin_c in ('acountry', 'adig', 'ahost'):
try:
ARES.sources.remove('deps/c-ares/' + bin_c + '.c')
except ValueError:
pass
ARES.configure = configure_ares
if WIN:
ARES.libraries += ['advapi32']
ARES.define_macros += [('CARES_STATICLIB', '')]
else:
ARES.define_macros += [('HAVE_CONFIG_H', '')]
if sys.platform != 'darwin':
ARES.libraries += ['rt']
ARES.define_macros += [('CARES_EMBED', '1')]
else:
ARES.libraries.append('cares')
ARES.define_macros += [('HAVE_NETDB_H', '')]
# -*- coding: utf-8 -*-
"""
setup helpers for libev.
"""
from __future__ import print_function, absolute_import, division
import sys
import os.path
from _setuputils import Extension
from _setuputils import system
from _setuputils import quoted_dep_abspath
from _setuputils import WIN
from _setuputils import make_universal_header
from _setuputils import LIBRARIES
from _setuputils import DEFINE_MACROS
from _setuputils import glob_many
from _setuputils import dep_abspath
from _setuputils import should_embed
LIBEV_EMBED = should_embed('libev')
# Configure libev in place; but cp the config.h to the old directory;
# if we're building a CPython extension, the old directory will be
# the build/temp.XXX/libev/ directory. If we're building from a
# source checkout on pypy, OLDPWD will be the location of setup.py
# and the PyPy branch will clean it up.
libev_configure_command = ' '.join([
"(cd ", quoted_dep_abspath('libev'),
" && /bin/sh ./configure ",
" && cp config.h \"$OLDPWD\"",
")",
'> configure-output.txt'
])
def configure_libev(bext, ext):
if WIN:
return
bdir = os.path.join(bext.build_temp, 'libev')
ext.include_dirs.insert(0, bdir)
if not os.path.isdir(bdir):
os.makedirs(bdir)
cwd = os.getcwd()
os.chdir(bdir)
try:
if os.path.exists('config.h'):
return
system(libev_configure_command)
if sys.platform == 'darwin':
make_universal_header('config.h',
'SIZEOF_LONG', 'SIZEOF_SIZE_T', 'SIZEOF_TIME_T')
finally:
os.chdir(cwd)
CORE = Extension(name='gevent.libev.corecext',
sources=['src/gevent/libev/gevent.corecext.c'],
include_dirs=[dep_abspath('libev')] if LIBEV_EMBED else [],
libraries=list(LIBRARIES),
define_macros=list(DEFINE_MACROS),
depends=glob_many('src/gevent/libev/callbacks.*',
'src/gevent/libev/stathelper.c',
'src/gevent/libev/libev*.h',
'deps/libev/*.[ch]'))
if WIN:
CORE.define_macros.append(('EV_STANDALONE', '1'))
# QQQ libev can also use -lm, however it seems to be added implicitly
if LIBEV_EMBED:
CORE.define_macros += [('LIBEV_EMBED', '1'),
('EV_COMMON', ''), # we don't use void* data
# libev watchers that we don't use currently:
('EV_CLEANUP_ENABLE', '0'),
('EV_EMBED_ENABLE', '0'),
("EV_PERIODIC_ENABLE", '0')]
CORE.configure = configure_libev
if sys.platform == "darwin":
os.environ["CPPFLAGS"] = ("%s %s" % (os.environ.get("CPPFLAGS", ""), "-U__llvm__")).lstrip()
if os.environ.get('GEVENTSETUP_EV_VERIFY') is not None:
CORE.define_macros.append(('EV_VERIFY', os.environ['GEVENTSETUP_EV_VERIFY']))
else:
CORE.libraries.append('ev')
# -*- coding: utf-8 -*-
"""
gevent build utilities.
.. $Id$
"""
from __future__ import print_function, absolute_import, division
import re
import os
import os.path
import sys
from subprocess import check_call
from glob import glob
from setuptools.command.build_ext import build_ext
from setuptools.command.sdist import sdist
## Exported configurations
PYPY = hasattr(sys, 'pypy_version_info')
WIN = sys.platform.startswith('win')
CFFI_WIN_BUILD_ANYWAY = os.environ.get("PYPY_WIN_BUILD_ANYWAY")
LIBRARIES = []
DEFINE_MACROS = []
if WIN:
LIBRARIES += ['ws2_32']
DEFINE_MACROS += [('FD_SETSIZE', '1024'), ('_WIN32', '1')]
### File handling
THIS_DIR = os.path.dirname(__file__)
def quoted_abspath(*segments):
return '"' + os.path.abspath(os.path.join(*segments)) + '"'
def read(name, *args):
"""Read a file path relative to this file."""
with open(os.path.join(THIS_DIR, name)) as f:
return f.read(*args)
def read_version(name="src/gevent/__init__.py"):
contents = read(name)
version = re.search(r"__version__\s*=\s*'(.*)'", contents, re.M).group(1)
assert version, "could not read version"
return version
def dep_abspath(depname, *extra):
return os.path.abspath(os.path.join('deps', depname, *extra))
def quoted_dep_abspath(depname):
return quoted_abspath(dep_abspath(depname))
def glob_many(*globs):
"""
Return a list of all the glob patterns expanded.
"""
result = []
for pattern in globs:
result.extend(glob(pattern))
return sorted(result)
## Configuration
def _parse_environ(key):
value = os.environ.get(key)
if not value:
return
value = value.lower().strip()
if value in ('1', 'true', 'on', 'yes'):
return True
elif value in ('0', 'false', 'off', 'no'):
return False
raise ValueError('Environment variable %r has invalid value %r. '
'Please set it to 1, 0 or an empty string' % (key, value))
IGNORE_CFFI = _parse_environ("GEVENT_NO_CFFI_BUILD")
def _get_config_value(key, defkey, path=None):
"""
Find a boolean value, configured in the environment at *key* or
*defkey* (typically, *defkey* will be shared by several calls). If
those don't exist, then check for the existence of *path* and return
that (if path is given)
"""
value = _parse_environ(key)
if value is None:
value = _parse_environ(defkey)
if value is not None:
return value
return os.path.exists(path) if path is not None else False
def should_embed(dep_name):
"""
Check the configuration for the dep_name and see if it
should be embedded. Environment keys are derived from the
dep name: libev becomes LIBEV_EMBED and c-ares becomes CARES_EMBED.
"""
path = dep_abspath(dep_name)
defkey = 'EMBED'
key = dep_name.replace('-', '').upper() + '_' + defkey
return _get_config_value(key, defkey, path)
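# For example (illustrative): should_embed('c-ares') consults CARES_EMBED,
# then the shared EMBED variable, and finally falls back to whether
# deps/c-ares exists on disk (the '-' is dropped when forming the key).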
## Headers
def make_universal_header(filename, *defines):
defines = [('#define %s ' % define, define) for define in defines]
with open(filename, 'r') as f:
lines = f.read().split('\n')
ifdef = 0
with open(filename, 'w') as f:
for line in lines:
if line.startswith('#ifdef'):
ifdef += 1
elif line.startswith('#endif'):
ifdef -= 1
elif not ifdef:
for prefix, define in defines:
if line.startswith(prefix):
line = '#ifdef __LP64__\n#define %s 8\n#else\n#define %s 4\n#endif' % (define, define)
break
print(line, file=f)
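# For example (illustrative), a top-level line such as "#define SIZEOF_LONG 8"
# is rewritten to:
#   #ifdef __LP64__
#   #define SIZEOF_LONG 8
#   #else
#   #define SIZEOF_LONG 4
#   #endif
# so the same generated header serves both the 32- and 64-bit slices of a
# universal macOS build.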
# Processes
def _system(cmd):
sys.stdout.write('Running %r in %s\n' % (cmd, os.getcwd()))
return check_call(cmd, shell=True)
def system(cmd):
if _system(cmd):
sys.exit(1)
## Distutils extensions
class BuildFailed(Exception):
pass
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError # pylint:disable=no-name-in-module,import-error
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError)
class ConfiguringBuildExt(build_ext):
def gevent_prepare(self, ext):
configure = getattr(ext, 'configure', None)
if configure:
configure(self, ext)
def build_extension(self, ext):
self.gevent_prepare(ext)
try:
result = build_ext.build_extension(self, ext)
except ext_errors:
if getattr(ext, 'optional', False):
raise BuildFailed()
else:
raise
return result
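# Note: the BuildFailed raised above is caught by setup.py, which retries the
# build without the optional gevent.ares extension (see its
# "except BuildFailed" handling later in this commit).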
class MakeSdist(sdist):
"""
An sdist that runs make if needed, and makes sure
that the Makefile doesn't make it into the dist
archive.
"""
_ran_make = False
@classmethod
def make(cls, targets=''):
# NOTE: We have two copies of the makefile, one
# for posix, one for windows. Our sdist command takes
# care of renaming the posix one so it doesn't get into
# the .tar.gz file (we don't want to re-run make in a released
# file). We key off the presence/absence of that file
# to skip both the posix and Windows branches.
# See https://github.com/gevent/gevent/issues/757
if cls._ran_make:
return
if os.path.exists('Makefile'):
if WIN:
# make.cmd handles checking for PyPy and only making the
# right things, so we can ignore the targets
system("appveyor\\make.cmd")
else:
if "PYTHON" not in os.environ:
os.environ["PYTHON"] = sys.executable
system('make ' + targets)
cls._ran_make = True
def run(self):
renamed = False
if os.path.exists('Makefile'):
self.make()
os.rename('Makefile', 'Makefile.ext')
renamed = True
try:
return sdist.run(self)
finally:
if renamed:
os.rename('Makefile.ext', 'Makefile')
from setuptools import Extension as _Extension
class Extension(_Extension):
# This class exists currently mostly to make pylint
# happy in terms of attributes we use.
def __init__(self, *args, **kwargs):
self.libraries = []
self.define_macros = []
# Python 2 has this as an old-style class for some reason
# so super() doesn't work.
_Extension.__init__(self, *args, **kwargs) # pylint:disable=no-member,non-parent-init-called
...@@ -145,7 +145,7 @@ build_script: ...@@ -145,7 +145,7 @@ build_script:
test_script: test_script:
# Run the project tests # Run the project tests
- "cd greentest && %PYEXE% testrunner.py --config ../known_failures.py && cd .." - "cd src/greentest && %PYEXE% testrunner.py --config known_failures.py && cd ../.."
after_test: after_test:
# We already built the wheel during build_script, because it's # We already built the wheel during build_script, because it's
......
IF "%PYTHON_EXE%" == "python" ( IF "%PYTHON_EXE%" == "python" (
%PYEXE% util\cythonpp.py -o gevent.corecext.c gevent\corecext.ppyx %PYEXE% util\cythonpp.py -o gevent.corecext.c --module-name gevent.libev.corecext.pyx src\gevent\libev\corecext.ppyx
type gevent\callbacks.c >> gevent.corecext.c type src\gevent\libev\callbacks.c >> gevent.corecext.c
move gevent.corecext.* gevent move gevent.corecext.* src\gevent\libev
) )
cython -o gevent.ares.c gevent\ares.pyx cython -o gevent.ares.c src\gevent\ares.pyx
move gevent.ares.* gevent move gevent.ares.* src\gevent
cython -o gevent._semaphore.c gevent\_semaphore.py cython -o gevent._semaphore.c src\gevent\_semaphore.py
move gevent._semaphore.* gevent move gevent._semaphore.* src\gevent
...@@ -12,6 +12,9 @@ Incompatible Changes ...@@ -12,6 +12,9 @@ Incompatible Changes
- Support for Python 2.6 has been removed. See :pr:`766`. - Support for Python 2.6 has been removed. See :pr:`766`.
- Remove module ``gevent.coros`` which was replaced by ``gevent.lock`` - Remove module ``gevent.coros`` which was replaced by ``gevent.lock``
and has been deprecated since 1.0b2. and has been deprecated since 1.0b2.
- The internal implementation modules ``gevent.corecext`` and
``gevent.corecffi`` have been moved. Please import from
``gevent.core`` instead.
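  For example (a minimal compatibility sketch)::

      # before: from gevent.corecext import loop
      from gevent.core import loop  # backed by corecext or corecffi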
Libraries Libraries
--------- ---------
...@@ -29,6 +32,8 @@ Libraries ...@@ -29,6 +32,8 @@ Libraries
attempted at every import. This could lead to scattered "gevent" attempted at every import. This could lead to scattered "gevent"
directories and undependable results. directories and undependable results.
- Update Cython to 0.24. - Update Cython to 0.24.
- setuptools is now required at build time on all platforms.
Previously it was only required for Windows and PyPy.
Security Security
......
...@@ -3,17 +3,21 @@ ...@@ -3,17 +3,21 @@
from __future__ import print_function from __future__ import print_function
import sys import sys
import os import os
import re
import shutil
import traceback
from os.path import join, abspath, basename, dirname
from subprocess import check_call
from glob import glob
from _setuputils import read
PYPY = hasattr(sys, 'pypy_version_info') from _setuputils import read_version
WIN = sys.platform.startswith('win') from _setuputils import system
CFFI_WIN_BUILD_ANYWAY = os.environ.get("PYPY_WIN_BUILD_ANYWAY") from _setuputils import PYPY, WIN, CFFI_WIN_BUILD_ANYWAY
from _setuputils import IGNORE_CFFI
from _setuputils import ConfiguringBuildExt
from _setuputils import MakeSdist
from _setuputils import BuildFailed
# setuptools is *required* on Windows
# (https://bugs.python.org/issue23246) and for PyPy. No reason not to
# use it everywhere.
from setuptools import Extension, setup
from setuptools import find_packages
if PYPY and WIN and not CFFI_WIN_BUILD_ANYWAY: if PYPY and WIN and not CFFI_WIN_BUILD_ANYWAY:
# We can't properly handle (hah!) file-descriptors and # We can't properly handle (hah!) file-descriptors and
...@@ -25,10 +29,6 @@ if PYPY and WIN and not CFFI_WIN_BUILD_ANYWAY: ...@@ -25,10 +29,6 @@ if PYPY and WIN and not CFFI_WIN_BUILD_ANYWAY:
raise Exception("Unable to install on PyPy/Windows") raise Exception("Unable to install on PyPy/Windows")
if WIN: if WIN:
# https://bugs.python.org/issue23246
# We must have setuptools on windows
__import__('setuptools')
# Make sure the env vars that make.cmd needs are set # Make sure the env vars that make.cmd needs are set
if not os.environ.get('PYTHON_EXE'): if not os.environ.get('PYTHON_EXE'):
os.environ['PYTHON_EXE'] = 'pypy' if PYPY else 'python' os.environ['PYTHON_EXE'] = 'pypy' if PYPY else 'python'
...@@ -38,371 +38,68 @@ if WIN: ...@@ -38,371 +38,68 @@ if WIN:
if sys.version_info[:2] < (2, 7): if sys.version_info[:2] < (2, 7):
raise Exception("Please install gevent 1.1 for Python 2.6") raise Exception("Please install gevent 1.1 for Python 2.6")
import distutils if PYPY and sys.pypy_version_info[:3] < (2, 6, 1): # pylint:disable=no-member
import distutils.sysconfig # to get CFLAGS to pass into c-ares configure script # We have to have CFFI >= 1.3.0, and this platform cannot upgrade
# it.
raise Exception("PyPy >= 2.6.1 is required")
try:
from setuptools import Extension, setup
except ImportError:
if PYPY:
# need setuptools for include_package_data to work
raise
from distutils.core import Extension, setup
from distutils.command.build_ext import build_ext
from distutils.command.sdist import sdist as _sdist
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError)
with open('gevent/__init__.py') as _:
__version__ = re.search(r"__version__\s*=\s*'(.*)'", _.read(), re.M).group(1)
assert __version__
def _quoted_abspath(p):
return '"' + abspath(p) + '"'
def parse_environ(key):
value = os.environ.get(key)
if not value:
return
value = value.lower().strip()
if value in ('1', 'true', 'on', 'yes'):
return True
elif value in ('0', 'false', 'off', 'no'):
return False
raise ValueError('Environment variable %r has invalid value %r. Please set it to 1, 0 or an empty string' % (key, value))
def get_config_value(key, defkey, path):
value = parse_environ(key)
if value is None:
value = parse_environ(defkey)
if value is not None:
return value
return os.path.exists(path)
LIBEV_EMBED = get_config_value('LIBEV_EMBED', 'EMBED', 'libev')
CARES_EMBED = get_config_value('CARES_EMBED', 'EMBED', 'c-ares')
define_macros = []
libraries = []
# Configure libev in place; but cp the config.h to the old directory;
# if we're building a CPython extension, the old directory will be
# the build/temp.XXX/libev/ directory. If we're building from a
# source checkout on pypy, OLDPWD will be the location of setup.py
# and the PyPy branch will clean it up.
libev_configure_command = ' '.join([
"(cd ", _quoted_abspath('libev/'),
" && /bin/sh ./configure ",
" && cp config.h \"$OLDPWD\"",
")",
'> configure-output.txt'
])
# See #616, trouble building for a 32-bit python against a 64-bit platform
_config_vars = distutils.sysconfig.get_config_var("CFLAGS")
if _config_vars and "m32" in _config_vars:
_m32 = 'CFLAGS="' + os.getenv('CFLAGS', '') + ' -m32" '
else:
_m32 = ''
# Use -r, not -e, for support of old solaris. See https://github.com/gevent/gevent/issues/777
ares_configure_command = ' '.join(["(cd ", _quoted_abspath('c-ares/'),
" && if [ -r ares_build.h ]; then cp ares_build.h ares_build.h.orig; fi ",
" && /bin/sh ./configure --disable-dependency-tracking " + _m32 + "CONFIG_COMMANDS= ",
" && cp ares_config.h ares_build.h \"$OLDPWD\" ",
" && mv ares_build.h.orig ares_build.h)",
"> configure-output.txt"])
if WIN:
libraries += ['ws2_32']
define_macros += [('FD_SETSIZE', '1024'), ('_WIN32', '1')]
def expand(*lst):
result = []
for item in lst:
for name in sorted(glob(item)):
result.append(name)
return result
CORE = Extension(name='gevent.corecext', __version__ = read_version()
sources=['gevent/gevent.corecext.c'],
include_dirs=['libev'] if LIBEV_EMBED else [],
libraries=libraries,
define_macros=define_macros,
depends=expand('gevent/callbacks.*', 'gevent/stathelper.c', 'gevent/libev*.h', 'libev/*.*'))
# QQQ libev can also use -lm, however it seems to be added implicitly
ARES = Extension(name='gevent.ares',
sources=['gevent/gevent.ares.c'],
include_dirs=['c-ares'] if CARES_EMBED else [],
libraries=libraries,
define_macros=define_macros,
depends=expand('gevent/dnshelper.c', 'gevent/cares_*.*'))
ARES.optional = True
def make_universal_header(filename, *defines):
defines = [('#define %s ' % define, define) for define in defines]
with open(filename, 'r') as f:
lines = f.read().split('\n')
ifdef = 0
with open(filename, 'w') as f:
for line in lines:
if line.startswith('#ifdef'):
ifdef += 1
elif line.startswith('#endif'):
ifdef -= 1
elif not ifdef:
for prefix, define in defines:
if line.startswith(prefix):
line = '#ifdef __LP64__\n#define %s 8\n#else\n#define %s 4\n#endif' % (define, define)
break
print(line, file=f)
def _system(cmd):
sys.stdout.write('Running %r in %s\n' % (cmd, os.getcwd()))
return check_call(cmd, shell=True)
def system(cmd):
if _system(cmd):
sys.exit(1)
def configure_libev(bext, ext):
if WIN:
CORE.define_macros.append(('EV_STANDALONE', '1'))
return
bdir = os.path.join(bext.build_temp, 'libev')
ext.include_dirs.insert(0, bdir)
if not os.path.isdir(bdir):
os.makedirs(bdir)
cwd = os.getcwd()
os.chdir(bdir)
try:
if os.path.exists('config.h'):
return
rc = _system(libev_configure_command)
if rc == 0 and sys.platform == 'darwin':
make_universal_header('config.h', 'SIZEOF_LONG', 'SIZEOF_SIZE_T', 'SIZEOF_TIME_T')
finally:
os.chdir(cwd)
def configure_ares(bext, ext):
bdir = os.path.join(bext.build_temp, 'c-ares')
ext.include_dirs.insert(0, bdir)
if not os.path.isdir(bdir):
os.makedirs(bdir)
if WIN:
shutil.copy("c-ares\\ares_build.h.dist", os.path.join(bdir, "ares_build.h"))
return
cwd = os.getcwd()
os.chdir(bdir)
try:
if os.path.exists('ares_config.h') and os.path.exists('ares_build.h'):
return
try:
rc = _system(ares_configure_command)
except:
with open('configure-output.txt', 'r') as t:
print(t.read(), file=sys.stderr)
raise
if rc == 0 and sys.platform == 'darwin':
make_universal_header('ares_build.h', 'CARES_SIZEOF_LONG')
make_universal_header('ares_config.h', 'SIZEOF_LONG', 'SIZEOF_SIZE_T', 'SIZEOF_TIME_T')
finally:
os.chdir(cwd)
if LIBEV_EMBED:
CORE.define_macros += [('LIBEV_EMBED', '1'),
('EV_COMMON', ''), # we don't use void* data
# libev watchers that we don't use currently:
('EV_CLEANUP_ENABLE', '0'),
('EV_EMBED_ENABLE', '0'),
("EV_PERIODIC_ENABLE", '0')]
CORE.configure = configure_libev
if sys.platform == "darwin":
os.environ["CPPFLAGS"] = ("%s %s" % (os.environ.get("CPPFLAGS", ""), "-U__llvm__")).lstrip()
if os.environ.get('GEVENTSETUP_EV_VERIFY') is not None:
CORE.define_macros.append(('EV_VERIFY', os.environ['GEVENTSETUP_EV_VERIFY']))
else:
CORE.libraries.append('ev')
if CARES_EMBED:
ARES.sources += expand('c-ares/*.c')
# Strip the standalone binaries that would otherwise
# cause linking issues
for bin_c in ('acountry', 'adig', 'ahost'):
try:
ARES.sources.remove('c-ares/' + bin_c + '.c')
except ValueError:
pass
ARES.configure = configure_ares
if WIN:
ARES.libraries += ['advapi32']
ARES.define_macros += [('CARES_STATICLIB', '')]
else:
ARES.define_macros += [('HAVE_CONFIG_H', '')]
if sys.platform != 'darwin':
ARES.libraries += ['rt']
ARES.define_macros += [('CARES_EMBED', '1')]
else:
ARES.libraries.append('cares')
ARES.define_macros += [('HAVE_NETDB_H', '')]
_ran_make = []
def make(targets=''): from _setuplibev import libev_configure_command
# NOTE: We have two copies of the makefile, one from _setuplibev import LIBEV_EMBED
# for posix, one for windows. Our sdist command takes from _setuplibev import CORE
# care of renaming the posix one so it doesn't get into
# the .tar.gz file (we don't want to re-run make in a released
# file). We trigger off the presence/absence of that file altogether
# to skip both posix and unix branches.
# See https://github.com/gevent/gevent/issues/757
if not _ran_make:
if os.path.exists('Makefile'):
if WIN:
# make.cmd handles checking for PyPy and only making the
# right things, so we can ignore the targets
system("appveyor\\make.cmd")
else:
if "PYTHON" not in os.environ:
os.environ["PYTHON"] = sys.executable
system('make ' + targets)
_ran_make.append(1)
from _setupares import ARES
class sdist(_sdist): SEMAPHORE = Extension(name="gevent._semaphore",
sources=["src/gevent/gevent._semaphore.c"])
def run(self): EXT_MODULES = [
renamed = False CORE,
if os.path.exists('Makefile'): ARES,
make() SEMAPHORE,
os.rename('Makefile', 'Makefile.ext') ]
renamed = True
try:
return _sdist.run(self)
finally:
if renamed:
os.rename('Makefile.ext', 'Makefile')
class my_build_ext(build_ext):
def gevent_prepare(self, ext):
configure = getattr(ext, 'configure', None)
if configure:
configure(self, ext)
def build_extension(self, ext):
self.gevent_prepare(ext)
try:
result = build_ext.build_extension(self, ext)
except ext_errors:
if getattr(ext, 'optional', False):
raise BuildFailed
else:
raise
if not PYPY:
self.gevent_symlink(ext)
return result
def gevent_symlink(self, ext):
# hack: create a symlink from build/../core.so to gevent/core.so
# to prevent "ImportError: cannot import name core" failures
try:
fullname = self.get_ext_fullname(ext.name)
modpath = fullname.split('.')
filename = self.get_ext_filename(ext.name)
filename = os.path.split(filename)[-1]
if not self.inplace:
filename = os.path.join(*modpath[:-1] + [filename])
path_to_build_core_so = os.path.join(self.build_lib, filename)
path_to_core_so = join('gevent', basename(path_to_build_core_so))
link(path_to_build_core_so, path_to_core_so)
except Exception:
traceback.print_exc()
def link(source, dest):
source = abspath(source)
dest = abspath(dest)
if source == dest:
return
try:
os.unlink(dest)
except OSError:
pass
try:
os.symlink(source, dest)
sys.stdout.write('Linking %s to %s\n' % (source, dest))
except (OSError, AttributeError):
sys.stdout.write('Copying %s to %s\n' % (source, dest))
shutil.copyfile(source, dest)
class BuildFailed(Exception):
pass
def read(name, *args):
try:
with open(join(dirname(__file__), name)) as f:
return f.read(*args)
except OSError:
return ''
cffi_modules = ['gevent/_corecffi_build.py:ffi'] cffi_modules = ['src/gevent/libev/_corecffi_build.py:ffi']
if PYPY: if PYPY:
install_requires = [] install_requires = []
setup_requires = []
EXT_MODULES.remove(CORE)
EXT_MODULES.remove(SEMAPHORE)
# By building the semaphore with Cython under PyPy, we get
# atomic operations (specifically, exiting/releasing), at the
# cost of some speed (one trivial semaphore micro-benchmark put the pure-python version
# at around 1s and the compiled version at around 4s). Some clever subclassing
# and having only the bare minimum be in cython might help reduce that penalty.
# NOTE: You must use version 0.23.4 or later to avoid a memory leak.
# https://mail.python.org/pipermail/cython-devel/2015-October/004571.html
# However, that's all for naught on up to and including PyPy 4.0.1 which
# have some serious crashing bugs with GC interacting with cython,
# so this is disabled
else: else:
install_requires = ['greenlet >= 0.4.9'] install_requires = ['greenlet >= 0.4.9']
setup_kwds = {} setup_requires = []
try: try:
cffi = __import__('cffi') cffi = __import__('cffi')
except ImportError: except ImportError:
setup_kwds = {} pass
else: else:
_min_cffi_version = (1, 3, 0) if IGNORE_CFFI and not PYPY:
_cffi_version_is_supported = cffi.__version_info__ >= _min_cffi_version # Allow distributors to turn off CFFI builds
_kwds = {'cffi_modules': cffi_modules} # even if it's available, because CFFI always embeds
# We already checked for PyPy on Windows above and excluded it # our copy of libev and they may not want that.
if PYPY: del cffi_modules[:]
if not _cffi_version_is_supported: # Note that we don't add cffi to install_requires, it's
raise Exception("PyPy 2.6.1 or higher is required") # optional. We tend to build and distribute wheels with the CFFI
setup_kwds = _kwds # modules built and they can be imported if CFFI is installed.
elif LIBEV_EMBED and (not WIN or CFFI_WIN_BUILD_ANYWAY): # install_requires.append('cffi >= 1.3.0')
if not _cffi_version_is_supported:
print("WARNING: CFFI version 1.3.0 is required to build CFFI backend", file=sys.stderr)
else:
# If we're on CPython, we can only reliably build
# the CFFI module if we're embedding libev (in some cases
# we wind up embedding it anyway, which may not be what the
# distributor wanted).
setup_kwds = _kwds
# If we are running info / help commands, or we're being imported by # If we are running info / help commands, or we're being imported by
# tools like pyroma, we don't need to build anything # tools like pyroma, we don't need to build anything
...@@ -416,69 +113,19 @@ if ((len(sys.argv) >= 2 ...@@ -416,69 +113,19 @@ if ((len(sys.argv) >= 2
'--long-description'))) '--long-description')))
or __name__ != '__main__'): or __name__ != '__main__'):
_BUILDING = False _BUILDING = False
ext_modules = []
include_package_data = PYPY
run_make = False
elif PYPY:
if not WIN:
# We need to configure libev because the CORE Extension
# won't do it (since we're not building it)
system(libev_configure_command)
# Then get rid of the extra copy created in place
system('rm config.h')
# NOTE that we're NOT adding the distutils extension module, as
# doing so compiles the module already: import gevent._corecffi_build
# imports gevent, which imports the hub, which imports the core,
# which compiles the module in-place. Instead we use the setup-time
# support of cffi_modules
#from gevent import _corecffi_build
ext_modules = [
#_corecffi_build.ffi.distutils_extension(),
ARES,
# By building the semaphore with Cython under PyPy, we get
# atomic operations (specifically, exiting/releasing), at the
# cost of some speed (one trivial semaphore micro-benchmark put the pure-python version
# at around 1s and the compiled version at around 4s). Some clever subclassing
# and having only the bare minimum be in cython might help reduce that penalty.
# NOTE: You must use version 0.23.4 or later to avoid a memory leak.
# https://mail.python.org/pipermail/cython-devel/2015-October/004571.html
# However, that's all for naught on up to and including PyPy 4.0.1 which
# have some serious crashing bugs with GC interacting with cython,
# so this is disabled (would need to add gevent/gevent._semaphore.c back to
# the run_make line)
#Extension(name="gevent._semaphore",
# sources=["gevent/gevent._semaphore.c"]),
]
include_package_data = True
run_make = 'gevent/gevent.ares.c'
else:
ext_modules = [
CORE,
ARES,
Extension(name="gevent._semaphore",
sources=["gevent/gevent._semaphore.c"]),
]
include_package_data = False
run_make = True
if run_make and os.path.exists("Makefile"):
# The 'sdist' command renames our makefile after it
# runs so we don't try to use it from a release tarball.
# NOTE: This is effectively pointless and serves only for
# documentation/metadata, because we run 'make' *before* we run
# setup(), so installing cython happens too late.
setup_requires = ['cython >= 0.24']
else:
setup_requires = []
def run_setup(ext_modules, run_make): def run_setup(ext_modules, run_make):
if run_make: if run_make:
if isinstance(run_make, str): if (not LIBEV_EMBED and not WIN and cffi_modules) or PYPY:
make(run_make) # We're not embedding libev but we do want
else: # to build the CFFI module. We need to configure libev
make() # because the CORE Extension won't.
# TODO: Generalize this.
system(libev_configure_command)
MakeSdist.make()
setup( setup(
name='gevent', name='gevent',
version=__version__, version=__version__,
...@@ -491,12 +138,16 @@ def run_setup(ext_modules, run_make): ...@@ -491,12 +138,16 @@ def run_setup(ext_modules, run_make):
maintainer='Jason Madden', maintainer='Jason Madden',
maintainer_email='jason@nextthought.com', maintainer_email='jason@nextthought.com',
url='http://www.gevent.org/', url='http://www.gevent.org/',
packages=['gevent'], package_dir={'': 'src'},
include_package_data=include_package_data, packages=find_packages('src'),
include_package_data=True,
ext_modules=ext_modules, ext_modules=ext_modules,
cmdclass=dict(build_ext=my_build_ext, sdist=sdist), cmdclass=dict(build_ext=ConfiguringBuildExt, sdist=MakeSdist),
install_requires=install_requires, install_requires=install_requires,
setup_requires=setup_requires, setup_requires=setup_requires,
# It's always safe to pass the CFFI keyword, even if
# cffi is not installed: it's just ignored in that case.
cffi_modules=cffi_modules,
zip_safe=False, zip_safe=False,
test_suite="greentest.testrunner", test_suite="greentest.testrunner",
classifiers=[ classifiers=[
...@@ -513,8 +164,8 @@ def run_setup(ext_modules, run_make): ...@@ -513,8 +164,8 @@ def run_setup(ext_modules, run_make):
"Topic :: Internet", "Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers", "Intended Audience :: Developers",
"Development Status :: 4 - Beta"], "Development Status :: 4 - Beta"
**setup_kwds ],
) )
# Tools like pyroma expect the actual call to `setup` to be performed # Tools like pyroma expect the actual call to `setup` to be performed
...@@ -528,11 +179,11 @@ if os.getenv('READTHEDOCS'): ...@@ -528,11 +179,11 @@ if os.getenv('READTHEDOCS'):
os.environ['PATH'] = new_path os.environ['PATH'] = new_path
try: try:
run_setup(ext_modules, run_make=run_make) run_setup(EXT_MODULES, run_make=_BUILDING)
except BuildFailed: except BuildFailed:
if ARES not in ext_modules: if ARES not in EXT_MODULES:
raise raise
ext_modules.remove(ARES) EXT_MODULES.remove(ARES)
run_setup(ext_modules, run_make=run_make) run_setup(EXT_MODULES, run_make=_BUILDING)
if ARES not in ext_modules and __name__ == '__main__' and _BUILDING: if ARES not in EXT_MODULES and __name__ == '__main__' and _BUILDING:
sys.stderr.write('\nWARNING: The gevent.ares extension has been disabled.\n') sys.stderr.write('\nWARNING: The gevent.ares extension has been disabled.\n')
...@@ -9,13 +9,13 @@ try: ...@@ -9,13 +9,13 @@ try:
if os.environ.get('GEVENT_CORE_CFFI_ONLY'): if os.environ.get('GEVENT_CORE_CFFI_ONLY'):
raise ImportError("Not attempting corecext") raise ImportError("Not attempting corecext")
from gevent import corecext as _core from gevent.libev import corecext as _core
except ImportError: except ImportError:
if os.environ.get('GEVENT_CORE_CEXT_ONLY'): if os.environ.get('GEVENT_CORE_CEXT_ONLY'):
raise raise
# CFFI/PyPy # CFFI/PyPy
from gevent import corecffi as _core from gevent.libev import corecffi as _core
copy_globals(_core, globals()) copy_globals(_core, globals())
......
...@@ -61,10 +61,10 @@ void vfd_free(int); ...@@ -61,10 +61,10 @@ void vfd_free(int);
include_dirs = [ include_dirs = [
thisdir, # libev_vfd.h thisdir, # libev_vfd.h
os.path.abspath(os.path.join(thisdir, '..', 'libev')), os.path.abspath(os.path.join(thisdir, '..', '..', '..', 'deps', 'libev')),
] ]
ffi.cdef(_cdef) ffi.cdef(_cdef)
ffi.set_source('gevent._corecffi', _source, include_dirs=include_dirs) ffi.set_source('gevent.libev._corecffi', _source, include_dirs=include_dirs)
if __name__ == '__main__': if __name__ == '__main__':
# XXX: Note, on Windows, we would need to specify the external libraries # XXX: Note, on Windows, we would need to specify the external libraries
......
/* Copyright (c) 2011-2012 Denis Bilenko. See LICENSE for details. */ /* Copyright (c) 2011-2012 Denis Bilenko. See LICENSE for details. */
#ifdef Py_PYTHON_H #ifdef Py_PYTHON_H
#if 1 /* the name changes depending on our file layout and --module-name option */
/* name generated by cython when we use threads */ #define _GEVENTLOOP struct __pyx_vtabstruct_6gevent_5libev_8corecext_loop
#define _GEVENTLOOP struct __pyx_vtabstruct_8corecext_loop
#else
#define _GEVENTLOOP struct __pyx_vtabstruct_6gevent_8corecext_loop
#endif
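/* Hedged note: Cython encodes the dotted module path as length-prefixed
   segments in its generated names (6gevent 5libev 8corecext for
   "gevent.libev.corecext"), which is why this vtab struct tag tracks the
   source layout and the --module-name passed to cythonpp.py. */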
static void gevent_handle_error(struct PyGeventLoopObject* loop, PyObject* context) { static void gevent_handle_error(struct PyGeventLoopObject* loop, PyObject* context) {
PyThreadState *tstate; PyThreadState *tstate;
......
...@@ -5,7 +5,10 @@ ...@@ -5,7 +5,10 @@
# cython: emit_code_comments=False # cython: emit_code_comments=False
cimport cython cimport cython
cimport libev cimport libev
# Note this is not the standard cython 'cpython' (which has a backwards compat alias of 'python')
# it's our custom def. If it's not on the include path, we get warned.
from python cimport * from python cimport *
# Work around lack of absolute_import in Cython # Work around lack of absolute_import in Cython
# Note for PY3: not doing so will leave reference to locals() on import # Note for PY3: not doing so will leave reference to locals() on import
# (reproducible under Python 3.3, not under Python 3.4; see test__refcount_core.py) # (reproducible under Python 3.3, not under Python 3.4; see test__refcount_core.py)
......
...@@ -16,10 +16,10 @@ __all__ = [ ...@@ -16,10 +16,10 @@ __all__ = [
'loop', 'loop',
] ]
import gevent._corecffi # pylint:disable=no-name-in-module import gevent.libev._corecffi as _corecffi # pylint:disable=no-name-in-module
ffi = gevent._corecffi.ffi # pylint:disable=no-member ffi = _corecffi.ffi # pylint:disable=no-member
libev = gevent._corecffi.lib # pylint:disable=no-member libev = _corecffi.lib # pylint:disable=no-member
if hasattr(libev, 'vfd_open'): if hasattr(libev, 'vfd_open'):
# Must be on windows # Must be on windows
......
...@@ -44,7 +44,7 @@ if __name__ == '__main__': ...@@ -44,7 +44,7 @@ if __name__ == '__main__':
sys.path.append('.') sys.path.append('.')
base = os.path.dirname(gevent.__file__) base = os.path.dirname(gevent.__file__)
print(base) print(base)
os.chdir('..') os.chdir('../..')
globs = {'myfunction': myfunction, 'gevent': gevent, 'socket': socket} globs = {'myfunction': myfunction, 'gevent': gevent, 'socket': socket}
......
...@@ -17,7 +17,7 @@ class Test_udp_client(TestCase): ...@@ -17,7 +17,7 @@ class Test_udp_client(TestCase):
server = DatagramServer('127.0.0.1:9000', handle) server = DatagramServer('127.0.0.1:9000', handle)
server.start() server.start()
try: try:
run([sys.executable, '-u', 'udp_client.py', 'Test_udp_client'], timeout=10, cwd='../examples/') run([sys.executable, '-u', 'udp_client.py', 'Test_udp_client'], timeout=10, cwd='../../examples/')
finally: finally:
server.close() server.close()
self.assertEqual(log, [b'Test_udp_client']) self.assertEqual(log, [b'Test_udp_client'])
......
...@@ -5,7 +5,7 @@ import time ...@@ -5,7 +5,7 @@ import time
import util import util
cwd = '../examples/' cwd = '../../examples/'
ignore = ['wsgiserver.py', ignore = ['wsgiserver.py',
'wsgiserver_ssl.py', 'wsgiserver_ssl.py',
'webproxy.py', 'webproxy.py',
...@@ -24,7 +24,7 @@ time_ranges = { ...@@ -24,7 +24,7 @@ time_ranges = {
def main(tests=None): def main(tests=None):
if not tests: if not tests:
tests = set(os.path.basename(x) for x in glob.glob('../examples/*.py')) tests = set(os.path.basename(x) for x in glob.glob(cwd + '/*.py'))
tests = sorted(tests) tests = sorted(tests)
failed = [] failed = []
......
...@@ -209,7 +209,7 @@ def run(command, **kwargs): ...@@ -209,7 +209,7 @@ def run(command, **kwargs):
class TestServer(unittest.TestCase): class TestServer(unittest.TestCase):
cwd = '../examples/' cwd = '../../examples/'
args = [] args = []
before_delay = 3 before_delay = 3
after_delay = 0.5 after_delay = 0.5
......
...@@ -13,16 +13,6 @@ whitelist_externals = ...@@ -13,16 +13,6 @@ whitelist_externals =
* *
commands = commands =
make toxtest make toxtest
# our Makefile assumes it's being run from the directory
# containing setup.py. However, that directory also has the "gevent"
# directory in it. Which means it's on sys.path first. So unless it
# has the correct binary extensions for the python version/impl we're trying
# to run, nothing works. By setting usedevelop=True, then the .so gets built
# in place in that gevent directory, which makes the tox commands run. This
# can step on the toes of various implementations when they share the same name
# for files, but at least it lets some implementations co-exist.
# The real solution is probably to put "gevent" beneath a "src" directory.
usedevelop = True
[testenv:py33] [testenv:py33]
# On OS X, at least, the binary wheel for 1.5.2 is broken # On OS X, at least, the binary wheel for 1.5.2 is broken
......
...@@ -407,7 +407,8 @@ def _run_cython_on_file(configuration, pyx_filename, ...@@ -407,7 +407,8 @@ def _run_cython_on_file(configuration, pyx_filename,
py_banner, banner, py_banner, banner,
output_filename, output_filename,
counter, lines, counter, lines,
cache=None): cache=None,
module_name=None):
value = ''.join(lines) value = ''.join(lines)
sourcehash = md5(value.encode("utf-8")).hexdigest() sourcehash = md5(value.encode("utf-8")).hexdigest()
comment = configuration.format_tag() + " hash:" + str(sourcehash) comment = configuration.format_tag() + " hash:" + str(sourcehash)
...@@ -420,7 +421,7 @@ def _run_cython_on_file(configuration, pyx_filename, ...@@ -420,7 +421,7 @@ def _run_cython_on_file(configuration, pyx_filename,
tempdir = tempfile.mkdtemp() tempdir = tempfile.mkdtemp()
#unique_pyx_filename = pyx_filename #unique_pyx_filename = pyx_filename
#unique_output_filename = output_filename #unique_output_filename = output_filename
unique_pyx_filename = os.path.join(tempdir, pyx_filename) unique_pyx_filename = os.path.join(tempdir, module_name or pyx_filename)
unique_output_filename = os.path.join(tempdir, output_filename) unique_output_filename = os.path.join(tempdir, output_filename)
dirname = os.path.dirname(unique_pyx_filename) # output must be in same dir dirname = os.path.dirname(unique_pyx_filename) # output must be in same dir
...@@ -442,7 +443,8 @@ def _run_cython_on_file(configuration, pyx_filename, ...@@ -442,7 +443,8 @@ def _run_cython_on_file(configuration, pyx_filename,
return configuration.attach_tags(output), configuration, sourcehash return configuration.attach_tags(output), configuration, sourcehash
def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, preprocessed): def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, preprocessed,
module_name=None):
counter = 0 counter = 0
threads = [] threads = []
cache = {} cache = {}
...@@ -452,7 +454,7 @@ def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, prepr ...@@ -452,7 +454,7 @@ def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, prepr
args=(configuration, pyx_filename, args=(configuration, pyx_filename,
py_banner, banner, output_filename, py_banner, banner, output_filename,
counter, lines, counter, lines,
cache))) cache, module_name)))
threads[-1].start() threads[-1].start()
for t in threads: for t in threads:
...@@ -482,7 +484,7 @@ def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, prepr ...@@ -482,7 +484,7 @@ def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, prepr
return ordered_results return ordered_results
def process_filename(filename, output_filename=None): def process_filename(filename, output_filename=None, module_name=None):
"""Process the .ppyx file with preprocessor and compile it with cython. """Process the .ppyx file with preprocessor and compile it with cython.
The algorithm is as following: The algorithm is as following:
...@@ -515,7 +517,7 @@ def process_filename(filename, output_filename=None): ...@@ -515,7 +517,7 @@ def process_filename(filename, output_filename=None):
reference_pyx = preprocessed.pop(None) reference_pyx = preprocessed.pop(None)
sources = _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, sources = _run_cython_on_files(pyx_filename, py_banner, banner, output_filename,
preprocessed) preprocessed, module_name)
log('Generating %s ', output_filename) log('Generating %s ', output_filename)
result = generate_merged(sources) result = generate_merged(sources)
...@@ -914,7 +916,10 @@ def run_cython(filename, sourcehash, output_filename, banner, comment, cache=Non ...@@ -914,7 +916,10 @@ def run_cython(filename, sourcehash, output_filename, banner, comment, cache=Non
result = cache.get(sourcehash) if cache is not None else None result = cache.get(sourcehash) if cache is not None else None
# Use an array for the argument so that filename arguments are properly # Use an array for the argument so that filename arguments are properly
# quoted according to local convention # quoted according to local convention
command = [CYTHON, '-o', output_filename, '-I', 'gevent', filename] command = [CYTHON, '-o', output_filename,
'-I', os.path.join('src', 'gevent', 'libev'),
'-I', os.path.join('src', 'gevent'), # python.pxd, shared with c-ares
filename]
if result is not None: if result is not None:
log('Reusing %s # %s', command, comment) log('Reusing %s # %s', command, comment)
return result return result
...@@ -1023,6 +1028,9 @@ def main(): ...@@ -1023,6 +1028,9 @@ def main():
parser.add_argument('--ignore-cond', action='store_true', help='Ignore conditional directives (only expand definitions)') parser.add_argument('--ignore-cond', action='store_true', help='Ignore conditional directives (only expand definitions)')
parser.add_argument('--write-intermediate', action='store_true', help='Save intermediate files produced by preprocessor and Cython') parser.add_argument('--write-intermediate', action='store_true', help='Save intermediate files produced by preprocessor and Cython')
parser.add_argument('-o', '--output-file', help='Specify name of generated C file') parser.add_argument('-o', '--output-file', help='Specify name of generated C file')
# TODO: Derive the module name automatically from the input filename relative to the base
# dir.
parser.add_argument('--module-name', help="specify name of .pyx module")
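# Example (mirroring the Makefile rule in this commit):
#   python util/cythonpp.py -o gevent.corecext.c \
#       --module-name gevent.libev.corecext.pyx src/gevent/libev/corecext.ppyx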
parser.add_argument("input") parser.add_argument("input")
options = parser.parse_args() options = parser.parse_args()
filename = options.input filename = options.input
...@@ -1057,7 +1065,7 @@ def main(): ...@@ -1057,7 +1065,7 @@ def main():
sys.stdout.write(preprocess_filename(filename, FakeConfig())) sys.stdout.write(preprocess_filename(filename, FakeConfig()))
if run: if run:
process_filename(filename, options.output_file) process_filename(filename, options.output_file, options.module_name)
if __name__ == '__main__': if __name__ == '__main__':
......