Boxiang Sun / Pyston, commit 93df6ef7

Merge pull request #1395 from kmod/log_hash

Add "log hashing" to see new failures

Authored Nov 10, 2016 by Kevin Modzelewski; committed by GitHub on Nov 10, 2016
Parents: 236a402e, eb3edfd6

Showing 24 changed files with 263 additions and 45 deletions (+263 / -45)
src/asm_writing/disassemble.h        +2   -0
test/extra/M2Crypto_test.py          +9   -4
test/extra/avro_test.py              +7   -2
test/extra/babel_test.py             +6   -1
test/extra/cffi_1.7_test.py          +6   -1
test/extra/cffi_test.py              +11  -1
test/extra/cheetah_test.py           +6   -1
test/extra/formencode_test.py        +6   -1
test/extra/geoip_test.py             +9   -1
test/extra/lxml_test.py              +8   -3
test/extra/mysqldb_test.py           +9   -4
test/extra/numpy_fulltest.py         +6   -1
test/extra/paste_test.py             +6   -3
test/extra/protobuf_test.py          +6   -1
test/extra/pyicu_test.py             +10  -5
test/extra/pylons_test.py            +8   -3
test/extra/pyopenssl_test.py         +6   -1
test/extra/routes_test.py            +6   -1
test/extra/sass_test.py              +6   -2
test/extra/simplejson_test.py        +10  -4
test/extra/unidecode_test.py         +7   -2
test/integration/pycrypto_test.py    +7   -1
test/lib/test_helper.py              +104 -1
tools/tester.py                      +2   -1
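
Each test script in the list above picks up the same pattern: a hard-coded expected_log_hash (a base64-encoded bitmap of the canonicalized log lines) plus an extra keyword argument on its run_test call. A minimal sketch of that pattern follows; FOO_DIR and the hash value are placeholders for illustration, not taken from the commit:

# Sketch of the per-test pattern (FOO_DIR and the hash value are placeholders).
expected = [{'ran': 123}]
expected_log_hash = '''
<paste the base64 block that run_test prints on the first run>
'''
run_test([PYTHON_EXE, "setup.py", "test"], cwd=FOO_DIR, expected=expected,
         expected_log_hash=expected_log_hash)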

src/asm_writing/disassemble.h
@@ -15,7 +15,9 @@
#ifndef PYSTON_ASMWRITING_DISASSEMBLE_H
#define PYSTON_ASMWRITING_DISASSEMBLE_H

#include <string>
+#include <unordered_map>
+#include <vector>

#include "llvm/ADT/Twine.h"
#include "llvm/Support/raw_ostream.h"

test/extra/M2Crypto_test.py
@@ -26,7 +26,7 @@ def install_and_test_lxml():
        "skip_sslv2_tests.patch",
        "fix_testsuite_ftbfs.patch",
        "fix_testsuite_tls1.2.patch",
        "fix_testsuite_sha256.patch")
    for patch in debian_patches:
        PATCH_FILE = os.path.abspath(os.path.join(M2CRYPTO_DIR, "debian", "patches", patch))
@@ -39,11 +39,16 @@ def install_and_test_lxml():
    # M2Crypto can't find the opensslconf without this
    env["DEB_HOST_MULTIARCH"] = "/usr/include/x86_64-linux-gnu"
    # SWIG does not work with pyston if this define is not set
    env["CFLAGS"] = "-DSWIG_PYTHON_SLOW_GETSET_THIS"
    subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=M2CRYPTO_DIR, env=env)
    expected = [{'ran': 235, 'errors': 5, 'skipped': 2}]
-    run_test([PYTHON_EXE, "setup.py", "test"], cwd=M2CRYPTO_DIR, expected=expected)
+    expected_log_hash = '''
+rAAABwigI04NBogROC1ATTYBiAUIAMhCMKBLQAC1SMALgBCRQIIAgKOpBBGgIaMKAAUAkVgCUJAA
+ABCMAIwJAQAAwONQATYSBmEYSACDAEUoRABkJKEAhBBEMgYMwQoFABBwCETByQggaAkAqAgUgAAG
+QBWgAamiIaSEIIIiGjE=
+'''
+    run_test([PYTHON_EXE, "setup.py", "test"], cwd=M2CRYPTO_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, None, force_create=True)
install_and_test_lxml()

test/extra/avro_test.py
@@ -8,7 +8,7 @@ PYTHON_EXE = os.path.abspath(os.path.join(ENV_NAME, "bin", "python"))
PYTEST_EXE = os.path.abspath(os.path.join(ENV_NAME, "bin", "py.test"))
AVRO_DIR = os.path.abspath(os.path.join(ENV_NAME, "avro-1.7.7"))

packages = ["pytest==2.8.7", "py==1.4.29", "avro==1.7.7"]
create_virtenv(ENV_NAME, packages, force_create=True)

url = "https://pypi.python.org/packages/source/a/avro/avro-1.7.7.tar.gz"
@@ -20,4 +20,9 @@ env["PYTHONPATH"] = os.path.abspath(os.path.join(ENV_NAME, "lib/python2.7/site-p
# cpython has the same number of failures
expected = [{'failed': 2, 'passed': 47}]
-run_test([PYTEST_EXE], env=env, cwd=AVRO_DIR, expected=expected)
+expected_log_hash = '''
+gBAAAACAAAABBAgAAAACAAgAIAAABAQAAAAAAACAAAgDAIAAABAAIAMBAQgACBAAFBAQAACBAAAA
+EAEAAAAQAABAAAAAAIAAAAAAAAQAAAgICCgAEBAAAAAQAAAAAACAAAAAAAEAgAAAAAIAAAAAgBAA
+AZQAAAAAAAAAIAAEAAA=
+'''
+run_test([PYTEST_EXE], env=env, cwd=AVRO_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/babel_test.py
@@ -20,4 +20,9 @@ subprocess.check_call([PYTHON_EXE, "setup.py", "build"], cwd=BABEL_DIR)
subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=BABEL_DIR)

expected = [{"ran": 227, "failures": 3, "errors": 3}]
-run_test([NOSETESTS_EXE], cwd=BABEL_DIR, expected=expected)
+expected_log_hash = '''
+gAIAAAAACQAAAABAAAAABAAAAIAAEAAAAAAAAAAAAEAEBAAAAAAAkAAAAAAAAAAAQAAEgAAAAAAA
+AAAAAAAAAQAACAgAAAAAAAAAIAAJAAAAAAAAAAAAAAAAAAAAAEAAAAAAAhAAAAAAAAAAEACAAAAA
+EIgAAAAQAAAAAIAAAAA=
+'''
+run_test([NOSETESTS_EXE], cwd=BABEL_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/cffi_1.7_test.py
@@ -30,7 +30,12 @@ def install_and_test_cffi():
    # dir_to_test = "."
    # I just picked a subdirectory; I don't really know what it's testing.
    dir_to_test = os.path.join(CFFI_DIR, "testing", "cffi1")
-    run_test([PYTEST_EXE, dir_to_test], cwd=CFFI_DIR, expected=expected)
+    expected_log_hash = '''
+gBEACAAQEAIAwICAAAAAAABAAAAAAAACAAAAEAAAEAAAAEQEQAAAAKAAARFEEACKAAABAACAAAAA
+QgAAAEAQBAACgAEAAABAAAAAAAFAAAoAAAAAAACAAAACAAIAAUiAIAAAAAODgAAgEIQBABAACgAC
+GBACAAAAICEAABAAgQA=
+'''
+    run_test([PYTEST_EXE, dir_to_test], cwd=CFFI_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, ["pytest==2.8.7", "py==1.4.31", "pycparser==2.14"], force_create=True)
install_and_test_cffi()

test/extra/cffi_test.py
@@ -26,9 +26,19 @@ def install_and_test_cffi():
    # looks like clang 3.5 causes more errors like: 214 != -42 doing casts
    if os.environ.has_key("CC") and "clang" in os.environ["CC"]:
        expected = [{"failed": 20, "passed": 1659, "skipped": 73, "xfailed": 4}]
+        expected_log_hash = '''
+oRkAgDIgEgAAwoKiAIQAIABAQAAAAAIKBOAIUABAEAAAIMQFgQCKhKEgERFEMAgAAAIBAAiCCBAC
+CAIASESQBAQDpAEAAAogAAMBAoVQqkCKABBAAIDgAKECABJAAQiEIAAgAgOigAIwcoQBIAAACoAG
+2FIHAAQAJIELIVABgwA=
+'''
    else:
        expected = [{"failed": 11, "passed": 1668, "skipped": 73, "xfailed": 4}]
-    run_test([PYTEST_EXE], cwd=CFFI_DIR, expected=expected)
+        expected_log_hash = '''
+oRkAwBAg0gAEwoCiQIQgIQBAQAABQEKKBGAZVAhKcAAAAMQFAQAogKggFRFGEIgAAAKABgiGCBCC
+CAIASEAQHAQSpAEADEugCJEBAoFgIECDBBBEAACgACECAAJKgQicIAAgAAOChBIyUoQBIAAACoAG
+2FInAAQQpIEHARAJowE=
+'''
+    run_test([PYTEST_EXE], cwd=CFFI_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, ["pytest==2.8.7", "py==1.4.31", "pycparser==2.14"], force_create=True)
install_and_test_cffi()

test/extra/cheetah_test.py
@@ -10,4 +10,9 @@ cheetah_exe = os.path.join(ENV_NAME, "bin", "cheetah")
env = os.environ
env["PATH"] = os.path.join(ENV_NAME, "bin")

expected = [{'ran': 2138, 'errors': 4}, {'ran': 2138, 'errors': 232, 'failures': 2}]
-run_test([cheetah_exe, "test"], cwd=ENV_NAME, expected=expected, env=env)
+expected_log_hash = '''
+jcoDAKUIQTpEDIDiMwAuQFEAKABjEbNAAAACgqABAAGgGsGQaQQLg/l0gIQXbEA4IKQisBIAAlOQ
+IG4lA5AAASAqqGdMCPAAALKbAEQAYAcCEgRHAQCAAhAVJIghShwAUpAAKaEwgk0GaEUkgQIIADgb
+pKTQYrIACAshhJ6Bwh0=
+'''
+run_test([cheetah_exe, "test"], cwd=ENV_NAME, expected=expected, env=env, expected_log_hash=expected_log_hash)

test/extra/formencode_test.py
@@ -16,4 +16,9 @@ create_virtenv(ENV_NAME, packages, force_create = True)
subprocess.check_call(["patch", "-p1"], stdin=open(os.path.join(os.path.dirname(__file__), "formencode.patch")), cwd=SRC_DIR)

expected = [{'ran': 201}]
-run_test([NOSETESTS_EXE], cwd=FORMENCODE_DIR, expected=expected)
+expected_log_hash = '''
+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAgAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAgAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAA=
+'''
+run_test([NOSETESTS_EXE], cwd=FORMENCODE_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/geoip_test.py
@@ -11,4 +11,12 @@ pkg = ["nose==1.3.7", "-e", "git+http://github.com/maxmind/geoip-api-python.git@
create_virtenv(ENV_NAME, pkg, force_create=True)
GEOIP_DIR = os.path.abspath(os.path.join(SRC_DIR, "geoip"))

expected = [{'ran': 10}]
-run_test([PYTHON_EXE, "setup.py", "test"], cwd=GEOIP_DIR, expected=expected)
+expected_log_hash = '''
+ggAAAAAAQAQAAAAACAAAAAAAAAAAAAIABIAAAAAAgAACAAAAAAAIAAAAAAAAAAIIBAAAgABABAgA
+AAAAAAAAAAAAAAAAAAAAAAQAAIgAAAAAAAAAAQBAABAAEAEAAAAAAAAAAAgAAAAIAIAAAAEAAAAA
+AIAAAAgAAAAAAAAAAAA=
+'''
+run_test([PYTHON_EXE, "setup.py", "test"], cwd=GEOIP_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/lxml_test.py
@@ -32,9 +32,14 @@ def install_and_test_lxml():
    print "Applied lxml patch"

    subprocess.check_call([PYTHON_EXE, "setup.py", "build_ext", "-i", "--with-cython"], cwd=LXML_DIR)

    expected = [{'ran': 1381}]
-    run_test([PYTHON_EXE, "test.py"], cwd=LXML_DIR, expected=expected)
+    expected_log_hash = '''
+gAAAAAAAAQAAAAAIAAAAAAAAAAAAgAAAAAAAAABAAACCAEgAAAAAgAIAAAAAAACAAAAAoAAAAAAA
+ABAAAAAAAAAAAAAigAAAAAAAAAAQAAAwAgAAAAAAAAAAAAAAAAIAAAAEAAAACAAAAAAAAABBAAAA
+AAAAAAAAAAAAAAAAAAA=
+'''
+    run_test([PYTHON_EXE, "test.py"], cwd=LXML_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, None, force_create=True)
install_and_test_lxml()

test/extra/mysqldb_test.py
@@ -16,8 +16,8 @@ def install_and_test_mysqldb():
    subprocess.check_call(["git", "clone", "https://github.com/farcepest/MySQLdb1.git"], cwd=SRC_DIR)
    MYSQLDB_DIR = os.path.abspath(os.path.join(SRC_DIR, "MySQLdb1"))
    subprocess.check_call(["git", "checkout", "MySQLdb-1.2.5"], cwd=MYSQLDB_DIR)
    nosetests_exe = os.path.abspath(ENV_NAME + "/bin/nosetests")

    #apply patch
@@ -29,9 +29,14 @@ def install_and_test_mysqldb():
    subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=MYSQLDB_DIR)

    env = os.environ
    env["TESTDB"] = "travis.cnf"
    expected = [{"ran": 69}]
-    run_test([nosetests_exe], cwd=MYSQLDB_DIR, expected=expected, env=env)
+    expected_log_hash = '''
+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAA
+AAAAAAAEAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAQAAAAAAA=
+'''
+    run_test([nosetests_exe], cwd=MYSQLDB_DIR, expected=expected, env=env, expected_log_hash=expected_log_hash)

packages = ["nose==1.3.7"]
create_virtenv(ENV_NAME, packages, force_create=True)

test/extra/numpy_fulltest.py
@@ -77,8 +77,13 @@ except:
    raise

+expected_log_hash = '''
+gAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAQAAAAAABAAACAAAAAAAAAAAAA
+AAAgAAAAAAAAAAAAAAA=
+'''
test_helper.run_test(['sh', '-c', '. %s/bin/activate && python %s/numpy/tools/test-installed-numpy.py' % (ENV_DIR, ENV_DIR)],
-                     ENV_NAME, [dict(ran=6139, failures=1)])
+                     ENV_NAME, [dict(ran=6139, failures=1)], expected_log_hash=expected_log_hash)

print
print "PASSED"

test/extra/paste_test.py
@@ -37,7 +37,10 @@ print ">> "
# - no shiftjis encoding
# - slightly different error messages
expected = [{"failed": 22, "passed": 112}]
-run_test([PYTEST_EXE], cwd=PASTE_TEST_DIR, expected=expected)
+expected_log_hash = '''
+ghA0YC2ZyyvAxlQASAgCMAglIjZ2pwSCB8wuCIgiAYGKAITIQgIQLRgRYwA0e1BchxcHGsShlUij
+OBXOA0E4AQIkzEKIOCikJYUgRF5hh8YJVAioAI6FDJB8waBqoGC7hEIDCdEZkACMA4IDaLABMFSi
+n7AogIFoJ4iCeUEEggM=
+'''
+run_test([PYTEST_EXE], cwd=PASTE_TEST_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/protobuf_test.py
@@ -28,7 +28,12 @@ def install_and_test_protobuf():
    subprocess.check_call([PYTHON_EXE, "setup.py", "build"], cwd=PROTOBUF_PY_DIR, env=env)

    expected = [{"ran": 216}]
-    run_test([PYTHON_EXE, "setup.py", "test"], cwd=PROTOBUF_PY_DIR, expected=expected, env=env)
+    expected_log_hash = '''
+gAAQSBxQEAxCwBwkAAREFCAUCQAAiAsIBggpNIQAAIBBBAAEAAQQAAADDEgABFI9QpcAlQAAgwEi
+HEAJAESKkAKBGAAlpAAIAMggcAgAQQsQMwCkEgAisDKIAhEhABCMEE4CBAAEQQQAgIAIiIAEJBIy
+gUBSkjAAIAUAQA8EIAI=
+'''
+    run_test([PYTHON_EXE, "setup.py", "test"], cwd=PROTOBUF_PY_DIR, expected=expected, env=env, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, None, force_create=True)
install_and_test_protobuf()

test/extra/pyicu_test.py
@@ -15,12 +15,12 @@ def install_and_test_pyicu():
    subprocess.check_call(["wget", url], cwd=SRC_DIR)
    subprocess.check_call(["tar", "-zxf", "icu4c-4_2_1-src.tgz"], cwd=SRC_DIR)
    ICU_DIR = os.path.abspath(os.path.join(SRC_DIR, "icu", "source"))
    INSTALL_DIR = os.path.join(SRC_DIR, "icu_install")
    subprocess.check_call(["./runConfigureICU", "Linux", "--prefix=" + INSTALL_DIR], cwd=ICU_DIR)
    subprocess.check_call(["make", "-j4"], cwd=ICU_DIR)
    subprocess.check_call(["make", "install"], cwd=ICU_DIR)

    url = "https://pypi.python.org/packages/source/P/PyICU/PyICU-1.0.1.tar.gz"
    subprocess.check_call(["wget", url], cwd=SRC_DIR)
    subprocess.check_call(["tar", "-zxf", "PyICU-1.0.1.tar.gz"], cwd=SRC_DIR)
@@ -37,9 +37,14 @@ def install_and_test_pyicu():
    env["LD_LIBRARY_PATH"] = LIB_DIR
    subprocess.check_call([PYTHON_EXE, "setup.py", "build"], cwd=PYICU_DIR, env=env)
    subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=PYICU_DIR, env=env)

    expected = [{'ran': 17}]
-    run_test([PYTHON_EXE, "setup.py", "test"], cwd=PYICU_DIR, expected=expected)
+    expected_log_hash = '''
+gAAAAQAAABQAACBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARAAAAAAAAAAAAAAAIAIAAgAAAAAAA
+AAAAAAgAAAACAAAAAAAAAAIAAAiAAAgAAQQAAAAAABAAAIEBAAAAAAAAACAAAAAAAAAAIIAIAAAA
+AAAAAAAAAAAAAAACgAA=
+'''
+    run_test([PYTHON_EXE, "setup.py", "test"], cwd=PYICU_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, None, force_create=True)
install_and_test_pyicu()

test/extra/pylons_test.py
@@ -18,12 +18,17 @@ def install_and_test_pylons():
    PYLONS_DIR = os.path.abspath(os.path.join(SRC_DIR, "Pylons-0.9.6.2"))
    subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=PYLONS_DIR)

    # most of the errors are because of our coerceUnicodeToStr which raises a TypeError instead of a UnicodeError
    # but as we don't support the unicode string correctly every where I don't want to change this currently.
    expected = [{"ran": 50, "errors": 7}]
-    run_test([NOSE_EXE], cwd=PYLONS_DIR, expected=expected)
+    expected_log_hash = '''
+wLKBAAEAEQAABEAgAAUAYBABtBACiIFIAoAIIAiAYAIUBADgCOIAggAIBACQCAgIgAGBgCAsAIAB
+FCIAQAAQAQQAmQoAAACEMQAiAaIAFIgAEEAAAUgAAGAIQAEAAEBQQABQAEAAAAAAAiEiIEAAAEIC
+ECBAiigwIAAABAQIAQE=
+'''
+    run_test([NOSE_EXE], cwd=PYLONS_DIR, expected=expected, expected_log_hash=expected_log_hash)

pkg = ["Mako==1.0.3",
       "decorator==4.0.9",
       "simplejson==3.8.2",

test/extra/pyopenssl_test.py
@@ -25,4 +25,9 @@ subprocess.check_call(["sed", "-i", 's/\\(def test_export_text.*\\)/\\1\\n
print os.path.join(PYOPENSSL_DIR, "test", "test_crypto.py")

expected = [{'ran': 438}]
-run_test([NOSETESTS_EXE], cwd=PYOPENSSL_DIR, expected=expected)
+expected_log_hash = '''
+gAAAAAAAAAAAAAAgAgAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAIAAAAAAgAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAEAAAAAAAA
+AAAAACAAAAAAgAAAAAA=
+'''
+run_test([NOSETESTS_EXE], cwd=PYOPENSSL_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/routes_test.py
@@ -13,4 +13,9 @@ create_virtenv(ENV_NAME, pkg, force_create = True)
ROUTES_DIR = os.path.abspath(os.path.join(SRC_DIR, "routes"))

expected = [{"ran": 141}]
-run_test([PYTHON_EXE, "setup.py", "test"], cwd=ROUTES_DIR, expected=expected)
+expected_log_hash = '''
+ggSACQMCAQZABAAUIIMCEABCAAAgQAACDAUABAKgAAgEAAECIAAgIAgBABQICCDoIkNQgQAAIQQE
+xACSGAIAoIAAAgAQAQEIAAAQAFhWACgBBHAEYAAgIBQAUGAAAAIABCAEQEgAAAFAACAAAo5EgBAA
+AAAQCSBIURAAwDCAkQA=
+'''
+run_test([PYTHON_EXE, "setup.py", "test"], cwd=ROUTES_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/sass_test.py
@@ -15,5 +15,9 @@ packages += ["-e", "git+https://github.com/dahlia/libsass-python@0.8.3#egg=libsa
create_virtenv(ENV_NAME, packages, force_create=True)

expected = [{'ran': 75}]
-run_test([PYTHON_EXE, "setup.py", "test"], cwd=SASS_DIR, expected=expected)
+expected_log_hash = '''
+wEAIQAAAAAQEhBAAAAgQIF0QAAAQQAAAQigEAAwwABACAARUEBEAACTAAAQAEAAIAABJgQAEACAC
+AEAAUQQACBAAAAEBBABAARAAAQgCAHAWBQQAAABkaDABQAAYYAAEJtgCAIAgcoASgAwgUAAIRAQR
+QAQgIAQACCAgFEDEiFA=
+'''
+run_test([PYTHON_EXE, "setup.py", "test"], cwd=SASS_DIR, expected=expected, expected_log_hash=expected_log_hash)

test/extra/simplejson_test.py
@@ -10,7 +10,7 @@ PYTHON_EXE = os.path.abspath(os.path.join(ENV_NAME, "bin", "python"))
def install_and_test_simplejson():
    shutil.rmtree(SRC_DIR, ignore_errors=True)
    os.makedirs(SRC_DIR)
    url = "https://pypi.python.org/packages/source/s/simplejson/simplejson-2.6.2.tar.gz"
    subprocess.check_call(["wget", url], cwd=SRC_DIR)
    subprocess.check_call(["tar", "-zxf", "simplejson-2.6.2.tar.gz"], cwd=SRC_DIR)
@@ -18,9 +18,15 @@ def install_and_test_simplejson():
    subprocess.check_call([PYTHON_EXE, "setup.py", "build"], cwd=SIMPLEJSON_DIR)
    subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=SIMPLEJSON_DIR)

    expected = [{'ran': 170}]
-    run_test([PYTHON_EXE, "setup.py", "test"], cwd=SIMPLEJSON_DIR, expected=expected)
+    expected_log_hash = '''
+gAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAA
+AAAAAAAAAAAAAAAAAEAAAAQAAAgAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAIAAAAAAAAAAAAAAAA=
+'''
+    run_test([PYTHON_EXE, "setup.py", "test"], cwd=SIMPLEJSON_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, None, force_create=True)
install_and_test_simplejson()

test/extra/unidecode_test.py
@@ -20,7 +20,12 @@ def install_and_test_unidecode():
    subprocess.check_call([PYTHON_EXE, "setup.py", "install"], cwd=UNIDECODE_DIR)

    expected = [{'ran': 8}]
-    run_test([PYTHON_EXE, "setup.py", "test"], cwd=UNIDECODE_DIR, expected=expected)
+    expected_log_hash = '''
+gECAAAAAAAAAAABAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAA
+AAAAGAAAAAAAAAAAAAAAAAQAAggAAAAAAAAAAAAAABAAAAIAAAAAAAAAAAAAAAggAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAA=
+'''
+    run_test([PYTHON_EXE, "setup.py", "test"], cwd=UNIDECODE_DIR, expected=expected, expected_log_hash=expected_log_hash)

create_virtenv(ENV_NAME, None, force_create=True)
install_and_test_unidecode()

test/integration/pycrypto_test.py
@@ -59,7 +59,13 @@ assert enc_data != test_string
assert key.decrypt(enc_data) == test_string

expected = [{'ran': 1891}]
-test_helper.run_test([sys.executable, "setup.py", "test"], pycrypto_dir, expected)
+expected_log_hash = '''
+gAAAAAAAAAAAAABAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAIAAIAgAAAAAAA
+BAABAABAAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAA=
+'''
+test_helper.run_test([sys.executable, "setup.py", "test"], pycrypto_dir, expected, expected_log_hash=expected_log_hash)

print "-- Tests finished"

test/lib/test_helper.py
@@ -57,7 +57,8 @@ def parse_output(output):
        result[-1][res_type] = int(m.group(1))
    return result

-def run_test(cmd, cwd, expected, env = None):
+def run_test(cmd, cwd, expected, expected_log_hash="", env=None):
+    assert isinstance(expected_log_hash, str)
    print "Running", cmd
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=cwd, env=env)
    output, unused_err = process.communicate()

@@ -68,12 +69,114 @@ def run_test(cmd, cwd, expected, env = None):
    print "Return code:", errcode
    assert errcode in (0, 1), "\n\n%s\nTest process crashed" % output

+    expected_log_hash = expected_log_hash.strip()
+    this_log_hash = log_hash(output)
+
+    if expected_log_hash == "":
+        raise Exception("please set the expected log hash:\nexpected_log_hash = '''\n%s\n'''" % (this_log_hash,))

    if expected == result:
        print "Received expected output"
+
+        different = check_hash(output, expected_log_hash)
+        # These checks are useful for making sure that we have the right expected
+        # hashes in our test files, but I don't think it's worth failing the build for them:
+        # assert not different, "expected_log_hash = '''\n%s\n'''" % (this_log_hash,)
+        # assert this_log_hash == expected_log_hash, "expected_log_hash = '''\n%s\n'''" % (this_log_hash,)
    else:
        print >> sys.stderr, '\n'.join(output.split('\n')[-500:])
+        print >> sys.stderr, '\n'
+
+        different = check_hash(output, expected_log_hash)
+
+        print >> sys.stderr, '\n'
        print >> sys.stderr, "WRONG output"
        print >> sys.stderr, "is:", result
        print >> sys.stderr, "expected:", expected
+
+        if not different:
+            print >> sys.stderr, "(log hash can't detect missing lines)"
+        if this_log_hash != expected_log_hash:
+            print >> sys.stderr, "expected_log_hash = '''\n%s\n'''" % (this_log_hash,)

        assert result == expected
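
Note the bootstrap behaviour above: if expected_log_hash is left empty, run_test raises immediately and the exception message contains the freshly computed hash in paste-ready form. A hedged sketch of how a test author would use that (SOME_DIR and expected are placeholders, not from the commit):

# Illustrative only: run once with an empty hash to learn the current value,
# then paste the printed block into the test file.
try:
    run_test([PYTHON_EXE, "setup.py", "test"], cwd=SOME_DIR, expected=expected,
             expected_log_hash="")
except Exception as e:
    print e   # "please set the expected log hash: expected_log_hash = ''' ... '''"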

+# Try to canonicalize the log to remove most spurious differences.
+# We won't be able to get 100% of them, since there will always be differences in the number of
+# python warnings or compiler messages.
+# But try to remove the most egregious things (filename differences, timing differences) so that the output is easier to parse.
+def process_log(log):
+    r = []
+    for l in log.split('\n'):
+        # Remove timing data:
+        l = re.sub("tests in ([\\d\\.])+s", "", l)
+        l = re.sub("in ([\\d\\.])+ seconds", "", l)
+        # Remove filenames:
+        # log = re.sub("/[^ ]*.py:\\d", "", log)
+        # log = re.sub("/[^ ]*.py.*line \\d", "", log)
+        if "http://" not in l:
+            l = re.sub("(^|[\"\'/])/[^ :\"\']*($|[\":\'])", "", l)
+        # Remove pointer ids:
+        l = re.sub('0x([0-9a-f]{8,})', "", l)
+        r.append(l)
+    return r
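
To see what the canonicalization does in practice, here is a standalone sketch that applies two of the same substitutions to invented log lines; only the regexes come from process_log above, the inputs are made up:

# Standalone demo of the substitutions used by process_log (sample lines invented).
import re
line1 = "Ran 370 tests in 1.582s"
line2 = "<connection object at 0x7f3a2b4c5d60>"
line1 = re.sub("tests in ([\\d\\.])+s", "", line1)    # -> "Ran 370 "
line2 = re.sub('0x([0-9a-f]{8,})', "", line2)         # -> "<connection object at >"
print repr(line1), repr(line2)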

+def log_hash(log, nbits=1024):
+    log_lines = process_log(log)
+    bits = [0] * nbits
+    for l in log_lines:
+        bits[hash(l) % nbits] = 1
+    assert sum(bits) < nbits * .67, "hash is very full!"
+
+    l = []
+    for i in xrange(0, nbits, 8):
+        t = 0
+        for j in xrange(8):
+            if bits[i + j]:
+                t += 1 << (7 - j)
+        l.append(chr(t))
+    return ''.join(l).encode('base64').strip()
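
log_hash is effectively a 1024-bit Bloom-filter-style membership bitmap: each canonicalized line sets one bit chosen by Python 2's built-in hash(), and the bitmap is packed into 128 bytes and base64-encoded, which is why every expected_log_hash above is three lines of base64. A sketch of reading a single bit back out of a stored hash, assuming log_hash above is in scope; the sample lines are invented and happen to be unchanged by process_log:

# Sketch: decode a stored hash and test whether a given line's bit is set.
stored = log_hash("test_foo ... ok\ntest_bar ... ok")   # invented two-line log
raw = stored.decode('base64')                           # 128 bytes = 1024 bits
def bit_is_set(blob, line, nbits=1024):
    idx = hash(line) % nbits
    return (ord(blob[idx // 8]) >> (7 - idx % 8)) & 1
print bit_is_set(raw, "test_foo ... ok")        # 1: this line was in the hashed log
print bit_is_set(raw, "a brand new failure")    # almost always 0: flags a new line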

+def check_hash(log, expected_hash):
+    orig_log_lines = log.split('\n')
+    log_lines = process_log(log)
+
+    s = expected_hash.decode('base64')
+    nbits = len(s) * 8
+    bits = [0] * nbits
+    for i in xrange(len(s)):
+        c = ord(s[i])
+        for j in xrange(8):
+            bit = (c >> (7 - j)) & 1
+            if bit:
+                bits[i * 8 + j] = True
+
+    missing = [False] * len(log_lines)
+    for i, l in enumerate(log_lines):
+        if not bits[hash(l) % nbits]:
+            missing[i] = True
+
+    ncontext = 2
+    def ismissing(idx, within):
+        for i in xrange(max(0, idx - within), min(len(log_lines), idx + within + 1)):
+            if missing[i]:
+                return True
+        return False
+
+    different = False
+    for i in xrange(len(log_lines)):
+        if ismissing(i, 0):
+            different = True
+            if orig_log_lines[i] != log_lines[i]:
+                print >> sys.stderr, "\033[30m+ % 4d: %s\033[0m" % (i + 1, orig_log_lines[i])
+                print >> sys.stderr, "+ % 4d: %s" % (i + 1, log_lines[i])
+            else:
+                print >> sys.stderr, "+ % 4d: %s" % (i + 1, orig_log_lines[i])
+        elif ismissing(i, ncontext):
+            print >> sys.stderr, " % 4d: %s" % (i + 1, orig_log_lines[i])
+
+    assert different == any(missing)
+    return any(missing)

tools/tester.py
@@ -535,7 +535,8 @@ def main(orig_dir):
    TEST_DIR = os.path.join(orig_dir, opts.test_dir)
    EXTMODULE_DIR_PYSTON = os.path.abspath(os.path.dirname(os.path.realpath(IMAGE)) + "/test/test_extension/")
-    EXTMODULE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(IMAGE)) + "/test/test_extension/build/lib.linux-x86_64-2.7/")
+    # EXTMODULE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(IMAGE)) + "/test/test_extension/build/lib.linux-x86_64-2.7/")
+    EXTMODULE_DIR = os.path.abspath(orig_dir) + "/test/test_extension/build/lib.linux-x86_64-2.7/"
    patterns = opts.pattern
    IS_OPTIMIZED = int(subprocess.check_output([IMAGE, "-c", 'import sysconfig; print int("-O0" not in sysconfig.get_config_var(\"CFLAGS\"))']))