Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
P
Pyston
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
Analytics
Analytics
Repository
Value Stream
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Commits
Issue Boards
Open sidebar
Boxiang Sun
Pyston
Commits
1d0a3da4
Commit
1d0a3da4
authored
Nov 05, 2014
by
Kevin Modzelewski
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Simple perf tracking support. Should move these into a separate repo
parent
fc712777
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
122 additions
and
6 deletions
+122
-6
docs/PERF_TRACKING.md
docs/PERF_TRACKING.md
+25
-0
tools/measure_perf.py
tools/measure_perf.py
+40
-6
tools/submit.py
tools/submit.py
+57
-0
No files found.
docs/PERF_TRACKING.md
0 → 100644
View file @
1d0a3da4
We're currently trying out
[
codespeed
](
http://github.com/tobami/codespeed
)
for performance tracking.
These instructions are pretty untested.
## Server setup
```
git clone http://github.com/kmod/codespeed --branch pyston
virtualenv codespeed_env
. codespeed_env/bin/activate
cd codespeed
pip install -r requirements.txt
python manage.py syncdb
# create admin user
python manage.py migrate
cp sample_project/deploy/apache-reverseproxy.conf /etc/apache2/sites-available/010-speed.pyston.conf
ln -s /etc/apache2/sites-available/010-speed.pyston.conf /etc/apache2/sites-enabled
# may need:
# a2enmod proxy_http
# service apache2 restart
service apache2 reload
```
Create an "environment" for each computer that it will be run on. Right now the tools are set up to set the environment to the hostname.
tools/measure_perf.py
View file @
1d0a3da4
#!/usr/bin/env python
#!/usr/bin/env python
import
commands
import
os.path
import
subprocess
import
subprocess
import
time
import
time
def run_tests(executables, benchmarks, callback):
    """Time every benchmark under every executable, printing per-run times
    and a per-executable geometric mean.

    executables: sequence of objects with .args (argv prefix list used to
                 launch the interpreter) and .name (label used in reports).
    benchmarks:  sequence of benchmark script paths; each is appended to an
                 executable's args to form the command line.
    callback:    optional callable invoked as callback(executable, benchmark,
                 elapsed_seconds) after each run (used for result
                 submission); pass None to disable.
    """
    times = [[] for _ in executables]
    for benchmark in benchmarks:
        for executable, time_list in zip(executables, times):
            start = time.time()
            # Discard benchmark stdout; only the wall-clock time matters.
            # Fix: use a with-block so the /dev/null handle is closed --
            # the original opened a new handle per run and never closed it.
            with open(os.devnull, 'w') as devnull:
                subprocess.check_call(executable.args + [benchmark],
                                      stdout=devnull)
            elapsed = time.time() - start
            print("%s %s: % 4.1fs" % (executable.name.rjust(15),
                                      benchmark.ljust(35), elapsed))
            time_list.append(elapsed)
            if callback:
                callback(executable, benchmark, elapsed)

    for executable, time_list in zip(executables, times):
        if not time_list:
            # Fix: no benchmarks were run; skip instead of dividing by zero
            # in the geomean exponent below.
            continue
        # Geometric mean: product of the times, then the len-th root.
        product = 1
        for elapsed in time_list:
            product *= elapsed
        geomean = product ** (1.0 / len(time_list))
        print("%s %s: % 4.1fs" % (executable.name.rjust(15),
                                  "geomean".ljust(35), geomean))
class Executable(object):
    """An interpreter under test: the argv prefix used to launch it plus a
    human-readable label used in reports and result submission."""

    def __init__(self, args, name):
        # argv prefix, e.g. ["./pyston_release", "-q"]; the benchmark path
        # is appended per run.
        self.args = args
        # Display/submission label, e.g. "pyston" or "cpython 2.7".
        self.name = name

    def __repr__(self):
        # Improvement: the default <instance at 0x...> repr is unhelpful
        # when these objects show up in logs or assertion messages.
        return "Executable(%r, %r)" % (self.args, self.name)
def
main
():
def
main
():
executables
=
[[
"./pyston_release"
,
"-q"
]]
executables
=
[
Executable
([
"./pyston_release"
,
"-q"
],
"pyston"
)]
RUN_CPYTHON
=
0
if
RUN_CPYTHON
:
executables
.
append
(
Executable
([
"python"
],
"cpython 2.7"
))
DO_SUBMIT
=
1
# if RUN_PYPY:
# executables.append(Executable(["python"], "cpython 2.7"))
benchmarks
=
[]
benchmarks
=
[]
...
@@ -44,7 +62,23 @@ def main():
...
@@ -44,7 +62,23 @@ def main():
"spectral_norm.py"
,
"spectral_norm.py"
,
]]
]]
run_tests
(
executables
,
benchmarks
)
GIT_REV
=
commands
.
getoutput
(
"git rev-parse HEAD"
)
def
submit_callback
(
exe
,
benchmark
,
elapsed
):
benchmark
=
os
.
path
.
basename
(
benchmark
)
assert
benchmark
.
endswith
(
".py"
)
benchmark
=
benchmark
[:
-
3
]
commitid
=
GIT_REV
if
"cpython"
in
exe
.
name
:
commitid
=
"default"
submit
.
submit
(
commitid
=
commitid
,
benchmark
=
benchmark
,
executable
=
exe
.
name
,
value
=
elapsed
)
callback
=
None
if
DO_SUBMIT
:
callback
=
submit_callback
run_tests
(
executables
,
benchmarks
,
callback
)
# Run the benchmark suite only when executed as a script, not on import.
if __name__ == "__main__":
    main()
tools/submit.py
0 → 100644
View file @
1d0a3da4
# Submission library based on the codespeed example:
from
datetime
import
datetime
import
socket
import
urllib
import
urllib2
# You need to enter the real URL and have the server running
CODESPEED_URL
=
'http://speed.pyston.org/'
def
_formdata
(
commitid
,
benchmark
,
executable
,
value
):
hostname
=
socket
.
gethostname
()
if
"cpython"
in
executable
.
lower
():
project
=
"CPython"
else
:
project
=
"Pyston"
# Mandatory fields
data
=
{
'commitid'
:
commitid
,
'branch'
:
'default'
,
# Always use default for trunk/master/tip
'project'
:
project
,
'executable'
:
executable
,
'benchmark'
:
benchmark
,
'environment'
:
hostname
,
'result_value'
:
value
,
}
"""
# Optional fields
current_date = datetime.today()
data.update({
'revision_date': current_date, # Optional. Default is taken either
# from VCS integration or from current date
'result_date': current_date, # Optional, default is current date
})
"""
return
data
def submit(commitid, benchmark, executable, value):
    """POST one benchmark result to the codespeed server.

    Prints the server response on success.  On an HTTP error, prints the
    error and the server's error body, then re-raises so the caller sees
    the failure.
    """
    data = _formdata(commitid, benchmark, executable, value)
    params = urllib.urlencode(data)
    # Fix: dropped the dead `response = "None"` sentinel -- it was either
    # overwritten by f.read() or unreachable after the re-raise.
    print("Saving result for executable %s, revision %s, benchmark %s" % (
        data['executable'], data['commitid'], data['benchmark']))
    try:
        f = urllib2.urlopen(CODESPEED_URL + 'result/add/', params)
    except urllib2.HTTPError as e:
        # Surface the server's explanation before propagating the failure.
        print(str(e))
        print(e.read())
        raise
    response = f.read()
    f.close()
    print("Server (%s) response: %s\n" % (CODESPEED_URL, response))
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment