Eteri / erp5 / Commits / 31804f68

Commit 31804f68, authored Sep 13, 2018 by Bryton Lacquement,
committed by Julien Muchembled on Jul 31, 2019
erp5.util: add support for Python 3

/reviewed-on nexedi/erp5!830

Parent: 5abb074d
Showing 23 changed files with 307 additions and 259 deletions (+307 -259)
erp5/tests/testERP5TestNode.py                     +63 -64
erp5/util/benchmark/performance_tester.py           +3 -2
erp5/util/benchmark/report.py                       +4 -5
erp5/util/benchmark/scalability_tester.py           +2 -1
erp5/util/scalability/requestUrl.py                 +2 -2
erp5/util/scalability/runScalabilityTestSuite.py   +11 -22
erp5/util/taskdistribution/__init__.py             +16 -6
erp5/util/testbrowser/examples/createERP5User.py    +3 -5
erp5/util/testbrowser/examples/createPerson.py      +2 -1
erp5/util/testnode/NodeTestSuite.py                 +5 -3
erp5/util/testnode/ProcessManager.py                +8 -7
erp5/util/testnode/ScalabilityTestRunner.py        +15 -16
erp5/util/testnode/SlapOSControler.py               +7 -5
erp5/util/testnode/SlapOSMasterCommunicator.py     +12 -6
erp5/util/testnode/Updater.py                       +4 -3
erp5/util/testnode/Utils.py                        +26 -11
erp5/util/testnode/__init__.py                      +2 -2
erp5/util/testnode/testnode.py                      +1 -1
erp5/util/testsuite/__init__.py                    +38 -33
erp5/util/timinglogparser/__init__.py              +35 -34
erp5/util/timinglogplotter/__init__.py             +11 -9
erp5/util/webchecker/__init__.py                    +8 -7
product/ERP5/bin/genbt5list                        +29 -14
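The files below all follow the same single-codebase compatibility approach rather than a hard port: __future__ imports for the print function and true division, six/six.moves for renamed standard-library modules and iterator helpers, and "except ... as" syntax. A minimal sketch of those idioms (the module and function names here are illustrative, not taken from the commit):

    # Minimal sketch of the compatibility idioms used throughout this commit.
    from __future__ import print_function

    import sys

    import six
    from six.moves import configparser, range  # renamed / removed stdlib names

    def report_errors(messages):
        # print() with file= replaces "print >>sys.stderr, ..." from Python 2.
        for message in messages:
            print("ERROR: %s" % message, file=sys.stderr)

    def read_config(path):
        config = configparser.RawConfigParser()
        try:
            config.read(path)
        except configparser.Error as e:  # "except E, e" is Python 2 only
            report_errors([e])
        return config

    for _ in range(3):  # six.moves.range is xrange on Python 2, range on 3
        pass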
erp5/tests/testERP5TestNode.py

@@ -23,7 +23,6 @@ import sys
 import tempfile
 import json
 import time
 import types
 import re
 @contextmanager

@@ -144,7 +143,8 @@ class ERP5TestNode(TestCase):
        self.__dict__.update(**kw)
      def __call__(self, command):
-       return subprocess.check_output(command, **self.__dict__)
+       return subprocess.check_output(command, universal_newlines=True,
+                                      **self.__dict__)
    return Caller(**kw)
  def generateTestRepositoryList(self, add_third_repository=False):

@@ -172,10 +172,10 @@ class ERP5TestNode(TestCase):
      output = call(['git', 'log', '--format=%H %s'])
      output = output.strip()
      output_line_list = output.split("\n")
-     self.assertEquals(2, len(output_line_list))
+     self.assertEqual(2, len(output_line_list))
      expected_commit_subject_list = ["next_commit", "first_commit"]
      commit_subject_list = [x.split()[1] for x in output_line_list]
-     self.assertEquals(expected_commit_subject_list, commit_subject_list)
+     self.assertEqual(expected_commit_subject_list, commit_subject_list)
      commit_dict['rep%i' % i] = [x.split() for x in output_line_list]
      if repository_path == self.remote_repository2:
        output = call('git checkout master -b foo'.split())

@@ -192,13 +192,13 @@ class ERP5TestNode(TestCase):
    """
    test_node = self.getTestNode()
    node_test_suite = test_node.getNodeTestSuite('foo')
-   self.assertEquals(0, node_test_suite.retry_software_count)
+   self.assertEqual(0, node_test_suite.retry_software_count)
    node_test_suite.retry_software_count = 2
    self.assertIs(node_test_suite, test_node.getNodeTestSuite('foo'))
-   self.assertEquals(2, node_test_suite.retry_software_count)
+   self.assertEqual(2, node_test_suite.retry_software_count)
    del test_node.node_test_suite_dict['foo']
    node_test_suite = test_node.getNodeTestSuite('foo')
-   self.assertEquals(0, node_test_suite.retry_software_count)
+   self.assertEqual(0, node_test_suite.retry_software_count)
  def test_02_NodeTestSuiteWorkingDirectory(self):
    """

@@ -206,9 +206,9 @@ class ERP5TestNode(TestCase):
    """
    test_node = self.getTestNode()
    node_test_suite = test_node.getNodeTestSuite('foo')
-   self.assertEquals("%s/foo" % self.working_directory,
+   self.assertEqual("%s/foo" % self.working_directory,
                      node_test_suite.working_directory)
-   self.assertEquals("%s/foo/test_suite" % self.working_directory,
+   self.assertEqual("%s/foo/test_suite" % self.working_directory,
                      node_test_suite.test_suite_directory)
  def test_03_NodeTestSuiteCheckDataAfterEdit(self):

@@ -219,13 +219,13 @@ class ERP5TestNode(TestCase):
    test_node = self.getTestNode()
    node_test_suite = test_node.getNodeTestSuite('foo')
    self.updateNodeTestSuiteData(node_test_suite)
-   self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+   self.assertEqual(2, len(node_test_suite.vcs_repository_list))
    repository_path_list = []
    for vcs_repository in node_test_suite.vcs_repository_list:
      repository_path_list.append(vcs_repository['repository_path'])
    expected_list = ["%s/rep0" % node_test_suite.working_directory,
                     "%s/rep1" % node_test_suite.working_directory]
-   self.assertEquals(expected_list, repository_path_list)
+   self.assertEqual(expected_list, repository_path_list)
  def test_04_constructProfile(self, my_test_type='UnitTest'):
    """

@@ -239,7 +239,7 @@ class ERP5TestNode(TestCase):
    node_test_suite.revision_list = (('rep1', (1234, 'azerty')),
                                     ('rep2', (3456, 'qwerty')))
    test_node.constructProfile(node_test_suite, my_test_type)
-   self.assertEquals("%s/software.cfg" % (node_test_suite.working_directory,),
+   self.assertEqual("%s/software.cfg" % (node_test_suite.working_directory,),
                      node_test_suite.custom_profile_path)
    profile = open(node_test_suite.custom_profile_path, 'r')
    if my_test_type == 'UnitTest':

@@ -282,7 +282,7 @@ ignore-ssl-certificate = true
develop = false
shared = true
    """ % {'temp_dir': self._temp_dir, 'revision1': revision1,
           'revision2': revision2}
-   self.assertEquals(expected_profile, profile.read())
+   self.assertEqual(expected_profile, profile.read())
    profile.close()
  def getAndUpdateFullRevisionList(self, test_node, node_test_suite):

@@ -298,9 +298,9 @@ shared = true
    node_test_suite = test_node.getNodeTestSuite('foo')
    self.updateNodeTestSuiteData(node_test_suite)
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-   self.assertEquals(2, len(rev_list))
-   self.assertEquals(rev_list[0], 'rep0=2-%s' % commit_dict['rep0'][0][0])
-   self.assertEquals(rev_list[1], 'rep1=2-%s' % commit_dict['rep1'][0][0])
+   self.assertEqual(2, len(rev_list))
+   self.assertEqual(rev_list[0], 'rep0=2-%s' % commit_dict['rep0'][0][0])
+   self.assertEqual(rev_list[1], 'rep1=2-%s' % commit_dict['rep1'][0][0])
    my_file = open(os.path.join(self.remote_repository1, 'first_file'), 'w')
    my_file.write("next_content")
    my_file.close()

@@ -309,7 +309,7 @@ shared = true
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
    self.assertTrue(rev_list[0].startswith('rep0=2-'))
    self.assertTrue(rev_list[1].startswith('rep1=3-'))
-   self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+   self.assertEqual(2, len(node_test_suite.vcs_repository_list))
    for vcs_repository in node_test_suite.vcs_repository_list:
      self.assertTrue(os.path.exists(vcs_repository['repository_path']))

@@ -323,8 +323,8 @@ shared = true
    node_test_suite = test_node.getNodeTestSuite('foo')
    self.updateNodeTestSuiteData(node_test_suite, add_third_repository=True)
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-   self.assertEquals(3, len(rev_list))
-   self.assertEquals(3, len(node_test_suite.vcs_repository_list))
+   self.assertEqual(3, len(rev_list))
+   self.assertEqual(3, len(node_test_suite.vcs_repository_list))
    rep2_clone_path = [x['repository_path'] for x in \
                       node_test_suite.vcs_repository_list \
                       if x['repository_path'].endswith("rep2")][0]

@@ -332,13 +332,13 @@ shared = true
    output = call("git branch".split()).strip()
    self.assertTrue("* foo" in output.split('\n'))
    vcs_repository_info = node_test_suite.vcs_repository_list[0]
-   self.assertEquals(vcs_repository_info['repository_id'], 'rep2')
-   self.assertEquals(vcs_repository_info['branch'], 'foo')
+   self.assertEqual(vcs_repository_info['repository_id'], 'rep2')
+   self.assertEqual(vcs_repository_info['branch'], 'foo')
    # change it to master
    vcs_repository_info['branch'] = 'master'
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
    output = call("git branch".split()).strip()
-   print output
+   print(output)
    self.assertTrue("* master" in output.split('\n'))
    # Add a third branch on remote, make sure we could switch to it
    remote_call = self.getCaller(cwd=self.remote_repository2)

@@ -368,8 +368,8 @@ shared = true
    node_test_suite = test_node.getNodeTestSuite('foo')
    self.updateNodeTestSuiteData(node_test_suite)
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-   self.assertEquals(2, len(rev_list))
-   self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+   self.assertEqual(2, len(rev_list))
+   self.assertEqual(2, len(node_test_suite.vcs_repository_list))
    # patch deleteRepository to make sure it will be called once for the wrong
    # repos, and not for the repos which has not changed
    deleted_repository_path_list = []

@@ -386,12 +386,12 @@ shared = true
                         node_test_suite.vcs_repository_list \
                         if x['repository_path'].endswith("rep0")][0]
      call = self.getCaller(cwd=rep0_clone_path)
-     self.assertEquals(call("git config --get remote.origin.url".split()).strip(),
+     self.assertEqual(call("git config --get remote.origin.url".split()).strip(),
                        self.remote_repository0)
      rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-     self.assertEquals(call("git config --get remote.origin.url".split()).strip(),
+     self.assertEqual(call("git config --get remote.origin.url".split()).strip(),
                        self.remote_repository2)
-     self.assertEquals([rep0_clone_path], deleted_repository_path_list)
+     self.assertEqual([rep0_clone_path], deleted_repository_path_list)
    finally:
      Updater.deleteRepository = original_deleteRepository

@@ -407,8 +407,8 @@ shared = true
    node_test_suite = test_node.getNodeTestSuite('foo')
    self.updateNodeTestSuiteData(node_test_suite)
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-   self.assertEquals(2, len(rev_list))
-   self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+   self.assertEqual(2, len(rev_list))
+   self.assertEqual(2, len(node_test_suite.vcs_repository_list))
    rep0_clone_path = [x['repository_path'] for x in \
                       node_test_suite.vcs_repository_list \
                       if x['repository_path'].endswith("rep0")][0]

@@ -457,10 +457,9 @@ shared = true
                       node_test_suite.vcs_repository_list \
                       if x['repository_path'].endswith("rep0")][0]
    # simulate a data corruption on rep0's index
-   index_file = open(os.path.join(rep0_clone_path, '.git', 'index'), 'a')
-   index_file.seek(10, os.SEEK_END)
-   index_file.truncate()
-   index_file.close()
+   with open(os.path.join(rep0_clone_path, '.git', 'index'), 'ab') as index_file:
+     index_file.seek(10, os.SEEK_END)
+     index_file.truncate()
    # we get rev list with corrupted repository, we get None, but in the same
    # time the bad repository is deleted
    rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)

@@ -490,8 +489,8 @@ shared = true
        info_list.append(call("git log -n1 --format=%H".split()).strip())
      return info_list
-   self.assertEquals(['2', '2'], getRepInfo(count=1))
-   self.assertEquals([commit_dict['rep0'][0][0], commit_dict['rep1'][0][0]],
+   self.assertEqual(['2', '2'], getRepInfo(count=1))
+   self.assertEqual([commit_dict['rep0'][0][0], commit_dict['rep1'][0][0]],
                      getRepInfo(hash=1))
    class TestResult(object):
      revision = NodeTestSuite.revision

@@ -501,25 +500,25 @@ shared = true
    test_result.revision_list = (('rep0', (2, commit_dict['rep0'][0][0])),
                                 ('rep1', (1, commit_dict['rep1'][1][0])))
    test_node.checkRevision(test_result, node_test_suite)
-   self.assertEquals(['2', '1'], getRepInfo(count=1))
-   self.assertEquals([commit_dict['rep0'][0][0], commit_dict['rep1'][1][0]],
+   self.assertEqual(['2', '1'], getRepInfo(count=1))
+   self.assertEqual([commit_dict['rep0'][0][0], commit_dict['rep1'][1][0]],
                      getRepInfo(hash=1))
  def test_07_checkExistingTestSuite(self):
    test_node = self.getTestNode()
    test_suite_data = self.getTestSuiteData(add_third_repository=True)
-   self.assertEquals([], os.listdir(self.working_directory))
+   self.assertEqual([], os.listdir(self.working_directory))
    test_node.purgeOldTestSuite(test_suite_data)
-   self.assertEquals([], os.listdir(self.working_directory))
+   self.assertEqual([], os.listdir(self.working_directory))
    os.mkdir(os.path.join(self.working_directory, 'foo'))
-   self.assertEquals(['foo'], os.listdir(self.working_directory))
+   self.assertEqual(['foo'], os.listdir(self.working_directory))
    test_node.purgeOldTestSuite(test_suite_data)
-   self.assertEquals(['foo'], os.listdir(self.working_directory))
+   self.assertEqual(['foo'], os.listdir(self.working_directory))
    os.mkdir(os.path.join(self.working_directory, 'bar'))
-   self.assertEquals(set(['bar', 'foo']),
+   self.assertEqual(set(['bar', 'foo']),
                      set(os.listdir(self.working_directory)))
    test_node.purgeOldTestSuite(test_suite_data)
-   self.assertEquals(['foo'], os.listdir(self.working_directory))
+   self.assertEqual(['foo'], os.listdir(self.working_directory))
  def test_purgeOldTestSuiteChmodNonWriteable(self):
    """Old test suites can be deleted even when some files/directories have

@@ -633,11 +632,11 @@ shared = true
    method_list_for_prepareSlapOSForTestSuite = ["initializeSlapOSControler",
      "runSoftwareRelease", "runComputerPartition"]
    runner.prepareSlapOSForTestNode(test_node_slapos)
-   self.assertEquals(method_list_for_prepareSlapOSForTestNode,
+   self.assertEqual(method_list_for_prepareSlapOSForTestNode,
                      [x["method_name"] for x in call_list])
    call_list = []
    runner.prepareSlapOSForTestSuite(node_test_suite)
-   self.assertEquals(method_list_for_prepareSlapOSForTestSuite,
+   self.assertEqual(method_list_for_prepareSlapOSForTestSuite,
                      [x["method_name"] for x in call_list])
    call_list = []
    SlapOSControler.runSoftwareRelease = Patch("runSoftwareRelease", status_code=1)

@@ -681,7 +680,7 @@ shared = true
      return json.dumps([])
    def _checkExistingTestSuite(reference_set):
-     test_self.assertEquals(set(reference_set),
+     test_self.assertEqual(set(reference_set),
                             set(os.listdir(test_node.working_directory)))
      for x in reference_set:
        test_self.assertTrue(os.path.exists(os.path.join(

@@ -761,7 +760,7 @@ shared = true
    SlapOSControler.initializeSlapOSControler = doNothing
    # Inside test_node a runner is created using new UnitTestRunner methods
    test_node.run()
-   self.assertEquals(5, counter)
+   self.assertEqual(5, counter)
    time.sleep = original_sleep
    # Restore old class methods
    if my_test_type == "ScalabilityTest":

@@ -797,23 +796,23 @@ shared = true
    file_name = 'AC_Ra\xc3\xadzertic\xc3\xa1ma'
    non_ascii_file = open(os.path.join(controler.software_root, file_name), 'w')
    non_ascii_file.close()
-   self.assertEquals([file_name], os.listdir(controler.software_root))
+   self.assertEqual([file_name], os.listdir(controler.software_root))
    controler._resetSoftware()
-   self.assertEquals([], os.listdir(controler.software_root))
+   self.assertEqual([], os.listdir(controler.software_root))
  def test_14_createFolder(self):
    test_node = self.getTestNode()
    node_test_suite = test_node.getNodeTestSuite('foo')
    folder = node_test_suite.test_suite_directory
-   self.assertEquals(False, os.path.exists(folder))
+   self.assertFalse(os.path.exists(folder))
    createFolder(folder)
-   self.assertEquals(True, os.path.exists(folder))
+   self.assertTrue(os.path.exists(folder))
    to_drop_path = os.path.join(folder, 'drop')
    to_drop = open(to_drop_path, 'w')
    to_drop.close()
-   self.assertEquals(True, os.path.exists(to_drop_path))
+   self.assertTrue(os.path.exists(to_drop_path))
    createFolder(folder, clean=True)
-   self.assertEquals(False, os.path.exists(to_drop_path))
+   self.assertFalse(os.path.exists(to_drop_path))
  def test_15_suite_log_directory(self, my_test_type='UnitTest', grade='master'):
    def doNothing(self, *args, **kw):

@@ -861,7 +860,7 @@ shared = true
    def checkTestSuite(test_node):
      test_node.node_test_suite_dict
      rand_part_set = set()
-     self.assertEquals(2, len(test_node.node_test_suite_dict))
+     self.assertEqual(2, len(test_node.node_test_suite_dict))
      for ref, suite in test_node.node_test_suite_dict.items():
        self.assertTrue('var/log/testnode/%s' % suite.reference in \
                        suite.suite_log_path,

@@ -925,7 +924,7 @@ shared = true
    RunnerClass._prepareSlapOS = patch_prepareSlapOS
    SlapOSControler.initializeSlapOSControler = doNothing
    test_node.run()
-   self.assertEquals(counter, 3)
+   self.assertEqual(counter, 3)
    checkTestSuite(test_node)
    time.sleep = original_sleep
    # Restore old class methods

@@ -1021,18 +1020,18 @@ shared = true
    def callRaisingPrepareSlapos():
      self.assertRaises(SubprocessError, callPrepareSlapOS)
-   self.assertEquals(node_test_suite.retry_software_count, 0)
-   for x in xrange(0, 11):
+   self.assertEqual(node_test_suite.retry_software_count, 0)
+   for x in range(11):
      callRaisingPrepareSlapos()
-   self.assertEquals(len(init_call_kw_list), 11)
-   self.assertEquals(init_call_kw_list[-1]['reset_software'], False)
-   self.assertEquals(node_test_suite.retry_software_count, 11)
+   self.assertEqual(len(init_call_kw_list), 11)
+   self.assertEqual(init_call_kw_list[-1]['reset_software'], False)
+   self.assertEqual(node_test_suite.retry_software_count, 11)
    callRaisingPrepareSlapos()
-   self.assertEquals(init_call_kw_list[-1]['reset_software'], True)
-   self.assertEquals(node_test_suite.retry_software_count, 1)
+   self.assertEqual(init_call_kw_list[-1]['reset_software'], True)
+   self.assertEqual(node_test_suite.retry_software_count, 1)
    callRaisingPrepareSlapos()
-   self.assertEquals(init_call_kw_list[-1]['reset_software'], False)
-   self.assertEquals(node_test_suite.retry_software_count, 2)
+   self.assertEqual(init_call_kw_list[-1]['reset_software'], False)
+   self.assertEqual(node_test_suite.retry_software_count, 2)
    SlapOSControler.initializeSlapOSControler = \
      initial_initializeSlapOSControler
    SlapOSControler.runSoftwareRelease = initial_runSoftwareRelease
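Most of the churn in this test module is mechanical: the deprecated assertEquals alias becomes assertEqual, and subprocess.check_output gains universal_newlines=True so it returns text rather than bytes on Python 3. A small self-contained sketch of both changes (the test case below is hypothetical and assumes git is on PATH):

    import subprocess
    import unittest

    class GitVersionTest(unittest.TestCase):
        def test_version_output(self):
            # universal_newlines=True makes check_output return str instead of
            # bytes on Python 3, so .split("\n") keeps working unchanged.
            output = subprocess.check_output(['git', '--version'],
                                             universal_newlines=True)
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(1, len(output.strip().split("\n")))

    if __name__ == '__main__':
        unittest.main()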

erp5/util/benchmark/performance_tester.py

@@ -28,6 +28,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 import argparse
 import os
 import sys

@@ -264,7 +265,7 @@ class PerformanceTester(object):
          error_message = exit_msg_queue.get()
        except KeyboardInterrupt, e:
-         print >>sys.stderr, "\nInterrupted by user, stopping gracefully..."
+         print("\nInterrupted by user, stopping gracefully...", file=sys.stderr)
          exit_status = 2
        # An IOError may be raised when receiving a SIGINT which interrupts the

@@ -337,7 +338,7 @@ class PerformanceTester(object):
 def main():
   error_message_set, exit_status = PerformanceTester().run()
   for error_message in error_message_set:
-    print >>sys.stderr, "ERROR: %s" % error_message
+    print("ERROR: %s" % error_message, file=sys.stderr)
   sys.exit(exit_status)
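The recurring "print >>sys.stderr, ..." statements are Python 2 only syntax; with "from __future__ import print_function" they become function calls with a file= argument, which is what the hunks above and in the following files do. A minimal sketch:

    from __future__ import print_function
    import sys

    # Python 2 only:   print >>sys.stderr, "ERROR: %s" % message
    # Python 2 and 3:
    message = "example failure"
    print("ERROR: %s" % message, file=sys.stderr)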

erp5/util/benchmark/report.py

@@ -31,6 +31,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 import argparse
 import re

@@ -537,7 +538,7 @@ def generateReport():
   for filename in filename_iter:
     # There may be no results at all in case of errors
     if not os.stat(filename).st_size:
-      print >>sys.stderr, "Ignoring empty file %s" % filename
+      print("Ignoring empty file %s" % filename, file=sys.stderr)
       continue
     report_dict = per_nb_users_report_dict.setdefault(

@@ -546,10 +547,8 @@ def generateReport():
     report_dict['filename'].append(filename)
   if not per_nb_users_report_dict:
-    print >>sys.stderr, "ERROR: No result file found, perhaps " \
-        "``--filename-prefix'' should be specified?"
-    sys.exit(1)
+    sys.exit("ERROR: No result file found, perhaps ``--filename-prefix'' should"
+             "be specified?")
   pdf = PdfPages(argument_namespace.output_filename)

erp5/util/benchmark/scalability_tester.py

@@ -28,6 +28,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 from .result import CSVBenchmarkResult, NothingFlushedException
 class CSVScalabilityBenchmarkResult(CSVBenchmarkResult):

@@ -60,7 +61,7 @@ class ScalabilityTester(PerformanceTester):
        urllib.urlencode({'error_message_set': '|'.join(error_message_set)})).close()
     except:
-      print >>sys.stderr, "ERROR: %s" % Formatter().formatException(sys.exc_info())
+      print("ERROR: %s" % Formatter().formatException(sys.exc_info()),
+            file=sys.stderr)
   def getResultClass(self):
     if not self._argument_namespace.erp5_publish_url:

erp5/util/scalability/requestUrl.py

@@ -27,7 +27,7 @@ def main():
   if error_message_set:
     exit_status = 1
     for error in error_message_set:
-      print error
+      print(error)
   elif result:
-    print result
+    print(result)
   sys.exit(exit_status)

erp5/util/scalability/runScalabilityTestSuite.py

@@ -5,23 +5,15 @@ import os
 import shutil
 import time
 import sys
 import multiprocessing
 import signal
 import errno
 import json
 import logging
 import logging.handlers
 import glob
 import urlparse
 import httplib
 import base64
 import threading
 from erp5.util.benchmark.argument import ArgumentType
 from erp5.util.benchmark.performance_tester import PerformanceTester
 from erp5.util.benchmark.thread import TestThread, TestMetricThread
 from erp5.util import taskdistribution
 from erp5.util.testnode import Utils
-from erp5.util.testnode.ProcessManager import SubprocessError, ProcessManager, CancellationError
+from erp5.util.testnode.ProcessManager import ProcessManager
 import datetime
 MAX_INSTALLATION_TIME = 60 * 50

@@ -179,31 +171,28 @@ class ScalabilityLauncher(object):
    """
    data_array = self.__argumentNamespace.current_test_data.split(',')
    data = json.dumps({"count": data_array[0],
                       "title": data_array[1],
                       "relative_path": data_array[2]})
-   decoded_data = Utils.deunicodeData(json.loads(data))
-   return ScalabilityTest(decoded_data, self.test_result)
+   encoded_data = Utils.deunicodeData(json.loads(data))
+   return ScalabilityTest(encoded_data, self.test_result)
  def clearUsersFile(self, user_file_path):
    self.log("Clearing users file: %s" % user_file_path)
    os.remove(user_file_path)
-   users_file = open(user_file_path, "w")
-   for line in self.users_file_original_content:
-     users_file.write(line)
-   users_file.close()
+   with open(user_file_path, "w") as users_file:
+     for line in self.users_file_original_content:
+       users_file.write(line)
  def updateUsersFile(self, user_quantity, password, user_file_path):
    self.log("Updating users file: %s" % user_file_path)
-   users_file = open(user_file_path, "r")
-   file_content = users_file.readlines()
+   with open(user_file_path, "r") as users_file:
+     file_content = users_file.readlines()
    self.users_file_original_content = file_content
    new_file_content = []
    for line in file_content:
      new_file_content.append(line.replace('<password>', password)
        .replace('<user_quantity>', str(user_quantity)))
-   users_file.close()
    os.remove(user_file_path)
-   users_file = open(user_file_path, "w")
-   for line in new_file_content:
-     users_file.write(line)
-   users_file.close()
+   with open(user_file_path, "w") as users_file:
+     for line in new_file_content:
+       users_file.write(line)
  def run(self):
    self.log("Scalability Launcher started, with:")

erp5/util/taskdistribution/__init__.py

@@ -40,11 +40,15 @@ Example use:
     test_line.stop()
 """
+from __future__ import print_function
-import httplib
+import six
+from six.moves import (
+  map,
+  http_client as httplib,
+  xmlrpc_client as xmlrpclib,
+)
 import socket
 import threading
 import time
-import xmlrpclib
 __all__ = ['TaskDistributor', 'TestResultProxy', 'TestResultLineProxy', 'patchRPCParser']

@@ -89,11 +93,17 @@ def patchRPCParser(error_handler):
   def verbose_feed(self, data):
     try:
       return original_feed(self, data)
-    except Exception, exc:
+    except Exception as exc:
       if not error_handler(data, exc):
         raise
   parser_klass.feed = verbose_feed
+try: # PY3
+  basestring
+except NameError:
+  basestring = bytes, str
+  unicode = str
 def binarize_args(arg):
   # Converts recursively basestring arg into xmlrpclib.Binary, as they can
   # contain non-XML allowed characters

@@ -102,9 +112,9 @@ def binarize_args(arg):
       arg = arg.encode('utf-8')
     return xmlrpclib.Binary(arg)
   if isinstance(arg, (list, tuple, set)):
-    return map(binarize_args, arg)
+    return list(map(binarize_args, arg))
   if isinstance(arg, dict):
-    return {k: binarize_args(v) for k, v in arg.iteritems()}
+    return {k: binarize_args(v) for k, v in six.iteritems(arg)}
   return arg
 class RPCRetry(object):

@@ -350,7 +360,7 @@ class TestResultProxy(RPCRetry):
       caption_list = []
       append = caption_list.append
       for name, (stream, max_history_bytes) in \
-          self._watcher_dict.iteritems():
+          six.iteritems(self._watcher_dict):
         append('==> %s <==' % (name,))
         start = stream.tell()
         stream.seek(0, 2)
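Two Python 3 behaviour changes drive the binarize_args edits above: map() now returns a lazy iterator, and dict.iteritems() no longer exists. Wrapping map() in list() and going through six.iteritems() keeps the Python 2 semantics on both interpreters. A small sketch (the function names here are illustrative):

    import six

    def stringify(values):
        # map() is a lazy iterator on Python 3; wrap it in list() when the
        # caller expects a real sequence (as XML-RPC marshalling does).
        return list(map(str, values))

    def lowercase_keys(mapping):
        # dict.iteritems() is gone on Python 3; six.iteritems() works on both.
        return {k.lower(): stringify(v) for k, v in six.iteritems(mapping)}

    print(lowercase_keys({'Revision': [1, 2, 3]}))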

erp5/util/testbrowser/examples/createERP5User.py

@@ -8,6 +8,7 @@
 #
 # TODO: There must be a better way than the code below to do that though...
+from __future__ import print_function
 import sys
 from erp5.util.testbrowser.browser import Browser

@@ -19,11 +20,8 @@ try:
   user_nbr = int(user_nbr)
 except ValueError:
-  print >>sys.stderr, "ERROR: Missing arguments: %s URL USERNAME " \
-      "PASSWORD NUMBER_OF_USERS NEW_USERNAME_PREFIX NEW_USERS_PASSWORD" % \
-      sys.argv[0]
-  sys.exit(1)
+  sys.exit("ERROR: Missing arguments: %s URL USERNAME PASSWORD NUMBER_OF_USERS "
+           "NEW_USERNAME_PREFIX NEW_USERS_PASSWORD" % sys.argv[0])
 # Create a browser instance
 browser = Browser(url, username, password)

erp5/util/testbrowser/examples/createPerson.py

 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import division, print_function
 from erp5.util.testbrowser.browser import Browser
 ITERATION = 20

@@ -89,4 +90,4 @@ if __name__ == '__main__':
       counter += 1
   for title, time_list in result_dict.iteritems():
-    print "%s: %.4fs" % (title, float(sum(time_list)) / ITERATION)
+    print("%s: %.4fs" % (title, sum(time_list) / ITERATION))

erp5/util/testnode/NodeTestSuite.py

@@ -32,6 +32,8 @@ import string
 import random
 from .Utils import createFolder
+from six.moves import range
 class SlapOSInstance(object):
   """
   Base of an software instance,

@@ -69,14 +71,14 @@ class NodeTestSuite(SlapOSInstance):
  def createSuiteLog(self):
    # /srv/slapgrid/slappartXX/srv/var/log/testnode/az-D27KqX7FxJ/suite.log
-   alphabets = string.digits + string.letters
+   alphabets = string.digits + string.ascii_letters
    while 1:
      log_folder_name = '%s-%s' % (self.reference,
-       ''.join(random.choice(alphabets) for i in xrange(10)))
+       ''.join(random.choice(alphabets) for i in range(10)))
      log_folder_path = os.path.join(self.log_directory, log_folder_name)
      try:
        os.makedirs(log_folder_path)
-     except OSError, e:
+     except OSError as e:
        if e.errno != errno.EEXIST:
          raise
      else:

erp5/util/testnode/ProcessManager.py

@@ -79,7 +79,8 @@ def subprocess_capture(p, log_prefix, get_output=True):
       break
     if get_output:
       buffer.append(data)
-    log(log_prefix + data.rstrip('\n'))
+    log(log_prefix + (data if str is bytes else
+                      data.decode('utf-8', errors='replace')).rstrip('\n'))
   if p.stdout:
     stdout = []
     stdout_thread = threading.Thread(target=readerthread,

@@ -97,8 +98,8 @@ def subprocess_capture(p, log_prefix, get_output=True):
     stdout_thread.join()
   if p.stderr:
     stderr_thread.join()
-  return (p.stdout and ''.join(stdout), p.stderr and ''.join(stderr))
+  return (p.stdout and b''.join(stdout), p.stderr and b''.join(stderr))
 def killCommand(pid):
   """

@@ -109,7 +110,7 @@ def killCommand(pid):
   try:
     process = psutil.Process(pid)
     process.suspend()
-  except psutil.Error, e:
+  except psutil.Error as e:
     return
   process_list = [process]
   new_list = process.children(recursive=True)

@@ -118,19 +119,19 @@ def killCommand(pid):
   for child in new_list:
     try:
       child.suspend()
-    except psutil.Error, e:
+    except psutil.Error as e:
       logger.debug("killCommand/suspend: %s", e)
   time.sleep(1)
   new_list = set(process.children(recursive=True)).difference(process_list)
   for process in process_list:
     try:
       process.kill()
-    except psutil.Error, e:
+    except psutil.Error as e:
       logger.debug("killCommand/kill: %s", e)
 class ProcessManager(object):
-  stdin = file(os.devnull)
+  stdin = open(os.devnull)
   def __init__(self, max_timeout=MAX_TIMEOUT):
     self.process_pid_set = set()
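On Python 3 a pipe opened by subprocess.Popen yields bytes, so the captured chunks above are joined with b'' and only decoded for logging. A sketch of that pattern (the helper below is illustrative, not this module's API; echo is used only as a harmless child process on POSIX):

    from __future__ import print_function
    import subprocess

    def run_and_log(args, log=print):
        p = subprocess.Popen(args, stdout=subprocess.PIPE)
        chunks = []
        for data in iter(lambda: p.stdout.read(4096), b''):
            chunks.append(data)
            # The pipe yields bytes on Python 3; decode before logging while
            # keeping the Python 2 behaviour (where str is bytes) unchanged.
            log((data if str is bytes else
                 data.decode('utf-8', errors='replace')).rstrip('\n'))
        p.wait()
        # Join with b'' because the accumulated chunks are bytes on Python 3.
        return b''.join(chunks)

    print(run_and_log(['echo', 'hello']))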

erp5/util/testnode/ScalabilityTestRunner.py

@@ -30,30 +30,31 @@ import subprocess
 import sys
 import time
 import glob
-import SlapOSControler
-import SlapOSMasterCommunicator
+from . import SlapOSControler, SlapOSMasterCommunicator
 import json
 import time
 import shutil
 import logging
 import string
 import random
-import urlparse
+from six.moves.urllib.parse import urlparse
 import base64
-import httplib
-import Utils
+from six.moves import http_client as httplib
+from . import Utils
 import requests
 import slapos.slap
-import cPickle as pickle
-from ProcessManager import SubprocessError, ProcessManager, CancellationError
+from six.moves import cPickle as pickle
+from .ProcessManager import SubprocessError, ProcessManager, CancellationError
 from subprocess import CalledProcessError
-from Updater import Updater
+from .Updater import Updater
 from erp5.util import taskdistribution
 from erp5.util.benchmark.thread import TestThread
 # for dummy slapos answer
 import signal
 from . import logger
+from six.moves import range
 # max time to generate frontend instance: 1.5 hour
 MAX_FRONTEND_TIME = 60 * 90
 # max time to register instance to slapOSMaster: 5 minutes

@@ -322,18 +323,16 @@ ces or already launched.")
    software_hash_directory = self.testnode.config['slapos_binary'].rsplit("bin/slapos", 1)[0]
    apache_htpasswd = software_hash_directory + "parts/apache/bin/htpasswd"
    testsuite_directory = self.testnode.config['repository_path_list'][0].rsplit('/', 1)[0]
-   htaccess_file = open(testsuite_directory + HTACCESS, "w")
-   file_content = """
+   with open(testsuite_directory + HTACCESS, "w") as htaccess_file:
+     htaccess_file.write("""
AuthType Basic
AuthName "Password Protected Area"
AuthUserFile "%s%s"
Require valid-user
-""" % (testsuite_directory, HTPASSWD)
-   htaccess_file.write(file_content)
-   htaccess_file.close()
+""" % (testsuite_directory, HTPASSWD))
    password_path = testsuite_directory + PASSWORD_FILE
    with open(password_path, "w") as password_file:
-     password = ''.join(random.choice(string.digits + string.letters) for i in xrange(PASSWORD_LENGTH))
+     password = ''.join(random.choice(string.digits + string.ascii_letters) for i in range(PASSWORD_LENGTH))
      password_file.write(password)
    user = TESTNODE_USER
    command = [apache_htpasswd, "-bc", testsuite_directory + HTPASSWD, user, password]

@@ -363,7 +362,7 @@ Require valid-user
    user, password = self.generateProfilePasswordAccess()
    logger.info("Software Profile password: %s" % password)
    self.reachable_profile = "https://%s:%s@%s" % (user, password,
-     os.path.join(urlparse.urlparse(self.testnode.config['frontend_url']).netloc,
+     os.path.join(urlparse(self.testnode.config['frontend_url']).netloc,
                   "software", self.randomized_path, "software.cfg"))
  def prepareSlapOSForTestSuite(self, node_test_suite):

@@ -526,7 +525,7 @@ Require valid-user
    if not self.launchable:
      return {'status_code': 1,
              'error_message': "Current test_suite is not actually launchable."}
    configuration_list = node_test_suite.configuration_list
-   test_list = range(0, len(configuration_list))
+   test_list = list(range(len(configuration_list)))
    try:
      test_result_proxy = self.testnode.taskdistribution.createTestResult(
        node_test_suite.revision, test_list,
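The import rewrites above replace Python 2 implicit relative imports (import SlapOSControler) with explicit ones (from . import SlapOSControler) and route renamed standard-library modules through six.moves. The explicit relative form only works inside a package, so the runnable part of the pattern is the six.moves side, sketched here (the URL is just an example value):

    from six.moves import cPickle as pickle            # cPickle -> pickle
    from six.moves import http_client as httplib       # httplib -> http.client
    from six.moves.urllib.parse import urlparse        # urlparse -> urllib.parse

    # Sibling modules, previously reachable as "import Utils", are imported
    # explicitly inside the package on Python 3, e.g.:
    #   from . import SlapOSControler, SlapOSMasterCommunicator, Utils
    #   from .ProcessManager import ProcessManager

    print(urlparse('https://frontend.example/software/software.cfg').netloc)
    print(pickle.loads(pickle.dumps({'status_code': 0})))
    print(httplib.OK)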

erp5/util/testnode/SlapOSControler.py

@@ -35,6 +35,8 @@ from slapos import client
 from . import logger
 from .Utils import createFolder
+from six.moves import range
 MAX_PARTITIONS = 10
 MAX_SR_RETRIES = 3

@@ -243,7 +245,7 @@ class SlapOSControler(object):
      computer = slap.registerComputer(config['computer_id'])
      # Call a method to ensure connection to master can be established
      computer.getComputerPartitionList()
-   except slapos.slap.ConnectionError, e:
+   except slapos.slap.ConnectionError as e:
      retries += 1
      if retries >= 60:
        raise

@@ -270,7 +272,7 @@ class SlapOSControler(object):
    # MySQL DB content) from previous runs. To support changes of partition
    # naming scheme (which already happened), do this at instance_root level.
    createFolder(instance_root, True)
-   for i in xrange(MAX_PARTITIONS):
+   for i in range(MAX_PARTITIONS):
      # create partition and configure computer
      # XXX: at the moment all partitions do share same virtual interface address
      # this is not a problem as usually all services are on different ports

@@ -278,7 +280,7 @@ class SlapOSControler(object):
      partition_path = os.path.join(instance_root, partition_reference)
      if not (os.path.exists(partition_path)):
        os.mkdir(partition_path)
-     os.chmod(partition_path, 0750)
+     os.chmod(partition_path, 0o750)
      computer.updateConfiguration(xml_marshaller.xml_marshaller.dumps({
        'address': config['ipv4_address'],
        'instance_root': instance_root,

@@ -318,7 +320,7 @@ class SlapOSControler(object):
    os.environ['PATH'] = environment['PATH']
    # a SR may fail for number of reasons (incl. network failures)
    # so be tolerant and run it a few times before giving up
-   for _ in xrange(MAX_SR_RETRIES):
+   for _ in range(MAX_SR_RETRIES):
      status_dict = self.spawn(config['slapos_binary'],
        'node', 'software', '--all', '--pidfile',
        os.path.join(self.software_root, 'slapos-node.pid'),

@@ -346,7 +348,7 @@ class SlapOSControler(object):
    # try to run for all partitions as one partition may in theory request another one
    # this not always is required but curently no way to know how "tree" of partitions
    # may "expand"
-   for _ in xrange(max_quantity):
+   for _ in range(max_quantity):
      status_dict = self.spawn(config['slapos_binary'],
        'node', 'instance', '--pidfile',
        os.path.join(self.instance_root, 'slapos-node.pid'),
        '--cfg', self.slapos_config, raise_error_if_fail=False,
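Two Python-3-only issues show up in this file: xrange() is gone (plain range() or six.moves.range replaces it), and old-style octal literals such as 0750 are a syntax error (they become 0o750, valid on Python 2.6+ as well). A short sketch:

    import os
    import stat
    import tempfile

    MAX_PARTITIONS = 10

    # xrange() does not exist on Python 3; range() is fine for a small loop
    # (six.moves.range can be used when laziness matters on Python 2).
    for i in range(MAX_PARTITIONS):
        pass

    # 0o750 is accepted by both interpreters, unlike the old literal 0750.
    path = tempfile.mkdtemp()
    os.chmod(path, 0o750)
    print(oct(stat.S_IMODE(os.stat(path).st_mode)))
    os.rmdir(path)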

erp5/util/testnode/SlapOSMasterCommunicator.py

+from __future__ import print_function
 import datetime
 import json
 import traceback

@@ -12,6 +14,8 @@ from requests.exceptions import HTTPError
 from ..taskdistribution import SAFE_RPC_EXCEPTION_LIST
 from . import logger
+import six
 # max time to instance changing state: 3 hour
 MAX_INSTANCE_TIME = 60 * 60 * 3

@@ -52,7 +56,7 @@ def retryOnNetworkFailure(func,
    except _except_list:
      traceback.print_exc()
-     print 'Network failure. Retry method %s in %i seconds' % (func, retry_time)
+     print('Network failure. Retry method %s in %i seconds' % (func, retry_time))
      time.sleep(retry_time)
      retry_time = min(retry_time * 1.5, 640)

@@ -92,8 +96,9 @@ class SlapOSMasterCommunicator(object):
    if instance_title is not None:
      self.name = instance_title
    if request_kw is not None:
-     if isinstance(request_kw, basestring) or \
-         isinstance(request_kw, unicode):
+     if isinstance(request_kw, bytes):
+       self.request_kw = json.loads(request_kw.decode('utf-8'))
+     elif isinstance(request_kw, six.text_type):
        self.request_kw = json.loads(request_kw)
      else:
        self.request_kw = request_kw

@@ -214,7 +219,7 @@ class SlapOSMasterCommunicator(object):
    result = self.hateoas_navigator.GET(url)
    result = json.loads(result)
    if result['_links'].get('action_object_slap', None) is None:
-     print result['links']
+     print(result['links'])
      return None
    object_link = self.hateoas_navigator.hateoasGetLinkFromLinks(

@@ -385,8 +390,9 @@ class SlapOSTester(SlapOSMasterCommunicator):
    self.name = name
    self.computer_guid = computer_guid
-   if isinstance(request_kw, str) or \
-       isinstance(request_kw, unicode):
+   if isinstance(request_kw, bytes):
+     self.request_kw = json.loads(request_kw.decode('utf-8'))
+   elif isinstance(request_kw, six.text_type):
      self.request_kw = json.loads(request_kw)
    else:
      self.request_kw = request_kw
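The request_kw handling above replaces the basestring/unicode checks with explicit bytes and six.text_type branches, decoding bytes before handing them to json.loads() (which rejects bytes on older Python 3 releases). A sketch with an illustrative function name:

    import json
    import six

    def load_request_kw(request_kw):
        # Decode a bytes payload first, pass text straight through, and assume
        # anything else has already been parsed.
        if isinstance(request_kw, bytes):
            return json.loads(request_kw.decode('utf-8'))
        if isinstance(request_kw, six.text_type):
            return json.loads(request_kw)
        return request_kw

    print(load_request_kw(b'{"a": 1}'), load_request_kw(u'{"a": 1}'))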

erp5/util/testnode/Updater.py

@@ -30,6 +30,7 @@ import re
 from . import logger
 from .ProcessManager import SubprocessError
 from .Utils import rmtree
+from slapos.util import bytes2str, str2bytes
 SVN_UP_REV = re.compile(r'^(?:At|Updated to) revision (\d+).$')
 SVN_CHANGED_REV = re.compile(r'^Last Changed Rev.*:\s*(\d+)', re.MULTILINE)

@@ -82,7 +83,7 @@ class Updater(object):
      # allow several processes clean the same folder at the same time
      try:
        os.remove(os.path.join(path, file))
-     except OSError, e:
+     except OSError as e:
        if e.errno != errno.ENOENT:
          raise

@@ -96,7 +97,7 @@ class Updater(object):
                      **kw)
  def _git(self, *args, **kw):
-   return self.spawn(self.git_binary, *args, **kw)['stdout'].strip()
+   return bytes2str(self.spawn(self.git_binary, *args, **kw)['stdout'].strip())
  def git_update_server_info(self):
    return self._git('update-server-info', '-f')

@@ -219,7 +220,7 @@ class Updater(object):
    self.deletePycFiles(path)
    try:
      status_dict = self.spawn(*args)
-   except SubprocessError, e:
+   except SubprocessError as e:
      if 'cleanup' not in e.stderr:
        raise
      self.spawn('svn', 'cleanup', path)

erp5/util/testnode/Utils.py

@@ -3,6 +3,9 @@ import stat
 import shutil
 import errno
+import six
+from six.moves import map
 def rmtree(path):
   """Delete a path recursively.

@@ -11,14 +14,22 @@ def rmtree(path):
  def chmod_retry(func, failed_path, exc_info):
    """Make sure the directories are executable and writable.
    """
+   # Depending on the Python version, the following items differ.
+   if six.PY3:
+     expected_error_type = PermissionError
+     expected_func = os.lstat
+   else:
+     expected_error_type = OSError
+     expected_func = os.listdir
    e = exc_info[1]
-   if isinstance(e, OSError):
+   if isinstance(e, expected_error_type):
      if e.errno == errno.ENOENT:
        # because we are calling again rmtree on listdir errors, this path might
        # have been already deleted by the recursive call to rmtree.
        return
      if e.errno == errno.EACCES:
-       if func is os.listdir:
+       if func is expected_func:
          os.chmod(failed_path, 0o700)
          # corner case to handle errors in listing directories.
          # https://bugs.python.org/issue8523

@@ -39,12 +50,16 @@ def createFolder(folder, clean=False):
    rmtree(folder)
  os.mkdir(folder)
-def deunicodeData(data):
-  if isinstance(data, list):
-    return map(deunicodeData, data)
-  if isinstance(data, unicode):
-    return data.encode('utf8')
-  if isinstance(data, dict):
-    return {deunicodeData(key): deunicodeData(value)
-            for key, value in data.iteritems()}
-  return data
+if six.PY3:
+  def deunicodeData(data):
+    return data
+else:
+  def deunicodeData(data):
+    if isinstance(data, list):
+      return list(map(deunicodeData, data))
+    if isinstance(data, unicode):
+      return data.encode('utf8')
+    if isinstance(data, dict):
+      return {deunicodeData(key): deunicodeData(value)
+              for key, value in six.iteritems(data)}
+    return data
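Because deunicodeData only has work to do on Python 2, the module now defines it twice and picks the variant at import time via six.PY3. A sketch of the same pattern with an illustrative helper name:

    import six

    if six.PY3:
        def deunicode(data):
            # JSON already yields native str on Python 3; nothing to convert.
            return data
    else:
        def deunicode(data):
            # On Python 2, recursively turn unicode into UTF-8 byte strings.
            if isinstance(data, list):
                return list(map(deunicode, data))
            if isinstance(data, unicode):
                return data.encode('utf8')
            if isinstance(data, dict):
                return {deunicode(k): deunicode(v)
                        for k, v in six.iteritems(data)}
            return data

    print(deunicode({u'title': [u'a', 1]}))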

erp5/util/testnode/__init__.py

@@ -24,7 +24,7 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
-import ConfigParser
+from six.moves import configparser
 import argparse
 import logging
 import logging.handlers

@@ -64,7 +64,7 @@ def main(*args):
   CONFIG = {
     'partition_reference': 'test0',
   }
-  config = ConfigParser.SafeConfigParser()
+  config = configparser.SafeConfigParser()
   # do not change case of option keys
   config.optionxform = str
   config.readfp(parsed_argument.configuration_file[0])

erp5/util/testnode/testnode.py

@@ -171,7 +171,7 @@ shared = true
       # only limit to particular error, if we run that code for all errors,
       # then if server having most repositories is down for some time, we would
       # erase all repositories and facing later hours of downloads
-      if getattr(error, 'stderr', '').find('index') >= 0:
+      if b'index' in getattr(error, 'stderr', b''):
         rmtree(repository_path)
       logger.warning("Error while getting repository, ignoring this test suite",
                      exc_info=1)

erp5/util/testsuite/__init__.py

+from __future__ import print_function
 import argparse
 import re, os, shlex, glob
 import sys, threading, subprocess
 import traceback
 import errno
 import pprint
+import six
+from six.moves import range
 from erp5.util import taskdistribution
 from pprint import pprint
+if six.PY3:
+  stdbin = lambda x: x.buffer
+else:
+  stdbin = lambda x: x
+# PY3: use shlex.quote
 _format_command_search = re.compile("[[\\s $({?*\\`#~';<>&|]").search
 _format_command_escape = lambda s: "'%s'" % r"'\''".join(s.split("'"))
 def format_command(*args, **kw):

@@ -31,7 +41,7 @@ def subprocess_capture(p, quiet=False):
       buffer.append(data)
   if p.stdout:
     stdout = []
-    output = quiet and (lambda data: None) or sys.stdout.write
+    output = (lambda data: None) if quiet else stdbin(sys.stdout).write
     stdout_thread = threading.Thread(target=readerthread,
                                      args=(p.stdout, output, stdout))
     stdout_thread.setDaemon(True)

@@ -39,7 +49,7 @@ def subprocess_capture(p, quiet=False):
   if p.stderr:
     stderr = []
     stderr_thread = threading.Thread(target=readerthread,
-                                     args=(p.stderr, sys.stderr.write, stderr))
+                                     args=(p.stderr, stdbin(sys.stderr).write, stderr))
     stderr_thread.setDaemon(True)
     stderr_thread.start()
   if p.stdout:

@@ -47,8 +57,8 @@ def subprocess_capture(p, quiet=False):
   if p.stderr:
     stderr_thread.join()
   p.wait()
-  return (p.stdout and ''.join(stdout),
-          p.stderr and ''.join(stderr))
+  return (p.stdout and b''.join(stdout),
+          p.stderr and b''.join(stderr))
 class SubprocessError(EnvironmentError):
   def __init__(self, status_dict):

@@ -72,15 +82,15 @@ class Persistent(object):
  def __getattr__(self, attr):
    if attr == '_db':
      try:
-       db = file(self._filename, 'r+')
-     except IOError, e:
+       db = open(self._filename, 'r+')
+     except IOError as e:
        if e.errno != errno.ENOENT:
          raise
-       db = file(self._filename, 'w+')
+       db = open(self._filename, 'w+')
      else:
        try:
          self.__dict__.update(eval(db.read()))
-       except StandardError:
+       except Exception:
          pass
      self._db = db
      return db

@@ -89,7 +99,7 @@ class Persistent(object):
  def sync(self):
    self._db.seek(0)
-   db = dict(x for x in self.__dict__.iteritems()
+   db = dict(x for x in six.iteritems(self.__dict__)
             if x[0][:1] != '_')
    pprint.pprint(db, self._db)
    self._db.truncate()

@@ -103,10 +113,10 @@ class TestSuite(object):
  """
-  RUN_RE = re.compile(r'Ran (?P<all_tests>\d+) tests? in (?P<seconds>\d+\.\d+)s',
+  RUN_RE = re.compile(br'Ran (?P<all_tests>\d+) tests? in (?P<seconds>\d+\.\d+)s',
    re.DOTALL)
-  STATUS_RE = re.compile(r"""
+  STATUS_RE = re.compile(br"""
    (OK|FAILED)\s+\((failures=(?P<failures>\d+),?\s*)?
    (errors=(?P<errors>\d+),?\s*)?

@@ -117,7 +127,7 @@ class TestSuite(object):
    """, re.DOTALL | re.VERBOSE)
  SUB_STATUS_RE = re.compile(
-    r"""SUB\s+RESULT:\s+(?P<all_tests>\d+)\s+Tests,\s+
+    br"""SUB\s+RESULT:\s+(?P<all_tests>\d+)\s+Tests,\s+
    (?P<failures>\d+)\s+Failures\s*\(?
    (skipped=(?P<skips>\d+),?\s*)?

@@ -130,7 +140,10 @@ class TestSuite(object):
  mysql_db_count = 1
  allow_restart = False
  realtime_output = True
-  stdin = file(os.devnull)
+  try: # PY3
+    stdin = subprocess.DEVNULL
+  except AttributeError:
+    stdin = open(os.devnull, 'rb')
  def __init__(self, max_instance_count, **kw):
    self.__dict__.update(kw)

@@ -139,8 +152,8 @@ class TestSuite(object):
    self.acquire = pool.acquire
    self.release = pool.release
    self._instance = threading.local()
-   self._pool = max_instance_count == 1 and [None] or \
-                range(1, max_instance_count + 1)
+   self._pool = [None] if max_instance_count == 1 else \
+                list(range(1, max_instance_count + 1))
    self._ready = set()
    self.running = {}
    if max_instance_count != 1:

@@ -185,13 +198,14 @@ class TestSuite(object):
  def spawn(self, *args, **kw):
    quiet = kw.pop('quiet', False)
+   cwd = kw.pop('cwd', None)
    env = kw and dict(os.environ, **kw) or None
    command = format_command(*args, **kw)
-   print '\n$ ' + command
+   print('\n$ ' + command)
    sys.stdout.flush()
    try:
      p = subprocess.Popen(args, stdin=self.stdin, stdout=subprocess.PIPE,
-                          stderr=subprocess.PIPE, env=env)
+                          stderr=subprocess.PIPE, env=env, cwd=cwd)
    except Exception:
      # Catch any exception here, to warn user instead of beeing silent,
      # by generating fake error result

@@ -229,20 +243,11 @@ class EggTestSuite(TestSuite):
  The python interpreter is ``python_interpreter``
  """
  def run(self, test):
-   print test
-   original_dir = os.getcwd()
-   try:
-     os.chdir(self.egg_test_path_dict[test])
-     return self.runUnitTest(test)
-   finally:
-     os.chdir(original_dir)
- def runUnitTest(self, *args, **kw):
+   print(test)
    try:
-     runUnitTest = "{python} setup.py test".format(python=self.python_interpreter)
-     args = tuple(shlex.split(runUnitTest))
-     status_dict = self.spawn(*args, **kw)
-   except SubprocessError, e:
+     status_dict = self.spawn(self.python_interpreter, 'setup.py', 'test',
+                              cwd=self.egg_test_path_dict[test])
+   except SubprocessError as e:
      status_dict = e.status_dict
    test_log = status_dict['stderr']
    search = self.RUN_RE.search(test_log)

@@ -270,7 +275,7 @@ class EggTestSuite(TestSuite):
    return status_dict
  def getTestList(self):
-   return self.egg_test_path_dict.keys()
+   return list(self.egg_test_path_dict)
 def runTestSuite():
  parser = argparse.ArgumentParser(description='Run a test suite.')

@@ -327,7 +332,7 @@ def runTestSuite():
  if test_result is not None:
    assert revision == test_result.revision, (revision, test_result.revision)
    while suite.acquire():
-     test = test_result.start(suite.running.keys())
+     test = test_result.start(list(suite.running))
      if test is not None:
        suite.start(test.name, lambda status_dict, __test=test:
                    __test.stop(**status_dict))
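Two of the changes above deal with binary I/O: raw bytes written to standard output must go through sys.stdout.buffer on Python 3 (the stdbin helper), and subprocess.DEVNULL replaces file(os.devnull) where available. A sketch of both (POSIX true is only used as a harmless child process; names are illustrative):

    from __future__ import print_function
    import os
    import subprocess
    import sys

    import six

    # Binary writes go to sys.stdout.buffer on Python 3; on Python 2,
    # sys.stdout already accepts bytes.
    stdbin = (lambda x: x.buffer) if six.PY3 else (lambda x: x)
    stdbin(sys.stdout).write(b"raw bytes\n")

    # subprocess.DEVNULL only exists on Python 3.3+; fall back to os.devnull.
    try:
        stdin = subprocess.DEVNULL
    except AttributeError:
        stdin = open(os.devnull, 'rb')
    print(subprocess.call(['true'], stdin=stdin))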

erp5/util/timinglogparser/__init__.py

@@ -27,6 +27,8 @@
 #
 ##############################################################################
+from __future__ import division, print_function
 import os
 import sys
 import imp

@@ -126,7 +128,7 @@ def parseFile(filename, measure_dict):
      sys.stderr.flush()
    match_list = LINE_PATTERN.findall(line)
    if len(match_list) != 1:
-     print >>sys.stderr, 'Unparseable line: %s:%i %r' % (filename, line_number, line)
+     print('Unparseable line: %s:%i %r' % (filename, line_number, line), file=sys.stderr)
    else:
      result, filter_id, date, duration = processLine(match_list[0], filename, line_number)
      # Possible result values & meaning:

@@ -135,20 +137,21 @@ def parseFile(filename, measure_dict):
      # (string): use & skip to next line
      if result is False:
        if debug:
-         print >>sys.stderr, '? %s:%i %r' % (filename, line_number, match_list[0])
+         print('? %s:%i %r' % (filename, line_number, match_list[0]), file=sys.stderr)
      elif result is True:
        if debug:
-         print >>sys.stderr, '- %s:%i %r' % (filename, line_number, match_list[0])
+         print('- %s:%i %r' % (filename, line_number, match_list[0]), file=sys.stderr)
        skip_count += 1
      else:
        measure_dict.setdefault(filter_id, {}).setdefault(result, {}).setdefault(date, []).append(int(duration))
        match_count += 1
    line = logfile.readline()
- print >>sys.stderr, '%i' % (line_number, )
+ print('%i' % (line_number, ), file=sys.stderr)
  if line_number > 0:
    duration = time() - begin
-   print >>sys.stderr, "Matched %i lines (%.2f%%), %i skipped (%.2f%%), %i unmatched (%.2f%%) in %.2fs (%i lines per second)." % \
-     (match_count, (float(match_count) / line_number) * 100,
-      skip_count, (float(skip_count) / line_number) * 100,
-      (line_number - match_count - skip_count),
-      (1 - (float(match_count + skip_count) / line_number)) * 100,
-      duration, line_number / duration)
+   print("Matched %i lines (%.2f%%), %i skipped (%.2f%%), %i unmatched (%.2f%%) in %.2fs (%i lines per second)." % \
+     (match_count, (match_count / line_number) * 100,
+      skip_count, (skip_count / line_number) * 100,
+      (line_number - match_count - skip_count),
+      (1 - (match_count + skip_count) / line_number) * 100,
+      duration, line_number // duration), file=sys.stderr)
 debug = False
 outfile_prefix = None

@@ -161,9 +164,9 @@ decimate_count = 1
 try:
   opts, file_list = getopt.getopt(sys.argv[1:], '',
     ['debug', 'config=', 'prefix=', 'no-average', 'sum', 'load=', 'save=', 'decimate='])
-except Exception, reason:
-  print >>sys.stderr, reason
-  print >>sys.stderr, usage
+except Exception as reason:
+  print(reason, file=sys.stderr)
+  print(usage, file=sys.stderr)
   sys.exit(1)
 for name, value in opts:

@@ -185,7 +188,7 @@ for name, value in opts:
     decimate_count = int(value)
 if configuration is None:
-  raise ValueError, '--config is mandatory'
+  raise ValueError('--config is mandatory')
 config_file = os.path.splitext(os.path.basename(configuration))[0]
 config_path = [os.path.dirname(os.path.abspath(configuration))] + sys.path

@@ -203,26 +206,24 @@ file_number = 0
 measure_dict = {}
 if len(load_file_name_list):
   for load_file_name in load_file_name_list:
-    load_file = open(load_file_name)
-    temp_measure_dict = eval(load_file.read(), {})
-    load_file.close()
+    with open(load_file_name) as load_file:
+      temp_measure_dict = eval(load_file.read(), {})
     assert isinstance(measure_dict, dict)
     for filter_id, result_dict in temp_measure_dict.iteritems():
       for result, date_dict in result_dict.iteritems():
         for date, duration_list in date_dict.iteritems():
           measure_dict.setdefault(filter_id, {}).setdefault(result, {}).setdefault(date, []).extend(duration_list)
-    print >>sys.stderr, 'Previous processing result restored from %r' % (load_file_name, )
+    print('Previous processing result restored from %r' % (load_file_name, ), file=sys.stderr)
 for filename in file_list:
   file_number += 1
-  print >>sys.stderr, 'Processing %s [%i/%i]...' % (filename, file_number, file_count)
+  print('Processing %s [%i/%i]...' % (filename, file_number, file_count), file=sys.stderr)
   parseFile(filename, measure_dict)
 if save_file_name is not None:
-  save_file = open(save_file_name, 'w')
-  save_file.write(repr(measure_dict))
-  save_file.close()
-  print >>sys.stderr, 'Processing result saved to %r' % (save_file_name, )
+  with open(save_file_name, 'w') as save_file:
+    save_file.write(repr(measure_dict))
+  print('Processing result saved to %r' % (save_file_name, ), file=sys.stderr)
 if outfile_prefix is not None:
   ## Generate a list of all measures and a 2-levels dictionnary with date as key and measure dictionnary as value

@@ -252,21 +253,21 @@ if outfile_prefix is not None:
  def renderOutput(data_format, filename_suffix):
    for sheet_id, sheet_column_list in sheet_dict.iteritems():
      outfile_name = '%s_%s_%s.csv' % (outfile_prefix, sheet_id, filename_suffix)
-     print >>sys.stderr, 'Writing to %r...' % (outfile_name, )
-     outfile = open(outfile_name, 'w')
-     print >>outfile, '"date",%s' % (','.join(['"%s"' % (x[0], ) for x in sheet_column_list]), )
-     decimate_dict = {}
-     decimate = 0
-     for date in date_list:
-       for key, value in line_dict[date].iteritems():
-         decimate_dict.setdefault(key, []).extend(value)
-       decimate += 1
-       if decimate == decimate_count:
-         print >>outfile, '"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list]))
-         decimate_dict = {}
-         decimate = 0
-     if len(decimate_dict):
-       print >>outfile, '"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list]))
+     print('Writing to %r...' % (outfile_name, ), file=sys.stderr)
+     with open(outfile_name, 'w') as outfile:
+       print('"date",%s' % (','.join(['"%s"' % (x[0], ) for x in sheet_column_list]), ), file=outfile)
+       decimate_dict = {}
+       decimate = 0
+       for date in date_list:
+         for key, value in line_dict[date].iteritems():
+           decimate_dict.setdefault(key, []).extend(value)
+         decimate += 1
+         if decimate == decimate_count:
+           print('"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list])), file=outfile)
+           decimate_dict = {}
+           decimate = 0
+       if len(decimate_dict):
+         print('"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list])), file=outfile)
  if do_average:
    renderOutput('=%(sum)i/%(count)i', 'avg')

erp5/util/timinglogplotter/__init__.py

@@ -27,6 +27,8 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
+from __future__ import division, print_function
 from datetime import date
 from os import path
 import rpy2.robjects as robjects

@@ -82,9 +84,9 @@ class CSVFile(object):
        if cell > value_max.get(key, 0):
          value_max[key] = cell
        column_dict[key].append(cell)
-   line_num = float(line_num) / 100
+   line_num = line_num / 100
    for key in ratio_dict:
-     ratio_dict[key] /= line_num
+     ratio_dict[key] //= line_num
  def getColumn(self, column_id):
    return self.column_dict[self.column_list[column_id]]

@@ -101,7 +103,7 @@ def computeExpr(expr):
  if expr:
    assert expr[0] == '='
    num, denom = expr[1:].split('/')
-   result = float(int(num)) / int(denom)
+   result = int(num) / int(denom)
  else:
    result = None
  return result

@@ -121,7 +123,7 @@ def main():
  current_dir = os.getcwd()
  for file_name in file_name_list:
-   print 'Loading %s...' % (file_name, )
+   print('Loading %s...' % (file_name, ))
    file = CSVFile(file_name)
    date_string_list = file.getColumn(0)

@@ -134,7 +136,7 @@ def main():
    # date_list will be like ['2009/07/01', '2009/07/05', '2009/07/10', ...]
    factor = 1
    if len(date_string_list) > 20:
-     factor = int(len(date_string_list) / 20)
+     factor = int(len(date_string_list) // 20)
    i = 0
    for date_string in date_string_list:
      if i % factor == 0:

@@ -183,13 +185,13 @@ def main():
        y_data.append(value)
      i += 1
    if len(x_data) == 0:
-     print 'Nothing to plot for %s...' % (out_file_name, )
+     print('Nothing to plot for %s...' % (out_file_name, ))
      continue
    if options.minimal_non_empty_ratio is not None:
      column_len = len(column)
      if column_len:
-       if float(len(x_data)) / column_len < options.minimal_non_empty_ratio:
-         print 'Not enough values to plot for %s...' % (out_file_name, )
+       if len(x_data) / column_len < options.minimal_non_empty_ratio:
+         print('Not enough values to plot for %s...' % (out_file_name, ))
          continue
    r_y_data = robjects.FloatVector(y_data)
    r_x_data = robjects.FloatVector(x_data)

@@ -220,7 +222,7 @@ def main():
    # stop changing the out-type file
    r("""dev.off()""")
-   print 'Saving %s...' % (out_file_name, )
+   print('Saving %s...' % (out_file_name, ))
 if __name__ == '__main__':
  main()

erp5/util/webchecker/__init__.py

@@ -26,6 +26,8 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
+from __future__ import print_function
 import os
 import shutil
 import sys

@@ -543,7 +545,7 @@ class HTTPCacheCheckerTestSuite(object):
 from optparse import OptionParser
-import ConfigParser
+from six.moves import configparser
 def _formatConfiguration(configuration):
   """ format the configuration"""

@@ -559,11 +561,11 @@ def web_checker_utility():
   (options, args) = parser.parse_args()
   if len(args) != 1 :
-    print parser.print_help()
+    print(parser.print_help())
     parser.error('incorrect number of arguments')
   config_path = args[0]
-  config = ConfigParser.RawConfigParser()
+  config = configparser.RawConfigParser()
   config.read(config_path)
   working_directory = config.get('web_checker', 'working_directory')
   url = config.get('web_checker', 'url')

@@ -615,10 +617,9 @@ def web_checker_utility():
   result = instance.start(prohibited_file_name_list=prohibited_file_name_list,
                           prohibited_folder_name_list=prohibited_folder_name_list)
   if options.output_file:
-    file_object = open(options.output_file, 'w')
-    file_object.write(result)
-    file_object.close()
+    with open(options.output_file, 'w') as file_object:
+      file_object.write(result)
   else:
-    print result
+    print(result)

product/ERP5/bin/genbt5list

@@ -35,12 +35,24 @@ import posixpath
 import tarfile
 import os
 import sys
-import cgi
+try:
+  from html import escape
+except ImportError:
+  from cgi import escape # Deprecated since version 3.2
 from base64 import b64encode
-from cStringIO import StringIO
+from io import BytesIO
 from hashlib import sha1
-from urllib import unquote
+try:
+  from urllib.parse import unquote
+except ImportError:
+  from urllib import unquote
+if sys.version_info[0] == 3:
+  def iteritems(d):
+    return iter(d.items())
+else:
+  def iteritems(d):
+    return d.iteritems()
 # Order is important for installation
 # We want to have:

@@ -109,11 +121,11 @@ item_name_list = tuple('_%s_item' % x for x in item_name_list)
 class BusinessTemplateRevision(list):
   def hash(self, path, text):
-    self.append((path, sha1(text).digest()))
+    self.append((path.encode('utf-8'), sha1(text).digest()))
   def digest(self):
     self.sort()
-    return b64encode(sha1('\0'.join(h + p for (h, p) in self)).digest())
+    return b64encode(sha1(b'\0'.join(h + p for (h, p) in self)).digest())
 class BusinessTemplate(dict):

@@ -151,7 +163,7 @@ force_install
   def __iter__(self):
     self['revision'] = self.revision.digest()
-    return iter(sorted(self.iteritems()))
+    return iter(sorted(iteritems(self)))
   @classmethod
   def fromTar(cls, tar):

@@ -179,8 +191,8 @@ force_install
     return iter(self)
 def generateInformation(dir, info=id, err=None):
-  xml = StringIO()
-  xml.write('<?xml version="1.0"?>\n<repository>\n')
+  xml = BytesIO()
+  xml.write(b'<?xml version="1.0"?>\n<repository>\n')
   for name in sorted(os.listdir(dir)):
     path = os.path.join(dir, name)
     if name.endswith('.bt5'):

@@ -201,13 +213,16 @@ def generateInformation(dir, info=id, err=None):
       property_list = BusinessTemplate.fromDir(path)
     else:
       continue
-    xml.write(' <template id="%s">\n' % name)
+    xml.write(b' <template id="%s">\n' % name.encode())
     for k, v in property_list:
-      for v in (v,) if type(v) is str else v:
-        xml.write(' <%s>%s</%s>\n' % (k, cgi.escape(v), k))
-    xml.write(' </template>\n')
+      if str is not bytes:
+        k = k.encode()
+      for v in (v,) if type(v) is bytes else v:
+        xml.write(b' <%s>%s</%s>\n' % (k, escape(v) if str is bytes
+                                          else escape(v.decode()).encode(), k))
+    xml.write(b' </template>\n')
     info('done\n')
-  xml.write('</repository>\n')
+  xml.write(b'</repository>\n')
   return xml
 def main(dir_list=None, **kw):
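The script now builds its repository index as bytes: BytesIO replaces cStringIO.StringIO, the literals gain a b prefix, and html.escape is imported with a fallback to the deprecated cgi.escape. A sketch of that pattern (the helper below is illustrative, not the script's actual function):

    from io import BytesIO

    try:
        from html import escape  # Python 3
    except ImportError:
        from cgi import escape   # Python 2; deprecated on 3.x

    def build_repository_xml(template_names):
        # The XML buffer is assembled as bytes on both interpreters, hence
        # BytesIO (cStringIO is Python 2 only) and b'...' literals throughout.
        xml = BytesIO()
        xml.write(b'<?xml version="1.0"?>\n<repository>\n')
        for name in template_names:
            xml.write(b' <template id="%s"/>\n' % escape(name).encode('utf-8'))
        xml.write(b'</repository>\n')
        return xml.getvalue()

    print(build_repository_xml(['erp5_core', 'a<b']))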