preetwinder / erp5 · Commits

Commit 283859d2
authored Jul 19, 2013 by Benjamin Blanc
scalability: runScalabilityTestSuite: add simplistic scalability measure
parent 0eb4e8a6
Showing 1 changed file with 64 additions and 16 deletions

erp5/util/scalability/runScalabilityTestSuite.py (+64, -16)
--- a/erp5/util/scalability/runScalabilityTestSuite.py
+++ b/erp5/util/scalability/runScalabilityTestSuite.py
@@ -6,6 +6,8 @@ import os
 import time
 import sys
 import multiprocessing
+import subprocess
+import signal
 import errno
 import json
 import logging
@@ -19,6 +21,10 @@ from erp5.util.testnode import Utils
 from subprocess import call
 
 LOG_FILE_PREFIX = "performance_tester_erp5"
+# Duration of a test case
+TEST_CASE_DURATION = 60
+# Maximum limit of documents to create during a test case
+MAX_DOCUMENTS = 100000
 
 class ScalabilityTest(object):
   def __init__(self, data, test_result):
@@ -116,7 +122,7 @@ class ScalabilityLauncher(object):
     """
     """
     complete_scheme = os.path.join(path, scheme)
-    file_path_list = glob.glob(scheme)
+    file_path_list = glob.glob(complete_scheme)
     content_list = []
     for file_path in file_path_list:
       opened_file = open(file_path, 'r')
@@ -131,6 +137,14 @@ class ScalabilityLauncher(object):
   def returnCsvList(self):
     return self._returnFileContentList(self.__argumentNamespace.log_path,
                                        "%s*.csv" % LOG_FILE_PREFIX)
+
+  def getCreatedDocumentNumber(self):
+    number = 0
+    complete_scheme = os.path.join(self.__argumentNamespace.log_path,
+                                   "%s*.csv" % LOG_FILE_PREFIX)
+    file_path_list = glob.glob(complete_scheme)
+    for file_path in file_path_list:
+      number = number + sum(1 for line in open(file_path))
+    return number
 
   def cleanUplogAndCsv(self):
     files_to_delete = glob.glob(os.path.join(self.__argumentNamespace.log_path,
@@ -154,6 +168,30 @@ class ScalabilityLauncher(object):
       next_test = ScalabilityTest(decoded_data, self.test_result)
     return next_test
 
+  def getCreatedDocumentNumber(self):
+    # First file line is corresponding to header
+    number = -1
+    complete_scheme = os.path.join(self.__argumentNamespace.log_path,
+                                   "%s*.csv" % LOG_FILE_PREFIX)
+    file_path_list = glob.glob(complete_scheme)
+    for file_path in file_path_list:
+      number = number + sum(1 for line in open(file_path))
+    return number
+
+  def getFailedDocumentNumber(self):
+    number = 0
+    complete_scheme = os.path.join(self.__argumentNamespace.log_path,
+                                   "%s*.csv" % LOG_FILE_PREFIX)
+    file_path_list = glob.glob(complete_scheme)
+    for file_path in file_path_list:
+      opened_file = open(file_path, 'r')
+      lines = opened_file.readlines()
+      for line in lines:
+        if '-1' in line:
+          number = number + 1
+      opened_file.close()
+    return number
+
   def run(self):
     self.log("Scalability Launcher started, with:")
     self.log("Test suite master url: %s" % self.__argumentNamespace.test_suite_master_url)
@@ -178,11 +216,12 @@ class ScalabilityLauncher(object):
         self.log("No Test Case Ready")
         time.sleep(5)
       else:
+        error_count = 1
         # Here call a runScalabilityTest ( placed on product/ERP5Type/tests ) ?
         self.log("Test Case %s is running..." % (current_test.title))
+        # Call the performance_tester_erp5
         try:
-          call([tester_path,
+          tester_process = subprocess.Popen([tester_path,
                 self.__argumentNamespace.erp5_url,
                 '1',
                 test_suites,
@@ -190,16 +229,26 @@ class ScalabilityLauncher(object):
                 '--users-file-path', user_file_path,
                 '--filename-prefix', "%s_%s_" % (LOG_FILE_PREFIX, current_test.title),
                 '--report-directory', self.__argumentNamespace.log_path,
-                '--repeat', '100',
+                '--repeat', "%s" % str(MAX_DOCUMENTS),
               ])
+          test_case_duration = TEST_CASE_DURATION
+          time.sleep(test_case_duration)
+          #tester_process.kill()
+          tester_process.send_signal(signal.SIGINT)
+          error_count = 0
         except:
           self.log("Error during tester call.")
           raise ValueError("Tester call failed")
         self.log("Test Case %s is finish" % (current_test.title))
-        log_contents = self.returnLogList()
-        csv_contents = self.returnCsvList()
-        #self.cleanUplogAndCsv()
+        failed_document_number = self.getFailedDocumentNumber()
+        created_document_number = self.getCreatedDocumentNumber() - failed_document_number
+        created_document_per_hour_number = ((float(created_document_number)*60*60)/float(test_case_duration))
+        #log_contents = self.returnLogList()
+        #csv_contents = self.returnCsvList()
+        self.cleanUplogAndCsv()
 
         retry_time = 2.0
         proxy = taskdistribution.ServerProxy(
@@ -211,14 +260,13 @@ class ScalabilityLauncher(object):
             current_test.relative_path,
             current_test.title
           )
-        stdout = "LOG:\n""\n====\n====\n====\n====\n"
-        for log_content in log_contents:
-          stdout = stdout + log_content + "\n====\n====\n"
-        stdout = stdout + "CSV:\n""\n====\n====\n====\n====\n"
-        for csv_content in csv_contents:
-          stdout = stdout + csv_content + "\n====\n====\n"
-        test_result_line_test.stop(stdout=stdout)
+        output = "%s doc in %s secs = %s docs per hour" % (created_document_number, test_case_duration, created_document_per_hour_number)
+        test_result_line_test.stop(stdout=output,
+                                   test_count=created_document_number,
+                                   failure_count=failed_document_number,
+                                   error_count=error_count,
+                                   duration=test_case_duration)
         self.log("Test Case Stopped")
 
     return error_message_set, exit_status
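
In short, the measure works by running the tester for a fixed window instead of to completion: the launcher now starts performance_tester_erp5 through subprocess.Popen with a very large --repeat limit (MAX_DOCUMENTS), sleeps for TEST_CASE_DURATION seconds, then interrupts it with SIGINT rather than kill(), presumably so the tester can shut down cleanly and write its CSV results. A minimal standalone sketch of that pattern, with a hypothetical command line standing in for the real tester invocation:

import signal
import subprocess
import time

TEST_CASE_DURATION = 60  # seconds, as defined in the diff

# Hypothetical command; the real launcher passes the performance_tester_erp5
# path plus the ERP5 url, user count, test suite and reporting options.
tester_command = ['/path/to/performance_tester_erp5',
                  'http://erp5.example:2100/erp5', '1', 'createPerson']

tester_process = subprocess.Popen(tester_command)  # run the load generator in the background
time.sleep(TEST_CASE_DURATION)                     # let it create documents for a fixed window
tester_process.send_signal(signal.SIGINT)          # graceful stop instead of kill()
tester_process.wait()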
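
The throughput figure itself comes from the CSV files the tester leaves behind: every data row in a performance_tester_erp5*.csv file counts as a created document, rows containing '-1' count as failures, and the remainder is scaled up to documents per hour over the TEST_CASE_DURATION window. A rough self-contained sketch of that computation (a variant of the helpers above, not the exact code: it skips one header row per file, whereas the diff's getCreatedDocumentNumber starts at -1 and so discounts only a single header overall):

import glob
import os

LOG_FILE_PREFIX = "performance_tester_erp5"

def measure_throughput(log_path, duration=60):
    """Return (created, failed, docs_per_hour) from the tester's CSV output."""
    created = 0
    failed = 0
    for file_path in glob.glob(os.path.join(log_path, "%s*.csv" % LOG_FILE_PREFIX)):
        with open(file_path) as csv_file:
            rows = csv_file.readlines()[1:]  # drop each file's header row
        created += len(rows)
        failed += sum(1 for row in rows if '-1' in row)  # '-1' marks a failed document
    succeeded = created - failed
    docs_per_hour = float(succeeded) * 60 * 60 / float(duration)
    return succeeded, failed, docs_per_hour

For example, 120 successful rows collected over a 60-second test case would be reported as 7200.0 documents per hour.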