Commit 54e74797 authored Aug 04, 2023 by Xavier Thompson

software/end-to-end-testing: Switch to nxdtest

parent b527abd1

Showing 5 changed files with 78 additions and 135 deletions (+78, -135):
software/end-to-end-testing/buildout.hash.cfg   +8   -4
software/end-to-end-testing/instance.cfg.in     +55  -13
software/end-to-end-testing/software.cfg        +6   -5
software/end-to-end-testing/test_kvm.py         +0   -113
software/end-to-end-testing/test_test.py        +9   -0
software/end-to-end-testing/buildout.hash.cfg

 [instance.cfg]
 filename = instance.cfg.in
-md5sum = 562e123cefa9e39cbc78300e4643f7b3
+md5sum = 0e49345c3b7b7988adf2aaa7c666a415
 
-[runTestSuite.in]
-filename = runTestSuite.in
-md5sum = 3fab881b3baba3c398b4d89b5ce26542
+[test_test.py]
+filename = test_test.py
+md5sum = c074373dbb4154aa924ef5781dade7a0
+
+[test_kvm.py]
+filename = test_kvm.py
+md5sum = b6773d5ed283f94d20f38c34b47976da
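The md5sum values above are plain MD5 digests of the downloaded template files; whenever a template changes, the corresponding digest in buildout.hash.cfg has to be refreshed, which is why instance.cfg's entry changes in this commit. A minimal sketch of recomputing such a digest (the actual verification is done by the download/template recipes, and the path here is illustrative):

```python
# Recompute the MD5 digest of a template file, e.g. to refresh
# buildout.hash.cfg after editing instance.cfg.in.
import hashlib

def md5sum(path):
  with open(path, 'rb') as f:
    return hashlib.md5(f.read()).hexdigest()

print(md5sum('software/end-to-end-testing/instance.cfg.in'))
```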
software/end-to-end-testing/instance.cfg.in

@@ -2,11 +2,11 @@
 eggs-directory = ${buildout:eggs-directory}
 develop-eggs-directory = ${buildout:develop-eggs-directory}
+extends =
+  ${nxdtest-instance.cfg:output}
 parts =
-  runTestSuite
+  .nxdtest
   client.crt
   client.key
   slapos-client.cfg
 
 [slap-configuration]
@@ -25,15 +25,8 @@ bin = $${buildout:directory}/bin
 etc = $${buildout:directory}/etc
 var = $${buildout:directory}/var
 cfg = $${buildout:directory}/.slapos
-tmp = $${buildout:directory}/tmp
 testdir = $${:var}/tests
-nxdtest = $${:var}/nxdtest
+nxdtestdir = $${:var}/nxdtest
-
-[runTestSuite]
-recipe = slapos.recipe.template
-output = $${directory:bin}/runTestSuite
-url = ${runTestSuite.in:target}
-python_for_test = ${python_for_test:executable}
 
 [client.crt]
@@ -58,3 +51,52 @@ inline =
 [slapconsole]
 cert_file = $${client.crt:output}
 key_file = $${client.key:output}
+
+[env.sh]
+recipe = slapos.recipe.template:jinja2
+output = $${directory:cfg}/env.sh
+inline =
+  export HOME=$${directory:home}
+
+[runTestSuite]
+# extended from stack/nxdtest
+env.sh = $${env.sh:output}
+workdir = $${directory:nxdtestdir}
+
+[.nxdtest]
+recipe = slapos.recipe.template:jinja2
+output = $${runTestSuite:workdir}/.nxdtest
+python_for_test = ${python_for_test:executable}
+tests =
+  $${test_test.py:output}
+  $${test_kvm.py:output}
+context =
+  key tests :tests
+  key python_for_test :python_for_test
+inline =
+  import os
+  tests = {{ repr(tests) }}
+  for test in tests.splitlines():
+    directory, filename = os.path.split(test)
+    name, _ = os.path.splitext(filename)
+    TestCase(
+      name,
+      [{{ repr(python_for_test) }} , '-m', 'unittest', '-v', name],
+      cwd=directory,
+      summaryf=UnitTest.summary,
+    )
+
+[test_test.py]
+recipe = slapos.recipe.template
+output = $${directory:testdir}/test_test.py
+url = ${test_test.py:target}
+
+[test_kvm.py]
+recipe = slapos.recipe.template
+output = $${directory:testdir}/test_kvm.py
+url = ${test_kvm.py:target}
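For context, the [.nxdtest] section above is a jinja2 template that generates the .nxdtest file nxdtest executes: the {{ repr(tests) }} and {{ repr(python_for_test) }} placeholders are filled from the context keys, and the loop registers one TestCase per downloaded test module. A rough sketch of what the rendered file would look like, with illustrative paths and with stubs standing in for the TestCase/UnitTest names that nxdtest itself provides when it runs the file:

```python
# Sketch of the rendered .nxdtest (paths are illustrative, not real partition
# paths).  TestCase and UnitTest are normally injected by nxdtest; the stubs
# below only make this sketch self-contained.
import os

def TestCase(name, argv, cwd=None, summaryf=None):  # stub for illustration
  print('registered test case', name, argv, 'cwd=%s' % cwd)

class UnitTest:  # stub for illustration
  @staticmethod
  def summary(*args, **kwargs):
    pass

python_for_test = '/srv/slapgrid/slappart0/bin/python_for_test'  # illustrative
tests = ('/srv/slapgrid/slappart0/var/tests/test_test.py\n'
         '/srv/slapgrid/slappart0/var/tests/test_kvm.py')

for test in tests.splitlines():
  directory, filename = os.path.split(test)
  name, _ = os.path.splitext(filename)
  TestCase(
    name,
    [python_for_test, '-m', 'unittest', '-v', name],
    cwd=directory,
    summaryf=UnitTest.summary,
  )
```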
software/end-to-end-testing/software.cfg

 [buildout]
 extends =
+  ../../component/pygolang/buildout.cfg
   ../../stack/slapos.cfg
+  ../../stack/nxdtest.cfg
   buildout.hash.cfg
 parts =
@@ -14,24 +16,23 @@ output = ${buildout:directory}/instance.cfg
 url = ${:_profile_base_location_}/${:filename}
 
-[runTestSuite.in]
+[test_test.py]
 recipe = slapos.recipe.build:download
 output = ${buildout:directory}/${:filename}
 url = ${:_profile_base_location_}/${:filename}
 
-[test_one.py]
+[test_kvm.py]
 recipe = slapos.recipe.build:download
 output = ${buildout:directory}/${:filename}
 url = ${:_profile_base_location_}/${:filename}
 
 [python_for_test]
-recipe = zc.recipe.egg
+<= python-interpreter
 interpreter = python_for_test
-scripts = ${:interpreter}
 executable = ${buildout:bin-directory}/${:interpreter}
 depends = ${lxml-python:egg}
 eggs =
+  ${pygolang:egg}
   slapos.core
-  erp5.util
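The [python_for_test] part now reuses the python-interpreter macro and gains ${pygolang:egg}, presumably because nxdtest is built on pygolang, while erp5.util is apparently dropped since result reporting is now handled by nxdtest rather than by the removed runTestSuite script. A quick hedged smoke check that the resulting bin/python_for_test interpreter can import what the tests and nxdtest need (package names assumed: slapos.client from slapos.core, golang from pygolang):

```python
# Run this with bin/python_for_test to verify the interpreter built by
# software.cfg exposes the expected eggs.
import importlib

for module in ('slapos.client', 'golang'):
  importlib.import_module(module)
  print('ok', module)
```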
software/end-to-end-testing/runTestSuite.in → software/end-to-end-testing/test_kvm.py

-#!${:python_for_test}
-import argparse
 import configparser
-import importlib
 import json
 import logging
 import time
 import unittest
 
-import erp5.util.taskdistribution
 import slapos.client
 
 
-class EndToEndResult(unittest.TextTestResult):
-  def __init__(self, stream, descriptions, verbosity):
-    self.durations = []
-    super().__init__(stream, descriptions, verbosity)
-
-  def startTest(self, test):
-    self._startTime = time.perf_counter()
-
-  def stopTest(self, test):
-    stopTime = time.perf_counter()
-    self.durations.append((test, stopTime - self._startTime))
-
-
-def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--test_suite')
-  parser.add_argument('--test_suite_title')
-  parser.add_argument('--test_node_title')
-  parser.add_argument('--project_title')
-  parser.add_argument('--revision')
-  parser.add_argument('--master_url')
-  args, _ = parser.parse_known_args() # ignore other arguments
-
-  if not args.master_url:
-    unittest.main() # exits
-
-  module = importlib.import_module(__name__)
-  suite = unittest.defaultTestLoader.loadTestsFromModule(module)
-  all_tests = [t for s in suite for t in s]
-
-  runner = unittest.TextTestRunner(resultclass=EndToEndResult)
-  result = runner.run(suite)
-
-  errors = dict(result.errors)
-  failures = dict(result.failures)
-  skipped = dict(result.skipped)
-  # TODO: unexpected successes and expected failures
-  durations = dict(result.durations)
-
-  print(errors)
-  print(failures)
-  print(skipped)
-  print(durations)
-
-  # Create test lines at the end to reduce race conditions on multiple testnodes
-  task_distributor = erp5.util.taskdistribution.TaskDistributor(
-    portal_url=args.master_url
-  )
-  test_result = task_distributor.createTestResult(
-    revision = args.revision,
-    test_name_list = [t.id() for t in all_tests],
-    node_title = args.test_node_title,
-    test_title = args.test_suite_title,
-    project_title = args.project_title,
-  )
-  if test_result is None:
-    print("A test result has already been completed")
-    print("Nothing to do")
-    return
-
-  for t in all_tests:
-    kind = [errors, failures, skipped]
-    count = [0, 0, 0]
-    output = 'OK'
-    for i in range(len(kind)):
-      try:
-        output = kind[i][t]
-        count[i] = 1
-        break
-      except KeyError:
-        pass
-    print(t.id())
-    print(count)
-    print(output)
-    test_result_line = test_result.start()
-    if test_result_line is None:
-      print("A test result line has already been completed")
-      print("Nothing to do")
-      break
-    test_result_line.stop(
-      test_count = 1,
-      error_count = count[0],
-      failure_count = count[1],
-      skip_count = count[2],
-      duration = durations[t],
-      command = '',
-      stdout = output,
-      stderr = '',
-      html_test_result = '',
-    )
-
-
 class EndToEndTestCase(unittest.TestCase):
   @classmethod
   def setUpClass(cls):
@@ -197,14 +96,6 @@ class EndToEndTestCase(unittest.TestCase):
     time.sleep(60)
 
 
-class Test(EndToEndTestCase):
-  def test_fail(self):
-    self.assertEqual(0, 1)
-
-  def test_succeed(self):
-    self.assertEqual(0, 0)
-
-
 class KvmTest(EndToEndTestCase):
   def test(self):
     # instance_name = time.strftime('e2e-test-kvm-%Y-%B-%d-%H:%M:%S')
@@ -213,7 +104,3 @@ class KvmTest(EndToEndTestCase):
     self.waitUntilGreen(instance_name)
     connection_dict = self.request(self.product.kvm, instance_name)
     self.assertIn('url', connection_dict)
-
-
-if __name__ == '__main__':
-  main()
software/end-to-end-testing/test_test.py  (new file, 0 → 100644)

+import unittest
+
+
+class Test(unittest.TestCase):
+  def test_fail(self):
+    self.assertEqual(0, 1)
+
+  def test_succeed(self):
+    self.assertEqual(0, 0)