Commit 501d29e9 authored by Ivan Tyagov

Refactor and extend test

parent 243137fe
@@ -26,6 +26,7 @@
##############################################################################
from Products.ERP5Type.tests.ERP5TypeTestCase import ERP5TypeTestCase
from Products.ERP5Type.tests.utils import createZODBPythonScript
from wendelin.bigarray.array_zodb import ZBigArray
from DateTime import DateTime
import msgpack
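The new ZBigArray import is what Data Array documents wrap: a ZODB-backed array exposing a NumPy-compatible interface, which is what the assertions further down read back through getArray(). A minimal sketch of the idea, assuming wendelin.core is installed and an open ZODB root is at hand (the root, shape and dtype below are illustrative only, not part of this commit):

import numpy as np
import transaction
from wendelin.bigarray.array_zodb import ZBigArray

# illustrative only: 'root' stands for an open ZODB root object
root['zarray'] = zarray = ZBigArray((1000,), np.float64)  # shape, dtype
transaction.commit()

view = zarray[:]            # ndarray view backed by the ZODB data
view[0:10] = np.arange(10)  # regular numpy operations work on the view
transaction.commit()        # persist the modified pages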
@@ -57,31 +58,13 @@ class Test(ERP5TypeTestCase):
# here, you can create the categories and objects your test will depend on
pass
def test_0_import(self):
"""
Test that we can import certain libraries; a failure to do so should be
a test step failure rather than a global test failure.
"""
import scipy
import sklearn
import pandas
def test_01_IngestionFromFluentd(self):
def stepSetupIngestion(self, reference):
"""
Test ingestion using a POST Request containing a msgpack encoded message
simulating input from fluentd
Generic step.
"""
now = DateTime()
portal = self.portal
request = portal.REQUEST
reference = getRandomString()
number_string_list = []
for my_list in list(chunks(range(0, 100001), 10)):
number_string_list.append(','.join([str(x) for x in my_list]))
real_data = '\n'.join(number_string_list)
# make sure real_data tail is also a full line
real_data += '\n'
# create ingestion policy
ingestion_policy = portal.portal_ingestion_policies.newContent( \
@@ -116,8 +99,44 @@ class Test(ERP5TypeTestCase):
data_supply = ingestion_policy.PortalIngestionPolicy_addDataSupply( \
data_supply_kw, \
data_supply_line_kw)
data_array = portal.data_array_module.newContent(
portal_type='Data Array',
reference = reference,
version = '001')
data_array.validate()
self.tic()
return ingestion_policy, data_supply, data_stream, data_array
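stepSetupIngestion and the tests below rely on two small helpers imported elsewhere in the component, chunks and getRandomString, which this diff does not show. A rough sketch of what they are assumed to do (hypothetical re-implementations, only to make the CSV payload construction easier to follow):

import random
import string

def chunks(sequence, size):
  # assumed behaviour: yield successive fixed-size slices of a sequence
  for start in range(0, len(sequence), size):
    yield sequence[start:start + size]

def getRandomString():
  # assumed behaviour: a random reference keeping test documents unique
  return ''.join(random.choice(string.ascii_lowercase) for _ in range(10))

# the tests join each 10-element slice into one CSV line, producing the
# 100001 integers (0..100000) spread over comma-separated lines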
def test_0_import(self):
"""
Test that we can import certain libraries; a failure to do so should be
a test step failure rather than a global test failure.
"""
import scipy
import sklearn
import pandas
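The pylint warning list updated at the bottom of this commit flags scipy, sklearn and pandas above as unused variables. One possible variant (not part of this commit) that keeps the import check while silencing those warnings would be to touch the modules after importing them:

def test_0_import(self):
  """
  Check the scientific stack can be imported; an ImportError should fail
  this test step only, not the whole test suite.
  """
  import scipy
  import sklearn
  import pandas
  # referencing the modules avoids pylint's unused-variable warnings
  for module in (scipy, sklearn, pandas):
    self.assertTrue(module.__name__)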
def test_01_IngestionFromFluentd(self):
"""
Test ingestion using a POST Request containing a msgpack encoded message
simulating input from fluentd.
"""
portal = self.portal
request = portal.REQUEST
reference = getRandomString()
number_string_list = []
for my_list in list(chunks(range(0, 100001), 10)):
number_string_list.append(','.join([str(x) for x in my_list]))
real_data = '\n'.join(number_string_list)
# make sure real_data tail is also a full line
real_data += '\n'
ingestion_policy, data_supply, data_stream, data_array = \
self.stepSetupIngestion(reference)
# simulate fluentd by setting proper values in REQUEST
request.method = 'POST'
data_chunk = msgpack.packb([0, real_data], use_bin_type=True)
@@ -125,19 +144,10 @@ class Test(ERP5TypeTestCase):
request.set('data_chunk', data_chunk)
ingestion_policy.ingest()
# the ingestion handler script saves each chunk followed by a newline and
# stringifies it; real_data already ends with a newline, so the stored
# Data Stream content should match it exactly
data_stream_data = data_stream.getData()
self.assertEqual(real_data, data_stream_data)
# try sample transformation
data_array = portal.data_array_module.newContent(
portal_type='Data Array',
reference = reference,
version = '001')
data_array.validate()
self.tic()
data_stream.DataStream_transform(\
chunk_length = 10450, \
transform_script_id = 'DataStream_copyCSVToDataArray',
@@ -145,13 +155,50 @@ class Test(ERP5TypeTestCase):
self.tic()
# test some numpy operations
# test that extracted array contains same values as input CSV
zarray = data_array.getArray()
np.average(zarray)
self.assertEqual(np.average(zarray), np.average(np.arange(100001)))
self.assertTrue(np.array_equal(zarray, np.arange(100001)))
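The request body built above mimics what a fluentd forwarder would POST: a msgpack-packed pair whose first element plays the role of a timestamp and whose second element carries the raw CSV lines. A small round-trip sketch of that encoding (illustrative only; raw=False assumes a msgpack release that supports it, 0.5.2 or later):

import msgpack

real_data = '0,1,2\n3,4,5\n'
data_chunk = msgpack.packb([0, real_data], use_bin_type=True)

# the ingestion side unpacks the same pair back
timestamp, payload = msgpack.unpackb(data_chunk, raw=False)
assert timestamp == 0 and payload == real_data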
def test_01_1_IngestionTail(self):
"""
Test real-time conversion to a numpy array by appending data to a Data Stream.
"""
portal = self.portal
reference = getRandomString()
number_string_list = []
for my_list in list(chunks(range(0, 100001), 10)):
number_string_list.append(','.join([str(x) for x in my_list]))
real_data = '\n'.join(number_string_list)
# make sure real_data tail is also a full line
real_data += '\n'
ingestion_policy, data_supply, data_stream, data_array = self.stepSetupIngestion(reference)
# override DataStream_transformTail to actually do the transformation on appendData
script_id = 'DataStream_transformTail'
script_content_list = ['**kw', """
# created by testWendelin.test_01_1_IngestionTail
context.DataStream_transform(\
chunk_length = 10450, \
transform_script_id = 'DataStream_copyCSVToDataArray',
data_array_reference = context.getReference())"""]
createZODBPythonScript(portal.portal_skins.custom, script_id, *script_content_list)
# append data to Data Stream and check array.
data_stream.appendData(real_data)
self.tic()
# test that extracted array contains same values as input CSV
zarray = data_array.getArray()
self.assertEqual(np.average(zarray), np.average(np.arange(100001)))
self.assertTrue(np.array_equal(zarray, np.arange(100001)))
# clean up script
portal.portal_skins.custom.manage_delObjects([script_id,])
self.tic()
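createZODBPythonScript is the ERP5 test utility used above to install a temporary skin script: it takes the skin folder, the script id, the parameter signature and the script body, and the test removes the script again once the assertions have run. A condensed sketch of that install / clean-up pattern (ids and the script body below are illustrative):

custom = portal.portal_skins.custom
script_id = 'DataStream_transformTail'
createZODBPythonScript(
  custom, script_id,
  '**kw',                                  # parameter signature
  "context.log('tail transform called')")  # illustrative script body
try:
  data_stream.appendData('0,1,2\n')        # triggers the freshly installed hook
  self.tic()
finally:
  custom.manage_delObjects([script_id])    # always remove the override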
def test_02_Examples(self):
"""
Test we can use python scientific libraries by using directly created
......
@@ -46,11 +46,13 @@
<key> <string>text_content_warning_message</string> </key>
<value>
<tuple>
<string>W: 38, 8: Unused variable 'n' (unused-variable)</string>
<string>W: 65, 4: Unused variable 'scipy' (unused-variable)</string>
<string>W: 67, 4: Unused variable 'pandas' (unused-variable)</string>
<string>W: 66, 4: Unused variable 'sklearn' (unused-variable)</string>
<string>W:116, 4: Unused variable 'data_supply' (unused-variable)</string>
<string>W: 39, 8: Unused variable 'n' (unused-variable)</string>
<string>W:117, 4: Unused variable 'scipy' (unused-variable)</string>
<string>W:119, 4: Unused variable 'pandas' (unused-variable)</string>
<string>W:118, 4: Unused variable 'sklearn' (unused-variable)</string>
<string>W:137, 22: Unused variable 'data_supply' (unused-variable)</string>
<string>W:177, 4: Unused variable 'ingestion_policy' (unused-variable)</string>
<string>W:177, 22: Unused variable 'data_supply' (unused-variable)</string>
</tuple>
</value>
</item>
......