Commit fd228fc1 authored by Levin Zimmermann

erp5_wendelin: fix test_16_createDataAnalysisFromDataTransformationWithoutResolution

parent 51ff4078
@@ -633,10 +633,24 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
     data_array.setArrayDtypeNames((dtype_name1,))
     self.assertEqual(data_array.getArrayDtypeNames(), (dtype_name1,))
 
   def test_16_createDataAnalysisFromDataTransformationWithoutResolution(self):
+    """
+    Ensure data analyses are created from a data transformation without any
+    specified variation categories.
+    """
     portal = self.portal
     title_prefix = "Wendelin Test 16"
+    test_function_to_organisation = {}
+    for test_function in ("source", "destination"):
+      organisation = portal.organisation_module.newContent(
+        portal_type="Organisation",
+        title="%s %s" % (title_prefix, test_function),
+      )
+      self.addCleanup(self._removeDocument, organisation)
+      organisation.validate()
+      test_function_to_organisation.update({test_function: organisation})
+
     data_operation = portal.data_operation_module.newContent(
       portal_type="Data Operation",
       title="%s Data Operation" % title_prefix,
@@ -649,6 +663,7 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
       portal_type="Data Product",
       title="%s Data Product" % title_prefix,
       individual_variation_base_category_list=["resolution"],
+      quantity_unit="unit/piece"
     )
     self.addCleanup(self._removeDocument, resource)
     resource.validate()
@@ -677,6 +692,8 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
       title="%s Import Raw Data" % title_prefix,
       reference="wendelin.test.16.initial.data.analysis",
       resource=resource.getRelativeUrl(),
+      source=test_function_to_organisation['source'].getRelativeUrl(),
+      destination=test_function_to_organisation['destination'].getRelativeUrl(),
       specialise_value_list=[
         specialise_data_supply.getRelativeUrl(),
         specialise_data_transformation.getRelativeUrl()
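The two new arrow parameters reuse the organisations collected into `test_function_to_organisation` above. A small sketch of that bookkeeping, with hypothetical relative URLs standing in for the `getRelativeUrl()` results:

```python
# Hypothetical relative URLs; in the test they come from getRelativeUrl().
test_function_to_organisation = {
    test_function: "organisation_module/wendelin_test_16_%s" % test_function
    for test_function in ("source", "destination")
}

# Keyword arguments as passed to the Data Analysis in the diff above.
data_analysis_arguments = dict(
    source=test_function_to_organisation["source"],
    destination=test_function_to_organisation["destination"],
)
print(data_analysis_arguments)
```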
@@ -691,7 +708,11 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
       reference="out_array",
       resource=resource.getRelativeUrl(),
       quantity=1,
-      variation_category_list=["resolution/%s" % resource_resolution.getRelativeUrl()],
+      quantity_unit="unit/piece",
+      variation_category_list=[
+        "resolution/%s" % resource_resolution.getRelativeUrl(),
+        "resource/%s" % resource.getRelativeUrl(),
+      ],
       use="use/big_data/ingestion/stream",
     )
     initial_data_analysis.newContent(
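The out_array line now varies by the resource itself in addition to its resolution. Variation category strings are plain `<base category>/<relative URL>` paths; a sketch of how the two entries above are composed, with hypothetical URLs:

```python
# Hypothetical relative URLs; the test builds them from getRelativeUrl().
resource_relative_url = "data_product_module/wendelin_test_16_product"
resolution_relative_url = resource_relative_url + "/resolution_1"

# "<base category>/<relative URL>" strings, as in the diff above.
variation_category_list = [
    "resolution/%s" % resolution_relative_url,
    "resource/%s" % resource_relative_url,
]
print(variation_category_list)
```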
@@ -700,9 +721,8 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
       reference="data_operation",
       resource=data_operation.getRelativeUrl(),
       quantity=1,
+      quantity_unit="unit/piece",
     )
-    self.commit()
-    self.tic()
 
     data_transformation = portal.data_transformation_module.newContent(
       portal_type="Data Transformation",
@@ -711,28 +731,32 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
     )
     self.addCleanup(self._removeDocument, data_transformation)
     data_transformation.validate()
+
     data_transformation.newContent(
       portal_type="Data Transformation Operation Line",
       title="Convert A to B",
       reference="data_operation",
       resource=data_operation.getRelativeUrl(),
       quantity=1,
+      quantity_unit="unit/piece",
     )
 
-    def getDataAnalysis():
+    def getDataAnalysisByTitle(title):
       return portal.portal_catalog.getResultValue(
         portal_type="Data Analysis",
-        title=data_transformation.getTitle()
+        title=title
       )
 
-    self.assertEqual(getDataAnalysis(), None)
+    data_analysis_title_list = [specialise_data_transformation.getTitle(), data_transformation.getTitle()]
+    for data_analysis_title in data_analysis_title_list:
+      self.assertEqual(getDataAnalysisByTitle(data_analysis_title), None)
+    self.commit()
     self.tic()
-    self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
-    self.tic()
-    data_analysis = getDataAnalysis()
-    self.assertNotEqual(data_analysis, None)
-    self.addCleanup(self._removeDocument, data_analysis)
+    for _ in range(1):
+      self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
+      self.tic()
+    for data_analysis_title in data_analysis_title_list:
+      data_analysis = getDataAnalysisByTitle(data_analysis_title)
+      self.assertNotEqual(data_analysis, None)
+      self.addCleanup(self._removeDocument, data_analysis)
\ No newline at end of file
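The final hunk parametrises the catalog helper by title and asserts, for both expected titles, that no Data Analysis exists before the wendelin_handle_analysis alarm runs and that one exists per title afterwards. A self-contained toy of that flow, with a dict standing in for portal_catalog and a plain function standing in for the alarm; none of this is ERP5 API, and the titles are hypothetical:

```python
# Stand-in catalog: (portal_type, title) -> document; None when missing.
catalog = {}

def getDataAnalysisByTitle(title):
    return catalog.get(("Data Analysis", title))

def wendelin_handle_analysis(expected_title_list):
    # Toy alarm: create whatever Data Analysis documents are still missing.
    for title in expected_title_list:
        catalog.setdefault(("Data Analysis", title), "data-analysis: " + title)

# Hypothetical titles standing in for the two getTitle() calls in the test.
data_analysis_title_list = ["Test 16 Data Transformation", "Test 16 Specialise"]
for title in data_analysis_title_list:
    assert getDataAnalysisByTitle(title) is None   # nothing before the alarm
wendelin_handle_analysis(data_analysis_title_list)
for title in data_analysis_title_list:
    assert getDataAnalysisByTitle(title) is not None  # one per expected title
```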