Commit 51ff4078 authored by Levin Zimmermann

erp5_wendelin: Fix test_16_createDataAnalysisFromDataTransformationWithoutResolution

parent c794942b
@@ -665,12 +665,22 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
     self.addCleanup(self._removeDocument, specialise_data_transformation)
     specialise_data_transformation.validate()
+    specialise_data_supply = portal.data_supply_module.newContent(
+      portal_type="Data Supply",
+      title="%s Specialise Data Supply" % title_prefix,
+    )
+    self.addCleanup(self._removeDocument, specialise_data_supply)
+    specialise_data_supply.validate()
     initial_data_analysis = portal.data_analysis_module.newContent(
       portal_type="Data Analysis",
       title="%s Import Raw Data" % title_prefix,
       reference="wendelin.test.16.initial.data.analysis",
       resource=resource.getRelativeUrl(),
-      specialise_value_list=[specialise_data_transformation.getRelativeUrl()],
+      specialise_value_list=[
+        specialise_data_supply.getRelativeUrl(),
+        specialise_data_transformation.getRelativeUrl()
+      ],
     )
     self.addCleanup(self._removeDocument, initial_data_analysis)
@@ -691,6 +701,8 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
       resource=data_operation.getRelativeUrl(),
       quantity=1,
     )
+    self.commit()
+    self.tic()
     data_transformation = portal.data_transformation_module.newContent(
       portal_type="Data Transformation",
@@ -700,6 +712,14 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
     self.addCleanup(self._removeDocument, data_transformation)
     data_transformation.validate()
+    data_transformation.newContent(
+      portal_type="Data Transformation Operation Line",
+      title="Convert A to B",
+      reference="data_operation",
+      resource=data_operation.getRelativeUrl(),
+      quantity=1,
+    )
     def getDataAnalysis():
       return portal.portal_catalog.getResultValue(
         portal_type="Data Analysis",
@@ -709,9 +729,10 @@ result = [x for x in data_bucket_stream.getBucketIndexKeySequenceByIndex()]
     self.assertEqual(getDataAnalysis(), None)
     self.tic()
-    for _ in range(2):
+    for _ in range(1):
       self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
       self.tic()
     data_analysis = getDataAnalysis()
     self.assertNotEqual(data_analysis, None)
+    self.addCleanup(self._removeDocument, data_analysis)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment