Commit c2588247 authored by Klaus Wölfel

support analysis with multiple causalities

parent ebb7df6d
 from DateTime import DateTime
 from Products.ZSQLCatalog.SQLCatalog import AndQuery, OrQuery, Query
+from Products.ERP5Type.Errors import UnsupportedWorkflowMethod
 portal = context.getPortalObject()
 portal_catalog = portal.portal_catalog
@@ -47,6 +48,7 @@ for movement in portal_catalog(query):
     validation_state = "validated",
     resource_relative_url = movement.getResource()))
   for transformation in transformation_list:
+    is_shared_data_analysis = False
     # Check if analysis already exists
     data_analysis = portal_catalog.getResultValue(
       portal_type="Data Analysis",
@@ -54,22 +56,35 @@ for movement in portal_catalog(query):
       causality_relative_url = delivery.getRelativeUrl())
     if data_analysis is not None:
       continue
-    # Create Analysis
-    data_analysis = portal.data_analysis_module.newContent(
-      portal_type = "Data Analysis",
-      title = transformation.getTitle(),
-      reference = delivery.getReference(),
-      start_date = delivery.getStartDate(),
-      stop_date = delivery.getStopDate(),
-      specialise_value_list = [transformation] + data_supply_list,
-      causality_value = delivery,
-      source = delivery.getSource(),
-      source_section = delivery.getSourceSection(),
-      source_project = delivery.getSourceProject(),
-      destination = delivery.getDestination(),
-      destination_section = delivery.getDestinationSection(),
-      destination_project = delivery.getDestinationProject())
+    # for first level analysis check if same kind of data analysis with same project and same source already exists
+    # If yes, then later add additional input lines to this shared data analysis
+    if delivery.getPortalType() == "Data Ingestion":
+      data_analysis = portal_catalog.getResultValue(
+        portal_type="Data Analysis",
+        specialise_relative_url = transformation.getRelativeUrl(),
+        source_relative_url = delivery.getSource(),
+        destination_project_relative_url = delivery.getDestinationProject())
+    if data_analysis is not None:
+      data_analysis.setDefaultCausalityValue(delivery)
+      data_analysis.setSpecialiseValueSet(data_analysis.getSpecialiseValueList() + data_supply_list)
+      is_shared_data_analysis = True
+    else:
+      # Create Analysis
+      data_analysis = portal.data_analysis_module.newContent(
+        portal_type = "Data Analysis",
+        title = transformation.getTitle(),
+        reference = delivery.getReference(),
+        start_date = delivery.getStartDate(),
+        stop_date = delivery.getStopDate(),
+        specialise_value_list = [transformation] + data_supply_list,
+        causality_value = delivery,
+        source = delivery.getSource(),
+        source_section = delivery.getSourceSection(),
+        source_project = delivery.getSourceProject(),
+        destination = delivery.getDestination(),
+        destination_section = delivery.getDestinationSection(),
+        destination_project = delivery.getDestinationProject())
+      data_analysis.checkConsistency(fixit=True)
     # create input and output lines
     for transformation_line in transformation.objectValues(
         portal_type=["Data Transformation Resource Line",
@@ -78,6 +93,9 @@ for movement in portal_catalog(query):
       quantity = transformation_line.getQuantity()
       if isinstance(quantity, tuple):
         quantity = quantity[0]
+      # In case of shared data analysis only add additional input lines
+      if is_shared_data_analysis and quantity > -1:
+        continue
       aggregate_set = set()
       # manually add device to every line
       aggregate_set.add(movement.getAggregateDevice())
@@ -136,7 +154,6 @@ for movement in portal_catalog(query):
         aggregate_set.add(item.getRelativeUrl())
       # find other items such as device configuration and data configuration
       # from data ingestion and data supply
-      data_analysis.checkConsistency(fixit=True)
       composed = data_analysis.asComposedDocument()
       line_list = [l for l in delivery.objectValues(portal_type="Data Ingestion Line")]
       line_list += [l for l in composed.objectValues(portal_type="Data Supply Line")]
@@ -144,7 +161,7 @@ for movement in portal_catalog(query):
         if line.getResourceValue().getPortalType() == "Data Operation":
           aggregate_set.update(line.getAggregateList())
-      data_analysis.newContent(
+      data_analysis_line = data_analysis.newContent(
         portal_type = "Data Analysis Line",
         title = transformation_line.getTitle(),
         reference = transformation_line.getReference(),
@@ -155,5 +172,14 @@ for movement in portal_catalog(query):
         quantity_unit = transformation_line.getQuantityUnit(),
         use = transformation_line.getUse(),
         aggregate_set = aggregate_set)
-    data_analysis.start()
+      # for input lines of first level analysis set causality and specialise
+      if quantity < 0 and delivery.getPortalType() == "Data Ingestion":
+        data_analysis_line.edit(
+          causality_value = delivery,
+          specialise_value_list = data_supply_list)
+    data_analysis.checkConsistency(fixit=True)
+    try:
+      data_analysis.start()
+    except UnsupportedWorkflowMethod:
+      pass
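What the change amounts to, stripped of ERP5 specifics: a Data Analysis is no longer tied to exactly one Data Ingestion. If an analysis driven by the same Data Transformation, source and destination project already exists, the new ingestion is attached to it as an additional causality, its Data Supplies are appended to the specialise list, and only extra input lines are created. The sketch below is a minimal, self-contained model of that sharing logic; Analysis, find_shared_analysis and process_delivery are illustrative names only, not ERP5 API.

# Minimal stand-alone model of the sharing logic added in this commit.
# Plain Python only -- these names are illustrative, not ERP5 API.

class Analysis(object):
    def __init__(self, transformation, source, project, causalities, specialise):
        self.transformation = transformation
        self.source = source
        self.project = project
        self.causalities = list(causalities)   # deliveries this analysis was built from
        self.specialise = list(specialise)     # transformation + data supplies

analyses = []  # stands in for the data_analysis_module / catalog

def find_shared_analysis(transformation, source, project):
    # counterpart of the new portal_catalog.getResultValue(...) lookup
    for analysis in analyses:
        if (analysis.transformation, analysis.source, analysis.project) == \
           (transformation, source, project):
            return analysis
    return None

def process_delivery(delivery, transformation):
    existing = find_shared_analysis(transformation, delivery["source"], delivery["project"])
    if existing is not None:
        # shared case: add one more causality instead of creating a new analysis
        existing.causalities.append(delivery["id"])
        existing.specialise += delivery["data_supplies"]
        return existing
    analysis = Analysis(transformation, delivery["source"], delivery["project"],
                        causalities=[delivery["id"]],
                        specialise=[transformation] + delivery["data_supplies"])
    analyses.append(analysis)
    return analysis

ingestion_1 = {"id": "ingestion-1", "source": "sensor-A", "project": "project-X",
               "data_supplies": ["supply-1"]}
ingestion_2 = {"id": "ingestion-2", "source": "sensor-A", "project": "project-X",
               "data_supplies": ["supply-2"]}

a1 = process_delivery(ingestion_1, "transformation-T")
a2 = process_delivery(ingestion_2, "transformation-T")
assert a1 is a2                                           # one analysis is shared
assert a1.causalities == ["ingestion-1", "ingestion-2"]   # multiple causalities

In the actual script the catalog lookup plays the role of find_shared_analysis, and the is_shared_data_analysis flag restricts the second pass to input lines (quantity < 0) so output lines are not duplicated on the shared analysis.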