Commit 1fcc9d18 authored by Roque Porchetto's avatar Roque Porchetto

erp5_wendelin_telecom_ingestion: split ingestions are partially appended by alarm

parent 471a61d4
...@@ -4,14 +4,14 @@ from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery ...@@ -4,14 +4,14 @@ from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery
portal = context.getPortalObject() portal = context.getPortalObject()
portal_catalog = portal.portal_catalog portal_catalog = portal.portal_catalog
# start single planned ingestion (not split files)
for data_ingestion in portal_catalog(portal_type = "Data Ingestion", for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
simulation_state = "planned", simulation_state = "planned",
id = "%EOF"): id = "%EOF"):
context.logEntry("Planned EOF ingestion found: " + data_ingestion.getId())
related_split_ingestions = portal_catalog(portal_type = "Data Ingestion", related_split_ingestions = portal_catalog(portal_type = "Data Ingestion",
simulation_state = "planned",
reference = data_ingestion.getReference()) reference = data_ingestion.getReference())
if len(related_split_ingestions) == 1: if len(related_split_ingestions) == 1:
context.logEntry("Planned EOF ingestion found: " + data_ingestion.getId())
data_stream = portal_catalog.getResultValue( data_stream = portal_catalog.getResultValue(
portal_type = 'Data Stream', portal_type = 'Data Stream',
reference = data_ingestion.getReference()) reference = data_ingestion.getReference())
...@@ -19,25 +19,33 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion", ...@@ -19,25 +19,33 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
data_stream.validate() data_stream.validate()
data_ingestion.start() data_ingestion.start()
context.logEntry("Data Ingestion %s started." % data_ingestion.getId()) context.logEntry("Data Ingestion %s started." % data_ingestion.getId())
# Append chunks of split ingestions: each split upload arrives as several
# Data Streams sharing one reference; the head chunk's id matches the
# planned Data Ingestion (suffix "001").  Chunks are appended to the head
# in creation order and deleted; once the terminating "EOF" chunk has been
# absorbed, the head stream is validated, its ingestion is started and the
# leftover sibling ingestions are cancelled.
for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
                                     simulation_state = "planned",
                                     id = "%001"):
  context.logEntry("Planned split ingestion found: " + data_ingestion.getId())
  try:
    last_data_stream_id = ""
    # Guard: the head stream must be seen before any chunk is appended.
    # The original code hit a NameError here; fail with an explicit message
    # instead, so the except below logs a useful error and leaves all
    # chunks intact for the next alarm run.
    full_data_stream = None
    query = Query(portal_type="Data Stream", reference=data_ingestion.getReference())
    result_list = portal_catalog(query=query, sort_on=(('creation_date', 'ascending'),))
    for data_stream in result_list:
      if data_stream.getId() == data_ingestion.getId():
        full_data_stream = data_stream
      else:
        if full_data_stream is None:
          raise ValueError("Head data stream %s not found before chunk %s"
                           % (data_ingestion.getId(), data_stream.getId()))
        full_data_stream.appendData(data_stream.getData())
        last_data_stream_id = data_stream.getId()
        portal.data_stream_module.deleteContent(data_stream.getId())
    # Only finalize once the terminating "EOF" chunk has been appended;
    # otherwise leave everything planned for a later alarm run.
    if last_data_stream_id.endswith("EOF"):
      full_data_stream.validate()
      related_split_ingestions = portal_catalog(portal_type = "Data Ingestion",
                                                reference = data_ingestion.getReference())
      for ingestion in related_split_ingestions:
        if ingestion.getId() == full_data_stream.getId():
          ingestion.start()
        else:
          ingestion.cancel()
      context.logEntry("Chunks of split ingestion where appended into Data Stream %s. Corresponding Data Ingestion started." % full_data_stream.getId())
  except Exception as e:
    context.logEntry("ERROR appending split data streams for ingestion: %s" % data_ingestion.getReference())
    # logEntry is called with strings everywhere else in this script;
    # stringify the exception rather than passing the object.
    context.logEntry(str(e))
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment