Commit 2a7fed31 authored by Roque Porchetto's avatar Roque Porchetto

erp5_wendelin_telecom_ingestion: minor fix in pending file metadata and...

erp5_wendelin_telecom_ingestion: minor fix in pending file metadata and logging references besides ids
parent f10ce0bb
......@@ -5,7 +5,7 @@ log("Data_chunk size: %s" % str(len(data_chunk)))
decoded = base64.b64decode(data_chunk)
log("Decoded data_chunk size: %s" % str(len(decoded)))
log("Appending to data stream: %s." % data_stream)
log("Appending to data stream: %s - reference: %s" % (data_stream, data_stream.getReference()))
data_stream.appendData(decoded)
......
......@@ -37,7 +37,7 @@ for line_data_ingestion in portal_catalog(**query_dict):
destination_project = data_ingestion.getDestinationProject())
context.logEntry("Data Analyisis created for Data Ingestion %s (ID: %s)" % (str(data_ingestion.getReference()), data_analysis.getId()))
except:
context.logEntry("[ERROR] Error creating Data Analysis for Data Ingestion '%s'. Script returned" % data_ingestion.getId())
context.logEntry("[ERROR] Error creating Data Analysis for Data Ingestion '%s' (ID: %s). Script returned" % (str(data_ingestion.getReference()), data_analysis.getId()))
return # Data Analysis was already created
# create input and output lines
......@@ -70,7 +70,7 @@ for line_data_ingestion in portal_catalog(**query_dict):
aggregate_set.update(related_line.getAggregateSet())
related_line.getParentValue().deliver()
log("DATA INGESTION DELIVERED")
context.logEntry("Data Ingestion '%s' delivered." % data_ingestion.getId())
context.logEntry("Data Ingestion '%s' delivered. (ID: %s)" % (str(data_ingestion.getReference()), data_analysis.getId()))
else:
# it is an output line
# create new item based on item_type: data array, stream, descriptor, etc.
......@@ -101,4 +101,4 @@ for line_data_ingestion in portal_catalog(**query_dict):
data_analysis.plan()
log("DATA ANALYSIS PLANNED")
except Exception as e:
context.logEntry("[ERROR] Error creating Data Analysis for Data Ingestion '%s': %s" % (data_ingestion.getId(), str(e)))
context.logEntry("[ERROR] Error creating Data Analysis for Data Ingestion '%s' (ID: %s): %s" % (data_ingestion.getReference(), data_ingestion.getId(), str(e)))
......@@ -33,7 +33,7 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
related_split_ingestions = portal_catalog(portal_type = "Data Ingestion",
reference = data_ingestion.getReference())
if len(related_split_ingestions) == 1:
context.logEntry("Started single ingestion (not split) found: " + data_ingestion.getId())
context.logEntry("Started single ingestion (not split) found: %s - reference: %s" % (data_ingestion.getId(), data_ingestion.getReference()))
data_stream = portal_catalog.getResultValue(
portal_type = 'Data Stream',
reference = data_ingestion.getReference())
......@@ -44,14 +44,14 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
data_stream.validate()
if data_ingestion.getSimulationState() == "started":
data_ingestion.stop()
context.logEntry("Data Ingestion %s stopped." % data_ingestion.getId())
context.logEntry("Data Ingestion %s stopped. Reference: %s." % (data_ingestion.getId(), data_ingestion.getReference()))
# append split ingestions
for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
simulation_state = "started",
id = "%001"):
if not data_ingestion.getReference().endswith("_invalid"):
context.logEntry("Started split ingestion found: " + data_ingestion.getId())
context.logEntry("Started split ingestion found: %s - reference: %s" % (data_ingestion.getId(), data_ingestion.getReference()))
try:
last_data_stream_id = ""
query = Query(portal_type="Data Stream", reference=data_ingestion.getReference(), validation_state="draft")
......@@ -85,7 +85,7 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
else:
ingestion.setReference(ingestion.getReference() + "_invalid")
ingestion.deliver()
context.logEntry("Chunks of split ingestion where appended into Data Stream %s. Corresponding Data Ingestion stopped." % full_data_stream.getId())
context.logEntry("Chunks of split ingestion where appended into Data Stream %s. Reference: %s. Corresponding Data Ingestion stopped." % (full_data_stream.getId(), full_data_stream.getReference()))
except Exception as e:
context.logEntry("ERROR appending split data streams for ingestion: %s" % data_ingestion.getReference())
context.logEntry("ERROR appending split data streams for ingestion: %s - reference: %s." % (data_ingestion.getId(), data_ingestion.getReference()))
context.logEntry(e)
......@@ -55,7 +55,6 @@ try:
validation_state = 'validated')]
# create a new data ingestion
context.logEntry("Data Ingestion created. ID: %s" % data_ingestion_id)
data_ingestion = portal.data_ingestion_module.newContent(
id = data_ingestion_id,
portal_type = "Data Ingestion",
......@@ -63,6 +62,7 @@ try:
reference = data_ingestion_reference,
start_date = now,
specialise_value_list = specialise_value_list)
context.logEntry("Data Ingestion created. ID: %s - Reference: %s" % (data_ingestion_id, data_ingestion_reference))
property_list = ["title",
"source",
......@@ -111,7 +111,7 @@ try:
title = "%s%s" % (data_ingestion.getTitle(), "."+extension if extension != "none" else ""),
reference = data_ingestion_reference)
context.logEntry("Data Stream created. ID: %s" % data_stream.getId())
context.logEntry("Data Stream created. ID: %s - Reference: %s" % (data_stream.getId(), data_ingestion_reference))
input_line.setDefaultAggregateValue(data_stream)
if dataset_reference is not None:
......
......@@ -14,6 +14,18 @@ ing_dict = {
ingestions = portal_catalog(**ing_dict)
if len(ingestions) == 1:
data_ingestion = ingestions[0]
elif len(ingestions) == 0:
ing_dict = {
"simulation_state": "started",
"portal_type": "Data Ingestion",
"id": "%END",
"reference": reference}
single_started_ingestions = portal_catalog(**ing_dict)
if len(single_started_ingestions) == 1:
return '{"metadata":"Metadata not ready yet, please wait some minutes."}'
else:
context.logEntry("ERROR getting Data Ingestion of file %s. The file does not have a unique data ingestion in correct state." % reference)
return '{"metadata":"No metadata available for this type of file yet"}'
else:
context.logEntry("ERROR getting Data Ingestion of file %s. The file does not have a unique data ingestion in correct state." % reference)
return '{"metadata":"No metadata available for this type of file yet"}'
......@@ -36,7 +48,7 @@ try:
data_descriptor = context.restrictedTraverse(url)
except Exception as e:
# backward compatibility
context.logEntry("ERROR while looking for data descriptor with id %s : %s" % (str(data_ingestion.getId()), str(e)))
context.logEntry("ERROR while looking for data descriptor with id %s (reference: %s) : %s" % (str(data_ingestion.getId()), data_ingestion.getReference(), str(e)))
query = Query(portal_type="Data Descriptor")
data_descriptor = None
for document in portal_catalog(query=query):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment