Commit 985884a3 authored by Roque Porchetto

erp5_wendelin_telecom_ingestion: remove debug logs

parent bae5c56d
@@ -13,7 +13,6 @@ CHUNK_SIZE_TXT = 50000
CHUNK_SIZE_CSV = 25
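# Write the raw Data Stream content to a local file in fixed-size chunks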
def saveRawFile(data_stream, file_name):
log("Writting raw data file...")
data_stream_chunk = None
n_chunk = 0
chunk_size = CHUNK_SIZE
@@ -28,7 +27,6 @@ def saveRawFile(data_stream, file_name):
with open(file_name, 'a') as fif_file:
fif_file.write(data_stream_chunk)
n_chunk += 1
log("Done.")
def getJSONfromTextFile(file_name):
try:
@@ -88,11 +86,9 @@ def processFifData(file_name, data_array, data_descriptor):
try:
json_report = getMNEReportJSON(file_name, raw)
data_descriptor.setTextContent(json_report)
log("Data Descriptor content saved")
except Exception as e:
log("Error handling Data Descriptor content: " + str(e))
log("Saving raw data in Data Array...")
picks = mne.pick_types(raw.info)
if len(picks) == 0: raise StandardError("The raw data does not contain any element")
data, times = raw[picks[:1]] # get data from first pick to get shape
@@ -113,7 +109,6 @@ def processTextData(file_name, data_array, data_descriptor):
try:
json_report = getJSONfromTextFile(file_name)
data_descriptor.setTextContent(json_report)
log("Data Descriptor text content saved")
except Exception as e:
log("Error handling Data Descriptor content: " + str(e))
@@ -166,11 +161,9 @@ def processCsvData(file_name, data_array, data_descriptor, delimiter=","):
appendArray(chunk, data_array, columns, initialize)
# so returning after first chunk
data_descriptor.setTextContent(json_data)
log("Data Descriptor content saved")
return
initialize = False
data_descriptor.setTextContent(json_data)
log("Data Descriptor content saved")
except Exception as e:
log("Error handling csv Data Descriptor content: " + str(e))
@@ -203,7 +196,6 @@ def processNiiData(file_name, data_array, data_descriptor, gz=False):
pass # ignore non serializable info
json_data = json.dumps(data)
data_descriptor.setTextContent(json_data)
log("Data Descriptor nii content saved")
except Exception as e:
log("Error handling Data Descriptor nii content: " + str(e))
raise e
@@ -217,9 +209,6 @@ def processGZData(file_name, data_array, data_descriptor):
raise KeyError("gz")
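# Dispatch the file to a format-specific processor chosen by the reference extension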
def processRawData(data_stream, data_array, data_descriptor, reference_extension):
import time
start = time.time()
content = {"File content":"empty"}
if data_stream.getSize() == 0:
data_descriptor.setTextContent(json.dumps(content))
@@ -243,7 +232,6 @@ def processRawData(data_stream, data_array, data_descriptor, reference_extension
"default" : processTextData,
}
try:
log("Processing data...")
if reference_extension in options:
options[reference_extension](file_name, data_array, data_descriptor)
else:
@@ -256,7 +244,5 @@ def processRawData(data_stream, data_array, data_descriptor, reference_extension
if os.path.exists(file_name):
os.remove(file_name)
elapsed = time.time() - start
log("Done. Time elapsed: " + str(elapsed))
return "Raw data processed in %s seconds." % str(elapsed)
\ No newline at end of file
return "Raw data processed."
\ No newline at end of file
@@ -52,13 +52,13 @@
<key> <string>text_content_warning_message</string> </key>
<value>
<tuple>
<string>W: 75, 2: No exception type(s) specified (bare-except)</string>
<string>W: 80, 4: No exception type(s) specified (bare-except)</string>
<string>W: 98, 8: Unused variable 'times' (unused-variable)</string>
<string>W:135, 6: No exception type(s) specified (bare-except)</string>
<string>W:171, 8: Unreachable code (unreachable)</string>
<string>W:190, 11: Using type() instead of isinstance() for a typecheck. (unidiomatic-typecheck)</string>
<string>W:194, 10: No exception type(s) specified (bare-except)</string>
<string>W: 73, 2: No exception type(s) specified (bare-except)</string>
<string>W: 78, 4: No exception type(s) specified (bare-except)</string>
<string>W: 94, 8: Unused variable 'times' (unused-variable)</string>
<string>W:130, 6: No exception type(s) specified (bare-except)</string>
<string>W:165, 8: Unreachable code (unreachable)</string>
<string>W:183, 11: Using type() instead of isinstance() for a typecheck. (unidiomatic-typecheck)</string>
<string>W:187, 10: No exception type(s) specified (bare-except)</string>
</tuple>
</value>
</item>
......
from Products.ERP5Type.Log import log
import base64
log("Data_chunk size: %s" % str(len(data_chunk)))
decoded = base64.b64decode(data_chunk)
log("Decoded data_chunk size: %s" % str(len(decoded)))
log("Appending to data stream: %s - reference: %s" % (data_stream, data_stream.getReference()))
data_stream.appendData(decoded)
log("Ingested data successfully appended.")
context.logEntry("Datachunk (size %s) appended to Data Stream." % str(len(decoded)))
from Products.ERP5Type.Log import log
log("Processing raw data from Data Stream " + str(input_stream_data.getReference()))
context.logEntry("Processing raw data from Data Stream %s" % str(input_stream_data.getReference()))
reference_extension = input_stream_data.getReference().split("/")[-1]
result = str(context.processRawData(input_stream_data, output_array, output_descriptor, reference_extension))
log(result)
context.logEntry("Result: %s" % result)
from Products.ERP5Type.Log import log
context.logEntry("[NEW INGESTION]")
context.logEntry("Reference received: %s" % reference)
record = reference.rsplit('/')
length = len(record)
if (length < 7):
context.logEntry("[ERROR] Data Ingestion reference is not well formated")
context.logEntry("[ERROR] In HandleFifEmbulkIngestion: Data Ingestion reference is not well formated")
raise ValueError("Data Ingestion reference is not well formated.")
invalid_chars = ["&", ";", "#", "%", '"', "+"]
for char in invalid_chars:
if char in reference:
context.logEntry("[ERROR] Data Ingestion reference contains chars that are not allowed")
context.logEntry("[ERROR] In HandleFifEmbulkIngestion: Data Ingestion reference contains chars that are not allowed")
raise ValueError("Data Ingestion reference contains chars that are not allowed.")
@@ -35,7 +30,4 @@ dict = { 'filename': filename,
'hash': hash
}
log("Returning dictionary: %s." % dict)
context.logEntry("Parameter dictionary: %s" % dict)
return dict
from Products.ERP5Type.Log import log
portal = context.getPortalObject()
portal.ERP5Site_stopIngestionList()
portal.ERP5Site_createDataAnalysisList()
......
from Products.ERP5Type.Log import log
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery
return
# This alarm was deprecated - kept for test
portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
@@ -19,7 +21,6 @@ for data_ingestion in portal_catalog(**catalog_kw):
'reference': data_ingestion.getReference()}
invalidate = True
if len(portal_catalog(**catalog_kw)) > 0:
context.logEntry("Data Ingestion %s is old but it has related data ingestions that are not, so split ingestion is still in course. It won't be invalidated." % data_ingestion.getId())
invalidate = False
if invalidate:
@@ -33,11 +34,9 @@ for data_ingestion in portal_catalog(**catalog_kw):
data_stream.invalidate()
except:
context.logEntry("[WARNING] Could not invalidate data stream '%s', it was already invalidated or draft" % data_stream.getId())
context.logEntry("%s %s (id:%s) invalidated" % (data_stream.getPortalType(), data_stream.getReference(), data_stream.getId()))
try:
if not data_ingestion.getReference().endswith("_invalid"):
data_ingestion.setReference(data_ingestion.getReference() + "_invalid")
data_ingestion.deliver()
except:
context.logEntry("[WARNING] Could not invalidate/deliver data ingestion '%s', it was already invalidated/deliver" % data_ingestion.getId())
context.logEntry("%s %s (id:%s) invalidated and delivered" % (data_ingestion.getPortalType(), data_ingestion.getReference(), data_ingestion.getId()))
from Products.ERP5Type.Log import log
operation = None
parameter_dict = {}
@@ -10,7 +8,6 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
else:
resource_portal_type = ''
if resource_portal_type == 'Data Operation':
log("Getting Data Operation and Resources from Data Analysis...")
operation_analysis_line = analysis_line
operation = analysis_line.getResourceValue()
else:
@@ -21,6 +18,5 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
parameter_dict[reference] = aggregate
script_id = operation.getScriptId()
log("Calling script: " + str(script_id))
getattr(operation_analysis_line, script_id)(**parameter_dict)
context.stop()
@@ -20,7 +20,6 @@ for line_data_ingestion in portal_catalog(**query_dict):
# Create Analysis
try:
try:
context.logEntry("creating Data Analysis for Data Ingestion " + str(data_ingestion.getReference()))
data_analysis = portal.data_analysis_module.newContent(
portal_type = "Data Analysis",
id = data_ingestion.getId(),
@@ -35,14 +34,12 @@ for line_data_ingestion in portal_catalog(**query_dict):
destination = data_ingestion.getDestination(),
destination_section = data_ingestion.getDestinationSection(),
destination_project = data_ingestion.getDestinationProject())
context.logEntry("Data Analyisis created for Data Ingestion %s (ID: %s)" % (str(data_ingestion.getReference()), data_ingestion.getId()))
except Exception as e:
context.logEntry("[WARNING] Exception creating Data Analysis (already created?): " + str(e))
log(''.join(["[WARNING] Exception creating Data Analysis (already created?): ", str(e)]))
data_analysis = None
if data_analysis is not None:
# create input and output lines
context.logEntry("creating input and output lines of data analysis %s (ID: %s)" % (str(data_ingestion.getReference()), data_ingestion.getId()))
for transformation_line in transformation.objectValues(
portal_type=["Data Transformation Resource Line",
"Data Transformation Operation Line"]):
@@ -70,7 +67,6 @@ for line_data_ingestion in portal_catalog(**query_dict):
if(related_line.getParentValue().getReference() == data_ingestion.getReference() and related_line.getParentValue().getSimulationState() == "stopped"):
aggregate_set.update(related_line.getAggregateSet())
related_line.getParentValue().deliver()
context.logEntry("Data Ingestion '%s' delivered. (ID: %s)" % (str(data_ingestion.getReference()), data_ingestion.getId()))
else:
# it is an output line
# create new item based on item_type: data array, stream, descriptor, etc.
@@ -83,7 +79,6 @@ for line_data_ingestion in portal_catalog(**query_dict):
version = '001')
if "Data Descriptor" not in item_type:
item.validate()
context.logEntry(str(item_type) + " %s created (ID: %s)" % (str(data_ingestion.getReference()), data_ingestion.getId()))
aggregate_set = set()
aggregate_set.add(item)
@@ -98,6 +93,5 @@ for line_data_ingestion in portal_catalog(**query_dict):
aggregate_value_set = aggregate_set)
data_analysis.plan()
context.logEntry("Data Anaysis '%s' planned. (ID: %s)" % (str(data_ingestion.getReference()), data_ingestion.getId()))
except Exception as e:
context.logEntry("[ERROR] Error creating Data Analysis for Data Ingestion '%s' (ID: %s): %s" % (data_ingestion.getReference(), data_ingestion.getId(), str(e)))
portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
from Products.ERP5Type.Log import log
for data_analysis in portal_catalog(portal_type = "Data Analysis",
simulation_state = "planned"):
try:
if data_analysis.getSimulationState() == "planned":
data_analysis.start()
context.logEntry("Data Analysis %s started." % data_analysis.getReference())
data_analysis.activate(serialization_tag=str(data_analysis.getUid()))\
.DataAnalysis_executeDataOperation()
except Exception as e:
......
@@ -5,6 +5,5 @@ data_set = portal.data_set_module.get(reference)
if data_set is not None:
version = int(data_set.getVersion()) + 1
data_set.setVersion("%03d" % (version,))
context.logEntry("Dataset %s increased version to %03d" % (data_set.getId(), version))
else:
context.logEntry("Fail to increase dataset version. No dataset found for reference '%s'" % (reference))
from Products.ERP5Type.Log import log
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery, ComplexQuery
portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
context.logEntry("Invalidating objects for reference: " + reference)
portal_type_query = ComplexQuery(Query(portal_type='Data Stream'),
Query(portal_type='Data Array'),
Query(portal_type='Data Descriptor'),
@@ -14,12 +11,13 @@ portal_type_query = ComplexQuery(Query(portal_type='Data Stream'),
logical_operator="OR")
kw_dict = {"query": portal_type_query,
"reference": reference}
for document in portal_catalog(**kw_dict):
if not document.getReference().endswith("_invalid"):
context.logEntry("%s %s (id:%s) invalidated" % (document.getPortalType(), document.getReference(), document.getId()))
document.setReference(document.getReference() + "_invalid")
try:
document.invalidate()
except:
pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI)
try:
for document in portal_catalog(**kw_dict):
if not document.getReference().endswith("_invalid"):
document.setReference(document.getReference() + "_invalid")
try:
document.invalidate()
except:
pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI)
except Exception as e:
context.logEntry("[ERROR] Error invalidating objects for reference '%s': %s" % (reference, str(e)))
from Products.ERP5Type.Log import log
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery, ComplexQuery
portal = context.getPortalObject()
@@ -7,7 +6,6 @@ portal_catalog = portal.portal_catalog
try:
if success: # full split ingestions successfully appended
# invalidate old ingestion objects
context.logEntry("Invalidating old data ingestion objects after split ingestion for reference: " + reference)
data_stream = portal_catalog.getResultValue(
portal_type = 'Data Stream',
reference = reference,
@@ -18,26 +16,21 @@ try:
id = data_stream.getId())
data_stream.setReference(data_stream.getReference() + "_invalid")
data_stream.invalidate()
log("Data Stream invalidated")
if not data_ingestion.getReference().endswith("_invalid"):
data_ingestion.setReference(data_ingestion.getReference() + "_invalid")
log("Data ingestion invalidated")
data_an = portal_catalog.getResultValue(
portal_type = 'Data Analysis',
id = data_stream.getId())
if data_an != None:
data_an.setReference(data_an.getReference() + "_invalid")
log("Data Analysis invalidated")
data_array = portal_catalog.getResultValue(
portal_type = 'Data Array',
id = data_stream.getId())
if data_array != None:
data_array.setReference(data_array.getReference() + "_invalid")
data_array.invalidate()
log("Data Array invalidated")
else: # split ingestion interrupted and restarted
# invalidate draft datastreams and old started data ingestions
context.logEntry("Invalidating old split data ingestions and data streams for reference: " + reference)
for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
simulation_state = "started",
reference = reference):
@@ -50,5 +43,5 @@ try:
if not data_stream.getReference().endswith("_invalid"):
data_stream.setReference(data_stream.getReference() + "_invalid")
except Exception as e:
log("ERROR in ERP5Site_invalidateSplitIngestions: " + str(e))
context.logEntry("ERROR in ERP5Site_invalidateSplitIngestions: " + str(e))
pass
from Products.ERP5Type.Log import log
portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
context.logEntry("Renaming requested: '%s' -> '%s'" % (reference, new_reference))
# check new reference
data_ingestions = portal_catalog(portal_type = "Data Ingestion", reference = new_reference)
if len(data_ingestions) > 0: raise ValueError("Error renaming: new reference '%s' already exists." % new_reference)
......
@@ -33,7 +33,6 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
related_split_ingestions = portal_catalog(portal_type = "Data Ingestion",
reference = data_ingestion.getReference())
if len(related_split_ingestions) == 1:
context.logEntry("Started single ingestion (not split) found: %s - reference: %s" % (data_ingestion.getId(), data_ingestion.getReference()))
data_stream = portal_catalog.getResultValue(
portal_type = 'Data Stream',
reference = data_ingestion.getReference())
@@ -44,26 +43,24 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
data_stream.validate()
if data_ingestion.getSimulationState() == "started":
data_ingestion.stop()
context.logEntry("Data Ingestion %s stopped. Reference: %s." % (data_ingestion.getId(), data_ingestion.getReference()))
# append split ingestions
for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
simulation_state = "started",
id = "%001"):
if not data_ingestion.getReference().endswith("_invalid"):
context.logEntry("Started split ingestion found: %s - reference: %s" % (data_ingestion.getId(), data_ingestion.getReference()))
try:
last_data_stream_id = ""
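# Gather the draft Data Stream chunks of this split ingestion, oldest first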
query = Query(portal_type="Data Stream", reference=data_ingestion.getReference(), validation_state="draft")
result_list = portal_catalog(query=query, sort_on=(('creation_date', 'ascending'),))
full_data_stream = None
for data_stream in result_list:
log("data_stream.getId(): " + data_stream.getId())
log(''.join(["Data stream for split ingestion: ", data_stream.getId()]))
if data_stream.getId() == data_ingestion.getId():
log("is base data stream (001)")
log("It is base data stream (001)")
full_data_stream = data_stream
else:
log("is NOT base data stream (!=001)")
log("It is not base data stream, it is a part (!=001)")
if full_data_stream != None:
log("appending content to base data stream...")
full_data_stream.appendData(data_stream.getData())
@@ -85,7 +82,6 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
else:
ingestion.setReference(ingestion.getReference() + "_invalid")
ingestion.deliver()
context.logEntry("Chunks of split ingestion where appended into Data Stream %s. Reference: %s. Corresponding Data Ingestion stopped." % (full_data_stream.getId(), full_data_stream.getReference()))
except Exception as e:
context.logEntry("ERROR appending split data streams for ingestion: %s - reference: %s." % (data_ingestion.getId(), data_ingestion.getReference()))
context.logEntry(e)
from DateTime import DateTime
from Products.ERP5Type.Log import log
now = DateTime()
now_string = now.strftime('%Y%m%d-%H%M%S-%f')[:-3]
@@ -10,7 +9,6 @@ portal_catalog = portal.portal_catalog
try:
# remove supplier, eof, size and hash from reference
reference = '/'.join(reference.split('/')[1:-3])
context.logEntry("Data Ingestion reference: %s" % reference)
data_ingestion_reference = reference
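# An eof of 'END' denotes a single, non-split ingestion (see the invalidation check below)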
eof = movement_dict.get('eof', 'END') if movement_dict.get('eof', 'END') != "" else 'END'
@@ -19,7 +17,6 @@ try:
extension = movement_dict.get('extension', None)
dataset_reference = movement_dict.get('dataset_reference', None)
data_ingestion_id = '%s_%s_%s_%s' %(supplier, dataset_reference, now_string, eof)
context.logEntry("Data Ingestion ID: %s" % data_ingestion_id)
size = movement_dict.get('size', None) if movement_dict.get('size', None) != "" else None
hash_value = movement_dict.get('hash', None) if movement_dict.get('hash', None) != "" else None
@@ -30,22 +27,6 @@ try:
if data_ingestion is not None:
if data_ingestion.getSimulationState() != 'started':
modified = False
if size != None and size != "":
data_stream = portal_catalog.getResultValue(
portal_type = 'Data Stream',
validation_state = 'validated',
reference = data_ingestion_reference)
if data_stream != None:
if size != data_stream.getSize():
modified = True
elif hash_value != None and hash_value != "" and hash_value != data_stream.getVersion():
modified = True
if not modified:
context.logEntry("An older ingestion for reference %s was already done." % data_ingestion_reference)
context.logEntry("Old file version will be invalidated.")
else:
context.logEntry("Ingestion of modified file. Old version will be invalidated.")
if eof == "END": # if not split (one single ingestion), invalidate old ingestion
portal.ERP5Site_invalidateIngestionObjects(data_ingestion_reference)
@@ -62,7 +43,6 @@ try:
reference = data_ingestion_reference,
start_date = now,
specialise_value_list = specialise_value_list)
context.logEntry("Data Ingestion created. ID: %s - Reference: %s" % (data_ingestion_id, data_ingestion_reference))
property_list = ["title",
"source",
@@ -111,14 +91,12 @@ try:
title = "%s%s" % (data_ingestion.getTitle(), "."+extension if extension != "none" else ""),
reference = data_ingestion_reference)
context.logEntry("Data Stream created. ID: %s - Reference: %s" % (data_stream.getId(), data_ingestion_reference))
input_line.setDefaultAggregateValue(data_stream)
if dataset_reference is not None:
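# Reuse the Data Set for this reference if it exists, otherwise create it on first ingestion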
data_set = portal.data_set_module.get(dataset_reference)
try:
if data_set is None:
context.logEntry("Data Set created.")
data_set = portal.data_set_module.newContent(
portal_type = "Data Set",
title = "Data set " + dataset_reference,
@@ -128,11 +106,8 @@ try:
version = "001"
)
data_set.validate()
else:
context.logEntry("Data Set found for dataset reference: " + dataset_reference)
except:
data_set = portal.data_set_module.get(dataset_reference)
context.logEntry("Data Set found for dataset reference: " + dataset_reference)
if data_set.getReference().endswith("_invalid"):
data_set.setReference(data_set.getReference().replace("_invalid", ""))
if data_set.getValidationState() == "invalidated":
@@ -140,7 +115,6 @@ try:
input_line.setDefaultAggregateValue(data_set)
data_ingestion.start()
context.logEntry("Data Ingestion started.")
data_operation = operation_line.getResourceValue()
data_stream = input_line.getAggregateDataStreamValue()
@@ -150,5 +124,5 @@ try:
return data_operation, {'data_stream': data_stream}
except Exception as e:
context.logEntry("[ERROR] Error during ingestion policy operation: " + str(e))
context.logEntry(''.join(["[ERROR] Error during ingestion policy operation: ", str(e)]))
raise e
@@ -11,8 +11,7 @@ try:
if data_set is None or data_set.getReference().endswith("_invalid"):
return { "status_code": 0, "result": [] }
except Exception as e: # fails because of unauthorized access
context.logEntry("[ERROR] At script getDataStreamList")
context.logEntry("[ERROR] " + str(e))
log("Unauthorized access to getDataStreamList.")
return { "status_code": 1, "error_message": "401 - Unauthorized access. Please check your user credentials and try again." }
data_set = portal.data_set_module.get(data_set_reference)
......
from Products.ERP5Type.Log import log
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery, ComplexQuery
portal = context.getPortalObject()
......
@@ -11,15 +11,13 @@ portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
try:
context.logEntry("Checking if reference %s already exists" % reference)
# remove supplier and eof from reference
data_ingestion_reference = '/'.join(reference.split('/')[1:-3])
EOF = reference.split('/')[-3]
size = reference.split('/')[-2]
if data_ingestion_reference == "":
context.logEntry("[ERROR] Data Ingestion reference is not well formated")
context.logEntry("[ERROR] Data Ingestion reference parameter for ingestionReferenceExists script is not well formated")
raise ValueError("Data Ingestion reference is not well formated")
# check if there are started ingestions for this reference
@@ -41,8 +39,7 @@ try:
return TRUE
else:
# previous ingestion was interrupted
context.log("[WARNING] User has restarted an interrupted ingestion for reference %s." % data_ingestion.getReference())
context.log("[WARNING] Previous split ingestions for reference %s will be discarded and full ingestion restarted." % data_ingestion.getReference())
log(''.join(["[WARNING] User has restarted an interrupted ingestion for reference ", data_ingestion.getReference(), ". Previous split ingestions will be discarded and full ingestion restarted."]))
portal.ERP5Site_invalidateSplitIngestions(data_ingestion.getReference(), success=False)
except:
pass
@@ -60,9 +57,7 @@ try:
if size != "" and size != None:
# this is a modified file
return FALSE
context.logEntry("[ERROR] Data Ingestion reference %s already exists" % data_ingestion_reference)
return TRUE
except Exception as e:
context.logEntry("[ERROR] At script ingestionReferenceExists")
context.logEntry("[ERROR] " + str(e))
context.logEntry(''.join(["[ERROR] At script ingestionReferenceExists: ", str(e)]))
raise e
from Products.ERP5Type.Log import log
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery
portal = context.getPortalObject()
@@ -15,8 +14,6 @@ if object.getPortalType() == "Data Set":
if data_stream.getReference().startswith(data_set.getReference()+'/') and not data_stream.getReference().endswith("_invalid"):
portal.ERP5Site_invalidateIngestionObjects(data_stream.getReference())
data_set.setReference(data_set.getReference() + "_invalid")
context.logEntry("Data set '%s' invalidated." % data_set.getReference())
elif object.getPortalType() == "Data Stream":
data_stream = object
context.logEntry("Invalidating data stream: " + data_stream.getReference())
portal.ERP5Site_invalidateIngestionObjects(data_stream.getReference())