Commit cdf92b86 authored by Martin Manchev's avatar Martin Manchev Committed by Nikola Balog

Changes in 'erp5_wendelin' ...

parent a760a195
......@@ -142,7 +142,7 @@ class DataArray(BigFile):
self.getArray().dtype.names = names
security.declareProtected(Permissions.View, 'index_html')
def index_html(self, REQUEST, RESPONSE, format=_MARKER, inline=_MARKER, **kw):
def index_html(self, REQUEST, RESPONSE, fmt=_MARKER, inline=_MARKER, **kw):
"""
Support streaming
"""
......@@ -174,12 +174,12 @@ class DataArray(BigFile):
RESPONSE.write(self.getArray()[tuple(slice_index_list)].tobytes())
return True
range = REQUEST.get_header('Range', None)
request_range = REQUEST.get_header('Request-Range', None)
if request_range is not None:
# Netscape 2 through 4 and MSIE 3 implement a draft version of the Range header.
# Later on, we need to serve a different mime-type as well.
range = request_range
# header_range = request_range
pass
if_range = REQUEST.get_header('If-Range', None)
if range is not None:
ranges = HTTPRangeSupport.parseRange(range)
......@@ -200,7 +200,8 @@ class DataArray(BigFile):
# Date
date = if_range.split( ';')[0]
try: mod_since=long(DateTime(date).timeTime())
except: mod_since=None
except Exception:
mod_since=None
if mod_since is not None:
last_mod = self._data_mtime()
if last_mod is None:
......
......@@ -73,7 +73,7 @@ class DataArrayView(DataArray):
Data Array like view on one or multiple Data Arrays
"""
def initArray(self, shape, dtype):
def initArray(self, shape, dimensional_type):
"""
Not Implemented.
"""
......
......@@ -126,7 +126,7 @@ class DataBucketStream(Document):
PropertySheet.SortIndex
)
def __init__(self, id, **kw):
def __init__(self, identifier, **kw):
    """Initialize the bucket stream.

    Creates the internal bucket and index trees, then delegates standard
    document initialization to Document.__init__.

    identifier -- the document id, forwarded to Document.__init__
    """
    self.initBucketTree()
    self.initIndexTree()
    # Bug fix: the parameter was renamed from `id` to `identifier`, but the
    # body still passed the `id` *builtin function* as the document id.
    Document.__init__(self, identifier, **kw)
......@@ -192,7 +192,7 @@ class DataBucketStream(Document):
except ValueError:
return None
def _getOb(self, id, *args, **kw):
def _getOb(self, identifier, *args, **kw):
    # Unconditionally answers None, so no sub-object is ever resolved by id.
    # NOTE(review): this looks like an override of Zope OFS `_getOb`;
    # buckets are presumably served through dedicated accessors instead —
    # confirm against the rest of DataBucketStream.
    return None
def getBucketByKey(self, key=None):
......
if consuming_analysis_list is None:
consuming_analysis_list=[]
portal = context.getPortalObject()
operation = None
use_list = []
......
"""
This script will return all Data streams for Data set
"""
if limit is None:
limit=[]
catalog_kw = {'portal_type': 'Data Ingestion Line',
'aggregate_uid': context.getUid(),
'limit': limit,
......
"""
Get a chunks of data from a Data Stream, convert it to numpy array
and return proper start and end for next record.
This script assumes the stream has the following format:
{dict1}{dict2}
{dict3}
And it's possible that the last chunk's last line is an incomplete dictionary,
thus correction needed.
"""
import json
chunk_text = ''.join(chunk_list)
#context.log('%s %s %s' %(start, end, len(chunk_text)))
......@@ -24,16 +23,13 @@ for line in line_list:
# must have proper format
assert line.endswith('}')
assert line.startswith('{')
# fix ' -> "
line = line.replace("'", '"')
if line.count('{') > 1:
# multiple concatenated dictionaries in one line; bad format, ignore for now
pass
else:
d = json.loads(line)
# xxx: save this value as a Data Array identified by data_array_reference
pass
# start and end offsets may not match the existing record structure in the stream,
# thus corrections to the start and end offsets are needed, thus we
......
from DateTime import DateTime
from erp5.component.module.DateUtils import addToDate
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery
from Products.ZSQLCatalog.SQLCatalog import Query
portal_catalog = context.getPortalObject().portal_catalog
......@@ -55,6 +55,6 @@ if len(parent_uid_list) != 0:
# we need to wait until there are 2 batches until we can stop it
# TODO: this should be implemented in transformation, not here
continue
data_ingestion.setStopDate(DateTime())
data_ingestion.stop()
# Run consistency checks on the document carried by the state-change payload.
# Renamed the local from `object` to `obj` to avoid shadowing the builtin.
obj = state_change['object']
obj.Base_checkConsistency()
# Run consistency checks on the document carried by the state-change payload.
# NOTE(review): `state_change` appears to be a workflow status-change
# mapping whose 'object' key is the affected document — confirm.
obj = state_change['object']
obj.Base_checkConsistency()
# Run consistency checks on the document carried by the state-change payload.
# Renamed the local from `object` to `obj` to avoid shadowing the builtin.
obj = state_change['object']
obj.Base_checkConsistency()
# Run consistency checks on the document carried by the state-change payload.
# NOTE(review): `state_change` appears to be a workflow status-change
# mapping whose 'object' key is the affected document — confirm.
obj = state_change['object']
obj.Base_checkConsistency()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment