Boxiang Sun / wendelin

Commit 8fcca25e
authored Jun 24, 2015 by Ivan Tyagov
compensate possible offset mistmatch. Do not hide errors.
parent 7313a789
Showing 3 changed files with 32 additions and 17 deletions (+32 / -17)
bt5/erp5_wendelin/ExtensionTemplateItem/portal_components/extension.erp5.Wendelin.py   +21 -7
bt5/erp5_wendelin/TestTemplateItem/portal_components/test.erp5.testWendelin.py         +10 -9
bt5/erp5_wendelin/TestTemplateItem/portal_components/test.erp5.testWendelin.xml        +1  -1
bt5/erp5_wendelin/ExtensionTemplateItem/portal_components/extension.erp5.Wendelin.py (view file @ 8fcca25e)
...
...
@@ -8,23 +8,37 @@ import numpy as np
def DataStream_copyCSVToDataArray(self, chunk_list, start, end, \
                                  data_array_reference=None):
  """
-   Recieve CSV data and transform it to a numpy array of int.
+   Receive CSV data and transform it to a numpy array of floats.
  """
  chunk_text = ''.join(chunk_list)
  data_array = self.portal_catalog.getResultValue( \
                 portal_type = 'Data Array', \
                 reference = data_array_reference, \
                 validation_state = 'validated')

  # compensate possible offset mistmatch
  last_new_line_index = chunk_text.rfind('\n')
  offset_mismatch = len(chunk_text) - last_new_line_index - 1
  start = start - offset_mismatch
  end = end - offset_mismatch
  #self.log('%s %s %s' %(len(chunk_list), chunk_text.rfind('\n'), chunk_list))
  # remove offset line which is to be processed next call
  chunk_text = chunk_text[:len(chunk_text) - offset_mismatch - 1]

  # process left data
  line_list = chunk_text.split('\n')
  size_list = []
  for line in line_list:
    line_item_list = line.split(',')
-   size_list.extend([x.strip() for x in line_item_list])
+   size_list.extend([x for x in line_item_list])
  self.log(size_list)
  # save this value as a numpy array (for testing, only create ZBigArray for one variable)
- size_list = [float(x) for x in size_list if x not in ('',)]
+ size_list = [float(x) for x in size_list]
  ndarray = np.array(size_list)

  data_array = self.portal_catalog.getResultValue( \
                 portal_type = 'Data Array', \
                 reference = data_array_reference, \
                 validation_state = 'validated')
  zarray = data_array.getArray()
  if zarray is None:
    # first time init
...
...
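A chunk handed to this script can end in the middle of a CSV line; the offset compensation above shifts start/end back and drops the trailing partial line so it gets processed on the next call. A minimal standalone sketch of that arithmetic (the helper name split_complete_lines and the sample values are illustrative, not part of the commit):

# Sketch only: same offset arithmetic as DataStream_copyCSVToDataArray,
# without any ERP5/catalog dependencies.
def split_complete_lines(chunk_text, start, end):
  """Return (text up to the last newline, adjusted start, adjusted end)."""
  last_new_line_index = chunk_text.rfind('\n')
  offset_mismatch = len(chunk_text) - last_new_line_index - 1
  # shift the window back so the next call re-reads the partial line
  start = start - offset_mismatch
  end = end - offset_mismatch
  # keep only complete lines; the trailing fragment is dropped here
  chunk_text = chunk_text[:len(chunk_text) - offset_mismatch - 1]
  return chunk_text, start, end

if __name__ == '__main__':
  chunk = "1,2,3\n4,5,6\n7,8"   # ends mid-line
  print(split_complete_lines(chunk, 100, 115))
  # -> ('1,2,3\n4,5,6', 97, 112)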
bt5/erp5_wendelin/TestTemplateItem/portal_components/test.erp5.testWendelin.py (view file @ 8fcca25e)
...
...
@@ -50,7 +50,7 @@ class Test(ERP5TypeTestCase):
    """
    # here, you can create the categories and objects your test will depend on
    pass

  def test_0_import(self):
    """
      Test we can import certain libraries but still failure to do so should be a
...
...
@@ -71,15 +71,10 @@ class Test(ERP5TypeTestCase):
    # simulate fluentd by setting proper values in REQUEST
    reference = getRandomString()
    request.method = 'POST'
    number_string = ','.join([str(x) for x in range(11)])
    number_string_list = [number_string] * 10000
    real_data = '\n'.join(number_string_list)
    data_chunk = msgpack.packb([0, real_data], use_bin_type=True)
    request.set('reference', reference)
    request.set('data_chunk', data_chunk)

    # create ingestion policy
    ingestion_policy = portal.portal_ingestion_policies.newContent( \
      portal_type = 'Ingestion Policy',
...
...
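The test simulates what fluentd would POST: a msgpack-encoded [timestamp, data] pair placed in the REQUEST. A small round-trip sketch of that payload outside ERP5 (assuming the msgpack-python package; the raw=False decode flag needs msgpack >= 0.5 and is not part of the commit):

# Sketch only: build and decode the fluentd-style chunk used by the test.
import msgpack

number_string = ','.join([str(x) for x in range(11)])   # '0,1,2,...,10'
real_data = '\n'.join([number_string] * 10000)           # 10000 CSV lines

data_chunk = msgpack.packb([0, real_data], use_bin_type=True)
timestamp, payload = msgpack.unpackb(data_chunk, raw=False)
assert timestamp == 0 and payload == real_data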
@@ -115,6 +110,10 @@ class Test(ERP5TypeTestCase):
    self.tic()

    # do real ingestion call
    request.method = 'POST'
    data_chunk = msgpack.packb([0, real_data], use_bin_type=True)
    request.set('reference', reference)
    request.set('data_chunk', data_chunk)
    ingestion_policy.ingest()

    # ingestion handler script saves new data using new line so we
...
...
@@ -123,8 +122,7 @@ class Test(ERP5TypeTestCase):
    self.assertEqual(real_data, data_stream_data)

    # try sample transformation
-   reference = 'test-data-array- %s' % getRandomString()
+   reference = 'test-data-array- %s' % reference
    data_array = portal.data_array_module.newContent(
      portal_type = 'Data Array',
      reference = reference,
...
...
@@ -133,16 +131,19 @@ class Test(ERP5TypeTestCase):
    self.tic()

    data_stream.DataStream_transform( \
-     chunk_length = 5001, \
+     chunk_length = 52001, \
      transform_script_id = 'DataStream_copyCSVToDataArray',
      data_array_reference = reference)
    self.tic()

    # test some numpy operations
    zarray = data_array.getArray()
    np.average(zarray)

    # XXX: test that extracted array is same as input one
    self.assertNotEqual(None, zarray)
    #self.assertEqual(1, zarray.shape)

  def test_02_Examples(self):
    """
...
...
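The XXX comment above leaves the input/output comparison open. One way it could be written, sketched outside the test harness (the expected-array construction is an assumption, and whether the shapes match exactly depends on how the trailing partial line of each chunk is re-read):

# Sketch only: rebuild the expected float array from real_data and compare
# it to the array extracted into the Data Array.
import numpy as np

number_string = ','.join([str(x) for x in range(11)])
real_data = '\n'.join([number_string] * 10000)
expected = np.array([float(x)
                     for line in real_data.split('\n')
                     for x in line.split(',')])
print(expected.shape)   # (110000,)
# in the test this could become:
#   np.testing.assert_array_equal(zarray[:], expected)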
bt5/erp5_wendelin/TestTemplateItem/portal_components/test.erp5.testWendelin.xml (view file @ 8fcca25e)
...
...
@@ -50,7 +50,7 @@
            <string>W: 59, 4: Unused variable \'scipy\' (unused-variable)</string>
            <string>W: 61, 4: Unused variable \'pandas\' (unused-variable)</string>
            <string>W: 60, 4: Unused variable \'sklearn\' (unused-variable)</string>
-           <string>W:112, 4: Unused variable \'data_supply\' (unused-variable)</string>
+           <string>W:107, 4: Unused variable \'data_supply\' (unused-variable)</string>
          </tuple>
        </value>
      </item>
...
...