Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
K
klaus_wendelin
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Analytics
Analytics
CI / CD
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Eteri
klaus_wendelin
Commits
1199b007
Commit
1199b007
authored
Jun 28, 2019
by
Eteri
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Revert "new functionality: support measurement intervals"
This reverts commit
b0ddd24d
parent
b0ddd24d
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
31 additions
and
50 deletions
+31
-50
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataAnalysis_executeDataOperation.py
..._skins/erp5_wendelin/DataAnalysis_executeDataOperation.py
+0
-6
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_createDataAnalysisList.py
...al_skins/erp5_wendelin/ERP5Site_createDataAnalysisList.py
+31
-44
No files found.
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/DataAnalysis_executeDataOperation.py
View file @
1199b007
...
@@ -2,7 +2,6 @@ portal = context.getPortalObject()
...
@@ -2,7 +2,6 @@ portal = context.getPortalObject()
operation
=
None
operation
=
None
use
=
None
use
=
None
parameter_dict
=
{}
parameter_dict
=
{}
context
.
checkConsistency
(
fixit
=
True
)
context
.
checkConsistency
(
fixit
=
True
)
initial_product
=
context
.
getSpecialiseValue
(
portal_type
=
"Data Transformation"
).
getResourceValue
()
initial_product
=
context
.
getSpecialiseValue
(
portal_type
=
"Data Transformation"
).
getResourceValue
()
for
analysis_line
in
context
.
objectValues
(
portal_type
=
"Data Analysis Line"
):
for
analysis_line
in
context
.
objectValues
(
portal_type
=
"Data Analysis Line"
):
...
@@ -29,9 +28,6 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
...
@@ -29,9 +28,6 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
parameter
[
base_category
]
=
analysis_line
.
getVariationCategoryItemList
(
parameter
[
base_category
]
=
analysis_line
.
getVariationCategoryItemList
(
base_category_list
=
(
base_category
,))[
0
][
0
]
base_category_list
=
(
base_category
,))[
0
][
0
]
reference
=
analysis_line
.
getReference
()
reference
=
analysis_line
.
getReference
()
parameter
[
"Start Date"
]
=
analysis_line
.
getStartDate
()
parameter
[
"Stop Date"
]
=
analysis_line
.
getStopDate
()
# several lines with same reference will turn the parameter into a list
# several lines with same reference will turn the parameter into a list
if
reference
in
parameter_dict
:
if
reference
in
parameter_dict
:
if
not
isinstance
(
parameter_dict
[
reference
],
list
):
if
not
isinstance
(
parameter_dict
[
reference
],
list
):
...
@@ -39,9 +35,7 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
...
@@ -39,9 +35,7 @@ for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
parameter_dict
[
reference
].
append
(
parameter
)
parameter_dict
[
reference
].
append
(
parameter
)
else
:
else
:
parameter_dict
[
reference
]
=
parameter
parameter_dict
[
reference
]
=
parameter
script_id
=
operation
.
getScriptId
()
script_id
=
operation
.
getScriptId
()
out
=
getattr
(
operation_analysis_line
,
script_id
)(
**
parameter_dict
)
out
=
getattr
(
operation_analysis_line
,
script_id
)(
**
parameter_dict
)
if
out
==
1
:
if
out
==
1
:
...
...
bt5/erp5_wendelin/SkinTemplateItem/portal_skins/erp5_wendelin/ERP5Site_createDataAnalysisList.py
View file @
1199b007
...
@@ -35,7 +35,6 @@ for movement in portal_catalog(query):
...
@@ -35,7 +35,6 @@ for movement in portal_catalog(query):
delivery
=
movement
.
getParentValue
()
delivery
=
movement
.
getParentValue
()
data_supply
=
delivery
.
getSpecialiseValue
(
portal_type
=
"Data Supply"
)
data_supply
=
delivery
.
getSpecialiseValue
(
portal_type
=
"Data Supply"
)
data_supply_list
=
delivery
.
getSpecialiseValueList
(
portal_type
=
"Data Supply"
)
data_supply_list
=
delivery
.
getSpecialiseValueList
(
portal_type
=
"Data Supply"
)
composed_data_supply
=
data_supply
.
asComposedDocument
()
composed_data_supply
=
data_supply
.
asComposedDocument
()
# Get applicable transformation
# Get applicable transformation
transformation_list
=
[]
transformation_list
=
[]
...
@@ -92,7 +91,6 @@ for movement in portal_catalog(query):
...
@@ -92,7 +91,6 @@ for movement in portal_catalog(query):
"Data Transformation Operation Line"
]):
"Data Transformation Operation Line"
]):
resource
=
transformation_line
.
getResourceValue
()
resource
=
transformation_line
.
getResourceValue
()
quantity
=
transformation_line
.
getQuantity
()
quantity
=
transformation_line
.
getQuantity
()
if
isinstance
(
quantity
,
tuple
):
if
isinstance
(
quantity
,
tuple
):
quantity
=
quantity
[
0
]
quantity
=
quantity
[
0
]
# In case of shared data analysis only add additional input lines
# In case of shared data analysis only add additional input lines
...
@@ -101,7 +99,6 @@ for movement in portal_catalog(query):
...
@@ -101,7 +99,6 @@ for movement in portal_catalog(query):
aggregate_set
=
set
()
aggregate_set
=
set
()
# manually add device to every line
# manually add device to every line
aggregate_set
.
add
(
movement
.
getAggregateDevice
())
aggregate_set
.
add
(
movement
.
getAggregateDevice
())
if
transformation_line
.
getPortalType
()
==
\
if
transformation_line
.
getPortalType
()
==
\
"Data Transformation Resource Line"
:
"Data Transformation Resource Line"
:
# at the moment, we only check for positive or negative quantity
# at the moment, we only check for positive or negative quantity
...
@@ -113,7 +110,6 @@ for movement in portal_catalog(query):
...
@@ -113,7 +110,6 @@ for movement in portal_catalog(query):
# then we search for an ingestion line with resource portal type
# then we search for an ingestion line with resource portal type
# Data Product
# Data Product
batch_relative_url
=
movement
.
getAggregateDataIngestionBatch
()
batch_relative_url
=
movement
.
getAggregateDataIngestionBatch
()
if
batch_relative_url
is
not
None
:
if
batch_relative_url
is
not
None
:
related_movement_list
=
portal_catalog
(
related_movement_list
=
portal_catalog
(
portal_type
=
"Data Ingestion Line"
,
portal_type
=
"Data Ingestion Line"
,
...
@@ -124,10 +120,10 @@ for movement in portal_catalog(query):
...
@@ -124,10 +120,10 @@ for movement in portal_catalog(query):
related_movement_list
=
movement
.
getParentValue
().
searchFolder
(
related_movement_list
=
movement
.
getParentValue
().
searchFolder
(
portal_type
=
[
"Data Ingestion Line"
,
"Data Analysis Line"
],
portal_type
=
[
"Data Ingestion Line"
,
"Data Analysis Line"
],
resource_relative_url
=
resource
.
getRelativeUrl
())
resource_relative_url
=
resource
.
getRelativeUrl
())
#
for related_movement in related_movement_list:
for
related_movement
in
related_movement_list
:
#aggregate_set.update(related_movement.getAggregateSet()) ########## do not copy device configurations to lines!!
aggregate_set
.
update
(
related_movement
.
getAggregateSet
())
#
if related_movement.getUse() == "big_data/ingestion/batch":
if
related_movement
.
getUse
()
==
"big_data/ingestion/batch"
:
#
related_movement.getParentValue().deliver()
related_movement
.
getParentValue
().
deliver
()
# create new item based on item_type if it is not already aggregated
# create new item based on item_type if it is not already aggregated
aggregate_type_set
=
set
(
aggregate_type_set
=
set
(
[
portal
.
restrictedTraverse
(
a
).
getPortalType
()
for
a
in
aggregate_set
])
[
portal
.
restrictedTraverse
(
a
).
getPortalType
()
for
a
in
aggregate_set
])
...
@@ -135,15 +131,7 @@ for movement in portal_catalog(query):
...
@@ -135,15 +131,7 @@ for movement in portal_catalog(query):
# create item if it does not exist yet.
# create item if it does not exist yet.
# Except if it is a Data Array Line, then it is currently created by
# Except if it is a Data Array Line, then it is currently created by
# data operation itself (probably this exception is inconsistent)
# data operation itself (probably this exception is inconsistent)
if
item_type
not
in
aggregate_type_set
and
item_type
!=
"Data Array Line"
:
if
transformation_line
.
getPortalType
()
==
"Data Transformation Operation Line"
and
item_type
not
in
aggregate_type_set
:
if
item_type
in
portal
.
getPortalDeviceConfigurationTypeList
()
+
portal
.
getPortalDataConfigurationTypeList
():
item
=
portal
.
portal_catalog
.
getResultValue
(
portal_type
=
item_type
,
#validation_state="validated",
item_project_relative_url
=
data_analysis
.
getDestinationProject
(),
item_source_relative_url
=
data_analysis
.
getSource
())
elif
item_type
!=
"Data Array Line"
:
item
=
portal
.
portal_catalog
.
getResultValue
(
item
=
portal
.
portal_catalog
.
getResultValue
(
portal_type
=
item_type
,
portal_type
=
item_type
,
validation_state
=
"validated"
,
validation_state
=
"validated"
,
...
@@ -164,15 +152,14 @@ for movement in portal_catalog(query):
...
@@ -164,15 +152,14 @@ for movement in portal_catalog(query):
except
AttributeError
:
except
AttributeError
:
pass
pass
aggregate_set
.
add
(
item
.
getRelativeUrl
())
aggregate_set
.
add
(
item
.
getRelativeUrl
())
# find other items such as device configuration and data configuration
# find other items such as device configuration and data configuration
# from data ingestion and data supply
# from data ingestion and data supply
#
composed = data_analysis.asComposedDocument()
composed
=
data_analysis
.
asComposedDocument
()
#
line_list = [l for l in delivery.objectValues(portal_type="Data Ingestion Line")]
line_list
=
[
l
for
l
in
delivery
.
objectValues
(
portal_type
=
"Data Ingestion Line"
)]
#
line_list += [l for l in composed.objectValues(portal_type="Data Supply Line")]
line_list
+=
[
l
for
l
in
composed
.
objectValues
(
portal_type
=
"Data Supply Line"
)]
#
for line in line_list:
for
line
in
line_list
:
#
if line.getResourceValue().getPortalType() == "Data Operation":
if
line
.
getResourceValue
().
getPortalType
()
==
"Data Operation"
:
#
aggregate_set.update(line.getAggregateList())
aggregate_set
.
update
(
line
.
getAggregateList
())
data_analysis_line
=
data_analysis
.
newContent
(
data_analysis_line
=
data_analysis
.
newContent
(
portal_type
=
"Data Analysis Line"
,
portal_type
=
"Data Analysis Line"
,
...
@@ -185,12 +172,12 @@ for movement in portal_catalog(query):
...
@@ -185,12 +172,12 @@ for movement in portal_catalog(query):
quantity_unit
=
transformation_line
.
getQuantityUnit
(),
quantity_unit
=
transformation_line
.
getQuantityUnit
(),
use
=
transformation_line
.
getUse
(),
use
=
transformation_line
.
getUse
(),
aggregate_set
=
aggregate_set
)
aggregate_set
=
aggregate_set
)
# for input lines of first level analysis set causality and specialise
# for input lines of first level analysis set causality and specialise
if
quantity
<
0
and
delivery
.
getPortalType
()
==
"Data Ingestion"
:
if
quantity
<
0
and
delivery
.
getPortalType
()
==
"Data Ingestion"
:
data_analysis_line
.
edit
(
data_analysis_line
.
edit
(
causality_value
=
delivery
,
causality_value
=
delivery
,
specialise_value_list
=
data_supply_list
)
specialise_value_list
=
data_supply_list
)
data_analysis
.
checkConsistency
(
fixit
=
True
)
data_analysis
.
checkConsistency
(
fixit
=
True
)
try
:
try
:
data_analysis
.
start
()
data_analysis
.
start
()
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment