Commit 5645d0da authored Apr 26, 2012 by Julien Muchembled

CMFActivity: move SQLDict specific code out of SQLBase.getProcessableMessageList

parent 31b4bb58

Showing 3 changed files with 64 additions and 76 deletions (+64 -76)

product/CMFActivity/Activity/SQLBase.py   +28 -43
product/CMFActivity/Activity/SQLDict.py   +36 -26
product/CMFActivity/Activity/SQLQueue.py  +0  -7
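The commit replaces a getDuplicateMessageUidList hook with a loader factory: SQLBase.getProcessableMessageLoader now returns a load(line) callable that turns one reserved SQL row into (message, uid, duplicate_uid_list), and SQLDict overrides the factory so that rows sharing the same (path, method_id) are folded into the first message's duplicate list, which is also why SQLQueue's stub can be deleted. The following is a minimal, self-contained Python sketch of that pattern only; Line, Message and the sample queue contents are hypothetical stand-ins, not the ERP5 API.

    from collections import namedtuple

    # Hypothetical stand-ins for the SQL rows and loaded messages used in ERP5.
    Line = namedtuple('Line', 'uid path method_id message')
    Message = namedtuple('Message', 'uid line payload')

    class BaseQueue:
        def getProcessableMessageLoader(self):
            # Base behaviour (SQLBase / SQLQueue): never merge anything.
            def load(line):
                m = Message(uid=line.uid, line=line, payload=line.message)
                return m, line.uid, ()          # no duplicate uids
            return load

    class DictQueue(BaseQueue):
        def getProcessableMessageLoader(self):
            # SQLDict-style behaviour: lines with the same (path, method_id)
            # are duplicates of the first one seen.
            path_and_method_id_dict = {}
            def load(line):
                key = line.path, line.method_id
                original_uid = path_and_method_id_dict.get(key)
                if original_uid is None:
                    path_and_method_id_dict[key] = line.uid
                    m = Message(uid=line.uid, line=line, payload=line.message)
                    return m, line.uid, []      # duplicates collected later
                # Already loaded: report this row as a duplicate of the original.
                return None, original_uid, [line.uid]
            return load

    if __name__ == '__main__':
        lines = [Line(1, '/a', 'reindex', 'm1'),
                 Line(2, '/a', 'reindex', 'm2'),   # duplicate of uid 1
                 Line(3, '/b', 'reindex', 'm3')]
        load = DictQueue().getProcessableMessageLoader()
        duplicates, messages = {}, []
        for line in lines:
            m, uid, uid_list = load(line)
            if m is None:
                duplicates[uid] += uid_list        # merge into the original
            else:
                duplicates[uid] = list(uid_list)
                messages.append(m)
        print([m.uid for m in messages])           # [1, 3]
        print(duplicates)                          # {1: [2], 3: []}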
product/CMFActivity/Activity/SQLBase.py

@@ -168,6 +168,14 @@ class SQLBase(Queue):
     activity_tool.SQLBase_makeMessageListAvailable(table=self.sql_table,
                                                    uid=uid_list)
 
+  def getProcessableMessageLoader(self, activity_tool, processing_node):
+    # do not merge anything
+    def load(line):
+      uid = line.uid
+      m = self.loadMessage(line.message, uid=uid, line=line)
+      return m, uid, ()
+    return load
+
   def getProcessableMessageList(self, activity_tool, processing_node):
     """
       Always true:
@@ -206,24 +214,16 @@ class SQLBase(Queue):
       if line_list:
         self._log(TRACE, 'Reserved messages: %r' % [x.uid for x in line_list])
       return line_list
-    def getDuplicateMessageUidList(line):
-      uid_list = self.getDuplicateMessageUidList(activity_tool=activity_tool,
-        line=line, processing_node=processing_node)
-      if uid_list:
-        self._log(TRACE, 'Reserved duplicate messages: %r' % (uid_list,))
-      return uid_list
     now_date = self.getNow(activity_tool)
     uid_to_duplicate_uid_list_dict = {}
     try:
       result = getReservedMessageList(1)
       if result:
-        line = result[0]
-        uid = line.uid
-        m = self.loadMessage(line.message, uid=uid, line=line)
+        load = self.getProcessableMessageLoader(activity_tool, processing_node)
+        m, uid, uid_list = load(result[0])
         message_list = [m]
-        uid_to_duplicate_uid_list_dict[uid] = getDuplicateMessageUidList(line)
-        group_method_id = line.group_method_id
+        uid_to_duplicate_uid_list_dict[uid] = uid_list
+        group_method_id = m.line.group_method_id
         activity_tool.SQLBase_processMessage(table=self.sql_table, uid=[uid])
         if group_method_id != '\0':
           # Count the number of objects to prevent too many objects.
           cost = m.activity_kw.get('group_method_cost', .01)
@@ -235,39 +235,24 @@ class SQLBase(Queue):
           if limit > 1: # <=> cost * count < 1
             cost *= count
             # Retrieve objects which have the same group method.
-            result = getReservedMessageList(limit, group_method_id)
-            if self.merge_duplicate:
-              path_and_method_id_dict = {(line.path, line.method_id): uid}
-            unreserve_uid_list = []
+            result = iter(getReservedMessageList(limit, group_method_id))
             for line in result:
-              if line.uid == uid:
+              if line.uid in uid_to_duplicate_uid_list_dict:
                 continue
-              # All fetched lines have the same group_method_id and
-              # processing_node.
-              # Their dates are lower-than or equal-to now_date.
-              # We read each line once so lines have distinct uids.
-              # So what remains to be filtered on are path and method_id.
-              if self.merge_duplicate:
-                key = line.path, line.method_id
-                original_uid = path_and_method_id_dict.get(key)
-                if original_uid is not None:
-                  uid_to_duplicate_uid_list_dict[original_uid].append(line.uid)
-                  continue
-                path_and_method_id_dict[key] = line.uid
-                uid_to_duplicate_uid_list_dict[line.uid] = \
-                  getDuplicateMessageUidList(line)
-              if cost < 1:
-                m = self.loadMessage(line.message, uid=line.uid, line=line)
-                cost += len(m.getObjectList(activity_tool)) * \
-                        m.activity_kw.get('group_method_cost', .01)
-                message_list.append(m)
-              else:
-                unreserve_uid_list.append(line.uid)
+              m, uid, uid_list = load(line)
+              if m is None:
+                uid_to_duplicate_uid_list_dict[uid] += uid_list
+                continue
+              uid_to_duplicate_uid_list_dict[uid] = uid_list
+              cost += len(m.getObjectList(activity_tool)) * \
+                      m.activity_kw.get('group_method_cost', .01)
+              message_list.append(m)
+              if cost >= 1:
+                # Unreserve extra messages as soon as possible.
+                self.makeMessageListAvailable(activity_tool=activity_tool,
+                  uid_list=[line.uid for line in result if line.uid != uid])
             activity_tool.SQLBase_processMessage(table=self.sql_table,
-              uid=[m.uid for m in message_list])
-            # Unreserve extra messages as soon as possible.
-            self.makeMessageListAvailable(activity_tool=activity_tool,
-              uid_list=unreserve_uid_list)
+              uid=uid_to_duplicate_uid_list_dict.keys())
         return message_list, group_method_id, uid_to_duplicate_uid_list_dict
     except:
       self._log(WARNING, 'Exception while reserving messages.')
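In the rewritten batching loop above, the result of getReservedMessageList is wrapped in iter(...) so that the "unreserve extra messages" branch can hand the not-yet-visited rows back in one go: the list comprehension [line.uid for line in result if line.uid != uid] drains the shared iterator, and the enclosing for loop then stops on its next step because the iterator is exhausted. A minimal sketch of that control-flow idiom, independent of the ERP5 types:

    def take_until_budget(items, budget):
        """Consume items until the budget is spent; report the leftovers.

        Draining the shared iterator inside the loop both collects the
        leftover items and makes the surrounding for-loop stop, because the
        iterator is exhausted by the time the loop asks for the next item.
        """
        items = iter(items)
        taken, leftover = [], []
        cost = 0
        for item in items:
            taken.append(item)
            cost += 1
            if cost >= budget:
                leftover = [x for x in items]   # drains the same iterator
        return taken, leftover

    print(take_until_budget(range(10), 3))  # ([0, 1, 2], [3, 4, 5, 6, 7, 8, 9])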
product/CMFActivity/Activity/SQLDict.py

@@ -109,32 +109,42 @@ class SQLDict(SQLBase):
     message_list = activity_buffer.getMessageList(self)
     return [m for m in message_list if m.is_registered]
 
-  def getDuplicateMessageUidList(self, activity_tool, line, processing_node):
-    """
-      Reserve unreserved messages matching given line.
-      Return their uids.
-    """
-    try:
-      result = activity_tool.SQLDict_selectDuplicatedLineList(
-        path=line.path,
-        method_id=line.method_id,
-        group_method_id=line.group_method_id,
-        )
-      uid_list = [x.uid for x in result]
-      if uid_list:
-        activity_tool.SQLDict_reserveDuplicatedLineList(
-          processing_node=processing_node, uid=uid_list)
-      else:
-        # Release locks
-        activity_tool.SQLDict_commit()
-    except:
-      # Log
-      LOG('SQLDict', WARNING, 'getDuplicateMessageUidList got an exception',
-          error=sys.exc_info())
-      # Release lock
-      activity_tool.SQLDict_rollback()
-      # And re-raise
-      raise
-    return uid_list
+  def getProcessableMessageLoader(self, activity_tool, processing_node):
+    path_and_method_id_dict = {}
+    def load(line):
+      # getProcessableMessageList already fetch messages with the same
+      # group_method_id, so what remains to be filtered on are path and
+      # method_id.
+      # XXX: What about tag ?
+      path = line.path
+      method_id = line.method_id
+      key = path, method_id
+      uid = line.uid
+      original_uid = path_and_method_id_dict.get(key)
+      if original_uid is None:
+        m = self.loadMessage(line.message, uid=uid, line=line)
+        try:
+          result = activity_tool.SQLDict_selectDuplicatedLineList(
+            path=path,
+            method_id=method_id,
+            group_method_id=line.group_method_id,
+            )
+          uid_list = [x.uid for x in result]
+          if uid_list:
+            activity_tool.SQLDict_reserveDuplicatedLineList(
+              processing_node=processing_node, uid=uid_list)
+          else:
+            activity_tool.SQLDict_commit() # release locks
+        except:
+          self._log(WARNING, 'getDuplicateMessageUidList got an exception')
+          activity_tool.SQLDict_rollback() # release locks
+          raise
+        if uid_list:
+          self._log(TRACE, 'Reserved duplicate messages: %r' % uid_list)
+        path_and_method_id_dict[key] = uid
+        return m, uid, uid_list
+      return None, original_uid, [uid]
+    return load
 
   def hasActivity(self, activity_tool, object, method_id=None, only_valid=None, active_process_uid=None):
     hasMessage = getattr(activity_tool, 'SQLDict_hasMessage', None)
product/CMFActivity/Activity/SQLQueue.py

@@ -82,13 +82,6 @@ class SQLQueue(SQLBase):
                                       processing_node_list=None,
                                       serialization_tag_list=serialization_tag_list)
 
-  def getDuplicateMessageUidList(self, activity_tool, line, processing_node):
-    """
-      Reserve unreserved messages matching given line.
-      Return their uids.
-    """
-    return ()
-
   def hasActivity(self, activity_tool, object, method_id=None, only_valid=None, active_process_uid=None):
     hasMessage = getattr(activity_tool, 'SQLQueue_hasMessage', None)
     if hasMessage is not None: