Commit ae193981 authored by Georgios Dagkakis's avatar Georgios Dagkakis

instance specific plugins removed from plugin root

parent 01409502
from copy import copy
import json
import time
import random
import operator
import datetime
from dream.plugins import plugin
class AvailableCapacitySpreadsheet(plugin.InputPreparationPlugin):
""" Input prepration
read capacity data and update the capacity property of the stations.
"""
def preprocess(self, data):
strptime = datetime.datetime.strptime
capacityData=data['input'].get('available_capacity_spreadsheet', None)
nodes=data['graph']['node']
now = strptime(data['general']['currentDate'], '%Y/%m/%d')
if capacityData:
poolDict={}
for node_id, node in nodes.iteritems():
pool=node.get('pool',None)
if pool and not pool in nodes.keys():
if not poolDict.get(pool,None):
poolDict[pool]=[]
poolDict[pool].append(node_id)
# loop through columns and get those that contain a pool
columnsToErase=[]
for col in range(len(copy(capacityData[0]))):
# if the column contains a pool create new columns with the actual station id
if capacityData[0][col] in poolDict.keys():
pool=capacityData[0][col]
if pool in ['DAY',None,'']:
continue
poolCapacity=[c[col] for c in capacityData]
columnsToErase.append(col)
for stationId in poolDict[pool]:
capacityData[0].append(stationId)
                        for i, row in enumerate(capacityData[1:], start=1):
                            row.append(poolCapacity[i])
            # erase the columns that contain pools, in reverse order so that
            # the indices of the remaining columns are not shifted by the pops
            for col in sorted(columnsToErase, reverse=True):
                for row in capacityData:
                    row.pop(col)
# loop through stations
for col in range(len(capacityData[0])):
stationId=capacityData[0][col]
if stationId in ['DAY',None,'']:
continue
assert stationId in data['graph']['node'].keys(), ('available capacity spreadsheet has station id:',stationId,
'that does not exist in production line')
# for every station read the interval capacity (Monday to Sunday)
intervalCapacity=[]
for row in range(7):
intervalCapacity.append(float(capacityData[row+1][col]))
nodes[stationId]['intervalCapacity']=intervalCapacity
# for every station read the interval capacity exceptions
for row in range(8,len(capacityData)):
# at the first empty line break
if not capacityData[row][0]:
break
                    exceptionDate=strptime(capacityData[row][0], '%Y/%m/%d')
                    dayDifference=(exceptionDate-now).days
assert dayDifference>=0, 'exception date for past day given'
intervalCapacityExceptions=nodes[stationId].get('intervalCapacityExceptions',{})
if not intervalCapacityExceptions:
nodes[stationId]['intervalCapacityExceptions']={}
nodes[stationId]['intervalCapacityExceptions'][str(float(dayDifference))]=float(capacityData[row][col])
# set the interval capacity start
nodes[stationId]['intervalCapacityStart']=now.weekday()
return data
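For orientation, a minimal sketch (not part of the commit; station and pool names are invented) of the spreadsheet layout this plugin assumes: row 0 holds a 'DAY' header plus one station or pool id per column, rows 1-7 hold the Monday-to-Sunday capacities, and any later rows hold dated capacity exceptions.

# Illustrative only: assumed shape of data['input']['available_capacity_spreadsheet'].
exampleCapacityData = [
    ['DAY',        'ASSEMBLY1', 'MACHINING_POOL'],  # station id or pool id per column
    ['Monday',     '8',  '16'],
    ['Tuesday',    '8',  '16'],
    ['Wednesday',  '8',  '16'],
    ['Thursday',   '8',  '16'],
    ['Friday',     '8',  '8'],
    ['Saturday',   '0',  '0'],
    ['Sunday',     '0',  '0'],
    ['2014/05/01', '0',  '0'],  # optional exception row: date in column 0, capacity per column
]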
from datetime import datetime
import random
from pprint import pformat
from dream.plugins import plugin
from dream.plugins.TimeSupport import TimeSupportMixin
class CapacityProjectGantt(plugin.OutputPreparationPlugin, TimeSupportMixin):
def postprocess(self, data):
"""Post process the data for Gantt gadget
"""
        # in this instance there is no need to define the start time
data['general']['dateFormat']='%Y/%m/%d'
self.initializeTimeSupport(data)
date_format = '%d-%m-%Y %H:%M'
for result in data['result']['result_list']:
resultElements = result['elementList']
task_dict = {}
# loop in the results to find CapacityProjects
for element in resultElements:
if element['_class']=="Dream.CapacityProject":
# add the project in the task_dict
task_dict[element['id']] = dict(
id=element['id'],
text='Project %s' % element['id'],
type='project',
color='DodgerBlue',
open=False)
# loop in the project schedule to create the sub-tasks
projectSchedule=element['results'].get('schedule',{})
for record in projectSchedule:
task_dict[element['id']+record['stationId']] = dict(
id=element['id']+record['stationId'],
parent=element['id'],
text=record['stationId'],
start_date=self.convertToRealWorldTime(
record['entranceTime']).strftime(date_format),
stop_date=self.convertToRealWorldTime(
record['exitTime']).strftime(date_format),
open=False,
duration=int(record['exitTime'])-int(record['entranceTime']),
entranceTime=record['entranceTime'],
color='cyan'
)
# return the result to the gadget
result[self.configuration_dict['output_id']] = dict(
time_unit=self.getTimeUnitText(),
task_list=sorted(task_dict.values(),
key=lambda task: (task.get('parent'),
task.get('type') == 'project',
task.get('entranceTime'),task.get('id'))))
return data
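For reference, a hedged sketch (ids, dates and times below are invented) of the task entries the loop above produces: each CapacityProject becomes a parent task, and every record of its schedule becomes a child task keyed by project id + station id.

# Illustrative only: shape of two entries of task_dict for a project 'P1'
# that occupies station 'ASSEMBLY1' between simulation times 1 and 2.
exampleTaskDict = {
    'P1': dict(id='P1', text='Project P1', type='project',
               color='DodgerBlue', open=False),
    'P1ASSEMBLY1': dict(id='P1ASSEMBLY1', parent='P1', text='ASSEMBLY1',
                        start_date='02-05-2014 00:00', stop_date='03-05-2014 00:00',
                        open=False, duration=1, entranceTime=1, color='cyan'),
}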
from copy import copy
import json
import time
import random
import operator
import datetime
from dream.plugins import plugin
class CapacityProjectSpreadsheet(plugin.InputPreparationPlugin):
""" Input prepration
read the capacity projects from the spreadsheet
"""
def preprocess(self, data):
strptime = datetime.datetime.strptime
projectData=data['input'].get('projects_spreadsheet', None)
data['input']['BOM']={}
data['input']['BOM']['productionOrders']=[]
node=data['graph']['node']
now = strptime(data['general']['currentDate'], '%Y/%m/%d')
if projectData:
for row in range(1, len(projectData)):
if projectData[row][0]:
projectId=projectData[row][0]
orderDate=strptime(projectData[row][1], '%Y/%m/%d')
orderDate=(orderDate-now).days
if projectData[row][2]:
dueDate=strptime(projectData[row][2], '%Y/%m/%d')
dueDate=(dueDate-now).days
# if no due date is given set it to 180 (about 6 months)
else:
dueDate=180
assemblySpaceRequirement=float(projectData[row][3])
capacityRequirementDict={}
earliestStartDict={}
# get the number of operations of the project
numberOfOperations=1
i=1
                    while row+i<len(projectData) and not projectData[row+i][0]:
# if a completely empty line is found break
if all(v in [None, ''] for v in projectData[row+i]):
break
numberOfOperations+=1
i+=1
# for every operation get capacityRequirementDict and earliestStartDict
for stationRecord in range(numberOfOperations):
stationId=projectData[row+stationRecord][4]
requiredCapacity=projectData[row+stationRecord][5]
earliestStart=projectData[row+stationRecord][6]
capacityRequirementDict[stationId]=float(requiredCapacity)
if earliestStart:
earliestStart=strptime(earliestStart, '%Y/%m/%d')
earliestStartDict[stationId]=(earliestStart-now).days
# define the order in BOM
data['input']['BOM']['productionOrders'].append({
'orderDate':orderDate,
'dueDate':dueDate,
'assemblySpaceRequirement':assemblySpaceRequirement,
'capacityRequirementDict':capacityRequirementDict,
'earliestStartDict':earliestStartDict,
'id':projectId,
'name':projectId,
'_class':"dream.simulation.applications.CapacityStations.CapacityProject.CapacityProject"
})
return data
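A hedged sketch (not part of the commit; all ids and values are invented) of the row layout this plugin assumes: row 0 is a header, a row with a project id in column 0 opens a project, the rows below it with an empty column 0 list its remaining operations (station id, required capacity, optional earliest start), and a completely empty row ends the project.

# Illustrative only: assumed shape of data['input']['projects_spreadsheet'].
exampleProjectData = [
    ['Project', 'Order date', 'Due date',   'Assembly space', 'Station',   'Capacity', 'Earliest start'],
    ['P1',      '2014/05/01', '2014/06/01', '2',              'ASSEMBLY1', '16',       ''],
    ['',        '',           '',           '',               'PAINT1',    '8',        '2014/05/10'],
    ['',        '',           '',           '',               '',          '',         ''],
]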
from dream.plugins import plugin
from copy import copy
class CapacityProjectStationUtilization(plugin.OutputPreparationPlugin):
""" Output the station utilization metrics in a format compatible with
"""
def postprocess(self, data):
for result in data['result']['result_list']:
ticks = []
utilized_data = []
idle_data = []
options = {
"xaxis": {
"minTickSize": 1,
"ticks": ticks
},
"yaxis": {
"max": 100
},
"series": {
"bars": {
"show": True,
"barWidth": 0.8,
"align": "center"
},
"stack": True
}
}
series = [{
"label": "Utilized",
"data": utilized_data
}, {
"label": "Idle",
"data": idle_data
}
            ]
out = result[self.configuration_dict['output_id']] = {
"series": series,
"options": options
}
i = 0
for obj in result['elementList']:
if obj.get('family') == self.configuration_dict.get('family'):
if obj['results']['meanUtilization']:
utilized_data.append((i, obj['results']['meanUtilization']*100))
idle_data.append((i, (1- obj['results']['meanUtilization'])*100))
ticks.append((i, obj.get('name', self.getNameFromId(data, obj['id']))))
i += 1
return data
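A hedged sketch (values are invented) of the object this plugin stores under the configured output_id: each station of the selected family contributes one (index, percentage) point to the stacked 'Utilized'/'Idle' series and one x-axis tick with its name.

# Illustrative only: output for two stations with 75% and 40% mean utilization.
exampleOutput = {
    "series": [
        {"label": "Utilized", "data": [(0, 75.0), (1, 40.0)]},
        {"label": "Idle",     "data": [(0, 25.0), (1, 60.0)]},
    ],
    "options": {
        "xaxis": {"minTickSize": 1, "ticks": [(0, 'ASSEMBLY1'), (1, 'PAINT1')]},
        "yaxis": {"max": 100},
        "series": {"bars": {"show": True, "barWidth": 0.8, "align": "center"},
                   "stack": True},
    },
}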
from datetime import datetime
import random
from pprint import pformat
from dream.plugins import plugin
from dream.plugins.TimeSupport import TimeSupportMixin
class CapacityStationGantt(plugin.OutputPreparationPlugin, TimeSupportMixin):
def postprocess(self, data):
"""Post process the data for Gantt gadget
"""
        # in this instance there is no need to define the start time
data['general']['dateFormat']='%Y/%m/%d'
self.initializeTimeSupport(data)
date_format = '%d-%m-%Y %H:%M'
for result in data['result']['result_list']:
resultElements = result['elementList']
task_dict = {}
            # loop in the results to find CapacityStations
for element in resultElements:
if element['_class']=="Dream.CapacityStation":
                    # add the station in the task_dict
task_dict[element['id']] = dict(
id=element['id'],
text='Station %s' % element['id'],
type='station',
open=False,
color='DodgerBlue')
                    # loop in the station's detailed work plan to create one sub-task per project
detailedWorkPlan=element['results'].get('detailedWorkPlan',{})
projectIds=[]
for record in detailedWorkPlan:
if record['project'] not in projectIds:
projectIds.append(record['project'])
for projectId in projectIds:
timesInStation=[]
for record in detailedWorkPlan:
if record['project']==projectId:
timesInStation.append(float(record['time']))
entranceTime=int(min(timesInStation))
exitTime=int(max(timesInStation)+1)
task_dict[element['id']+projectId] = dict(
id=element['id']+projectId,
parent=element['id'],
text=projectId,
start_date=self.convertToRealWorldTime(entranceTime).strftime(date_format),
stop_date=self.convertToRealWorldTime(exitTime).strftime(date_format),
open=False,
duration=exitTime-entranceTime,
color='cyan',
entranceTime=entranceTime
)
            # dump the task_dict to a file for debugging purposes
            import json
            with open('taskDict.json', mode='w') as outputJSONFile:
                outputJSONFile.write(json.dumps(task_dict, indent=5))
# return the result to the gadget
result[self.configuration_dict['output_id']] = dict(
time_unit=self.getTimeUnitText(),
task_list=sorted(task_dict.values(),
key=lambda task: (task.get('parent'),
task.get('type') == 'station',
task.get('entranceTime'),
task.get('id'))))
return data
from copy import copy
import json
import time
import random
import operator
from datetime import datetime
from dream.plugins import plugin
class CapacityStationWIPSpreadsheet(plugin.InputPreparationPlugin):
""" Input prepration
read wip-srpeadsheet data and update the wip property of the stations.
"""
def preprocess(self, data):
""" Set the WIP in queue from spreadsheet data.
"""
wipData=data['input'].get('wip_spreadsheet', None)
if wipData:
node=data['graph']['node']
# create an empty wip list in all CapacityStationBuffers
for (node_id,node_data) in node.iteritems():
if node_data['_class']=='dream.simulation.applications.CapacityStations.CapacityStationBuffer.CapacityStationBuffer':
node_data['wip']=[]
# get the number of projects
numberOfProjects=len([pr for pr in wipData[0] if (pr and not pr=='Operation')])
# get the number of operations
numberOfOperations=len([op for op in wipData if (op[0] and not op[0]=='Operation')])
# loop through all the columns>0
for col in range(1,numberOfProjects+1):
projectId=wipData[0][col]
# loop through all the rows>0
for row in range(1,numberOfOperations+1):
stationId=wipData[row][0]
assert stationId in node.keys(), 'wip spreadsheet has station id that does not exist in production line'
requiredCapacity=float(wipData[row][col])
# if the cell has a requiredCapacity>0 create the entity
if requiredCapacity:
capacityBuffer=self.getBuffer(data, stationId)
data['graph']['node'][capacityBuffer]['wip'].append({
"_class": "dream.simulation.applications.CapacityStations.CapacityEntity.CapacityEntity",
"requiredCapacity": requiredCapacity,
"capacityProjectId": projectId,
"name": projectId+'_'+stationId+'_'+str(requiredCapacity)
})
return data
# gets the data and the station id and returns the buffer id of this station
def getBuffer(self,data,stationId):
for (edge_id, edge) in data['graph']['edge'].iteritems():
if edge['destination']==stationId:
return edge['source']
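A hedged sketch (not part of the commit; ids and values are invented) of the spreadsheet layout this plugin assumes: row 0 holds an 'Operation' header followed by one project id per column, column 0 holds the station ids, and every non-zero cell becomes a CapacityEntity in the buffer of that station.

# Illustrative only: assumed shape of data['input']['wip_spreadsheet'].
exampleWipData = [
    ['Operation', 'P1', 'P2'],
    ['ASSEMBLY1', '16', '0'],
    ['PAINT1',    '0',  '8'],
]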
from copy import copy
import json
import time
import random
import operator
import datetime
from dream.plugins import plugin
class CapacityStationsSetAssemblySpace(plugin.InputPreparationPlugin):
""" Input preparation
creates the CapacityStationBuffer and CapacityStationExit for each CapacityStation
"""
def preprocess(self, data):
assemblySpace=int(copy(data['general']).get('assemblySpace',100))
data['general']['extraPropertyDict']={}
data['general']['extraPropertyDict']['assemblySpace']=assemblySpace
return data
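A hedged sketch (values invented) of the resulting data['general'] fragment when the user provides an assembly space of 50:

# Illustrative only: if data['general']['assemblySpace'] arrives as '50',
# the plugin adds the integer value under extraPropertyDict.
exampleGeneralAfter = {'assemblySpace': '50', 'extraPropertyDict': {'assemblySpace': 50}}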
from copy import copy, deepcopy
import json
import time
import random
import operator
import datetime
from dream.plugins import plugin
class CreateCapacityStations(plugin.InputPreparationPlugin):
""" Input preparation
creates the CapacityStationBuffer and CapacityStationExit for each CapacityStation
"""
def preprocess(self, data):
nodes=copy(data['graph']['node'])
originalData=deepcopy(data)
for (stationId, node) in nodes.iteritems():
_class=node['_class']
if _class=='dream.simulation.applications.CapacityStations.CapacityStation.CapacityStation':
nextCapacityStationBufferId=self.getNextCapacityStationBufferId(data,stationId)
# the nextCapacityStationBufferId should point to the buffer
if nextCapacityStationBufferId:
nextCapacityStationBufferId+='_B'
stationName=node['name']
# create the CapacityStationBuffer
bufferName=stationName+'_Buffer'
bufferId=stationId+'_B'
requireFullProject=data['graph']['node'][stationId].pop('requireFullProject',None)
data['graph']['node'][bufferId]={
"_class": "dream.simulation.applications.CapacityStations.CapacityStationBuffer.CapacityStationBuffer",
"name": bufferName,
"wip": [],
'requireFullProject':requireFullProject
}
if requireFullProject:
data['graph']['node'][bufferId]['notRequiredOperations']=self.findNotRequiredOperations(originalData,stationId)
data['graph']['node'][stationId]['notProcessOutsideThreshold']=1
# create an edge that connects the CapacityStationBuffer to the CapacityStation
data['graph']['edge'][bufferId+'_to_'+stationId]={
"source": bufferId,
"destination": stationId,
"data": {},
"_class": "Dream.Edge"
}
# create the CapacityStationExit
exitName=stationName+'_Exit'
exitId=stationId+'_E'
data['graph']['node'][exitId]={
"_class": "dream.simulation.applications.CapacityStations.CapacityStationExit.CapacityStationExit",
"name": exitName,
"nextCapacityStationBufferId": nextCapacityStationBufferId
}
                # create an edge that connects the CapacityStation to the CapacityStationExit
data['graph']['edge'][stationId+'_to_'+exitId]={
"source": stationId,
"destination": exitId,
"data": {},
"_class": "Dream.Edge"
}
                # if the station draws capacity from a pool, read which other stations
                # share its resources and create the sharedResources element
pool = data['graph']['node'][stationId].get('pool','')
if pool:
sharingStations=[]
priority=data['graph']['node'][stationId].get('priority',None)
for other_id, other_node in originalData['graph']['node'].iteritems():
if other_id==stationId:
continue
otherPool=other_node.get('pool','')
if otherPool==pool:
sharingStations.append(other_id)
data['graph']['node'][stationId]['sharedResources']={
"stationIds": sharingStations,
"priority": priority
}
# add also a CapacityStationController
dueDateThreshold=data['general'].get('dueDateThreshold',14)
data['graph']['node']['CSC']={
"dueDateThreshold": dueDateThreshold,
"name": "CSC",
"prioritizeIfCanFinish": 1,
"interval": "1",
"start": "0",
"interruptions": {},
"_class": "dream.simulation.applications.CapacityStations.CapacityStationController.CapacityStationController"
}
return data
# gets the data and the stationId
# returns the successorId and erases the edge
def getNextCapacityStationBufferId(self,data,stationId):
successorId=None
edgeToErase=None
for (edgeId, edge) in data['graph']['edge'].iteritems():
if data['graph']['edge'][edgeId]['source']==stationId:
successorId=data['graph']['edge'][edgeId]['destination']
edgeToErase=edgeId
break
if edgeToErase:
data['graph']['edge'].pop(edgeToErase,None)
return successorId
    # for an assembly station, finds the operations that are not required for it
def findNotRequiredOperations(self,data,stationId):
requiredOperations=[]
nodes=data['graph']['node']
notRequiredOperations=nodes.keys()
for node_id, node in nodes.iteritems():
currentId=node_id
while 1:
successorList = self.getSuccessors(data, currentId)
if not successorList:
break
successorId=successorList[0]
if successorId==stationId:
requiredOperations.append(node_id)
break
currentId=successorId
for element in deepcopy(notRequiredOperations):
if element in requiredOperations:
notRequiredOperations.remove(element)
return notRequiredOperations
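To summarise the transformation above, a hedged sketch (station ids and names are invented) of the graph elements added for a CapacityStation 'ASSEMBLY1' whose original successor was 'PAINT1'; the original 'ASSEMBLY1' -> 'PAINT1' edge is removed by getNextCapacityStationBufferId, the flow is routed through the new buffer and exit, and a single 'CSC' CapacityStationController node is also added (not shown).

# Illustrative only: extra nodes and edges created for station 'ASSEMBLY1'.
exampleCreatedNodes = {
    'ASSEMBLY1_B': {"_class": "dream.simulation.applications.CapacityStations."
                              "CapacityStationBuffer.CapacityStationBuffer",
                    "name": "Assembly1_Buffer", "wip": [], "requireFullProject": None},
    'ASSEMBLY1_E': {"_class": "dream.simulation.applications.CapacityStations."
                              "CapacityStationExit.CapacityStationExit",
                    "name": "Assembly1_Exit",
                    "nextCapacityStationBufferId": "PAINT1_B"},
}
exampleCreatedEdges = {
    'ASSEMBLY1_B_to_ASSEMBLY1': {"source": "ASSEMBLY1_B", "destination": "ASSEMBLY1",
                                 "data": {}, "_class": "Dream.Edge"},
    'ASSEMBLY1_to_ASSEMBLY1_E': {"source": "ASSEMBLY1", "destination": "ASSEMBLY1_E",
                                 "data": {}, "_class": "Dream.Edge"},
}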