Merge remote-tracking branch 'github/master' into development

This commit is contained in:
Joeri Exelmans 2025-07-23 13:59:00 +02:00
commit 6b5931343d
56 changed files with 2175 additions and 21 deletions

View file

@ -53,7 +53,7 @@ class CDAPI:
return self.bottom.read_outgoing_elements(self.m, type_name)[0]
def is_direct_subtype(self, super_type_name: str, sub_type_name: str):
return sub_type_name in self.direct_sub_types[super_type]
return sub_type_name in self.direct_sub_types[super_type_name]
def is_direct_supertype(self, sub_type_name: str, super_type_name: str):
return super_type_name in self.direct_super_types[sub_type_name]
@ -83,3 +83,6 @@ class CDAPI:
result = self.find_attribute_type(supertype, attr_name)
if result != None:
return result
def get_type(self, type_name: str):
return next(k for k, v in self.type_model_names.items() if v == type_name)

View file

@ -10,7 +10,8 @@ from uuid import UUID
from typing import Optional
from util.timer import Timer
NEXT_ID = 0
NEXT_LINK_ID = 0
NEXT_OBJ_ID = 0
# Models map names to elements
# This builds the inverse mapping, so we can quickly lookup the name of an element
@ -145,7 +146,7 @@ class ODAPI:
typ = self.cdapi.get_type(type_name)
types = set(typ) if not include_subtypes else self.cdapi.transitive_sub_types[type_name]
for type_of_obj in self.bottom.read_outgoing_elements(obj, "Morphism"):
if type_of_obj in types:
if self.get_name(type_of_obj) in types:
return True
return False
@ -153,10 +154,9 @@ class ODAPI:
self.bottom.delete_element(obj)
self.__recompute_mappings()
# Does the class of the object have the given attribute?
# Does the object have the given attribute?
def has_slot(self, obj: UUID, attr_name: str):
class_name = self.get_name(self.get_type(obj))
return self.od.get_attr_link_name(class_name, attr_name) != None
return self.od.get_slot_link(obj, attr_name) != None
def get_slots(self, obj: UUID) -> list[str]:
return [attr_name for attr_name, _ in self.od.get_slots(obj)]
@ -247,7 +247,7 @@ class ODAPI:
raise Exception("Unimplemented type "+value)
def create_link(self, link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID):
global NEXT_ID
global NEXT_LINK_ID
types = self.bottom.read_outgoing_elements(self.mm, assoc_name)
if len(types) == 0:
raise Exception(f"No such association: '{assoc_name}'")
@ -255,13 +255,18 @@ class ODAPI:
raise Exception(f"More than one association exists with name '{assoc_name}' - this means the MM is invalid.")
typ = types[0]
if link_name == None:
link_name = f"__{assoc_name}{NEXT_ID}"
NEXT_ID += 1
link_name = f"__{assoc_name}{NEXT_LINK_ID}"
NEXT_LINK_ID += 1
link_id = self.od._create_link(link_name, typ, src, tgt)
self.__recompute_mappings()
return link_id
def create_object(self, object_name: Optional[str], class_name: str):
global NEXT_OBJ_ID
if object_name == None:
object_name = f"__{class_name}{NEXT_OBJ_ID}"
NEXT_OBJ_ID += 1
obj = self.od.create_object(object_name, class_name)
self.__recompute_mappings()
return obj
@ -279,6 +284,7 @@ def bind_api_readonly(odapi):
'get_target': odapi.get_target,
'get_source': odapi.get_source,
'get_slot': odapi.get_slot,
'get_slots': odapi.get_slots,
'get_slot_value': odapi.get_slot_value,
'get_slot_value_default': odapi.get_slot_value_default,
'get_all_instances': odapi.get_all_instances,
@ -287,6 +293,7 @@ def bind_api_readonly(odapi):
'get_outgoing': odapi.get_outgoing,
'get_incoming': odapi.get_incoming,
'has_slot': odapi.has_slot,
'is_instance': odapi.is_instance,
}
return funcs

View file

@ -93,8 +93,8 @@ def parse_od(state,
return (_Code(str(token[1:-1])), token.line) # strip the ``
def BYTES(self, token):
# return (bytes(token[2:-1], "utf-8"), token.line) # Strip b"" or b''
return (bytes(token[2:-1], "utf-8"), token.line) # Strip b"" or b''
# Strip b"" or b'', and make \\ back to \ (happens when reading the file as a string)
return (token[2:-1].encode().decode('unicode_escape').encode('raw_unicode_escape'), token.line) # Strip b"" or b''
def INDENTED_CODE(self, token):
skip = 4 # strip the ``` and the following newline character

View file

@ -0,0 +1,47 @@
import os
# Todo: remove src.backend.muMLE from the imports
from state.devstate import DevState
from bootstrap.scd import bootstrap_scd
from concrete_syntax.textual_od.parser import parse_od
from api.od import ODAPI
from concrete_syntax.textual_od.renderer import render_od as od_renderer
from concrete_syntax.plantuml import make_url as plant_url, renderer as plant_renderer
from concrete_syntax.graphviz import make_url as graphviz_url, renderer as graphviz_renderer
class FtgPmPt:
    """Facade around a combined FTG+PM+PT model.

    Builds the merged meta-model (PM design + PM runtime + PT design) at
    construction time and offers helpers to load and render object-diagram
    models against it.
    """

    def __init__(self, name: str):
        # Fresh in-memory state plus the bootstrapped SCD meta-meta-model.
        self.state = DevState()
        self.scd_mmm = bootstrap_scd(self.state)
        self.meta_model = self.load_metamodel()
        self.model = None   # set by load_model()
        self.odapi = None   # set by load_model()
        self.name = name

    @staticmethod
    def read_file(file_name):
        """Read a file relative to this module's directory and return its text."""
        with open(os.path.join(os.path.dirname(__file__), file_name)) as file:
            return file.read()

    def load_metamodel(self):
        """Concatenate the PM design, PM runtime and PT design meta-model
        sources and parse the combined text against the SCD meta-meta-model."""
        mm_cs = self.read_file("pm/metamodels/mm_design.od")
        mm_rt_cs = mm_cs + self.read_file("pm/metamodels/mm_runtime.od")
        mm_total = mm_rt_cs + self.read_file("pt/metamodels/mm_design.od")
        return parse_od(self.state, m_text=mm_total, mm=self.scd_mmm)

    def load_model(self, m_text: str | None = None):
        """Parse *m_text* (an empty model when None or empty) against the
        merged meta-model and build an ODAPI for the result."""
        m_text = "" if not m_text else m_text
        self.model = parse_od(self.state, m_text=m_text, mm=self.meta_model)
        self.odapi = ODAPI(self.state, self.model, self.meta_model)

    def render_od(self):
        """Render the current model in the textual OD concrete syntax."""
        return od_renderer(self.state, self.model, self.meta_model, hide_names=False)

    def render_plantuml_object_diagram(self):
        """Print a PlantUML URL rendering the model as an object diagram."""
        print(plant_url.make_url(plant_renderer.render_package(
            self.name, plant_renderer.render_object_diagram(self.state, self.model, self.meta_model)))
        )

    def render_graphviz_object_diagram(self):
        """Print a Graphviz URL rendering the model as an object diagram."""
        print(graphviz_url.make_url(graphviz_renderer.render_object_diagram(self.state, self.model, self.meta_model)))

View file

@ -0,0 +1,68 @@
import copy
import pickle
from api.od import ODAPI
from examples.ftg_pm_pt.helpers.composite_activity import execute_composite_workflow
def serialize(obj):
    """Serialize *obj* to a bytes payload using pickle."""
    payload = pickle.dumps(obj)
    return payload
def deserialize(obj):
    """Inverse of serialize(): rebuild a Python object from pickled bytes."""
    value = pickle.loads(obj)
    return value
def create_activity_links(od: ODAPI, activity, prev_element, ctrl_port, end_trace=None,
                          relation_type="pt_IsFollowedBy"):
    """Wire a newly created trace activity into the process trace.

    Links *activity* to the control port it relates to, chains it after
    *prev_element* via *relation_type*, and — when *end_trace* is given —
    re-attaches the trace's end marker after the activity.
    """
    od.create_link(None, "pt_RelatesTo", activity, ctrl_port)
    od.create_link(None, relation_type, prev_element, activity)
    if end_trace:
        od.create_link(None, "pt_IsFollowedBy", activity, end_trace)
def extract_input_data(od: ODAPI, activity):
    """Collect and deserialize the input artefact data for *activity*.

    For each "pm_HasDataIn" port of the activity, navigates
    port <- pm_DataFlowOut <- artefact <- pm_Of <- artefact state
    and returns a dict {port name: deserialized "data" slot value}.
    """
    input_data = {}
    for has_data_in in od.get_outgoing(activity, "pm_HasDataIn"):
        data_port = od.get_target(has_data_in)
        # From the data port, follow the data flow back to the feeding
        # artefact, then up to its runtime state which holds the payload.
        artefact_state = od.get_source(od.get_incoming(od.get_source(od.get_incoming(data_port, "pm_DataFlowOut")[0]), "pm_Of")[0])
        input_data[od.get_name(data_port)] = deserialize(od.get_slot_value(artefact_state, "data"))
    return input_data
def execute_activity(od: ODAPI, globs, activity, input_data):
    """Run the Python function attached to *activity* and return its result.

    The function name is read from the activity's "func" slot and looked up
    in *globs*. A zero-argument function is called without arguments.
    """
    # Deep-copy so the called function cannot mutate the caller's
    # input_data — the originals are still needed for the process trace.
    args = copy.deepcopy(input_data)
    func = globs[od.get_slot_value(activity, "func")]
    if func.__code__.co_argcount > 0:
        return func(args)
    return func()
def handle_artefact(od: ODAPI, activity, artefact_type, relation_type, data_port=None, data=None,
                    direction="DataFlowIn"):
    """Create a process-trace artefact and wire it to *activity*.

    Fix: the *artefact_type* parameter was accepted but ignored — the created
    object's type was hard-coded to "pt_Artefact". It is now honoured
    (backward-compatible: all existing callers pass "pt_Artefact").

    When *data_port* is given, the artefact is also linked to the
    process-model artefact behind that port (with version chaining), and when
    *data* is given the payload is stored on both the runtime state of the
    process-model artefact and the trace artefact itself.
    """
    artefact = od.create_object(None, artefact_type)
    if 'pt_Consumes' == relation_type:
        # Consumption links point artefact -> activity; all others point
        # activity -> artefact.
        od.create_link(None, relation_type, artefact, activity)
    else:
        od.create_link(None, relation_type, activity, artefact)
    if data_port:
        # Navigate from the data port to the process-model artefact, in the
        # direction matching consumption vs. production.
        flow_direction = od.get_incoming if relation_type == 'pt_Consumes' else od.get_outgoing
        ass_side = od.get_source if relation_type == 'pt_Consumes' else od.get_target
        pm_artefact = ass_side(flow_direction(data_port, f"pm_{direction}")[0])
        # Chain artefact versions: the previous head has no newer version yet.
        prev_artefact = find_previous_artefact(od, od.get_incoming(pm_artefact, "pt_BelongsTo"))
        if prev_artefact:
            od.create_link(None, "pt_PrevVersion", artefact, prev_artefact)
        od.create_link(None, "pt_BelongsTo", artefact, pm_artefact)
        if data is not None:
            artefact_state = od.get_source(od.get_incoming(pm_artefact, "pm_Of")[0])
            od.set_slot_value(artefact_state, "data", serialize(data))
            od.set_slot_value(artefact, "data", serialize(data))
def find_previous_artefact(od: ODAPI, linked_artefacts):
    """Return the first trace artefact among *linked_artefacts* that has no
    incoming "pt_PrevVersion" link (i.e. the current version head), or None."""
    for belongs_link in linked_artefacts:
        candidate = od.get_source(belongs_link)
        if not od.get_incoming(candidate, "pt_PrevVersion"):
            return candidate
    return None
def update_control_states(od: ODAPI, activity, ctrl_out):
    """Deactivate every control-in port of *activity* and activate *ctrl_out*."""
    for has_ctrl_in in od.get_outgoing(activity, "pm_HasCtrlIn"):
        # port -> its runtime state (via pm_Of), then clear the "active" flag.
        od.set_slot_value(od.get_source(od.get_incoming(od.get_target(has_ctrl_in), "pm_Of")[0]), "active", False)
    od.set_slot_value(od.get_source(od.get_incoming(ctrl_out, "pm_Of")[0]), "active", True)

View file

@ -0,0 +1,272 @@
from uuid import UUID
from api.od import ODAPI
from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt
from examples.ftg_pm_pt.runner import FtgPmPtRunner
def find_previous_artefact(od: ODAPI, linked_artefacts):
    """Return the version head among *linked_artefacts*: the first source
    artefact without an incoming "pt_PrevVersion" link, or None."""
    sources = (od.get_source(link) for link in linked_artefacts)
    for artefact in sources:
        if len(od.get_incoming(artefact, "pt_PrevVersion")) == 0:
            return artefact
    return None
def create_activity_links(od: ODAPI, activity, prev_element, ctrl_port, end_trace=None,
                          relation_type="pt_IsFollowedBy"):
    """Wire a trace activity into the process trace: relate it to its control
    port, chain it after *prev_element*, and optionally re-attach *end_trace*
    after it. (Duplicated from the helpers module.)"""
    od.create_link(None, "pt_RelatesTo", activity, ctrl_port)
    od.create_link(None, relation_type, prev_element, activity)
    if end_trace:
        od.create_link(None, "pt_IsFollowedBy", activity, end_trace)
def get_workflow_path(od: ODAPI, activity: UUID):
    """Return the "subworkflow_path" slot of a composite *activity* — the
    path of the object-diagram file holding its inner workflow."""
    return od.get_slot_value(activity, "subworkflow_path")
def get_workflow(workflow_path: str):
    """Read the textual workflow model stored at *workflow_path* and return it."""
    with open(workflow_path, "r") as workflow_file:
        contents = workflow_file.read()
    return contents
############################
def get_runtime_state(od: ODAPI, design_obj: UUID):
    """Return the runtime state object linked to *design_obj* via "pm_Of",
    or None (with a diagnostic print) when it has none."""
    incoming = od.get_incoming(design_obj, "pm_Of")
    if incoming:
        return od.get_source(incoming[0])
    print(f"Design object '{od.get_name(design_obj)}' has no runtime state.")
    return None
def get_source_incoming(od: ODAPI, obj: UUID, link_name: str):
    """Return the source of the first incoming *link_name* link of *obj*,
    or None (with a diagnostic print) when there is none."""
    links = od.get_incoming(obj, link_name)
    if len(links) == 0:
        # Fix: the object name was missing its closing quote in the message
        # (compare the message in get_runtime_state).
        print(f"Object '{od.get_name(obj)}' has no incoming links of type '{link_name}'.")
        return None
    return od.get_source(links[0])
def get_target_outgoing(od: ODAPI, obj: UUID, link_name: str):
    """Return the target of the first outgoing *link_name* link of *obj*,
    or None (with a diagnostic print) when there is none."""
    links = od.get_outgoing(obj, link_name)
    if len(links) == 0:
        # Fix: the object name was missing its closing quote in the message
        # (compare the message in get_runtime_state).
        print(f"Object '{od.get_name(obj)}' has no outgoing links of type '{link_name}'.")
        return None
    return od.get_target(links[0])
def set_control_port_value(od: ODAPI, port: UUID, value: bool):
    """Set the "active" flag on the runtime state behind control *port*."""
    state = get_runtime_state(od, port)
    od.set_slot_value(state, "active", value)
def set_artefact_data(od: ODAPI, artefact: UUID, value: bytes):
    """Store *value* in the "data" slot of *artefact*.

    Process-model artefacts keep their payload on a separate runtime state
    object; trace artefacts carry the slot themselves.
    """
    state = artefact
    # Only the process model of the artefact contains a runtime state
    if od.get_type_name(state) == "pm_Artefact":
        state = get_runtime_state(od, artefact)
    od.set_slot_value(state, "data", value)
def get_artefact_data(od: ODAPI, artefact):
    """Return the "data" slot of *artefact*.

    Mirrors set_artefact_data: process-model artefacts store the payload on
    their runtime state object; trace artefacts hold it directly.
    """
    state = artefact
    # Only the process model of the artefact contains a runtime state
    if od.get_type_name(state) == "pm_Artefact":
        state = get_runtime_state(od, artefact)
    return od.get_slot_value(state, "data")
############################
def set_workflow_control_source(workflow_model: FtgPmPt, ctrl_port_name: str, composite_linkage: dict):
    """Activate the inner-workflow control source port that the composite
    activity's *ctrl_port_name* maps to via *composite_linkage*."""
    od = workflow_model.odapi
    source_port_name = composite_linkage[ctrl_port_name]
    source_port = od.get(source_port_name)
    set_control_port_value(od, source_port, True)
def set_workflow_artefacts(act_od: ODAPI, activity: UUID, workflow_model: FtgPmPt, composite_linkage: dict):
    """Copy the input artefact data of the composite *activity* into the
    corresponding source-port artefacts of the inner workflow.

    *composite_linkage* maps activity data-port names to inner-workflow
    port names.
    """
    for data_port in [act_od.get_target(data_in) for data_in in act_od.get_outgoing(activity, "pm_HasDataIn")]:
        # Get the data source port of the inner workflow
        data_port_name = act_od.get_name(data_port)
        source_port_name = composite_linkage[data_port_name]
        source_port = workflow_model.odapi.get(source_port_name)
        # Get the artefact that is linked to the data port of the activity
        act_artefact = get_source_incoming(act_od, data_port, "pm_DataFlowOut")
        # Get the data of the artefact
        artefact_data = get_artefact_data(act_od, act_artefact)
        # Get the artefact that is linked to the data port of the inner workflow
        workflow_artefact = get_target_outgoing(workflow_model.odapi, source_port, "pm_DataFlowIn")
        set_artefact_data(workflow_model.odapi, workflow_artefact, artefact_data)
def get_activity_port_from_inner_port(composite_linkage: dict, port_name: str):
    """Reverse lookup in *composite_linkage*: return the activity port name
    mapped to the inner-workflow *port_name*, or None when absent."""
    return next(
        (activity_port
         for activity_port, inner_port in composite_linkage.items()
         if inner_port == port_name),
        None,
    )
def execute_composite_workflow(od: ODAPI, activity: UUID, ctrl_port: UUID, composite_linkage: dict,
                               packages: dict | None, matched=None):
    """Execute the inner workflow of a composite *activity* and fold its
    outcome back into the outer model and process trace.

    *composite_linkage* maps, per activity name, outer activity port names to
    inner-workflow port names. *matched* is a callable resolving rule-matched
    elements ("prev_trace_element", "end_trace") — assumes those names were
    bound by the calling rewrite rule.
    """
    activity_name = od.get_slot_value(activity, "name")
    # First get the path of the object diagram file that contains the inner workflow of the activity
    workflow_path = get_workflow_path(od, activity)
    # Read the object diagram file
    workflow = get_workflow(workflow_path)
    # Create an FtgPmPt object
    workflow_model = FtgPmPt(activity_name)
    # Load the workflow to the object
    workflow_model.load_model(workflow)
    # Set the correct control source port of the workflow to active
    set_workflow_control_source(workflow_model, od.get_name(ctrl_port), composite_linkage[activity_name])
    # If a data port is linked, set the data of the artefact
    set_workflow_artefacts(od, activity, workflow_model, composite_linkage[activity_name])
    # Create an FtgPmPtRunner object with the FtgPmPt object
    workflow_runner = FtgPmPtRunner(workflow_model)
    # Set the packages if present
    workflow_runner.set_packages(packages, is_path=False)
    # Run the FtgPmPtRunner (is a subprocess necessary? This makes it more complicated because now we have direct access to the object)
    workflow_runner.run()
    # Contains all the ports of the inner workflow -> map back to the activity ports, and so we can set the correct
    # Control ports to active and also set the data artefacts correctly
    ports = extract_inner_workflow(workflow_model.odapi)
    start_act = None
    end_act = None
    # Dispatch per port kind; control source/sink create the start/end trace
    # activities that the data handlers then attach artefacts to.
    for port in [port for port in ports if port]:
        port_name = workflow_model.odapi.get_name(port)
        activity_port_name = get_activity_port_from_inner_port(composite_linkage[activity_name], port_name)
        activity_port = od.get(activity_port_name)
        match workflow_model.odapi.get_type_name(port):
            case "pm_CtrlSource":
                start_act = handle_control_source(od, activity_port, matched("prev_trace_element"))
            case "pm_CtrlSink":
                end_act = handle_control_sink(od, activity_port, start_act, matched("end_trace"))
            case "pm_DataSource":
                handle_data_source(od, activity_port, start_act)
            case "pm_DataSink":
                handle_data_sink(od, workflow_model.odapi, activity_port, port, end_act)
def handle_control_source(od: ODAPI, port, prev_trace_elem):
    """Deactivate *port*, create a trace start-activity chained after
    *prev_trace_elem*, and return it."""
    set_control_port_value(od, port, False)
    start_activity = od.create_object(None, "pt_StartActivity")
    create_activity_links(od, start_activity, prev_trace_elem, port)
    return start_activity
def handle_control_sink(od: ODAPI, port, start_act, end_trace):
    """Activate *port*, create a trace end-activity chained after *start_act*
    (re-attaching *end_trace* after it), and return it."""
    set_control_port_value(od, port, True)
    end_activity = od.create_object(None, "pt_EndActivity")
    create_activity_links(od, end_activity, start_act, port, end_trace)
    return end_activity
def handle_data_source(od: ODAPI, port, start_activity):
    """Record consumption of the artefact behind data-source *port* in the
    process trace, copying its payload and chaining artefact versions."""
    pt_artefact = od.create_object(None, "pt_Artefact")
    od.create_link(None, "pt_Consumes", pt_artefact, start_activity)
    # Process-model artefact feeding the port, and its current payload.
    pm_artefact = get_source_incoming(od, port, "pm_DataFlowOut")
    pm_artefact_data = get_artefact_data(od, pm_artefact)
    set_artefact_data(od, pt_artefact, pm_artefact_data)
    # Version-chain against the previous trace artefact of the same pm artefact.
    prev_pt_artefact = find_previous_artefact(od, od.get_incoming(pm_artefact, "pt_BelongsTo"))
    if prev_pt_artefact:
        od.create_link(None, "pt_PrevVersion", pt_artefact, prev_pt_artefact)
    od.create_link(None, "pt_BelongsTo", pt_artefact, pm_artefact)
def handle_data_sink(act_od: ODAPI, work_od: ODAPI, act_port, work_port, end_activity):
    """Propagate an artefact produced by the inner workflow (*work_od*, at
    *work_port*) to the outer model (*act_od*, at *act_port*) and record the
    production in the outer process trace."""
    pt_artefact = act_od.create_object(None, "pt_Artefact")
    act_od.create_link(None, "pt_Produces", end_activity, pt_artefact)
    # Payload produced inside the inner workflow.
    work_artefact = get_source_incoming(work_od, work_port, "pm_DataFlowOut")
    work_artefact_data = get_artefact_data(work_od, work_artefact)
    # Copy it onto the outer artefact and the new trace artefact.
    act_artefact = get_target_outgoing(act_od, act_port, "pm_DataFlowIn")
    set_artefact_data(act_od, act_artefact, work_artefact_data)
    set_artefact_data(act_od, pt_artefact, work_artefact_data)
    # Version-chain against the previous trace artefact of the outer artefact.
    prev_pt_artefact = find_previous_artefact(act_od, act_od.get_incoming(act_artefact, "pt_BelongsTo"))
    if prev_pt_artefact:
        act_od.create_link(None, "pt_PrevVersion", pt_artefact, prev_pt_artefact)
    act_od.create_link(None, "pt_BelongsTo", pt_artefact, act_artefact)
def extract_inner_workflow(workflow: ODAPI):
    """Inspect an executed inner workflow and return the ports whose effects
    must be mapped back to the composite activity.

    Returns (in order found): the control source that started the execution,
    the control sink it ended on, the data source consumed by the first
    activity (if any), and every data sink that received a produced artefact.
    Assumes the inner model contains exactly one "pm_Model" instance — TODO
    confirm this invariant is enforced elsewhere.
    """
    # Get the model, this should be only one
    name, model = workflow.get_all_instances("pm_Model")[0]
    # Get the start of the process trace
    start_trace = get_source_incoming(workflow, model, "pt_Starts")
    # Get the end of the process trace
    end_trace = get_source_incoming(workflow, model, "pt_Ends")
    # Get the first started activity
    first_activity = get_target_outgoing(workflow, start_trace, "pt_IsFollowedBy")
    # Get the last ended activity
    end_activity = get_source_incoming(workflow, end_trace, "pt_IsFollowedBy")
    # Get the control port that started the activity
    act_ctrl_in = get_target_outgoing(workflow, first_activity, "pt_RelatesTo")
    # Get the control port that is activated when the activity is executed
    act_ctrl_out = get_target_outgoing(workflow, end_activity, "pt_RelatesTo")
    # Get the control source of the workflow
    ports = []
    for port in workflow.get_incoming(act_ctrl_in, "pm_CtrlFlow"):
        source = workflow.get_source(port)
        if workflow.get_type_name(source) == "pm_CtrlSource":
            # Only one port can activate an activity
            ports.append(source)
            break
    # Get the control sink of the workflow
    for port in workflow.get_outgoing(act_ctrl_out, "pm_CtrlFlow"):
        sink = workflow.get_target(port)
        if workflow.get_type_name(sink) == "pm_CtrlSink":
            # Only one port can be set to active once an activity is ended
            ports.append(sink)
            break
    # Get the data port that the activity consumes (if used)
    consumed_links = workflow.get_incoming(first_activity, "pt_Consumes")
    if len(consumed_links) > 0:
        pt_artefact = None
        for link in consumed_links:
            pt_artefact = workflow.get_source(link)
            # Check if it is the first artefact -> contains no previous version
            if len(workflow.get_outgoing(pt_artefact, "pt_PrevVersion")) == 0:
                break
        pm_artefact = get_target_outgoing(workflow, pt_artefact, "pt_BelongsTo")
        # Find the data source port
        for link in workflow.get_incoming(pm_artefact, "pm_DataFlowIn"):
            source = workflow.get_source(link)
            if workflow.get_type_name(source) == "pm_DataSource":
                # An activity can only use one artefact as input
                ports.append(source)
                break
    # Get all data ports that are connected to an artefact that is produced by an activity in the workflow,
    # where the artefact is also part of main workflow
    for port_name, data_sink in workflow.get_all_instances("pm_DataSink"):
        pm_art = get_source_incoming(workflow, data_sink, "pm_DataFlowOut")
        # If the pm_artefact is linked to a process trace artefact that is produced, we can add to port
        links = workflow.get_incoming(pm_art, "pt_BelongsTo")
        if not len(links):
            continue
        # A data sink port linkage will only be added to the process trace when an activity is ended and so an artefact
        # is produced, meaning that if a belongsTo link exists, a process trace artefact is linked to this data port
        ports.append(data_sink)
    return ports

View file

@ -0,0 +1,2 @@
# Match the model
model:RAM_pm_Model

View file

@ -0,0 +1,7 @@
model:RAM_pm_Model
# Check if the model isn't already connected to a process trace
start_trace:RAM_pt_StartTrace
:RAM_pt_Starts (start_trace -> model)
end_trace:RAM_pt_EndTrace
:RAM_pt_Ends (end_trace -> model)

View file

@ -0,0 +1,12 @@
# Keep the left hand side
model:RAM_pm_Model
# Connect a process trace to it
start_trace:RAM_pt_StartTrace
starts:RAM_pt_Starts (start_trace -> model)
end_trace:RAM_pt_EndTrace
ends:RAM_pt_Ends (end_trace -> model)
# Connect the start with the end
:RAM_pt_IsFollowedBy (start_trace -> end_trace)

View file

@ -0,0 +1,49 @@
# When a control port is active and is connected to an activity, we want to execute the activity
# But if the activity has input_and (input_or = False), it can only be activated if all its inputs are active
# Match the model
model:RAM_pm_Model
# Match a Python automated activity
py_activity:RAM_pm_PythonAutomatedActivity {
# Check if all connected ports are active in case of input_and
condition = ```
all_active = True
# Check for or / and
if not get_slot_value(this, "input_or"):
# Get all the ctrl in ports
for has_ctrl_in in get_outgoing(this, "pm_HasCtrlIn"):
c_in_state = get_source(get_incoming(get_target(has_ctrl_in), "pm_Of")[0])
# Check if the port is active or not
if not get_slot_value(c_in_state, "active"):
all_active = False
break
all_active
```;
} model_to_activity:RAM_pm_Owns (model -> py_activity)
# Match a control activity in port that is active
ctrl_in:RAM_pm_CtrlActivityIn
ctrl_in_state:RAM_pm_CtrlPortState {
RAM_active = `get_value(this)`;
}
state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
# Match the activity link to the port
activity_to_port:RAM_pm_HasCtrlIn (py_activity -> ctrl_in)
# Match the end of the trace
end_trace:RAM_pt_EndTrace
ends:RAM_pt_Ends (end_trace -> model)
# Match the previous trace element before the end trace
prev_trace_element:RAM_pt_Event
followed_by:RAM_pt_IsFollowedBy (prev_trace_element -> end_trace)

View file

@ -0,0 +1,42 @@
model:RAM_pm_Model
py_activity:RAM_pm_PythonAutomatedActivity {
condition = ```
start_activity = create_object(None, "pt_StartActivity")
create_activity_links(odapi, start_activity, matched("prev_trace_element"), matched("ctrl_in"))
input_data = extract_input_data(odapi, this)
result = execute_activity(odapi, globals()["packages"], this, input_data)
if len(result) == 3:
status_code, output_data, input_used = result
else:
status_code, output_data, input_used = *result, None
if input_used:
handle_artefact(odapi, start_activity, "pt_Artefact", "pt_Consumes", get(input_used), input_data[input_used], direction="DataFlowOut")
end_activity = create_object(None, "pt_EndActivity")
ctrl_out = get(status_code)
create_activity_links(odapi, end_activity, start_activity, ctrl_out, end_trace=matched("end_trace"))
if output_data:
port, data = output_data
handle_artefact(odapi, end_activity, "pt_Artefact", "pt_Produces", get(port), data, direction="DataFlowIn")
update_control_states(odapi, this, ctrl_out)
```;
}
model_to_activity:RAM_pm_Owns
ctrl_in:RAM_pm_CtrlActivityIn
ctrl_in_state:RAM_pm_CtrlPortState {
RAM_active = `False`;
}
state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
activity_to_port:RAM_pm_HasCtrlIn (py_activity -> ctrl_in)
end_trace:RAM_pt_EndTrace
ends:RAM_pt_Ends (end_trace -> model)
prev_trace_element:RAM_pt_Event

View file

@ -0,0 +1,36 @@
# When a control port is active and is connected to an activity, we want to execute the activity. If it is a composite one, we execute the inner workflow of it
# But if the activity has input_and (input_or = False), it can only be activated if all its inputs are active
# Match the model
model:RAM_pm_Model
# Match a Python automated activity
activity:RAM_pm_Activity {
RAM_composite = `True`;
} model_to_activity:RAM_pm_Owns (model -> activity)
# Match a control activity in port that is active
ctrl_in:RAM_pm_CtrlActivityIn
ctrl_in_state:RAM_pm_CtrlPortState {
RAM_active = `get_value(this)`;
}
state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
# Match the activity link to the port
activity_to_port:RAM_pm_HasCtrlIn (activity -> ctrl_in)
# Match the end of the trace
end_trace:RAM_pt_EndTrace
ends:RAM_pt_Ends (end_trace -> model)
# Match the previous trace element before the end trace
prev_trace_element:RAM_pt_Event
followed_by:RAM_pt_IsFollowedBy (prev_trace_element -> end_trace)

View file

@ -0,0 +1,29 @@
model:RAM_pm_Model
activity:RAM_pm_Activity {
RAM_composite = `True`;
condition = ```
# Execute inner workflow
execute_composite_workflow(odapi, this, matched("ctrl_in"), globals()["composite_linkage"], globals()["packages"], matched)
```;
}
model_to_activity:RAM_pm_Owns
ctrl_in:RAM_pm_CtrlActivityIn
ctrl_in_state:RAM_pm_CtrlPortState {
RAM_active = `False`;
}
state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
activity_to_port:RAM_pm_HasCtrlIn (activity -> ctrl_in)
end_trace:RAM_pt_EndTrace
ends:RAM_pt_Ends (end_trace -> model)
prev_trace_element:RAM_pt_Event

View file

@ -0,0 +1,20 @@
# Match an active control output port
out_state:RAM_pm_CtrlPortState {
RAM_active = `get_value(this)`;
}
out:RAM_pm_CtrlOut
state_to_out:RAM_pm_Of (out_state -> out)
# Match an inactive control input port
in_state:RAM_pm_CtrlPortState {
RAM_active = `not get_value(this)`;
}
in:RAM_pm_CtrlIn
state_to_in:RAM_pm_Of (in_state -> in)
# Match the connection between those two ports
flow:RAM_pm_CtrlFlow (out -> in)

View file

@ -0,0 +1,42 @@
# Copy the left hand side
out_state:RAM_pm_CtrlPortState {
# Only set the output port to inactive if all connected input ports are set to active
RAM_active = ```
set_to_active = False
output_port = matched("out")
outgoing_flows = get_outgoing(output_port, "pm_CtrlFlow")
# for each flow: pm_CtrlFlow -> pm_CtrlIn <- pm_Of <- pm_CtrlPortState == state
all_input_port_states = [get_source(get_incoming(get_target(flow), "pm_Of")[0]) for flow in outgoing_flows]
input_port_state = matched("in_state")
for state in all_input_port_states:
is_active = get_slot_value(state, "active")
# If the state is not active and it is not the input port state we have matched and planned to set active
# Then we can't yet set this output port state to active
if not is_active and state != input_port_state:
set_to_active = True
break
# Set the attribute to the assigned value
set_to_active
```;
}
out:RAM_pm_CtrlOut
state_to_out:RAM_pm_Of (out_state -> out)
in_state:RAM_pm_CtrlPortState {
# Set the input port active
RAM_active = `True`;
}
in:RAM_pm_CtrlIn
state_to_in:RAM_pm_Of (in_state -> in)
flow:RAM_pm_CtrlFlow (out -> in)

View file

@ -0,0 +1,200 @@
##################################################
pm_Model:Class
##################################################
pm_Stateful:Class
##################################################
pm_ModelElement:Class {
abstract = True;
}
##################################################
pm_Activity:Class
:Inheritance (pm_Activity -> pm_ModelElement)
pm_Activity_name:AttributeLink (pm_Activity -> String) {
name = "name";
optional = False;
}
pm_Activity_composite:AttributeLink (pm_Activity -> Boolean) {
name = "composite";
optional = False;
}
pm_Activity_subworkflow_path:AttributeLink (pm_Activity -> String) {
name = "subworkflow_path";
optional = True;
}
pm_AutomatedActivity:Class {
abstract = True;
} :Inheritance (pm_AutomatedActivity -> pm_Activity)
pm_AutomatedActivity_input_or:AttributeLink (pm_AutomatedActivity -> Boolean) {
name = "input_or";
optional = False;
}
pm_PythonAutomatedActivity:Class
:Inheritance (pm_PythonAutomatedActivity -> pm_AutomatedActivity)
pm_PythonAutomatedActivity_func:AttributeLink (pm_PythonAutomatedActivity -> ActionCode) {
name = "func";
optional = False;
}
##################################################
pm_Artefact:Class
:Inheritance (pm_Artefact -> pm_ModelElement)
:Inheritance (pm_Artefact -> pm_Stateful)
##################################################
pm_CtrlPort:Class {
abstract = True;
} :Inheritance (pm_CtrlPort -> pm_Stateful)
pm_CtrlIn:Class {
abstract = True;
} :Inheritance (pm_CtrlIn -> pm_CtrlPort)
pm_CtrlSink:Class {
# 1) A control sink port must have at least one incoming control flow
# 2) A control sink port can't have any control flow output
constraint = ```
has_incoming = len(get_incoming(this, "pm_CtrlFlow")) > 0
no_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) == 0
# Return constraint
has_incoming and no_outgoing
```;
} :Inheritance (pm_CtrlSink -> pm_CtrlIn)
pm_CtrlActivityIn:Class {
# 1) Must have at least one incoming control flow
constraint = ```
has_incoming = len(get_incoming(this, "pm_CtrlFlow")) > 0
# Return constraint
has_incoming
```;
} :Inheritance (pm_CtrlActivityIn -> pm_CtrlIn)
pm_CtrlOut:Class {
abstract = True;
} :Inheritance (pm_CtrlOut -> pm_CtrlPort)
pm_CtrlSource:Class {
# 1) A control source port can't have any control flow inputs
# 2) A control source port must have at least one outgoing control flow
constraint = ```
no_incoming = len(get_incoming(this, "pm_CtrlFlow")) == 0
has_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) > 0
# Return constraint
no_incoming and has_outgoing
```;
} :Inheritance (pm_CtrlSource -> pm_CtrlOut)
pm_CtrlActivityOut:Class {
# 1) Must have at least one outgoing control flow
constraint = ```
has_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) > 0
# Return constraint
has_outgoing
```;
} :Inheritance (pm_CtrlActivityOut -> pm_CtrlOut)
##################################################
pm_DataPort:Class {
abstract = True;
}
pm_DataIn:Class {
abstract = True;
} :Inheritance (pm_DataIn -> pm_DataPort)
pm_DataSink:Class
:Inheritance (pm_DataSink -> pm_DataIn)
pm_DataActivityIn:Class
:Inheritance (pm_DataActivityIn -> pm_DataIn)
pm_DataOut:Class {
abstract = True;
} :Inheritance (pm_DataOut -> pm_DataPort)
pm_DataSource:Class
:Inheritance (pm_DataSource -> pm_DataOut)
pm_DataActivityOut:Class
:Inheritance (pm_DataActivityOut -> pm_DataOut)
##################################################
##################################################
pm_Owns:Association (pm_Model -> pm_ModelElement) {
source_lower_cardinality = 1;
source_upper_cardinality = 1;
}
##################################################
pm_CtrlFlow:Association (pm_CtrlPort -> pm_CtrlPort)
##################################################
pm_HasCtrlIn:Association (pm_Activity -> pm_CtrlIn) {
source_upper_cardinality = 1;
target_lower_cardinality = 1;
}
pm_HasCtrlOut:Association (pm_Activity -> pm_CtrlOut) {
source_upper_cardinality = 1;
target_lower_cardinality = 1;
}
pm_HasDataIn:Association (pm_Activity -> pm_DataIn) {
source_upper_cardinality = 1;
}
pm_HasDataOut:Association (pm_Activity -> pm_DataOut) {
source_upper_cardinality = 1;
}
##################################################
pm_DataFlowIn:Association (pm_DataOut -> pm_Artefact) {
source_lower_cardinality = 1;
target_lower_cardinality = 1;
}
pm_DataFlowOut:Association (pm_Artefact -> pm_DataIn) {
source_lower_cardinality = 1;
target_lower_cardinality = 1;
}
##################################################
##################################################
has_source_and_sink:GlobalConstraint {
# There should be at least one source and sink control port
constraint = ```
contains_source = len(get_all_instances("pm_CtrlSource")) > 0
contains_sink = len(get_all_instances("pm_CtrlSink")) > 0
# return constraint
contains_source and contains_sink
```;
}
##################################################

View file

@ -0,0 +1,38 @@
##################################################
pm_State:Class {
abstract = True;
}
##################################################
pm_ArtefactState:Class
:Inheritance (pm_ArtefactState -> pm_State)
pm_ArtefactState_data:AttributeLink (pm_ArtefactState -> Bytes) {
name = "data";
optional = False;
}
##################################################
pm_CtrlPortState:Class
:Inheritance (pm_CtrlPortState -> pm_State)
pm_CtrlPortState_active:AttributeLink (pm_CtrlPortState -> Boolean) {
name = "active";
optional = False;
}
##################################################
##################################################
pm_Of:Association (pm_State -> pm_Stateful) {
# one-to-one
source_lower_cardinality = 1;
source_upper_cardinality = 1;
target_lower_cardinality = 1;
target_upper_cardinality = 1;
}
##################################################

View file

@ -0,0 +1,109 @@
##################################################
pt_Event:Class {
abstract = True;
}
##################################################
pt_Activity:Class {
abstract = True;
} :Inheritance (pt_Activity -> pt_Event)
pt_StartActivity:Class {
# A start activity can only be related to a control in port
constraint = ```
correct_related = True
port = get_target(get_outgoing(this, "pt_RelatesTo")[0])
correct_related = port in [uid for _, uid in get_all_instances("pm_CtrlIn")]
correct_related
```;
} :Inheritance (pt_StartActivity -> pt_Activity)
pt_EndActivity:Class {
# An end activity can only be related to a control out port
constraint = ```
correct_related = True
port = get_target(get_outgoing(this, "pt_RelatesTo")[0])
correct_related = port in [uid for _, uid in get_all_instances("pm_CtrlOut")]
correct_related
```;
} :Inheritance (pt_EndActivity -> pt_Activity)
##################################################
pt_StartTrace:Class
:Inheritance (pt_StartTrace -> pt_Event)
pt_EndTrace:Class
:Inheritance (pt_EndTrace -> pt_Event)
##################################################
pt_Artefact:Class
:Inheritance (pt_Artefact -> pt_Event)
pt_Artefact_data:AttributeLink (pt_Artefact -> Bytes) {
name = "data";
optional = False;
}
##################################################
##################################################
pt_IsFollowedBy:Association (pt_Event -> pt_Event) {
source_upper_cardinality = 1;
target_upper_cardinality = 1;
}
##################################################
pt_RelatesTo:Association (pt_Activity -> pm_CtrlPort) {
source_upper_cardinality = 1;
target_lower_cardinality = 1;
target_upper_cardinality = 1;
}
pt_Consumes:Association (pt_Artefact -> pt_StartActivity) {
source_upper_cardinality = 1;
target_lower_cardinality = 1;
target_upper_cardinality = 1;
}
pt_Produces:Association (pt_EndActivity -> pt_Artefact) {
source_lower_cardinality = 1;
source_upper_cardinality = 1;
target_upper_cardinality = 1;
}
##################################################
pt_Starts:Association (pt_StartTrace -> pm_Model) {
source_upper_cardinality = 1;
target_lower_cardinality = 1;
target_upper_cardinality = 1;
}
pt_Ends:Association (pt_EndTrace -> pm_Model) {
source_upper_cardinality = 1;
target_lower_cardinality = 1;
target_upper_cardinality = 1;
}
##################################################
pt_PrevVersion:Association (pt_Artefact -> pt_Artefact) {
source_upper_cardinality = 1;
target_upper_cardinality = 1;
}
pt_BelongsTo:Association (pt_Artefact -> pm_Artefact) {
target_lower_cardinality = 1;
target_upper_cardinality = 1;
}
##################################################

View file

@ -0,0 +1,162 @@
import re
from state.devstate import DevState
from bootstrap.scd import bootstrap_scd
from util import loader
from transformation.rule import RuleMatcherRewriter
from transformation.ramify import ramify
from concrete_syntax.graphviz import renderer as graphviz
from concrete_syntax.graphviz.make_url import make_url
from concrete_syntax.plantuml import renderer as plantuml
from concrete_syntax.plantuml.make_url import make_url as plant_make_url
from api.od import ODAPI
import os
from os import listdir
from os.path import isfile, join
import importlib.util
from util.module_to_dict import module_to_dict
from examples.ftg_pm_pt import help_functions
from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt
class FtgPmPtRunner:
    """Drives the operational semantics of an FTG+PM+PT model.

    RAMifies the model's meta-model once, loads the transformation rules,
    and repeatedly applies the first applicable rule until no rule matches
    anymore (a fixpoint).
    """

    def __init__(self, model: FtgPmPt, composite_linkage: dict | None = None):
        self.model = model
        # RAMified meta-model, needed to parse/match the rule patterns.
        self.ram_mm = ramify(self.model.state, self.model.meta_model)
        self.rules = self.load_rules()
        # Optional user-supplied function packages, installed via set_packages().
        self.packages = None
        self.composite_linkage = composite_linkage

    def load_rules(self):
        """Load the operational-semantics rules; list order is priority order."""
        return loader.load_rules(
            self.model.state,
            lambda rule_name, kind: os.path.join(
                os.path.dirname(__file__),
                f"operational_semantics/r_{rule_name}_{kind}.od"
            ),
            self.ram_mm,
            ["connect_process_trace", "trigger_ctrl_flow", "exec_activity", "exec_composite_activity"]
        )

    def set_packages(self, packages: str | dict, is_path: bool):
        """Install user functions: a prebuilt dict, or (when is_path) a directory to scan."""
        if not is_path:
            self.packages = packages
            return
        self.packages = self.parse_packages(packages)

    def parse_packages(self, packages_path: str) -> dict:
        return self.collect_functions_from_packages(packages_path, packages_path)

    def collect_functions_from_packages(self, base_path, current_path):
        """Recursively collect "module.function" -> callable from every .py file under base_path."""
        functions_dict = {}
        for entry in listdir(current_path):
            entry_path = join(current_path, entry)
            if isfile(entry_path) and entry.endswith(".py"):
                module_name = self.convert_path_to_module_name(base_path, entry_path)
                module = self.load_module_from_file(entry_path)
                for func_name, func in module_to_dict(module).items():
                    functions_dict[f"{module_name}.{func_name}"] = func
            elif not isfile(entry_path):
                # Directory entry: recurse into it.
                nested_functions = self.collect_functions_from_packages(base_path, entry_path)
                functions_dict.update(nested_functions)
        return functions_dict

    @staticmethod
    def convert_path_to_module_name(base_path, file_path):
        # NOTE(review): "/" is stripped rather than replaced by ".", so nested
        # directory names fuse together, and this assumes POSIX separators —
        # confirm this matches how rules reference package functions.
        return file_path.replace(base_path, "").replace(".py", "").replace("/", "")

    @staticmethod
    def load_module_from_file(file_path):
        """Import a module directly from a file path (anonymous module name)."""
        spec = importlib.util.spec_from_file_location("", file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    def create_matcher(self):
        """Build the rule matcher/rewriter; its eval context exposes the help
        functions plus any user packages / composite linkage."""
        packages = module_to_dict(help_functions)
        if self.packages:
            packages.update({ "packages": self.packages })
        if self.composite_linkage:
            packages.update({ "composite_linkage": self.composite_linkage })
        matcher_rewriter = RuleMatcherRewriter(
            self.model.state, self.model.meta_model, self.ram_mm, eval_context=packages
        )
        return matcher_rewriter

    def visualize_model(self):
        """Print graphviz and PlantUML URLs rendering the current model."""
        print(make_url(graphviz.render_object_diagram(self.model.state, self.model.model, self.model.meta_model)))
        print(plant_make_url(plantuml.render_object_diagram(self.model.state, self.model.model, self.model.meta_model)))

    @staticmethod
    def __extract_artefact_info(od, pt_art):
        """Extract artefact metadata and data."""
        data = od.get_slot_value(pt_art, "data")
        # Name of the process-model artefact this trace artefact belongs to.
        pm_art = od.get_name(od.get_target(od.get_outgoing(pt_art, "pt_BelongsTo")[0]))
        has_prev_version = bool(od.get_outgoing(pt_art, "pt_PrevVersion"))
        is_last_version = not od.get_incoming(pt_art, "pt_PrevVersion")
        return {
            "Artefact Name": pm_art,
            "Data": data,
            "Has previous version": has_prev_version,
            "Is last version": is_last_version
        }

    def __extract_inputs(self, od, event_node):
        """Extract all consumed artefacts for an event."""
        return [
            self.__extract_artefact_info(od, od.get_source(consumes))
            for consumes in od.get_incoming(event_node, "pt_Consumes")
        ]

    def __extract_outputs(self, od, event_node):
        """Extract all produced artefacts for an event."""
        return [
            self.__extract_artefact_info(od, od.get_target(produces))
            for produces in od.get_outgoing(event_node, "pt_Produces")
        ]

    @staticmethod
    def to_snake_case(experiment_type):
        # Finds uppercase letters that are not at the start of the string.
        # Example: AtomicExperiment -> atomic_experiment
        return re.sub(r'(?<!^)(?=[A-Z])', '_', experiment_type).lower()

    def run(self, debug_flag: bool = False):
        """Repeatedly apply the highest-priority applicable rule until fixpoint."""
        matcher = self.create_matcher()
        rule_performed = True
        while rule_performed:
            # Loop over all the rules first in order priority
            for i, (rule_name, rule) in enumerate(self.rules.items()):
                rule_performed = False
                result = matcher.exec_on_first_match(
                    self.model.model, rule, rule_name, in_place=True
                )
                # If the rule cannot be executed go to the next rule
                if not result:
                    continue
                rule_performed = True
                self.model.model, lhs_match, _ = result
                if debug_flag:
                    print(f"Match: {lhs_match}")
                    self.visualize_model()
                # If a rule is performed, break and start looping over the rules from the beginning
                break

View file

@ -0,0 +1,66 @@
start:Start
end:End
transitions:Match{
file = "operational_semantics/transition";
}
d:Data_modify
{
modify_dict = '
{
"tr": "t"
}';
}
nac_input_without:Match{
file = "operational_semantics/all_input_have_token";
n = "1";
}
inputs:Match{
file = "operational_semantics/all_inputs";
}
rewrite_incoming:Rewrite
{
file = "operational_semantics/remove_incoming";
}
loop_trans:Loop
loop_input:Loop
p:Print
{
event = True;
label = "transition: ";
}
p2:Print
{
event = True;
label = "inputs: ";
}
:Exec_con(start -> transitions){gate_from = 0;gate_to = 0;}
:Exec_con(transitions -> end){gate_from = 1;gate_to = 0;}
:Exec_con(transitions -> loop_trans){gate_from = 0;gate_to = 0;}
:Exec_con(loop_trans -> nac_input_without){gate_from = 0;gate_to = 0;}
[//]: # (:Exec_con&#40;nac_input_without -> loop_trans&#41;{gate_from = 0;gate_to = 0;})
:Exec_con(nac_input_without -> inputs){gate_from = 1;gate_to = 0;}
:Exec_con(inputs -> loop_input){gate_from = 0;gate_to = 0;}
:Exec_con(inputs -> loop_trans){gate_from = 1;gate_to = 0;}
:Exec_con(loop_trans -> end){gate_from = 1;gate_to = 0;}
:Data_con(transitions -> loop_trans)
:Data_con(nac_input_without -> p)
:Data_con(d -> nac_input_without)
:Data_con(loop_trans -> d)
:Data_con(loop_trans -> rewrite_incoming)

View file

@ -0,0 +1,13 @@
# A place with no tokens:
p:RAM_PNPlace
ps:RAM_PNPlaceState {
RAM_numTokens = `get_value(this) == 0`;
}
:RAM_pn_of (ps -> p)
# An incoming arc from that place to our transition:
t:RAM_PNTransition
:RAM_arc (p -> t)

View file

@ -0,0 +1,13 @@
# A place with any number of tokens (numTokens is unconstrained):
p:RAM_PNPlace
ps:RAM_PNPlaceState {
RAM_numTokens = `True`;
}
:RAM_pn_of (ps -> p)
# An incoming arc from that place to our transition:
t:RAM_PNTransition
:RAM_arc (p -> t)

View file

@ -0,0 +1,13 @@
# A place with any number of tokens (numTokens is unconstrained):
p:RAM_PNPlace
ps:RAM_PNPlaceState {
RAM_numTokens = `True`;
}
:RAM_pn_of (ps -> p)
# An outgoing arc from our transition to that place:
t:RAM_PNTransition
:RAM_arc (t -> p)

View file

@ -0,0 +1,13 @@
# A place whose token count gets incremented:
p:RAM_PNPlace
ps:RAM_PNPlaceState {
RAM_numTokens = `set_value(this, get_value(this) + 1)`;
}
:RAM_pn_of (ps -> p)
# An outgoing arc from our transition to that place:
t:RAM_PNTransition
:RAM_arc (t -> p)

View file

@ -1 +1 @@
t:RAM_PNTransition
t:RAM_PNTransition

View file

@ -0,0 +1 @@
tr:RAM_PNTransition

View file

@ -1,3 +1,4 @@
from examples.schedule.RuleExecuter import RuleExecuter
from state.devstate import DevState
from api.od import ODAPI
from concrete_syntax.textual_od.renderer import render_od
@ -9,6 +10,10 @@ from transformation.ramify import ramify
from examples.semantics.operational import simulator
from examples.petrinet.renderer import show_petri_net
from examples.schedule.ScheduledActionGenerator import *
from examples.schedule.RuleExecuter import *
if __name__ == "__main__":
import os
@ -46,20 +51,25 @@ if __name__ == "__main__":
mm_rt_ramified,
["fire_transition"]) # only 1 rule :(
matcher_rewriter = RuleMatcherRewriter(state, mm_rt, mm_rt_ramified)
action_generator = ActionGenerator(matcher_rewriter, rules)
# matcher_rewriter = RuleMatcherRewriter(state, mm_rt, mm_rt_ramified)
# action_generator = ActionGenerator(matcher_rewriter, rules)
matcher_rewriter2 = RuleExecuter(state, mm_rt, mm_rt_ramified)
action_generator = ScheduleActionGenerator(matcher_rewriter2, f"models/schedule.od")
def render_callback(od):
show_petri_net(od)
# return render_od(state, od.m, od.mm)
return render_od_jinja2(state, od.m, od.mm)
sim = simulator.Simulator(
action_generator.generate_dot()
sim = simulator.MinimalSimulator(
action_generator=action_generator,
decision_maker=simulator.InteractiveDecisionMaker(auto_proceed=False),
# decision_maker=simulator.RandomDecisionMaker(seed=0),
renderer=render_callback,
termination_condition=action_generator.termination_condition,
# renderer=lambda od: render_od(state, od.m, od.mm),
)
sim.run(ODAPI(state, m_rt_initial, mm_rt))
sim.run(ODAPI(state, m_rt_initial, mm_rt))

View file

@ -0,0 +1,49 @@
from concrete_syntax.textual_od.renderer import render_od
import pprint
from typing import Generator, Callable, Any
from uuid import UUID
import functools
from api.od import ODAPI
from concrete_syntax.common import indent
from transformation.matcher import match_od
from transformation.rewriter import rewrite
from transformation.cloner import clone_od
from util.timer import Timer
from util.loader import parse_and_check
class RuleExecuter:
    """Thin wrapper around the generic match/rewrite engine.

    Bundles the modelling state, the host meta-model and its RAMified
    counterpart so schedule nodes can run rules without re-passing them.
    """

    def __init__(self, state, mm: UUID, mm_ramified: UUID, eval_context=None):
        self.state = state
        self.mm = mm
        self.mm_ramified = mm_ramified
        # Fix: the original used a mutable default (`eval_context={}`), which
        # is shared across all instances; give each instance its own dict.
        self.eval_context = {} if eval_context is None else eval_context

    # Generates matches.
    # Every match is a dictionary with entries LHS_element_name -> model_element_name
    def match_rule(self, m: UUID, lhs: UUID, *, pivot: dict[Any, Any]):
        """Return a generator of matches of pattern `lhs` in host model `m`,
        seeded with the (possibly empty) `pivot` pre-binding."""
        return match_od(self.state,
                        host_m=m,
                        host_mm=self.mm,
                        pattern_m=lhs,
                        pattern_mm=self.mm_ramified,
                        eval_context=self.eval_context,
                        pivot=pivot,
                        )

    def rewrite_rule(self, m: UUID, rhs: UUID, *, pivot: dict[Any, Any]):
        """Generator applying RHS pattern `rhs` to `m` under the `pivot` match."""
        yield rewrite(self.state,
                      rhs_m=rhs,
                      pattern_mm=self.mm_ramified,
                      lhs_match=pivot,
                      host_m=m,
                      host_mm=self.mm,
                      eval_context=self.eval_context,
                      )

    def load_match(self, file: str):
        """Parse a pattern file and check it against the RAMified meta-model."""
        with open(file, "r") as f:
            return parse_and_check(self.state, f.read(), self.mm_ramified, file)

View file

@ -0,0 +1,104 @@
import importlib.util
import io
import os
from jinja2 import FileSystemLoader, Environment
from concrete_syntax.textual_od import parser as parser_od
from concrete_syntax.textual_cd import parser as parser_cd
from api.od import ODAPI
from bootstrap.scd import bootstrap_scd
from examples.schedule.generator import schedule_generator
from examples.schedule.schedule_lib import End, NullNode
from framework.conformance import Conformance, render_conformance_check_result
from state.devstate import DevState
class ScheduleActionGenerator:
    """Action generator driven by a compiled schedule model.

    Loads a scheduling model, generates executable Python from it via Jinja2
    templates, imports the generated module, and then acts as a callable
    action generator for the simulator.
    """

    def __init__(self, rule_executer, schedulefile: str):
        self.rule_executer = rule_executer
        self.rule_dict = {}
        # Set by load_schedule(); instance of the generated Schedule class.
        self.schedule: "Schedule"
        self.state = DevState()
        self.load_schedule(schedulefile)

    def load_schedule(self, filename):
        """Parse + conformance-check the schedule model, generate schedule.py,
        import it and instantiate the Schedule.

        NOTE(review): the model paths below are relative to the current working
        directory and look placeholder-like — confirm they resolve correctly.
        """
        print("Loading schedule ...")
        scd_mmm = bootstrap_scd(self.state)
        with open("../schedule/models/scheduling_MM.od", "r") as f_MM:
            mm_cs = f_MM.read()
        with open(f"(unknown)", "r") as f_M:
            m_cs = f_M.read()
        print("OK")
        print("\nParsing models")
        print(f"\tParsing meta model")
        scheduling_mm = parser_cd.parse_cd(
            self.state,
            m_text=mm_cs,
        )
        print(f"\tParsing '(unknown)_M.od' model")
        scheduling_m = parser_od.parse_od(
            self.state,
            m_text=m_cs,
            mm=scheduling_mm
        )
        print(f"OK")
        print("\tmeta-meta-model a valid class diagram")
        conf = Conformance(self.state, scd_mmm, scd_mmm)
        print(render_conformance_check_result(conf.check_nominal()))
        print(f"Is our '(unknown)_M.od' model a valid '(unknown)_MM.od' diagram?")
        conf = Conformance(self.state, scheduling_m, scheduling_mm)
        print(render_conformance_check_result(conf.check_nominal()))
        print("OK")
        od = ODAPI(self.state, scheduling_m, scheduling_mm)
        # Render the schedule model to Python source, write it to disk, then
        # import the generated file as a module.
        g = schedule_generator(od)
        output_buffer = io.StringIO()
        g.generate_schedule(output_buffer)
        open(f"schedule.py", "w").write(output_buffer.getvalue())
        spec = importlib.util.spec_from_file_location("schedule", "schedule.py")
        scedule_module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(scedule_module)
        self.schedule = scedule_module.Schedule(self.rule_executer)
        self.load_matchers()

    def load_matchers(self):
        """Pre-parse every pattern file the schedule references and hand the
        parsed patterns to the generated schedule."""
        matchers = dict()
        for file in self.schedule.get_matchers():
            matchers[file] = self.rule_executer.load_match(file)
        self.schedule.init_schedule(matchers)

    def __call__(self, api: ODAPI):
        # Delegate action generation to the compiled schedule.
        exec_op = self.schedule(api)
        yield from exec_op

    def termination_condition(self, api: ODAPI):
        """Return a truthy message when the schedule reached a terminal node.

        NOTE(review): "jay" / "RRRR" look like debug placeholder messages —
        consider clearer termination descriptions.
        """
        if type(self.schedule.cur) == End:
            return "jay"
        if type(self.schedule.cur) == NullNode:
            return "RRRR"
        return None

    def generate_dot(self):
        """Render the schedule graph to test.dot via the dot Jinja2 template."""
        env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')))
        env.trim_blocks = True
        env.lstrip_blocks = True
        template_dot = env.get_template('schedule_dot.j2')
        nodes = []
        edges = []
        visit = set()
        self.schedule.generate_dot(nodes, edges, visit)
        print("Nodes:")
        print(nodes)
        print("\nEdges:")
        print(edges)
        with open("test.dot", "w") as f_dot:
            f_dot.write(template_dot.render({"nodes": nodes, "edges": edges}))

View file

View file

@ -0,0 +1,129 @@
import sys
import os
import json
from uuid import UUID
from jinja2.runtime import Macro
from api.od import ODAPI
from jinja2 import Environment, FileSystemLoader, meta
class schedule_generator:
    """Renders executable Python for a schedule object diagram.

    Walks the Start/End/Match/Rewrite/... instance graph through the given
    ODAPI and emits one Jinja2 macro call per node and per connection.
    """

    def __init__(self, odApi: ODAPI):
        # Templates live in a 'templates' directory next to this module.
        self.env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')))
        self.env.trim_blocks = True
        self.env.lstrip_blocks = True
        self.template = self.env.get_template('schedule_template.j2')
        self.template_wrap = self.env.get_template('schedule_template_wrap.j2')
        self.api = odApi

        def get_slot_value_default(item: UUID, slot: str, default):
            # Read a slot if the object has it, otherwise return `default`.
            if slot in self.api.get_slots(item):
                return self.api.get_slot_value(item, slot)
            return default

        # Per-type kwargs builders: map a model element to the arguments of the
        # Jinja macro named after its type.
        name_dict = lambda item: {"name": self.api.get_name(item)}
        conn_dict = lambda item: {"name_from": self.api.get_name(self.api.get_source(item)),
                                  "name_to": self.api.get_name(self.api.get_target(item)),
                                  "gate_from": self.api.get_slot_value(item, "gate_from"),
                                  "gate_to": self.api.get_slot_value(item, "gate_to"),
                                  }
        # Whether a data connection's *target* type wants input events.
        conn_data_event = {"Match": lambda item: False,
                           "Rewrite": lambda item: False,
                           "Data_modify": lambda item: True,
                           "Loop": lambda item: True,
                           "Print": lambda item: get_slot_value_default(item, "event", False)
                           }
        conn_data_dict = lambda item: {"name_from": self.api.get_name(self.api.get_source(item)),
                                       "name_to": self.api.get_name(self.api.get_target(item)),
                                       "event": conn_data_event[self.api.get_type_name(target := self.api.get_target(item))](target)
                                       }
        rewrite_dict = lambda item: {"name": self.api.get_name(item),
                                     "file": self.api.get_slot_value(item, "file"),
                                     }
        match_dict = lambda item: {"name": self.api.get_name(item),
                                   "file": self.api.get_slot_value(item, "file"),
                                   "n": self.api.get_slot_value(item, "n") \
                                       if "n" in self.api.get_slots(item) else 'float("inf")'
                                   }
        data_modify_dict = lambda item: {"name": self.api.get_name(item),
                                         "dict": json.loads(self.api.get_slot_value(item, "modify_dict"))
                                         }
        loop_dict = lambda item: {"name": self.api.get_name(item),
                                  "choise": get_slot_value_default(item, "choise", False)}
        print_dict = lambda item: {"name": self.api.get_name(item),
                                   "label": get_slot_value_default(item, "label", "")}
        arg_map = {"Start": name_dict, "End": name_dict,
                   "Match": match_dict, "Rewrite": rewrite_dict,
                   "Data_modify": data_modify_dict, "Loop": loop_dict,
                   "Exec_con": conn_dict, "Data_con": conn_data_dict,
                   "Print": print_dict}
        # type name -> (Jinja macro, kwargs builder) for every macro exported
        # by the template module.
        self.macro_args = {tp: (macro, arg_map.get(tp)) for tp, macro in self.template.module.__dict__.items()
                           if type(macro) == Macro}

    def _render(self, item):
        """Render one model element through its type's macro."""
        type_name = self.api.get_type_name(item)
        macro, arg_gen = self.macro_args[type_name]
        return macro(**arg_gen(item))

    def generate_schedule(self, stream=sys.stdout):
        """Traverse the schedule graph and write the generated module to `stream`."""
        start = self.api.get_all_instances("Start")[0][1]
        stack = [start]
        out = {"blocks": [], "exec_conn": [], "data_conn": [], "match_files": set(), "matchers": [], "start": self.api.get_name(start)}
        # Reachability walk over the execution (control-flow) graph.
        execBlocks = set()
        exec_conn = list()
        while len(stack) > 0:
            exec_obj = stack.pop()
            if exec_obj in execBlocks:
                continue
            execBlocks.add(exec_obj)
            for conn in self.api.get_outgoing(exec_obj, "Exec_con"):
                exec_conn.append(conn)
                stack.append(self.api.get_target(conn))
        stack = list(execBlocks)
        data_blocks = set()
        for name, p in self.api.get_all_instances("Print"):
            # NOTE(review): `event` is the *list of slot names*, so `and event`
            # only checks that the list is non-empty — it never reads the value
            # of the "event" slot. Confirm whether the slot value was intended.
            if "event" in (event := self.api.get_slots(p)) and event:
                stack.append(p)
                execBlocks.add(p)
        # Backwards walk over data connections feeding the reachable nodes.
        data_conn = set()
        while len(stack) > 0:
            obj = stack.pop()
            for data_c in self.api.get_incoming(obj, "Data_con"):
                data_conn.add(data_c)
                source = self.api.get_source(data_c)
                if not self.api.is_instance(source, "Exec") and \
                        source not in execBlocks and \
                        source not in data_blocks:
                    stack.append(source)
                    data_blocks.add(source)
        for exec_item in execBlocks:
            out["blocks"].append(self._render(exec_item))
            if self.api.is_instance(exec_item, "Rule"):
                # Rules (Match/Rewrite) also register their pattern file.
                d = self.macro_args[self.api.get_type_name(exec_item)][1](exec_item)
                out["match_files"].add(d["file"])
                out["matchers"].append(d)
        for exec_c in exec_conn:
            out["exec_conn"].append(self._render(exec_c))
        for data_c in data_conn:
            out["data_conn"].append(self._render(data_c))
        for data_b in data_blocks:
            out["blocks"].append(self._render(data_b))
        print(self.template_wrap.render(out), file=stream)
        # print("with open('test.dot', 'w') as f:", file=stream)
        # print(f"\tf.write({self.api.get_name(start)}.generate_dot())", file=stream)

View file

@ -0,0 +1,26 @@
### association Exec_con
Integer gate_from;
Integer gate_to;
### association Data_con
### class Start [1..1]
### class End [1..*]
### class Match
optional Integer n;
### class Rewrite
### class Data_modify
String modify_dict;
### class Loop
optional Boolean choise;
## debugging tools
### class Print(In_Exec, Out_Exec, In_Data)
optional Boolean event;

View file

@ -0,0 +1,46 @@
abstract class Exec
abstract class In_Exec(Exec)
abstract class Out_Exec(Exec)
association Exec_con [0..*] Out_Exec -> In_Exec [0..*] {
Integer gate_from;
Integer gate_to;
}
abstract class Data
abstract class In_Data(Data)
abstract class Out_Data(Data)
association Data_con [0..*] Out_Data -> In_Data [0..*]
class Start [1..1] (Out_Exec)
class End [1..*] (In_Exec)
abstract class Rule (In_Exec, Out_Exec, In_Data, Out_Data)
{
String file;
}
class Match (Rule)
{
optional Integer n;
}
class Rewrite (Rule)
class Data_modify(In_Data, Out_Data)
{
String modify_dict;
}
class Loop(In_Exec, Out_Exec, In_Data, Out_Data)
{
optional Boolean choise;
}
# debugging tools
class Print(In_Exec, Out_Exec, In_Data)
{
optional Boolean event;
optional String label;
}

View file

@ -0,0 +1,12 @@
from .data_node import DataNode
from .data_modify import DataModify
from .end import End
from .exec_node import ExecNode
from .loop import Loop
from .match import Match
from .null_node import NullNode
from .print import Print
from .rewrite import Rewrite
from .start import Start
__all__ = ["DataNode", "End", "ExecNode", "Loop", "Match", "NullNode", "Rewrite", "Print", "DataModify", "Start"]

View file

@ -0,0 +1,63 @@
import functools
from typing import Any, Generator, Callable
class Data:
def __init__(self, super) -> None:
self.data: list[dict[Any, Any]] = list()
self.success: bool = False
self.super = super
@staticmethod
def store_output(func: Callable) -> Callable:
def wrapper(self, *args, **kwargs) -> Any:
output = func(self, *args, **kwargs)
self.success = output
return output
return wrapper
@store_output
def store_data(self, data_gen: Generator, n: int) -> bool:
self.data.clear()
if n == 0:
return True
i: int = 0
while (match := next(data_gen, None)) is not None:
self.data.append(match)
i+=1
if i >= n:
break
else:
if n == float("inf"):
return bool(len(self.data))
self.data.clear()
return False
return True
def get_super(self) -> int:
return self.super
def replace(self, data: "Data") -> None:
self.data.clear()
self.data.extend(data.data)
def append(self, data: Any) -> None:
self.data.append(data)
def clear(self) -> None:
self.data.clear()
def pop(self, index = -1) -> Any:
return self.data.pop(index)
def empty(self) -> bool:
return len(self.data) == 0
def __getitem__(self, index):
return self.data[index]
def __iter__(self):
return self.data.__iter__()
def __len__(self):
return self.data.__len__()

View file

@ -0,0 +1,26 @@
import functools
from typing import TYPE_CHECKING, Callable, List
from api.od import ODAPI
from examples.schedule.RuleExecuter import RuleExecuter
from .exec_node import ExecNode
from .data_node import DataNode
class DataModify(DataNode):
    """Data node that renames match keys according to a mapping.

    Keys absent from `modify_dict` are dropped; the rest are renamed.
    """

    def __init__(self, modify_dict: dict[str, str]) -> None:
        DataNode.__init__(self)
        self.modify_dict: dict[str, str] = modify_dict

    def input_event(self, success: bool) -> None:
        # Rebuild the output whenever new data arrives or a previous success
        # needs to be invalidated.
        if success or self.data_out.success:
            self.data_out.data.clear()
            for entry in self.data_in.data:
                renamed = {}
                for key, value in entry.items():
                    if key in self.modify_dict:
                        renamed[self.modify_dict[key]] = value
                self.data_out.append(renamed)
        DataNode.input_event(self, success)

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id in visited:
            return
        nodes.append(f"{self.id}[label=modify]")
        super().generate_dot(nodes, edges, visited)

View file

@ -0,0 +1,47 @@
from typing import Any, Generator, List
from examples.schedule.schedule_lib.id_generator import IdGenerator
from .data import Data
class DataNode:
    """Mixin giving a schedule node a data output, an optional data input,
    and event subscribers notified when the output changes."""

    def __init__(self) -> None:
        # ExecNode may already have assigned an id (multiple inheritance).
        if not hasattr(self, 'id'):
            self.id = IdGenerator().generate_id()
        self.data_out: Data = Data(self)
        self.data_in: Data | None = None
        self.eventsub: list[DataNode] = []

    def connect_data(self, data_node: "DataNode", eventsub=True) -> None:
        """Wire our output to `data_node`'s input; optionally push events too."""
        data_node.data_in = self.data_out
        if eventsub:
            self.eventsub.append(data_node)

    def store_data(self, data_gen: Generator, n: int) -> None:
        """Fill the output from a generator and broadcast the outcome."""
        outcome: bool = self.data_out.store_data(data_gen, n)
        for subscriber in self.eventsub:
            subscriber.input_event(outcome)

    def get_input_data(self) -> list[dict[Any, Any]]:
        """Return the upstream matches; raise if matching failed or produced nothing."""
        upstream = self.data_in
        if not upstream.success:
            raise Exception("Invalid input data: matching has failed")
        if len(upstream.data) == 0:
            raise Exception("Invalid input data: no data present")
        return upstream.data

    def input_event(self, success: bool) -> None:
        # Default behaviour: propagate the flag unchanged to subscribers.
        self.data_out.success = success
        for subscriber in self.eventsub:
            subscriber.input_event(success)

    def get_id(self) -> int:
        return self.id

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        visited.add(self.id)
        if self.data_in is not None:
            upstream_node = self.data_in.get_super()
            edges.append(f"{upstream_node.get_id()} -> {self.get_id()} [color = green]")
            upstream_node.generate_dot(nodes, edges, visited)
        for subscriber in self.eventsub:
            subscriber.generate_dot(nodes, edges, visited)

View file

@ -0,0 +1,21 @@
import functools
from typing import TYPE_CHECKING, List, Callable, Generator
from api.od import ODAPI
from .exec_node import ExecNode
class End(ExecNode):
    """Terminal schedule node: emits a single "end:" action."""

    def __init__(self) -> None:
        super().__init__(out_connections=1)

    def execute(self, od: ODAPI) -> Generator | None:
        return self.terminate(od)

    @staticmethod
    def terminate(od: ODAPI) -> Generator:
        # One action whose callback simply hands back the unchanged ODAPI.
        yield "end:", functools.partial(lambda od: (od, ""), od)

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id not in visited:
            nodes.append(f"{self.id}[label=end]")

View file

@ -0,0 +1,34 @@
from typing import TYPE_CHECKING, List, Callable, Generator
from api.od import ODAPI
from .id_generator import IdGenerator
class ExecNode:
    """Base class for nodes in the schedule's execution (control-flow) graph."""

    def __init__(self, out_connections: int = 1) -> None:
        from .null_node import NullNode
        self.next_state: list[ExecNode] = []
        if out_connections > 0:
            # Every gate initially points at the shared NullNode sentinel.
            self.next_state = [NullNode()] * out_connections
        self.id: int = IdGenerator().generate_id()

    def nextState(self) -> "ExecNode":
        return self.next_state[0]

    def connect(self, next_state: "ExecNode", from_gate: int = 0, to_gate: int = 0) -> None:
        """Attach `next_state` to output gate `from_gate` (`to_gate` is unused here)."""
        if from_gate >= len(self.next_state):
            raise IndexError
        self.next_state[from_gate] = next_state

    def execute(self, od: ODAPI) -> Generator | None:
        # Plain nodes do nothing; subclasses override.
        return None

    def get_id(self) -> int:
        return self.id

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        visited.add(self.id)
        # Emit all edges first, then recurse (keeps edge ordering stable).
        for successor in self.next_state:
            edges.append(f"{self.id} -> {successor.get_id()}")
        for successor in self.next_state:
            successor.generate_dot(nodes, edges, visited)

View file

@ -0,0 +1,10 @@
from typing import Callable
def generate_dot_wrap(func) -> Callable:
    """Decorator: collect nodes/edges produced by `func` and emit a DOT digraph."""
    def wrapper(self, *args, **kwargs) -> str:
        node_lines = []
        edge_lines = []
        self.reset_visited()
        func(self, node_lines, edge_lines, *args, **kwargs)
        # Same layout as the original f-string: nodes block, edges block.
        body = "\n\t".join(node_lines) + "\n\t" + "\n\t".join(edge_lines)
        return "digraph G {\n\t" + body + "\n}"
    return wrapper

View file

@ -0,0 +1,8 @@
from .singleton import Singleton
class IdGenerator(metaclass=Singleton):
    """Singleton counter handing out sequential node ids starting at 0."""

    def __init__(self):
        # Last id handed out; the first generate_id() call returns 0.
        self.id = -1

    def generate_id(self) -> int:
        self.id = self.id + 1
        return self.id

View file

@ -0,0 +1,57 @@
import functools
from random import choice
from typing import TYPE_CHECKING, Callable, List, Generator
from api.od import ODAPI
from examples.schedule.RuleExecuter import RuleExecuter
from .exec_node import ExecNode
from .data_node import DataNode
from .data_node import Data
class Loop(ExecNode, DataNode):
    """Buffers a list of matches and releases one per execution.

    Gate 0 is taken while the released match was a success, gate 1 once the
    buffer is exhausted. With `choice` set, the user picks which match to
    release next.
    """

    def __init__(self, choice) -> None:
        ExecNode.__init__(self, out_connections=2)
        DataNode.__init__(self)
        # When True, each remaining match is offered as a user decision.
        self.choice: bool = choice
        # Internal buffer of pending matches. NOTE(review): Data's first
        # parameter is the owning node; -1 is a dummy owner — confirm nothing
        # calls get_super() on this buffer.
        self.cur_data: Data = Data(-1)

    def nextState(self) -> ExecNode:
        # Index 0 (False) on success, index 1 (True) on failure.
        return self.next_state[not self.data_out.success]

    def execute(self, od: ODAPI) -> Generator | None:
        if self.cur_data.empty():
            # Buffer drained: signal failure downstream and take gate 1.
            self.data_out.clear()
            self.data_out.success = False
            DataNode.input_event(self, False)
            return None
        if self.choice:
            # Offer every remaining match as a separate decision option.
            def select_data() -> Generator:
                for i in range(len(self.cur_data)):
                    yield f"choice: {self.cur_data[i]}", functools.partial(self.select_next,od, i)
            return select_data()
        else:
            # Deterministic: release the last buffered match.
            self.select_next(od, -1)
            return None

    def input_event(self, success: bool) -> None:
        # Refill the buffer from upstream on a fresh success, or when a
        # previously signalled success must be invalidated.
        if (b := self.data_out.success) or success:
            self.cur_data.replace(self.data_in)
            self.data_out.clear()
            self.data_out.success = False
            if b:
                DataNode.input_event(self, False)

    def select_next(self,od: ODAPI, index: int) -> tuple[ODAPI, list[str]]:
        """Move one buffered match to the output and notify subscribers."""
        self.data_out.clear()
        self.data_out.append(self.cur_data.pop(index))
        DataNode.input_event(self, True)
        return (od, ["data selected"])

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id in visited:
            return
        nodes.append(f"{self.id}[label=Loop]")
        ExecNode.generate_dot(self, nodes, edges, visited)
        DataNode.generate_dot(self, nodes, edges, visited)

View file

@ -0,0 +1,42 @@
import functools
from typing import TYPE_CHECKING, Callable, List, Generator
from api.od import ODAPI
from examples.schedule.RuleExecuter import RuleExecuter
from .exec_node import ExecNode
from .data_node import DataNode
class Match(ExecNode, DataNode):
    """Schedule node that runs an LHS match and stores up to `n` results."""

    def __init__(self, label: str, n: int | float) -> None:
        ExecNode.__init__(self, out_connections=2)
        DataNode.__init__(self)
        self.label: str = label
        self.n: int = n
        self.rule = None
        self.rule_executer: RuleExecuter

    def nextState(self) -> ExecNode:
        # Gate 0 on match success, gate 1 on failure.
        return self.next_state[not self.data_out.success]

    def execute(self, od: ODAPI) -> Generator | None:
        self.match(od)
        return None

    def init_rule(self, rule, rule_executer):
        """Late-bind the parsed pattern and the executer that will run it."""
        self.rule = rule
        self.rule_executer = rule_executer

    def match(self, od: ODAPI) -> None:
        """Run the matcher, seeded with the first upstream match as pivot."""
        pivot = self.get_input_data()[0] if self.data_in is not None else {}
        print(f"matching: {self.label}\n\tpivot: {pivot}")
        self.store_data(self.rule_executer.match_rule(od.m, self.rule, pivot=pivot), self.n)

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id in visited:
            return
        short_label = self.label.split('/')[-1]
        nodes.append(f"{self.id}[label=M_{short_label}_{self.n}]")
        ExecNode.generate_dot(self, nodes, edges, visited)
        DataNode.generate_dot(self, nodes, edges, visited)

View file

@ -0,0 +1,25 @@
import functools
from symtable import Function
from typing import List, Callable, Generator
from api.od import ODAPI
from .singleton import Singleton
from .exec_node import ExecNode
class NullNode(ExecNode, metaclass=Singleton):
    """Shared sentinel for unwired gates; actually executing it is a bug."""

    def __init__(self):
        ExecNode.__init__(self, out_connections=0)

    def execute(self, od: ODAPI) -> Generator | None:
        raise Exception('Null node should already have terminated the schedule')

    @staticmethod
    def terminate(od: ODAPI):
        # An immediately-exhausted generator: the unreachable `yield` below is
        # what makes this function a generator — do not remove it.
        return None
        yield

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id not in visited:
            nodes.append(f"{self.id}[label=Null]")

View file

@ -0,0 +1,28 @@
import functools
from typing import TYPE_CHECKING, Callable, List, Generator
from api.od import ODAPI
from examples.schedule.RuleExecuter import RuleExecuter
from .exec_node import ExecNode
from .data_node import DataNode
class Print(ExecNode, DataNode):
    """Debug node: prints its input data whenever an input event arrives."""

    def __init__(self, label: str = "") -> None:
        ExecNode.__init__(self, out_connections=1)
        DataNode.__init__(self)
        self.label = label

    def execute(self, od: ODAPI) -> Generator | None:
        self.input_event(True)
        return None

    def input_event(self, success: bool) -> None:
        # Prints only — deliberately does not forward the event downstream.
        print(f"{self.label}{self.data_in.data}")

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id in visited:
            return
        clean_label = self.label.replace(':', '')
        nodes.append(f"{self.id}[label=Print_{clean_label}]")
        ExecNode.generate_dot(self, nodes, edges, visited)
        DataNode.generate_dot(self, nodes, edges, visited)

View file

@ -0,0 +1,38 @@
import functools
from typing import List, Callable, Generator
from api.od import ODAPI
from .exec_node import ExecNode
from .data_node import DataNode
from ..RuleExecuter import RuleExecuter
class Rewrite(ExecNode, DataNode):
    """Schedule node that applies a rewrite rule to the host model."""

    def __init__(self, label: str) -> None:
        ExecNode.__init__(self, out_connections=1)
        DataNode.__init__(self)
        self.label = label                 # rule identifier (typically a file path)
        self.rule = None                   # bound later via init_rule()
        self.rule_executer: RuleExecuter   # bound later via init_rule()

    def init_rule(self, rule, rule_executer):
        """Bind the compiled rule and the executer that knows how to apply it."""
        self.rule = rule
        self.rule_executer = rule_executer

    def execute(self, od: ODAPI) -> Generator | None:
        # NOTE(review): "ghello" looks like a placeholder action description --
        # confirm with the consumer of these (description, action) pairs.
        yield "ghello", functools.partial(self.rewrite, od)

    def rewrite(self, od):
        """Apply the rule (seeded with the incoming pivot, if any) and report the outcome."""
        print("rewrite" + self.label)
        pivot = {} if self.data_in is None else self.get_input_data()[0]
        self.store_data(self.rule_executer.rewrite_rule(od.m, self.rule, pivot=pivot), 1)
        outcome = "success" if self.data_out.success else "failure"
        return ODAPI(od.state, od.m, od.mm), [f"rewrite {self.label}\n\tpivot: {pivot}\n\t{outcome}\n"]

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id not in visited:
            rule_name = self.label.split("/")[-1]
            nodes.append(f"{self.id}[label=R_{rule_name}]")
            ExecNode.generate_dot(self, nodes, edges, visited)
            DataNode.generate_dot(self, nodes, edges, visited)

View file

@ -0,0 +1,8 @@
from abc import ABCMeta
class Singleton(ABCMeta):
    """Metaclass that caches exactly one instance per class.

    The first instantiation constructs the object; every later call returns
    the cached instance. Deriving from ABCMeta lets classes that use this
    metaclass also declare abstract methods.
    """

    # Maps each class using this metaclass to its unique instance.
    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance

View file

@ -0,0 +1,16 @@
from typing import TYPE_CHECKING, Callable, List, Any
from .funcs import generate_dot_wrap
from .exec_node import ExecNode
class Start(ExecNode):
    """Entry node of the schedule graph."""

    def __init__(self) -> None:
        # Exactly one outgoing connection: the first node to run.
        ExecNode.__init__(self, out_connections=1)

    def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None:
        if self.id not in visited:
            nodes.append(f"{self.id}[label=start]")
            super().generate_dot(nodes, edges, visited)

View file

@ -0,0 +1,9 @@
{# Graphviz output template: interpolates pre-rendered node and edge
   statement strings (presumably produced by the generate_dot methods of
   the schedule nodes -- confirm against schedule_lib) into a digraph. #}
digraph G {
    {% for node in nodes %}
    {{ node }}
    {% endfor %}
    {% for edge in edges %}
    {{ edge }}
    {% endfor %}
}

View file

@ -0,0 +1,35 @@
{# Macro library for the schedule code generator: each macro renders one
   Python statement of the generated schedule module (node construction
   or node wiring). #}
{# Instantiate the unique Start node. #}
{% macro Start(name) %}
{{ name }} = Start()
{%- endmacro %}
{# Instantiate the unique End node. #}
{% macro End(name) %}
{{ name }} = End()
{%- endmacro %}
{# Instantiate a Match node for rule `file`, requesting `n` matches. #}
{% macro Match(name, file, n) %}
{{ name }} = Match("{{ file }}", {{ n }})
{%- endmacro %}
{# Instantiate a Rewrite node for rule `file`. #}
{% macro Rewrite(name, file) %}
{{ name }} = Rewrite("{{ file }}")
{%- endmacro %}
{# Instantiate a DataModify node with the given modification dict. #}
{% macro Data_modify(name, dict) %}
{{ name }} = DataModify({{ dict }})
{%- endmacro %}
{# Wire an execution-flow connection between two nodes' gates. #}
{% macro Exec_con(name_from, name_to, gate_from, gate_to) %}
{{ name_from }}.connect({{ name_to }},{{ gate_from }},{{ gate_to }})
{%- endmacro %}
{# Wire a data-flow connection triggered on `event`. #}
{% macro Data_con(name_from, name_to, event) %}
{{ name_from }}.connect_data({{ name_to }}, {{ event }})
{%- endmacro %}
{# Instantiate a Loop node.
   NOTE(review): "choise" is presumably a typo for "choice"; kept as-is
   because generator call sites may use it as a keyword argument. #}
{% macro Loop(name, choise) %}
{{ name }} = Loop({{ choise }})
{%- endmacro %}
{# Instantiate a Print node with the given label prefix. #}
{% macro Print(name, label) %}
{{ name }} = Print("{{ label }}")
{%- endmacro %}

View file

@ -0,0 +1,47 @@
from examples.schedule.schedule_lib import *
{# This class body is a Jinja template: the `{% ... %}` blocks are expanded
   by the schedule generator into concrete node construction and wiring
   statements. #}
class Schedule:
    """Generated scheduler: steps through the node graph until NullNode is reached."""

    def __init__(self, rule_executer):
        self.start: Start          # entry node of the graph (assigned in init_schedule)
        self.cur: ExecNode = None  # node to resume from on the next call
        self.rule_executer = rule_executer

    def __call__(self, od):
        # Advance past the node we stopped at last time, then keep stepping
        # until a node produces actions or the terminal NullNode is reached.
        self.cur = self.cur.nextState()
        while not isinstance(self.cur, NullNode):
            action_gen = self.cur.execute(od)
            if action_gen is not None:
                # if (action_gen := self.cur.execute(od)) is not None:
                return action_gen
            self.cur = self.cur.nextState()
        return NullNode.terminate(od)

    @staticmethod
    def get_matchers():
        # Names of the rule files this schedule needs pre-compiled matchers for.
        return [
            {% for file in match_files %}
            "{{ file }}.od",
            {% endfor %}
        ]

    def init_schedule(self, matchers):
        # Generated: node construction statements.
        {% for block in blocks%}
        {{ block }}
        {% endfor %}
        # Generated: execution-flow connections.
        {% for conn in exec_conn%}
        {{ conn }}
        {% endfor %}
        # Generated: data-flow connections.
        {% for conn_d in data_conn%}
        {{ conn_d }}
        {% endfor %}
        self.start = {{ start }}
        self.cur = {{ start }}
        # Bind each matcher node to its pre-compiled rule.
        {% for match in matchers %}
        {{ match["name"] }}.init_rule(matchers["{{ match["file"] }}.od"], self.rule_executer)
        {% endfor %}
        return None

    def generate_dot(self, *args, **kwargs):
        return self.start.generate_dot(*args, **kwargs)

View file

@ -148,7 +148,7 @@ class OD:
actioncode_t.create(value)
return self.create_model_ref(name, "ActionCode", actioncode_node)
def create_bytes_value(self, name: str, value: str):
def create_bytes_value(self, name: str, value: bytes):
from services.primitives.bytes_type import Bytes
bytes_node = self.bottom.create_node()
bytes_t = Bytes(bytes_node, self.bottom.state)

View file

@ -22,7 +22,6 @@ class TryAgainNextRound(Exception):
# Rewrite is performed in-place (modifying `host_m`)
def rewrite(state,
lhs_m: UUID, # LHS-pattern
rhs_m: UUID, # RHS-pattern
pattern_mm: UUID, # meta-model of both patterns (typically the RAMified host_mm)
lhs_match: dict, # a match, morphism, from lhs_m to host_m (mapping pattern name -> host name), typically found by the 'match_od'-function.

View file

@ -117,7 +117,6 @@ class RuleMatcherRewriter:
try:
rhs_match = rewrite(self.state,
lhs_m=lhs,
rhs_m=rhs,
pattern_mm=self.mm_ramified,
lhs_match=lhs_match,

View file

@ -27,6 +27,8 @@ class RandomDecisionMaker(DecisionMaker):
def __call__(self, actions):
    """Pick one action uniformly at random; return None when there is none.

    `actions` is an iterable of (description, action) pairs; only the
    action part is considered.
    """
    candidates = [action for _, action in actions]
    if not candidates:
        return
    # self.r is the instance's random generator; floor maps [0, 1) onto
    # a valid index.
    index = math.floor(self.r.random() * len(candidates))
    return candidates[index]
@ -91,7 +93,7 @@ class MinimalSimulator:
self._print("Start simulation")
self._print(f"Decision maker: {self.decision_maker}")
step_counter = 0
while True:
while step_counter < 10:
termination_reason = self.termination_condition(model)
if termination_reason != None:
self._print(f"Termination condition satisfied.\nReason: {termination_reason}.")