diff --git a/examples/schedule/__init__.py b/api/__init__.py similarity index 100% rename from examples/schedule/__init__.py rename to api/__init__.py diff --git a/api/od.py b/api/od.py index cb335e0..cfaa049 100644 --- a/api/od.py +++ b/api/od.py @@ -5,13 +5,10 @@ from services.primitives.boolean_type import Boolean from services.primitives.integer_type import Integer from services.primitives.string_type import String from services.primitives.actioncode_type import ActionCode -from services.primitives.bytes_type import Bytes from uuid import UUID -from typing import Optional -from util.timer import Timer +from typing import Optional, Any -NEXT_LINK_ID = 0 -NEXT_OBJ_ID = 0 +NEXT_ID = 0 # Models map names to elements # This builds the inverse mapping, so we can quickly lookup the name of an element @@ -43,12 +40,11 @@ class ODAPI: self.create_integer_value = self.od.create_integer_value self.create_string_value = self.od.create_string_value self.create_actioncode_value = self.od.create_actioncode_value - self.create_bytes_value = self.od.create_bytes_value - self.__recompute_mappings() + self.recompute_mappings() # Called after every change - makes querying faster but modifying slower - def __recompute_mappings(self): + def recompute_mappings(self): self.m_obj_to_name = build_name_mapping(self.state, self.m) self.mm_obj_to_name = build_name_mapping(self.state, self.mm) self.type_to_objs = { type_name : set() for type_name in self.bottom.read_keys(self.mm)} @@ -63,25 +59,33 @@ class ODAPI: def get_value(self, obj: UUID): return od.read_primitive_value(self.bottom, obj, self.mm)[0] - def get_target(self, link: UUID): + def get_target(self, link: UUID) -> UUID: return self.bottom.read_edge_target(link) - def get_source(self, link: UUID): + def get_source(self, link: UUID) -> UUID: return self.bottom.read_edge_source(link) - def get_slot(self, obj: UUID, attr_name: str): + def get_slot(self, obj: UUID, attr_name: str) -> UUID: slot = self.od.get_slot(obj, attr_name) if slot == None: raise NoSuchSlotException(f"Object '{self.m_obj_to_name[obj]}' has no slot '{attr_name}'") return slot - def get_slot_link(self, obj: UUID, attr_name: str): + def get_slot_link(self, obj: UUID, attr_name: str) -> UUID: return self.od.get_slot_link(obj, attr_name) # Parameter 'include_subtypes': whether to include subtypes of the given association - def get_outgoing(self, obj: UUID, assoc_name: str, include_subtypes=True): + def get_outgoing(self, obj: UUID, assoc_name: str, include_subtypes=True) -> list[UUID]: outgoing = self.bottom.read_outgoing_edges(obj) - result = [] + return self.filter_edges_by_type(outgoing, assoc_name, include_subtypes) + + # Parameter 'include_subtypes': whether to include subtypes of the given association + def get_incoming(self, obj: UUID, assoc_name: str, include_subtypes=True): + incoming = self.bottom.read_incoming_edges(obj) + return self.filter_edges_by_type(incoming, assoc_name, include_subtypes) + + def filter_edges_by_type(self, outgoing: list[UUID], assoc_name: str, include_subtypes=True) -> list[UUID]: + result: list[UUID] = [] for o in outgoing: try: type_of_outgoing_link = self.get_type_name(o) @@ -92,23 +96,8 @@ class ODAPI: result.append(o) return result - - # Parameter 'include_subtypes': whether to include subtypes of the given association - def get_incoming(self, obj: UUID, assoc_name: str, include_subtypes=True): - incoming = self.bottom.read_incoming_edges(obj) - result = [] - for i in incoming: - try: - type_of_incoming_link = self.get_type_name(i) - except: - 
continue # OK, not all edges are typed - if (include_subtypes and self.cdapi.is_subtype(super_type_name=assoc_name, sub_type_name=type_of_incoming_link) - or not include_subtypes and type_of_incoming_link == assoc_name): - result.append(i) - return result - # Returns list of tuples (name, obj) - def get_all_instances(self, type_name: str, include_subtypes=True): + def get_all_instances(self, type_name: str, include_subtypes=True) -> list[UUID]: if include_subtypes: all_types = self.cdapi.transitive_sub_types[type_name] else: @@ -130,7 +119,7 @@ class ODAPI: else: raise Exception(f"Couldn't find name of {obj} - are you sure it exists in the (meta-)model?") - def get(self, name: str): + def get(self, name: str) -> UUID: results = self.bottom.read_outgoing_elements(self.m, name) if len(results) == 1: return results[0] @@ -139,10 +128,10 @@ class ODAPI: else: raise Exception(f"No such element in model: '{name}'") - def get_type_name(self, obj: UUID): + def get_type_name(self, obj: UUID) -> str: return self.get_name(self.get_type(obj)) - def is_instance(self, obj: UUID, type_name: str, include_subtypes=True): + def is_instance(self, obj: UUID, type_name: str, include_subtypes=True) -> bool: typ = self.cdapi.get_type(type_name) types = set(typ) if not include_subtypes else self.cdapi.transitive_sub_types[type_name] for type_of_obj in self.bottom.read_outgoing_elements(obj, "Morphism"): @@ -150,18 +139,21 @@ class ODAPI: return True return False - def delete(self, obj: UUID): + def delete(self, obj: UUID) -> None: self.bottom.delete_element(obj) - self.__recompute_mappings() + self.recompute_mappings() # Does the the object have the given attribute? - def has_slot(self, obj: UUID, attr_name: str): - return self.od.get_slot_link(obj, attr_name) != None + def has_slot(self, obj: UUID, attr_name: str) -> bool: + class_name = self.get_name(self.get_type(obj)) + if self.od.get_attr_link_name(class_name, attr_name) is None: + return False + return self.od.get_slot_link(obj, attr_name) is not None def get_slots(self, obj: UUID) -> list[str]: return [attr_name for attr_name, _ in self.od.get_slots(obj)] - def get_slot_value(self, obj: UUID, attr_name: str): + def get_slot_value(self, obj: UUID, attr_name: str) -> Any: slot = self.get_slot(obj, attr_name) return self.get_value(slot) @@ -174,14 +166,14 @@ class ODAPI: # Returns the given default value if the slot does not exist on the object. # The attribute must exist in the object's class, or an exception will be thrown. # The slot may not exist however, if the attribute is defined as 'optional' in the class. 
- def get_slot_value_default(self, obj: UUID, attr_name: str, default: any): + def get_slot_value_default(self, obj: UUID, attr_name: str, default: any) -> any: try: return self.get_slot_value(obj, attr_name) except NoSuchSlotException: return default # create or update slot value - def set_slot_value(self, obj: UUID, attr_name: str, new_value: any, is_code=False): + def set_slot_value(self, obj: UUID, attr_name: str, new_value: any, is_code=False) -> None: obj_name = self.get_name(obj) link_name = f"{obj_name}_{attr_name}" @@ -196,7 +188,7 @@ class ODAPI: new_target = self.create_primitive_value(target_name, new_value, is_code) slot_type = self.cdapi.find_attribute_type(self.get_type_name(obj), attr_name) new_link = self.od._create_link(link_name, slot_type, obj, new_target) - self.__recompute_mappings() + self.recompute_mappings() def create_primitive_value(self, name: str, value: any, is_code=False): # watch out: in Python, 'bool' is subtype of 'int' @@ -210,44 +202,29 @@ class ODAPI: tgt = self.create_actioncode_value(name, value) else: tgt = self.create_string_value(name, value) - elif isinstance(value, bytes): - tgt = self.create_bytes_value(name, value) else: raise Exception("Unimplemented type "+value) - self.__recompute_mappings() + self.recompute_mappings() return tgt def overwrite_primitive_value(self, name: str, value: any, is_code=False): referred_model = UUID(self.bottom.read_value(self.get(name))) - to_overwrite_type = self.get_type_name(self.get(name)) # watch out: in Python, 'bool' is subtype of 'int' # so we must check for 'bool' first if isinstance(value, bool): - if to_overwrite_type != "Boolean": - raise Exception(f"Cannot assign boolean value '{value}' to value of type {to_overwrite_type}.") Boolean(referred_model, self.state).create(value) elif isinstance(value, int): - if to_overwrite_type != "Integer": - raise Exception(f"Cannot assign integer value '{value}' to value of type {to_overwrite_type}.") Integer(referred_model, self.state).create(value) elif isinstance(value, str): if is_code: - if to_overwrite_type != "ActionCode": - raise Exception(f"Cannot assign code to value of type {to_overwrite_type}.") ActionCode(referred_model, self.state).create(value) else: - if to_overwrite_type != "String": - raise Exception(f"Cannot assign string value '{value}' to value of type {to_overwrite_type}.") String(referred_model, self.state).create(value) - elif isinstance(value, bytes): - if to_overwrite_type != "Bytes": - raise Exception(f"Cannot assign bytes value '{value}' to value of type {to_overwrite_type}.") - Bytes(referred_model, self.state).create(value) else: raise Exception("Unimplemented type "+value) - def create_link(self, link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID): - global NEXT_LINK_ID + def create_link(self, link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID) -> UUID: + global NEXT_ID types = self.bottom.read_outgoing_elements(self.mm, assoc_name) if len(types) == 0: raise Exception(f"No such association: '{assoc_name}'") @@ -255,20 +232,15 @@ class ODAPI: raise Exception(f"More than one association exists with name '{assoc_name}' - this means the MM is invalid.") typ = types[0] if link_name == None: - link_name = f"__{assoc_name}{NEXT_LINK_ID}" - NEXT_LINK_ID += 1 + link_name = f"__{assoc_name}{NEXT_ID}" + NEXT_ID += 1 link_id = self.od._create_link(link_name, typ, src, tgt) - self.__recompute_mappings() - + self.recompute_mappings() return link_id - def create_object(self, object_name: Optional[str], class_name: str): - 
global NEXT_OBJ_ID - if object_name == None: - object_name = f"__{class_name}{NEXT_OBJ_ID}" - NEXT_OBJ_ID += 1 + def create_object(self, object_name: Optional[str], class_name: str) -> UUID: obj = self.od.create_object(object_name, class_name) - self.__recompute_mappings() + self.recompute_mappings() return obj # internal use @@ -307,6 +279,6 @@ def bind_api(odapi): 'create_object': odapi.create_object, 'create_link': odapi.create_link, 'delete': odapi.delete, - 'set_slot_value': odapi.set_slot_value, + 'set_slot_value': odapi.set_slot_value } return funcs diff --git a/api/od_stub.pyi b/api/od_stub.pyi new file mode 100644 index 0000000..563e3e0 --- /dev/null +++ b/api/od_stub.pyi @@ -0,0 +1,9 @@ +from typing import Optional +from uuid import UUID + +from od_stub_readonly import * + +def create_object(object_name: Optional[str], class_name: str) -> UUID: ... +def create_link(link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID) -> UUID: ... +def delete(obj: UUID) -> None: ... +def set_slot_value(obj: UUID, attr_name: str, new_value: any, is_code=False) -> None: ... \ No newline at end of file diff --git a/api/od_stub_readonly.pyi b/api/od_stub_readonly.pyi new file mode 100644 index 0000000..89bbc4c --- /dev/null +++ b/api/od_stub_readonly.pyi @@ -0,0 +1,18 @@ +from typing import Any +from uuid import UUID + +def get(name: str) -> UUID: ... +def get_value(obj: UUID) -> Any: ... +def get_target(link: UUID) -> UUID: ... +def get_source(link: UUID) -> UUID: ... +def get_slot(obj: UUID, attr_name: str) -> UUID: ... +def get_slots(obj: UUID) -> list[str]: ... +def get_slot_value(obj: UUID, attr_name: str) -> Any: ... +def get_slot_value_default(obj: UUID, attr_name: str, default: any) -> Any: ... +def get_all_instances(type_name: str, include_subtypes=True) -> list[UUID]: ... +def get_name(obj: UUID) -> str: ... +def get_type_name(obj: UUID) -> str: ... +def get_outgoing(obj: UUID, assoc_name: str, include_subtypes=True) -> list[UUID]: ... +def get_incoming(obj: UUID, assoc_name: str, include_subtypes: object = True) -> list[UUID]: ... +def has_slot(obj: UUID, attr_name: str) -> bool: ... +def is_instance(obj: UUID, type_name: str, include_subtypes=True) -> bool: ... 
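For orientation, a minimal usage sketch of the read-only API described by these stubs, modelled on the geraniums example further down in this diff. The `od` variable is assumed to be an ODAPI instance for that model; note that, as in the examples and the "Returns list of tuples (name, obj)" comment in od.py, get_all_instances yields (name, object) pairs even though the stub annotates list[UUID]:

# Hedged sketch only: traversing a model through the ODAPI read-only calls.
for pot_name, pot in od.get_all_instances("Pot"):
    if od.get_slot_value(pot, "cracked"):
        # Follow incoming 'Planted' links to find the geraniums standing in this pot.
        for planted in od.get_incoming(pot, "Planted"):
            geranium = od.get_source(planted)
            print(f"{od.get_name(geranium)} is planted in cracked pot {pot_name}")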
diff --git a/examples/performance/runner.py b/benchmark/runner.py similarity index 100% rename from examples/performance/runner.py rename to benchmark/runner.py diff --git a/concrete_syntax/textual_cd/parser.py b/concrete_syntax/textual_cd/parser.py index 8b8ebd7..352ef62 100644 --- a/concrete_syntax/textual_cd/parser.py +++ b/concrete_syntax/textual_cd/parser.py @@ -40,7 +40,7 @@ attrs: attr* constraint: CODE | INDENTED_CODE -class_: [ABSTRACT] "class" IDENTIFIER [multiplicity] ["(" superclasses ")"] ["{" attrs [constraint] "}"] +class_: [ABSTRACT] "class" IDENTIFIER [multiplicity] ["(" superclasses ")"] ["{" attrs [constraint ";"] "}"] association: "association" IDENTIFIER [multiplicity] IDENTIFIER "->" IDENTIFIER [multiplicity] ["{" attrs [constraint] "}"] @@ -75,7 +75,7 @@ def parse_cd(state, m_text): primitive_types = { type_name : UUID(state.read_value(state.read_dict(state.read_root(), type_name))) - for type_name in ["Integer", "String", "Boolean"] + for type_name in ["Integer", "String", "Boolean", "ActionCode"] } class T(TBase): diff --git a/examples/geraniums/geraniums_renderer.j2 b/examples/geraniums/geraniums_renderer.j2 new file mode 100644 index 0000000..1ef47cc --- /dev/null +++ b/examples/geraniums/geraniums_renderer.j2 @@ -0,0 +1,34 @@ +digraph G { + rankdir=LR; + center=true; + margin=1; + nodesep=1; + + node [fontname="Arial", fontsize=10, shape=box, style=filled, fillcolor=white]; + + // Geraniums + {% for id, name, flowering in geraniums %} + g{{ id }} [ + label="geranium: {{ name }}\n({{ 'flowering' if flowering else 'not flowering' }})", + shape=ellipse, + fillcolor="{{ 'lightpink' if flowering else 'lightgray' }}", + fontcolor=black + ]; + {% endfor %} + + // Pots + {% for id, name, cracked in pots %} + p{{ id }} [ + label="pot: {{ name }}\n({{ 'cracked' if cracked else 'pristine' }})", + shape=box, + fillcolor="{{ 'mistyrose' if cracked else 'lightgreen' }}", + fontcolor=black, + style="filled,bold" + ]; + {% endfor %} + + // Connections: geranium -> pot + {% for source, target in planted %} + g{{ source }} -> p{{ target }}; + {% endfor %} +} diff --git a/examples/geraniums/metamodels/mm.od b/examples/geraniums/metamodels/mm.od new file mode 100644 index 0000000..f6f6962 --- /dev/null +++ b/examples/geraniums/metamodels/mm.od @@ -0,0 +1,9 @@ +class Geranium { + Boolean flowering; +} + +class Pot { + Boolean cracked; +} + +association Planted [0..*] Geranium -> Pot [1..1] diff --git a/examples/geraniums/models/eval_context.py b/examples/geraniums/models/eval_context.py new file mode 100644 index 0000000..d8dfcd8 --- /dev/null +++ b/examples/geraniums/models/eval_context.py @@ -0,0 +1,44 @@ +import os + +from jinja2 import Environment, FileSystemLoader + +from api.od import ODAPI +from framework.conformance import eval_context_decorator + + +@eval_context_decorator +def _render_geraniums_dot(od: ODAPI, file: str) -> str: + __DIR__ = os.path.dirname(__file__) + env = Environment( + loader=FileSystemLoader( + __DIR__ + ) + ) + env.trim_blocks = True + env.lstrip_blocks = True + template_dot = env.get_template("geraniums_renderer.j2") + + id_count = 0 + id_map = {} + render = {"geraniums": [], "pots": [], "planted": []} + + for name, uuid in od.get_all_instances("Geranium"): + render["geraniums"].append((id_count, name, od.get_slot_value(uuid, "flowering"))) + id_map[uuid] = id_count + id_count += 1 + + for name, uuid in od.get_all_instances("Pot"): + render["pots"].append((id_count, name, od.get_slot_value(uuid, "cracked"))) + id_map[uuid] = id_count + id_count += 1 + 
+ for name, uuid in od.get_all_instances("Planted"): + render["planted"].append((id_map[od.get_source(uuid)], id_map[od.get_target(uuid)])) + + with open(file, "w", encoding="utf-8") as f_dot: + f_dot.write(template_dot.render(**render)) + return "" + +eval_context = { + "render_geraniums_dot": _render_geraniums_dot, +} diff --git a/examples/geraniums/models/example1.od b/examples/geraniums/models/example1.od new file mode 100644 index 0000000..db5bc32 --- /dev/null +++ b/examples/geraniums/models/example1.od @@ -0,0 +1,17 @@ +f1:Geranium { + flowering = True; +} +f2:Geranium { + flowering = False; +} +f3:Geranium { + flowering = True; +} + +p1:Pot { + cracked = True; +} + +:Planted (f1 -> p1) +:Planted (f2 -> p1) +:Planted (f3 -> p1) \ No newline at end of file diff --git a/examples/geraniums/models/example2.od b/examples/geraniums/models/example2.od new file mode 100644 index 0000000..9c4e0f4 --- /dev/null +++ b/examples/geraniums/models/example2.od @@ -0,0 +1,47 @@ +f1:Geranium { + flowering = True; +} +f2:Geranium { + flowering = True; +} +f3:Geranium { + flowering = False; +} + +p1:Pot { + cracked = True; +} + +:Planted (f1 -> p1) +:Planted (f2 -> p1) +:Planted (f3 -> p1) + + + + +f4:Geranium { + flowering = True; +} +p2:Pot { + cracked = True; +} +:Planted (f4 -> p2) + + + +f5:Geranium { + flowering = True; +} +p3:Pot { + cracked = False; +} +:Planted (f5 -> p3) + + +f6:Geranium { + flowering = False; +} +p4:Pot { + cracked = True; +} +:Planted (f6 -> p4) \ No newline at end of file diff --git a/examples/geraniums/renderer.py b/examples/geraniums/renderer.py new file mode 100644 index 0000000..3ac50f5 --- /dev/null +++ b/examples/geraniums/renderer.py @@ -0,0 +1,45 @@ +import os + +from jinja2 import Environment, FileSystemLoader + +from api.od import ODAPI +from concrete_syntax.graphviz.make_url import show_graphviz +from concrete_syntax.graphviz.renderer import make_graphviz_id + +try: + import graphviz + HAVE_GRAPHVIZ = True +except ImportError: + HAVE_GRAPHVIZ = False + +def render_geraniums_dot(od: ODAPI, file: str) -> str: + __DIR__ = os.path.dirname(__file__) + env = Environment( + loader=FileSystemLoader( + __DIR__ + ) + ) + env.trim_blocks = True + env.lstrip_blocks = True + template_dot = env.get_template("geraniums_renderer.j2") + + id_count = 0 + id_map = {} + render = {"geraniums": [], "pots": [], "planted": []} + + for name, uuid in od.get_all_instances("Geranium"): + render["geraniums"].append((id_count, name, od.get_slot_value(uuid, "flowering"))) + id_map[uuid] = id_count + id_count += 1 + + for name, uuid in od.get_all_instances("Pot"): + render["pots"].append((id_count, name, od.get_slot_value(uuid, "cracked"))) + id_map[uuid] = id_count + id_count += 1 + + for name, uuid in od.get_all_instances("Planted"): + render["planted"].append((id_map[od.get_source(uuid)], id_map[od.get_target(uuid)])) + + with open(file, "w", encoding="utf-8") as f_dot: + f_dot.write(template_dot.render(**render)) + return "" \ No newline at end of file diff --git a/examples/geraniums/rules/cracked_pots.od b/examples/geraniums/rules/cracked_pots.od new file mode 100644 index 0000000..61ef57f --- /dev/null +++ b/examples/geraniums/rules/cracked_pots.od @@ -0,0 +1,3 @@ +pot:RAM_Pot { + RAM_cracked = `get_value(this)`; +} \ No newline at end of file diff --git a/examples/geraniums/rules/create_pot.od b/examples/geraniums/rules/create_pot.od new file mode 100644 index 0000000..c6ef5d0 --- /dev/null +++ b/examples/geraniums/rules/create_pot.od @@ -0,0 +1,3 @@ +pot:RAM_Pot { + RAM_cracked = 
`False`; +} \ No newline at end of file diff --git a/examples/geraniums/rules/flowering_flowers_in_pot.od b/examples/geraniums/rules/flowering_flowers_in_pot.od new file mode 100644 index 0000000..591c123 --- /dev/null +++ b/examples/geraniums/rules/flowering_flowers_in_pot.od @@ -0,0 +1,7 @@ +pot:RAM_Pot + +flower:RAM_Geranium { + RAM_flowering = `get_value(this)`; +} + +:RAM_Planted (flower -> pot) \ No newline at end of file diff --git a/examples/geraniums/rules/repot_flower_in_pot.od b/examples/geraniums/rules/repot_flower_in_pot.od new file mode 100644 index 0000000..134813f --- /dev/null +++ b/examples/geraniums/rules/repot_flower_in_pot.od @@ -0,0 +1,8 @@ +pot:RAM_Pot +new_pot:RAM_Pot + +flower:RAM_Geranium { + RAM_flowering = `get_value(this)`; +} + +replant:RAM_Planted (flower -> new_pot) \ No newline at end of file diff --git a/examples/geraniums/runner.py b/examples/geraniums/runner.py new file mode 100644 index 0000000..cd72db6 --- /dev/null +++ b/examples/geraniums/runner.py @@ -0,0 +1,48 @@ +from examples.geraniums.renderer import render_geraniums_dot +from transformation.ramify import ramify + +from models.eval_context import eval_context + +from transformation.schedule.rule_scheduler import * + +if __name__ == "__main__": + import os + THIS_DIR = os.path.dirname(__file__) + + # get file contents as string + def read_file(filename): + with open(THIS_DIR+'/'+filename) as file: + return file.read() + + + state = DevState() + scd_mmm = bootstrap_scd(state) + + mm_cs = read_file('metamodels/mm.od') + m_cs = read_file('models/example2.od') + + mm = parser_cd.parse_cd( + state, + m_text=mm_cs, + ) + m = parser_od.parse_od( + state, m_text=m_cs, mm=mm + ) + conf_err = Conformance( + state, m, mm + ).check_nominal() + print(render_conformance_check_result(conf_err)) + mm_ramified = ramify(state, mm) + + action_generator = RuleScheduler(state, mm, mm_ramified, verbose=True, directory="examples/geraniums", eval_context=eval_context) + od = ODAPI(state, m, mm) + render_geraniums_dot(od, f"{THIS_DIR}/geraniums.dot") + + # if action_generator.load_schedule(f"petrinet.od"): + # if action_generator.load_schedule("schedules/combinatory.drawio"): + if action_generator.load_schedule("schedules/schedule.drawio"): + + action_generator.generate_dot("../dot.dot") + code, message = action_generator.run(od) + print(f"{code}: {message}") + render_geraniums_dot(od, f"{THIS_DIR}/geraniums_final.dot") \ No newline at end of file diff --git a/examples/geraniums/schedules/schedule.drawio b/examples/geraniums/schedules/schedule.drawio new file mode 100644 index 0000000..41437fa --- /dev/null +++ b/examples/geraniums/schedules/schedule.drawio @@ -0,0 +1,645 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/geraniums/schedules/schedule.od b/examples/geraniums/schedules/schedule.od new file mode 100644 index 0000000..e69de29 diff --git a/examples/model_transformation/woods.plantuml b/examples/model_transformation/woods.plantuml deleted file mode 100644 index 0d52b6b..0000000 --- a/examples/model_transformation/woods.plantuml +++ /dev/null @@ -1,139 +0,0 @@ -package "DSL Meta-Model" { -class "Bear" as 00000000_0000_0000_0000_00000000046d { -} -abstract class "Animal" as 00000000_0000_0000_0000_000000000474 { -} -class "Man" as 00000000_0000_0000_0000_000000000491 { - weight : Integer -} - -00000000_0000_0000_0000_000000000474 <|-- 00000000_0000_0000_0000_000000000491 -00000000_0000_0000_0000_000000000474 <|-- 00000000_0000_0000_0000_00000000046d - -00000000_0000_0000_0000_000000000491 " " --> "1 .. *" 00000000_0000_0000_0000_000000000474 : afraidOf -} -package "Int Meta-Model" { -class "Integer" as 00000000_0000_0000_0000_000000000094 { -} - - -} -package "RAMified DSL Meta-Model" { -class "RAM_Bear" as 00000000_0000_0000_0000_0000000005bb { -} -class "RAM_Animal" as 00000000_0000_0000_0000_0000000005c5 { -} -class "RAM_Man" as 00000000_0000_0000_0000_0000000005cf { - RAM_weight : ActionCode -} - -00000000_0000_0000_0000_0000000005c5 <|-- 00000000_0000_0000_0000_0000000005cf -00000000_0000_0000_0000_0000000005c5 <|-- 00000000_0000_0000_0000_0000000005bb - -00000000_0000_0000_0000_0000000005cf " " --> "0 .. 
*" 00000000_0000_0000_0000_0000000005c5 : RAM_afraidOf -} -package "RAMified Int Meta-Model" { -class "RAM_Integer" as 00000000_0000_0000_0000_00000000064c { -} - - -} -00000000_0000_0000_0000_0000000005bb ..> 00000000_0000_0000_0000_00000000046d #line:green;text:green : RAMifies -00000000_0000_0000_0000_0000000005c5 ..> 00000000_0000_0000_0000_000000000474 #line:green;text:green : RAMifies -00000000_0000_0000_0000_0000000005cf ..> 00000000_0000_0000_0000_000000000491 #line:green;text:green : RAMifies -00000000_0000_0000_0000_0000000005cf::RAM_weight ..> 00000000_0000_0000_0000_000000000491::weight #line:green;text:green : RAMifies -00000000_0000_0000_0000_00000000064c ..> 00000000_0000_0000_0000_000000000094 #line:green;text:green : RAMifies -package "LHS" { -map "scaryAnimal : RAM_Animal" as 00000000_0000_0000_0000_00000000068a { -} -map "man : RAM_Man" as 00000000_0000_0000_0000_00000000066d { -RAM_weight => `v > 60` -} - -00000000_0000_0000_0000_00000000066d -> 00000000_0000_0000_0000_00000000068a : :RAM_afraidOf -} -00000000_0000_0000_0000_00000000068a ..> 00000000_0000_0000_0000_0000000005c5 #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_00000000066d ..> 00000000_0000_0000_0000_0000000005cf #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_00000000066d::RAM_weight ..> 00000000_0000_0000_0000_0000000005cf::RAM_weight #line:blue;text:blue : instanceOf - -package "RHS" { -map "man : RAM_Man" as 00000000_0000_0000_0000_000000000699 { -RAM_weight => `v + 5` -} -map "bill : RAM_Man" as 00000000_0000_0000_0000_0000000006b6 { -RAM_weight => `100` -} - -00000000_0000_0000_0000_0000000006b6 -> 00000000_0000_0000_0000_000000000699 : :RAM_afraidOf -} -00000000_0000_0000_0000_000000000699 ..> 00000000_0000_0000_0000_0000000005cf #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000699::RAM_weight ..> 00000000_0000_0000_0000_0000000005cf::RAM_weight #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_0000000006b6 ..> 00000000_0000_0000_0000_0000000005cf #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_0000000006b6::RAM_weight ..> 00000000_0000_0000_0000_0000000005cf::RAM_weight #line:blue;text:blue : instanceOf - -package "Model (before rewrite)" { -map "bear2 : Bear" as 00000000_0000_0000_0000_000000000597 { -} -map "bear1 : Bear" as 00000000_0000_0000_0000_000000000590 { -} -map "george : Man" as 00000000_0000_0000_0000_000000000573 { -weight => 80 -} - -00000000_0000_0000_0000_000000000573 -> 00000000_0000_0000_0000_000000000590 : :afraidOf -00000000_0000_0000_0000_000000000573 -> 00000000_0000_0000_0000_000000000597 : :afraidOf -} -00000000_0000_0000_0000_000000000597 ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000590 ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000573 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000573::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf - -00000000_0000_0000_0000_00000000068a ..> 00000000_0000_0000_0000_000000000590 #line:red;line.dotted;text:red : matchedWith -00000000_0000_0000_0000_00000000066d ..> 00000000_0000_0000_0000_000000000573 #line:red;line.dotted;text:red : matchedWith -00000000_0000_0000_0000_00000000066d::RAM_weight ..> 00000000_0000_0000_0000_000000000573::weight #line:red;line.dotted;text:red : matchedWith -package "Model (after rewrite 0)" { -map "bear2 : Bear" as 
00000000_0000_0000_0000_0000000006db { -} -map "george : Man" as 00000000_0000_0000_0000_0000000006e9 { -weight => 85 -} -map "bill0 : Man" as 00000000_0000_0000_0000_000000000723 { -weight => 100 -} - -00000000_0000_0000_0000_000000000723 -> 00000000_0000_0000_0000_0000000006e9 : :afraidOf -00000000_0000_0000_0000_0000000006e9 -> 00000000_0000_0000_0000_0000000006db : :afraidOf -} -00000000_0000_0000_0000_000000000699 ..> 00000000_0000_0000_0000_0000000006e9 #line:red;line.dotted;text:red : matchedWith -00000000_0000_0000_0000_000000000699::RAM_weight ..> 00000000_0000_0000_0000_0000000006e9::weight #line:red;line.dotted;text:red : matchedWith -00000000_0000_0000_0000_0000000006b6 ..> 00000000_0000_0000_0000_000000000723 #line:red;line.dotted;text:red : matchedWith -00000000_0000_0000_0000_0000000006db ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_0000000006e9 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_0000000006e9::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000723 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000723::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf - -00000000_0000_0000_0000_00000000068a ..> 00000000_0000_0000_0000_000000000597 #line:orange;line.dotted;text:orange : matchedWith -00000000_0000_0000_0000_00000000066d ..> 00000000_0000_0000_0000_000000000573 #line:orange;line.dotted;text:orange : matchedWith -00000000_0000_0000_0000_00000000066d::RAM_weight ..> 00000000_0000_0000_0000_000000000573::weight #line:orange;line.dotted;text:orange : matchedWith -package "Model (after rewrite 1)" { -map "bear1 : Bear" as 00000000_0000_0000_0000_000000000747 { -} -map "george : Man" as 00000000_0000_0000_0000_00000000074e { -weight => 85 -} -map "bill0 : Man" as 00000000_0000_0000_0000_000000000788 { -weight => 100 -} - -00000000_0000_0000_0000_000000000788 -> 00000000_0000_0000_0000_00000000074e : :afraidOf -00000000_0000_0000_0000_00000000074e -> 00000000_0000_0000_0000_000000000747 : :afraidOf -} -00000000_0000_0000_0000_000000000699 ..> 00000000_0000_0000_0000_00000000074e #line:orange;line.dotted;text:orange : matchedWith -00000000_0000_0000_0000_000000000699::RAM_weight ..> 00000000_0000_0000_0000_00000000074e::weight #line:orange;line.dotted;text:orange : matchedWith -00000000_0000_0000_0000_0000000006b6 ..> 00000000_0000_0000_0000_000000000788 #line:orange;line.dotted;text:orange : matchedWith -00000000_0000_0000_0000_000000000747 ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_00000000074e ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_00000000074e::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000788 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf -00000000_0000_0000_0000_000000000788::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf diff --git a/examples/model_transformation/woods.py b/examples/model_transformation/woods.py deleted file mode 100644 index e6e88e5..0000000 --- a/examples/model_transformation/woods.py +++ /dev/null @@ -1,245 +0,0 @@ -# Model transformation experiment - -from state.devstate import DevState -from bootstrap.scd 
import bootstrap_scd -from uuid import UUID -from services.scd import SCD -from framework.conformance import Conformance -from services.od import OD -from transformation.matcher import match_od -from transformation.ramify import ramify -from transformation.cloner import clone_od -from transformation import rewriter -from services.bottom.V0 import Bottom -from services.primitives.integer_type import Integer -from concrete_syntax.plantuml import renderer as plantuml -from concrete_syntax.plantuml.make_url import make_url as make_plantuml_url -from concrete_syntax.textual_od import parser, renderer - -def main(): - state = DevState() - root = state.read_root() # id: 0 - - # Meta-meta-model: a class diagram that describes the language of class diagrams - scd_mmm_id = bootstrap_scd(state) - int_mm_id = UUID(state.read_value(state.read_dict(state.read_root(), "Integer"))) - string_mm_id = UUID(state.read_value(state.read_dict(state.read_root(), "String"))) - - # conf = Conformance(state, scd_mmm_id, scd_mmm_id) - # print("Conformance SCD_MM -> SCD_MM?", conf.check_nominal(log=True)) - # print("--------------------------------------") - # print(renderer.render_od(state, scd_mmm_id, scd_mmm_id, hide_names=True)) - # print("--------------------------------------") - - # Create DSL MM with parser - dsl_mm_cs = """ - # Integer:ModelRef - Bear:Class - Animal:Class { - abstract = True; - } - Man:Class { - lower_cardinality = 1; - upper_cardinality = 2; - constraint = ``` - get_value(get_slot(this, "weight")) > 20 - ```; - } - Man_weight:AttributeLink (Man -> Integer) { - name = "weight"; - optional = False; - constraint = ``` - # this is the same constraint as above, but this time, part of the attributelink itself (and thus shorter) - tgt = get_target(this) - tgt_type = get_type_name(tgt) - get_value(tgt) > 20 - ```; - } - afraidOf:Association (Man -> Animal) { - target_lower_cardinality = 1; - } - :Inheritance (Man -> Animal) - :Inheritance (Bear -> Animal) - - not_too_fat:GlobalConstraint { - constraint = ``` - # total weight of all men low enough - total_weight = 0 - for man_name, man_id in get_all_instances("Man"): - total_weight += get_value(get_slot(man_id, "weight")) - total_weight < 85 - ```; - } - """ - dsl_mm_id = parser.parse_od(state, dsl_mm_cs, mm=scd_mmm_id) - - # Create DSL M with parser - dsl_m_cs = """ - george:Man { - weight = 80; - } - bear1:Bear - bear2:Bear - :afraidOf (george -> bear1) - :afraidOf (george -> bear2) - """ - dsl_m_id = parser.parse_od(state, dsl_m_cs, mm=dsl_mm_id) - - # print("DSL MM:") - # print("--------------------------------------") - # print(renderer.render_od(state, dsl_mm_id, scd_mmm_id, hide_names=True)) - # print("--------------------------------------") - - conf = Conformance(state, dsl_mm_id, scd_mmm_id) - print("Conformance DSL_MM -> SCD_MM?", conf.check_nominal(log=True)) - - # print("DSL M:") - # print("--------------------------------------") - # print(renderer.render_od(state, dsl_m_id, dsl_mm_id, hide_names=True)) - # print("--------------------------------------") - - conf = Conformance(state, dsl_m_id, dsl_mm_id) - print("Conformance DSL_M -> DSL_MM?", conf.check_nominal(log=True)) - - # RAMify MM - prefix = "RAM_" # all ramified types can be prefixed to distinguish them a bit more - ramified_mm_id = ramify(state, dsl_mm_id, prefix) - ramified_int_mm_id = ramify(state, int_mm_id, prefix) - - # LHS - pattern to match - - # TODO: enable more powerful constraints - lhs_cs = f""" - # object to match - man:{prefix}Man {{ - # match only men heavy 
enough - {prefix}weight = `get_value(this) > 60`; - }} - - # object to delete - scaryAnimal:{prefix}Animal - - # link to delete - manAfraidOfAnimal:{prefix}afraidOf (man -> scaryAnimal) - """ - lhs_id = parser.parse_od(state, lhs_cs, mm=ramified_mm_id) - - - conf = Conformance(state, lhs_id, ramified_mm_id) - print("Conformance LHS_M -> RAM_DSL_MM?", conf.check_nominal(log=True)) - - # RHS of our rule - - # TODO: enable more powerful actions - rhs_cs = f""" - # matched object - man:{prefix}Man {{ - # man gains weight - {prefix}weight = `get_value(this) + 5`; - }} - - # object to create - bill:{prefix}Man {{ - # name = `"billie"+str(get_slot_value(matched("man"), "weight"))`; - {prefix}weight = `100`; - }} - - # link to create - billAfraidOfMan:{prefix}afraidOf (bill -> man) - """ - rhs_id = parser.parse_od(state, rhs_cs, mm=ramified_mm_id) - - conf = Conformance(state, rhs_id, ramified_mm_id) - print("Conformance RHS_M -> RAM_DSL_MM?", conf.check_nominal(log=True)) - - def render_ramification(): - uml = ("" - # Render original and RAMified meta-models - + plantuml.render_package("DSL Meta-Model", plantuml.render_class_diagram(state, dsl_mm_id)) - + plantuml.render_package("Int Meta-Model", plantuml.render_class_diagram(state, int_mm_id)) - + plantuml.render_package("RAMified DSL Meta-Model", plantuml.render_class_diagram(state, ramified_mm_id)) - + plantuml.render_package("RAMified Int Meta-Model", plantuml.render_class_diagram(state, ramified_int_mm_id)) - - # Render RAMification traceability links - + plantuml.render_trace_ramifies(state, dsl_mm_id, ramified_mm_id) - + plantuml.render_trace_ramifies(state, int_mm_id, ramified_int_mm_id) - ) - - return uml - - def render_lhs_rhs(): - uml = render_ramification() - # Render pattern - uml += plantuml.render_package("LHS", plantuml.render_object_diagram(state, lhs_id, ramified_mm_id)) - uml += plantuml.render_trace_conformance(state, lhs_id, ramified_mm_id) - - # Render pattern - uml += plantuml.render_package("RHS", plantuml.render_object_diagram(state, rhs_id, ramified_mm_id)) - uml += plantuml.render_trace_conformance(state, rhs_id, ramified_mm_id) - return uml - - - def render_all_matches(): - uml = render_lhs_rhs() - # Render host graph (before rewriting) - uml += plantuml.render_package("Model (before rewrite)", plantuml.render_object_diagram(state, dsl_m_id, dsl_mm_id)) - # Render conformance - uml += plantuml.render_trace_conformance(state, dsl_m_id, dsl_mm_id) - - print("matching...") - generator = match_od(state, dsl_m_id, dsl_mm_id, lhs_id, ramified_mm_id) - for match, color in zip(generator, ["red", "orange"]): - print("\nMATCH:\n", match) - - # Render every match - uml += plantuml.render_trace_match(state, match, lhs_id, dsl_m_id, color) - - print("DONE") - return uml - - def render_rewrite(): - uml = render_lhs_rhs() - - # Render host graph (before rewriting) - uml += plantuml.render_package("Model (before rewrite)", plantuml.render_object_diagram(state, dsl_m_id, dsl_mm_id)) - # Render conformance - uml += plantuml.render_trace_conformance(state, dsl_m_id, dsl_mm_id) - - generator = match_od(state, dsl_m_id, dsl_mm_id, lhs_id, ramified_mm_id) - for i, (match, color) in enumerate(zip(generator, ["red", "orange"])): - print("\nMATCH:\n", match) - uml += plantuml.render_trace_match(state, match, lhs_id, dsl_m_id, color) - - # rewrite happens in-place (which sucks), so we will only modify a clone: - snapshot_dsl_m_id = clone_od(state, dsl_m_id, dsl_mm_id) - rewriter.rewrite(state, lhs_id, rhs_id, ramified_mm_id, match, 
snapshot_dsl_m_id, dsl_mm_id) - - conf = Conformance(state, snapshot_dsl_m_id, dsl_mm_id) - print(f"Conformance DSL_M (after rewrite {i}) -> DSL_MM?", conf.check_nominal(log=True)) - - # Render host graph (after rewriting) - uml += plantuml.render_package(f"Model (after rewrite {i})", plantuml.render_object_diagram(state, snapshot_dsl_m_id, dsl_mm_id)) - # Render match - uml += plantuml.render_trace_match(state, match, rhs_id, snapshot_dsl_m_id, color) - # Render conformance - uml += plantuml.render_trace_conformance(state, snapshot_dsl_m_id, dsl_mm_id) - - return uml - - # plantuml_str = render_ramification() - # plantuml_str = render_all_matches() - plantuml_str = render_rewrite() - - print() - print("==============================================") - print("BEGIN PLANTUML") - print("==============================================") - - print(make_plantuml_url(plantuml_str)) - - print("==============================================") - print("END PLANTUML") - print("==============================================") - -if __name__ == "__main__": - main() diff --git a/examples/petrinet/models/m_example_simple.od b/examples/petrinet/models/m_example_simple.od index a3eee8d..d7dd1ea 100644 --- a/examples/petrinet/models/m_example_simple.od +++ b/examples/petrinet/models/m_example_simple.od @@ -1,5 +1,8 @@ p0:PNPlace p1:PNPlace +p2:PNPlace +p3:PNPlace +p4:PNPlace t0:PNTransition :arc (p0 -> t0) @@ -7,4 +10,12 @@ t0:PNTransition t1:PNTransition :arc (p1 -> t1) -:arc (t1 -> p0) \ No newline at end of file +:arc (t1 -> p2) + +t2:PNTransition +:arc (p2 -> t2) +:arc (t2 -> p0) + + +t3:PNTransition +:arc (t3 -> p4) \ No newline at end of file diff --git a/examples/petrinet/models/m_example_simple_rt_initial.od b/examples/petrinet/models/m_example_simple_rt_initial.od index fa93f4e..64fc3b7 100644 --- a/examples/petrinet/models/m_example_simple_rt_initial.od +++ b/examples/petrinet/models/m_example_simple_rt_initial.od @@ -9,3 +9,21 @@ p1s:PNPlaceState { } :pn_of (p1s -> p1) + +p2s:PNPlaceState { + numTokens = 0; +} + +:pn_of (p2s -> p2) + +p3s:PNPlaceState { + numTokens = 0; +} + +:pn_of (p3s -> p3) + +p4s:PNPlaceState { + numTokens = 0; +} + +:pn_of (p4s -> p4) diff --git a/examples/petrinet/models/rules/all_incoming.od b/examples/petrinet/models/rules/all_incoming.od new file mode 100644 index 0000000..1b87f1d --- /dev/null +++ b/examples/petrinet/models/rules/all_incoming.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `True`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (p -> t) diff --git a/examples/petrinet/models/rules/all_incomming.od b/examples/petrinet/models/rules/all_incomming.od new file mode 100644 index 0000000..1b87f1d --- /dev/null +++ b/examples/petrinet/models/rules/all_incomming.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `True`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (p -> t) diff --git a/examples/petrinet/models/rules/all_outgoing.od b/examples/petrinet/models/rules/all_outgoing.od new file mode 100644 index 0000000..ab431cc --- /dev/null +++ b/examples/petrinet/models/rules/all_outgoing.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `True`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (t -> p) 
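Taken together, the incoming/outgoing token rules in this directory express, declaratively, roughly the following imperative update over the runtime model. This is a sketch only, not the scheduler's actual mechanism; the helper name fire is hypothetical, and the pn_of traversal via get_incoming(...)[0] mirrors the pattern used in the petrinet renderer later in this diff:

# Hedged sketch: imperative equivalent of firing one enabled PNTransition via the ODAPI.
def fire(od, transition):
    for arc in od.get_incoming(transition, "arc"):       # arcs place -> transition
        place = od.get_source(arc)
        place_state = od.get_source(od.get_incoming(place, "pn_of")[0])
        od.set_slot_value(place_state, "numTokens", od.get_slot_value(place_state, "numTokens") - 1)
    for arc in od.get_outgoing(transition, "arc"):       # arcs transition -> place
        place = od.get_target(arc)
        place_state = od.get_source(od.get_incoming(place, "pn_of")[0])
        od.set_slot_value(place_state, "numTokens", od.get_slot_value(place_state, "numTokens") + 1)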
diff --git a/examples/petrinet/models/rules/increase_outgoing.od b/examples/petrinet/models/rules/increase_outgoing.od new file mode 100644 index 0000000..1fa1acb --- /dev/null +++ b/examples/petrinet/models/rules/increase_outgoing.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `get_value(this) + 1`; +} +:RAM_pn_of (ps -> p) + +# An outgoing arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (t -> p) diff --git a/examples/petrinet/operational_semantics/all_input_have_token.od b/examples/petrinet/models/rules/input_without_token.od similarity index 100% rename from examples/petrinet/operational_semantics/all_input_have_token.od rename to examples/petrinet/models/rules/input_without_token.od diff --git a/examples/petrinet/models/rules/places.od b/examples/petrinet/models/rules/places.od new file mode 100644 index 0000000..923fb03 --- /dev/null +++ b/examples/petrinet/models/rules/places.od @@ -0,0 +1,3 @@ +# A place with no tokens: + +p:RAM_PNPlace \ No newline at end of file diff --git a/examples/petrinet/models/rules/reduce_incoming.od b/examples/petrinet/models/rules/reduce_incoming.od new file mode 100644 index 0000000..b85a2db --- /dev/null +++ b/examples/petrinet/models/rules/reduce_incoming.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `get_value(this) -1`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (p -> t) \ No newline at end of file diff --git a/examples/petrinet/models/rules/reduce_incomming.od b/examples/petrinet/models/rules/reduce_incomming.od new file mode 100644 index 0000000..b85a2db --- /dev/null +++ b/examples/petrinet/models/rules/reduce_incomming.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `get_value(this) -1`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (p -> t) \ No newline at end of file diff --git a/examples/petrinet/models/rules/transition.od b/examples/petrinet/models/rules/transition.od new file mode 100644 index 0000000..c3bd82c --- /dev/null +++ b/examples/petrinet/models/rules/transition.od @@ -0,0 +1 @@ +t:RAM_PNTransition \ No newline at end of file diff --git a/examples/petrinet/models/schedule.od b/examples/petrinet/models/schedule.od deleted file mode 100644 index 1584a7c..0000000 --- a/examples/petrinet/models/schedule.od +++ /dev/null @@ -1,66 +0,0 @@ -start:Start -end:End - -transitions:Match{ - file = "operational_semantics/transition"; -} - - -d:Data_modify -{ - modify_dict = ' - { - "tr": "t" - }'; -} - -nac_input_without:Match{ - file = "operational_semantics/all_input_have_token"; - n = "1"; -} - -inputs:Match{ - file = "operational_semantics/all_inputs"; -} - -rewrite_incoming:Rewrite -{ - file = "operational_semantics/remove_incoming"; -} - -loop_trans:Loop -loop_input:Loop - -p:Print -{ -event = True; -label = "transition: "; -} - -p2:Print -{ -event = True; -label = "inputs: "; -} - -:Exec_con(start -> transitions){gate_from = 0;gate_to = 0;} -:Exec_con(transitions -> end){gate_from = 1;gate_to = 0;} -:Exec_con(transitions -> loop_trans){gate_from = 0;gate_to = 0;} -:Exec_con(loop_trans -> nac_input_without){gate_from = 0;gate_to = 0;} - -[//]: # (:Exec_con(nac_input_without -> loop_trans){gate_from = 0;gate_to = 0;}) -:Exec_con(nac_input_without -> inputs){gate_from = 1;gate_to = 0;} -:Exec_con(inputs -> 
loop_input){gate_from = 0;gate_to = 0;} -:Exec_con(inputs -> loop_trans){gate_from = 1;gate_to = 0;} - -:Exec_con(loop_trans -> end){gate_from = 1;gate_to = 0;} - -:Data_con(transitions -> loop_trans) -:Data_con(nac_input_without -> p) -:Data_con(d -> nac_input_without) -:Data_con(loop_trans -> d) -:Data_con(loop_trans -> rewrite_incoming) - - - - diff --git a/examples/petrinet/models/schedules/combinatory.drawio b/examples/petrinet/models/schedules/combinatory.drawio new file mode 100644 index 0000000..c22b5ce --- /dev/null +++ b/examples/petrinet/models/schedules/combinatory.drawio @@ -0,0 +1,526 @@ diff --git a/examples/petrinet/models/schedules/foo.od b/examples/petrinet/models/schedules/foo.od new file mode 100644 index 0000000..7acc7a8 --- /dev/null +++ b/examples/petrinet/models/schedules/foo.od @@ -0,0 +1,23 @@ +start:Start { + ports_exec = `["F","FF"]`; +} +end:End { + ports_exec = `["F"]`; +} + +p1:Print{ + custom = "Foo"; +} + +p2:Print{ + custom = "FooFoo"; +} + +p3:Print{ + custom = "FooFooFoo"; +} + +:Conn_exec (start -> p1) {from="F";to="in";} +:Conn_exec (p1 -> end) {from="out";to="F";} +:Conn_exec (start -> p2) {from="FF";to="in";} +:Conn_exec (p2 -> end) {from="out";to="F";} diff --git a/examples/petrinet/models/schedules/petrinet.od b/examples/petrinet/models/schedules/petrinet.od new file mode 100644 index 0000000..386c3ed --- /dev/null +++ b/examples/petrinet/models/schedules/petrinet.od @@ -0,0 +1,66 @@ +start:Start +end:End + +m:Match{ + file = "operational_semantics/transition"; +} + +nac1:Match{ + file = "operational_semantics/all_input_have_token"; + n = "1"; +} + +inputs:Match{ + file = "operational_semantics/all_inputs"; +} +rinput:Rewrite{ + file = "operational_semantics/all_inputs_reduced"; +} + +outputs:Match{ + file = "operational_semantics/all_outputs"; +} +routput:Rewrite{ + file = "operational_semantics/all_outputs_increased"; +} + +p:Print{ + event = True; +} +p2:Print{ + event = False; + custom = `"successfully executed a petrinet transition"`; +} + +l:Loop +l2:Loop +l3:Loop + + +:Conn_exec (start -> m) {from="out"; to="in";} +:Conn_exec (m -> l) {from="success"; to="in";} +:Conn_exec (l -> nac1) {from="it"; to="in";} +:Conn_exec (l -> end) {from="out"; to="in";} +:Conn_exec (nac1 -> l) {from="success"; to="in";} +:Conn_exec (nac1 -> inputs) {from="fail"; to="in";} +:Conn_exec (inputs -> l2) {from="success"; to="in";} +:Conn_exec (inputs -> l2) {from="fail"; to="in";} +:Conn_exec (l2 -> rinput) {from="it"; to="in";}
+:Conn_exec (rinput -> l2) {from="out"; to="in";} +:Conn_exec (l2 -> outputs) {from="out"; to="in";} +:Conn_exec (outputs -> l3) {from="success"; to="in";} +:Conn_exec (outputs -> l3) {from="fail"; to="in";} +:Conn_exec (l3 -> routput) {from="it"; to="in";} +:Conn_exec (routput -> l3) {from="out"; to="in";} +:Conn_exec (l3 -> p2) {from="out"; to="in";} +:Conn_exec (p2 -> end) {from="out"; to="in";} + + +:Conn_data (m -> l) {from="out"; to="in";} +:Conn_data (l -> nac1) {from="out"; to="in";} +:Conn_data (l -> inputs) {from="out"; to="in";} +:Conn_data (inputs -> l2) {from="out"; to="in";} +:Conn_data (l2 -> rinput) {from="out"; to="in";} +:Conn_data (l -> outputs) {from="out"; to="in";} +:Conn_data (outputs -> l3) {from="out"; to="in";} +:Conn_data (l3 -> routput) {from="out"; to="in";} \ No newline at end of file diff --git a/examples/petrinet/models/schedules/petrinet2.drawio b/examples/petrinet/models/schedules/petrinet2.drawio new file mode 100644 index 0000000..6294d7f --- /dev/null +++ b/examples/petrinet/models/schedules/petrinet2.drawio @@ -0,0 +1,1160 @@ diff --git a/examples/petrinet/models/schedules/petrinet3.drawio b/examples/petrinet/models/schedules/petrinet3.drawio new file mode 100644 index
0000000..4e701fe --- /dev/null +++ b/examples/petrinet/models/schedules/petrinet3.drawio @@ -0,0 +1,915 @@ diff --git a/examples/petrinet/models/schedules/recursion.drawio b/examples/petrinet/models/schedules/recursion.drawio new file mode 100644 index 0000000..f82cabd --- /dev/null +++ b/examples/petrinet/models/schedules/recursion.drawio @@ -0,0 +1,217 @@ diff --git a/examples/petrinet/models/schedules/schedule.od b/examples/petrinet/models/schedules/schedule.od new file mode 100644 index 0000000..8c8f816 --- /dev/null +++ b/examples/petrinet/models/schedules/schedule.od @@ -0,0 +1,4 @@ +start: Start +end: End + +:Conn_exec (start -> end) {from="tfuy"; to="in";} \ No newline at end of file diff --git a/examples/petrinet/operational_semantics/all_inputs_reduced.od b/examples/petrinet/operational_semantics/all_inputs_reduced.od new file mode 100644 index 0000000..a6bfdd4 --- /dev/null +++ b/examples/petrinet/operational_semantics/all_inputs_reduced.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `get_value(this) -1`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (t -> p) diff --git a/examples/petrinet/operational_semantics/all_outputs.od
b/examples/petrinet/operational_semantics/all_outputs.od new file mode 100644 index 0000000..ce5efd0 --- /dev/null +++ b/examples/petrinet/operational_semantics/all_outputs.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `True`; +} +:RAM_pn_of (ps -> p) + +# An outgoing arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (t -> p) diff --git a/examples/petrinet/operational_semantics/all_outputs_increased.od b/examples/petrinet/operational_semantics/all_outputs_increased.od new file mode 100644 index 0000000..1fa1acb --- /dev/null +++ b/examples/petrinet/operational_semantics/all_outputs_increased.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `get_value(this) + 1`; +} +:RAM_pn_of (ps -> p) + +# An outgoing arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (t -> p) diff --git a/examples/petrinet/operational_semantics/input_without_token.od b/examples/petrinet/operational_semantics/input_without_token.od new file mode 100644 index 0000000..9207ce2 --- /dev/null +++ b/examples/petrinet/operational_semantics/input_without_token.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `get_value(this) == 0`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (p -> t) diff --git a/examples/petrinet/operational_semantics/transition.od b/examples/petrinet/operational_semantics/transition.od index c7c8203..c3bd82c 100644 --- a/examples/petrinet/operational_semantics/transition.od +++ b/examples/petrinet/operational_semantics/transition.od @@ -1 +1 @@ -tr:RAM_PNTransition \ No newline at end of file +t:RAM_PNTransition \ No newline at end of file diff --git a/examples/petrinet/petrinet_renderer.j2 b/examples/petrinet/petrinet_renderer.j2 new file mode 100644 index 0000000..0ace22b --- /dev/null +++ b/examples/petrinet/petrinet_renderer.j2 @@ -0,0 +1,12 @@ +digraph G { + rankdir=LR; + center=true; + margin=1; + nodesep=1; + subgraph places { + node [fontname=Arial,fontsize=10,shape=circle,fixedsize=true,label="", height=.35,width=.35]; + {% for place in places %} + {{ place[0] }} [label="{{ place[1] }}_{{ place[2] }}"] + {% endfor %} + } +} \ No newline at end of file diff --git a/examples/petrinet/renderer.py b/examples/petrinet/renderer.py index 278376a..3916311 100644 --- a/examples/petrinet/renderer.py +++ b/examples/petrinet/renderer.py @@ -1,3 +1,7 @@ +import os + +from jinja2 import Environment, FileSystemLoader + from api.od import ODAPI from concrete_syntax.graphviz.make_url import show_graphviz from concrete_syntax.graphviz.renderer import make_graphviz_id @@ -16,13 +20,24 @@ def render_tokens(num_tokens: int): return str(num_tokens) def render_petri_net_to_dot(od: ODAPI) -> str: + env = Environment( + loader=FileSystemLoader( + os.path.dirname(__file__) + ) + ) + env.trim_blocks = True + env.lstrip_blocks = True + template_dot = env.get_template("petrinet_renderer.j2") + with open("test_pet.dot", "w", encoding="utf-8") as f_dot: + places = [(make_graphviz_id(place), place_name, render_tokens(od.get_slot_value(od.get_source(od.get_incoming(place, "pn_of")[0]), "numTokens"))) for place_name, place in od.get_all_instances("PNPlace")] + f_dot.write(template_dot.render({"places": places})) dot = "" - dot += "rankdir=LR;" - dot += "center=true;" - dot += "margin=1;" - dot += "nodesep=1;" - dot += "subgraph places {" - dot += " 
node [fontname=Arial,fontsize=10,shape=circle,fixedsize=true,label=\"\", height=.35,width=.35];" + dot += "rankdir=LR;\n" + dot += "center=true;\n" + dot += "margin=1;\n" + dot += "nodesep=1;\n" + dot += "subgraph places {\n" + dot += " node [fontname=Arial,fontsize=10,shape=circle,fixedsize=true,label=\"\", height=.35,width=.35];\n" for place_name, place in od.get_all_instances("PNPlace"): # place_name = od.get_name(place) try: diff --git a/examples/petrinet/runner.py b/examples/petrinet/runner.py index 4c23b3f..75fd37f 100644 --- a/examples/petrinet/runner.py +++ b/examples/petrinet/runner.py @@ -1,19 +1,12 @@ -from examples.schedule.RuleExecuter import RuleExecuter -from state.devstate import DevState -from api.od import ODAPI +from icecream import ic + from concrete_syntax.textual_od.renderer import render_od -from concrete_syntax.textual_od.renderer_jinja2 import render_od_jinja2 -from bootstrap.scd import bootstrap_scd +from transformation.schedule.Tests import Test_xmlparser from util import loader -from transformation.rule import RuleMatcherRewriter, ActionGenerator from transformation.ramify import ramify -from examples.semantics.operational import simulator from examples.petrinet.renderer import show_petri_net -from examples.schedule.ScheduledActionGenerator import * -from examples.schedule.RuleExecuter import * - - +from transformation.schedule.rule_scheduler import * if __name__ == "__main__": import os @@ -33,43 +26,28 @@ if __name__ == "__main__": mm_rt_cs = mm_cs + read_file('metamodels/mm_runtime.od') # m_cs = read_file('models/m_example_simple.od') # m_rt_initial_cs = m_cs + read_file('models/m_example_simple_rt_initial.od') - m_cs = read_file('models/m_example_mutex.od') - m_rt_initial_cs = m_cs + read_file('models/m_example_mutex_rt_initial.od') - # m_cs = read_file('models/m_example_inharc.od') - # m_rt_initial_cs = m_cs + read_file('models/m_example_inharc_rt_initial.od') + # m_cs = read_file('models/m_example_mutex.od') + # m_rt_initial_cs = m_cs + read_file('models/m_example_mutex_rt_initial.od') + m_cs = read_file('models/m_example_simple.od') + m_rt_initial_cs = m_cs + read_file('models/m_example_simple_rt_initial.od') # Parse them mm = loader.parse_and_check(state, mm_cs, scd_mmm, "Petri-Net Design meta-model") mm_rt = loader.parse_and_check(state, mm_rt_cs, scd_mmm, "Petri-Net Runtime meta-model") m = loader.parse_and_check(state, m_cs, mm, "Example model") m_rt_initial = loader.parse_and_check(state, m_rt_initial_cs, mm_rt, "Example model initial state") - mm_rt_ramified = ramify(state, mm_rt) - rules = loader.load_rules(state, - lambda rule_name, kind: f"{THIS_DIR}/operational_semantics/r_{rule_name}_{kind}.od", - mm_rt_ramified, - ["fire_transition"]) # only 1 rule :( - # matcher_rewriter = RuleMatcherRewriter(state, mm_rt, mm_rt_ramified) - # action_generator = ActionGenerator(matcher_rewriter, rules) - matcher_rewriter2 = RuleExecuter(state, mm_rt, mm_rt_ramified) - action_generator = ScheduleActionGenerator(matcher_rewriter2, f"models/schedule.od") - def render_callback(od): - show_petri_net(od) - # return render_od(state, od.m, od.mm) - return render_od_jinja2(state, od.m, od.mm) + scheduler = RuleScheduler(state, mm_rt, mm_rt_ramified, verbose=True, directory="models") - action_generator.generate_dot() + # if scheduler.load_schedule(f"petrinet.od"): + # if scheduler.load_schedule("schedules/combinatory.drawio"): + if scheduler.load_schedule("schedules/petrinet3.drawio"): - sim = simulator.MinimalSimulator( - action_generator=action_generator, - 
decision_maker=simulator.InteractiveDecisionMaker(auto_proceed=False), - # decision_maker=simulator.RandomDecisionMaker(seed=0), - termination_condition=action_generator.termination_condition, - # renderer=lambda od: render_od(state, od.m, od.mm), - ) - sim.run(ODAPI(state, m_rt_initial, mm_rt)) \ No newline at end of file + scheduler.generate_dot("../dot.dot") + code, message = scheduler.run(ODAPI(state, m_rt_initial, mm_rt)) + print(f"{code}: {message}") diff --git a/examples/schedule/RuleExecuter.py b/examples/schedule/RuleExecuter.py deleted file mode 100644 index 8566d10..0000000 --- a/examples/schedule/RuleExecuter.py +++ /dev/null @@ -1,49 +0,0 @@ -from concrete_syntax.textual_od.renderer import render_od - -import pprint -from typing import Generator, Callable, Any -from uuid import UUID -import functools - -from api.od import ODAPI -from concrete_syntax.common import indent -from transformation.matcher import match_od -from transformation.rewriter import rewrite -from transformation.cloner import clone_od -from util.timer import Timer -from util.loader import parse_and_check - -class RuleExecuter: - def __init__(self, state, mm: UUID, mm_ramified: UUID, eval_context={}): - self.state = state - self.mm = mm - self.mm_ramified = mm_ramified - self.eval_context = eval_context - - # Generates matches. - # Every match is a dictionary with entries LHS_element_name -> model_element_name - def match_rule(self, m: UUID, lhs: UUID, *, pivot:dict[Any, Any]): - lhs_matcher = match_od(self.state, - host_m=m, - host_mm=self.mm, - pattern_m=lhs, - pattern_mm=self.mm_ramified, - eval_context=self.eval_context, - pivot= pivot, - ) - return lhs_matcher - - def rewrite_rule(self, m: UUID, rhs: UUID, *, pivot:dict[Any, Any]): - yield rewrite(self.state, - rhs_m=rhs, - pattern_mm=self.mm_ramified, - lhs_match=pivot, - host_m=m, - host_mm=self.mm, - eval_context=self.eval_context, - ) - - - def load_match(self, file: str): - with open(file, "r") as f: - return parse_and_check(self.state, f.read(), self.mm_ramified, file) diff --git a/examples/schedule/ScheduledActionGenerator.py b/examples/schedule/ScheduledActionGenerator.py deleted file mode 100644 index 0f91121..0000000 --- a/examples/schedule/ScheduledActionGenerator.py +++ /dev/null @@ -1,104 +0,0 @@ -import importlib.util -import io -import os - -from jinja2 import FileSystemLoader, Environment - -from concrete_syntax.textual_od import parser as parser_od -from concrete_syntax.textual_cd import parser as parser_cd -from api.od import ODAPI -from bootstrap.scd import bootstrap_scd -from examples.schedule.generator import schedule_generator -from examples.schedule.schedule_lib import End, NullNode -from framework.conformance import Conformance, render_conformance_check_result -from state.devstate import DevState - - -class ScheduleActionGenerator: - def __init__(self, rule_executer, schedulefile:str): - self.rule_executer = rule_executer - self.rule_dict = {} - self.schedule: "Schedule" - - - self.state = DevState() - self.load_schedule(schedulefile) - - def load_schedule(self, filename): - print("Loading schedule ...") - scd_mmm = bootstrap_scd(self.state) - with open("../schedule/models/scheduling_MM.od", "r") as f_MM: - mm_cs = f_MM.read() - with open(f"{filename}", "r") as f_M: - m_cs = f_M.read() - print("OK") - - print("\nParsing models") - - print(f"\tParsing meta model") - scheduling_mm = parser_cd.parse_cd( - self.state, - m_text=mm_cs, - ) - print(f"\tParsing '{filename}_M.od' model") - scheduling_m = parser_od.parse_od( - self.state, - 
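For reference, the scheduler-driven flow that the new runner.py above reduces to is sketched below. This is a minimal sketch, assuming the RuleScheduler API exactly as it is used in this diff; state, m_rt_initial, mm_rt and mm_rt_ramified stand for the handles built earlier in the script, and the schedule path is the one chosen in the diff.

    # Minimal sketch of the new runner flow (names taken from the diff above).
    from api.od import ODAPI
    from transformation.ramify import ramify
    from transformation.schedule.rule_scheduler import RuleScheduler

    mm_rt_ramified = ramify(state, mm_rt)  # RAMified runtime meta-model, as before
    scheduler = RuleScheduler(state, mm_rt, mm_rt_ramified, verbose=True, directory="models")
    if scheduler.load_schedule("schedules/petrinet3.drawio"):  # .od schedules can be loaded too
        scheduler.generate_dot("../dot.dot")                   # optional: dump the schedule graph
        code, message = scheduler.run(ODAPI(state, m_rt_initial, mm_rt))
        print(f"{code}: {message}")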
m_text=m_cs, - mm=scheduling_mm - ) - print(f"OK") - - print("\tmeta-meta-model a valid class diagram") - conf = Conformance(self.state, scd_mmm, scd_mmm) - print(render_conformance_check_result(conf.check_nominal())) - print(f"Is our '{filename}_M.od' model a valid '{filename}_MM.od' diagram?") - conf = Conformance(self.state, scheduling_m, scheduling_mm) - print(render_conformance_check_result(conf.check_nominal())) - print("OK") - - od = ODAPI(self.state, scheduling_m, scheduling_mm) - g = schedule_generator(od) - - output_buffer = io.StringIO() - g.generate_schedule(output_buffer) - open(f"schedule.py", "w").write(output_buffer.getvalue()) - spec = importlib.util.spec_from_file_location("schedule", "schedule.py") - scedule_module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(scedule_module) - self.schedule = scedule_module.Schedule(self.rule_executer) - self.load_matchers() - - def load_matchers(self): - matchers = dict() - for file in self.schedule.get_matchers(): - matchers[file] = self.rule_executer.load_match(file) - self.schedule.init_schedule(matchers) - - def __call__(self, api: ODAPI): - exec_op = self.schedule(api) - yield from exec_op - - def termination_condition(self, api: ODAPI): - if type(self.schedule.cur) == End: - return "jay" - if type(self.schedule.cur) == NullNode: - return "RRRR" - return None - - def generate_dot(self): - env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates'))) - env.trim_blocks = True - env.lstrip_blocks = True - template_dot = env.get_template('schedule_dot.j2') - - nodes = [] - edges = [] - visit = set() - self.schedule.generate_dot(nodes, edges, visit) - print("Nodes:") - print(nodes) - print("\nEdges:") - print(edges) - - with open("test.dot", "w") as f_dot: - f_dot.write(template_dot.render({"nodes": nodes, "edges": edges})) \ No newline at end of file diff --git a/examples/schedule/generator.py b/examples/schedule/generator.py deleted file mode 100644 index ed8a111..0000000 --- a/examples/schedule/generator.py +++ /dev/null @@ -1,129 +0,0 @@ -import sys -import os -import json -from uuid import UUID - -from jinja2.runtime import Macro - -from api.od import ODAPI -from jinja2 import Environment, FileSystemLoader, meta - - -class schedule_generator: - def __init__(self, odApi:ODAPI): - self.env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates'))) - self.env.trim_blocks = True - self.env.lstrip_blocks = True - self.template = self.env.get_template('schedule_template.j2') - self.template_wrap = self.env.get_template('schedule_template_wrap.j2') - self.api = odApi - - def get_slot_value_default(item: UUID, slot:str, default): - if slot in self.api.get_slots(item): - return self.api.get_slot_value(item, slot) - return default - - name_dict = lambda item: {"name": self.api.get_name(item)} - conn_dict = lambda item: {"name_from": self.api.get_name(self.api.get_source(item)), - "name_to": self.api.get_name(self.api.get_target(item)), - "gate_from": self.api.get_slot_value(item, "gate_from"), - "gate_to": self.api.get_slot_value(item, "gate_to"), - } - - conn_data_event = {"Match": lambda item: False, - "Rewrite": lambda item: False, - "Data_modify": lambda item: True, - "Loop": lambda item: True, - "Print": lambda item: get_slot_value_default(item, "event", False) - } - conn_data_dict = lambda item: {"name_from": self.api.get_name(self.api.get_source(item)), - "name_to": self.api.get_name(self.api.get_target(item)), - "event": 
conn_data_event[self.api.get_type_name(target := self.api.get_target(item))](target) - } - rewrite_dict = lambda item: {"name": self.api.get_name(item), - "file": self.api.get_slot_value(item, "file"), - } - match_dict = lambda item: {"name": self.api.get_name(item), - "file": self.api.get_slot_value(item, "file"), - "n": self.api.get_slot_value(item, "n") \ - if "n" in self.api.get_slots(item) else 'float("inf")' - } - data_modify_dict = lambda item: {"name": self.api.get_name(item), - "dict": json.loads(self.api.get_slot_value(item, "modify_dict")) - } - loop_dict = lambda item: {"name": self.api.get_name(item), - "choise": get_slot_value_default(item, "choise", False)} - print_dict = lambda item: {"name": self.api.get_name(item), - "label": get_slot_value_default(item, "label", "")} - arg_map = {"Start": name_dict, "End": name_dict, - "Match": match_dict, "Rewrite": rewrite_dict, - "Data_modify": data_modify_dict, "Loop": loop_dict, - "Exec_con": conn_dict, "Data_con": conn_data_dict, - "Print": print_dict} - self.macro_args = {tp: (macro, arg_map.get(tp)) for tp, macro in self.template.module.__dict__.items() - if type(macro) == Macro} - - def _render(self, item): - type_name = self.api.get_type_name(item) - macro, arg_gen = self.macro_args[type_name] - return macro(**arg_gen(item)) - - def generate_schedule(self, stream = sys.stdout): - start = self.api.get_all_instances("Start")[0][1] - stack = [start] - out = {"blocks":[], "exec_conn":[], "data_conn":[], "match_files":set(), "matchers":[], "start":self.api.get_name(start)} - execBlocks = set() - exec_conn = list() - - while len(stack) > 0: - exec_obj = stack.pop() - if exec_obj in execBlocks: - continue - execBlocks.add(exec_obj) - for conn in self.api.get_outgoing(exec_obj, "Exec_con"): - exec_conn.append(conn) - stack.append(self.api.get_target(conn)) - - stack = list(execBlocks) - data_blocks = set() - for name, p in self.api.get_all_instances("Print"): - if "event" in (event := self.api.get_slots(p)) and event: - stack.append(p) - execBlocks.add(p) - - - data_conn = set() - while len(stack) > 0: - obj = stack.pop() - for data_c in self.api.get_incoming(obj, "Data_con"): - data_conn.add(data_c) - source = self.api.get_source(data_c) - if not self.api.is_instance(source, "Exec") and \ - source not in execBlocks and \ - source not in data_blocks: - stack.append(source) - data_blocks.add(source) - - for exec_item in execBlocks: - out["blocks"].append(self._render(exec_item)) - if self.api.is_instance(exec_item, "Rule"): - d = self.macro_args[self.api.get_type_name(exec_item)][1](exec_item) - out["match_files"].add(d["file"]) - out["matchers"].append(d) - for exec_c in exec_conn: - out["exec_conn"].append(self._render(exec_c)) - - for data_c in data_conn: - out["data_conn"].append(self._render(data_c)) - - for data_b in data_blocks: - out["blocks"].append(self._render(data_b)) - - print(self.template_wrap.render(out), file=stream) - - - - - - # print("with open('test.dot', 'w') as f:", file=stream) - # print(f"\tf.write({self.api.get_name(start)}.generate_dot())", file=stream) \ No newline at end of file diff --git a/examples/schedule/models/README.md b/examples/schedule/models/README.md deleted file mode 100644 index 5767d48..0000000 --- a/examples/schedule/models/README.md +++ /dev/null @@ -1,26 +0,0 @@ - -### association Exec_con - Integer gate_from; - Integer gate_to; - -### association Data_con - -### class Start [1..1] -### class End [1..*] - - -### class Match - optional Integer n; - -### class Rewrite - -### class Data_modify 
- String modify_dict; - -### class Loop - optional Boolean choise; - -## debugging tools - -### class Print(In_Exec, Out_Exec, In_Data) - optional Boolean event; \ No newline at end of file diff --git a/examples/schedule/models/scheduling_MM.od b/examples/schedule/models/scheduling_MM.od deleted file mode 100644 index 533d8bc..0000000 --- a/examples/schedule/models/scheduling_MM.od +++ /dev/null @@ -1,46 +0,0 @@ -abstract class Exec -abstract class In_Exec(Exec) -abstract class Out_Exec(Exec) - -association Exec_con [0..*] Out_Exec -> In_Exec [0..*] { - Integer gate_from; - Integer gate_to; -} - -abstract class Data -abstract class In_Data(Data) -abstract class Out_Data(Data) -association Data_con [0..*] Out_Data -> In_Data [0..*] - -class Start [1..1] (Out_Exec) -class End [1..*] (In_Exec) - - -abstract class Rule (In_Exec, Out_Exec, In_Data, Out_Data) -{ - String file; -} -class Match (Rule) -{ - optional Integer n; -} - -class Rewrite (Rule) - -class Data_modify(In_Data, Out_Data) -{ - String modify_dict; -} - -class Loop(In_Exec, Out_Exec, In_Data, Out_Data) -{ - optional Boolean choise; -} - -# debugging tools - -class Print(In_Exec, Out_Exec, In_Data) -{ - optional Boolean event; - optional String label; -} \ No newline at end of file diff --git a/examples/schedule/schedule_lib/__init__.py b/examples/schedule/schedule_lib/__init__.py deleted file mode 100644 index 0b826ab..0000000 --- a/examples/schedule/schedule_lib/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from .data_node import DataNode -from .data_modify import DataModify -from .end import End -from .exec_node import ExecNode -from .loop import Loop -from .match import Match -from .null_node import NullNode -from .print import Print -from .rewrite import Rewrite -from .start import Start - -__all__ = ["DataNode", "End", "ExecNode", "Loop", "Match", "NullNode", "Rewrite", "Print", "DataModify", "Start"] \ No newline at end of file diff --git a/examples/schedule/schedule_lib/data.py b/examples/schedule/schedule_lib/data.py deleted file mode 100644 index 88bcb42..0000000 --- a/examples/schedule/schedule_lib/data.py +++ /dev/null @@ -1,63 +0,0 @@ -import functools -from typing import Any, Generator, Callable - - -class Data: - def __init__(self, super) -> None: - self.data: list[dict[Any, Any]] = list() - self.success: bool = False - self.super = super - - @staticmethod - def store_output(func: Callable) -> Callable: - def wrapper(self, *args, **kwargs) -> Any: - output = func(self, *args, **kwargs) - self.success = output - return output - return wrapper - - @store_output - def store_data(self, data_gen: Generator, n: int) -> bool: - self.data.clear() - if n == 0: - return True - i: int = 0 - while (match := next(data_gen, None)) is not None: - self.data.append(match) - i+=1 - if i >= n: - break - else: - if n == float("inf"): - return bool(len(self.data)) - self.data.clear() - return False - return True - - def get_super(self) -> int: - return self.super - - def replace(self, data: "Data") -> None: - self.data.clear() - self.data.extend(data.data) - - def append(self, data: Any) -> None: - self.data.append(data) - - def clear(self) -> None: - self.data.clear() - - def pop(self, index = -1) -> Any: - return self.data.pop(index) - - def empty(self) -> bool: - return len(self.data) == 0 - - def __getitem__(self, index): - return self.data[index] - - def __iter__(self): - return self.data.__iter__() - - def __len__(self): - return self.data.__len__() \ No newline at end of file diff --git 
a/examples/schedule/schedule_lib/data_modify.py b/examples/schedule/schedule_lib/data_modify.py deleted file mode 100644 index 0df6cba..0000000 --- a/examples/schedule/schedule_lib/data_modify.py +++ /dev/null @@ -1,26 +0,0 @@ -import functools -from typing import TYPE_CHECKING, Callable, List - -from api.od import ODAPI -from examples.schedule.RuleExecuter import RuleExecuter -from .exec_node import ExecNode -from .data_node import DataNode - - -class DataModify(DataNode): - def __init__(self, modify_dict: dict[str,str]) -> None: - DataNode.__init__(self) - self.modify_dict: dict[str,str] = modify_dict - - def input_event(self, success: bool) -> None: - if success or self.data_out.success: - self.data_out.data.clear() - for data in self.data_in.data: - self.data_out.append({self.modify_dict[key]: value for key, value in data.items() if key in self.modify_dict.keys()}) - DataNode.input_event(self, success) - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=modify]") - super().generate_dot(nodes, edges, visited) diff --git a/examples/schedule/schedule_lib/data_node.py b/examples/schedule/schedule_lib/data_node.py deleted file mode 100644 index 557f297..0000000 --- a/examples/schedule/schedule_lib/data_node.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Any, Generator, List - -from examples.schedule.schedule_lib.id_generator import IdGenerator -from .data import Data - -class DataNode: - def __init__(self) -> None: - if not hasattr(self, 'id'): - self.id = IdGenerator().generate_id() - self.data_out : Data = Data(self) - self.data_in: Data | None = None - self.eventsub: list[DataNode] = list() - - def connect_data(self, data_node: "DataNode", eventsub=True) -> None: - data_node.data_in = self.data_out - if eventsub: - self.eventsub.append(data_node) - - def store_data(self, data_gen: Generator, n: int) -> None: - success: bool = self.data_out.store_data(data_gen, n) - for sub in self.eventsub: - sub.input_event(success) - - def get_input_data(self) -> list[dict[Any, Any]]: - if not self.data_in.success: - raise Exception("Invalid input data: matching has failed") - data = self.data_in.data - if len(data) == 0: - raise Exception("Invalid input data: no data present") - return data - - def input_event(self, success: bool) -> None: - self.data_out.success = success - for sub in self.eventsub: - sub.input_event(success) - - def get_id(self) -> int: - return self.id - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - visited.add(self.id) - if self.data_in is not None: - edges.append(f"{self.data_in.get_super().get_id()} -> {self.get_id()} [color = green]") - self.data_in.get_super().generate_dot(nodes, edges, visited) - for sub in self.eventsub: - sub.generate_dot(nodes, edges, visited) - diff --git a/examples/schedule/schedule_lib/end.py b/examples/schedule/schedule_lib/end.py deleted file mode 100644 index 2a008c4..0000000 --- a/examples/schedule/schedule_lib/end.py +++ /dev/null @@ -1,21 +0,0 @@ -import functools -from typing import TYPE_CHECKING, List, Callable, Generator - -from api.od import ODAPI -from .exec_node import ExecNode - -class End(ExecNode): - def __init__(self) -> None: - super().__init__(out_connections=1) - - def execute(self, od: ODAPI) -> Generator | None: - return self.terminate(od) - - @staticmethod - def terminate(od: ODAPI) -> Generator: - yield f"end:", functools.partial(lambda od:(od, ""), od) - - def 
generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=end]") \ No newline at end of file diff --git a/examples/schedule/schedule_lib/exec_node.py b/examples/schedule/schedule_lib/exec_node.py deleted file mode 100644 index c5d2d04..0000000 --- a/examples/schedule/schedule_lib/exec_node.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import TYPE_CHECKING, List, Callable, Generator -from api.od import ODAPI - -from .id_generator import IdGenerator - -class ExecNode: - def __init__(self, out_connections: int = 1) -> None: - from .null_node import NullNode - self.next_state: list[ExecNode] = [] - if out_connections > 0: - self.next_state = [NullNode()]*out_connections - self.id: int = IdGenerator().generate_id() - - def nextState(self) -> "ExecNode": - return self.next_state[0] - - def connect(self, next_state: "ExecNode", from_gate: int = 0, to_gate: int = 0) -> None: - if from_gate >= len(self.next_state): - raise IndexError - self.next_state[from_gate] = next_state - - def execute(self, od: ODAPI) -> Generator | None: - return None - - def get_id(self) -> int: - return self.id - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - visited.add(self.id) - for edge in self.next_state: - edges.append(f"{self.id} -> {edge.get_id()}") - for next in self.next_state: - next.generate_dot(nodes, edges, visited) - diff --git a/examples/schedule/schedule_lib/funcs.py b/examples/schedule/schedule_lib/funcs.py deleted file mode 100644 index 0b19b99..0000000 --- a/examples/schedule/schedule_lib/funcs.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Callable - -def generate_dot_wrap(func) -> Callable: - def wrapper(self, *args, **kwargs) -> str: - nodes = [] - edges = [] - self.reset_visited() - func(self, nodes, edges, *args, **kwargs) - return f"digraph G {{\n\t{"\n\t".join(nodes)}\n\t{"\n\t".join(edges)}\n}}" - return wrapper diff --git a/examples/schedule/schedule_lib/id_generator.py b/examples/schedule/schedule_lib/id_generator.py deleted file mode 100644 index d1f4b25..0000000 --- a/examples/schedule/schedule_lib/id_generator.py +++ /dev/null @@ -1,8 +0,0 @@ -from .singleton import Singleton - -class IdGenerator(metaclass=Singleton): - def __init__(self): - self.id = -1 - def generate_id(self) -> int: - self.id += 1 - return self.id \ No newline at end of file diff --git a/examples/schedule/schedule_lib/loop.py b/examples/schedule/schedule_lib/loop.py deleted file mode 100644 index 44ec5c5..0000000 --- a/examples/schedule/schedule_lib/loop.py +++ /dev/null @@ -1,57 +0,0 @@ -import functools -from random import choice -from typing import TYPE_CHECKING, Callable, List, Generator - -from api.od import ODAPI -from examples.schedule.RuleExecuter import RuleExecuter -from .exec_node import ExecNode -from .data_node import DataNode -from .data_node import Data - - -class Loop(ExecNode, DataNode): - def __init__(self, choice) -> None: - ExecNode.__init__(self, out_connections=2) - DataNode.__init__(self) - self.choice: bool = choice - self.cur_data: Data = Data(-1) - - def nextState(self) -> ExecNode: - return self.next_state[not self.data_out.success] - - def execute(self, od: ODAPI) -> Generator | None: - if self.cur_data.empty(): - self.data_out.clear() - self.data_out.success = False - DataNode.input_event(self, False) - return None - - if self.choice: - def select_data() -> Generator: - for i in range(len(self.cur_data)): - yield f"choice: {self.cur_data[i]}", 
functools.partial(self.select_next,od, i) - return select_data() - else: - self.select_next(od, -1) - return None - - def input_event(self, success: bool) -> None: - if (b := self.data_out.success) or success: - self.cur_data.replace(self.data_in) - self.data_out.clear() - self.data_out.success = False - if b: - DataNode.input_event(self, False) - - def select_next(self,od: ODAPI, index: int) -> tuple[ODAPI, list[str]]: - self.data_out.clear() - self.data_out.append(self.cur_data.pop(index)) - DataNode.input_event(self, True) - return (od, ["data selected"]) - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=Loop]") - ExecNode.generate_dot(self, nodes, edges, visited) - DataNode.generate_dot(self, nodes, edges, visited) \ No newline at end of file diff --git a/examples/schedule/schedule_lib/match.py b/examples/schedule/schedule_lib/match.py deleted file mode 100644 index f350ba6..0000000 --- a/examples/schedule/schedule_lib/match.py +++ /dev/null @@ -1,42 +0,0 @@ -import functools -from typing import TYPE_CHECKING, Callable, List, Generator - -from api.od import ODAPI -from examples.schedule.RuleExecuter import RuleExecuter -from .exec_node import ExecNode -from .data_node import DataNode - - -class Match(ExecNode, DataNode): - def __init__(self, label: str, n: int | float) -> None: - ExecNode.__init__(self, out_connections=2) - DataNode.__init__(self) - self.label: str = label - self.n:int = n - self.rule = None - self.rule_executer : RuleExecuter - - def nextState(self) -> ExecNode: - return self.next_state[not self.data_out.success] - - def execute(self, od: ODAPI) -> Generator | None: - self.match(od) - return None - - def init_rule(self, rule, rule_executer): - self.rule = rule - self.rule_executer = rule_executer - - def match(self, od: ODAPI) -> None: - pivot = {} - if self.data_in is not None: - pivot = self.get_input_data()[0] - print(f"matching: {self.label}\n\tpivot: {pivot}") - self.store_data(self.rule_executer.match_rule(od.m, self.rule, pivot=pivot), self.n) - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=M_{self.label.split("/")[-1]}_{self.n}]") - ExecNode.generate_dot(self, nodes, edges, visited) - DataNode.generate_dot(self, nodes, edges, visited) \ No newline at end of file diff --git a/examples/schedule/schedule_lib/null_node.py b/examples/schedule/schedule_lib/null_node.py deleted file mode 100644 index 2d322bb..0000000 --- a/examples/schedule/schedule_lib/null_node.py +++ /dev/null @@ -1,25 +0,0 @@ -import functools -from symtable import Function -from typing import List, Callable, Generator - -from api.od import ODAPI -from .singleton import Singleton - -from .exec_node import ExecNode - -class NullNode(ExecNode, metaclass=Singleton): - def __init__(self): - ExecNode.__init__(self, out_connections=0) - - def execute(self, od: ODAPI) -> Generator | None: - raise Exception('Null node should already have terminated the schedule') - - @staticmethod - def terminate(od: ODAPI): - return None - yield # verrrry important line, dont remove this unreachable code - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=Null]") \ No newline at end of file diff --git a/examples/schedule/schedule_lib/print.py b/examples/schedule/schedule_lib/print.py deleted file mode 
100644 index ed0bbc6..0000000 --- a/examples/schedule/schedule_lib/print.py +++ /dev/null @@ -1,28 +0,0 @@ -import functools -from typing import TYPE_CHECKING, Callable, List, Generator - -from api.od import ODAPI -from examples.schedule.RuleExecuter import RuleExecuter -from .exec_node import ExecNode -from .data_node import DataNode - - -class Print(ExecNode, DataNode): - def __init__(self, label: str = "") -> None: - ExecNode.__init__(self, out_connections=1) - DataNode.__init__(self) - self.label = label - - def execute(self, od: ODAPI) -> Generator | None: - self.input_event(True) - return None - - def input_event(self, success: bool) -> None: - print(f"{self.label}{self.data_in.data}") - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=Print_{self.label.replace(":", "")}]") - ExecNode.generate_dot(self, nodes, edges, visited) - DataNode.generate_dot(self, nodes, edges, visited) \ No newline at end of file diff --git a/examples/schedule/schedule_lib/rewrite.py b/examples/schedule/schedule_lib/rewrite.py deleted file mode 100644 index c00ee8e..0000000 --- a/examples/schedule/schedule_lib/rewrite.py +++ /dev/null @@ -1,38 +0,0 @@ -import functools -from typing import List, Callable, Generator - -from api.od import ODAPI -from .exec_node import ExecNode -from .data_node import DataNode -from ..RuleExecuter import RuleExecuter - - -class Rewrite(ExecNode, DataNode): - def __init__(self, label: str) -> None: - ExecNode.__init__(self, out_connections=1) - DataNode.__init__(self) - self.label = label - self.rule = None - self.rule_executer : RuleExecuter - - def init_rule(self, rule, rule_executer): - self.rule = rule - self.rule_executer= rule_executer - - def execute(self, od: ODAPI) -> Generator | None: - yield "ghello", functools.partial(self.rewrite, od) - - def rewrite(self, od): - print("rewrite" + self.label) - pivot = {} - if self.data_in is not None: - pivot = self.get_input_data()[0] - self.store_data(self.rule_executer.rewrite_rule(od.m, self.rule, pivot=pivot), 1) - return ODAPI(od.state, od.m, od.mm),[f"rewrite {self.label}\n\tpivot: {pivot}\n\t{"success" if self.data_out.success else "failure"}\n"] - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=R_{self.label.split("/")[-1]}]") - ExecNode.generate_dot(self, nodes, edges, visited) - DataNode.generate_dot(self, nodes, edges, visited) \ No newline at end of file diff --git a/examples/schedule/schedule_lib/start.py b/examples/schedule/schedule_lib/start.py deleted file mode 100644 index 44ed1e1..0000000 --- a/examples/schedule/schedule_lib/start.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import TYPE_CHECKING, Callable, List, Any - -from .funcs import generate_dot_wrap - -from .exec_node import ExecNode - - -class Start(ExecNode): - def __init__(self) -> None: - ExecNode.__init__(self, out_connections=1) - - def generate_dot(self, nodes: List[str], edges: List[str], visited: set[int]) -> None: - if self.id in visited: - return - nodes.append(f"{self.id}[label=start]") - super().generate_dot(nodes, edges, visited) \ No newline at end of file diff --git a/examples/schedule/templates/schedule_dot.j2 b/examples/schedule/templates/schedule_dot.j2 deleted file mode 100644 index 39d2672..0000000 --- a/examples/schedule/templates/schedule_dot.j2 +++ /dev/null @@ -1,9 +0,0 @@ -digraph G { -{% for node in nodes %} - {{ node 
}} -{% endfor %} - -{% for edge in edges %} - {{ edge }} -{% endfor %} -} \ No newline at end of file diff --git a/examples/schedule/templates/schedule_template.j2 b/examples/schedule/templates/schedule_template.j2 deleted file mode 100644 index a0c251c..0000000 --- a/examples/schedule/templates/schedule_template.j2 +++ /dev/null @@ -1,35 +0,0 @@ -{% macro Start(name) %} -{{ name }} = Start() -{%- endmacro %} - -{% macro End(name) %} -{{ name }} = End() -{%- endmacro %} - -{% macro Match(name, file, n) %} -{{ name }} = Match("{{ file }}", {{ n }}) -{%- endmacro %} - -{% macro Rewrite(name, file) %} -{{ name }} = Rewrite("{{ file }}") -{%- endmacro %} - -{% macro Data_modify(name, dict) %} -{{ name }} = DataModify({{ dict }}) -{%- endmacro %} - -{% macro Exec_con(name_from, name_to, gate_from, gate_to) %} -{{ name_from }}.connect({{ name_to }},{{ gate_from }},{{ gate_to }}) -{%- endmacro %} - -{% macro Data_con(name_from, name_to, event) %} -{{ name_from }}.connect_data({{ name_to }}, {{ event }}) -{%- endmacro %} - -{% macro Loop(name, choise) %} -{{ name }} = Loop({{ choise }}) -{%- endmacro %} - -{% macro Print(name, label) %} -{{ name }} = Print("{{ label }}") -{%- endmacro %} \ No newline at end of file diff --git a/examples/schedule/templates/schedule_template_wrap.j2 b/examples/schedule/templates/schedule_template_wrap.j2 deleted file mode 100644 index 389f2c2..0000000 --- a/examples/schedule/templates/schedule_template_wrap.j2 +++ /dev/null @@ -1,47 +0,0 @@ -from examples.schedule.schedule_lib import * - -class Schedule: - def __init__(self, rule_executer): - self.start: Start - self.cur: ExecNode = None - self.rule_executer = rule_executer - - def __call__(self, od): - self.cur = self.cur.nextState() - while not isinstance(self.cur, NullNode): - action_gen = self.cur.execute(od) - if action_gen is not None: - # if (action_gen := self.cur.execute(od)) is not None: - return action_gen - self.cur = self.cur.nextState() - return NullNode.terminate(od) - - @staticmethod - def get_matchers(): - return [ - {% for file in match_files %} - "{{ file }}.od", - {% endfor %} - ] - - def init_schedule(self, matchers): - {% for block in blocks%} - {{ block }} - {% endfor %} - - {% for conn in exec_conn%} - {{ conn }} - {% endfor %} - {% for conn_d in data_conn%} - {{ conn_d }} - {% endfor %} - self.start = {{ start }} - self.cur = {{ start }} - - {% for match in matchers %} - {{ match["name"] }}.init_rule(matchers["{{ match["file"] }}.od"], self.rule_executer) - {% endfor %} - return None - - def generate_dot(self, *args, **kwargs): - return self.start.generate_dot(*args, **kwargs) \ No newline at end of file diff --git a/examples/semantics/operational/port/assignment.py b/examples/semantics/operational/port/assignment.py deleted file mode 100644 index a7ae221..0000000 --- a/examples/semantics/operational/port/assignment.py +++ /dev/null @@ -1,142 +0,0 @@ -import functools -from concrete_syntax.common import indent -from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time -from examples.semantics.operational.simulator import make_actions_pure, filter_valid_actions - - -def precondition_can_move_from(od, from_state): - - # TO IMPLEMENT - - # Function should return True if a ship can move out of 'from_state' - - return False - -def precondition_can_move_to(od, to_state): - - # TO IMPLEMENT - - # Function should return True if a ship can move into 'to_state' - - return False - -def precondition_all_successors_moved(od, conn): - - # TO IMPLEMENT - - # A move 
(or skip) can only be made along a connection after all subsequent connections have already made their move (or were skipped). - - return True - -def precondition_workers_available(od, workerset): - - # TO IMPLEMENT - - # A worker in a WorkerSet can only be allocated to a berth, if the number of 'isOperating'-links is smaller than the number of workers in the WorkerSet. - - return True - -def precondition_berth_unserved(od, berth): - - # TO IMPLEMENT - - # A worker can only be allocated to a berth, if the berth contains an 'unserved' ship. - - return True - -def action_skip(od, conn_name): - # SERVES AS AN EXAMPLE - NO NEED TO EDIT THIS FUNCTION - conn = od.get(conn_name) - conn_state = design_to_state(od, conn) - od.set_slot_value(conn_state, "moved", True) - return [f"skip {conn_name}"] - -def action_move(od, conn_name): - action_skip(od, conn_name) # flag the connection as 'moved' - - conn = od.get(conn_name) - from_place = od.get_source(conn) - to_place = od.get_target(conn) - - from_state = design_to_state(od, from_place) # beware: Generator does not have State - to_state = design_to_state(od, to_place) - - # TO IMPLEMENT: - # - move a ship along the connection - - return [f"unimplemented! nothing changed!"] - -def action_serve_berth(od, workerset_name, berth_name): - - # TO IMPLEMENT: - # - A worker starts operating a berth - - return [f"unimplemented! nothing changed!"] - -def action_advance_time(od): - _, clock = od.get_all_instances("Clock")[0] - time = od.get_slot_value(clock, "time") - new_time = time + 1 - od.set_slot_value(clock, "time", new_time) - - # TO IMPLEMENT: - # - all 'moved'-attributes need to be reset (to False) - # - if there is a worker operating a Berth, then: - # (1) the Berth's status becomes 'served' - # (2) the worker is no longer operating the Berth - - return [f"time is now {new_time}"] - -# This function is called to discover the possible steps that can be made. 
-# It should not be necessary to edit this function -def get_actions(od): - actions = {} - - # Add move-actions (or skip-actions) - for conn_name, conn in od.get_all_instances("connection"): - already_moved = od.get_slot_value(design_to_state(od, conn), "moved") - if already_moved or not precondition_all_successors_moved(od, conn): - # a move was already made along this connection in the current time-step - continue - - from_place = od.get_source(conn) - to_place = od.get_target(conn) - from_name = od.get_name(from_place) - to_name = od.get_name(to_place) - from_state = design_to_state(od, from_place) - to_state = design_to_state(od, to_place) - - if (precondition_can_move_from(od, from_state) - and precondition_can_move_to(od, to_state)): - actions[f"move {conn_name} ({from_name} -> {to_name})"] = functools.partial(action_move, conn_name=conn_name) - else: - actions[f"skip {from_name} -> {to_name}"] = functools.partial(action_skip, conn_name=conn_name) - - # Add actions to assign workers - for _, workerset in od.get_all_instances("WorkerSet"): - if not precondition_workers_available(od, workerset): - continue - for lnk in od.get_outgoing(workerset, "canOperate"): - berth = od.get_target(lnk) - if precondition_berth_unserved(od, berth): - berth_name = od.get_name(berth) - workerset_name = od.get_name(workerset) - actions[f"{workerset_name} operates {berth_name}"] = functools.partial(action_serve_berth, workerset_name=workerset_name, berth_name=berth_name) - - # Only when no other action can be performed, can time advance - if len(actions) == 0: - actions["advance time"] = action_advance_time - - # This wrapper turns our actions into pure functions: they will clone the model before modifying it. This is useful if we ever want to rollback an action. - return make_actions_pure(actions.items(), od) - - -# Called every time the runtime state changes. -# When this function returns a string, the simulation ends. -# The string should represent the reason for ending the simulation. -# When this function returns None, the simulation continues. -def termination_condition(od): - - # TO IMPLEMENT: terminate simulation when the place 'served' contains 2 ships. 
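As a minimal sketch only (an assumption, not part of the original skeleton), the TO IMPLEMENT above could be filled in with the helpers used elsewhere in this deleted package (design_to_state from helpers.py, od.get and od.get_slot_value from the ODAPI), relying on the documented contract that returning a string ends the simulation:

    def termination_condition(od):
        served_state = design_to_state(od, od.get("served"))
        if od.get_slot_value(served_state, "numShips") >= 2:
            return "The place 'served' contains 2 ships."
        return None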
- - pass diff --git a/examples/semantics/operational/port/helpers.py b/examples/semantics/operational/port/helpers.py deleted file mode 100644 index 12ecdfc..0000000 --- a/examples/semantics/operational/port/helpers.py +++ /dev/null @@ -1,18 +0,0 @@ -# Some helper functions - -def get_num_ships(od, place): - place_state = design_to_state(od, place) - return od.get_slot_value(place_state, "numShips") - -def design_to_state(od, design): - incoming = od.get_incoming(design, "of") - if len(incoming) == 1: - # not all design-objects have a state - return od.get_source(incoming[0]) - -def state_to_design(od, state): - return od.get_target(od.get_outgoing(state, "of")[0]) - -def get_time(od): - _, clock = od.get_all_instances("Clock")[0] - return clock, od.get_slot_value(clock, "time") diff --git a/examples/semantics/operational/port/models.py b/examples/semantics/operational/port/models.py deleted file mode 100644 index b4a7c10..0000000 --- a/examples/semantics/operational/port/models.py +++ /dev/null @@ -1,407 +0,0 @@ -# Design meta-model -port_mm_cs = """ - Source:Class { - abstract = True; - } - Sink:Class { - abstract = True; - } - - Place:Class - :Inheritance (Place -> Source) - :Inheritance (Place -> Sink) - - connection:Association (Source -> Sink) - - CapacityConstraint:Class - - CapacityConstraint_shipCapacity:AttributeLink (CapacityConstraint -> Integer) { - name = "shipCapacity"; - optional = False; - - # cannot have negative capacity: - constraint = `get_value(get_target(this)) >= 0`; # non-negative - } - - # Capacity - capacityOf:Association (CapacityConstraint -> Place) { - # must say something about at least one Place, otherwise what is the point of the constraint? - target_lower_cardinality = 1; - } - - Berth:Class - :Inheritance (Berth -> Place) - - # Set of workers - WorkerSet:Class - - WorkerSet_numWorkers:AttributeLink (WorkerSet -> Integer) { - name = "numWorkers"; - optional = False; - constraint = `get_value(get_target(this)) >= 0`; # non-negative - } - canOperate:Association (WorkerSet -> Berth) { - target_lower_cardinality = 1; - } - - Generator:Class - :Inheritance (Generator -> Source) - - - # Those classes to which we want to attach a runtime state object - Stateful:Class { - abstract = True; - } - :Inheritance (Place -> Stateful) - :Inheritance (WorkerSet -> Stateful) - :Inheritance (Berth -> Stateful) - :Inheritance (connection -> Stateful) -"""; - -# Runtime meta-model -port_rt_mm_cs = port_mm_cs + """ - State:Class - of:Association (State -> Stateful) { - source_lower_cardinality = 1; - source_upper_cardinality = 1; - target_lower_cardinality = 1; - target_upper_cardinality = 1; - } - - PlaceState:Class - :Inheritance (PlaceState -> State) - - PlaceState_numShips:AttributeLink (PlaceState -> Integer) { - # number of ships currently in the place - name = "numShips"; - optional = False; - constraint = `get_value(get_target(this)) >= 0`; # non-negative - } - - shipCapacities:GlobalConstraint { - constraint = ``` - errors = [] - for _, constr in get_all_instances("CapacityConstraint"): - cap = get_slot_value(constr, "shipCapacity") - total = 0 - place_names = [] # for debugging - for lnk in get_outgoing(constr, "capacityOf"): - place = get_target(lnk) - place_names.append(get_name(place)) - place_state = get_source(get_incoming(place, "of")[0]) - total += get_slot_value(place_state, "numShips") - if total > cap: - errors.append(f"The number of ships in places {','.join(place_names)} ({total}) exceeds the capacity ({cap}) of CapacityConstraint {get_name(constr)}.") - 
errors - ```; - } - - BerthState:Class { - # status == empty <=> numShips == 0 - constraint = ``` - errors = [] - numShips = get_slot_value(this, "numShips") - status = get_slot_value(this, "status") - if (numShips == 0) != (status == "empty"): - errors.append(f"Inconsistent: numShips = {numShips}, but status = {status}") - errors - ```; - } - :Inheritance (BerthState -> PlaceState) - - BerthState_status:AttributeLink (BerthState -> String) { - name = "status"; - optional = False; - constraint = `( - get_value(get_target(this)) in { "empty", "unserved", "served" } - )`; - } - - WorkerSetState:Class - :Inheritance (WorkerSetState -> State) - - isOperating:Association (WorkerSetState -> Berth) { - constraint = ``` - errors = [] - - # get status of Berth - berth = get_target(this) - berth_state = get_source(get_incoming(berth, "of")[0]) - status = get_slot_value(berth_state, "status") - if status != "unserved": - errors.append(f"Cannot operate {get_name(berth)} because there is no unserved ship there.") - - # only operate Berts that we can operate - workerset = get_target(get_outgoing(get_source(this), "of")[0]) - can_operate = [get_target(lnk) for lnk in get_outgoing(workerset, "canOperate")] - if berth not in can_operate: - errors.append(f"Cannot operate {get_name(berth)}.") - - errors - ```; - } - - operatingCapacities:GlobalConstraint { - constraint = ``` - errors = [] - for _, workersetstate in get_all_instances("WorkerSetState"): - workerset = get_target(get_outgoing(workersetstate, "of")[0]) - num_operating = len(get_outgoing(workersetstate, "isOperating")) - num_workers = get_slot_value(workerset, "numWorkers") - if num_operating > num_workers: - errors.append(f"WorkerSet {get_name(workerset)} is operating more berths ({num_operating}) than there are workers ({num_workers})") - errors - ```; - } - - ConnectionState:Class - :Inheritance (ConnectionState -> State) - ConnectionState_moved:AttributeLink (ConnectionState -> Boolean) { - name = "moved"; - optional = False; - constraint = ``` - result = True - all_successors_moved = True - moved = get_value(get_target(this)) - conn_state = get_source(this) - conn = get_target(get_outgoing(conn_state, "of")[0]) - tgt_place = get_target(conn) - next_conns = get_outgoing(tgt_place, "connection") - for next_conn in next_conns: - next_conn_state = get_source(get_incoming(next_conn, "of")[0]) - if not get_slot_value(next_conn_state, "moved"): - all_successors_moved = False - if moved and not all_successors_moved: - result = f"Connection {get_name(conn)} played before its turn." 
- result - ```; - } - - Clock:Class { - lower_cardinality = 1; - upper_cardinality = 1; - } - Clock_time:AttributeLink (Clock -> Integer) { - name = "time"; - optional = False; - constraint = `get_value(get_target(this)) >= 0`; - } -""" - -# Design model: the part that doesn't change -port_m_cs = """ - gen:Generator - - # newly arrived ships collect here - waiting:Place - c1:connection (gen -> waiting) - - inboundPassage:Place - c2:connection (waiting -> inboundPassage) - - outboundPassage:Place - - # inboundPassage and outboundPassage cannot have more than 3 ships total - passageCap:CapacityConstraint { - shipCapacity = 3; - } - :capacityOf (passageCap -> inboundPassage) - :capacityOf (passageCap -> outboundPassage) - - - # Berth 1 - - inboundBerth1:Place - berth1:Berth - outboundBerth1:Place - - inboundBerth1Cap:CapacityConstraint { shipCapacity = 1; } - :capacityOf (inboundBerth1Cap -> inboundBerth1) - outboundBerth1Cap:CapacityConstraint { shipCapacity = 1; } - :capacityOf (outboundBerth1Cap -> outboundBerth1) - - berth1Cap:CapacityConstraint { shipCapacity = 1; } - :capacityOf (berth1Cap -> berth1) - - c3:connection (inboundBerth1 -> berth1) - c4:connection (berth1 -> outboundBerth1) - - # Berth 2 - - inboundBerth2:Place - berth2:Berth - outboundBerth2:Place - - inboundBerth2Cap:CapacityConstraint { shipCapacity = 1; } - :capacityOf (inboundBerth2Cap -> inboundBerth2) - outboundBerth2Cap:CapacityConstraint { shipCapacity = 1; } - :capacityOf (outboundBerth2Cap -> outboundBerth2) - - berth2Cap:CapacityConstraint { shipCapacity = 1; } - :capacityOf (berth2Cap -> berth2) - - c5:connection (inboundBerth2 -> berth2) - c6:connection (berth2 -> outboundBerth2) - - - # can either go to Berth 1 or Berth 2 - c7:connection (inboundPassage -> inboundBerth1) - c8:connection (inboundPassage -> inboundBerth2) - - c9:connection (outboundBerth1 -> outboundPassage) - c10:connection (outboundBerth2 -> outboundPassage) - - - # ships that have been served are counted here - served:Place - c11:connection (outboundPassage -> served) - - - workers:WorkerSet { - numWorkers = 1; - } - :canOperate (workers -> berth1) - :canOperate (workers -> berth2) -""" - -# Initial runtime model: the part that changes (every execution step) -port_rt_m_cs = port_m_cs + """ - clock:Clock { - time = 0; - } - - waitingState:PlaceState { numShips = 2; } :of (waitingState -> waiting) - inboundPassageState:PlaceState { numShips = 0; } :of (inboundPassageState -> inboundPassage) - outboundPassageState:PlaceState { numShips = 0; } :of (outboundPassageState -> outboundPassage) - - inboundBerth1State:PlaceState { numShips = 0; } :of (inboundBerth1State -> inboundBerth1) - outboundBerth1State:PlaceState { numShips = 0; } :of (outboundBerth1State -> outboundBerth1) - inboundBerth2State:PlaceState { numShips = 0; } :of (inboundBerth2State -> inboundBerth2) - outboundBerth2State:PlaceState { numShips = 0; } :of (outboundBerth2State -> outboundBerth2) - - berth1State:BerthState { status = "empty"; numShips = 0; } :of (berth1State -> berth1) - berth2State:BerthState { status = "empty"; numShips = 0; } :of (berth2State -> berth2) - - servedState:PlaceState { numShips = 1; } :of (servedState -> served) - - workersState:WorkerSetState :of (workersState -> workers) - - c1S:ConnectionState { moved = False; } :of (c1S -> c1) - c2S:ConnectionState { moved = False; } :of (c2S -> c2) - c3S:ConnectionState { moved = False; } :of (c3S -> c3) - c4S:ConnectionState { moved = False; } :of (c4S -> c4) - c5S:ConnectionState { moved = False; } :of (c5S -> 
c5) - c6S:ConnectionState { moved = False; } :of (c6S -> c6) - c7S:ConnectionState { moved = False; } :of (c7S -> c7) - c8S:ConnectionState { moved = False; } :of (c8S -> c8) - c9S:ConnectionState { moved = False; } :of (c9S -> c9) - c10S:ConnectionState { moved = False; } :of (c10S -> c10) - c11S:ConnectionState { moved = False; } :of (c11S -> c11) -""" - -################################################### - -# ┌─────────────────┐ -# │ shipCapacity=3 │ -# ┌───┐ ┌───────┐ │┌──────────────┐ │ ┌───────┐ -# │gen├────►│waiting├────►│inboundPassage├───►│turning│ -# └───┘ └───────┘ │└──────────────┘ │ └───┬───┘ -# │ │ │ -# ┌──────┐ │┌───────────────┐│ │ -# │served│◄────┼outboundPassage│◄──────┘ -# └──────┘ │└───────────────┘│ -# └─────────────────┘ -smaller_model_cs = """ - gen:Generator - waiting:Place - inboundPassage:Place - turning:Place - outboundPassage:Place - served:Place - - gen2wait:connection (gen -> waiting) - wait2inbound:connection (waiting -> inboundPassage) - inbound2turning:connection (inboundPassage -> turning) - turning2outbound:connection (turning -> outboundPassage) - outbound2served:connection (outboundPassage -> served) - - # inboundPassage and outboundPassage cannot have more than 3 ships total - passageCap:CapacityConstraint { - shipCapacity = 3; - } - :capacityOf (passageCap -> inboundPassage) - :capacityOf (passageCap -> outboundPassage) -""" - -smaller_model_rt_cs = smaller_model_cs + """ - clock:Clock { - time = 0; - } - - waitingState:PlaceState { numShips = 1; } :of (waitingState -> waiting) - inboundPassageState:PlaceState { numShips = 1; } :of (inboundPassageState -> inboundPassage) - turningState:PlaceState { numShips = 1; } :of (turningState -> turning) - outboundPassageState:PlaceState { numShips = 1; } :of (outboundPassageState -> outboundPassage) - servedState:PlaceState { numShips = 0; } :of (servedState -> served) - - gen2waitState:ConnectionState { moved = False; } :of (gen2waitState -> gen2wait) - wait2inboundState:ConnectionState { moved = False; } :of (wait2inboundState -> wait2inbound) - inbound2turningState:ConnectionState { moved = False; } :of (inbound2turningState -> inbound2turning) - turning2outboundState:ConnectionState { moved = False; } :of (turning2outboundState -> turning2outbound) - outbound2servedState:ConnectionState { moved = False; } :of (outbound2servedState -> outbound2served) -""" - -################################################### - -# ┌────────────┐ -# │ workerset │ -# │ │ -# │numWorkers=1│ -# └──────┬─────┘ -# │canOperate -# │ -# ┌───▼────┐ -# ┌───┐ ┌───────┐ │┌─────┐ │ ┌──────┐ -# │gen├────►│waiting├────││berth├─┼───►│served│ -# └───┘ └───────┘ │└─────┘ │ └──────┘ -# │ship- │ -# │Capacity│ -# │ =1 │ -# └────────┘ -smaller_model2_cs = """ - gen:Generator - waiting:Place - berth:Berth - served:Place - - gen2wait:connection (gen -> waiting) - wait2berth:connection (waiting -> berth) - berth2served:connection (berth -> served) - - # berth can only hold 1 ship - passageCap:CapacityConstraint { - shipCapacity = 1; - } - :capacityOf (passageCap -> berth) - - workers:WorkerSet { - numWorkers = 1; - } - :canOperate (workers -> berth) -""" - -smaller_model2_rt_cs = smaller_model2_cs + """ - clock:Clock { - time = 0; - } - - waitingState:PlaceState { numShips = 1; } :of (waitingState -> waiting) - berthState:BerthState { numShips = 1; status = "served"; } :of (berthState -> berth) - servedState:PlaceState { numShips = 1; } :of (servedState -> served) - - gen2waitState:ConnectionState { moved = False; } :of (gen2waitState -> gen2wait) - 
wait2berthState:ConnectionState { moved = False; } :of (wait2berthState -> wait2berth) - berth2servedState:ConnectionState { moved = True; } :of (berth2servedState -> berth2served) - - workersState:WorkerSetState :of (workersState -> workers) -""" diff --git a/examples/semantics/operational/port/renderer.py b/examples/semantics/operational/port/renderer.py deleted file mode 100644 index 63bebb3..0000000 --- a/examples/semantics/operational/port/renderer.py +++ /dev/null @@ -1,78 +0,0 @@ -from concrete_syntax.common import indent -from concrete_syntax.graphviz.make_url import make_url -from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time, get_num_ships - -def render_port_to_dot(od, - make_id=lambda name,obj: name # by default, we just use the object name for the graphviz node name -): - txt = "" - - def render_place(place): - name = od.get_name(place) - return f'"{make_id(name,place)}" [ label = "{name}\\n ships = {get_num_ships(od, place)}", style = filled, fillcolor = lightblue ]\n' - - for _, cap in od.get_all_instances("CapacityConstraint", include_subtypes=False): - name = od.get_name(cap) - capacity = od.get_slot_value(cap, "shipCapacity") - txt += f'subgraph cluster_{name} {{\n label = "{name}\\n capacity = {capacity}";\n' - for lnk in od.get_outgoing(cap, "capacityOf"): - place = od.get_target(lnk) - txt += f' {render_place(place)}' - txt += f'}}\n' - - for _, place_state in od.get_all_instances("PlaceState", include_subtypes=False): - place = state_to_design(od, place_state) - if len(od.get_incoming(place, "capacityOf")) == 0: - txt += render_place(place) - - for _, berth_state in od.get_all_instances("BerthState", include_subtypes=False): - berth = state_to_design(od, berth_state) - name = od.get_name(berth) - txt += f'"{make_id(name,berth)}" [ label = "{name}\\n numShips = {get_num_ships(od, berth)}\\n status = {od.get_slot_value(berth_state, "status")}", fillcolor = yellow, style = filled]\n' - - for _, gen in od.get_all_instances("Generator", include_subtypes=False): - txt += f'"{make_id(od.get_name(gen),gen)}" [ label = "+", shape = diamond, fillcolor = green, fontsize = 30, style = filled ]\n' - - for _, conn in od.get_all_instances("connection"): - src = od.get_source(conn) - tgt = od.get_target(conn) - moved = od.get_slot_value(design_to_state(od, conn), "moved") - src_name = od.get_name(src) - tgt_name = od.get_name(tgt) - txt += f"{make_id(src_name,src)} -> {make_id(tgt_name,tgt)} [color=deepskyblue3, penwidth={1 if moved else 2}];\n" - - for _, workers in od.get_all_instances("WorkerSet"): - already_have = [] - name = od.get_name(workers) - num_workers = od.get_slot_value(workers, "numWorkers") - txt += f'{make_id(name,workers)} [label="{num_workers} worker(s)", shape=parallelogram, fillcolor=chocolate, style=filled];\n' - for lnk in od.get_outgoing(design_to_state(od, workers), "isOperating"): - berth = od.get_target(lnk) - already_have.append(berth) - txt += f"{make_id(name,workers)} -> {make_id(od.get_name(berth),berth)} [arrowhead=none, color=chocolate];\n" - for lnk in od.get_outgoing(workers, "canOperate"): - berth = od.get_target(lnk) - if berth not in already_have: - txt += f"{make_id(name,workers)} -> {make_id(od.get_name(berth),berth)} [style=dotted, arrowhead=none, color=chocolate];\n" - - return txt - -def render_port_graphviz(od): - return make_url(render_port_to_dot(od)) - -def render_port_textual(od): - txt = "" - for _, place_state in od.get_all_instances("PlaceState", include_subtypes=False): - place = 
state_to_design(od, place_state) - name = od.get_name(place) - txt += f'place "{name}" {"🚢"*get_num_ships(od, place)}\n' - - for _, berth_state in od.get_all_instances("BerthState", include_subtypes=False): - berth = state_to_design(od, berth_state) - name = od.get_name(berth) - operated_descr = "" - if len(od.get_incoming(berth, "isOperating")): - operated_descr = " and being operated" - txt += f'berth "{name}" {"🚢"*get_num_ships(od, berth)} {od.get_slot_value(berth_state, "status")}{operated_descr}\n' - - return txt diff --git a/examples/semantics/operational/port/rulebased_runner.py b/examples/semantics/operational/port/rulebased_runner.py deleted file mode 100644 index 56cf67f..0000000 --- a/examples/semantics/operational/port/rulebased_runner.py +++ /dev/null @@ -1,62 +0,0 @@ -import urllib.parse - -from state.devstate import DevState -from bootstrap.scd import bootstrap_scd -from framework.conformance import Conformance, render_conformance_check_result -from concrete_syntax.textual_od import parser -from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram -from api.od import ODAPI - -from transformation.ramify import ramify - -from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker -from examples.semantics.operational.port import models -from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time -from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz - -from examples.semantics.operational.port import rulebased_sem - -state = DevState() -scd_mmm = bootstrap_scd(state) # Load meta-meta-model - -### Load (meta-)models ### - -def parse_and_check(m_cs: str, mm, descr: str): - m = parser.parse_od( - state, - m_text=m_cs, - mm=mm) - conf = Conformance(state, m, mm) - print(descr, "...", render_conformance_check_result(conf.check_nominal())) - return m - -port_mm = parse_and_check(models.port_mm_cs, scd_mmm, "MM") -port_m = parse_and_check(models.port_m_cs, port_mm, "M") -port_rt_mm = parse_and_check(models.port_rt_mm_cs, scd_mmm, "RT-MM") -port_rt_m = parse_and_check(models.port_rt_m_cs, port_rt_mm, "RT-M") - -print() - -# print(render_class_diagram(state, port_rt_mm)) - -### Simulate ### - -port_rt_mm_ramified = ramify(state, port_rt_mm) - -rulebased_action_generator = rulebased_sem.get_action_generator(state, port_rt_mm, port_rt_mm_ramified) -termination_condition = rulebased_sem.TerminationCondition(state, port_rt_mm_ramified) - -sim = Simulator( - action_generator=rulebased_action_generator, - # decision_maker=RandomDecisionMaker(seed=2), - decision_maker=InteractiveDecisionMaker(), - termination_condition=termination_condition, - check_conformance=True, - verbose=True, - # renderer=render_port_textual, - # renderer=render_port_graphviz, -) - -od = ODAPI(state, port_rt_m, port_rt_mm) - -sim.run(od) diff --git a/examples/semantics/operational/port/rulebased_sem.py b/examples/semantics/operational/port/rulebased_sem.py deleted file mode 100644 index 8ff41b5..0000000 --- a/examples/semantics/operational/port/rulebased_sem.py +++ /dev/null @@ -1,67 +0,0 @@ -### Operational Semantics - defined by rule-based model transformation ### - -from concrete_syntax.textual_od.parser import parse_od -from transformation.rule import Rule, RuleMatcherRewriter, PriorityActionGenerator -from transformation.matcher import match_od -from util import loader - -import os -THIS_DIR = os.path.dirname(__file__) - -# kind: lhs, rhs, nac 
-get_filename = lambda rule_name, kind: f"{THIS_DIR}/rules/r_{rule_name}_{kind}.od" - - -def get_action_generator(state, rt_mm, rt_mm_ramified): - matcher_rewriter = RuleMatcherRewriter(state, rt_mm, rt_mm_ramified) - - ############################################################################# - # TO IMPLEMENT: Full semantics as a set of rule-based model transformations # - - rules0_dict = loader.load_rules(state, get_filename, rt_mm_ramified, - ["ship_sinks"] # <- list of rule_name of equal priority - ) - rules1_dict = loader.load_rules(state, get_filename, rt_mm_ramified, - ["ship_appears_in_berth"] - ) - # rules2_dict = ... - - generator = PriorityActionGenerator(matcher_rewriter, [ - rules0_dict, # highest priority - rules1_dict, # lower priority - # rules2_dict, # lowest priority - ]) - - # TO IMPLEMENT: Full semantics as a set of rule-based model transformations # - ############################################################################# - - return generator - - - - -# The termination condition can also be specified as a pattern: -class TerminationCondition: - def __init__(self, state, rt_mm_ramified): - self.state = state - self.rt_mm_ramified = rt_mm_ramified - - # TO IMPLEMENT: terminate simulation when the place 'served' contains 2 ships. - - ######################################## - # You should only edit the pattern below - pattern_cs = """ - # Placeholder to make the termination condition never hold: - :GlobalCondition { - condition = `False`; - } - """ - # You should only edit the pattern above - ######################################## - - self.pattern = parse_od(state, pattern_cs, rt_mm_ramified) - - def __call__(self, od): - for match in match_od(self.state, od.m, od.mm, self.pattern, self.rt_mm_ramified): - # stop after the first match (no need to look for more matches): - return "There are 2 ships served." # Termination condition satisfied diff --git a/examples/semantics/operational/port/rules/README.txt deleted file mode 100644 index 485ea73..0000000 --- a/examples/semantics/operational/port/rules/README.txt +++ /dev/null @@ -1,13 +0,0 @@ -The names of the files in this directory are important. - -A rule must always be named: - r_<rule_name>_<kind>.od (where <kind> is lhs, rhs or nac) - -It is allowed to have more than one NAC. In this case, the NACs must be named: - r_<rule_name>_nac.od - r_<rule_name>_nac2.od - r_<rule_name>_nac3.od - ... - - -For the assignment, you can delete the existing rules (they are nonsense) and start fresh.
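For example, the 'ship_sinks' rule that currently lives in this directory consists of r_ship_sinks_lhs.od and r_ship_sinks_rhs.od; if it needed negative application conditions, those files would be named r_ship_sinks_nac.od, r_ship_sinks_nac2.od, and so on.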
\ No newline at end of file diff --git a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_lhs.od b/examples/semantics/operational/port/rules/r_ship_appears_in_berth_lhs.od deleted file mode 100644 index 70bf95a..0000000 --- a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_lhs.od +++ /dev/null @@ -1,4 +0,0 @@ -berthState:RAM_BerthState { - RAM_numShips = `get_value(this) == 0`; - RAM_status = `get_value(this) == "empty"`; -} \ No newline at end of file diff --git a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_rhs.od b/examples/semantics/operational/port/rules/r_ship_appears_in_berth_rhs.od deleted file mode 100644 index e535053..0000000 --- a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_rhs.od +++ /dev/null @@ -1,4 +0,0 @@ -berthState:RAM_BerthState { - RAM_numShips = `1`; - RAM_status = `"served"`; -} \ No newline at end of file diff --git a/examples/semantics/operational/port/rules/r_ship_sinks_lhs.od b/examples/semantics/operational/port/rules/r_ship_sinks_lhs.od deleted file mode 100644 index 270c9ef..0000000 --- a/examples/semantics/operational/port/rules/r_ship_sinks_lhs.od +++ /dev/null @@ -1,5 +0,0 @@ -# Find any place that has at least one ship: - -placeState:RAM_PlaceState { - RAM_numShips = `get_value(this) > 0`; -} \ No newline at end of file diff --git a/examples/semantics/operational/port/rules/r_ship_sinks_rhs.od b/examples/semantics/operational/port/rules/r_ship_sinks_rhs.od deleted file mode 100644 index 828f3c8..0000000 --- a/examples/semantics/operational/port/rules/r_ship_sinks_rhs.od +++ /dev/null @@ -1,4 +0,0 @@ -placeState:RAM_PlaceState { - # Decrement number of ships: - RAM_numShips = `get_value(this) - 1`; -} \ No newline at end of file diff --git a/examples/semantics/operational/port/runner.py b/examples/semantics/operational/port/runner.py deleted file mode 100644 index bdb4a8d..0000000 --- a/examples/semantics/operational/port/runner.py +++ /dev/null @@ -1,56 +0,0 @@ -import urllib.parse - -from state.devstate import DevState -from bootstrap.scd import bootstrap_scd -from framework.conformance import Conformance, render_conformance_check_result -from concrete_syntax.textual_od import parser -from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram -from api.od import ODAPI - -from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker -from examples.semantics.operational.port import models -from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time -from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz - -# from examples.semantics.operational.port.joeris_solution import termination_condition, get_actions -from examples.semantics.operational.port.assignment import termination_condition, get_actions - -state = DevState() -scd_mmm = bootstrap_scd(state) # Load meta-meta-model - -### Load (meta-)models ### - -def parse_and_check(m_cs: str, mm, descr: str): - m = parser.parse_od( - state, - m_text=m_cs, - mm=mm) - conf = Conformance(state, m, mm) - print(descr, "...", render_conformance_check_result(conf.check_nominal())) - return m - -port_mm = parse_and_check(models.port_mm_cs, scd_mmm, "MM") -port_m = parse_and_check(models.port_m_cs, port_mm, "M") -port_rt_mm = parse_and_check(models.port_rt_mm_cs, scd_mmm, "RT-MM") -port_rt_m = parse_and_check(models.port_rt_m_cs, port_rt_mm, "RT-M") - -print() - -# 
print(render_class_diagram(state, port_rt_mm)) - -### Simulate ### - -sim = Simulator( - action_generator=get_actions, - # decision_maker=RandomDecisionMaker(seed=2), - decision_maker=InteractiveDecisionMaker(), - termination_condition=termination_condition, - check_conformance=True, - verbose=True, - renderer=render_port_textual, - # renderer=render_port_graphviz, -) - -od = ODAPI(state, port_rt_m, port_rt_mm) - -sim.run(od) diff --git a/examples/semantics/operational/simulator.py b/examples/semantics/operational/simulator.py deleted file mode 100644 index 248feda..0000000 --- a/examples/semantics/operational/simulator.py +++ /dev/null @@ -1,70 +0,0 @@ -import abc -import random -import math -import functools -import sys - -from framework.conformance import Conformance, render_conformance_check_result -from concrete_syntax.common import indent -from concrete_syntax.textual_od.renderer import render_od -from transformation.cloner import clone_od -from api.od import ODAPI - -from util.simulator import MinimalSimulator, DecisionMaker, RandomDecisionMaker, InteractiveDecisionMaker - - -class Simulator(MinimalSimulator): - def __init__(self, - action_generator, - decision_maker: DecisionMaker, - termination_condition=lambda od: None, - check_conformance=True, - verbose=True, - renderer=lambda od: render_od(od.state, od.m, od.mm), - ): - super().__init__( - action_generator=action_generator, - decision_maker=decision_maker, - termination_condition=lambda od: self.check_render_termination_condition(od), - verbose=verbose, - ) - self.check_conformance = check_conformance - self.actual_termination_condition = termination_condition - self.renderer = renderer - - def check_render_termination_condition(self, od): - # A termination condition checker that also renders the model, and performs conformance check - self._print("--------------") - self._print(indent(self.renderer(od), 2)) - self._print("--------------") - if self.check_conformance: - conf = Conformance(od.state, od.m, od.mm) - self._print(render_conformance_check_result(conf.check_nominal())) - self._print() - return self.actual_termination_condition(od) - -def make_actions_pure(actions, od): - # Copy model before modifying it - def exec_pure(action, od): - cloned_rt_m = clone_od(od.state, od.m, od.mm) - new_od = ODAPI(od.state, cloned_rt_m, od.mm) - msgs = action(new_od) - return (new_od, msgs) - - for descr, action in actions: - yield (descr, functools.partial(exec_pure, action, od)) - -def filter_valid_actions(pure_actions): - result = {} - def make_tuple(new_od, msgs): - return (new_od, msgs) - for name, callback in pure_actions: - # print(f"attempt '{name}' ...", end='\r') - (new_od, msgs) = callback() - conf = Conformance(new_od.state, new_od.m, new_od.mm) - errors = conf.check_nominal() - # erase current line: - # print(" ", end='\r') - if len(errors) == 0: - # updated RT-M is conform, we have a valid action: - yield (name, functools.partial(make_tuple, new_od, msgs)) diff --git a/examples/semantics/translational/.gitignore b/examples/semantics/translational/.gitignore deleted file mode 100644 index dfcb5e1..0000000 --- a/examples/semantics/translational/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Let's not accidently add the solution to assignment 5... 
-r_*.od - -snapshot_after_*.od \ No newline at end of file diff --git a/examples/semantics/translational/merged_mm.od b/examples/semantics/translational/merged_mm.od deleted file mode 100644 index 7ebee4a..0000000 --- a/examples/semantics/translational/merged_mm.od +++ /dev/null @@ -1,197 +0,0 @@ -# Auto-generated by /home/maestro/repos/MV2/examples/semantics/translational/regenerate_mm.py. - -# Merged run-time meta-models of 'Petri Net' and 'Port' formalisms. -# An abstract 'Top'-class (superclass of everything else), and a 'generic_link'-association (which can connect everything with everything) have also been added. - -# PlantUML visualization: https://deemz.org/plantuml/pdf/hPTFYzim4CNl_XGYnqA27P8uDgM7tSEobsmWWHw3RCk9Y2CPMIcKThzxHyuViiMGPwCSzhJpqxoPfz4uo2lUD6pqockUI_lxLQl66YwLPIF66nPUVxkEF-ut2uk8_GaOQmwola5OojwL5NjXWi_WUi1wjQvuBZQMMm6ZborQdKzRVHIgwUB-rEOep4RW-POtw2MqazehJR4WucV0CrUvtB97HdckO4pHZT5dawEvH25l8RUkLZe_icWoYS3mQTmMnygJw2hBYp3sqASsqPnVt44nPrVfZJLIxJjaRdMDCkFuKMDhApGqcJs6thtJIrAIFJBQag2XVFeO-YQKCDng0uSdNuIljeQhHbgf5Kh8mawFhLTqxvN8BSygk0vPtErNgOueelZIZciE9ATNFyhB03hfNtI3KlQYTIMu-iyW_OZtkREXgTOv8AxZ32QMhT3WwN-wAV3zxtZyd3ahn7ESkoiOZkQuJnorrYTkFaDmTBl1xFZKPoleJG6oez4CPfS0Ojsh0-BAfLUZY8LNeuJSAsuQ-nLR-3GArDaUOZD0R0-Z91cGNG5VCaWipLeGDqUCak6r2_rUCg_ZarPVhnE59rvjZ8pF7gqeI-XbNB1Hn2OJHiliUFo3djuHjbMdJ2FpcV9ro1OTkdE-0NmNbJ9kSa00VNdS3uZW0sXdJ5dErKVjbaNapI_BGK92EaUgmmuIuxmtu10Q7YJclkSXHLiEwBehGSfgjOCQ7mzgVEmQltShlCnt5Iszo8AI3JcfTO1iBWPmNqz0rQ8XLalQxbm_uZ_AVm== - - -CapacityConstraint:Class -PNPlaceState:Class -WorkerSet:Class -State:Class -Stateful:Class { - abstract = True; -} -Source:Class { - abstract = True; -} -Clock:Class { - lower_cardinality = 1; - upper_cardinality = 1; -} -BerthState:Class { - constraint = ``` - errors = [] - numShips = get_slot_value(this, "numShips") - status = get_slot_value(this, "status") - if (numShips == 0) != (status == "empty"): - errors.append(f"Inconsistent: numShips = {numShips}, but status = {status}") - errors - ```; -} -Top:Class { - abstract = True; -} -Place:Class -WorkerSetState:Class -Berth:Class -Generator:Class -PNTransition:Class -PNConnectable:Class { - abstract = True; -} -Sink:Class { - abstract = True; -} -ConnectionState:Class -PlaceState:Class -PNPlace:Class -shipCapacities:GlobalConstraint { - constraint = ``` - errors = [] - for _, constr in get_all_instances("CapacityConstraint"): - cap = get_slot_value(constr, "shipCapacity") - total = 0 - place_names = [] # for debugging - for lnk in get_outgoing(constr, "capacityOf"): - place = get_target(lnk) - place_names.append(get_name(place)) - place_state = get_source(get_incoming(place, "of")[0]) - total += get_slot_value(place_state, "numShips") - if total > cap: - errors.append(f"The number of ships in places {','.join(place_names)} ({total}) exceeds the capacity ({cap}) of CapacityConstraint {get_name(constr)}.") - errors - ```; -} -operatingCapacities:GlobalConstraint { - constraint = ``` - errors = [] - for _, workersetstate in get_all_instances("WorkerSetState"): - workerset = get_target(get_outgoing(workersetstate, "of")[0]) - num_operating = len(get_outgoing(workersetstate, "isOperating")) - num_workers = get_slot_value(workerset, "numWorkers") - if num_operating > num_workers: - errors.append(f"WorkerSet {get_name(workerset)} is operating more berths ({num_operating}) than there are workers ({num_workers})") - errors - ```; -} -WorkerSet_numWorkers:AttributeLink (WorkerSet -> Integer) { - name = "numWorkers"; - constraint = `get_value(get_target(this)) >= 0`; - optional = False; -} 
-PlaceState_numShips:AttributeLink (PlaceState -> Integer) { - constraint = `get_value(get_target(this)) >= 0`; - optional = False; - name = "numShips"; -} -ConnectionState_moved:AttributeLink (ConnectionState -> Boolean) { - name = "moved"; - constraint = ``` - result = True - all_successors_moved = True - moved = get_value(get_target(this)) - conn_state = get_source(this) - conn = get_target(get_outgoing(conn_state, "of")[0]) - tgt_place = get_target(conn) - next_conns = get_outgoing(tgt_place, "connection") - for next_conn in next_conns: - next_conn_state = get_source(get_incoming(next_conn, "of")[0]) - if not get_slot_value(next_conn_state, "moved"): - all_successors_moved = False - if moved and not all_successors_moved: - result = f"Connection {get_name(conn)} played before its turn." - result - ```; - optional = False; -} -BerthState_status:AttributeLink (BerthState -> String) { - optional = False; - name = "status"; - constraint = ``` - ( - get_value(get_target(this)) in { "empty", "unserved", "served" } - ) - ```; -} -PNPlaceState_numTokens:AttributeLink (PNPlaceState -> Integer) { - name = "numTokens"; - constraint = `"numTokens cannot be negative" if get_value(get_target(this)) < 0 else None`; - optional = False; -} -Clock_time:AttributeLink (Clock -> Integer) { - optional = False; - name = "time"; - constraint = `get_value(get_target(this)) >= 0`; -} -CapacityConstraint_shipCapacity:AttributeLink (CapacityConstraint -> Integer) { - optional = False; - name = "shipCapacity"; - constraint = `get_value(get_target(this)) >= 0`; -} -of:Association (State -> Stateful) { - target_lower_cardinality = 1; - source_upper_cardinality = 1; - source_lower_cardinality = 1; - target_upper_cardinality = 1; -} -arc:Association (PNConnectable -> PNConnectable) -canOperate:Association (WorkerSet -> Berth) { - target_lower_cardinality = 1; -} -inh_arc:Association (PNPlace -> PNTransition) -connection:Association (Source -> Sink) -pn_of:Association (PNPlaceState -> PNPlace) { - source_upper_cardinality = 1; - source_lower_cardinality = 1; - target_upper_cardinality = 1; - target_lower_cardinality = 1; -} -generic_link:Association (Top -> Top) -isOperating:Association (WorkerSetState -> Berth) { - constraint = ``` - errors = [] - - # get status of Berth - berth = get_target(this) - berth_state = get_source(get_incoming(berth, "of")[0]) - status = get_slot_value(berth_state, "status") - if status != "unserved": - errors.append(f"Cannot operate {get_name(berth)} because there is no unserved ship there.") - - # only operate Berts that we can operate - workerset = get_target(get_outgoing(get_source(this), "of")[0]) - can_operate = [get_target(lnk) for lnk in get_outgoing(workerset, "canOperate")] - if berth not in can_operate: - errors.append(f"Cannot operate {get_name(berth)}.") - - errors - ```; -} -capacityOf:Association (CapacityConstraint -> Place) { - target_lower_cardinality = 1; -} -:Inheritance (connection -> Stateful) -:Inheritance (CapacityConstraint -> Top) -:Inheritance (Sink -> Top) -:Inheritance (generic_link -> Top) -:Inheritance (Berth -> Place) -:Inheritance (WorkerSet -> Stateful) -:Inheritance (Place -> Source) -:Inheritance (PlaceState -> State) -:Inheritance (State -> Top) -:Inheritance (Source -> Top) -:Inheritance (Clock -> Top) -:Inheritance (Stateful -> Top) -:Inheritance (Place -> Stateful) -:Inheritance (PNConnectable -> Top) -:Inheritance (WorkerSetState -> State) -:Inheritance (Place -> Sink) -:Inheritance (BerthState -> PlaceState) -:Inheritance (PNTransition -> 
PNConnectable) -:Inheritance (ConnectionState -> State) -:Inheritance (PNPlaceState -> Top) -:Inheritance (Generator -> Source) -:Inheritance (Berth -> Stateful) -:Inheritance (PNPlace -> PNConnectable) \ No newline at end of file diff --git a/examples/semantics/translational/regenerate_mm.py b/examples/semantics/translational/regenerate_mm.py deleted file mode 100644 index 0a9b6dc..0000000 --- a/examples/semantics/translational/regenerate_mm.py +++ /dev/null @@ -1,65 +0,0 @@ -from state.devstate import DevState -from bootstrap.scd import bootstrap_scd -from concrete_syntax.textual_od import renderer -from concrete_syntax.plantuml.renderer import render_class_diagram -from concrete_syntax.plantuml.make_url import make_url -from api.od import ODAPI - -from transformation.topify.topify import Topifier -from transformation.merger import merge_models - -from util import loader - -from examples.semantics.operational.port import models - -import os -THIS_DIR = os.path.dirname(__file__) - -# get file contents as string -def read_file(filename): - with open(THIS_DIR+'/'+filename) as file: - return file.read() - -if __name__ == "__main__": - state = DevState() - scd_mmm = bootstrap_scd(state) - - # Load Petri Net meta-models - pn_mm_cs = read_file('../../petrinet/metamodels/mm_design.od') - pn_mm_rt_cs = pn_mm_cs + read_file('../../petrinet/metamodels/mm_runtime.od') - pn_mm = loader.parse_and_check(state, pn_mm_cs, scd_mmm, "Petri-Net Design meta-model") - pn_mm_rt = loader.parse_and_check(state, pn_mm_rt_cs, scd_mmm, "Petri-Net Runtime meta-model") - - # Load Port meta-models - port_mm = loader.parse_and_check(state, models.port_mm_cs, scd_mmm, "Port-MM") - port_mm_rt = loader.parse_and_check(state, models.port_rt_mm_cs, scd_mmm, "Port-MM-RT") - - # Merge Petri Net and Port meta-models - print("merging...") - merged_mm_rt = merge_models(state, mm=scd_mmm, models=[pn_mm_rt, port_mm_rt]) - print("done merging") - - print() - print("topifying... 
(may take a while)") - topifier = Topifier(state) - top_merged_mm_rt = topifier.topify_cd(merged_mm_rt) - print("done topifying") - - plantuml_url = make_url(render_class_diagram(state, top_merged_mm_rt)) - - print() - print(plantuml_url) - print() - - txt = renderer.render_od(state, top_merged_mm_rt, scd_mmm) - - filename = THIS_DIR+"/merged_mm.od" - - with open(filename, "w") as file: - file.write(f"# Auto-generated by {__file__}.\n\n") - file.write(f"# Merged run-time meta-models of 'Petri Net' and 'Port' formalisms.\n") - file.write(f"# An abstract 'Top'-class (superclass of everything else), and a 'generic_link'-association (which can connect everything with everything) have also been added.\n\n") - file.write(f"# PlantUML visualization: {plantuml_url}\n\n") - file.write(txt) - - print("Wrote file", filename) diff --git a/examples/semantics/translational/renderer.py b/examples/semantics/translational/renderer.py deleted file mode 100644 index 92a66d6..0000000 --- a/examples/semantics/translational/renderer.py +++ /dev/null @@ -1,90 +0,0 @@ -from api.od import ODAPI -from concrete_syntax.graphviz.renderer import render_object_diagram, make_graphviz_id -from concrete_syntax.graphviz.make_url import show_graphviz -from examples.petrinet.renderer import render_petri_net_to_dot -from examples.semantics.operational.port.renderer import render_port_to_dot -from examples.semantics.operational.port import helpers - -# COLORS -PLACE_BG = "#DAE8FC" # fill color -PLACE_FG = "#6C8EBF" # font, line, arrow -BERTH_BG = "#FFF2CC" -BERTH_FG = "#D6B656" -CAPACITY_BG = "#F5F5F5" -CAPACITY_FG = "#666666" -WORKER_BG = "#D5E8D4" -WORKER_FG = "#82B366" -GENERATOR_BG = "#FFE6CC" -GENERATOR_FG = "#D79B00" -CLOCK_BG = "black" -CLOCK_FG = "white" - -def graphviz_style_fg_bg(fg, bg): - return f"style=filled,fillcolor=\"{bg}\",color=\"{fg}\",fontcolor=\"{fg}\"" - -def render_port(state, m, mm): - dot = render_object_diagram(state, m, mm, - reify=True, - only_render=[ - # Only render these types - "Place", "Berth", "CapacityConstraint", "WorkerSet", "Generator", "Clock", - "connection", "capacityOf", "canOperate", "generic_link", - # Petri Net types not included (they are already rendered by other function) - # Port-State-types not included to avoid cluttering the diagram, but if you need them, feel free to add them. 
- ], - # We can style nodes/edges according to their type: - type_to_style={ - "Place": graphviz_style_fg_bg(PLACE_FG, PLACE_BG), - "Berth": graphviz_style_fg_bg(BERTH_FG, BERTH_BG), - "CapacityConstraint": graphviz_style_fg_bg(CAPACITY_FG, CAPACITY_BG), - "WorkerSet": "shape=oval,"+graphviz_style_fg_bg(WORKER_FG, WORKER_BG), - "Generator": "shape=parallelogram,"+graphviz_style_fg_bg(GENERATOR_FG, GENERATOR_BG), - "Clock": graphviz_style_fg_bg(CLOCK_FG, CLOCK_BG), - - # same blue as Place, thick line: - "connection": f"color=\"{PLACE_FG}\",fontcolor=\"{PLACE_FG}\",penwidth=2.0", - - # same grey as CapacityConstraint - "capacityOf": f"color=\"{CAPACITY_FG}\",fontcolor=\"{CAPACITY_FG}\"", - - # same green as WorkerSet - "canOperate": f"color=\"{WORKER_FG}\",fontcolor=\"{WORKER_FG}\"", - - # purple line - "generic_link": "color=purple,fontcolor=purple,arrowhead=onormal", - }, - # We have control over the node/edge labels that are rendered: - type_to_label={ - "CapacityConstraint": lambda capconstr_name, capconstr, odapi: f"{capconstr_name}\\nshipCapacity={odapi.get_slot_value(capconstr, "shipCapacity")}", - - "Place": lambda place_name, place, odapi: f"{place_name}\\nnumShips={helpers.get_num_ships(odapi, place)}", - - "Berth": lambda berth_name, berth, odapi: f"{berth_name}\\nnumShips={helpers.get_num_ships(odapi, berth)}\\nstatus={odapi.get_slot_value(helpers.design_to_state(odapi, berth), "status")}", - - "Clock": lambda _, clock, odapi: f"Clock\\ntime={odapi.get_slot_value(clock, "time")}", - - "connection": lambda conn_name, conn, odapi: f"{conn_name}\\nmoved={odapi.get_slot_value(helpers.design_to_state(odapi, conn), "moved")}", - - # hide generic link labels - "generic_link": lambda lnk_name, lnk, odapi: "", - - "WorkerSet": lambda ws_name, ws, odapi: f"{ws_name}\\nnumWorkers={odapi.get_slot_value(ws, "numWorkers")}", - - # hide the type (it's already clear enough) - "Generator": lambda gen_name, gen, odapi: gen_name, - }, - ) - return dot - -def render_port_and_petri_net(state, m, mm): - od = ODAPI(state, m, mm) - dot = "" - dot += "// petri net:\n" - dot += render_petri_net_to_dot(od) - dot += "\n// the rest:\n" - dot += render_port(state, m, mm) - return dot - - -def show_port_and_petri_net(state, m, mm, engine="dot"): - show_graphviz(render_port_and_petri_net(state, m, mm), engine=engine) diff --git a/examples/semantics/translational/rules/gen_pn/r_00_place2place_lhs.od b/examples/semantics/translational/rules/gen_pn/r_00_place2place_lhs.od deleted file mode 100644 index 75a3c7c..0000000 --- a/examples/semantics/translational/rules/gen_pn/r_00_place2place_lhs.od +++ /dev/null @@ -1,5 +0,0 @@ -# Look for a Place and its PlaceState: - -port_place:RAM_Place -port_place_state:RAM_PlaceState -port_of:RAM_of (port_place_state -> port_place) diff --git a/examples/semantics/translational/rules/gen_pn/r_00_place2place_nac.od b/examples/semantics/translational/rules/gen_pn/r_00_place2place_nac.od deleted file mode 100644 index 5df9f6a..0000000 --- a/examples/semantics/translational/rules/gen_pn/r_00_place2place_nac.od +++ /dev/null @@ -1,14 +0,0 @@ -# Our LHS: - - port_place:RAM_Place - port_place_state:RAM_PlaceState - port_of:RAM_of (port_place_state -> port_place) - - -# The elements from our RHS (this prevents the rule from firing forever): - - pn_place:RAM_PNPlace - place2place:RAM_generic_link (pn_place -> port_place) - - pn_place_state:RAM_PNPlaceState - :RAM_pn_of(pn_place_state -> pn_place) diff --git a/examples/semantics/translational/rules/gen_pn/r_00_place2place_rhs.od 
b/examples/semantics/translational/rules/gen_pn/r_00_place2place_rhs.od deleted file mode 100644 index ea6a227..0000000 --- a/examples/semantics/translational/rules/gen_pn/r_00_place2place_rhs.od +++ /dev/null @@ -1,22 +0,0 @@ -# Our entire LHS: - - port_place:RAM_Place - port_place_state:RAM_PlaceState - port_of:RAM_of (port_place_state -> port_place) - - -# To create: a Petri Net-place, and link it to our Port-place - - pn_place:RAM_PNPlace { - # new feature: you can control the name of the object to be created: - name = `f"ships_{get_name(matched("port_place"))}"`; - } - place2place:RAM_generic_link (pn_place -> port_place) - - -# And also create: a Petri Net-PlaceState (indicating the amount of tokens in our newly created place) - - pn_place_state:RAM_PNPlaceState { - RAM_numTokens = `get_slot_value(matched('port_place_state'), "numShips")`; - } - :RAM_pn_of(pn_place_state -> pn_place) diff --git a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_lhs.od b/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_lhs.od deleted file mode 100644 index bdde559..0000000 --- a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_lhs.od +++ /dev/null @@ -1,7 +0,0 @@ -# Just look for a connection and its state: - -port_src:RAM_Source -port_snk:RAM_Sink -port_conn:RAM_connection (port_src -> port_snk) -port_conn_state:RAM_ConnectionState -port_of:RAM_of (port_conn_state -> port_conn) \ No newline at end of file diff --git a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_nac.od b/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_nac.od deleted file mode 100644 index ff5b64c..0000000 --- a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_nac.od +++ /dev/null @@ -1,10 +0,0 @@ -# Our LHS: - -port_src:RAM_Source -port_snk:RAM_Sink -port_conn:RAM_connection (port_src -> port_snk) - -# There should not yet be a Petri Net transition linked to the connection: - -pn_transition:RAM_PNTransition -:RAM_generic_link (pn_transition -> port_conn) \ No newline at end of file diff --git a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_rhs.od b/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_rhs.od deleted file mode 100644 index a21b72f..0000000 --- a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_rhs.od +++ /dev/null @@ -1,28 +0,0 @@ -# Our LHS: - -port_src:RAM_Source -port_snk:RAM_Sink -port_conn:RAM_connection (port_src -> port_snk) -port_conn_state:RAM_ConnectionState -port_of:RAM_of (port_conn_state -> port_conn) - -# Create a Petri Net transition, and link it to our port-connection: - -move_transition:RAM_PNTransition { - name = `f"move_{get_name(matched("port_conn"))}"`; -} - -moved_place:RAM_PNPlace { - name = `f" moved_{get_name(matched("port_conn"))}"`; -} -moved_place_state:RAM_PNPlaceState { - RAM_numTokens = `1 if get_slot_value(matched('port_conn_state'), "moved") else 0`; -} -:RAM_pn_of (moved_place_state -> moved_place) -# when firing a 'move', put a token in the 'moved'-place -:RAM_arc (move_transition -> moved_place) - -trans2conn:RAM_generic_link (move_transition -> port_conn) -moved2conn:RAM_generic_link (moved_place -> port_conn) - -# Note that we are not yet creating any incoming/outgoing petri net arcs! This will be done in another rule. 
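As a purely illustrative sketch of the rule syntax (the rule name '20_src2arc' and all pattern labels below are hypothetical, and this is not necessarily the next step a correct assignment solution needs), a follow-up rule that adds the transition's input arc from the already-generated PN place of the connection's source could look roughly as follows, reusing the generic_link traceability links created by the two rules above:

# r_20_src2arc_lhs.od (sketch): a port connection whose transition and source place were already generated
port_src:RAM_Source
port_snk:RAM_Sink
port_conn:RAM_connection (port_src -> port_snk)
move_transition:RAM_PNTransition
trans2conn:RAM_generic_link (move_transition -> port_conn)
pn_src_place:RAM_PNPlace
place2place:RAM_generic_link (pn_src_place -> port_src)

# r_20_src2arc_rhs.od (sketch): repeat the entire LHS, then add the input arc of the transition
:RAM_arc (pn_src_place -> move_transition)

# r_20_src2arc_nac.od (sketch): the LHS plus that same arc, so the rule fires at most once per connection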
\ No newline at end of file diff --git a/examples/semantics/translational/runner_exec_pn.py b/examples/semantics/translational/runner_exec_pn.py deleted file mode 100644 index 6d43121..0000000 --- a/examples/semantics/translational/runner_exec_pn.py +++ /dev/null @@ -1,82 +0,0 @@ -from state.devstate import DevState -from bootstrap.scd import bootstrap_scd -from concrete_syntax.textual_od import parser, renderer -from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram -from concrete_syntax.plantuml.make_url import make_url -from api.od import ODAPI - -from transformation.ramify import ramify -from transformation.topify.topify import Topifier -from transformation.merger import merge_models -from transformation.ramify import ramify -from transformation.rule import RuleMatcherRewriter, ActionGenerator - -from util import loader - -from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker -from examples.semantics.operational.port import models -from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time -from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz -from examples.petrinet.renderer import show_petri_net -from examples.semantics.operational import simulator - -import os -import sys -THIS_DIR = os.path.dirname(__file__) - -# get file contents as string -def read_file(filename): - with open(THIS_DIR+'/'+filename) as file: - return file.read() - -if __name__ == "__main__": - if len(sys.argv) != 2: - print("Usage:") - print(f" python {__file__} model.od") - print("where `model.od` is a valid instance of Port+Petri-Net.") - sys.exit(1) - - model_to_open = sys.argv[1] - - state = DevState() - scd_mmm = bootstrap_scd(state) - - print('loading merged MM...') - merged_mm = loader.parse_and_check(state, read_file("merged_mm.od"), scd_mmm, "merged_mm.od", - check_conformance=False, # no need to check conformance every time - ) - - print('ramifying...') - ramified_merged_mm = ramify(state, merged_mm) - - print('loading petri net rules...') - rules = loader.load_rules(state, - lambda rule_name, kind: f"{THIS_DIR}/../../petrinet/operational_semantics/r_{rule_name}_{kind}.od", - ramified_merged_mm, - ["fire_transition"]) - - print('loading model...') - filename = f"{THIS_DIR}/{model_to_open}" - with open(filename, "r") as file: - model = loader.parse_and_check(state, file.read(), merged_mm, "model", - check_conformance=False, # no need to check conformance every time - ) - print('loaded', filename) - - print('ready!') - - matcher_rewriter = RuleMatcherRewriter(state, merged_mm, ramified_merged_mm) - action_generator = ActionGenerator(matcher_rewriter, rules) - - def render(od): - show_petri_net(od) # graphviz in web browser - return renderer.render_od(state, od.m, od.mm) # text in terminal - - sim = simulator.Simulator( - action_generator=action_generator, - decision_maker=simulator.InteractiveDecisionMaker(auto_proceed=False), - # decision_maker=simulator.RandomDecisionMaker(seed=0), - renderer=render, - ) - - sim.run(ODAPI(state, model, merged_mm)) diff --git a/examples/semantics/translational/runner_translate.py b/examples/semantics/translational/runner_translate.py deleted file mode 100644 index 3949b9d..0000000 --- a/examples/semantics/translational/runner_translate.py +++ /dev/null @@ -1,140 +0,0 @@ -from state.devstate import DevState -from bootstrap.scd import bootstrap_scd -from concrete_syntax.textual_od import parser, 
renderer -from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram -from concrete_syntax.plantuml.make_url import make_url -from api.od import ODAPI - -from transformation.ramify import ramify -from transformation.rule import RuleMatcherRewriter - -from util import loader -from util.module_to_dict import module_to_dict - -from examples.semantics.operational.port import models, helpers -from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz -from examples.semantics.translational.renderer import show_port_and_petri_net -from examples.petrinet.renderer import render_petri_net - -import os -THIS_DIR = os.path.dirname(__file__) - -# get file contents as string -def read_file(filename): - with open(THIS_DIR+'/'+filename) as file: - return file.read() - -if __name__ == "__main__": - state = DevState() - scd_mmm = bootstrap_scd(state) - - print('loading merged MM...') - merged_mm = loader.parse_and_check(state, read_file("merged_mm.od"), scd_mmm, "merged_mm.od", - check_conformance=False, # no need to check conformance every time - ) - - print('ramifying...') - ramified_merged_mm = ramify(state, merged_mm) - - ################################### - # This is the main part you want to edit (by adding/changing the transformation rules) - # | | | - # V V V - rule_names = [ - # high to low priority (the list-order here matters, the alphabetic-order of the names does not): - "00_place2place", - "10_conn2trans", - - # The above two rules create a bunch of PN places and PN transitions. - # (with generic_links to the Port-elements) - # One way to continue, is to create PN arcs between the places and transitions. - # Or you can also just start from scratch, if you have a better idea :) - ] - # The script below will keep executing the first rule until it no longer matches, then the second rule, etc. - ################################### - - - print('loading rules...') - rules = loader.load_rules(state, - lambda rule_name, kind: f"{THIS_DIR}/rules/gen_pn/r_{rule_name}_{kind}.od", - ramified_merged_mm, - rule_names) - - print('loading model...') - port_m_rt_initial = loader.parse_and_check(state, - # m_cs=models.port_rt_m_cs, # <-- your final solution should work with the full model - # m_cs=models.smaller_model_rt_cs, # <-- simpler model to try first - m_cs=models.smaller_model2_rt_cs, # <-- simpler model to try first - mm=merged_mm, - descr="initial model", - check_conformance=False, # no need to check conformance every time - ) - - print('ready!') - - port_m_rt = port_m_rt_initial - eval_context = { - # make all the functions defined in 'helpers' module available to 'condition'-code in LHS/NAC/RHS: - **module_to_dict(helpers), - # another example: in all 'condition'-code, there will be a global variable 'meaning_of_life', equal to 42: - 'meaning_of_life': 42, # just to demonstrate - feel free to remove this - } - print('The following additional globals are available:', ', '.join(list(eval_context.keys()))) - matcher_rewriter = RuleMatcherRewriter(state, merged_mm, ramified_merged_mm, eval_context=eval_context) - - ################################### - # Because the matching of many different rules can be slow, - # this script will store intermediate snapshots each time - # after having 'exhausted' a rule. - # When re-running the script, the stored snapshots will be loaded - # from disk instead of re-running the rules. 
- # You can force re-running the rules (e.g., because you changed the rules) - # by deleting the `snapshot_after_*` files. - ################################### - - ################################### - # You are allowed to edit the script below, but you don't have to. - # Changes you may want to make: - # - outcomment the 'render_petri_net'-call (preventing popups) - # - if you really want to do something crazy, - # you can even write a script that uses the lower-level `match_od`/`rewrite` primitives... - # - ?? - ################################### - - for i, rule_name in enumerate(rule_names): - filename = f"{THIS_DIR}/snapshot_after_{rule_name}.od" - print("rule =", rule_name) - rule = rules[rule_name] - try: - with open(filename, "r") as file: - port_m_rt = parser.parse_od(state, file.read(), merged_mm) - print(f'skip rule (found {filename})') - except FileNotFoundError: - # Fire every rule until it cannot match any longer: - while True: - result = matcher_rewriter.exec_on_first_match(port_m_rt, rule, rule_name, - in_place=True, # faster - ) - if result == None: - print(" no matches") - break - else: - port_m_rt, lhs_match, _ = result - print(" rewrote", lhs_match) - txt = renderer.render_od(state, port_m_rt, merged_mm) - with open(filename, "w") as file: - file.write(txt) - print('wrote', filename) - render_petri_net(ODAPI(state, port_m_rt, merged_mm)) - - # Uncomment to show also the port model: - # show_port_and_petri_net(state, port_m_rt, merged_mm) - - # Uncomment to pause after each rendering: - # input() - - ################################### - # Once you have generated a Petri Net, you can execute the petri net: - # - # python runner_exec_pn.py snapshot_after_XX_name_of_my_last_rule.od - ################################### diff --git a/examples/woods/common.py b/examples/woods/common.py deleted file mode 100644 index d301858..0000000 --- a/examples/woods/common.py +++ /dev/null @@ -1,58 +0,0 @@ -# Helpers -def state_of(od, animal): - return od.get_source(od.get_incoming(animal, "of")[0]) -def animal_of(od, state): - return od.get_target(od.get_outgoing(state, "of")[0]) -def get_time(od): - _, clock = od.get_all_instances("Clock")[0] - return clock, od.get_slot_value(clock, "time") - - -# Render our run-time state to a string -def render_woods(od): - txt = "" - _, time = get_time(od) - txt += f"T = {time}.\n" - txt += "Bears:\n" - def render_attacking(animal_state): - attacking = od.get_outgoing(animal_state, "attacking") - if len(attacking) == 1: - whom_state = od.get_target(attacking[0]) - whom_name = od.get_name(animal_of(od, whom_state)) - return f" attacking {whom_name}" - else: - return "" - def render_dead(animal_state): - return 'dead' if od.get_slot_value(animal_state, 'dead') else 'alive' - for _, bear_state in od.get_all_instances("BearState"): - bear = animal_of(od, bear_state) - hunger = od.get_slot_value(bear_state, "hunger") - txt += f" 🐻 {od.get_name(bear)} (hunger: {hunger}, {render_dead(bear_state)}) {render_attacking(bear_state)}\n" - txt += "Men:\n" - for _, man_state in od.get_all_instances("ManState"): - man = animal_of(od, man_state) - attacked_by = od.get_incoming(man_state, "attacking") - if len(attacked_by) == 1: - whom_state = od.get_source(attacked_by[0]) - whom_name = od.get_name(animal_of(od, whom_state)) - being_attacked = f" being attacked by {whom_name}" - else: - being_attacked = "" - txt += f" 👨 {od.get_name(man)} ({render_dead(man_state)}) {render_attacking(man_state)}{being_attacked}\n" - return txt - - -# When should simulation stop? 
-def termination_condition(od): - _, time = get_time(od) - if time >= 10: - return "Took too long" - - # End simulation when 2 animals are dead - who_is_dead = [] - for _, animal_state in od.get_all_instances("AnimalState"): - if od.get_slot_value(animal_state, "dead"): - animal_name = od.get_name(animal_of(od, animal_state)) - who_is_dead.append(animal_name) - if len(who_is_dead) >= 2: - return f"{' and '.join(who_is_dead)} are dead" diff --git a/examples/woods/models.py b/examples/woods/models.py deleted file mode 100644 index 45edb15..0000000 --- a/examples/woods/models.py +++ /dev/null @@ -1,158 +0,0 @@ -# Design meta-model -woods_mm_cs = """ - Animal:Class { - abstract = True; - } - - Bear:Class - :Inheritance (Bear -> Animal) - - Man:Class { - lower_cardinality = 1; - upper_cardinality = 2; - constraint = `get_value(get_slot(this, "weight")) > 20`; - } - :Inheritance (Man -> Animal) - - - Man_weight:AttributeLink (Man -> Integer) { - name = "weight"; - optional = False; - } - - afraidOf:Association (Man -> Animal) { - source_upper_cardinality = 6; - target_lower_cardinality = 1; - } -""" -# Runtime meta-model -woods_rt_mm_cs = woods_mm_cs + """ - AnimalState:Class { - abstract = True; - } - AnimalState_dead:AttributeLink (AnimalState -> Boolean) { - name = "dead"; - optional = False; - } - of:Association (AnimalState -> Animal) { - source_lower_cardinality = 1; - source_upper_cardinality = 1; - target_lower_cardinality = 1; - target_upper_cardinality = 1; - } - - BearState:Class { - constraint = `get_type_name(get_target(get_outgoing(this, "of")[0])) == "Bear"`; - } - :Inheritance (BearState -> AnimalState) - BearState_hunger:AttributeLink (BearState -> Integer) { - name = "hunger"; - optional = False; - constraint = ``` - val = get_value(get_target(this)) - val >= 0 and val <= 100 - ```; - } - - ManState:Class { - constraint = `get_type_name(get_target(get_outgoing(this, "of")[0])) == "Man"`; - } - :Inheritance (ManState -> AnimalState) - - attacking:Association (AnimalState -> ManState) { - # Animal can only attack one Man at a time - target_upper_cardinality = 1; - - # Man can only be attacked by one Animal at a time - source_upper_cardinality = 1; - - constraint = ``` - attacker = get_source(this) - if get_type_name(attacker) == "BearState": - # only BearState has 'hunger' attribute - hunger = get_value(get_slot(attacker, "hunger")) - else: - hunger = 100 # Man can always attack - attacker_dead = get_value(get_slot(attacker, "dead")) - attacked_state = get_target(this) - attacked_dead = get_value(get_slot(attacked_state, "dead")) - ( - hunger >= 50 - and not attacker_dead # cannot attack while dead - and not attacked_dead # cannot attack whoever is dead - ) - ```; - } - - attacking_starttime:AttributeLink (attacking -> Integer) { - name = "starttime"; - optional = False; - constraint = ``` - val = get_value(get_target(this)) - _, clock = get_all_instances("Clock")[0] - current_time = get_slot_value(clock, "time") - val >= 0 and val <= current_time - ```; - } - - # Just a clock singleton for keeping the time - Clock:Class { - lower_cardinality = 1; - upper_cardinality = 1; - } - Clock_time:AttributeLink (Clock -> Integer) { - name = "time"; - optional = False; - constraint = `get_value(get_target(this)) >= 0`; - } -""" - -# Our design model - the part that doesn't change -woods_m_cs = """ - george:Man { - weight = 80; - } - bill:Man { - weight = 70; - } - - teddy:Bear - mrBrown:Bear - - # george is afraid of both bears - :afraidOf (george -> teddy) - :afraidOf (george -> mrBrown) 
- - # the men are afraid of each other - :afraidOf (bill -> george) - :afraidOf (george -> bill) -""" - -# Our runtime model - the part that changes with every execution step -woods_rt_initial_m_cs = woods_m_cs + """ - georgeState:ManState { - dead = False; - } - :of (georgeState -> george) - - billState:ManState { - dead = False; - } - :of (billState -> bill) - - teddyState:BearState { - dead = False; - hunger = 40; - } - :of (teddyState -> teddy) - - mrBrownState:BearState { - dead = False; - hunger = 80; - } - :of (mrBrownState -> mrBrown) - - clock:Clock { - time = 0; - } -""" diff --git a/examples/woods/opsem_python.py b/examples/woods/opsem_python.py deleted file mode 100644 index 011ae00..0000000 --- a/examples/woods/opsem_python.py +++ /dev/null @@ -1,75 +0,0 @@ -### Operational Semantics - coded in Python ### - -import functools -from examples.semantics.operational.simulator import make_actions_pure, filter_valid_actions -from examples.woods.common import * - -# Action: Time advances, whoever is being attacked dies, bears become hungrier -def action_advance_time(od): - msgs = [] - clock, old_time = get_time(od) - new_time = old_time + 1 - od.set_slot_value(clock, "time", new_time) - - for _, attacking_link in od.get_all_instances("attacking"): - man_state = od.get_target(attacking_link) - animal_state = od.get_source(attacking_link) - if od.get_type_name(animal_state) == "BearState": - od.set_slot_value(animal_state, "hunger", max(od.get_slot_value(animal_state, "hunger") - 50, 0)) - od.set_slot_value(man_state, "dead", True) - od.delete(attacking_link) - msgs.append(f"{od.get_name(animal_of(od, animal_state))} kills {od.get_name(animal_of(od, man_state))}.") - - for _, bear_state in od.get_all_instances("BearState"): - if od.get_slot_value(bear_state, "dead"): - continue # bear already dead - old_hunger = od.get_slot_value(bear_state, "hunger") - new_hunger = min(old_hunger + 10, 100) - od.set_slot_value(bear_state, "hunger", new_hunger) - bear = od.get_target(od.get_outgoing(bear_state, "of")[0]) - bear_name = od.get_name(bear) - if new_hunger == 100: - od.set_slot_value(bear_state, "dead", True) - msgs.append(f"Bear {bear_name} dies of hunger.") - else: - msgs.append(f"Bear {bear_name}'s hunger level is now {new_hunger}.") - return msgs - -# Action: Animal attacks Man -# Note: We must use the names of the objects as parameters, because when cloning, the IDs of objects change! 
-def action_attack(od, animal_name: str, man_name: str): - msgs = [] - animal = od.get(animal_name) - man = od.get(man_name) - animal_state = state_of(od, animal) - man_state = state_of(od, man) - attack_link = od.create_link(None, # auto-generate link name - "attacking", animal_state, man_state) - _, clock = od.get_all_instances("Clock")[0] - current_time = od.get_slot_value(clock, "time") - od.set_slot_value(attack_link, "starttime", current_time) - msgs.append(f"{animal_name} is now attacking {man_name}") - return msgs - -# Get all actions that can be performed (including those that bring us to a non-conforming state) -def get_all_actions(od): - def _generate_actions(od): - # can always advance time: - yield ("advance time", action_advance_time) - - # if A is afraid of B, then B can attack A: - for _, afraid_link in od.get_all_instances("afraidOf"): - man = od.get_source(afraid_link) - animal = od.get_target(afraid_link) - animal_name = od.get_name(animal) - man_name = od.get_name(man) - man_state = state_of(od, man) - animal_state = state_of(od, animal) - descr = f"{animal_name} ({od.get_type_name(animal)}) attacks {man_name} ({od.get_type_name(man)})" - yield (descr, functools.partial(action_attack, animal_name=animal_name, man_name=man_name)) - - return make_actions_pure(_generate_actions(od), od) - -# Only get those actions that bring us to a conforming state -def get_valid_actions(od): - return filter_valid_actions(get_all_actions(od)) diff --git a/examples/woods/opsem_rulebased.py b/examples/woods/opsem_rulebased.py deleted file mode 100644 index da38179..0000000 --- a/examples/woods/opsem_rulebased.py +++ /dev/null @@ -1,25 +0,0 @@ -### Operational Semantics - defined by rule-based model transformation ### - -from transformation.rule import Rule, RuleMatcherRewriter, PriorityActionGenerator -from transformation.ramify import ramify -from util import loader - -import os -THIS_DIR = os.path.dirname(__file__) - -get_filename = lambda rule_name, kind: f"{THIS_DIR}/rules/r_{rule_name}_{kind}.od" - -def get_action_generator(state, rt_mm): - rt_mm_ramified = ramify(state, rt_mm) - - matcher_rewriter = RuleMatcherRewriter(state, rt_mm, rt_mm_ramified) - - rules0_dict = loader.load_rules(state, get_filename, rt_mm_ramified, ["hungry_bear_dies"]) - rules1_dict = loader.load_rules(state, get_filename, rt_mm_ramified, ["advance_time", "attack"]) - - generator = PriorityActionGenerator(matcher_rewriter, [ - rules0_dict, # highest priority - rules1_dict, # lowest priority - ]) - - return generator diff --git a/examples/woods/rules/r_advance_time_lhs.od b/examples/woods/rules/r_advance_time_lhs.od deleted file mode 100644 index 8ed63fc..0000000 --- a/examples/woods/rules/r_advance_time_lhs.od +++ /dev/null @@ -1,4 +0,0 @@ -clock:RAM_Clock { - RAM_time = `True`; -} - diff --git a/examples/woods/rules/r_advance_time_rhs.od b/examples/woods/rules/r_advance_time_rhs.od deleted file mode 100644 index 3c66ae0..0000000 --- a/examples/woods/rules/r_advance_time_rhs.od +++ /dev/null @@ -1,27 +0,0 @@ -clock:RAM_Clock { - RAM_time = `get_value(this) + 1`; -} - -# Advance time has a bunch of side-effects that we cannot easily model using NAC/LHS/RHS-kind of rules, -# so we just do it in code: - -:GlobalCondition { - condition = ``` - for _, attacking_link in get_all_instances("attacking"): - man_state = get_target(attacking_link) - animal_state = get_source(attacking_link) - if get_type_name(animal_state) == "BearState": - # Bear hunger decreases - set_slot_value(animal_state, "hunger", 
max(get_slot_value(animal_state, "hunger") - 50, 0)) - set_slot_value(man_state, "dead", True) - delete(attacking_link) - - # Bear hunger increases - for _, bear_state in get_all_instances("BearState"): - if get_slot_value(bear_state, "dead"): - continue # bear already dead - old_hunger = get_slot_value(bear_state, "hunger") - new_hunger = min(old_hunger + 10, 100) - set_slot_value(bear_state, "hunger", new_hunger) - ```; -} diff --git a/examples/woods/rules/r_attack_lhs.od b/examples/woods/rules/r_attack_lhs.od deleted file mode 100644 index bb5b4a6..0000000 --- a/examples/woods/rules/r_attack_lhs.od +++ /dev/null @@ -1,18 +0,0 @@ -# Some man is afraid of some animal: - -man:RAM_Man - -animal:RAM_Animal - -manAfraidOfAnimal:RAM_afraidOf (man -> animal) - - -# Both man and animal have an associated state: - -manState:RAM_ManState - -man2State:RAM_of (manState -> man) - -animalState:RAM_AnimalState - -animal2State:RAM_of (animalState -> animal) diff --git a/examples/woods/rules/r_attack_nac.od b/examples/woods/rules/r_attack_nac.od deleted file mode 100644 index 0b3f7a8..0000000 --- a/examples/woods/rules/r_attack_nac.od +++ /dev/null @@ -1,7 +0,0 @@ -# Cannot attack if already attacking - -manState:RAM_ManState - -animalState:RAM_AnimalState - -:RAM_attacking(animalState -> manState) \ No newline at end of file diff --git a/examples/woods/rules/r_attack_nac2.od b/examples/woods/rules/r_attack_nac2.od deleted file mode 100644 index 006b9bc..0000000 --- a/examples/woods/rules/r_attack_nac2.od +++ /dev/null @@ -1,7 +0,0 @@ -# Bear won't attack unless hungry - -animalState:RAM_AnimalState { - condition = ``` - get_type_name(this) == "BearState" and get_slot_value(this, "hunger") < 50 - ```; -} diff --git a/examples/woods/rules/r_attack_nac3.od b/examples/woods/rules/r_attack_nac3.od deleted file mode 100644 index 67b0e97..0000000 --- a/examples/woods/rules/r_attack_nac3.od +++ /dev/null @@ -1,5 +0,0 @@ -# If dead, cannot be attacked - -manState:RAM_ManState { - RAM_dead = `get_value(this)`; -} \ No newline at end of file diff --git a/examples/woods/rules/r_attack_nac4.od b/examples/woods/rules/r_attack_nac4.od deleted file mode 100644 index 383d396..0000000 --- a/examples/woods/rules/r_attack_nac4.od +++ /dev/null @@ -1,5 +0,0 @@ -# If dead, cannot attack - -animalState:RAM_AnimalState { - RAM_dead = `get_value(this)`; -} diff --git a/examples/woods/rules/r_attack_nac5.od b/examples/woods/rules/r_attack_nac5.od deleted file mode 100644 index 3568dc2..0000000 --- a/examples/woods/rules/r_attack_nac5.od +++ /dev/null @@ -1,7 +0,0 @@ -# Not already attacking someone else: - -animalState:RAM_AnimalState - -other:RAM_ManState - -:RAM_attacking(animalState -> other) \ No newline at end of file diff --git a/examples/woods/rules/r_attack_nac6.od b/examples/woods/rules/r_attack_nac6.od deleted file mode 100644 index 5d0c431..0000000 --- a/examples/woods/rules/r_attack_nac6.od +++ /dev/null @@ -1,7 +0,0 @@ -# Not already being attacked by someone else: - -manState:RAM_ManState - -other:RAM_AnimalState - -:RAM_attacking(other -> manState) \ No newline at end of file diff --git a/examples/woods/rules/r_attack_rhs.od b/examples/woods/rules/r_attack_rhs.od deleted file mode 100644 index 1b7d27f..0000000 --- a/examples/woods/rules/r_attack_rhs.od +++ /dev/null @@ -1,28 +0,0 @@ -# Our entire LHS (don't delete anything) - - # Some man is afraid of some animal: - - man:RAM_Man - - animal:RAM_Animal - - manAfraidOfAnimal:RAM_afraidOf (man -> animal) - - - # Both man and animal have an associated state: - - 
manState:RAM_ManState - - man2State:RAM_of (manState -> man) - - animalState:RAM_AnimalState - - animal2State:RAM_of (animalState -> animal) - - - -# Animal attacks man: - - :RAM_attacking(animalState -> manState) { - RAM_starttime = `get_slot_value(get_all_instances("Clock")[0][1], "time")`; - } diff --git a/examples/woods/rules/r_hungry_bear_dies_lhs.od b/examples/woods/rules/r_hungry_bear_dies_lhs.od deleted file mode 100644 index 3ec65ab..0000000 --- a/examples/woods/rules/r_hungry_bear_dies_lhs.od +++ /dev/null @@ -1,8 +0,0 @@ -bearState:RAM_BearState { - RAM_hunger = ``` - get_value(this) == 100 - ```; - RAM_dead = ``` - not get_value(this) - ```; -} diff --git a/examples/woods/rules/r_hungry_bear_dies_rhs.od b/examples/woods/rules/r_hungry_bear_dies_rhs.od deleted file mode 100644 index 874f881..0000000 --- a/examples/woods/rules/r_hungry_bear_dies_rhs.od +++ /dev/null @@ -1,4 +0,0 @@ -bearState:RAM_BearState { - RAM_hunger = `get_value(this)`; # unchanged - RAM_dead = `True`; -} diff --git a/examples/woods/woods_runner.py b/examples/woods/woods_runner.py deleted file mode 100644 index 2027fcd..0000000 --- a/examples/woods/woods_runner.py +++ /dev/null @@ -1,42 +0,0 @@ -from state.devstate import DevState -from bootstrap.scd import bootstrap_scd -from framework.conformance import Conformance, render_conformance_check_result -from concrete_syntax.textual_od import parser, renderer -from concrete_syntax.plantuml import renderer as plantuml -from api.od import ODAPI - -from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker -from examples.woods import models, opsem_python, opsem_rulebased -from examples.woods.common import termination_condition, render_woods - -from util import loader - -state = DevState() -scd_mmm = bootstrap_scd(state) # Load meta-meta-model - -### Load (meta-)models ### - -woods_mm = loader.parse_and_check(state, models.woods_mm_cs, scd_mmm, "MM") -woods_rt_mm = loader.parse_and_check(state, models.woods_rt_mm_cs, scd_mmm, "RT-MM") -woods_m = loader.parse_and_check(state, models.woods_m_cs, woods_mm, "M") -woods_rt_m = loader.parse_and_check(state, models.woods_rt_initial_m_cs, woods_rt_mm, "RT-M") - -print() - -rulebased_action_generator = opsem_rulebased.get_action_generator(state, woods_rt_mm) - -sim = Simulator( - # action_generator=opsem_python.get_valid_actions, - # action_generator=opsem_python.get_all_actions, - action_generator=rulebased_action_generator, - # decision_maker=RandomDecisionMaker(seed=3), - decision_maker=InteractiveDecisionMaker(), - termination_condition=termination_condition, - check_conformance=True, - verbose=True, - renderer=render_woods, -) - -od = ODAPI(state, woods_rt_m, woods_rt_mm) - -sim.run(od) diff --git a/framework/conformance.py b/framework/conformance.py index df5a4bf..d3e71ac 100644 --- a/framework/conformance.py +++ b/framework/conformance.py @@ -15,6 +15,30 @@ from api.od import ODAPI, bind_api_readonly import functools +def eval_context_decorator(func): + """ + Used to mark functions that can be called inside the evaluation context. + Base functions are mapped into the function, as well as the evaluation context. + This happens at runtime so typechecking will not be happy. + Important: Using the same name in the evaluation context as the function name + will lead to naming conflicts with the function as priority, resulting in missing argument errors. + + from typing import TYPE_CHECKING + if TYPE_CHECKING: + from api.od_stub import * + ... 
+ + Use this to partially fix the typechecking. + Optionally, define a stub for your own evaluation context and include it. + """ + def wrapper(*args, api_context, eval_context, **kwargs): + for key, value in api_context.items(): + func.__globals__[key] = value + for key, value in eval_context.items(): + func.__globals__[key] = value + return func(*args, **kwargs) + return wrapper + def render_conformance_check_result(error_list): if len(error_list) == 0: return "CONFORM" @@ -25,7 +49,7 @@ def render_conformance_check_result(error_list): class Conformance: # Parameter 'constraint_check_subtypes': whether to check local type-level constraints also on subtypes. - def __init__(self, state: State, model: UUID, type_model: UUID, constraint_check_subtypes=True): + def __init__(self, state: State, model: UUID, type_model: UUID, constraint_check_subtypes=True, *, eval_context = None): self.state = state self.bottom = Bottom(state) self.model = model @@ -51,6 +75,9 @@ class Conformance: self.structures = {} self.candidates = {} + # add user-defined functions to constraints + self.eval_context = eval_context if eval_context else {} + def check_nominal(self, *, log=False): """ @@ -248,6 +275,13 @@ class Conformance: raise Exception(f"{description} evaluation result should be boolean or string! Instead got {result}") # local constraints + _api_context = bind_api_readonly(self.odapi) + _global_binds = {**_api_context} + _eval_context = {**self.eval_context} + for key, code in _eval_context.items(): + _f = functools.partial(code, **{"api_context" :_api_context, "eval_context":_eval_context}) + _global_binds[key] = _f + _eval_context[key] = _f for type_name in self.bottom.read_keys(self.type_model): code = get_code(type_name) if code != None: @@ -256,7 +290,7 @@ class Conformance: description = f"Local constraint of \"{type_name}\" in \"{obj_name}\"" # print(description) try: - result = exec_then_eval(code, _globals=bind_api_readonly(self.odapi), _locals={'this': obj_id}) # may raise + result = exec_then_eval(code, _globals=_global_binds, _locals={'this': obj_id}) # may raise check_result(result, description) except: errors.append(f"Runtime error during evaluation of {description}:\n{indent(traceback.format_exc(), 6)}") @@ -278,7 +312,7 @@ class Conformance: if code != None: description = f"Global constraint \"{tm_name}\"" try: - result = exec_then_eval(code, _globals=bind_api_readonly(self.odapi)) # may raise + result = exec_then_eval(code, _globals=_global_binds) # may raise check_result(result, description) except: errors.append(f"Runtime error during evaluation of {description}:\n{indent(traceback.format_exc(), 6)}") diff --git a/framework/interactive_prompt.py deleted file mode 100644 index 7ecd3c9..0000000 --- a/framework/interactive_prompt.py +++ /dev/null @@ -1,98 +0,0 @@ -from framework.manager import Manager -from state.devstate import DevState -from InquirerPy import prompt, separator -from pprint import pprint -import prompt_questions as questions -from inspect import signature -from uuid import UUID -from ast import literal_eval - - -def generate_context_question(ctx_type, services): - """ - Converts service names to human readable form - """ - choices = [ - s.__name__.replace('_', ' ') for s in services - ] - choices = sorted(choices) - choices.append(separator.Separator()) - choices.append("close context") - ctx_question = [ - { - 'type': 'list', - 'name': 'op', - 'message': f'Currently in context {ctx_type.__name__}, which operation would you like to
perform?', - 'choices': choices, - 'filter': lambda x: x.replace(' ', '_') - } - ] - return ctx_question - - -def main(): - state = DevState() - man = Manager(state) - - while True: - if man.current_model != None and man.current_context == None: - # we have selected a model, so we display typing questions - answer = prompt(questions.MODEL_SELECTED) - ctx = man - elif man.current_model != None and man.current_context != None: - # we have selected both a model and a context, so we display available services - qs = generate_context_question(type(man.current_context), man.get_services()) - answer = prompt(qs) - if answer['op'] == 'close_context': - man.close_context() - continue - else: - ctx = man.current_context - else: - answer = prompt(questions.MODEL_MGMT) - ctx = man - - if answer['op'] == 'exit': - break - else: - method = getattr(ctx, answer['op']) - args_questions = [] - types = {} - for p in signature(method).parameters.values(): - types[p.name] = p.annotation if p.annotation else literal_eval # can't use filter in question dict, doesn't work for some reason... - if p.annotation == UUID: - args_questions.append({ - 'type': 'list', - 'name': p.name, - 'message': f'{p.name.replace("_", " ")}?', - 'choices': list(man.get_models()), - 'filter': lambda x: state.read_value(state.read_dict(state.read_root(), x)) - }) - else: - args_questions.append({ - 'type': 'input', - 'name': p.name, - 'message': f'{p.name.replace("_", " ")}?', - 'filter': lambda x: '' if x.lower() == 'false' else x - }) - args = prompt(args_questions) - args = {k: types[k](v) if len(v) > 0 else None for k, v in args.items()} - try: - output = method(**args) - if output != None: - try: - if isinstance(output, str): - raise TypeError - output = list(output) - if len(output) > 0: - for o in sorted(output): - print(f"\u2022 {o}") - except TypeError: - print(f"\u2022 {output}") - except RuntimeError as e: - print(e) - - -if __name__ == '__main__': - print("""Welcome to...\r\n __ ____ _____ \r\n | \\/ \\ \\ / /__ \\ \r\n | \\ / |\\ \\ / / ) |\r\n | |\\/| | \\ \\/ / / / \r\n | | | | \\ / / /_ \r\n |_| |_| \\/ |____| """) - main() diff --git a/framework/manager.py b/framework/manager.py deleted file mode 100644 index a320acb..0000000 --- a/framework/manager.py +++ /dev/null @@ -1,225 +0,0 @@ -from state.base import State -from bootstrap.scd import bootstrap_scd -from bootstrap.pn import bootstrap_pn -from services import implemented as services -from framework.conformance import Conformance -from uuid import UUID - - -class Manager: - def __init__(self, state: State): - self.current_model = None - self.current_context = None - self.state = state - bootstrap_scd(state) - # bootstrap_pn(state, "PN") - scd_node = self.state.read_dict(self.state.read_root(), "SCD") - for key_node in self.state.read_dict_keys(self.state.read_root()): - model_node = self.state.read_dict_node(self.state.read_root(), key_node) - self.state.create_edge(model_node, scd_node) - - def get_models(self): - """ - Retrieves all existing models - - Returns: - Names of exising models - """ - for key_node in self.state.read_dict_keys(self.state.read_root()): - yield self.state.read_value(key_node) - - def instantiate_model(self, type_model_name: str, name: str): - """ - Retrieves all existing models - - Args: - type_model_name: name of the type model we want to instantiate - name: name of the instance model to be created - - Returns: - Nothing - """ - root = self.state.read_root() - type_model_node = self.state.read_dict(root, type_model_name) - if 
type_model_node == None: - raise RuntimeError(f"No type model with name {type_model_name} found.") - else: - # check if model is a linguistic type model - scd_node = self.state.read_dict(self.state.read_root(), "SCD") - incoming = self.state.read_incoming(scd_node) - incoming = [self.state.read_edge(e)[0] for e in incoming] - if type_model_node not in incoming: - raise RuntimeError(f"Model with name {type_model_name} is not a type model.") - if name in map(self.state.read_value, self.state.read_dict_keys(root)): - raise RuntimeError(f"Model with name {name} already exists.") - new_model_root = self.state.create_node() - new_model_node = self.state.create_nodevalue(str(new_model_root)) - self.state.create_dict(root, name, new_model_node) - self.state.create_edge(new_model_node, type_model_node) - self.current_model = (name, new_model_root) - if type_model_name not in services: - raise RuntimeError(f"Services for type {type_model_name} not implemented.") - self.current_context = services[type_model_name](self.current_model[1], self.state) - - def select_model(self, name: str): - """ - Select a model to interact with - - Args: - name: name of the model we want to interact with - - Returns: - Nothing - """ - root = self.state.read_root() - model_node = self.state.read_dict(root, name) - if model_node == None: - raise RuntimeError(f"No model with name {name} found.") - model_root = UUID(self.state.read_value(model_node)) - self.current_model = (name, model_root) - - def close_model(self): - """ - Clear the currently selected model - - Returns: - Nothing - """ - self.current_model = None - self.current_context = None - - def get_types(self): - """ - Retrieve the types of the currently selected model - - Returns: - Names of the model's types - """ - root = self.state.read_root() - if self.current_model == None: - raise RuntimeError(f"No model currently selected.") - name, model = self.current_model - model_id = self.state.read_dict(root, name) - outgoing = self.state.read_outgoing(model_id) - outgoing = [e for e in outgoing if len(self.state.read_outgoing(e)) == 0] - elements = [self.state.read_edge(e)[1] for e in outgoing] - for e in elements: - incoming = self.state.read_incoming(e) - label_edge, = [e for e in incoming if len(self.state.read_outgoing(e)) == 1] - label_edge, = self.state.read_outgoing(label_edge) - _, label_node = self.state.read_edge(label_edge) - yield self.state.read_value(label_node) - - def select_context(self, name: str): - """ - Select a type to set as the current context - - Args: - name: name of the type/context - - Returns: - Nothing - """ - if name not in self.get_types(): - raise RuntimeError(f"No type {name} that currently selected model conforms to.") - if name not in services: - raise RuntimeError(f"Services for type {name} not implemented.") - self.current_context = services[name](self.current_model[1], self.state) - self.current_context.from_bottom() - - def close_context(self): - """ - Exit the current (type) context - - Returns: - Nothing - """ - self.current_context.to_bottom() - self.current_context = None - - def get_services(self): - """ - Retrieve the services available in the current context - - Returns: - Functions exposed by the current context's implementation - """ - if self.current_model == None: - raise RuntimeError(f"No model currently selected.") - if self.current_context == None: - raise RuntimeError(f"No context currently selected.") - yield from [ - getattr(self.current_context, func) - for func in dir(self.current_context) - if 
callable(getattr(self.current_context, func)) - and not func.startswith("__") - and not func == "from_bottom" - and not func == "to_bottom" - ] - - def check_conformance(self, type_model_name: str, model_name: str): - """ - If there are existing morphisms between the model and type model - check nominal conformance - Else - find conformance using structural conformance check - - Args: - type_model_name: name of the type model to check conformance against - model_name: name of the instance model - - Returns: - Boolean indicating whether conformance was found - """ - root = self.state.read_root() - type_model_node = self.state.read_dict(root, type_model_name) - if type_model_node == None: - raise RuntimeError(f"No type model with name {type_model_name} found.") - model_node = self.state.read_dict(root, model_name) - if model_node == None: - raise RuntimeError(f"No model with name {model_node} found.") - types = self.state.read_outgoing(model_node) - types = [self.state.read_edge(e)[1] for e in types] - # if type_model_node not in types: - if True: - print("checking structural conformance") - conf = Conformance(self.state, - UUID(self.state.read_value(model_node)), - UUID(self.state.read_value(type_model_node))).check_structural(log=True) - if conf: - self.state.create_edge(model_node, type_model_node) - return conf - else: - print("checking nominal conformance") - return Conformance(self.state, - UUID(self.state.read_value(model_node)), - UUID(self.state.read_value(type_model_node))).check_nominal(log=True) - - def dump_state(self): - """ - Dumps the current state of the Modelverse to a pickle file - """ - import pickle - with open("state.p", "wb") as file: - pickle.dump(self.state, file) - - def load_state(self): - """ - Loas a state of the Modelverse from a pickle file - """ - import pickle - with open("state.p", "rb") as file: - self.state = pickle.load(file) - - def to_graphviz(self): - self.state.dump("state.dot") - - -if __name__ == '__main__': - from state.devstate import DevState - s = DevState() - m = Manager(s) - m.select_model("SCD") - m.select_context("SCD") - for f in m.get_services(): - print(f) diff --git a/framework/prompt_questions.py b/framework/prompt_questions.py deleted file mode 100644 index c4b8cb0..0000000 --- a/framework/prompt_questions.py +++ /dev/null @@ -1,37 +0,0 @@ -from InquirerPy.separator import Separator - -MODEL_SELECTED = [ - { - 'type': 'list', - 'name': 'op', - 'message': 'Model selected... 
Which operation would you like to perform?', - 'choices': [ - 'get types', - 'select context', - Separator(), - 'close model' - ], - 'filter': lambda x: x.replace(' ', '_') - } -] - -MODEL_MGMT = [ - { - 'type': 'list', - 'name': 'op', - 'message': 'Which model management operation would you like to perform?', - 'choices': [ - 'get models', - 'select model', - 'instantiate model', - 'check conformance', - Separator(), - 'load state', - 'dump state', - 'to graphviz', - Separator(), - 'exit' - ], - 'filter': lambda x: x.replace(' ', '_') - } -] diff --git a/requirements.txt b/requirements.txt index 2105b38..179f66d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ lark==1.1.9 -jinja2==3.1.4 \ No newline at end of file +jinja2==3.1.4 +git+https://msdl.uantwerpen.be/git/jexelmans/drawio2py \ No newline at end of file diff --git a/transformation/merger.py b/transformation/merger.py index fcf6f81..bc4033b 100644 --- a/transformation/merger.py +++ b/transformation/merger.py @@ -52,7 +52,7 @@ def merge_models(state, mm, models: list[UUID]): model = state.read_value(obj) scd = SCD(merged, state) created_obj = scd.create_model_ref(prefixed_obj_name, model) - merged_odapi._ODAPI__recompute_mappings() # dirty!! + merged_odapi.recompute_mappings() # dirty!! else: # create node or edge if state.is_edge(obj): diff --git a/transformation/rewriter.py b/transformation/rewriter.py index 91ed53a..3b3b443 100644 --- a/transformation/rewriter.py +++ b/transformation/rewriter.py @@ -149,13 +149,13 @@ def rewrite(state, if od.is_typed_by(bottom, rhs_type, class_type): obj_name = first_available_name(suggested_name) host_od._create_object(obj_name, host_type) - host_odapi._ODAPI__recompute_mappings() + host_odapi.recompute_mappings() rhs_match[rhs_name] = obj_name elif od.is_typed_by(bottom, rhs_type, assoc_type): _, _, host_src, host_tgt = get_src_tgt() link_name = first_available_name(suggested_name) host_od._create_link(link_name, host_type, host_src, host_tgt) - host_odapi._ODAPI__recompute_mappings() + host_odapi.recompute_mappings() rhs_match[rhs_name] = link_name elif od.is_typed_by(bottom, rhs_type, attr_link_type): host_src_name, _, host_src, host_tgt = get_src_tgt() @@ -163,7 +163,7 @@ def rewrite(state, host_attr_name = host_mm_odapi.get_slot_value(host_attr_link, "name") link_name = f"{host_src_name}_{host_attr_name}" # must follow naming convention here host_od._create_link(link_name, host_type, host_src, host_tgt) - host_odapi._ODAPI__recompute_mappings() + host_odapi.recompute_mappings() rhs_match[rhs_name] = link_name elif rhs_type == rhs_mm_odapi.get("ActionCode"): # If we encounter ActionCode in our RHS, we assume that the code computes the value of an attribute... 
diff --git a/transformation/schedule/Tests/Test_meta_model.py b/transformation/schedule/Tests/Test_meta_model.py new file mode 100644 index 0000000..a0ef942 --- /dev/null +++ b/transformation/schedule/Tests/Test_meta_model.py @@ -0,0 +1,502 @@ +import io +import os +import sys +import unittest + +sys.path.insert( + 0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) +) + +from api.od import ODAPI +from bootstrap.scd import bootstrap_scd +from transformation.schedule.rule_scheduler import RuleScheduler +from state.devstate import DevState +from transformation.ramify import ramify +from util import loader + + +class Test_Meta_Model(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.dir = os.path.dirname(__file__) + state = DevState() + scd_mmm = bootstrap_scd(state) + with open(f"{cls.dir}/models/mm_petrinet.od") as file: + mm_s = file.read() + with open(f"{cls.dir}/models/m_petrinet.od") as file: + m_s = file.read() + mm = loader.parse_and_check(state, mm_s, scd_mmm, "mm") + m = loader.parse_and_check(state, m_s, mm, "m") + mm_rt_ramified = ramify(state, mm) + cls.model_param = (state, m, mm) + cls.generator_param = (state, mm, mm_rt_ramified) + + def setUp(self): + self.model = ODAPI(*self.model_param) + self.out = io.StringIO() + self.generator = RuleScheduler( + *self.generator_param, + directory=self.dir + "/models", + verbose=True, + outstream=self.out, + ) + + def _test_conformance( + self, file: str, expected_substr_err: dict[tuple[str, str], list[list[str]]] + ) -> None: + try: + self.generator.load_schedule(f"schedule/{file}") + errors = self.out.getvalue().split("\u25b8")[1:] + if len(errors) != len(expected_substr_err.keys()): + assert len(errors) == len(expected_substr_err.keys()) + for err in errors: + error_lines = err.strip().split("\n") + line = error_lines[0] + for key_pattern in expected_substr_err.keys(): + if (key_pattern[0] in line) and (key_pattern[1] in line): + key = key_pattern + break + else: + assert False + expected = expected_substr_err[key] + if (len(error_lines) - 1) != len(expected): + assert (len(error_lines) - 1) == len(expected) + it = error_lines.__iter__() + it.__next__() + for err_line in it: + if not any( + all(exp in err_line for exp in line_exp) + for line_exp in expected + ): + assert False + expected_substr_err.pop(key) + except AssertionError: + raise + except Exception as e: + assert False + + def test_no_start(self): + self._test_conformance("no_start.od", {("Start", "Cardinality"): []}) + + def test_no_end(self): + self._test_conformance("no_end.od", {("End", "Cardinality"): []}) + + def test_multiple_start(self): + self._test_conformance("multiple_start.od", {("Start", "Cardinality"): []}) + + def test_multiple_end(self): + self._test_conformance("multiple_end.od", {("End", "Cardinality"): []}) + + def test_connections_start(self): + # try to load the following schedule. + # The schedules contains happy day nodes and faulty nodes. + # Use the error messages to select error location and further validate the multiple reasons of failure. 
+ self._test_conformance( + "connections_start.od", + { + ("Start", "start"): [ # locate failure (contains these two substrings), make sure other do not fully overlap -> flakey test + ["input exec", "foo_in", "exist"], # 4 total reasons, a reason contains these three substrings + ["output exec", "out", "multiple"], # a reason will match to exactly one subnstring list + ["output exec", "foo_out", "exist"], + ["input data", "in", "exist"], + ] + }, + ) + + def test_connections_end(self): + self._test_conformance( + "connections_end.od", + { + ("End", "end"): [ + ["input exec", "foo_in", "exist"], + ["output exec", "foo_out", "exist"], + ["input data", "in", "multiple"], + ["input data", "out2", "exist"], + ["output data", "out", "exist"], + ] + }, + ) + + def test_connections_match(self): + self._test_conformance( + "connections_match.od", + { + ("Match", "m_foo"): [ + ["input exec", "foo_in", "exist"], + ["output exec", "foo", "exist"], + ["output exec", "fail", "multiple"], + ["input data", "foo_in", "exist"], + ["input data", "in", "multiple"], + ["output data", "foo_out", "exist"], + ] + }, + ) + + def test_connections_rewrite(self): + self._test_conformance( + "connections_rewrite.od", + { + ("Rewrite", "r_foo1"): [ + ["input exec", "foo_in", "exist"], + ["output exec", "foo", "exist"], + ], + ("Rewrite", "r_foo2"): [ + ["output exec", "out", "multiple"], + ["input data", "foo_in", "exist"], + ["input data", "in", "multiple"], + ["output data", "foo_out", "exist"], + ], + }, + ) + + def test_connections_action(self): + self._test_conformance( + "connections_action.od", + { + ("Action", "a_foo1"): [ + ["input exec", "foo_in", "exist"], + ["output exec", "out", "multiple"], + ["output exec", "foo", "exist"], + ["input data", "in1", "multiple"], + ], + ("Action", "a_foo2"): [ + ["input exec", "in", "exist"], + ["output exec", "out3", "multiple"], + ["output exec", "out", "exist"], + ["input data", "in", "exist"], + ["output data", "out", "exist"], + ], + }, + ) + + def test_connections_modify(self): + #TODO: + # see test_connections_merge + self._test_conformance( + "connections_modify.od", + { + ("Invalid source", "Conn_exec"): [], + ("Invalid target", "Conn_exec"): [], + ("Modify", "m_foo"): [ + ["input data", "foo_in", "exist"], + ["output data", "foo_out", "exist"], + ["input data", "in", "multiple"], + ], + ("Modify", "m_exec"): [ + ["input exec", "in", "exist"], + ["input exec", "in", "exist"], + ["output exec", "out", "exist"], + ] + }, + ) + + def test_connections_merge(self): + #TODO: + # mm: + # association Conn_exec [0..*] Exec -> Exec [0..*] { + # ...; + # } + # m: + # Conn_exec ( Data -> Exec) {...;} -> Invalid source type 'Merge' for link '__Conn_exec_3:Conn_exec' (1) + # -> Invalid target type 'End' for link '__Conn_exec_3:Conn_exec' (2) + # Conn_exec ( Exec -> Data) {...;} -> No error at all, inconsistent and unexpected behaviour (3) + # different combinations behave unexpected + + self._test_conformance( + "connections_merge.od", + { + ("Invalid source", "Conn_exec"): [], # (1), expected + ("Invalid target", "Conn_exec"): [], # (2), invalid error, should not be shown + ("Merge", "m_foo"): [ + ["input data", "foo_in", "exist"], + ["input data", "in2", "multiple"], + ["output data", "foo_out", "exist"], + ], + ("Merge", "m_exec"): [ # (3), checked in Merge itself + ["input exec", "in", "exist"], + ["output exec", "out", "exist"], + ], + }, + ) + + def test_connections_store(self): + self._test_conformance( + "connections_store.od", + { + ("Store", "s_foo"): [ + ["input 
exec", "foo", "exist"], + ["output exec", "out", "multiple"], + ["output exec", "foo", "exist"], + ["input data", "foo_in", "exist"], + ["output data", "foo_out", "exist"], + ["input data", "2", "multiple"], + ], + }, + ) + + def test_connections_schedule(self): + self._test_conformance( + "connections_schedule.od", + { + ("Schedule", "s_foo"): [ + ["output exec", "out", "multiple"], + ["input data", "in2", "multiple"], + ] + }, + ) + + def test_connections_loop(self): + self._test_conformance( + "connections_loop.od", + { + ("Loop", "l_foo"): [ + ["input exec", "foo_in", "exist"], + ["output exec", "out", "multiple"], + ["output exec", "foo", "exist"], + ["input data", "foo_in", "exist"], + ["output data", "foo_out", "exist"], + ["input data", "in", "multiple"], + ] + }, + ) + + def test_connections_print(self): + self._test_conformance( + "connections_print.od", + { + ("Print", "p_foo"): [ + ["input exec", "foo_in", "exist"], + ["output exec", "out", "multiple"], + ["output exec", "foo", "exist"], + ["input data", "foo_in", "exist"], + ["output data", "out", "exist"], + ["input data", "in", "multiple"], + ] + }, + ) + + def test_fields_start(self): + self._test_conformance( + "fields_start.od", + { + ("Start", "Cardinality"): [], + ("Start", "string"): [ + ["Unexpected type", "ports_exec_out", "str"], + ["Unexpected type", "ports_data_out", "str"], + ], + ("Start", '"int"'): [ # included " to avoid flakey test + ["Unexpected type", "ports_exec_out", "int"], + ["Unexpected type", "ports_data_out", "int"], + ], + ("Start", "tuple"): [ + ["Unexpected type", "ports_exec_out", "tuple"], + ["Unexpected type", "ports_data_out", "tuple"], + ], + ("Start", "dict"): [ + ["Unexpected type", "ports_exec_out", "dict"], + ["Unexpected type", "ports_data_out", "dict"], + ], + ("Start", "none"): [ + ["Unexpected type", "ports_exec_out", "NoneType"], + ["Unexpected type", "ports_data_out", "NoneType"], + ], + ("Start", "invalid"): [ + ["Invalid python", "ports_exec_out"], + ["Invalid python", "ports_data_out"], + ], + ("Start", "subtype"): [ + ["Unexpected type", "ports_exec_out", "list"], + ["Unexpected type", "ports_data_out", "list"], + ], + ("Start", "code"): [ + ["Unexpected type", "ports_exec_out"], + ["Unexpected type", "ports_data_out"], + ], + }, + ) + + def test_fields_end(self): + self._test_conformance( + "fields_end.od", + { + ("End", "Cardinality"): [], + ("End", "string"): [ + ["Unexpected type", "ports_exec_in", "str"], + ["Unexpected type", "ports_data_in", "str"], + ], + ("End", '"int"'): [ + ["Unexpected type", "ports_exec_in", "int"], + ["Unexpected type", "ports_data_in", "int"], + ], + ("End", "tuple"): [ + ["Unexpected type", "ports_exec_in", "tuple"], + ["Unexpected type", "ports_data_in", "tuple"], + ], + ("End", "dict"): [ + ["Unexpected type", "ports_exec_in", "dict"], + ["Unexpected type", "ports_data_in", "dict"], + ], + ("End", "none"): [ + ["Unexpected type", "ports_exec_in", "NoneType"], + ["Unexpected type", "ports_data_in", "NoneType"], + ], + ("End", "invalid"): [ + ["Invalid python", "ports_exec_in"], + ["Invalid python", "ports_data_in"], + ], + ("End", "subtype"): [ + ["Unexpected type", "ports_exec_in", "list"], + ["Unexpected type", "ports_data_in", "list"], + ], + ("End", "code"): [ + ["Unexpected type", "ports_exec_in"], + ["Unexpected type", "ports_data_in"], + ], + }, + ) + + def test_fields_action(self): + self._test_conformance( + "fields_action.od", + { + ("cardinality", "Action_action"): [], + ("Action", "string"): [ + ["Unexpected type", "ports_exec_out", 
"str"], + ["Unexpected type", "ports_exec_in", "str"], + ["Unexpected type", "ports_data_out", "str"], + ["Unexpected type", "ports_data_in", "str"], + ], + ("Action", '"int"'): [ + ["Unexpected type", "ports_exec_out", "int"], + ["Unexpected type", "ports_exec_in", "int"], + ["Unexpected type", "ports_data_out", "int"], + ["Unexpected type", "ports_data_in", "int"], + ], + ("Action", "tuple"): [ + ["Unexpected type", "ports_exec_out", "tuple"], + ["Unexpected type", "ports_exec_in", "tuple"], + ["Unexpected type", "ports_data_out", "tuple"], + ["Unexpected type", "ports_data_in", "tuple"], + ], + ("Action", "dict"): [ + ["Unexpected type", "ports_exec_out", "dict"], + ["Unexpected type", "ports_exec_in", "dict"], + ["Unexpected type", "ports_data_out", "dict"], + ["Unexpected type", "ports_data_in", "dict"], + ], + ("Action", "none"): [ + ["Unexpected type", "ports_exec_out", "NoneType"], + ["Unexpected type", "ports_exec_in", "NoneType"], + ["Unexpected type", "ports_data_out", "NoneType"], + ["Unexpected type", "ports_data_in", "NoneType"], + ], + ('"Action"', '"invalid"'): [ + ["Invalid python", "ports_exec_out"], + ["Invalid python", "ports_exec_in"], + ["Invalid python", "ports_data_out"], + ["Invalid python", "ports_data_in"], + ], + ('"Action_action"', '"invalid_action"'): [ + ["Invalid python code"], + ["line"], + ], + ("Action", "subtype"): [ + ["Unexpected type", "ports_exec_out", "list"], + ["Unexpected type", "ports_exec_in", "list"], + ["Unexpected type", "ports_data_out", "list"], + ["Unexpected type", "ports_data_in", "list"], + ], + ("Action", "code"): [ + ["Unexpected type", "ports_exec_out"], + ["Unexpected type", "ports_exec_in"], + ["Unexpected type", "ports_data_out"], + ["Unexpected type", "ports_data_in"], + ], + }, + ) + + def test_fields_modify(self): + self._test_conformance( + "fields_modify.od", + { + ("Modify", "string"): [ + ["Unexpected type", "rename", "str"], + ["Unexpected type", "delete", "str"], + ], + ("Modify", "list"): [["Unexpected type", "rename", "list"]], + ("Modify", "set"): [["Unexpected type", "rename", "set"]], + ("Modify", "tuple"): [ + ["Unexpected type", "rename", "tuple"], + ["Unexpected type", "delete", "tuple"], + ], + ("Modify", "dict"): [["Unexpected type", "delete", "dict"]], + ("Modify", "none"): [ + ["Unexpected type", "rename", "NoneType"], + ["Unexpected type", "delete", "NoneType"], + ], + ("Modify", "invalid"): [ + ["Invalid python", "rename"], + ["Invalid python", "delete"], + ], + ("Modify", "subtype"): [ + ["Unexpected type", "rename", "dict"], + ["Unexpected type", "delete", "list"], + ], + ("Modify", "code"): [ + ["Unexpected type", "rename"], + ["Unexpected type", "delete"], + ], + ("Modify", "joined"): [["rename", "delete", "disjoint"]], + }, + ) + + def test_fields_merge(self): + self._test_conformance( + "fields_merge.od", + { + ("cardinality", "Merge_ports_data_in"): [], + ("Merge", "string"): [["Unexpected type", "ports_data_in", "str"]], + ("Merge", "tuple"): [["Unexpected type", "ports_data_in", "tuple"]], + ("Merge", "dict"): [["Unexpected type", "ports_data_in", "dict"]], + ("Merge", "none"): [["Unexpected type", "ports_data_in", "NoneType"]], + ("Merge", "invalid"): [["Invalid python", "ports_data_in"]], + ("Merge", "subtype"): [["Unexpected type", "ports_data_in", "list"]], + ("Merge", "code"): [["Unexpected type", "ports_data_in"]], + ("Merge", "no"): [["Missing", "slot", "ports_data_in"]], + }, + ) + + def test_fields_store(self): + self._test_conformance( + "fields_store.od", + { + ("cardinality", 
"Store_ports"): [], + ("Store", "string"): [["Unexpected type", "ports", "str"]], + ("Store", "tuple"): [["Unexpected type", "ports", "tuple"]], + ("Store", "dict"): [["Unexpected type", "ports", "dict"]], + ("Store", "none"): [["Unexpected type", "ports", "NoneType"]], + ("Store", "invalid"): [["Invalid python", "ports"]], + ("Store", "subtype"): [["Unexpected type", "ports", "list"]], + ("Store", "code"): [["Unexpected type", "ports"]], + ("Store", "no"): [["Missing", "slot", "ports"]], + }, + ) + + def test_fields_print(self): + self._test_conformance( + "fields_print.od", + { + ("Print_custom", "list_custom"): [["Unexpected type", "custom", "list"]], + ("Print_custom", "set_custom"): [["Unexpected type", "custom", "set"]], + ("Print_custom", "tuple_custom"): [["Unexpected type", "custom", "tuple"]], + ("Print_custom", "dict_custom"): [["Unexpected type", "custom", "dict"]], + ("Print_custom", "none_custom"): [["Unexpected type", "custom", "NoneType"]], + ("Print_custom", "invalid_custom"): [["Invalid python", "custom"]], + ("Print_custom", "subtype_custom"): [["Unexpected type", "custom", "list"]], + ("Print_custom", "code_custom"): [["Unexpected type", "custom"]], + }, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/transformation/schedule/Tests/Test_xmlparser.py b/transformation/schedule/Tests/Test_xmlparser.py new file mode 100644 index 0000000..b3b8a08 --- /dev/null +++ b/transformation/schedule/Tests/Test_xmlparser.py @@ -0,0 +1,43 @@ +import os +import unittest + +from transformation.schedule.rule_scheduler import RuleScheduler +from state.devstate import DevState + + +class MyTestCase(unittest.TestCase): + def setUp(self): + state = DevState() + self.generator = RuleScheduler(state, "", "") + + def test_empty(self): + try: + self.generator.generate_schedule( + f"{os.path.dirname(__file__)}/drawio/Empty.drawio" + ) + # buffer = io.BytesIO() + # self.generator.generate_dot(buffer) + except Exception as e: + assert False + + def test_simple(self): + try: + self.generator.generate_schedule( + f"{os.path.dirname(__file__)}/drawio/StartToEnd.drawio" + ) + # buffer = io.BytesIO() + # self.generator.generate_dot(buffer) + except Exception as e: + assert False + + # def test_unsupported(self): + # try: + # self.generator.generate_schedule("Tests/drawio/Unsupported.drawio") + # # buffer = io.BytesIO() + # # self.generator.generate_dot(buffer) + # except Exception as e: + # assert(False) + + +if __name__ == "__main__": + unittest.main() diff --git a/transformation/schedule/Tests/drawio/Empty.drawio b/transformation/schedule/Tests/drawio/Empty.drawio new file mode 100644 index 0000000..b025fbc --- /dev/null +++ b/transformation/schedule/Tests/drawio/Empty.drawio @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/transformation/schedule/Tests/drawio/StartToEnd.drawio b/transformation/schedule/Tests/drawio/StartToEnd.drawio new file mode 100644 index 0000000..c381120 --- /dev/null +++ b/transformation/schedule/Tests/drawio/StartToEnd.drawio @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/transformation/schedule/Tests/drawio/Unsupported.drawio b/transformation/schedule/Tests/drawio/Unsupported.drawio new file mode 100644 index 0000000..a9cf0fb --- /dev/null +++ b/transformation/schedule/Tests/drawio/Unsupported.drawio @@ -0,0 +1,75 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/transformation/schedule/Tests/models/m_petrinet.od b/transformation/schedule/Tests/models/m_petrinet.od new file mode 100644 index 0000000..f93a58b --- /dev/null +++ b/transformation/schedule/Tests/models/m_petrinet.od @@ -0,0 +1,22 @@ +p0:PNPlace +p1:PNPlace + +t0:PNTransition +:arc (p0 -> t0) +:arc (t0 -> p1) + +t1:PNTransition +:arc (p1 -> t1) +:arc (t1 -> p0) + +p0s:PNPlaceState { + numTokens = 1; +} + +:pn_of (p0s -> p0) + +p1s:PNPlaceState { + numTokens = 0; +} + +:pn_of (p1s -> p1) diff --git a/transformation/schedule/Tests/models/mm_petrinet.od b/transformation/schedule/Tests/models/mm_petrinet.od new file mode 100644 index 0000000..22986c3 --- /dev/null +++ b/transformation/schedule/Tests/models/mm_petrinet.od @@ -0,0 +1,31 @@ +# Places, transitions, arcs (and only one kind of arc) + +PNConnectable:Class { abstract = True; } + +arc:Association (PNConnectable -> PNConnectable) + +PNPlace:Class +PNTransition:Class + +# inhibitor arc +inh_arc:Association (PNPlace -> PNTransition) + +:Inheritance (PNPlace -> PNConnectable) +:Inheritance (PNTransition -> PNConnectable) + +# A place has a number of tokens, and that's it. + +PNPlaceState:Class +PNPlaceState_numTokens:AttributeLink (PNPlaceState -> Integer) { + name = "numTokens"; + optional = False; + constraint = `"numTokens cannot be negative" if get_value(get_target(this)) < 0 else None`; +} + +pn_of:Association (PNPlaceState -> PNPlace) { + # one-to-one + source_lower_cardinality = 1; + source_upper_cardinality = 1; + target_lower_cardinality = 1; + target_upper_cardinality = 1; +} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/rules/transitions.od b/transformation/schedule/Tests/models/rules/transitions.od new file mode 100644 index 0000000..1b87f1d --- /dev/null +++ b/transformation/schedule/Tests/models/rules/transitions.od @@ -0,0 +1,13 @@ +# A place with no tokens: + +p:RAM_PNPlace +ps:RAM_PNPlaceState { + RAM_numTokens = `True`; +} +:RAM_pn_of (ps -> p) + +# An incoming arc from that place to our transition: + +t:RAM_PNTransition + +:RAM_arc (p -> t) diff --git a/transformation/schedule/Tests/models/schedule/connections_action.od b/transformation/schedule/Tests/models/schedule/connections_action.od new file mode 100644 index 0000000..02cb3cf --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_action.od @@ -0,0 +1,62 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +a_void:Action{ + ports_data_in = `["in1", "in2"]`; + ports_data_out = `["out1", "out2"]`; + action=`print("hello foo1")`; +} + +a_foo1:Action{ + ports_data_in = `["in1", "in2"]`; + ports_data_out = `["out1", "out2"]`; + action=`print("hello foo1")`; +} + +a_foo2:Action{ + ports_exec_in = `["in2"]`; + ports_exec_out = `["out2", "out3"]`; + action=`print("hello foo2")`; +} + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> a_foo1) {from="success";to="in";} +:Conn_exec (m2 -> a_foo1) {from="fail";to="in";} +:Conn_exec (m3 -> a_foo1) {from="success";to="foo_in";} +:Conn_exec (m3 -> a_foo2) {from="fail";to="in2";} + +:Conn_exec (a_foo1 -> a_foo2) {from="out";to="in";} +:Conn_exec (a_foo1 -> a_foo2) {from="out";to="in2";} +:Conn_exec (a_foo1 -> a_foo2) {from="foo";to="in2";} +:Conn_exec (a_foo2 -> end) 
{from="out";to="in";} +:Conn_exec (a_foo2 -> end) {from="out2";to="in";} +:Conn_exec (a_foo2 -> end) {from="out3";to="in";} +:Conn_exec (a_foo2 -> end) {from="out3";to="in";} + +:Conn_data (start -> a_foo2) {from="1";to="in";} +:Conn_data (a_foo2-> m2) {from="out";to="in";} + +:Conn_data (start -> a_foo1) {from="1";to="in1";} +:Conn_data (start -> a_foo1) {from="2";to="in1";} +:Conn_data (start -> a_foo1) {from="3";to="in2";} +:Conn_data (a_foo1 -> end) {from="out1";to="1";} +:Conn_data (a_foo1 -> end) {from="out1";to="2";} +:Conn_data (a_foo1 -> end) {from="out2";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_end.od b/transformation/schedule/Tests/models/schedule/connections_end.od new file mode 100644 index 0000000..0bc355e --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_end.od @@ -0,0 +1,31 @@ +start:Start + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} +end:End { + ports_exec_in = `["out", "in"]`; + ports_data_in = `["out", "in"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> end) {from="success";to="in";} +:Conn_exec (m2 -> end) {from="fail";to="out";} +:Conn_exec (m3 -> end) {from="success";to="out";} +:Conn_exec (m3 -> end) {from="fail";to="foo_in";} +:Conn_exec (end -> m) {from="foo_out";to="in";} + +:Conn_data (m -> end) {from="out";to="in";} +:Conn_data (m2 -> end) {from="out";to="in";} +:Conn_data (m3 -> end) {from="out";to="out";} +:Conn_data (m3 -> end) {from="out";to="out2";} +:Conn_data (end -> m) {from="out";to="in";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_loop.od b/transformation/schedule/Tests/models/schedule/connections_loop.od new file mode 100644 index 0000000..922281a --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_loop.od @@ -0,0 +1,44 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +l:Loop +l_foo:Loop +l_void:Loop + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> l_foo) {from="success";to="in";} +:Conn_exec (m2 -> l_foo) {from="fail";to="in";} +:Conn_exec (m3 -> l_foo) {from="success";to="foo_in";} + +:Conn_exec (l_foo -> l_foo) {from="out";to="in";} +:Conn_exec (l_foo -> end) {from="out";to="in";} +:Conn_exec (l_foo -> end) {from="it";to="in";} +:Conn_exec (l_foo -> end) {from="foo";to="in";} + +:Conn_data (start -> l) {from="1";to="in";} +:Conn_data (l -> m2) {from="out";to="in";} + +:Conn_data (start -> l_foo) {from="1";to="in";} +:Conn_data (start -> l_foo) {from="2";to="in";} +:Conn_data (start -> l_foo) {from="3";to="foo_in";} +:Conn_data (l_foo -> end) {from="out";to="1";} +:Conn_data (l_foo -> end) {from="out";to="2";} +:Conn_data (l_foo -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_match.od b/transformation/schedule/Tests/models/schedule/connections_match.od new file mode 100644 index 0000000..63a7f44 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_match.od 
@@ -0,0 +1,49 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +m_foo:Match{ + file="rules/transition.od"; +} + +m_void:Match{ + file="rules/transition.od"; +} + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> m_foo) {from="success";to="in";} +:Conn_exec (m2 -> m_foo) {from="fail";to="in";} +:Conn_exec (m3 -> m_foo) {from="success";to="foo_in";} +:Conn_exec (m3 -> m_foo) {from="fail";to="in";} + +:Conn_exec (m_foo -> end) {from="fail";to="in";} +:Conn_exec (m_foo -> end) {from="success";to="in";} +:Conn_exec (m_foo -> end) {from="fail";to="in";} +:Conn_exec (m_foo -> end) {from="foo";to="in";} + +:Conn_data (start -> m) {from="1";to="in";} +:Conn_data (m -> m2) {from="out";to="in";} + +:Conn_data (start -> m_foo) {from="1";to="in";} +:Conn_data (start -> m_foo) {from="2";to="in";} +:Conn_data (start -> m_foo) {from="3";to="foo_in";} +:Conn_data (m_foo -> end) {from="out";to="1";} +:Conn_data (m_foo -> end) {from="out";to="2";} +:Conn_data (m_foo -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_merge.od b/transformation/schedule/Tests/models/schedule/connections_merge.od new file mode 100644 index 0000000..8144496 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_merge.od @@ -0,0 +1,44 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +m_exec:Merge { + ports_data_in = `["in1", "in2"]`; +} + +m_foo:Merge { + ports_data_in = `["in1", "in2"]`; +} + +m_void:Merge { + ports_data_in = `["in1", "in2"]`; +} + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> m_exec) {from="success";to="in";} +:Conn_exec (m_exec -> end) {from="out";to="in";} + +:Conn_data (start -> m_foo) {from="1";to="in1";} +:Conn_data (start -> m_foo) {from="1";to="in2";} +:Conn_data (start -> m_foo) {from="2";to="in2";} +:Conn_data (start -> m_foo) {from="3";to="foo_in";} +:Conn_data (m_foo -> end) {from="out";to="1";} +:Conn_data (m_foo -> end) {from="out";to="2";} +:Conn_data (m_foo -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_modify.od b/transformation/schedule/Tests/models/schedule/connections_modify.od new file mode 100644 index 0000000..9027d0c --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_modify.od @@ -0,0 +1,42 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +m_exec:Modify +m_foo:Modify +m_void:Modify + +mo:Modify + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> m_exec) {from="success";to="in";} +:Conn_exec (m2 -> m_exec) {from="fail";to="in";} + +:Conn_exec (m_exec -> end) 
{from="out";to="in";} + +:Conn_data (start -> mo) {from="1";to="in";} +:Conn_data (mo -> m2) {from="out";to="in";} + +:Conn_data (start -> m_foo) {from="1";to="in";} +:Conn_data (start -> m_foo) {from="2";to="in";} +:Conn_data (start -> m_foo) {from="3";to="foo_in";} +:Conn_data (m_foo -> end) {from="out";to="1";} +:Conn_data (m_foo -> end) {from="out";to="2";} +:Conn_data (m_foo -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_print.od b/transformation/schedule/Tests/models/schedule/connections_print.od new file mode 100644 index 0000000..9bf9126 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_print.od @@ -0,0 +1,41 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +p_foo:Print +p_void:Print + +p:Print + +end:End + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> p_foo) {from="success";to="in";} +:Conn_exec (m2 -> p_foo) {from="fail";to="in";} +:Conn_exec (m3 -> p_foo) {from="success";to="foo_in";} +:Conn_exec (m3 -> p) {from="fail";to="in";} +:Conn_exec (p -> end) {from="out";to="in";} + +:Conn_exec (p_foo -> p_foo) {from="out";to="in";} +:Conn_exec (p_foo -> end) {from="out";to="in";} +:Conn_exec (p_foo -> end) {from="foo";to="in";} + +:Conn_data (start -> p) {from="1";to="in";} + +:Conn_data (start -> p_foo) {from="1";to="in";} +:Conn_data (start -> p_foo) {from="2";to="in";} +:Conn_data (start -> p_foo) {from="3";to="foo_in";} +:Conn_data (p_foo -> m2) {from="out";to="in";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_rewrite.od b/transformation/schedule/Tests/models/schedule/connections_rewrite.od new file mode 100644 index 0000000..7e1b018 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_rewrite.od @@ -0,0 +1,52 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +r_foo1:Rewrite{ + file="rules/transition.od"; +} + +r_foo2:Rewrite{ + file="rules/transition.od"; +} +r_void:Rewrite{ + file="rules/transition.od"; +} + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> r_foo1) {from="success";to="in";} +:Conn_exec (m2 -> r_foo1) {from="fail";to="in";} +:Conn_exec (m3 -> r_foo1) {from="success";to="foo_in";} +:Conn_exec (m3 -> r_foo1) {from="fail";to="in";} + +:Conn_exec (r_foo1 -> r_foo2) {from="out";to="in";} +:Conn_exec (r_foo1 -> end) {from="foo";to="in";} +:Conn_exec (r_foo2 -> end) {from="out";to="in";} +:Conn_exec (r_foo2 -> end) {from="out";to="in";} + +:Conn_data (start -> r_foo1) {from="1";to="in";} +:Conn_data (r_foo1-> m2) {from="out";to="in";} + +:Conn_data (start -> r_foo2) {from="1";to="in";} +:Conn_data (start -> r_foo2) {from="2";to="in";} +:Conn_data (start -> r_foo2) {from="3";to="foo_in";} +:Conn_data (r_foo2 -> end) {from="out";to="1";} +:Conn_data (r_foo2 -> end) {from="out";to="2";} +:Conn_data (r_foo2 -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git 
a/transformation/schedule/Tests/models/schedule/connections_schedule.od b/transformation/schedule/Tests/models/schedule/connections_schedule.od new file mode 100644 index 0000000..a2e3c25 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_schedule.od @@ -0,0 +1,50 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +s_foo:Schedule{ + file="hello.od"; +} + +s_void:Schedule{ + file="hello.od"; +} + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> s_foo) {from="success";to="in";} +:Conn_exec (m2 -> s_foo) {from="fail";to="in";} +:Conn_exec (m3 -> s_foo) {from="success";to="foo";} +:Conn_exec (m3 -> s_foo) {from="fail";to="foo2";} + +:Conn_exec (s_foo -> s_foo) {from="out";to="in";} +:Conn_exec (s_foo -> s_foo) {from="out";to="in2";} +:Conn_exec (s_foo -> s_foo) {from="foo";to="foo3";} +:Conn_exec (s_foo -> end) {from="out4";to="in";} +:Conn_exec (s_foo -> end) {from="out2";to="in";} +:Conn_exec (s_foo -> end) {from="out5";to="in";} +:Conn_exec (s_foo -> end) {from="out3";to="in";} + +:Conn_data (start -> s_foo) {from="1";to="in1";} +:Conn_data (start -> s_foo) {from="1";to="in2";} +:Conn_data (start -> s_foo) {from="2";to="in2";} +:Conn_data (start -> s_foo) {from="3";to="foo_in";} +:Conn_data (s_foo -> end) {from="out";to="1";} +:Conn_data (s_foo -> end) {from="out";to="2";} +:Conn_data (s_foo -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_start.od b/transformation/schedule/Tests/models/schedule/connections_start.od new file mode 100644 index 0000000..2ade389 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_start.od @@ -0,0 +1,27 @@ +start:Start { + ports_exec_out = `["out", "in"]`; + ports_data_out = `["out", "in"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} +end:End + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (start -> m) {from="in";to="in";} +:Conn_exec (start -> m) {from="foo_out";to="in";} +:Conn_exec (m -> start) {from="fail";to="foo_in";} +:Conn_exec (m -> end) {from="success";to="in";} + +:Conn_data (start -> m) {from="out";to="in";} +:Conn_data (start -> m2) {from="out";to="in";} +:Conn_data (start -> m3) {from="in";to="in";} +:Conn_data (m -> start) {from="out";to="in";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/connections_store.od b/transformation/schedule/Tests/models/schedule/connections_store.od new file mode 100644 index 0000000..a3e4477 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/connections_store.od @@ -0,0 +1,47 @@ +start:Start { + ports_data_out = `["1", "2", "3"]`; +} + +m:Match{ + file="rules/transition.od"; +} +m2:Match{ + file="rules/transition.od"; +} +m3:Match{ + file="rules/transition.od"; +} + +s_foo:Store { + ports = `["1", "2", "3"]`; +} + +s_void:Store { + ports = `["1", "2", "3"]`; +} + +end:End { + ports_data_in = `["1", "2", "3"]`; +} + +:Conn_exec (start -> m) {from="out";to="in";} +:Conn_exec (m -> m2) {from="fail";to="in";} +:Conn_exec (m -> m3) {from="success";to="in";} + +:Conn_exec (m2 -> 
s_foo) {from="success";to="in";} +:Conn_exec (m2 -> s_foo) {from="fail";to="in";} +:Conn_exec (m3 -> s_foo) {from="success";to="1";} +:Conn_exec (m3 -> s_foo) {from="fail";to="foo";} + +:Conn_exec (s_foo -> end) {from="out";to="in";} +:Conn_exec (s_foo -> s_foo) {from="1";to="2";} +:Conn_exec (s_foo -> end) {from="out";to="in";} +:Conn_exec (s_foo -> s_foo) {from="foo";to="2";} + +:Conn_data (start -> s_foo) {from="1";to="1";} +:Conn_data (start -> s_foo) {from="1";to="2";} +:Conn_data (start -> s_foo) {from="2";to="2";} +:Conn_data (start -> s_foo) {from="3";to="foo_in";} +:Conn_data (s_foo -> end) {from="out";to="1";} +:Conn_data (s_foo -> end) {from="out";to="2";} +:Conn_data (s_foo -> end) {from="foo_out";to="3";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_action.od b/transformation/schedule/Tests/models/schedule/fields_action.od new file mode 100644 index 0000000..6770059 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_action.od @@ -0,0 +1,83 @@ +string:Action { + ports_exec_in = `'["out", "in"]'`; + ports_exec_out = `'["out", "in"]'`; + ports_data_in = `'["out", "in"]'`; + ports_data_out = `'["out", "in"]'`; + action = `'["out", "in"]'`; +} + +int:Action { + ports_exec_in = `123`; + ports_exec_out = `123`; + ports_data_in = `123`; + ports_data_out = `123`; + action = `123`; +} + +list:Action { + ports_exec_out = `["out", "in"]`; + ports_exec_in = `["out", "in"]`; + ports_data_out = `["out", "in"]`; + ports_data_in = `["out", "in"]`; + action = `["out", "in"]`; +} +set:Action { + ports_exec_in = `{"out", "in"}`; + ports_exec_out = `{"out", "in"}`; + ports_data_in = `{"out", "in"}`; + ports_data_out = `{"out", "in"}`; + action = `{"out", "in"}`; +} + +tuple:Action { + ports_exec_in = `("out", "in")`; + ports_exec_out = `("out", "in")`; + ports_data_in = `("out", "in")`; + ports_data_out = `("out", "in")`; + action = `("out", "in")`; +} + +dict:Action { + ports_exec_in = `{"out": "in"}`; + ports_exec_out = `{"out": "in"}`; + ports_data_in = `{"out": "in"}`; + ports_data_out = `{"out": "in"}`; + action = `{"out": "in"}`; +} + +none:Action { + ports_exec_in = `None`; + ports_exec_out = `None`; + ports_data_in = `None`; + ports_data_out = `None`; + action = `None`; +} + +invalid:Action { + ports_exec_in = `[{a(0)['qkja("fyvka`; + ports_exec_out = `[{a(0)['qkja("fyvka`; + ports_data_in = `["", [{]]`; + ports_data_out = `["", [{]]`; + action = `hu(ja&{]8}]`; +} + +subtype:Action { + ports_exec_in = `[1, 2]`; + ports_exec_out = `[1, 2]`; + ports_data_in = `[1, 2]`; + ports_data_out = `[1, 2]`; + action = `[1, 2]`; +} + +code:Action { + ports_exec_in = `print("hello world")`; + ports_exec_out = `print("hello world")`; + ports_data_in = `print("hello world")`; + ports_data_out = `print("hello world")`; + action = `print("hello world")`; +} + +no:Action + +start:Start +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_end.od b/transformation/schedule/Tests/models/schedule/fields_end.od new file mode 100644 index 0000000..22a26ee --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_end.od @@ -0,0 +1,52 @@ +start:Start + +string:End { + ports_exec_in = `'["out", "in"]'`; + ports_data_in = `'["out", "in"]'`; +} + +int:End { + ports_exec_in = `123`; + ports_data_in = `123`; +} + +list:End { + ports_exec_in = `["out", "in"]`; + ports_data_in = `["out", "in"]`; +} +set:End { + ports_exec_in = `{"out", "in"}`; + ports_data_in = `{"out", "in"}`; +} + 
+tuple:End { + ports_exec_in = `("out", "in")`; + ports_data_in = `("out", "in")`; +} + +dict:End { + ports_exec_in = `{"out": "in"}`; + ports_data_in = `{"out": "in"}`; +} + +none:End { + ports_exec_in = `None`; + ports_data_in = `None`; +} + +invalid:End { + ports_exec_in = `[{a(0)['qkja("fyvka`; + ports_data_in = `["", [{]]`; +} + +subtype:End { + ports_exec_in = `[1, 2]`; + ports_data_in = `[1, 2]`; +} + +code:End { + ports_exec_in = `print("hello world")`; + ports_data_in = `print("hello world")`; +} + +no:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_merge.od b/transformation/schedule/Tests/models/schedule/fields_merge.od new file mode 100644 index 0000000..18e3307 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_merge.od @@ -0,0 +1,39 @@ +string:Merge { + ports_data_in = `'["out", "in"]'`; +} + +list:Merge { + ports_data_in = `["out", "in"]`; +} +set:Merge { + ports_data_in = `{"out", "in"}`; +} + +tuple:Merge { + ports_data_in = `("out", "in")`; +} + +dict:Merge { + ports_data_in = `{"out": "in"}`; +} + +none:Merge { + ports_data_in = `None`; +} + +invalid:Merge { + ports_data_in = `["", [{]]`; +} + +subtype:Merge { + ports_data_in = `[1, 2]`; +} + +code:Merge { + ports_data_in = `print("hello world")`; +} + +no:Merge + +start:Start +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_modify.od b/transformation/schedule/Tests/models/schedule/fields_modify.od new file mode 100644 index 0000000..5730efb --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_modify.od @@ -0,0 +1,51 @@ +string:Modify { + rename = `'["out", "in"]'`; + delete = `'["out", "in"]'`; +} + +list:Modify { + rename = `["out", "in"]`; + delete = `["out", "in"]`; +} +set:Modify { + rename = `{"out", "in"}`; + delete = `{"out", "in"}`; +} + +tuple:Modify { + rename = `("out", "in")`; + delete = `("out", "in")`; +} + +dict:Modify { + rename = `{"out": "in"}`; + delete = `{"out": "in"}`; +} + +none:Modify { + rename = `None`; + delete = `None`; +} + +invalid:Modify { + rename = `[{a(0)['qkja("fyvka`; + delete = `["", [{]]`; +} + +subtype:Modify { + rename = `{1: 2}`; + delete = `[1, 2]`; +} + +code:Modify { + rename = `print("hello world")`; + delete = `print("hello world")`; +} + +joined:Modify { + rename = `{"a":"1", "b":"2", "c":"3"}`; + delete = `{"a", "d"}`; +} + +start:Start +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_print.od b/transformation/schedule/Tests/models/schedule/fields_print.od new file mode 100644 index 0000000..d520e44 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_print.od @@ -0,0 +1,39 @@ +string:Print { + custom = `'["port_out", "port_in"]'`; +} + +list:Print { + custom = `["port_out", "port_in"]`; +} +set:Print { + custom = `{"port_out", "port_in"}`; +} + +tuple:Print { + custom = `("port_out", "port_in")`; +} + +dict:Print { + custom = `{"port_out": "port_in"}`; +} + +none:Print { + custom = `None`; +} + +invalid:Print { + custom = `["", [{]]`; +} + +subtype:Print { + custom = `[1, 2]`; +} + +code:Print { + custom = `print("hello world")`; +} + +no:Print + +start:Start +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_start.od b/transformation/schedule/Tests/models/schedule/fields_start.od new file mode 100644 index 0000000..c82ea91 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_start.od 
@@ -0,0 +1,52 @@ +string:Start { + ports_exec_out = `'["out", "in"]'`; + ports_data_out = `'["out", "in"]'`; +} + +int:Start { + ports_exec_out = `123`; + ports_data_out = `123`; +} + +list:Start { + ports_exec_out = `["out", "in"]`; + ports_data_out = `["out", "in"]`; +} +set:Start { + ports_exec_out = `{"out", "in"}`; + ports_data_out = `{"out", "in"}`; +} + +tuple:Start { + ports_exec_out = `("out", "in")`; + ports_data_out = `("out", "in")`; +} + +dict:Start { + ports_exec_out = `{"out": "in"}`; + ports_data_out = `{"out": "in"}`; +} + +none:Start { + ports_exec_out = `None`; + ports_data_out = `None`; +} + +invalid:Start { + ports_exec_out = `[{a(0)['qkja("fyvka`; + ports_data_out = `["", [{]]`; +} + +subtype:Start { + ports_exec_out = `[1, 2]`; + ports_data_out = `[1, 2]`; +} + +code:Start { + ports_exec_out = `print("hello world")`; + ports_data_out = `print("hello world")`; +} + +no:Start + +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/fields_store.od b/transformation/schedule/Tests/models/schedule/fields_store.od new file mode 100644 index 0000000..ec1f38c --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/fields_store.od @@ -0,0 +1,39 @@ +string:Store { + ports = `'["port_out", "port_in"]'`; +} + +list:Store { + ports = `["port_out", "port_in"]`; +} +set:Store { + ports = `{"port_out", "port_in"}`; +} + +tuple:Store { + ports = `("port_out", "port_in")`; +} + +dict:Store { + ports = `{"port_out": "port_in"}`; +} + +none:Store { + ports = `None`; +} + +invalid:Store { + ports = `["", [{]]`; +} + +subtype:Store { + ports = `[1, 2]`; +} + +code:Store { + ports = `print("hello world")`; +} + +no:Store + +start:Start +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/multiple_end.od b/transformation/schedule/Tests/models/schedule/multiple_end.od new file mode 100644 index 0000000..ae3651f --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/multiple_end.od @@ -0,0 +1,5 @@ +start:Start +end:End +end2:End + +:Conn_exec (start -> end) {from="out";to="in";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/multiple_start.od b/transformation/schedule/Tests/models/schedule/multiple_start.od new file mode 100644 index 0000000..0a869c8 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/multiple_start.od @@ -0,0 +1,5 @@ +start:Start +start2:Start +end:End + +:Conn_exec (start -> end) {from="out";to="in";} \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/no_end.od b/transformation/schedule/Tests/models/schedule/no_end.od new file mode 100644 index 0000000..e58e470 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/no_end.od @@ -0,0 +1 @@ +start:Start \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/no_start.od b/transformation/schedule/Tests/models/schedule/no_start.od new file mode 100644 index 0000000..36a7d96 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/no_start.od @@ -0,0 +1 @@ +end:End \ No newline at end of file diff --git a/transformation/schedule/Tests/models/schedule/start_end.od b/transformation/schedule/Tests/models/schedule/start_end.od new file mode 100644 index 0000000..bf51e88 --- /dev/null +++ b/transformation/schedule/Tests/models/schedule/start_end.od @@ -0,0 +1,3 @@ +start:Start +end:End +:Conn_exec (start -> end) {from="out";to="in";} \ No newline at end of file diff --git 
a/transformation/schedule/__init__.py b/transformation/schedule/__init__.py new file mode 100644 index 0000000..e69de29
diff --git a/transformation/schedule/doc/images/example_1.png b/transformation/schedule/doc/images/example_1.png new file mode 100644 index 0000000..8ea0451 Binary files /dev/null and b/transformation/schedule/doc/images/example_1.png differ
diff --git a/transformation/schedule/doc/images/example_2.png b/transformation/schedule/doc/images/example_2.png new file mode 100644 index 0000000..40994fd Binary files /dev/null and b/transformation/schedule/doc/images/example_2.png differ
diff --git a/transformation/schedule/doc/images/example_3.png b/transformation/schedule/doc/images/example_3.png new file mode 100644 index 0000000..d3092bb Binary files /dev/null and b/transformation/schedule/doc/images/example_3.png differ
diff --git a/transformation/schedule/doc/images/geraniums-main.png b/transformation/schedule/doc/images/geraniums-main.png new file mode 100644 index 0000000..42c7174 Binary files /dev/null and b/transformation/schedule/doc/images/geraniums-main.png differ
diff --git a/transformation/schedule/doc/images/geraniums-repot_flowers.png b/transformation/schedule/doc/images/geraniums-repot_flowers.png new file mode 100644 index 0000000..4a89de0 Binary files /dev/null and b/transformation/schedule/doc/images/geraniums-repot_flowers.png differ
diff --git a/transformation/schedule/doc/schedule.md b/transformation/schedule/doc/schedule.md new file mode 100644 index 0000000..8a1c6a6 --- /dev/null +++ b/transformation/schedule/doc/schedule.md @@ -0,0 +1,260 @@
+# Schedule Module
+
+This module is used to define and execute model transformations using a schedule in the muMLE framework.
+The development of this module is part of a research project by Robbe Teughels with Joeri Exelmans and Hans Vangheluwe.
+
+## Module Structure
+
+The entire module is wrapped in a single interface, [rule_scheduler.py](../rule_scheduler.py), responsible for loading, executing and optional functionality such as generating dot files.
+Loading schedules (.od and .drawio) requires compilation. All these transformations are grouped together in [generator.py](../generator.py).
+Interactions with the muMLE framework go through a custom interface, [rule_executor.py](../rule_executor.py), which reduces the coupling between the module and the framework.
+
+Schedules are compiled to Python files. These files have a fixed interface defined in [schedule.pyi](../schedule.pyi).
+This interface includes the functionality to set up the schedule structure and to link patterns or other schedules from the module interface to the nodes.
+The compiled files contain no functional implementation, which keeps their size and compile time small. They are linked to a library, [schedule_lib](../schedule_lib), that provides an implementation for each node type.
+This means that the schedule can treat nodes as black boxes. This architecture also makes the library easier to test, as generation is fully independent of the core implementation.
+
+The inheritance hierarchy of the node implementations mirrors that of the original meta-model, increasing traceability between the original instance and the compiled instance.
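For illustration, the fixed interface of a compiled schedule looks roughly as follows (a minimal sketch based on [schedule.pyi](../schedule.pyi); the generated bodies, produced by [generator.py](../generator.py), additionally build the concrete [schedule_lib](../schedule_lib) nodes and their connections, so the comments below only describe what they do):

```python
# Sketch of the fixed interface every compiled schedule exposes (see schedule.pyi).
# Only the slots and method names are part of the contract.
class Schedule:
    __slots__ = ("start", "end", "nodes")

    def __init__(self):
        # build the Start/End nodes and all other schedule_lib nodes,
        # then connect their exec- and data-gates
        ...

    @staticmethod
    def get_matchers():
        # return the pattern files referenced by Match/Rewrite nodes
        ...

    def init_schedule(self, scheduler, rule_executor, matchers):
        # link the loaded pattern models and sub-schedules to the nodes
        ...

    def generate_dot(self, *args, **kwargs):
        # append this schedule's DOT representation to the given buffers
        ...
```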
+
+## Usage
+
+### Running the Module
+
+```python
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from transformation.ramify import ramify
+from api.od import ODAPI
+from transformation.schedule.rule_scheduler import RuleScheduler
+
+state = DevState()
+scd_mmm = bootstrap_scd(state)
+
+# load model and meta-model
+metamodel_cs = open('your_metamodel.od', 'r', encoding="utf-8").read()
+model_cs = open('your_model.od', 'r', encoding="utf-8").read()
+
+# Parse them
+metamodel = loader.parse_and_check(state, metamodel_cs, scd_mmm, "your_metamodel")
+model = loader.parse_and_check(state, model_cs, metamodel, "Example model")
+
+# Ramified model
+metamodel_ramified = ramify(state, metamodel)
+
+# scheduler
+scheduler = RuleScheduler(state, metamodel, metamodel_ramified)
+
+# load schedule
+scheduler.load_schedule("your_schedule.od")
+# scheduler.load_schedule("your_schedule.py") # compiled version (without conformance checking)
+# scheduler.load_schedule("your_schedule.drawio") # main page will be executed
+
+# execute model transformation
+api = ODAPI(state, model, metamodel)
+scheduler.run(api)
+```
+
+#### Simple example schedules (.od format)
+
+A schedule is executed from start to end, or to a NullNode (reachable only from unconnected exec-gates).
+Given the following basic schedule (ARule without NAC), the first match of the pre-condition pattern is used to rewrite the host graph.
+This schedule expects at least one match, as the `fail` exec-gate of the match node is not connected.
+Zero matches lead to a NullNode, resulting in early termination.
+
+```markdown
+start:Start
+end:End
+
+# match once
+m:Match{
+    file = "your_pre-condition_pattern.od";
+    n = 1;
+}
+
+# rewrite
+r:Rewrite{
+    file = "your_post-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> r) {from="success"; to="in";}
+:Conn_exec (r -> end) {from="out"; to="in";}
+
+:Conn_data (m -> r) {from="out"; to="in";}
+```
+
+
+With some small adjustments, all matches can be rewritten (FRule without NAC):
+
+```markdown
+start:Start
+end:End
+
+# match all
+m:Match{
+    file = "your_pre-condition_pattern.od";
+    # n = +INF (if missing: all matches)
+}
+
+l:Loop
+
+# rewrite
+r:Rewrite{
+    file = "your_post-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> l) {from="success"; to="in";}
+:Conn_exec (l -> r) {from="it"; to="in";}
+:Conn_exec (r -> l) {from="out"; to="in";}
+:Conn_exec (l -> end) {from="out"; to="in";}
+
+:Conn_data (m -> l) {from="out"; to="in";}
+:Conn_data (l -> r) {from="out"; to="in";}
```
+
+
+A NAC can be added to this example by inserting a match that uses the previous match and is expected to fail
(FRule with NAC):
+
+```markdown
+start:Start
+end:End
+
+# match all
+m:Match{
+    file = "your_pre-condition_pattern.od";
+    # n = +INF (if missing: all matches)
+}
+
+l:Loop
+
+# NAC
+n:Match{
+    file = "your_NAC_pre-condition_pattern.od";
+    n = 1; # one fail is enough
+}
+
+# rewrite
+r:Rewrite{
+    file = "your_post-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> l) {from="success"; to="in";}
+:Conn_exec (l -> n) {from="it"; to="in";}
+:Conn_exec (n -> r) {from="fail"; to="in";}
+:Conn_exec (r -> l) {from="out"; to="in";}
+:Conn_exec (l -> end) {from="out"; to="in";}
+
+:Conn_data (m -> l) {from="out"; to="in";}
+:Conn_data (l -> n) {from="out"; to="in";}
+:Conn_data (l -> r) {from="out"; to="in";}
+```
+
+
+## Node Types
+
+### Start
+This node indicates the start of a schedule.
+Its signature (additional ports) can be used to insert match sets or alternative exec-paths, increasing reusability.
+
+[Start](schedule_lib/start.md)
+
+### End
+Counterpart to the Start node. Reaching this node results in successful termination of the schedule.
+Its signature (additional ports) can be used to extract match sets or alternative exec-paths, increasing reusability.
+
+[End](schedule_lib/end.md)
+
+### Match
+Matches a pre-condition pattern on the host-graph. A primitive defined in T-Core.
+
+[Match](schedule_lib/match.md)
+
+### Rewrite
+Rewrites the host-graph using a post-condition pattern. A primitive defined in T-Core.
+
+[Rewrite](schedule_lib/rewrite.md)
+
+### Modify
+Modifies a match set. Patterns name the elements they match; this node renames or deletes entries of a match so it can be used as a pivot in another pattern that uses different names (a minimal sketch follows below).
+An example usage can be found in [examples/geraniums](../../../examples/geraniums).
+
+In the following schedule, a cracked pot was matched and is no longer needed.
+The Modify node deletes it, allowing the match node for flowering flowers to use a pattern without this element, reducing the pattern's size and making it more general.
+
+
+[Modify](schedule_lib/modify.md)
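For illustration only, a Modify node that renames one element of the match and drops another could look like this (a minimal sketch; the node name `mod`, the element names `t` and `marker`, and the surrounding `m`/`r` nodes are hypothetical):

```markdown
# rename matched element 't' to 'transition' and drop 'marker' from the match
mod:Modify{
    rename = `{"t": "transition"}`;
    delete = `["marker"]`;
}

# Modify only has data-gates: one 'in' and one 'out'
:Conn_data (m -> mod) {from="out"; to="in";}
:Conn_data (mod -> r) {from="out"; to="in";}
```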
+
+### Merge
+Combines multiple matches.
+This allows a pattern to be split into parts, or a specific part to be reused with another match without recalculating it.
+An example usage can be found in [examples/geraniums](../../../examples/geraniums).
+
+In the following sub-schedule, a new pot is combined with the flower, its old pot, and their connection, so that a rewrite can move the flower.
+Without this node, combining these elements to replant multiple flowers into one new pot would require markers, making the matching harder.
+
+
+
+[Merge](schedule_lib/merge.md)
+
+### Store
+Combines individual matches into a new match set.
+Use the exec port to insert the data on the associated data-port into the set.
+
+The direct usage of this node is limited but invaluable for libraries.
+An example usage is Petri-net execution with a user interface, which requires a list of all transitions that can fire.
+Matching "all transitions" followed by a loop that checks the NAC leaves single matches.
+This node allows these matches to be recombined into a set from which a transition can be chosen.
+
+[Store](schedule_lib/store.md)
+
+### Loop
+Iterates over a given match set.
+Nodes such as Match or Rewrite use a single match as a pivot; this node makes it possible to execute them over every element of a match set.
+See [Modify](#Modify) or [Merge](#Merge) for examples.
+
+[Loop](schedule_lib/loop.md)
+
+### Print
+Prints the input data.
+This is mainly used as a debugging/testing tool to validate intermediate information or state.
+
+[Print](schedule_lib/print.md)
+
+### Action
+This node allows code to be injected into the schedule.
+It can be used for general purposes and can even recreate all other nodes (except Start and End).
+Not all functionality can be expressed using the current nodes; for Petri nets, an example is generating a visual overview of the Petri-net system.
+
+[Action](schedule_lib/action.md)
+
+## Edge Types
+Nodes can be connected using two different kinds of edges. The execution-edges define the execution flow of the schedule.
+These connections can only connect nodes that inherit from [ExecNode](schedule_lib/exec_node.md).
+Connections between the execution-gates defined by the nodes follow a "one to many" scheme per gate.
+The data-edges allow information to be distributed to other [DataNode](schedule_lib/data_node.md) instances.
+These work the opposite way, "many to one", on data-gates.
+A data change on a gate will notify all connected nodes, allowing propagation through the system. Note: the data received is immutable, to ensure consistent and reliable execution of the schedule.
+
+
+## File Formats
+
+### .od
+This is the original textual file format used by the framework. The main advantage of this format is its integration with the framework, which allows conformance checking against the scheduling language.
+Therefore, all other formats are converted to this type for conformance checking before being compiled.
+
+### .py
+All schedules are compiled to Python after conformance checking. Supporting this format makes it possible to load schedules without expensive compilation or conformance checking, reducing computational cost.
+This format is recommended for deployed applications where the schedule will not change.
+It is not advisable to write schedules directly in this format, as conformance checking is what guarantees the proper working of the schedule.
+
+### .drawio
+A visual format for the drawio application.
+The module ships a drawio [library](../schedule_lib/Schedule_lib.xml) that provides a representation with additional fields for easy integration with the application.
+The main advantage of this format is the use of pages, which allows sub-schedules to be easily created and organised within one schedule.
(layers are not allowed) + diff --git a/transformation/schedule/doc/schedule_lib/action.md b/transformation/schedule/doc/schedule_lib/action.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/action.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/data_node.md b/transformation/schedule/doc/schedule_lib/data_node.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/data_node.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/end.md b/transformation/schedule/doc/schedule_lib/end.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/end.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/exec_node.md b/transformation/schedule/doc/schedule_lib/exec_node.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/exec_node.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/loop.md b/transformation/schedule/doc/schedule_lib/loop.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/loop.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/match.md b/transformation/schedule/doc/schedule_lib/match.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/match.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/merge.md b/transformation/schedule/doc/schedule_lib/merge.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/merge.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/modify.md b/transformation/schedule/doc/schedule_lib/modify.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/modify.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/node.md b/transformation/schedule/doc/schedule_lib/node.md new file mode 100644 index 0000000..078e00f --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/node.md @@ -0,0 +1,41 @@ +## Node Module + +Defines the abstract base Node class for graph-based structures. Each Node is assigned +a unique identifier via an external IdGenerator. The class provides an interface for +managing execution state and generating DOT graph representations using Jinja2 templates. + +### Class: `Node` + +- **Attributes** + - `id: int`: A unique identifier assigned to each instance upon initialization. + +- **Methods** + - `get_id` + - returns: `int`, The unique node ID + + Retrieves the unique identifier of the node. + + - `generate_stack_frame` + - exec_id: `int`, The ID of the execution context. + - returns: `None` + + Initializes a new state frame for a specific execution context. + Designed to be overridden in subclasses that use execution state. + + - `delete_stack_frame` + - exec_id: `int`, The ID of the execution context. 
+ - returns: `None` + + Deletes the state frame for a specific execution context. + Designed to be overridden in subclasses that use execution state. + + - `generate_dot` + - nodes: `list[str]`, A list to append DOT node definitions to. + - edges: `list[str]`, A list to append DOT edges definitions to. + - visited: `set[str]`, A set of already visited node IDs to avoid duplicates or recursion. + - template: `list[str]`, A Jinja2 template used to format the node's DOT representation. + - returns: `None` + + Generates the DOT graph representation for this node and its relationships. + Must be implemented in subclasses. + \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/print.md b/transformation/schedule/doc/schedule_lib/print.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/print.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/rewrite.md b/transformation/schedule/doc/schedule_lib/rewrite.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/rewrite.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/rule.md b/transformation/schedule/doc/schedule_lib/rule.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/rule.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/schedule.md b/transformation/schedule/doc/schedule_lib/schedule.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/schedule.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/start.md b/transformation/schedule/doc/schedule_lib/start.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/start.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/doc/schedule_lib/store.md b/transformation/schedule/doc/schedule_lib/store.md new file mode 100644 index 0000000..9805841 --- /dev/null +++ b/transformation/schedule/doc/schedule_lib/store.md @@ -0,0 +1 @@ +# Under construction \ No newline at end of file diff --git a/transformation/schedule/generator.py b/transformation/schedule/generator.py new file mode 100644 index 0000000..9fd08a0 --- /dev/null +++ b/transformation/schedule/generator.py @@ -0,0 +1,197 @@ +import sys +import os +from uuid import UUID + +from black.trans import Callable +from jinja2.runtime import Macro + +from api.od import ODAPI +from jinja2 import Environment, FileSystemLoader + + +class schedule_generator: + def __init__(self, odApi: ODAPI): + self.env = Environment( + loader=FileSystemLoader( + os.path.join(os.path.dirname(__file__), "templates") + ) + ) + self.env.trim_blocks = True + self.env.lstrip_blocks = True + self.template = self.env.get_template("schedule_template.j2") + self.template_wrap = self.env.get_template("schedule_template_wrap.j2") + self.api = odApi + + + def _get_slot_value_default(item: UUID, slot: str, default): + if slot in self.api.get_slots(item): + return self.api.get_slot_value(item, slot) + return default + + conn_data_event = { + "Match": lambda item: False, + "Rewrite": lambda item: False, + "Modify": lambda item: True, + "Merge": lambda item: True, + 
"Loop": lambda item: True, + "Action": lambda item: _get_slot_value_default(item, "event", False), + "Print": lambda item: _get_slot_value_default(item, "event", False), + "Store": lambda item: False, + "Schedule": lambda item: False, + "End": lambda item: False, + } + + arg_map = { + "Loop": (name_dict := lambda item: {"name": self.api.get_name(item)}), + "Start": lambda item: { + **name_dict(item), + "ports_exec_out": eval( + self.api.get_slot_value_default(item, "ports_exec_out", "['out']") + ), + "ports_data_out": eval( + self.api.get_slot_value_default(item, "ports_data_out", "[]") + ), + }, + "End": lambda item: { + **name_dict(item), + "ports_exec_in": eval( + self.api.get_slot_value_default(item, "ports_exec_in", "['in']") + ), + "ports_data_in": eval( + self.api.get_slot_value_default(item, "ports_data_in", "[]") + ), + }, + "Rewrite": ( + file_dict := lambda item: { + **name_dict(item), + "file": self.api.get_slot_value(item, "file"), + } + ), + "Match": lambda item: { + **file_dict(item), + "n": self.api.get_slot_value_default(item, "n", 'float("inf")'), + }, + "Action": lambda item: { + **name_dict(item), + "ports_exec_in": self.api.get_slot_value_default(item, "ports_exec_in", ["in"]), + "ports_exec_out": self.api.get_slot_value_default(item, "ports_exec_out", ["out"]), + "ports_data_in": self.api.get_slot_value_default(item, "ports_data_in", []), + "ports_data_out": self.api.get_slot_value_default(item, "ports_data_out", []), + "action": repr(self.api.get_slot_value(item, "action")), + "init": repr( + self.api.get_slot_value_default(item, "init", "") + ), + }, + "Modify": lambda item: { + **name_dict(item), + "rename": eval(self.api.get_slot_value_default(item, "rename", "{}")), + "delete": eval(self.api.get_slot_value_default(item, "delete", "{}")), + }, + "Merge": lambda item: { + **name_dict(item), + "ports_data_in": eval( + self.api.get_slot_value_default(item, "ports_data_in", "[]") + ), + }, + "Store": lambda item: { + **name_dict(item), + "ports": eval(self.api.get_slot_value_default(item, "ports", "[]")), + }, + "Schedule": file_dict, + "Print": lambda item: { + **name_dict(item), + "label": self.api.get_slot_value_default(item, "label", ""), + "custom": self.api.get_slot_value_default(item, "custom", ""), + }, + "Conn_exec": ( + conn_dict := lambda item: { + "name_from": self.api.get_name(self.api.get_source(item)), + "name_to": self.api.get_name(self.api.get_target(item)), + "from": self.api.get_slot_value_default(item, "from", 0), + "to": self.api.get_slot_value_default(item, "to", 0), + } + ), + "Conn_data": lambda item: { + **conn_dict(item), + "event": conn_data_event[ + self.api.get_type_name(target := self.api.get_target(item)) + ](target), + }, + } + self.macro_args = { + tp: (macro, arg_map.get(tp)) + for tp, macro in self.template.module.__dict__.items() + if type(macro) == Macro + } + + def _render(self, item): + type_name = self.api.get_type_name(item) + macro, arg_gen = self.macro_args[type_name] + return macro(**arg_gen(item)) + + def _dfs( + self, stack: list[UUID], get_links: Callable, get_next_node: Callable + ) -> tuple[set[UUID], list[UUID]]: + visited = set() + connections = list() + while len(stack) > 0: + obj = stack.pop() + if obj in visited: + continue + visited.add(obj) + for conn in get_links(self.api, obj): + connections.append(conn) + stack.append(get_next_node(self.api, conn)) + return visited, connections + + def generate_schedule(self, stream=sys.stdout): + start = self.api.get_all_instances("Start")[0][1] + end = 
self.api.get_all_instances("End")[0][1] + out = { + "blocks": [], + "blocks_name": [], + "blocks_start_end": [], + "exec_conn": [], + "data_conn": [], + "match_files": set(), + "matchers": [], + "start": self.api.get_name(start), + "end": self.api.get_name(end), + } + + stack = [start, end] + exec_blocks, conn_exec = self._dfs( + stack, + lambda api, node: api.get_outgoing(node, "Conn_exec"), + lambda api, conn: api.get_target(conn), + ) + + for name, p in self.api.get_all_instances("Print"): + if self.api.has_slot(p, "event") and self.api.get_slot_value(p, "event"): + exec_blocks.add(p) + + stack = list(exec_blocks) + blocks, conn_data = self._dfs( + stack, + lambda api, node: api.get_incoming(node, "Conn_data"), + lambda api, conn: api.get_source(conn), + ) + + for exec_c in conn_exec: + out["exec_conn"].append(self._render(exec_c)) + + for data_c in conn_data: + out["data_conn"].append(self._render(data_c)) + + for block in blocks: + out["blocks_name"].append(self.api.get_name(block)) + if block in [start, end]: + out["blocks_start_end"].append(self._render(block)) + continue + out["blocks"].append(self._render(block)) + if self.api.is_instance(block, "Rule"): + d = self.macro_args[self.api.get_type_name(block)][1](block) + out["match_files"].add(d["file"]) + out["matchers"].append(d) + + print(self.template_wrap.render(out), file=stream) diff --git a/transformation/schedule/models/eval_context.py b/transformation/schedule/models/eval_context.py new file mode 100644 index 0000000..061b4f6 --- /dev/null +++ b/transformation/schedule/models/eval_context.py @@ -0,0 +1,151 @@ +from typing import TYPE_CHECKING, get_origin, get_args +from types import UnionType +from uuid import UUID + +from jinja2 import Template + +from framework.conformance import eval_context_decorator +from services.primitives.string_type import String + +if TYPE_CHECKING: + from api.od_stub_readonly import get_outgoing, get_incoming, get_slot_value, get_value, get_target, has_slot + from eval_context_stub import * + + +@eval_context_decorator +def _check_all_connections(this, labels: list[list[str] | str]) -> list[str]: + err = [] + check_incoming_exec(this, err, labels[0]) + check_outgoing_exec(this, err, labels[1]) + check_incoming_data(this, err, labels[2]) + check_outgoing_data(this, err, labels[3]) + return err + +@eval_context_decorator +def _check_outgoing_exec(this, err: list[str], labels: list[str]) -> None: + l = set(labels) + gates = set() + for y in get_outgoing(this, "Conn_exec"): + if (x := get_slot_value(y, "from")) not in l: + err.append(f"output exec gate '{x}' does not exist. Gates: {', '.join(labels)}.") + if x in gates: + err.append(f"output exec gate '{x}' is connected to multiple gates.") + gates.add(x) + + +@eval_context_decorator +def _check_incoming_exec(this, err: list[str], labels: list[str]) -> None: + l = set(labels) + for y in get_incoming(this, "Conn_exec"): + if (x := get_slot_value(y, "to")) not in l: + err.append(f"input exec gate gate '{x}' does not exist. Gates: {', '.join(labels)}.") + + +@eval_context_decorator +def _check_outgoing_data(this, err: list[str], labels: list[str]) -> None: + l = set(labels) + for y in get_outgoing(this, "Conn_data"): + if (x := get_slot_value(y, "from")) not in l: + err.append(f"output data gate '{x}' does not exist. 
Gates: {', '.join(labels)}.") + + +@eval_context_decorator +def _check_incoming_data(this, err: list[str], labels: list[str]) -> None: + l = set(labels) + gates = set() + for y in get_incoming(this, "Conn_data"): + if (x := get_slot_value(y, "to")) not in l: + err.append(f"input data gate '{x}' does not exist. Gates: {', '.join(labels)}.") + if x in gates: + err.append(f"input data gate '{x}' is connected to multiple gates.") + gates.add(x) + +def check_type(x: any, typ2: any) -> bool: + origin = get_origin(typ2) + if origin is None: + return isinstance(x, typ2) + args = get_args(typ2) + if origin is UnionType: + for tp in args: + if check_type(x, tp): + return True + return False + if not isinstance(x, origin): + return False + if origin in [list, set]: + for value in x: + if not check_type(value, args[0]): + return False + elif origin is tuple: + if len(args) != len(x): + return False + for i, value in enumerate(x): + if not check_type(value, args[i]): + return False + elif origin is dict: + for key, value in x.items(): + if not (check_type(key, args[0]) and check_type(value, args[1])): + return False + return True + +@eval_context_decorator +def _check_slot_code_type(this: UUID, slot: str, typ: type, unique = False, *, mandatory: bool = False, blacklist: list[str] | None = None) -> list[str]: + err = [] + if not (has_slot(this, slot)): + if mandatory: + err.append(f"Missing mandatory slot: '{slot}'.") + return err + try: + try: + x = eval(get_slot_value(this, slot)) + except Exception as _: + err.append(f"Invalid python code for {slot}: {get_slot_value(this, slot)}") + return err + + if not check_type(x, typ): + try: + typ_rep = typ.__name__ + except AttributeError: + typ_rep = str(typ) + err.append(f"Unexpected type for {slot}: {type(x).__name__}, expected type: {typ_rep}") + return err + + if unique and len(set(x)) != len(x): + err.append(f"elements must be unique") + return err + except Exception as e: + err.append(f"Unexpected error: {e}") + return err + + +@eval_context_decorator +def _check_jinja2_code(this: UUID, slot: str) -> list[str]: + if len(err:= check_slot_code_type(this, slot, str, mandatory=True)) != 0: + return err + s = eval(get_slot_value(this, slot)) + try: + template = Template(s) + template.render(**{"data":[{}]}) + return [] + except Exception as e: + return [f"Invalid Jinja2 syntax for '{slot}':\n{e}\n{s}"] + + +@eval_context_decorator +def _check_code_syntax(code) -> list[str]: + try: + compile(code, "", "exec") + return [] + except SyntaxError as e: + return [f"Invalid python code for: `{code}` :\n{e}"] + +mm_eval_context = { + "check_all_connections": _check_all_connections, + "check_outgoing_exec": _check_outgoing_exec, + "check_incoming_exec": _check_incoming_exec, + "check_outgoing_data": _check_outgoing_data, + "check_incoming_data": _check_incoming_data, + "check_slot_code_type": _check_slot_code_type, + "check_code_syntax": _check_code_syntax, + "check_jinja2_code": _check_jinja2_code, +} diff --git a/transformation/schedule/models/eval_context_stub.pyi b/transformation/schedule/models/eval_context_stub.pyi new file mode 100644 index 0000000..9811909 --- /dev/null +++ b/transformation/schedule/models/eval_context_stub.pyi @@ -0,0 +1,6 @@ +def check_outgoing_exec(this, err: list[str], labels: list[str]) -> bool: ... +def check_incoming_exec(this, err: list[str], labels: list[str]) -> bool: ... +def check_outgoing_data(this, err: list[str], labels: list[str]) -> bool: ... +def check_incoming_data(this, err: list[str], labels: list[str]) -> bool: ... 
+def check_is_type(s: str, typ: any) -> bool: ... +def check_code_syntax(code) -> bool: ... diff --git a/transformation/schedule/models/scheduling_MM.od b/transformation/schedule/models/scheduling_MM.od new file mode 100644 index 0000000..73f5131 --- /dev/null +++ b/transformation/schedule/models/scheduling_MM.od @@ -0,0 +1,194 @@ +abstract class Exec + +association Conn_exec [0..*] Exec -> Exec [0..*] { + String from; + String to; +} + +abstract class Data +association Conn_data [0..*] Data -> Data [0..*] { + String from; + String to; +} + +class Start [1..1] (Exec, Data) { + optional ActionCode ports_exec_out; + optional ActionCode ports_data_out; + ``` + err = check_slot_code_type(this, "ports_exec_out", list[str] | set[str], True) + err.extend(check_slot_code_type(this, "ports_data_out", list[str] | set[str], True)) + if len(err) == 0: + err = check_all_connections(this, [ + [], + eval(get_slot_value_default(this, "ports_exec_out", "['out']")), + [], + eval(get_slot_value_default(this, "ports_data_out", "[]")) + ]) + err + ```; +} +class End [1..1] (Exec, Data) { + optional ActionCode ports_exec_in; + optional ActionCode ports_data_in; + ``` + err = check_slot_code_type(this, "ports_exec_in", list[str] | set[str], True) + err.extend(check_slot_code_type(this, "ports_data_in", list[str] | set[str], True)) + if len(err) == 0: + err = check_all_connections(this, [ + eval(get_slot_value_default(this, "ports_exec_in", "['in']")), + [], + eval(get_slot_value_default(this, "ports_data_in", "[]")), + [] + ]) + err + ```; +} + +abstract class Rule (Exec, Data) +{ + String file; +} + +class Match (Rule) +{ + optional Integer n; + ``` + check_all_connections(this, [ + ["in"], + ["success", "fail"], + ["in"], + ["out"] + ]) + ```; +} + +class Rewrite (Rule) +{ + ``` + check_all_connections(this, [ + ["in"], + ["out"], + ["in"], + ["out"] + ]) + ```; +} + +class Action (Exec, Data) +{ + optional ActionCode ports_exec_in; + optional ActionCode ports_exec_out; + optional ActionCode ports_data_in; + optional ActionCode ports_data_out; + optional ActionCode init `check_code_syntax(get_value(get_target(this)))`; + ActionCode action `check_code_syntax(get_value(get_target(this)))`; + ``` + err = check_slot_code_type(this, "ports_exec_in", list[str] | set[str], True) + err.extend(check_slot_code_type(this, "ports_exec_out", list[str] | set[str], True)) + err.extend(check_slot_code_type(this, "ports_data_in", list[str] | set[str], True)) + err.extend(check_slot_code_type(this, "ports_data_out", list[str] | set[str], True)) + if len(err) == 0: + err = check_all_connections(this, [ + eval(get_slot_value_default(this, "ports_exec_in", "['in']")), + eval(get_slot_value_default(this, "ports_exec_out", "['out']")), + eval(get_slot_value_default(this, "ports_data_in", "[]")), + eval(get_slot_value_default(this, "ports_data_out", "[]")) + ]) + err + ```; + +} + +class Modify (Data) +{ + optional ActionCode rename; + optional ActionCode delete; + ``` + err = check_slot_code_type(this, "rename", dict[str,str]) + err.extend(check_slot_code_type(this, "delete", list[str] | set[str])) + if len(err) == 0: + if not (eval(get_slot_value_default(this, "rename", "dict()")).keys().isdisjoint( + eval(get_slot_value_default(this, "delete", "set()"))) + ): + err.append("rename and delete should be disjoint.") + err.extend(check_all_connections(this, [ + [], + [], + ["in"], + ["out"] + ])) + err + ```; +} + +class Merge (Data) +{ + ActionCode ports_data_in; + ``` + err = check_slot_code_type(this, "ports_data_in", list[str] | 
set[str], True, mandatory = True) + if len(err) == 0: + err = check_all_connections(this, [ + [], + [], + eval(get_slot_value(this, "ports_data_in")), + ["out"] + ]) + err + ```; +} + +class Store (Exec, Data) +{ + ActionCode ports; + ``` + err = check_slot_code_type(this, "ports", list[str] | set[str], True, mandatory = True, blacklist = ["in", "out"]) + if len(err) == 0: + err = check_all_connections(this, [ + [*(ports:= eval(get_slot_value(this, "ports"))), "in"], + [*ports, "out"], + ports, + ["out"] + ]) + err + ```; +} + +class Schedule (Exec, Data) +{ + String file; + ``` + check_all_connections(this, [ + {get_slot_value(conn, "to") for conn in get_incoming(this, "Conn_exec")}, + {get_slot_value(conn, "from") for conn in get_outgoing(this, "Conn_exec")}, + {get_slot_value(conn, "to") for conn in get_incoming(this, "Conn_data")}, + {get_slot_value(conn, "from") for conn in get_outgoing(this, "Conn_data")} + ]) + ```; +} + +class Loop(Exec, Data) +{ + ``` + check_all_connections(this, [ + ["in"], + ["it", "out"], + ["in"], + ["out"] + ]) + ```; +} + +class Print(Exec, Data) +{ + optional Boolean event; + optional String label; + optional ActionCode custom `check_jinja2_code(get_source(this), "custom")`; + ``` + check_all_connections(this, [ + ["in"], + ["out"], + ["in"], + [] + ]) + ```; +} \ No newline at end of file diff --git a/transformation/schedule/rule_executor.py b/transformation/schedule/rule_executor.py new file mode 100644 index 0000000..da97b2f --- /dev/null +++ b/transformation/schedule/rule_executor.py @@ -0,0 +1,46 @@ +from typing import Any +from uuid import UUID + +from api.od import ODAPI +from transformation.matcher import match_od +from transformation.rewriter import rewrite +from util.loader import parse_and_check + + +class RuleExecutor: + def __init__(self, state, mm: UUID, mm_ramified: UUID, eval_context={}): + self.state = state + self.mm = mm + self.mm_ramified = mm_ramified + self.eval_context = eval_context + + # Generates matches. 
+ # Every match is a dictionary with entries LHS_element_name -> model_element_name + def match_rule(self, m: UUID, lhs: UUID, *, pivot: dict[Any, Any]): + lhs_matcher = match_od( + self.state, + host_m=m, + host_mm=self.mm, + pattern_m=lhs, + pattern_mm=self.mm_ramified, + eval_context=self.eval_context, + pivot=pivot, + ) + return lhs_matcher + + def rewrite_rule(self, od: ODAPI, rhs: UUID, *, pivot: dict[Any, Any]): + rhs = rewrite( + self.state, + rhs_m=rhs, + pattern_mm=self.mm_ramified, + lhs_match=pivot, + host_m=od.m, + host_mm=od.mm, + eval_context=self.eval_context, + ) + od.recompute_mappings() + yield rhs + + def load_match(self, file: str): + with open(file, "r") as f: + return parse_and_check(self.state, f.read(), self.mm_ramified, file) diff --git a/transformation/schedule/rule_scheduler.py b/transformation/schedule/rule_scheduler.py new file mode 100644 index 0000000..2b2e133 --- /dev/null +++ b/transformation/schedule/rule_scheduler.py @@ -0,0 +1,338 @@ +from __future__ import annotations + +import importlib.util +import io +import os +import re +import sys + +from pathlib import Path +from time import time +from typing import cast, TYPE_CHECKING + +from jinja2 import FileSystemLoader, Environment + +from concrete_syntax.textual_od import parser as parser_od +from concrete_syntax.textual_cd import parser as parser_cd +from api.od import ODAPI +from bootstrap.scd import bootstrap_scd +from transformation.schedule.rule_executor import RuleExecutor +from transformation.schedule.generator import schedule_generator +from transformation.schedule.models.eval_context import mm_eval_context +from transformation.schedule.schedule_lib import ExecNode, Start +from framework.conformance import Conformance, render_conformance_check_result, eval_context_decorator +from state.devstate import DevState +from examples.petrinet.renderer import render_petri_net_to_dot + +from drawio2py import parser +from drawio2py.abstract_syntax import DrawIOFile, Edge, Vertex, Cell +from icecream import ic + +from transformation.schedule.schedule_lib.funcs import IdGenerator + +if TYPE_CHECKING: + from transformation.schedule.schedule import Schedule + + +class RuleScheduler: + __slots__ = ( + "rule_executor", + "schedule_main", + "loaded", + "out", + "verbose", + "conformance", + "directory", + "eval_context", + "_state", + "_mmm_cs", + "sub_schedules", + "end_time", + ) + + def __init__( + self, + state, + mm_rt, + mm_rt_ramified, + *, + outstream=sys.stdout, + verbose: bool = False, + conformance: bool = True, + directory: str = "", + eval_context: dict[str, any] = None, + ): + self.rule_executor: RuleExecutor = RuleExecutor(state, mm_rt, mm_rt_ramified) + self.schedule_main: Schedule | None = None + self.out = outstream + self.verbose: bool = verbose + self.conformance: bool = conformance + self.directory: Path = Path.cwd() / directory + if eval_context is None: + eval_context = {} + self.eval_context: dict[str, any] = eval_context + + self.loaded: dict[str, dict[str, any]] = {"od": {}, "py": {}, "drawio": {}, "rules": {}} + + + self._state = DevState() + self._mmm_cs = bootstrap_scd(self._state) + + self.end_time = float("inf") + self.sub_schedules = float("inf") + + def load_schedule(self, filename): + return self._load_schedule(filename, _main=True) is not None + + + def _load_schedule(self, filename: str, *, _main = True) -> Schedule | None: + if filename.endswith(".drawio"): + if (filename := self._generate_schedule_drawio(filename)) is None: + return None + + if filename.endswith(".od"): + if 
(filename := self._generate_schedule_od(filename)) is None: + return None + if filename.endswith(".py"): + s = self._load_schedule_py(filename, _main=_main) + return s + + raise Exception(f"Error unknown file: {filename}") + + def _load_schedule_py(self, filename: str, *, _main = True) -> "Schedule": + if (s:= self.loaded["py"].get(filename, None)) is not None: + return s + + spec = importlib.util.spec_from_file_location(filename, str(self.directory / filename)) + schedule_module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(schedule_module) + self.loaded["py"][filename] = (s:= schedule_module.Schedule()) + if _main: + self.schedule_main = s + self.load_matchers(s) + return s + + def _generate_schedule_od(self, filename: str) -> str | None: + if (s:= self.loaded.get(("od", filename), None)) is not None: + return s + file = str(self.directory / filename) + self._print("Generating schedule ...") + with open(f"{os.path.dirname(__file__)}/models/scheduling_MM.od", "r") as f_MM: + mm_cs = f_MM.read() + try: + with open(file, "r") as f_M: + m_cs = f_M.read() + except FileNotFoundError: + self._print(f"File not found: {file}") + return None + + self._print("OK\n\nParsing models\n\tParsing meta model") + try: + scheduling_mm = parser_cd.parse_cd( + self._state, + m_text=mm_cs, + ) + except Exception as e: + self._print( + f"Error while parsing meta-model: scheduling_MM.od\n\t{e}" + ) + return None + self._print(f"\tParsing '{filename}' model") + try: + scheduling_m = parser_od.parse_od( + self._state, m_text=m_cs, mm=scheduling_mm + ) + except Exception as e: + self._print(f"\033[91mError while parsing model: {filename}\n\t{e}\033[0m") + return None + if self.conformance: + success = True + self._print("OK\n\tmeta-meta-model a valid class diagram") + conf_err = Conformance( + self._state, self._mmm_cs, self._mmm_cs + ).check_nominal() + b = len(conf_err) + success = success and not b + self._print( + f"\t\t{'\033[91m' if b else ''}{render_conformance_check_result(conf_err)}{'\033[0m' if b else ''}" + ) + self._print( + f"Is our '{filename}' model a valid 'scheduling_MM.od' diagram?" 
+ ) + conf_err = Conformance( + self._state, scheduling_m, scheduling_mm, eval_context=mm_eval_context + ).check_nominal() + b = len(conf_err) + success = success and not b + self._print( + f"\t\t{'\033[91m' if b else ''}{render_conformance_check_result(conf_err)}{'\033[0m' if b else ''}" + ) + if not success: + return None + od = ODAPI(self._state, scheduling_m, scheduling_mm) + g = schedule_generator(od) + + output_buffer = io.StringIO() + g.generate_schedule(output_buffer) + outfilename = f"{".".join(filename.split(".")[:-1])}.py" + open(self.directory / outfilename, "w", encoding='utf-8').write(output_buffer.getvalue()) + self._print("Schedule generated") + self.loaded[("od", filename)] = outfilename + return outfilename + + def _print(self, *args) -> None: + if self.verbose: + print(*args, file=self.out) + + def load_matchers(self, schedule: "Schedule") -> None: + matchers = dict() + for file in schedule.get_matchers(): + if (r:= self.loaded.get(("rule", file), None)) is None: + self.loaded[("rule", file)] = (r:= self.rule_executor.load_match(self.directory / file)) + matchers[file] = r + schedule.init_schedule(self, self.rule_executor, matchers) + + def generate_dot(self, file: str) -> None: + env = Environment( + loader=FileSystemLoader( + os.path.join(os.path.dirname(__file__), "templates") + ) + ) + env.trim_blocks = True + env.lstrip_blocks = True + template_dot = env.get_template("schedule_dot.j2") + + nodes = [] + edges = [] + visit = set() + for schedule in self.loaded["py"].values(): + schedule.generate_dot(nodes, edges, visit, template_dot) + with open(self.directory / file, "w") as f_dot: + f_dot.write(template_dot.render(nodes=nodes, edges=edges)) + + def run(self, model) -> tuple[int, str]: + self._print("Start simulation") + if 'pydevd' in sys.modules: + self.end_time = time() + 1000 + else: + self.end_time = time() + 10000 + return self._runner(model, self.schedule_main, "out", IdGenerator.generate_exec_id(), {}) + + def _runner(self, model, schedule: Schedule, exec_port: str, exec_id: int, data: dict[str, any]) -> tuple[int, any]: + self._generate_stackframe(schedule, exec_id) + cur_node = schedule.start + cur_node.run_init(exec_port, exec_id, data) + while self.end_time > time(): + cur_node, port = cur_node.nextState(exec_id) + termination_reason = cur_node.execute(port, exec_id, model) + if termination_reason is not None: + self._delete_stackframe(schedule, exec_id) + return termination_reason + + self._delete_stackframe(schedule, exec_id) + return -1, "limit reached" + + + def _generate_stackframe(self, schedule: Schedule, exec_id: int) -> None: + for node in schedule.nodes: + node.generate_stack_frame(exec_id) + + def _delete_stackframe(self, schedule: Schedule, exec_id: int) -> None: + for node in schedule.nodes: + node.delete_stack_frame(exec_id) + + + def _generate_schedule_drawio(self, filename:str) -> str | None: + if (s:= self.loaded["drawio"].get(filename, None)) is not None: + return s + env = Environment( + loader=FileSystemLoader( + os.path.join(os.path.dirname(__file__), "templates") + ) + ) + env.trim_blocks = True + env.lstrip_blocks = True + template = env.get_template("schedule_muMLE.j2") + main: bool = False + + node_map: dict[str, list[str | dict[str,str]]] + id_counter: int + def _get_node_id_map(elem: Cell) -> list[str | dict[str,str]]: + nonlocal node_map, id_counter + if (e_id := node_map.get(elem.id, None)) is None: + e_id = [f"{re.sub(r'[^a-zA-Z1-9_]', '', elem.properties["name"])}_{id_counter}", {}] + id_counter += 1 + node_map[elem.id] = 
e_id + return e_id + + edges: list[tuple[tuple[str, str, str, str], tuple[str,str,str,str]]] = [] + def _parse_edge(elem: Edge): + nonlocal edges + try: + edges.append(( + ( + _get_node_id_map(elem.source.parent.parent.parent)[0], + elem.source.properties["label"], + elem.source.properties["type"], + elem.source.parent.value + ), + ( + _get_node_id_map(elem.target.parent.parent.parent)[0], + elem.target.properties["label"], + elem.target.properties["type"], + elem.target.parent.value + ) + )) + except AttributeError as e: + raise Exception(f"Missing attribute {e}") + return + + def _parse_vertex(elem: Vertex): + nonlocal edges + try: + elem_map = _get_node_id_map(elem) + elem_map[1] = elem.properties + properties = elem_map[1] + properties.pop("label") + properties.pop("name") + properties.pop("placeholders") + if properties.get("type") == "Schedule": + if not re.search(r'\.(py|od)$', properties["file"]): + properties["file"] = f"{filename}/{properties["file"]}.od" + except AttributeError as e: + raise Exception(f"Missing attribute {e}") + return + + + abstract_syntax: DrawIOFile = parser.Parser.parse(str(self.directory / filename)) + filename = filename.removesuffix(".drawio") + (self.directory / filename).mkdir(parents=False, exist_ok=True) + for page in abstract_syntax.pages: + if page.name == "main": + main = True + if len(page.root.children) != 1: + raise Exception(f"Only 1 layer allowed (keybind: ctr+shift+L)") + edges = [] + id_counter = 1 + node_map = {} + + for element in page.root.children[0].children: + match element.__class__.__name__: + case "Edge": + _parse_edge(cast(Edge, element)) + case "Vertex": + _parse_vertex(cast(Vertex, element)) + for elem in element.children[0].children: + if elem.__class__.__name__ == "Edge": + _parse_edge(cast(Edge, elem)) + continue + case _: + raise Exception(f"Unexpected element: {element}") + with open(self.directory / f"{filename}/{page.name}.od", "w", encoding="utf-8") as f: + f.write(template.render(nodes=node_map, edges=edges)) + if main: + self.loaded["drawio"][filename] = (filename_out := f"{filename}/main.od") + return filename_out + + self._print("drawio schedule requires main page to automatically load.") + return None diff --git a/transformation/schedule/schedule.pyi b/transformation/schedule/schedule.pyi new file mode 100644 index 0000000..0edc014 --- /dev/null +++ b/transformation/schedule/schedule.pyi @@ -0,0 +1,18 @@ +from typing import TYPE_CHECKING +from transformation.schedule.schedule_lib import * +if TYPE_CHECKING: + from transformation.schedule.rule_executor import RuleExecutor + from rule_scheduler import RuleScheduler + +class Schedule: + __slots__ = { + "start", + "end", + "nodes" + } + def __init__(self): ... + + @staticmethod + def get_matchers(): ... + def init_schedule(self, scheduler: RuleScheduler, rule_executor: RuleExecutor, matchers): ... + def generate_dot(self, *args, **kwargs): ... 
\ No newline at end of file diff --git a/transformation/schedule/schedule_lib/Schedule_lib.xml b/transformation/schedule/schedule_lib/Schedule_lib.xml new file mode 100644 index 0000000..5dd1480 --- /dev/null +++ b/transformation/schedule/schedule_lib/Schedule_lib.xml @@ -0,0 +1,93 @@ +[ + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"start_name\" type=\"Start\" ports_exec_out=\"["out"]\" ports_data_out=\"[]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><mxCell id=\"5\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"5\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 100, + "aspect": "fixed", + "title": "Start Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"end_name\" type=\"End\" ports_exec_in=\"["in"]\" ports_data_in=\"[]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" 
style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell></root></mxGraphModel>", + "w": 160, + "h": 100, + "aspect": "fixed", + "title": "End Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type% %file% matches: %n%\" placeholders=\"1\" name=\"match_name\" type=\"Match\" file=\"rule_filename.od\" n=\"1\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=60;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"220\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"60\" width=\"160\" height=\"160\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" 
as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"7\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"success\" type=\"exec\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"fail\" type=\"exec\" id=\"10\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 220, + "aspect": "fixed", + "title": "Match Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type% %file%\" placeholders=\"1\" name=\"rewrite_name\" type=\"Rewrite\" file=\"rule_filename.od\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry y=\"1.1368683772161603e-13\" width=\"160\" height=\"150\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"110\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" 
value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"-70\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"out\" type=\"data\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 150, + "aspect": "fixed", + "title": "Rewrite Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"action_name\" type=\"Action\" ports_exec_in=\"["in"]\" ports_exec_out=\"["out"]\" ports_data_in=\"[]\" ports_data_out=\"[]\" action=\"print("hello world")\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" 
style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 100, + "aspect": "fixed", + "title": "Action Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"modify_name\" type=\"Modify\" rename=\"{"t":"transition"}\" delete=\"[]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 100, + "aspect": "fixed", + "title": "Modify Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell 
id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"merge_name\" type=\"Merge\" ports_data_in=\"["input1", "input2"]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"150\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"110\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"input1\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input2\" type=\"data\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"7\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 150, + "aspect": "fixed", + "title": "Merge Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"store_name\" type=\"Store\" ports=\"["input1"]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry 
width=\"160\" height=\"200\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"160\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input1\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input1\" type=\"data\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"8\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"8\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"out\" type=\"exec\" id=\"10\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"8\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input1\" type=\"exec\" id=\"11\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"8\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 200, + "aspect": "fixed", + "title": "Store Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"loop_name\" type=\"Loop\" id=\"2\"><mxCell 
style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"200\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"160\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"7\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"it\" type=\"exec\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"out\" type=\"exec\" id=\"10\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 200, + "aspect": "fixed", + "title": "Loop Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type% %file%\" placeholders=\"1\" name=\"schedule_name\" type=\"Schedule\" file=\"schedule_page-name\" id=\"2\"><mxCell 
style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 100, + "aspect": "fixed", + "title": "Schedule Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"print_name\" type=\"Print\" event=\"False\" custom=\"{{ data }}\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"150\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" 
vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"110\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"-70\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 160, + "h": 150, + "aspect": "fixed", + "title": "Print Node" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"out\" type=\"exec\" id=\"2\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 60, + "h": 40, + "aspect": "fixed", + "title": "Exec Gate" + }, + { + "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"in\" type=\"data\" id=\"2\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>", + "w": 60, + "h": 40, + "aspect": "fixed", + "title": "Data Gate" + } +] \ No newline at end of file diff --git a/transformation/schedule/schedule_lib/__init__.py b/transformation/schedule/schedule_lib/__init__.py new file mode 100644 index 0000000..4df5a3d --- /dev/null +++ b/transformation/schedule/schedule_lib/__init__.py @@ -0,0 +1,31 @@ +from .action import Action +from .data_node import DataNode +from .end import End +from .exec_node import ExecNode +from .loop import Loop +from .match import Match +from .merge import Merge +from .modify import Modify +from .null_node import NullNode +from .print import Print +from .rewrite import Rewrite +from .start import Start +from .store import Store +from .sub_schedule import SubSchedule + +__all__ = [ + "Action", + "DataNode", + "End", + "ExecNode", + "Loop", + "Match", + "Merge", + "Modify", + "NullNode", + "Rewrite", + "Print", + "Start", + "Store", + 
"SubSchedule", +] diff --git a/transformation/schedule/schedule_lib/action.py b/transformation/schedule/schedule_lib/action.py new file mode 100644 index 0000000..9f10406 --- /dev/null +++ b/transformation/schedule/schedule_lib/action.py @@ -0,0 +1,106 @@ +from typing import List, override, Type + +from jinja2 import Template + +from api.od import ODAPI +from .funcs import not_visited, generate_dot_node +from .exec_node import ExecNode +from .data_node import DataNode + +class ActionState: + def __init__(self): + self.var = {"output_gate": "out"} + +class Action(ExecNode, DataNode): + def __init__( + self, + ports_exec_in: list[str], + ports_exec_out: list[str], + ports_data_in: list[str], + ports_data_out: list[str], + code: str = "", + init: str = "", + ) -> None: + self.gates: tuple[list[str], list[str], list[str], list[str]] = (ports_exec_in, ports_exec_out, ports_data_in, ports_data_out) + super().__init__() + self.state: dict[int, ActionState] = {} + self.var_globals = {} + self.code = code + self.init = init + + @override + def get_exec_input_gates(self) -> list[str]: + return self.gates[0] + + @override + def get_exec_output_gates(self) -> list[str]: + return self.gates[1] + + @override + def get_data_input_gates(self) -> list[str]: + return self.gates[2] + + @override + def get_data_output_gates(self) -> list[str]: + return self.gates[3] + + @override + def nextState(self, exec_id: int) -> tuple["ExecNode", str]: + state = self.get_state(exec_id) + return self.next_node[state.var["output_gate"]] + + def get_state(self, exec_id) -> ActionState: + return self.state[exec_id] + + @override + def generate_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state[exec_id] = (state := ActionState()) + if self.init: + exec (self.init, {"var": state.var}, {"globals": self.var_globals}) + @override + def delete_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state.pop(exec_id) + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + state = self.get_state(exec_id) + exec( + self.code, + { + "api": od, + "var": state.var, + "data_in": {port: value.get_data(exec_id) for port, value in self.data_in.items() if value is not None}, + "data_out": {port: value.get_data(exec_id) for port, value in self.data_out.items() if value is not None}, + "globals": self.var_globals, + }, + ) + for gate, d in self.data_out.items(): + DataNode.input_event(self, gate, exec_id) + return None + + def input_event(self, gate: str, exec_id: int) -> None: + return + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"action", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + }, + ) + ExecNode.generate_dot(self, nodes, edges, visited, template) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/data.py b/transformation/schedule/schedule_lib/data.py new file mode 100644 index 0000000..7cafc5b --- /dev/null +++ b/transformation/schedule/schedule_lib/data.py @@ -0,0 +1,83 @@ +from symtable import Class +from typing import Any, Generator, Callable, Iterator, TYPE_CHECKING, override + +if TYPE_CHECKING: + from transformation.schedule.schedule_lib import DataNode + + +class DataState: + def 
__init__(self, data: Any): + self.data: list[dict[Any, Any]] = [] + +class Data: + __slots__ = ("state", "_parent") + + def __init__(self, parent: "DataNode") -> None: + self.state: dict[int, DataState] = dict() + self._parent = parent + + def __dir__(self): + return [attr for attr in super().__dir__() if attr != "_super"] + + def get_data(self, exec_id: int) -> list[dict[str, str]]: + state = self.get_state(exec_id) + return state.data + + def get_state(self, exec_id) -> DataState: + return self.state[exec_id] + + def store_data(self, exec_id: int, data_gen: Generator, n: int) -> bool: + state = self.get_state(exec_id) + state.data.clear() + if n == 0: + return True + i: int = 0 + while (match := next(data_gen, None)) is not None: + state.data.append(match) + i += 1 + if i >= n: + break + else: + if n == float("inf"): + return bool(len(state.data)) + state.data.clear() + return False + return True + + def get_parent(self) -> "DataNode": + return self._parent + + def replace(self, exec_id: int, data: list[dict[str, str]]) -> None: + state = self.get_state(exec_id) + state.data.clear() + state.data.extend(data) + + def append(self, exec_id: int, data: dict[str, str]) -> None: + self.get_state(exec_id).data.append(data) + + def extend(self, exec_id: int, data: list[dict[str, str]]) -> None: + self.get_state(exec_id).data.extend(data) + + def clear(self, exec_id: int) -> None: + self.get_state(exec_id).data.clear() + + def pop(self, exec_id: int, index: int =-1) -> Any: + return self.get_state(exec_id).data.pop(index) + + def empty(self, exec_id: int) -> bool: + return len(self.get_state(exec_id).data) == 0 + + def __getitem__(self, index): + raise NotImplementedError + + def __iter__(self, exec_id: int) -> Iterator[dict[str, str]]: + return self.get_state(exec_id).data.__iter__() + + def __len__(self, exec_id: int) -> int: + return self.get_state(exec_id).data.__len__() + + def generate_stack_frame(self, exec_id: int) -> None: + self.state[exec_id] = DataState(exec_id) + + def delete_stack_frame(self, exec_id: int) -> None: + self.state.pop(exec_id) \ No newline at end of file diff --git a/transformation/schedule/schedule_lib/data_node.py b/transformation/schedule/schedule_lib/data_node.py new file mode 100644 index 0000000..01e9b76 --- /dev/null +++ b/transformation/schedule/schedule_lib/data_node.py @@ -0,0 +1,101 @@ +from abc import abstractmethod +from typing import Any, Generator, List, override + +from jinja2 import Template + +from .data import Data +from .funcs import generate_dot_edge +from .node import Node + + +class DataNodeState: + def __init__(self) -> None: + super().__init__() + + +class DataNode(Node): + def __init__(self) -> None: + super().__init__() + self.eventsub: dict[str, list[tuple[DataNode, str]]] = { + gate: [] for gate in self.get_data_output_gates() + } + self.data_out: dict[str, Data] = { + name: Data(self) for name in self.get_data_output_gates() + } + self.data_in: dict[str, Data | None] = { + name: None for name in self.get_data_input_gates() + } + + @staticmethod + def get_data_input_gates() -> List[str]: + return ["in"] + + @staticmethod + def get_data_output_gates() -> List[str]: + return ["out"] + + @override + def generate_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + for d in self.data_out.values(): + d.generate_stack_frame(exec_id) + + @override + def delete_stack_frame(self, exec_id: int) -> None: + super().delete_stack_frame(exec_id) + for d in self.data_out.values(): + d.delete_stack_frame(exec_id) + + def 
connect_data( + self, data_node: "DataNode", from_gate: str, to_gate: str, eventsub=True + ) -> None: + if from_gate not in self.get_data_output_gates(): + raise Exception(f"from_gate {from_gate} is not a valid port") + if to_gate not in data_node.get_data_input_gates(): + raise Exception(f"to_gate {to_gate} is not a valid port") + data_node.data_in[to_gate] = self.data_out[from_gate] + if eventsub: + self.eventsub[from_gate].append((data_node, to_gate)) + + def store_data(self, exec_id, data_gen: Generator, port: str, n: int) -> None: + self.data_out[port].store_data(exec_id, data_gen, n) + for sub, gate in self.eventsub[port]: + sub.input_event(gate, exec_id) + + def get_input_data(self, gate: str, exec_id: int) -> list[dict[Any, Any]]: + data = self.data_in[gate] + if data is None: + return [{}] + return data.get_data(exec_id) + + @abstractmethod + def input_event(self, gate: str, exec_id: int) -> None: + for sub, gate_sub in self.eventsub[gate]: + sub.input_event(gate_sub, exec_id) + + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + for port, data in self.data_in.items(): + if data is not None: + source = data.get_parent() + generate_dot_edge( + source, + self, + edges, + template, + kwargs={ + "prefix": "d", + "from_gate": [ + port + for port, value in source.data_out.items() + if value == data + ][0], + "to_gate": port, + "color": "green", + }, + ) + data.get_parent().generate_dot(nodes, edges, visited, template) + for gate_form, subs in self.eventsub.items(): + for sub, gate in subs: + sub.generate_dot(nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/end.py b/transformation/schedule/schedule_lib/end.py new file mode 100644 index 0000000..a0218d8 --- /dev/null +++ b/transformation/schedule/schedule_lib/end.py @@ -0,0 +1,80 @@ +from typing import List, override, Type + +from jinja2 import Template + +from api.od import ODAPI +from . 
import DataNode +from .exec_node import ExecNode +from .funcs import not_visited, generate_dot_node + +class EndState: + def __init__(self) -> None: + self.end_gate: str = "" + +class End(ExecNode, DataNode): + @override + def input_event(self, gate: str, exec_id: int) -> None: + pass + + def __init__(self, ports_exec: List[str], ports_data: List[str]) -> None: + self.ports_exec = ports_exec + self.ports_data = ports_data + super().__init__() + self.state: dict[int, EndState] = {} + + @override + def get_exec_input_gates(self): + return self.ports_exec + + @staticmethod + @override + def get_exec_output_gates(): + return [] + + @override + def get_data_input_gates(self): + return self.ports_data + + @staticmethod + @override + def get_data_output_gates(): + return [] + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + state = self.get_state(exec_id) + state.end_gate = port + return 1, {"exec_gate": state.end_gate, "data_out": {port: data.get_data(exec_id) for port, data in self.data_in.items()}} + + def get_state(self, exec_id) -> EndState: + return self.state[exec_id] + + @override + def generate_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state[exec_id] = EndState() + + @override + def delete_stack_frame(self, exec_id: int) -> None: + super().delete_stack_frame(exec_id) + self.state.pop(exec_id) + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": "end", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + } + ) diff --git a/transformation/schedule/schedule_lib/exec_node.py b/transformation/schedule/schedule_lib/exec_node.py new file mode 100644 index 0000000..ea1cc8b --- /dev/null +++ b/transformation/schedule/schedule_lib/exec_node.py @@ -0,0 +1,61 @@ +from abc import abstractmethod +from typing import override +from jinja2 import Template + +from api.od import ODAPI +from .funcs import generate_dot_edge +from .node import Node + + +class ExecNode(Node): + def __init__(self) -> None: + super().__init__() + + from .null_node import NullNode + self.next_node: dict[str, tuple[ExecNode, str]] = {} + for port in self.get_exec_output_gates(): + self.next_node[port] = (NullNode(), "in") + + def nextState(self, exec_id: int) -> tuple["ExecNode", str]: + return self.next_node["out"] + + @staticmethod + def get_exec_input_gates(): + return ["in"] + + @staticmethod + def get_exec_output_gates(): + return ["out"] + + def connect(self, next_state: "ExecNode", from_gate: str, to_gate: str) -> None: + if from_gate not in self.get_exec_output_gates(): + raise Exception(f"from_gate {from_gate} is not a valid port") + if to_gate not in next_state.get_exec_input_gates(): + raise Exception(f"to_gate {to_gate} is not a valid port") + self.next_node[from_gate] = (next_state, to_gate) + + @abstractmethod + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + return None + + @override + def generate_dot( + self, nodes: list[str], edges: list[str], visited: set[int], template: Template + ) -> None: + for out_port, edge in self.next_node.items(): + template.render() + generate_dot_edge( + self, + edge[0], + edges, + template, + kwargs={ + "prefix": "e", + "from_gate": out_port, + "to_gate": edge[1], + "color": "darkblue", + }, + ) + + for 
edge in self.next_node.values(): + edge[0].generate_dot(nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/funcs.py b/transformation/schedule/schedule_lib/funcs.py new file mode 100644 index 0000000..6a01eb0 --- /dev/null +++ b/transformation/schedule/schedule_lib/funcs.py @@ -0,0 +1,56 @@ +from typing import Callable, List + +from jinja2 import Template + +from .singleton import Singleton + + +class IdGenerator(metaclass=Singleton): + exec_id = -1 + node_id = -1 + + @classmethod + def generate_node_id(cls) -> int: + cls.node_id +=1 + return cls.node_id + + @classmethod + def generate_exec_id(cls) -> int: + cls.exec_id += 1 + return cls.exec_id + +def generate_dot_wrap(func) -> Callable: + def wrapper(self, *args, **kwargs) -> str: + nodes = [] + edges = [] + self.reset_visited() + func(self, nodes, edges, *args, **kwargs) + return f"digraph G {{\n\t{"\n\t".join(nodes)}\n\t{"\n\t".join(edges)}\n}}" + + return wrapper + + +def not_visited(func) -> Callable: + def wrapper( + self, nodes: List[str], edges: List[str], visited: set[int], *args, **kwargs + ) -> None: + if self in visited: + return + visited.add(self) + func(self, nodes, edges, visited, *args, **kwargs) + + return wrapper + + +def generate_dot_node(self, nodes: List[str], template: Template, **kwargs) -> None: + nodes.append(template.module.__getattribute__("Node")(**{**kwargs, "id": self.id})) + + +def generate_dot_edge( + self, target, edges: List[str], template: Template, kwargs +) -> None: + edges.append( + template.module.__getattribute__("Edge")( + **{**kwargs, "from_id": self.id, "to_id": target.id} + ) + ) diff --git a/transformation/schedule/schedule_lib/loop.py b/transformation/schedule/schedule_lib/loop.py new file mode 100644 index 0000000..8837080 --- /dev/null +++ b/transformation/schedule/schedule_lib/loop.py @@ -0,0 +1,74 @@ +import functools +from typing import List, Generator, override, Type + +from jinja2 import Template + +from api.od import ODAPI +from .exec_node import ExecNode +from .data_node import DataNode +from .data_node import Data +from .funcs import not_visited, generate_dot_node + +class Loop(ExecNode, DataNode): + def __init__(self) -> None: + super().__init__() + self.cur_data: Data = Data(self) + + @staticmethod + @override + def get_exec_output_gates(): + return ["it", "out"] + + @override + def generate_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.cur_data.generate_stack_frame(exec_id) + + @override + def delete_stack_frame(self, exec_id: int) -> None: + super().delete_stack_frame(exec_id) + self.cur_data.delete_stack_frame(exec_id) + + @override + def nextState(self, exec_id: int) -> tuple[ExecNode, str]: + return self.next_node["out" if self.data_out["out"].empty(exec_id) else "it"] + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + self.data_out["out"].clear(exec_id) + + if not self.cur_data.empty(exec_id): + self.data_out["out"].append(exec_id, self.cur_data.pop(exec_id,0)) + DataNode.input_event(self, "out", exec_id) + return None + + def input_event(self, gate: str, exec_id: int) -> None: + self.cur_data.replace(exec_id, self.get_input_data(gate, exec_id)) + data_o = self.data_out["out"] + if data_o.empty(exec_id): + return + data_o.clear(exec_id) + DataNode.input_event(self, "out", exec_id) + + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ 
+ "label": f"loop", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + }, + ) + ExecNode.generate_dot(self, nodes, edges, visited, template) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/match.py b/transformation/schedule/schedule_lib/match.py new file mode 100644 index 0000000..e0b097f --- /dev/null +++ b/transformation/schedule/schedule_lib/match.py @@ -0,0 +1,67 @@ +from typing import List, override, Type + +from jinja2 import Template + +from api.od import ODAPI +from transformation.schedule.rule_executor import RuleExecutor +from .exec_node import ExecNode +from .data_node import DataNode +from .funcs import not_visited, generate_dot_node + +class Match(ExecNode, DataNode): + def input_event(self, gate: str, exec_id: int) -> None: + pass + + def __init__(self, label: str, n: int | float) -> None: + super().__init__() + self.label: str = label + self.n: int = n + self.rule = None + self.rule_executer: RuleExecutor | None = None + + @override + def nextState(self, exec_id: int) -> tuple[ExecNode, str]: + return self.next_node["fail" if self.data_out["out"].empty(exec_id) else "success"] + + @staticmethod + @override + def get_exec_output_gates(): + return ["success", "fail"] + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + pivot = {} + if self.data_in is not None: + pivot = self.get_input_data("in", exec_id)[0] + # TODO: remove this print + print(f"matching: {self.label}\n\tpivot: {pivot}") + self.store_data( exec_id, + self.rule_executer.match_rule(od.m, self.rule, pivot=pivot), "out", self.n + ) + return None + + def init_rule(self, rule, rule_executer): + self.rule = rule + self.rule_executer = rule_executer + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"match\n{self.label}\nn = {self.n}", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + }, + ) + ExecNode.generate_dot(self, nodes, edges, visited, template) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/merge.py b/transformation/schedule/schedule_lib/merge.py new file mode 100644 index 0000000..d31b809 --- /dev/null +++ b/transformation/schedule/schedule_lib/merge.py @@ -0,0 +1,57 @@ +from typing import List, override, Type + +from jinja2 import Template + +from api.od import ODAPI +from transformation.schedule.rule_executor import RuleExecutor +from . import ExecNode +from .exec_node import ExecNode +from .data_node import DataNode, DataNodeState +from .funcs import not_visited, generate_dot_node + +class Merge(DataNode): + def __init__(self, ports: list[str]) -> None: + self.in_data_ports = ports # ports must be defined before super.__init__ + super().__init__() + self.in_data_ports.reverse() + + @override + def get_data_input_gates(self) -> list[str]: + return self.in_data_ports + + @override + def input_event(self, gate: str, exec_id: int) -> None: + out = self.data_out["out"] + b = (not out.empty(exec_id)) and (self.data_in[gate].empty(exec_id)) + out.clear(exec_id) + if b: + DataNode.input_event(self, "out", exec_id) + return + + # TODO: only first element or all? 
+ if any(data.empty(exec_id) for data in self.data_in.values()): + return + d: dict[str, str] = dict() + for gate in self.in_data_ports: + for key, value in self.data_in[gate].get_data(exec_id)[0].items(): + d[key] = value + out.append(exec_id, d) + DataNode.input_event(self, "out", exec_id) + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"merge", + "ports_data": ( + self.get_data_input_gates()[::-1], + self.get_data_output_gates(), + ), + }, + ) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/modify.py b/transformation/schedule/schedule_lib/modify.py new file mode 100644 index 0000000..ad4859e --- /dev/null +++ b/transformation/schedule/schedule_lib/modify.py @@ -0,0 +1,49 @@ +from typing import List, override + +from jinja2 import Template + +from transformation.schedule.schedule_lib.funcs import not_visited, generate_dot_node +from .data_node import DataNode + + +class Modify(DataNode): + def __init__(self, rename: dict[str, str], delete: dict[str, str]) -> None: + super().__init__() + self.rename: dict[str, str] = rename + self.delete: set[str] = set(delete) + + @override + def input_event(self, gate: str, exec_id: int) -> None: + data_i = self.get_input_data(gate, exec_id) + if len(data_i): + self.data_out["out"].clear(exec_id) + for data in data_i: + self.data_out["out"].append(exec_id, + { + self.rename.get(key, key): value + for key, value in data.items() + if key not in self.delete + } + ) + else: + if self.data_out["out"].empty(exec_id): + return + super().input_event("out", exec_id) + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"modify", + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + }, + ) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/node.py b/transformation/schedule/schedule_lib/node.py new file mode 100644 index 0000000..022c73c --- /dev/null +++ b/transformation/schedule/schedule_lib/node.py @@ -0,0 +1,70 @@ +""" +node.py + +Defines the abstract base Node class for graph-based structures. Each Node is assigned +a unique identifier via an external IdGenerator. The class provides an interface for +managing execution state and generating DOT graph representations. +""" + +from abc import abstractmethod +from jinja2 import Template +from .funcs import IdGenerator + + +class Node: + """ + Abstract base class for graph nodes. Each Node has a unique ID and supports + context-dependent state management for execution scenarios. Subclasses must + implement the DOT graph generation logic. + """ + + @abstractmethod + def __init__(self) -> None: + """ + Initializes the Node instance with a unique ID. + + Attributes: + id (int): A unique identifier assigned by IdGenerator. + """ + self.id: int = IdGenerator.generate_node_id() + + def get_id(self) -> int: + """ + Retrieves the unique identifier of the node. + + Returns: + int: The unique node ID. + """ + return self.id + + def generate_stack_frame(self, exec_id: int) -> None: + """ + Initializes a new state frame for a specific execution context. + Designed to be overridden in subclasses that use execution state. 
+ + Args: + exec_id (int): The ID of the execution context. + """ + + def delete_stack_frame(self, exec_id: int) -> None: + """ + Deletes the state frame for a specific execution context. + Designed to be overridden in subclasses that use execution state. + + Args: + exec_id (int): The ID of the execution context. + """ + + @abstractmethod + def generate_dot( + self, nodes: list[str], edges: list[str], visited: set[int], template: Template + ) -> None: + """ + Generates the DOT graph representation for this node and its relationships. + + Args: + nodes (list[str]): A list to append DOT node definitions to. + edges (list[str]): A list to append DOT edge definitions to. + visited (set[int]): A set of already visited node IDs to avoid duplicates or recursion. + template (Template): A Jinja2 template used to format the node's DOT representation. + """ diff --git a/transformation/schedule/schedule_lib/null_node.py b/transformation/schedule/schedule_lib/null_node.py new file mode 100644 index 0000000..f7c44ad --- /dev/null +++ b/transformation/schedule/schedule_lib/null_node.py @@ -0,0 +1,80 @@ +""" +null_node.py + +Defines the NullNode class, a no-op singleton execution node used for open execution pins +in the object diagram execution graph. +""" + +from abc import ABC +from typing import List, Type +from jinja2 import Template +from api.od import ODAPI +from .funcs import generate_dot_node +from .singleton import Singleton +from .exec_node import ExecNode + +class NullNode(ExecNode, metaclass=Singleton): + """ + A no-op execution node representing a null operation. + + This node is typically used to represent a placeholder or open execution pin. + It always returns a fixed result and does not perform any operation. + """ + + def __init__(self): + """ + Initializes the NullNode instance. + Inherits unique ID and state behavior from ExecNode. + """ + super().__init__() + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + """ + Simulates execution by returning a static result indicating an open pin. + + Args: + port (str): The name of the input port. + exec_id (int): The current execution ID. + od (ODAPI): The Object Diagram API instance providing execution context. + + Returns: + tuple[int, str] | None: A tuple (-1, "open pin reached") indicating a no-op. + """ + return -1, "open pin reached" + + @staticmethod + def get_exec_output_gates(): + """ + Returns the list of output gates for execution. + + Returns: + list: An empty list, as NullNode has no output gates. + """ + return [] + + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + """ + Generates DOT graph representation for this node if it hasn't been visited. + + Args: + nodes (List[str]): A list to accumulate DOT node definitions. + edges (List[str]): A list to accumulate DOT edge definitions. + visited (set[int]): Set of already visited node IDs to avoid cycles. + template (Template): A Jinja2 template used to render the node's DOT representation. 
+ """ + if self.id in visited: + return + generate_dot_node( + self, + nodes, + template, + **{ + "label": "null", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + } + ) diff --git a/transformation/schedule/schedule_lib/print.py b/transformation/schedule/schedule_lib/print.py new file mode 100644 index 0000000..3b237a2 --- /dev/null +++ b/transformation/schedule/schedule_lib/print.py @@ -0,0 +1,60 @@ +from typing import List, override + +from jinja2 import Template + +from api.od import ODAPI +from transformation.schedule.schedule_lib.funcs import not_visited, generate_dot_node +from .exec_node import ExecNode +from .data_node import DataNode + + +class Print(ExecNode, DataNode): + def __init__(self, label: str = "", custom: str = "") -> None: + super().__init__() + self.label = label + + if custom: + template = Template(custom, trim_blocks=True, lstrip_blocks=True) + self._print = ( + lambda self_, exec_id: print(template.render(data=self.get_input_data("in", exec_id))) + ).__get__(self, Print) + + @staticmethod + @override + def get_data_output_gates(): + return [] + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + self._print(exec_id) + return + + @override + def input_event(self, gate: str, exec_id: int) -> None: + if not self.data_in[gate].empty(exec_id): + self._print(exec_id) + + def _print(self, exec_id: int) -> None: + print(f"{self.label}{self.get_input_data("in", exec_id)}") + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"print", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + }, + ) + ExecNode.generate_dot(self, nodes, edges, visited, template) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/rewrite.py b/transformation/schedule/schedule_lib/rewrite.py new file mode 100644 index 0000000..2196d1d --- /dev/null +++ b/transformation/schedule/schedule_lib/rewrite.py @@ -0,0 +1,56 @@ +import functools +from typing import List, Type + +from jinja2 import Template + +from api.od import ODAPI +from .exec_node import ExecNode +from .data_node import DataNode +from .funcs import not_visited, generate_dot_node +from ..rule_executor import RuleExecutor + +class Rewrite(ExecNode, DataNode): + + def __init__(self, label: str) -> None: + super().__init__() + self.label = label + self.rule = None + self.rule_executor: RuleExecutor | None = None + + def init_rule(self, rule, rule_executer): + self.rule = rule + self.rule_executor = rule_executer + + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + pivot = {} + if self.data_in is not None: + pivot = self.get_input_data("in", exec_id)[0] + # TODO: remove print + print(f"rewrite: {self.label}\n\tpivot: {pivot}") + self.store_data( exec_id, + self.rule_executor.rewrite_rule(od, self.rule, pivot=pivot), "out", 1 + ) + return None + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"rewrite\n{self.label}", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + 
self.get_data_output_gates(), + ), + }, + ) + ExecNode.generate_dot(self, nodes, edges, visited, template) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/examples/schedule/schedule_lib/singleton.py b/transformation/schedule/schedule_lib/singleton.py similarity index 99% rename from examples/schedule/schedule_lib/singleton.py rename to transformation/schedule/schedule_lib/singleton.py index 31955e3..91ac5cf 100644 --- a/examples/schedule/schedule_lib/singleton.py +++ b/transformation/schedule/schedule_lib/singleton.py @@ -2,6 +2,7 @@ from abc import ABCMeta class Singleton(ABCMeta): _instances = {} + def __call__(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) diff --git a/transformation/schedule/schedule_lib/start.py b/transformation/schedule/schedule_lib/start.py new file mode 100644 index 0000000..441e95f --- /dev/null +++ b/transformation/schedule/schedule_lib/start.py @@ -0,0 +1,83 @@ +from typing import List, override + +from jinja2 import Template + +from . import DataNode +from .exec_node import ExecNode +from .funcs import not_visited, generate_dot_node + +class StartState: + def __init__(self) -> None: + super().__init__() + self.start_gate: str = "" + +class Start(ExecNode, DataNode): + def __init__(self, ports_exec: List[str], ports_data: List[str]) -> None: + self.state: dict[int, StartState] = {} + self.ports_exec = ports_exec + self.ports_data = ports_data + super().__init__() + + def run_init(self, gate: str, exec_id: int, data: dict[str, any]) -> None: + state = self.get_state(exec_id) + state.start_gate = gate + for port, d in data.items(): + self.data_out[port].replace(exec_id, d) + DataNode.input_event(self, port, exec_id) + + def nextState(self, exec_id: int) -> tuple["ExecNode", str]: + state = self.get_state(exec_id) + return self.next_node[state.start_gate] + + def get_state(self, exec_id) -> StartState: + return self.state[exec_id] + + @override + def generate_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state[exec_id] = StartState() + + @override + def delete_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state.pop(exec_id) + + @staticmethod + @override + def get_exec_input_gates(): + return [] + + @override + def get_exec_output_gates(self): + return self.ports_exec + + @staticmethod + @override + def get_data_input_gates(): + return [] + + @override + def get_data_output_gates(self): + return self.ports_data + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": "start", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + } + ) + super().generate_dot(nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/store.py b/transformation/schedule/schedule_lib/store.py new file mode 100644 index 0000000..4aced26 --- /dev/null +++ b/transformation/schedule/schedule_lib/store.py @@ -0,0 +1,92 @@ +from typing import List, override + +from jinja2 import Template + +from api.od import ODAPI +from .data import Data +from .exec_node import ExecNode +from .data_node import DataNode +from .funcs import not_visited, generate_dot_node + +class StoreState: + def __init__(self) -> None: + self.last_port: str = 
"in" + +class Store(ExecNode, DataNode): + def __init__(self, ports: list[str]) -> None: + self.ports = ports + super().__init__() + self.state: dict[int, StoreState] = {} + self.cur_data: Data = Data(self) + + @override + def get_exec_input_gates(self) -> list[str]: + return [*self.ports, "in"] + + @override + def get_exec_output_gates(self) -> list[str]: + return [*self.ports, "out"] + + @override + def get_data_input_gates(self) -> list[str]: + return self.ports + + @override + def nextState(self, exec_id: int) -> tuple[ExecNode, str]: + return self.next_node[self.get_state(exec_id).last_port] + + @override + def input_event(self, gate: str, exec_id: int) -> None: + return + + def get_state(self, exec_id) -> StoreState: + return self.state[exec_id] + + @override + def generate_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state[exec_id] = StoreState() + self.cur_data.generate_stack_frame(exec_id) + + @override + def delete_stack_frame(self, exec_id: int) -> None: + super().generate_stack_frame(exec_id) + self.state.pop(exec_id) + self.cur_data.delete_stack_frame(exec_id) + + + @override + def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None: + state = self.get_state(exec_id) + if port == "in": + self.data_out["out"].replace(exec_id, self.cur_data.get_data(exec_id)) + self.cur_data.clear(exec_id) + DataNode.input_event(self, "out", True) + state.last_port = "out" + return None + self.cur_data.extend(exec_id, self.get_input_data(port, exec_id)) + state.last_port = port + return None + + @not_visited + def generate_dot( + self, nodes: List[str], edges: List[str], visited: set[int], template: Template + ) -> None: + generate_dot_node( + self, + nodes, + template, + **{ + "label": f"store", + "ports_exec": ( + self.get_exec_input_gates(), + self.get_exec_output_gates(), + ), + "ports_data": ( + self.get_data_input_gates(), + self.get_data_output_gates(), + ), + }, + ) + ExecNode.generate_dot(self, nodes, edges, visited, template) + DataNode.generate_dot(self, nodes, edges, visited, template) diff --git a/transformation/schedule/schedule_lib/sub_schedule.py b/transformation/schedule/schedule_lib/sub_schedule.py new file mode 100644 index 0000000..048658c --- /dev/null +++ b/transformation/schedule/schedule_lib/sub_schedule.py @@ -0,0 +1,107 @@ +from typing import List, override, TYPE_CHECKING + +from jinja2 import Template + +from api.od import ODAPI +from . 
+from .exec_node import ExecNode
+from .funcs import not_visited, generate_dot_node, IdGenerator
+
+if TYPE_CHECKING:
+    from ..rule_scheduler import RuleScheduler
+
+
+class ScheduleState:
+    def __init__(self) -> None:
+        self.end_gate: str = ""
+
+class SubSchedule(ExecNode, DataNode):
+    def __init__(self, scheduler: "RuleScheduler", file: str) -> None:
+        self.schedule = scheduler._load_schedule(file, _main=False)
+        self.scheduler = scheduler
+        super().__init__()
+        self.state: dict[int, ScheduleState] = {}
+
+    @override
+    def nextState(self, exec_id: int) -> tuple["ExecNode", str]:
+        return self.next_node[self.get_state(exec_id).end_gate]
+
+    @override
+    def get_exec_input_gates(self) -> List[str]:
+        return self.schedule.start.get_exec_output_gates()
+
+    @override
+    def get_exec_output_gates(self) -> List[str]:
+        return [*self.schedule.end.get_exec_input_gates()]
+
+    @override
+    def get_data_input_gates(self) -> List[str]:
+        return self.schedule.start.get_data_output_gates()
+
+    @override
+    def get_data_output_gates(self) -> List[str]:
+        return self.schedule.end.get_data_input_gates()
+
+    def get_state(self, exec_id) -> ScheduleState:
+        return self.state[exec_id]
+
+    @override
+    def generate_stack_frame(self, exec_id: int) -> None:
+        super().generate_stack_frame(exec_id)
+        self.state[exec_id] = ScheduleState()
+
+    @override
+    def delete_stack_frame(self, exec_id: int) -> None:
+        super().delete_stack_frame(exec_id)
+        self.state.pop(exec_id)
+
+
+    @override
+    def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
+        runstatus, result = self.scheduler._runner(
+            od,
+            self.schedule,
+            port,
+            IdGenerator.generate_exec_id(),
+            {
+                port: self.get_input_data(port, exec_id)
+                for port, value in self.data_in.items()
+                if value is not None and not value.empty(exec_id)
+            },
+        )
+        if runstatus != 1:
+            return runstatus, result
+        self.get_state(exec_id).end_gate = result["exec_gate"]
+        results_data = result["data_out"]
+        for port, data in self.data_out.items():
+            if port in results_data:
+                self.data_out[port].replace(exec_id, results_data[port])
+                DataNode.input_event(self, port, exec_id)
+                continue
+
+            if not data.empty(exec_id):
+                data.clear(exec_id)
+                DataNode.input_event(self, port, exec_id)
+        return None
+
+    @not_visited
+    def generate_dot(
+        self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+    ) -> None:
+        generate_dot_node(
+            self,
+            nodes,
+            template,
+            **{
+                "label": "sub_schedule",
+                "ports_exec": (
+                    self.get_exec_input_gates(),
+                    self.get_exec_output_gates(),
+                ),
+                "ports_data": (
+                    self.get_data_input_gates(),
+                    self.get_data_output_gates(),
+                ),
+            }
+        )
+        super().generate_dot(nodes, edges, visited, template)
diff --git a/transformation/schedule/templates/schedule_dot.j2 b/transformation/schedule/templates/schedule_dot.j2
new file mode 100644
index 0000000..ca715dc
--- /dev/null
+++ b/transformation/schedule/templates/schedule_dot.j2
@@ -0,0 +1,65 @@
+digraph G {
+    rankdir=LR;
+    compound=true;
+    node [shape=rect];
+{% for node in nodes %}
+    {{ node }}
+{% endfor %}
+
+{% for edge in edges %}
+    {{ edge }}
+{% endfor %}
+}
+
+{% macro Node(label, id, ports_exec=[], ports_data=[], debug = False) %}
+subgraph cluster_{{ id }} {
+    label = "
+    {%- if debug %}
+    {{ id }}_
+    {%- endif -%}
+    {{ label }}"
+
+    style = rounded;
+    input_{{ id }} [
+        shape=rect;
+        label= {{ Gate_Table(ports_exec[0], ports_data[0]) }}
+    ];
+    output_{{ id }} [
+        shape=rect;
+        label= {{ Gate_Table(ports_exec[1], ports_data[1]) }}
+    ];
+    input_{{ id }}->output_{{ id }} [style=invis];
+}
+{%- endmacro %}
+
+{%- macro Edge(from_id, to_id, from_gate, to_gate, prefix, color) %}
+output_{{ from_id }}:{{ prefix }}_{{ from_gate }} -> input_{{ to_id }}:{{ prefix }}_{{ to_gate }} [color = {{ color }}]
+{%- endmacro %}
+
+{%- macro Gate_Table(ports_exec, ports_data) %}
+
+    <
+    {% if ports_exec or ports_data %}
+    {% if ports_exec %}
+
+
+    {% for port_e in ports_exec %}
+    {{ port_e }}
+    {% endfor %}
+
+
+    {% endif %}
+    {% if ports_data %}
+
+
+    {% for port_d in ports_data %}
+    {{ port_d }}
+    {% endfor %}
+
+
+    {% endif %}
+    {% else %}
+
+    {% endif %}
+    >
+{%- endmacro %}
\ No newline at end of file
diff --git a/transformation/schedule/templates/schedule_muMLE.j2 b/transformation/schedule/templates/schedule_muMLE.j2
new file mode 100644
index 0000000..624b203
--- /dev/null
+++ b/transformation/schedule/templates/schedule_muMLE.j2
@@ -0,0 +1,28 @@
+{% for id, param in nodes.items() -%}
+{{ param[0] }}:{{ param[1].pop("type") }}
+    {%- if param[1] %}
+    {
+    {% for key, value in param[1].items() %}
+    {% if value %}
+    {% if key in ["file"] %}
+    {% set value = '"' ~ value ~ '"' %}
+    {% elif key in ["custom"] %}
+    {% set value = '`"' ~ value.replace('\n', '\\n') ~ '"`' %}
+    {% elif key in ["action", "init"] %}
+    {% set value = '\n```\n' ~ value ~ '\n```' %}
+    {% elif key in ["ports", "ports_exec_in", "ports_exec_out", "ports_data_in", "ports_data_out", "rename", "delete"] %}
+    {% set value = '`' ~ value.replace('\n', '\\n') ~ '`' %}
+    {% endif %}
+    {{ key }} = {{ value }};
+    {% endif %}
+    {% endfor %}
+}
+    {% endif %}
+
+{% endfor %}
+
+{%- for edge in edges %}
+    {% set source = edge[0] %}
+    {% set target = edge[1] %}
+:Conn_{{ source[2] }} ({{ source[0] }} -> {{ target[0] }}) {from="{{ source[1] }}"; to="{{ target[1] }}";}
+{% endfor -%}
\ No newline at end of file
diff --git a/transformation/schedule/templates/schedule_template.j2 b/transformation/schedule/templates/schedule_template.j2
new file mode 100644
index 0000000..e696681
--- /dev/null
+++ b/transformation/schedule/templates/schedule_template.j2
@@ -0,0 +1,51 @@
+{% macro Start(name, ports_exec_out, ports_data_out) %}
+{{ name }} = Start({{ ports_exec_out }}, {{ ports_data_out }})
+{%- endmacro %}
+
+{% macro End(name, ports_exec_in, ports_data_in) %}
+{{ name }} = End({{ ports_exec_in }}, {{ ports_data_in }})
+{%- endmacro %}
+
+{% macro Match(name, file, n) %}
+{{ name }} = Match("{{ file }}", {{ n }})
+{%- endmacro %}
+
+{% macro Rewrite(name, file) %}
+{{ name }} = Rewrite("{{ file }}")
+{%- endmacro %}
+
+{% macro Action(name, ports_exec_in, ports_exec_out, ports_data_in, ports_data_out, action, init) %}
+{{ name }} = Action({{ ports_exec_in }}, {{ ports_exec_out }}, {{ ports_data_in }}, {{ ports_data_out }}, {{ action }}, {{ init }})
+{%- endmacro %}
+
+{% macro Modify(name, rename, delete) %}
+{{ name }} = Modify({{ rename }}, {{ delete }})
+{%- endmacro %}
+
+{% macro Merge(name, ports_data_in) %}
+{{ name }} = Merge({{ ports_data_in }})
+{%- endmacro %}
+
+{% macro Store(name, ports) %}
+{{ name }} = Store({{ ports }})
+{%- endmacro %}
+
+{% macro Schedule(name, file) %}
+{{ name }} = SubSchedule(scheduler, "{{ file }}")
+{%- endmacro %}
+
+{% macro Loop(name) %}
+{{ name }} = Loop()
+{%- endmacro %}
+
+{% macro Print(name, label, custom) %}
+{{ name }} = Print("{{ label }}", {{ custom }})
+{%- endmacro %}
+
+{% macro Conn_exec(name_from, name_to, from, to) %}
+{{ name_from }}.connect({{ name_to }},"{{ from }}","{{ to }}")
+{%- endmacro %}
+
+{% macro Conn_data(name_from, name_to, from, to, event) %}
+{{ name_from }}.connect_data({{ name_to }}, "{{ from }}", "{{ to }}", {{ event }})
+{%- endmacro %}
\ No newline at end of file
diff --git a/transformation/schedule/templates/schedule_template_wrap.j2 b/transformation/schedule/templates/schedule_template_wrap.j2
new file mode 100644
index 0000000..d1e8dfc
--- /dev/null
+++ b/transformation/schedule/templates/schedule_template_wrap.j2
@@ -0,0 +1,48 @@
+# Auto-generated schedule wrapper, rendered from schedule_template_wrap.j2. Do not edit by hand.
+
+from transformation.schedule.schedule_lib import *
+
+class Schedule:
+    def __init__(self):
+        self.start: Start | None = None
+        self.end: End | None = None
+        self.nodes: list[DataNode] = []
+
+    @staticmethod
+    def get_matchers():
+        return [
+            {% for file in match_files %}
+            "{{ file }}",
+            {% endfor %}
+        ]
+
+    def init_schedule(self, scheduler, rule_executer, matchers):
+        {% for block in blocks_start_end%}
+        {{ block }}
+        {% endfor %}
+        self.start = {{ start }}
+        self.end = {{ end }}
+        {% for block in blocks%}
+        {{ block }}
+        {% endfor %}
+
+        {% for conn in exec_conn%}
+        {{ conn }}
+        {% endfor %}
+        {% for conn_d in data_conn%}
+        {{ conn_d }}
+        {% endfor %}
+
+        {% for match in matchers %}
+        {{ match["name"] }}.init_rule(matchers["{{ match["file"] }}"], rule_executer)
+        {% endfor %}
+
+        self.nodes = [
+            {% for name in blocks_name%}
+            {{ name }},
+            {% endfor %}
+        ]
+        return None
+
+    def generate_dot(self, *args, **kwargs):
+        return self.start.generate_dot(*args, **kwargs)
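For orientation, the snippet below sketches (outside the diff) what a rendered schedule_template_wrap.j2 boils down to for a trivial Start -> Print -> End schedule. The Start and Print constructors are taken from the classes above, the End constructor and the connect()/connect_data() call shapes from the End, Conn_exec and Conn_data macros; the "in"/"out" exec gate names on the Print node are an assumption about the ExecNode defaults, which are not shown in this diff.

# Hand-written equivalent of generated wrapper output (sketch only, not produced by the templates).
from transformation.schedule.schedule_lib import Start, End, Print

start = Start(["out"], [])      # one exec output gate, no data gates
printer = Print("result: ")     # plain Print node, no custom Jinja template
end = End(["in"], [])           # one exec input gate, no data gates

# Wiring in the form emitted by the Conn_exec macro: from_node.connect(to_node, "from_gate", "to_gate").
start.connect(printer, "out", "in")   # assumes default "in"/"out" exec gates on Print
printer.connect(end, "out", "in")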