diff --git a/README.md b/README.md
index 1171f46..9c054a8 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,7 @@ Features:
- Class Diagrams (self-conforming)
- Causal Block Diagrams language
- Petri Net language
+ - [Repotting the Geraniums](https://ris.utwente.nl/ws/portalfiles/portal/5312315/gtvmt2009.pdf)
## Dependencies
@@ -26,7 +27,10 @@ Features:
The following branches exist:
- * `mde2425` - the branch containing a snapshot of the repo used for the MDE assignments 24-25. No breaking changes will be pushed here. After the re-exams (Sep 2025), this branch will be frozen.
- * `master` - currently equivalent to `mde2425` (this is the branch that was cloned by the students). This branch will be deleted after Sep 2025, because the name is too vague.
* `development` - in this branch, new development will occur, primarily cleaning up the code to prepare for next year's MDE classes.
+ * `mde2425` - contains a snapshot of the repo used for the MDE assignments 24-25. This branch should remain frozen.
+
+## Tutorial
+
+A good place to learn how to use muMLE is the `tutorial` directory: each file is an executable Python script that explains muMLE step by step (read the comments).
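
A minimal sketch of the kind of script found in `tutorial/`, assembled from the API used in the examples removed further down in this diff (the one-class meta-model and model texts are illustrative):

    # Minimal sketch; assumes the muMLE repository root is on PYTHONPATH.
    from state.devstate import DevState
    from bootstrap.scd import bootstrap_scd
    from framework.conformance import Conformance, render_conformance_check_result
    from concrete_syntax.textual_od import parser

    state = DevState()
    scd_mmm = bootstrap_scd(state)  # bootstrap the self-conforming class-diagram meta-meta-model

    # Parse a one-class meta-model, then a model conforming to it
    mm = parser.parse_od(state, m_text="Bear:Class", mm=scd_mmm)
    m = parser.parse_od(state, m_text="bear1:Bear", mm=mm)

    conf = Conformance(state, m, mm)
    print(render_conformance_check_result(conf.check_nominal()))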
diff --git a/TODO.txt b/TODO.txt
new file mode 100644
index 0000000..bf1e3d4
--- /dev/null
+++ b/TODO.txt
@@ -0,0 +1,46 @@
+Things that need to be cleaned up:
+
+ - At several places in the code, it is assumed that from the root node, there is an edge labeled 'SCD' containing the self-conforming meta-meta-model. It would be better for parts of the code that need the meta-meta-model to receive this model as a (function) parameter.
+
+ - The whole 'ModelRef'-construct does not work as originally foreseen. It is currently only used for attributes of primitive types, where it unnecessarily complicates things. Better to get rid of it.
+
+
+Known bugs:
+ - Cannot parse negative numbers
+
+
+ - When merging models, the model element names must not overlap. Maybe allow some kind of prefixing of the overlapping names? But then porting existing models to the merged models becomes difficult if the type names have changed...
+
+
+
+Merging (meta-)models is a nightmare:
+
+ - Prefixing the type names (to avoid naming collisions) is not an option:
+ (*) constraints (and transformation rules) already contain API calls that mention type names -> all of these would break
+ (*) don't want to prefix primitive types like "Integer", "String", ... because the existing code already assumes these exact names
+
+ - Not prefixing the type names leads to naming collisions, even if names are carefully chosen:
+ (*) anonymous names, e.g., Inheritance-links still result in naming collisions (requiring auto-renaming?)
+
+
+Feature requests:
+
+ - Support custom functions in 'conditions'
+
+ - When matching an edge, allow matching 'any' src/tgt
+
+ - Support 'return'-statement in conditions? (just makes syntax nicer)
+
+ - RAMification / matching: add `match_subtypes` attribute to each RAMified class.
+
+ - Separate script for running LHS (+NAC) on any model, and visualizing the match.
+
+ - Syntax highlighting:
+ most students use:
+ - VS Code
+ - PyCharm
+      I use:
+ - Sublime Text
+ nobody uses:
+ - Eclipse
+
diff --git a/api/__init__.py b/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/api/cd.py b/api/cd.py
index d18f7b0..16168d6 100644
--- a/api/cd.py
+++ b/api/cd.py
@@ -53,7 +53,7 @@ class CDAPI:
return self.bottom.read_outgoing_elements(self.m, type_name)[0]
def is_direct_subtype(self, super_type_name: str, sub_type_name: str):
- return sub_type_name in self.direct_sub_types[super_type]
+ return sub_type_name in self.direct_sub_types[super_type_name]
def is_direct_supertype(self, sub_type_name: str, super_type_name: str):
return super_type_name in self.direct_super_types[sub_type_name]
@@ -83,3 +83,6 @@ class CDAPI:
result = self.find_attribute_type(supertype, attr_name)
if result != None:
return result
+
+ def get_type(self, type_name: str):
+ return next(k for k, v in self.type_model_names.items() if v == type_name)
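
The new `get_type` helper performs a reverse lookup on `type_model_names`; a standalone sketch of the same pattern, with a hypothetical mapping standing in for the real one (note that `next(...)` without a default raises `StopIteration` for unknown type names):

    # Hypothetical stand-in: type_model_names maps a type object (here a string
    # instead of a UUID) to its type name.
    type_model_names = {"uuid-of-Animal": "Animal", "uuid-of-Bear": "Bear"}

    def get_type(type_name: str):
        # First key whose value equals type_name; StopIteration if absent.
        return next(k for k, v in type_model_names.items() if v == type_name)

    assert get_type("Bear") == "uuid-of-Bear"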
diff --git a/api/od.py b/api/od.py
index c23160d..cfaa049 100644
--- a/api/od.py
+++ b/api/od.py
@@ -6,8 +6,7 @@ from services.primitives.integer_type import Integer
from services.primitives.string_type import String
from services.primitives.actioncode_type import ActionCode
from uuid import UUID
-from typing import Optional
-from util.timer import Timer
+from typing import Optional, Any
NEXT_ID = 0
@@ -42,10 +41,10 @@ class ODAPI:
self.create_string_value = self.od.create_string_value
self.create_actioncode_value = self.od.create_actioncode_value
- self.__recompute_mappings()
+ self.recompute_mappings()
# Called after every change - makes querying faster but modifying slower
- def __recompute_mappings(self):
+ def recompute_mappings(self):
self.m_obj_to_name = build_name_mapping(self.state, self.m)
self.mm_obj_to_name = build_name_mapping(self.state, self.mm)
self.type_to_objs = { type_name : set() for type_name in self.bottom.read_keys(self.mm)}
@@ -60,25 +59,33 @@ class ODAPI:
def get_value(self, obj: UUID):
return od.read_primitive_value(self.bottom, obj, self.mm)[0]
- def get_target(self, link: UUID):
+ def get_target(self, link: UUID) -> UUID:
return self.bottom.read_edge_target(link)
- def get_source(self, link: UUID):
+ def get_source(self, link: UUID) -> UUID:
return self.bottom.read_edge_source(link)
- def get_slot(self, obj: UUID, attr_name: str):
+ def get_slot(self, obj: UUID, attr_name: str) -> UUID:
slot = self.od.get_slot(obj, attr_name)
if slot == None:
raise NoSuchSlotException(f"Object '{self.m_obj_to_name[obj]}' has no slot '{attr_name}'")
return slot
- def get_slot_link(self, obj: UUID, attr_name: str):
+ def get_slot_link(self, obj: UUID, attr_name: str) -> UUID:
return self.od.get_slot_link(obj, attr_name)
# Parameter 'include_subtypes': whether to include subtypes of the given association
- def get_outgoing(self, obj: UUID, assoc_name: str, include_subtypes=True):
+ def get_outgoing(self, obj: UUID, assoc_name: str, include_subtypes=True) -> list[UUID]:
outgoing = self.bottom.read_outgoing_edges(obj)
- result = []
+ return self.filter_edges_by_type(outgoing, assoc_name, include_subtypes)
+
+ # Parameter 'include_subtypes': whether to include subtypes of the given association
+ def get_incoming(self, obj: UUID, assoc_name: str, include_subtypes=True):
+ incoming = self.bottom.read_incoming_edges(obj)
+ return self.filter_edges_by_type(incoming, assoc_name, include_subtypes)
+
+ def filter_edges_by_type(self, outgoing: list[UUID], assoc_name: str, include_subtypes=True) -> list[UUID]:
+ result: list[UUID] = []
for o in outgoing:
try:
type_of_outgoing_link = self.get_type_name(o)
@@ -89,23 +96,8 @@ class ODAPI:
result.append(o)
return result
-
- # Parameter 'include_subtypes': whether to include subtypes of the given association
- def get_incoming(self, obj: UUID, assoc_name: str, include_subtypes=True):
- incoming = self.bottom.read_incoming_edges(obj)
- result = []
- for i in incoming:
- try:
- type_of_incoming_link = self.get_type_name(i)
- except:
- continue # OK, not all edges are typed
- if (include_subtypes and self.cdapi.is_subtype(super_type_name=assoc_name, sub_type_name=type_of_incoming_link)
- or not include_subtypes and type_of_incoming_link == assoc_name):
- result.append(i)
- return result
-
# Returns list of tuples (name, obj)
- def get_all_instances(self, type_name: str, include_subtypes=True):
+    def get_all_instances(self, type_name: str, include_subtypes=True) -> list[tuple[str, UUID]]:
if include_subtypes:
all_types = self.cdapi.transitive_sub_types[type_name]
else:
@@ -127,7 +119,7 @@ class ODAPI:
else:
raise Exception(f"Couldn't find name of {obj} - are you sure it exists in the (meta-)model?")
- def get(self, name: str):
+ def get(self, name: str) -> UUID:
results = self.bottom.read_outgoing_elements(self.m, name)
if len(results) == 1:
return results[0]
@@ -136,30 +128,32 @@ class ODAPI:
else:
raise Exception(f"No such element in model: '{name}'")
- def get_type_name(self, obj: UUID):
+ def get_type_name(self, obj: UUID) -> str:
return self.get_name(self.get_type(obj))
- def is_instance(self, obj: UUID, type_name: str, include_subtypes=True):
+ def is_instance(self, obj: UUID, type_name: str, include_subtypes=True) -> bool:
typ = self.cdapi.get_type(type_name)
types = set(typ) if not include_subtypes else self.cdapi.transitive_sub_types[type_name]
for type_of_obj in self.bottom.read_outgoing_elements(obj, "Morphism"):
- if type_of_obj in types:
+ if self.get_name(type_of_obj) in types:
return True
return False
- def delete(self, obj: UUID):
+ def delete(self, obj: UUID) -> None:
self.bottom.delete_element(obj)
- self.__recompute_mappings()
+ self.recompute_mappings()
- # Does the class of the object have the given attribute?
- def has_slot(self, obj: UUID, attr_name: str):
+    # Does the object have the given attribute?
+ def has_slot(self, obj: UUID, attr_name: str) -> bool:
class_name = self.get_name(self.get_type(obj))
- return self.od.get_attr_link_name(class_name, attr_name) != None
+ if self.od.get_attr_link_name(class_name, attr_name) is None:
+ return False
+ return self.od.get_slot_link(obj, attr_name) is not None
def get_slots(self, obj: UUID) -> list[str]:
return [attr_name for attr_name, _ in self.od.get_slots(obj)]
- def get_slot_value(self, obj: UUID, attr_name: str):
+ def get_slot_value(self, obj: UUID, attr_name: str) -> Any:
slot = self.get_slot(obj, attr_name)
return self.get_value(slot)
@@ -172,14 +166,14 @@ class ODAPI:
# Returns the given default value if the slot does not exist on the object.
# The attribute must exist in the object's class, or an exception will be thrown.
# The slot may not exist however, if the attribute is defined as 'optional' in the class.
- def get_slot_value_default(self, obj: UUID, attr_name: str, default: any):
+    def get_slot_value_default(self, obj: UUID, attr_name: str, default: Any) -> Any:
try:
return self.get_slot_value(obj, attr_name)
except NoSuchSlotException:
return default
# create or update slot value
- def set_slot_value(self, obj: UUID, attr_name: str, new_value: any, is_code=False):
+    def set_slot_value(self, obj: UUID, attr_name: str, new_value: Any, is_code=False) -> None:
obj_name = self.get_name(obj)
link_name = f"{obj_name}_{attr_name}"
@@ -194,7 +188,7 @@ class ODAPI:
new_target = self.create_primitive_value(target_name, new_value, is_code)
slot_type = self.cdapi.find_attribute_type(self.get_type_name(obj), attr_name)
new_link = self.od._create_link(link_name, slot_type, obj, new_target)
- self.__recompute_mappings()
+ self.recompute_mappings()
def create_primitive_value(self, name: str, value: any, is_code=False):
# watch out: in Python, 'bool' is subtype of 'int'
@@ -210,7 +204,7 @@ class ODAPI:
tgt = self.create_string_value(name, value)
else:
raise Exception("Unimplemented type "+value)
- self.__recompute_mappings()
+ self.recompute_mappings()
return tgt
def overwrite_primitive_value(self, name: str, value: any, is_code=False):
@@ -229,7 +223,7 @@ class ODAPI:
else:
raise Exception("Unimplemented type "+value)
- def create_link(self, link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID):
+ def create_link(self, link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID) -> UUID:
global NEXT_ID
types = self.bottom.read_outgoing_elements(self.mm, assoc_name)
if len(types) == 0:
@@ -241,12 +235,12 @@ class ODAPI:
link_name = f"__{assoc_name}{NEXT_ID}"
NEXT_ID += 1
link_id = self.od._create_link(link_name, typ, src, tgt)
- self.__recompute_mappings()
+ self.recompute_mappings()
return link_id
- def create_object(self, object_name: Optional[str], class_name: str):
+ def create_object(self, object_name: Optional[str], class_name: str) -> UUID:
obj = self.od.create_object(object_name, class_name)
- self.__recompute_mappings()
+ self.recompute_mappings()
return obj
# internal use
@@ -262,6 +256,7 @@ def bind_api_readonly(odapi):
'get_target': odapi.get_target,
'get_source': odapi.get_source,
'get_slot': odapi.get_slot,
+ 'get_slots': odapi.get_slots,
'get_slot_value': odapi.get_slot_value,
'get_slot_value_default': odapi.get_slot_value_default,
'get_all_instances': odapi.get_all_instances,
@@ -270,6 +265,7 @@ def bind_api_readonly(odapi):
'get_outgoing': odapi.get_outgoing,
'get_incoming': odapi.get_incoming,
'has_slot': odapi.has_slot,
+ 'is_instance': odapi.is_instance,
}
return funcs
@@ -283,6 +279,6 @@ def bind_api(odapi):
'create_object': odapi.create_object,
'create_link': odapi.create_link,
'delete': odapi.delete,
- 'set_slot_value': odapi.set_slot_value,
+ 'set_slot_value': odapi.set_slot_value
}
return funcs
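
A hedged usage sketch of the ODAPI surface touched above (shared edge filtering for get_outgoing/get_incoming, tuple-returning get_all_instances, and slot access); the meta-model and model texts are illustrative, loosely reusing the woods example removed later in this diff:

    from state.devstate import DevState
    from bootstrap.scd import bootstrap_scd
    from concrete_syntax.textual_od import parser
    from api.od import ODAPI

    state = DevState()
    scd_mmm = bootstrap_scd(state)

    mm = parser.parse_od(state, m_text="""
        Man:Class
        Man_weight:AttributeLink (Man -> Integer) {
            name = "weight";
            optional = False;
        }
        Bear:Class
        afraidOf:Association (Man -> Bear)
    """, mm=scd_mmm)

    m = parser.parse_od(state, m_text="""
        george:Man { weight = 80; }
        bear1:Bear
        :afraidOf (george -> bear1)
    """, mm=mm)

    odapi = ODAPI(state, m, mm)
    for name, man in odapi.get_all_instances("Man"):           # list of (name, obj) tuples
        print(name, odapi.get_slot_value(man, "weight"))       # -> george 80
        for link in odapi.get_outgoing(man, "afraidOf"):       # subtypes included by default
            print("afraid of", odapi.get_name(odapi.get_target(link)))
        odapi.set_slot_value(man, "weight", 81)                 # creates/updates the slot, recomputes mappings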
diff --git a/api/od_stub.pyi b/api/od_stub.pyi
new file mode 100644
index 0000000..563e3e0
--- /dev/null
+++ b/api/od_stub.pyi
@@ -0,0 +1,9 @@
+from typing import Any, Optional
+from uuid import UUID
+
+from od_stub_readonly import *
+
+def create_object(object_name: Optional[str], class_name: str) -> UUID: ...
+def create_link(link_name: Optional[str], assoc_name: str, src: UUID, tgt: UUID) -> UUID: ...
+def delete(obj: UUID) -> None: ...
+def set_slot_value(obj: UUID, attr_name: str, new_value: Any, is_code=False) -> None: ...
\ No newline at end of file
diff --git a/api/od_stub_readonly.pyi b/api/od_stub_readonly.pyi
new file mode 100644
index 0000000..89bbc4c
--- /dev/null
+++ b/api/od_stub_readonly.pyi
@@ -0,0 +1,18 @@
+from typing import Any
+from uuid import UUID
+
+def get(name: str) -> UUID: ...
+def get_value(obj: UUID) -> Any: ...
+def get_target(link: UUID) -> UUID: ...
+def get_source(link: UUID) -> UUID: ...
+def get_slot(obj: UUID, attr_name: str) -> UUID: ...
+def get_slots(obj: UUID) -> list[str]: ...
+def get_slot_value(obj: UUID, attr_name: str) -> Any: ...
+def get_slot_value_default(obj: UUID, attr_name: str, default: Any) -> Any: ...
+def get_all_instances(type_name: str, include_subtypes=True) -> list[tuple[str, UUID]]: ...
+def get_name(obj: UUID) -> str: ...
+def get_type_name(obj: UUID) -> str: ...
+def get_outgoing(obj: UUID, assoc_name: str, include_subtypes=True) -> list[UUID]: ...
+def get_incoming(obj: UUID, assoc_name: str, include_subtypes=True) -> list[UUID]: ...
+def has_slot(obj: UUID, attr_name: str) -> bool: ...
+def is_instance(obj: UUID, type_name: str, include_subtypes=True) -> bool: ...
diff --git a/examples/performance/runner.py b/benchmark/runner.py
similarity index 100%
rename from examples/performance/runner.py
rename to benchmark/runner.py
diff --git a/bootstrap/primitive.py b/bootstrap/primitive.py
index 853b552..4e2b36f 100644
--- a/bootstrap/primitive.py
+++ b/bootstrap/primitive.py
@@ -47,7 +47,7 @@ def bootstrap_constraint(class_node, type_name: str, python_type: str, scd_root:
bottom.create_edge(constraint_node, scd_node, "Morphism")
bottom.create_edge(constraint_link, scd_link, "Morphism")
-def bootstrap_primitive_types(scd_root, state, integer_type, boolean_type, float_type, string_type, type_type, actioncode_type):
+def bootstrap_primitive_types(scd_root, state, integer_type, boolean_type, float_type, string_type, type_type, actioncode_type, bytes_type):
# Order is important: Integer must come first
class_integer = bootstrap_type("Integer", scd_root, integer_type, state)
class_type = bootstrap_type("Type", scd_root, type_type, state)
@@ -55,6 +55,7 @@ def bootstrap_primitive_types(scd_root, state, integer_type, boolean_type, float
class_float = bootstrap_type("Float", scd_root, float_type, state)
class_string = bootstrap_type("String", scd_root, string_type, state)
class_actioncode = bootstrap_type("ActionCode", scd_root, actioncode_type, state)
+ class_bytes = bootstrap_type("Bytes", scd_root, bytes_type, state)
# Can only create constraints after ActionCode type has been created:
bootstrap_constraint(class_integer, "Integer", "int", scd_root, integer_type, actioncode_type, state)
@@ -63,3 +64,4 @@ def bootstrap_primitive_types(scd_root, state, integer_type, boolean_type, float
bootstrap_constraint(class_float, "Float", "float", scd_root, float_type, actioncode_type, state)
bootstrap_constraint(class_string, "String", "str", scd_root, string_type, actioncode_type, state)
bootstrap_constraint(class_actioncode, "ActionCode", "str", scd_root, actioncode_type, actioncode_type, state)
+ bootstrap_constraint(class_bytes, "Bytes", "bytes", scd_root, bytes_type, actioncode_type, state)
diff --git a/bootstrap/scd.py b/bootstrap/scd.py
index cac04c6..3a3002d 100644
--- a/bootstrap/scd.py
+++ b/bootstrap/scd.py
@@ -2,15 +2,7 @@ from state.base import State, UUID
from services.bottom.V0 import Bottom
from services.primitives.boolean_type import Boolean
from services.primitives.string_type import String
-from bootstrap.primitive import (
- bootstrap_primitive_types
- # bootstrap_boolean_type,
- # bootstrap_float_type,
- # bootstrap_integer_type,
- # bootstrap_string_type,
- # bootstrap_type_type,
- # bootstrap_actioncode_type
-)
+from bootstrap.primitive import bootstrap_primitive_types
def create_model_root(bottom: Bottom, model_name: str) -> UUID:
@@ -32,6 +24,7 @@ def bootstrap_scd(state: State) -> UUID:
float_type_root = create_model_root(bottom, "Float")
type_type_root = create_model_root(bottom, "Type")
actioncode_type_root = create_model_root(bottom, "ActionCode")
+ bytes_type_root = create_model_root(bottom, "Bytes")
# create MCL, without morphism links
@@ -85,8 +78,7 @@ def bootstrap_scd(state: State) -> UUID:
add_edge_element("gc_inh_element", glob_constr_node, element_node)
# # Attribute inherits from Element
add_edge_element("attr_inh_element", attr_node, element_node)
- # # Association inherits from Element
- # add_edge_element("assoc_inh_element", assoc_edge, element_node)
+ # # Association inherits from Class
add_edge_element("assoc_inh_element", assoc_edge, class_node)
# # AttributeLink inherits from Element
add_edge_element("attr_link_inh_element", attr_link_edge, element_node)
@@ -132,7 +124,8 @@ def bootstrap_scd(state: State) -> UUID:
float_type_root,
string_type_root,
type_type_root,
- actioncode_type_root)
+ actioncode_type_root,
+ bytes_type_root)
# bootstrap_integer_type(mcl_root, integer_type_root, integer_type_root, actioncode_type_root, state)
# bootstrap_boolean_type(mcl_root, boolean_type_root, integer_type_root, actioncode_type_root, state)
# bootstrap_float_type(mcl_root, float_type_root, integer_type_root, actioncode_type_root, state)
diff --git a/concrete_syntax/common.py b/concrete_syntax/common.py
index 3427b03..1ab0d3c 100644
--- a/concrete_syntax/common.py
+++ b/concrete_syntax/common.py
@@ -16,6 +16,8 @@ def display_value(val: any, type_name: str, indentation=0, newline_character='\n
return '"'+val+'"'.replace('\n', newline_character)
elif type_name == "Integer" or type_name == "Boolean":
return str(val)
+ elif type_name == "Bytes":
+ return val
else:
raise Exception("don't know how to display value" + type_name)
@@ -48,6 +50,9 @@ class TBase(Transformer):
def CODE(self, token):
return _Code(str(token[1:-1])) # strip the ``
+ def BYTES(self, token):
+ return (bytes(token[2:-1], "utf-8"), token.line) # Strip b"" or b''
+
def INDENTED_CODE(self, token):
skip = 4 # strip the ``` and the following newline character
space_count = 0
diff --git a/concrete_syntax/textual_cd/parser.py b/concrete_syntax/textual_cd/parser.py
index 8b8ebd7..352ef62 100644
--- a/concrete_syntax/textual_cd/parser.py
+++ b/concrete_syntax/textual_cd/parser.py
@@ -40,7 +40,7 @@ attrs: attr*
constraint: CODE | INDENTED_CODE
-class_: [ABSTRACT] "class" IDENTIFIER [multiplicity] ["(" superclasses ")"] ["{" attrs [constraint] "}"]
+class_: [ABSTRACT] "class" IDENTIFIER [multiplicity] ["(" superclasses ")"] ["{" attrs [constraint ";"] "}"]
association: "association" IDENTIFIER [multiplicity] IDENTIFIER "->" IDENTIFIER [multiplicity] ["{" attrs [constraint] "}"]
@@ -75,7 +75,7 @@ def parse_cd(state, m_text):
primitive_types = {
type_name : UUID(state.read_value(state.read_dict(state.read_root(), type_name)))
- for type_name in ["Integer", "String", "Boolean"]
+ for type_name in ["Integer", "String", "Boolean", "ActionCode"]
}
class T(TBase):
diff --git a/concrete_syntax/textual_od/objectdiagrams.jinja2 b/concrete_syntax/textual_od/objectdiagrams.jinja2
index 51425d2..a559347 100644
--- a/concrete_syntax/textual_od/objectdiagrams.jinja2
+++ b/concrete_syntax/textual_od/objectdiagrams.jinja2
@@ -1,18 +1,23 @@
{% macro render_name(name) %}{{ name if not hide_names or name.startswith("__") else "" }}{% endmacro %}
-{% macro render_attributes(obj) %} {
+{% macro render_attributes(obj) %}
+{% if len(odapi.get_slots(obj)) > 0 %} {
{% for attr_name in odapi.get_slots(obj) %}
{{ attr_name}} = {{ display_value(
val=odapi.get_slot_value(obj, attr_name),
type_name=odapi.get_type_name(odapi.get_slot(obj, attr_name)),
indentation=4) }};
- {% endfor %}
-}{% endmacro %}
+ {% endfor -%}
+}
+{% endif -%}
+{%- endmacro %}
-{% for obj_name, obj in objects %}
-{{ render_name(obj_name) }}:{{ odapi.get_type_name(obj) }}{{ render_attributes(obj) }}
-{% endfor %}
+{%- for obj_name, obj in objects %}
+{{ render_name(obj_name) }}:{{ odapi.get_type_name(obj) }}
+{{- render_attributes(obj) }}
+{% endfor -%}
-{% for lnk_name, lnk in links %}
-{{ render_name(obj_name) }}:{{ odapi.get_type_name(lnk) }} ({{odapi.get_name(odapi.get_source(lnk))}} -> {{odapi.get_name(odapi.get_target(lnk))}}){{ render_attributes(lnk) }}
-{% endfor %}
+{%- for lnk_name, lnk in links %}
+{{ render_name(lnk_name) }}:{{ odapi.get_type_name(lnk) }} ({{odapi.get_name(odapi.get_source(lnk))}} -> {{odapi.get_name(odapi.get_target(lnk))}})
+{{- render_attributes(lnk) }}
+{% endfor -%}
diff --git a/concrete_syntax/textual_od/parser.py b/concrete_syntax/textual_od/parser.py
index b679210..3777250 100644
--- a/concrete_syntax/textual_od/parser.py
+++ b/concrete_syntax/textual_od/parser.py
@@ -21,6 +21,7 @@ literal: INT
| STR
| BOOL
| CODE
+ | BYTES
| INDENTED_CODE
INT: /[0-9]+/
@@ -28,6 +29,8 @@ STR: /"[^"]*"/
| /'[^']*'/
BOOL: "True" | "False"
CODE: /`[^`]*`/
+BYTES: /b"[^"]*"/
+ | /b'[^']*'/
INDENTED_CODE: /```[^`]*```/
type_name: IDENTIFIER
@@ -67,7 +70,7 @@ def parse_od(state,
primitive_types = {
type_name : UUID(state.read_value(state.read_dict(state.read_root(), type_name)))
- for type_name in ["Integer", "String", "Boolean", "ActionCode"]
+ for type_name in ["Integer", "String", "Boolean", "ActionCode", "Bytes"]
}
class T(Transformer):
@@ -89,6 +92,10 @@ def parse_od(state,
def CODE(self, token):
return (_Code(str(token[1:-1])), token.line) # strip the ``
+ def BYTES(self, token):
+        # Strip the b"" or b'' delimiters, and turn escaped backslashes back into real escape
+        # sequences (the escaping happens because the model file is read as a plain string)
+        return (token[2:-1].encode().decode('unicode_escape').encode('raw_unicode_escape'), token.line)
+
def INDENTED_CODE(self, token):
skip = 4 # strip the ``` and the following newline character
space_count = 0
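
A standalone sketch of the escape round-trip performed in BYTES above (no muMLE imports needed; the token text is hypothetical but matches the /b"[^"]*"/ terminal):

    # The lexer hands over the token as read from a text file, so an escape such
    # as \x00 arrives as a literal backslash followed by 'x00'.
    token = r'b"\x00abc\n"'

    raw = token[2:-1]  # strip the leading b" and the trailing "
    value = raw.encode().decode('unicode_escape').encode('raw_unicode_escape')

    assert value == b'\x00abc\n'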
diff --git a/concrete_syntax/textual_od/renderer.py b/concrete_syntax/textual_od/renderer.py
index a3fc030..fae1e72 100644
--- a/concrete_syntax/textual_od/renderer.py
+++ b/concrete_syntax/textual_od/renderer.py
@@ -9,7 +9,7 @@ def render_od(state, m_id, mm_id, hide_names=True):
m_od = od.OD(mm_id, m_id, state)
- serialized = set(["Integer", "String", "Boolean", "ActionCode"]) # assume these types always already exist
+ serialized = set(["Integer", "String", "Boolean", "ActionCode", "Bytes"]) # assume these types always already exist
def display_name(name: str):
# object names that start with "__" are hidden
diff --git a/doc/odapi/.gitignore b/doc/odapi/.gitignore
new file mode 100644
index 0000000..e477c8f
--- /dev/null
+++ b/doc/odapi/.gitignore
@@ -0,0 +1,3 @@
+*.aux
+*.log
+*.out
\ No newline at end of file
diff --git a/doc/odapi/api_table.pdf b/doc/odapi/api_table.pdf
new file mode 100644
index 0000000..30dfbd3
Binary files /dev/null and b/doc/odapi/api_table.pdf differ
diff --git a/doc/odapi/api_table.tex b/doc/odapi/api_table.tex
new file mode 100644
index 0000000..586053f
--- /dev/null
+++ b/doc/odapi/api_table.tex
@@ -0,0 +1,121 @@
+\documentclass{article}
+
+\usepackage[left=1cm, right=1cm]{geometry} % reduce page margins
+
+\usepackage{amssymb}
+\usepackage{booktabs}
+\usepackage[normalem]{ulem}
+\usepackage[utf8]{inputenc}
+\usepackage{hyperref}
+\usepackage{graphicx}
+\usepackage{tikz}
+\usepackage{color,listings}
+\usepackage{awesomebox}
+
+\newcommand{\specialcell}[2][c]{%
+ \begin{tabular}[#1]{@{}l@{}}#2\end{tabular}}
+
+\def\ck{\checkmark}
+
+\begin{document}
+
+\centering
+\begin{scriptsize}
+\begin{tabular}{|l|c|c|c|c|c|l|}
+ \hline
+ & \multicolumn{5}{c|}{Availability in Context} & \\
+ \hline
+ & \multicolumn{2}{c|}{ \specialcell{Meta-Model \\ Constraint}} & \multicolumn{2}{c|}{ \specialcell{Model Trans- \\ formation Rule} } & & \\
+
+ \hline
+ & \specialcell{ \textbf{Local} }
+ & \specialcell{ \textbf{Global} }
+ & \specialcell{ \textbf{NAC} \\ \textbf{LHS} }
+ & \textbf{RHS}
+ & \specialcell{ \textbf{OD-} \\ \textbf{API} }
+ & \textbf{Meaning} \\
+ \hline
+ \hline
+ \multicolumn{7}{|l|}{\textit{Querying}} \\
+ \hline
+ \texttt{this :obj} & \ck & & \ck & \ck & & Current object or link \\
+ \hline
+ \texttt{get\_name(:obj) :str} & \ck & \ck & \ck & \ck & \ck & Get name of object or link \\
+ \hline
+ \texttt{get(name:str) :obj} & \ck & \ck & \ck & \ck & \ck & Get object or link by name (inverse of \texttt{get\_name}) \\
+ \hline
+ \texttt{get\_type(:obj) :obj} & \ck & \ck & \ck & \ck & \ck & {Get type of object or link} \\
+ \hline
+ \texttt{get\_type\_name(:obj) :str} & \ck & \ck & \ck & \ck & \ck & {Same as \texttt{get\_name(get\_type(...))}} \\
+ \hline
+ \specialcell{
+ \texttt{is\_instance(:obj, type\_name:str}
+ \\ \texttt{ [,include\_subtypes:bool=True]) :bool}
+ } & \ck & \ck & \ck & \ck & \ck & \specialcell{Is object instance of given type\\(or subtype thereof)?} \\
+ \hline
+
+ \texttt{get\_value(:obj) :int|str|bool} & \ck & \ck & \ck & \ck & \ck & \specialcell{Get value (only works on Integer,\\String, Boolean objects)} \\
+ \hline
+ \texttt{get\_target(:link) :obj} & \ck & \ck & \ck & \ck & \ck & {Get target of link} \\
+ \hline
+ \texttt{get\_source(:link) :obj} & \ck & \ck & \ck & \ck & \ck & {Get source of link} \\
+ \hline
+ \texttt{get\_slot(:obj, attr\_name:str) :link} & \ck & \ck & \ck & \ck & \ck & {Get slot-link (link connecting object to a value)} \\
+ \hline
+ \specialcell{
+ \texttt{get\_slot\_value(:obj,}
+ \\ \texttt{attr\_name:str) :int|str|bool}
+  } & \ck & \ck & \ck & \ck & \ck & {Same as \texttt{get\_value(get\_slot(...))}} \\
+ \hline
+
+ \specialcell{
+ \texttt{get\_all\_instances(type\_name:str}
+ \\ \texttt{ [,include\_subtypes:bool=True]}
+ \\ \texttt{) :list<(str, obj)>}
+ } & \ck & \ck & \ck & \ck & \ck & \specialcell{Get list of tuples (name, object) \\ of given type (and its subtypes).} \\
+ \hline
+ \specialcell{
+ \texttt{get\_outgoing(:obj,}
+ \\ \texttt{ assoc\_name:str) :list}
+ } & \ck & \ck & \ck & \ck & \ck & {Get outgoing links of given type} \\
+ \hline
+ \specialcell{
+ \texttt{get\_incoming(:obj,}
+ \\ \texttt{ assoc\_name:str) :list}
+ } & \ck & \ck & \ck & \ck & \ck & {Get incoming links of given type} \\
+ \hline
+ \texttt{has\_slot(:obj, attr\_name:str) :bool} & \ck & \ck & \ck & \ck & \ck & {Does object have given slot?} \\
+ \hline
+ \texttt{matched(label:str) :obj} & & & \ck & \ck & & \specialcell{Get matched object by its label \\ (the name of the object in the pattern)} \\
+
+ \hline
+ \hline
+ \multicolumn{7}{|l|}{\textit{Modifying}} \\
+ \hline
+ \texttt{delete(:obj)} & & & & \ck & \ck & {Delete object or link} \\
+ \hline
+
+ \specialcell{
+ \texttt{set\_slot\_value(:obj, attr\_name:str,}
+ \\ \texttt{ val:int|str|bool)}
+ } & & & & \ck & \ck & \specialcell{Set value of slot.
+ \\ Creates slot if it doesn't exist yet.} \\
+ \hline
+
+ \specialcell{
+ \texttt{create\_link(link\_name:str|None,} \\
+ \texttt{ assoc\_name:str, src:obj, tgt:obj) :link}
+ } & & & & \ck & \ck & \specialcell{Create link (typed by given association). \\
+ If \texttt{link\_name} is None, name is auto-generated.} \\
+ \hline
+ \specialcell{
+ \texttt{create\_object(object\_name:str|None,} \\
+ \texttt{ class\_name:str) :obj}
+ } & & & & \ck & \ck & \specialcell{Create object (typed by given class). \\
+ If \texttt{object\_name} is None, name is auto-generated.} \\
+ \hline
+ % \texttt{print(*args)} & \multicolumn{2}{c|}{Python's print function (useful for debugging)} & no, use the real print() \\
+\end{tabular}
+\end{scriptsize}
+
+\end{document}
\ No newline at end of file
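
To complement the table, a hedged sketch of how these functions appear inside constraint code, where bind_api_readonly exposes them without an `odapi.` prefix (mirroring the GlobalConstraint in the woods example removed later in this diff; the type and attribute names are illustrative):

    # Body of a GlobalConstraint: plain Python; the last statement must be a boolean expression.
    total_weight = 0
    for man_name, man_id in get_all_instances("Man"):
        total_weight += get_slot_value(man_id, "weight")
    total_weight < 85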
diff --git a/examples/conformance/abstract_assoc.py b/examples/conformance/abstract_assoc.py
deleted file mode 100644
index 27d4c7c..0000000
--- a/examples/conformance/abstract_assoc.py
+++ /dev/null
@@ -1,75 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.textual_od import parser, renderer
-from concrete_syntax.common import indent
-from concrete_syntax.plantuml import renderer as plantuml
-from util.prompt import yes_no, pause
-
-state = DevState()
-scd_mmm = bootstrap_scd(state)
-
-
-mm_cs = """
- BaseA:Class {
- abstract = True;
- }
- BaseB:Class {
- abstract = True;
- }
- baseAssoc:Association (BaseA -> BaseB) {
- abstract = True;
- target_lower_cardinality = 1;
- target_upper_cardinality = 2; # A has 1..2 B
- }
- A:Class
- B:Class
- assoc:Association (A -> B) {
- # we can further restrict cardinality from baseAssoc:
- target_upper_cardinality = 1;
-
- # relaxing cardinalities or constraints can be done (meaning: it will still be a valid meta-model), but will have no effect: for any instance of a type, the constraints defined on the type and its supertypes will be checked.
- }
- :Inheritance (A -> BaseA)
- :Inheritance (B -> BaseB)
- :Inheritance (assoc -> baseAssoc)
-"""
-
-print()
-print("Parsing meta-model...")
-mm = parser.parse_od(
- state,
- m_text=mm_cs, # the string of text to parse
- mm=scd_mmm, # the meta-model of class diagrams (= our meta-meta-model)
-)
-print("OK")
-
-print("Is our meta-model a valid class diagram?")
-conf = Conformance(state, mm, scd_mmm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-m_cs = """
- a0:A
- b0:B
- b1:B
-
- # error: assoc (A -> B) must have tgt card 0..1 (and we have 2 instead)
- :assoc (a0 -> b0)
- :assoc (a0 -> b1)
-
- # error: baseAssoc (A -> B) must have tgt card 1..2 (and we have 0 instead)
- a1:A
-"""
-
-print()
-print("Parsing model...")
-m = parser.parse_od(
- state,
- m_text=m_cs,
- mm=mm, # this time, the meta-model is the previous model we parsed
-)
-print("OK")
-
-print("Is our model a valid woods-diagram?")
-conf = Conformance(state, m, mm)
-print(render_conformance_check_result(conf.check_nominal()))
diff --git a/examples/conformance/metacircularity.py b/examples/conformance/metacircularity.py
deleted file mode 100644
index c591ee0..0000000
--- a/examples/conformance/metacircularity.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from services.scd import SCD
-from concrete_syntax.plantuml import renderer as plantuml
-
-def main():
- state = DevState()
- root = state.read_root() # id: 0
-
- scd_mm_id = bootstrap_scd(state)
-
- uml = ""
-
- # Render SCD Meta-Model as Object Diagram
- uml += plantuml.render_package("Object Diagram", plantuml.render_object_diagram(state, scd_mm_id, scd_mm_id, prefix_ids="od_"))
-
- # Render SCD Meta-Model as Class Diagram
- uml += plantuml.render_package("Class Diagram", plantuml.render_class_diagram(state, scd_mm_id, prefix_ids="cd_"))
-
- # Render conformance
- uml += plantuml.render_trace_conformance(state, scd_mm_id, scd_mm_id, prefix_inst_ids="od_", prefix_type_ids="cd_")
-
- print(uml)
-
-
-if __name__ == "__main__":
- main()
\ No newline at end of file
diff --git a/examples/conformance/woods.py b/examples/conformance/woods.py
deleted file mode 100644
index 633e1e9..0000000
--- a/examples/conformance/woods.py
+++ /dev/null
@@ -1,202 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.textual_od import parser, renderer
-from concrete_syntax.common import indent
-from concrete_syntax.plantuml import renderer as plantuml
-from util.prompt import yes_no, pause
-
-state = DevState()
-
-print("Loading meta-meta-model...")
-scd_mmm = bootstrap_scd(state)
-print("OK")
-
-print("Is our meta-meta-model a valid class diagram?")
-conf = Conformance(state, scd_mmm, scd_mmm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-# If you are curious, you can serialize the meta-meta-model:
-# print("--------------")
-# print(indent(
-# renderer.render_od(state,
-# m_id=scd_mmm,
-# mm_id=scd_mmm),
-# 4))
-# print("--------------")
-
-
-# Change this:
-woods_mm_cs = """
- Animal:Class {
- # The class Animal is an abstract class:
- abstract = True;
- }
-
- # A class without attributes
- # The `abstract` attribute shown above is optional (default: False)
- Bear:Class
-
- # Inheritance between two Classes is expressed as follows:
- :Inheritance (Bear -> Animal) # meaning: Bear is an Animal
-
- Man:Class {
- # We can define lower and upper cardinalities on Classes
- # (if unspecified, the lower-card is 0, and upper-card is infinity)
-
- lower_cardinality = 1; # there must be at least one Man in every model
- upper_cardinality = 2; # there must be at most two Men in every model
-
- constraint = ```
- # Python code
- # the last statement must be a boolean expression
-
- # When conformance checking, this code will be run for every Man-object.
- # The variable 'this' refers to the current Man-object.
-
- # Every man weighs at least '20'
- # (the attribute 'weight' is added further down)
- get_value(get_slot(this, "weight")) > 20
- ```;
- }
- # Note that we can only declare the inheritance link after having declared both Man and Animal: We can only refer to earlier objects
- :Inheritance (Man -> Animal) # Man is also an Animal
-
-
- # BTW, we could also give the Inheritance-link a name, for instance:
- # man_is_animal:Inheritance (Man -> Animal)
- #
- # Likewise, Classes, Associations, ... can also be nameless, for instance:
- # :Class { ... }
- # :Association (Man -> Man) { ... }
- # However, we typically want to give names to classes and associations, because we want to refer to them later.
-
-
- # We now add an attribute to 'Man'
- # Attributes are not that different from Associations: both are represented by links
- Man_weight:AttributeLink (Man -> Integer) {
- name = "weight"; # mandatory!
- optional = False; # <- meaning: every Man *must* have a weight
-
- # We can also define constraints on attributes
- constraint = ```
- # Python code
- # Here, 'this' refers to the LINK that connects a Man-object to an Integer
- tgt = get_target(this) # <- we get the target of the LINK (an Integer-object)
- weight = get_value(tgt) # <- get the Integer-value (e.g., 80)
- weight > 20
- ```;
- }
-
- # Create an Association from Man to Animal
- afraidOf:Association (Man -> Animal) {
- # An association has the following (optional) attributes:
- # - source_lower_cardinality (default: 0)
- # - source_upper_cardinality (default: infinity)
- # - target_lower_cardinality (default: 0)
- # - target_upper_cardinality (default: infinity)
-
- # Every Man is afraid of at least one Animal:
- target_lower_cardinality = 1;
-
- # No more than 6 Men are afraid of the same Animal:
- source_upper_cardinality = 6;
- }
-
- # Create a GlobalConstraint
- total_weight_small_enough:GlobalConstraint {
- # Note: for GlobalConstraints, there is no 'this'-variable
- constraint = ```
- # Python code
- # compute sum of all weights
- total_weight = 0
- for man_name, man_id in get_all_instances("Man"):
- total_weight += get_value(get_slot(man_id, "weight"))
-
- # as usual, the last statement is a boolean expression that we think should be satisfied
- total_weight < 85
- ```;
- }
-"""
-
-print()
-print("Parsing 'woods' meta-model...")
-woods_mm = parser.parse_od(
- state,
- m_text=woods_mm_cs, # the string of text to parse
- mm=scd_mmm, # the meta-model of class diagrams (= our meta-meta-model)
-)
-print("OK")
-
-# As a double-check, you can serialize the parsed model:
-# print("--------------")
-# print(indent(
-# renderer.render_od(state,
-# m_id=woods_mm,
-# mm_id=scd_mmm),
-# 4))
-# print("--------------")
-
-print("Is our 'woods' meta-model a valid class diagram?")
-conf = Conformance(state, woods_mm, scd_mmm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-# Change this:
-woods_m_cs = """
- george:Man {
- weight = 15;
- }
- billy:Man {
- weight = 100;
- }
- bear1:Bear
- bear2:Bear
- :afraidOf (george -> bear1)
- :afraidOf (george -> bear2)
-"""
-
-print()
-print("Parsing 'woods' model...")
-woods_m = parser.parse_od(
- state,
- m_text=woods_m_cs,
- mm=woods_mm, # this time, the meta-model is the previous model we parsed
-)
-print("OK")
-
-# As a double-check, you can serialize the parsed model:
-# print("--------------")
-# print(indent(
-# renderer.render_od(state,
-# m_id=woods_m,
-# mm_id=woods_mm),
-# 4))
-# print("--------------")
-
-print("Is our model a valid woods-diagram?")
-conf = Conformance(state, woods_m, woods_mm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-
-print()
-print("==================================")
-if yes_no("Print PlantUML?"):
- print_mm = yes_no(" ▸ Print meta-model?")
- print_m = yes_no(" ▸ Print model?")
- print_conf = print_mm and print_m and yes_no(" ▸ Print conformance links?")
-
- uml = ""
- if print_mm:
- uml += plantuml.render_package("Meta-model", plantuml.render_class_diagram(state, woods_mm))
- if print_m:
- uml += plantuml.render_package("Model", plantuml.render_object_diagram(state, woods_m, woods_mm))
- if print_conf:
- uml += plantuml.render_trace_conformance(state, woods_m, woods_mm)
-
- print("==================================")
- print(uml)
- print("==================================")
- print("Go to either:")
- print(" ▸ https://www.plantuml.com/plantuml/uml")
- print(" ▸ https://mstro.duckdns.org/plantuml/uml")
- print("and paste the above string.")
diff --git a/examples/conformance/woods2.py b/examples/conformance/woods2.py
deleted file mode 100644
index 656cddb..0000000
--- a/examples/conformance/woods2.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.textual_cd import parser as parser_cd
-from concrete_syntax.textual_od import parser as parser_od
-from concrete_syntax.textual_od import renderer as renderer_od
-from concrete_syntax.common import indent
-from concrete_syntax.plantuml import renderer as plantuml
-from util.prompt import yes_no, pause
-
-state = DevState()
-
-print("Loading meta-meta-model...")
-scd_mmm = bootstrap_scd(state)
-print("OK")
-
-print("Is our meta-meta-model a valid class diagram?")
-conf = Conformance(state, scd_mmm, scd_mmm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-# If you are curious, you can serialize the meta-meta-model:
-# print("--------------")
-# print(indent(
-# renderer.render_od(state,
-# m_id=scd_mmm,
-# mm_id=scd_mmm),
-# 4))
-# print("--------------")
-
-
-# Change this:
-woods_mm_cs = """
- abstract class Animal
-
- class Bear (Animal) # Bear inherits Animal
-
- class Man [1..2] (Animal) {
- Integer weight `get_value(get_target(this)) > 20`; # <- constraint in context of attribute-link
-
- `get_value(get_slot(this, "weight")) > 20` # <- constraint in context of Man-object
- }
-
- association afraidOf [0..6] Man -> Animal [1..2]
-
- global total_weight_small_enough ```
- total_weight = 0
- for man_name, man_id in get_all_instances("Man"):
- total_weight += get_value(get_slot(man_id, "weight"))
- total_weight < 85
- ```
-"""
-
-print()
-print("Parsing 'woods' meta-model...")
-woods_mm = parser_cd.parse_cd(
- state,
- m_text=woods_mm_cs, # the string of text to parse
-)
-print("OK")
-
-# We can serialize the class diagram to our object diagram syntax
-# (because the class diagram IS also an object diagram):
-print("--------------")
-print(indent(
- renderer_od.render_od(state,
- m_id=woods_mm,
- mm_id=scd_mmm),
- 4))
-print("--------------")
-
-print("Is our 'woods' meta-model a valid class diagram?")
-conf = Conformance(state, woods_mm, scd_mmm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-# Change this:
-woods_m_cs = """
- george:Man {
- weight = 15;
- }
- billy:Man {
- weight = 100;
- }
- bear1:Bear
- bear2:Bear
- :afraidOf (george -> bear1)
- :afraidOf (george -> bear2)
-"""
-
-print()
-print("Parsing 'woods' model...")
-woods_m = parser_od.parse_od(
- state,
- m_text=woods_m_cs,
- mm=woods_mm, # this time, the meta-model is the previous model we parsed
-)
-print("OK")
-
-# As a double-check, you can serialize the parsed model:
-# print("--------------")
-# print(indent(
-# renderer.render_od(state,
-# m_id=woods_m,
-# mm_id=woods_mm),
-# 4))
-# print("--------------")
-
-print("Is our model a valid woods-diagram?")
-conf = Conformance(state, woods_m, woods_mm)
-print(render_conformance_check_result(conf.check_nominal()))
-
-
-print()
-print("==================================")
-if yes_no("Print PlantUML?"):
- print_mm = yes_no(" ▸ Print meta-model?")
- print_m = yes_no(" ▸ Print model?")
- print_conf = print_mm and print_m and yes_no(" ▸ Print conformance links?")
-
- uml = ""
- if print_mm:
- uml += plantuml.render_package("Meta-model", plantuml.render_class_diagram(state, woods_mm))
- if print_m:
- uml += plantuml.render_package("Model", plantuml.render_object_diagram(state, woods_m, woods_mm))
- if print_conf:
- uml += plantuml.render_trace_conformance(state, woods_m, woods_mm)
-
- print("==================================")
- print(uml)
- print("==================================")
- print("Go to either:")
- print(" ▸ https://www.plantuml.com/plantuml/uml")
- print(" ▸ https://mstro.duckdns.org/plantuml/uml")
- print("and paste the above string.")
diff --git a/examples/ftg_pm_pt/ftg_pm_pt.py b/examples/ftg_pm_pt/ftg_pm_pt.py
new file mode 100644
index 0000000..aed0f77
--- /dev/null
+++ b/examples/ftg_pm_pt/ftg_pm_pt.py
@@ -0,0 +1,47 @@
+import os
+
+# Todo: remove src.backend.muMLE from the imports
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from concrete_syntax.textual_od.parser import parse_od
+from api.od import ODAPI
+from concrete_syntax.textual_od.renderer import render_od as od_renderer
+from concrete_syntax.plantuml import make_url as plant_url, renderer as plant_renderer
+from concrete_syntax.graphviz import make_url as graphviz_url, renderer as graphviz_renderer
+
+class FtgPmPt:
+
+ def __init__(self, name: str):
+ self.state = DevState()
+ self.scd_mmm = bootstrap_scd(self.state)
+ self.meta_model = self.load_metamodel()
+ self.model = None
+ self.odapi = None
+ self.name = name
+
+ @staticmethod
+ def read_file(file_name):
+ with open(os.path.join(os.path.dirname(__file__), file_name)) as file:
+ return file.read()
+
+ def load_metamodel(self):
+ mm_cs = self.read_file("pm/metamodels/mm_design.od")
+ mm_rt_cs = mm_cs + self.read_file("pm/metamodels/mm_runtime.od")
+ mm_total = mm_rt_cs + self.read_file("pt/metamodels/mm_design.od")
+ return parse_od(self.state, m_text=mm_total, mm=self.scd_mmm)
+
+ def load_model(self, m_text: str | None = None):
+ m_text = "" if not m_text else m_text
+ self.model = parse_od(self.state, m_text=m_text, mm=self.meta_model)
+ self.odapi = ODAPI(self.state, self.model, self.meta_model)
+
+ def render_od(self):
+ return od_renderer(self.state, self.model, self.meta_model, hide_names=False)
+
+ def render_plantuml_object_diagram(self):
+ print(plant_url.make_url(plant_renderer.render_package(
+ self.name, plant_renderer.render_object_diagram(self.state, self.model, self.meta_model)))
+ )
+
+ def render_graphviz_object_diagram(self):
+ print(graphviz_url.make_url(graphviz_renderer.render_object_diagram(self.state, self.model, self.meta_model)))
\ No newline at end of file
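
A hedged usage sketch of the FtgPmPt wrapper defined above; the one-object workflow text is hypothetical and only meant to show the call sequence (real workflows live in .od files and must conform to the combined pm/pt meta-model):

    from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt

    workflow_text = "model:pm_Model"      # hypothetical minimal object diagram

    ftg = FtgPmPt("demo")
    ftg.load_model(workflow_text)         # parse against the combined pm/pt meta-model
    print(ftg.render_od())                # textual object-diagram rendering
    ftg.render_plantuml_object_diagram()  # prints a PlantUML URL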
diff --git a/examples/ftg_pm_pt/help_functions.py b/examples/ftg_pm_pt/help_functions.py
new file mode 100644
index 0000000..2eab6ed
--- /dev/null
+++ b/examples/ftg_pm_pt/help_functions.py
@@ -0,0 +1,68 @@
+import copy
+import pickle
+
+from api.od import ODAPI
+
+from examples.ftg_pm_pt.helpers.composite_activity import execute_composite_workflow
+
+def serialize(obj):
+ return pickle.dumps(obj)
+
+
+def deserialize(obj):
+ return pickle.loads(obj)
+
+
+def create_activity_links(od: ODAPI, activity, prev_element, ctrl_port, end_trace=None,
+ relation_type="pt_IsFollowedBy"):
+ od.create_link(None, "pt_RelatesTo", activity, ctrl_port)
+ od.create_link(None, relation_type, prev_element, activity)
+ if end_trace:
+ od.create_link(None, "pt_IsFollowedBy", activity, end_trace)
+
+
+def extract_input_data(od: ODAPI, activity):
+ input_data = {}
+ for has_data_in in od.get_outgoing(activity, "pm_HasDataIn"):
+ data_port = od.get_target(has_data_in)
+ artefact_state = od.get_source(od.get_incoming(od.get_source(od.get_incoming(data_port, "pm_DataFlowOut")[0]), "pm_Of")[0])
+ input_data[od.get_name(data_port)] = deserialize(od.get_slot_value(artefact_state, "data"))
+ return input_data
+
+
+def execute_activity(od: ODAPI, globs, activity, input_data):
+    inp = copy.deepcopy(input_data)  # Deep copy is necessary: the executed function mutates the dictionary, and the original values are needed for the process trace
+ func = globs[od.get_slot_value(activity, "func")]
+ return func(inp) if func.__code__.co_argcount > 0 else func()
+
+
+def handle_artefact(od: ODAPI, activity, artefact_type, relation_type, data_port=None, data=None,
+ direction="DataFlowIn"):
+ artefact = od.create_object(None, "pt_Artefact")
+ if 'pt_Consumes' == relation_type:
+ od.create_link(None, relation_type, artefact, activity)
+ else:
+ od.create_link(None, relation_type, activity, artefact)
+ if data_port:
+ flow_direction = od.get_incoming if relation_type == 'pt_Consumes' else od.get_outgoing
+ ass_side = od.get_source if relation_type == 'pt_Consumes' else od.get_target
+ pm_artefact = ass_side(flow_direction(data_port, f"pm_{direction}")[0])
+ prev_artefact = find_previous_artefact(od, od.get_incoming(pm_artefact, "pt_BelongsTo"))
+ if prev_artefact:
+ od.create_link(None, "pt_PrevVersion", artefact, prev_artefact)
+ od.create_link(None, "pt_BelongsTo", artefact, pm_artefact)
+ if data is not None:
+ artefact_state = od.get_source(od.get_incoming(pm_artefact, "pm_Of")[0])
+ od.set_slot_value(artefact_state, "data", serialize(data))
+ od.set_slot_value(artefact, "data", serialize(data))
+
+
+def find_previous_artefact(od: ODAPI, linked_artefacts):
+ return next((od.get_source(link) for link in linked_artefacts if
+ not od.get_incoming(od.get_source(link), "pt_PrevVersion")), None)
+
+
+def update_control_states(od: ODAPI, activity, ctrl_out):
+ for has_ctrl_in in od.get_outgoing(activity, "pm_HasCtrlIn"):
+ od.set_slot_value(od.get_source(od.get_incoming(od.get_target(has_ctrl_in), "pm_Of")[0]), "active", False)
+ od.set_slot_value(od.get_source(od.get_incoming(ctrl_out, "pm_Of")[0]), "active", True)
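
A standalone sketch of the dispatch used in execute_activity above: func.__code__.co_argcount decides whether the activity function receives input data, and a deep copy keeps the caller's dictionary intact (the two activity functions are hypothetical):

    import copy

    def produce():              # hypothetical 0-argument activity function
        return {"out": 42}

    def transform(inp):         # hypothetical 1-argument activity function
        inp["out"] += 1
        return inp

    def run(func, input_data):
        inp = copy.deepcopy(input_data)  # protect the caller's dict, as execute_activity does
        return func(inp) if func.__code__.co_argcount > 0 else func()

    print(run(produce, {}))               # {'out': 42}
    print(run(transform, {"out": 42}))    # {'out': 43}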
diff --git a/examples/ftg_pm_pt/helpers/composite_activity.py b/examples/ftg_pm_pt/helpers/composite_activity.py
new file mode 100644
index 0000000..73063d6
--- /dev/null
+++ b/examples/ftg_pm_pt/helpers/composite_activity.py
@@ -0,0 +1,272 @@
+from uuid import UUID
+
+from api.od import ODAPI
+from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt
+from examples.ftg_pm_pt.runner import FtgPmPtRunner
+
+
+def find_previous_artefact(od: ODAPI, linked_artefacts):
+ return next((od.get_source(link) for link in linked_artefacts if
+ not od.get_incoming(od.get_source(link), "pt_PrevVersion")), None)
+
+
+def create_activity_links(od: ODAPI, activity, prev_element, ctrl_port, end_trace=None,
+ relation_type="pt_IsFollowedBy"):
+ od.create_link(None, "pt_RelatesTo", activity, ctrl_port)
+ od.create_link(None, relation_type, prev_element, activity)
+ if end_trace:
+ od.create_link(None, "pt_IsFollowedBy", activity, end_trace)
+
+
+def get_workflow_path(od: ODAPI, activity: UUID):
+ return od.get_slot_value(activity, "subworkflow_path")
+
+
+def get_workflow(workflow_path: str):
+ with open(workflow_path, "r") as f:
+ return f.read()
+
+
+############################
+
+def get_runtime_state(od: ODAPI, design_obj: UUID):
+ states = od.get_incoming(design_obj, "pm_Of")
+ if len(states) == 0:
+ print(f"Design object '{od.get_name(design_obj)}' has no runtime state.")
+ return None
+ return od.get_source(states[0])
+
+
+def get_source_incoming(od: ODAPI, obj: UUID, link_name: str):
+ links = od.get_incoming(obj, link_name)
+ if len(links) == 0:
+        print(f"Object '{od.get_name(obj)}' has no incoming links of type '{link_name}'.")
+ return None
+ return od.get_source(links[0])
+
+
+def get_target_outgoing(od: ODAPI, obj: UUID, link_name: str):
+ links = od.get_outgoing(obj, link_name)
+ if len(links) == 0:
+        print(f"Object '{od.get_name(obj)}' has no outgoing links of type '{link_name}'.")
+ return None
+ return od.get_target(links[0])
+
+
+def set_control_port_value(od: ODAPI, port: UUID, value: bool):
+ state = get_runtime_state(od, port)
+ od.set_slot_value(state, "active", value)
+
+
+def set_artefact_data(od: ODAPI, artefact: UUID, value: bytes):
+ state = artefact
+    # Only the process model of the artefact contains a runtime state
+ if od.get_type_name(state) == "pm_Artefact":
+ state = get_runtime_state(od, artefact)
+ od.set_slot_value(state, "data", value)
+
+
+def get_artefact_data(od: ODAPI, artefact):
+ state = artefact
+    # Only the process model of the artefact contains a runtime state
+ if od.get_type_name(state) == "pm_Artefact":
+ state = get_runtime_state(od, artefact)
+ return od.get_slot_value(state, "data")
+
+
+############################
+
+def set_workflow_control_source(workflow_model: FtgPmPt, ctrl_port_name: str, composite_linkage: dict):
+ od = workflow_model.odapi
+ source_port_name = composite_linkage[ctrl_port_name]
+ source_port = od.get(source_port_name)
+ set_control_port_value(od, source_port, True)
+
+
+def set_workflow_artefacts(act_od: ODAPI, activity: UUID, workflow_model: FtgPmPt, composite_linkage: dict):
+ for data_port in [act_od.get_target(data_in) for data_in in act_od.get_outgoing(activity, "pm_HasDataIn")]:
+ # Get the data source port of the inner workflow
+ data_port_name = act_od.get_name(data_port)
+ source_port_name = composite_linkage[data_port_name]
+ source_port = workflow_model.odapi.get(source_port_name)
+
+ # Get the artefact that is linked to the data port of the activity
+ act_artefact = get_source_incoming(act_od, data_port, "pm_DataFlowOut")
+ # Get the data of the artefact
+ artefact_data = get_artefact_data(act_od, act_artefact)
+
+ # Get the artefact that is linked to the data port of the inner workflow
+ workflow_artefact = get_target_outgoing(workflow_model.odapi, source_port, "pm_DataFlowIn")
+ set_artefact_data(workflow_model.odapi, workflow_artefact, artefact_data)
+
+
+def get_activity_port_from_inner_port(composite_linkage: dict, port_name: str):
+ for act_port_name, work_port_name in composite_linkage.items():
+ if work_port_name == port_name:
+ return act_port_name
+
+
+def execute_composite_workflow(od: ODAPI, activity: UUID, ctrl_port: UUID, composite_linkage: dict,
+ packages: dict | None, matched=None):
+ activity_name = od.get_slot_value(activity, "name")
+
+ # First get the path of the object diagram file that contains the inner workflow of the activity
+ workflow_path = get_workflow_path(od, activity)
+
+ # Read the object diagram file
+ workflow = get_workflow(workflow_path)
+
+ # Create an FtgPmPt object
+ workflow_model = FtgPmPt(activity_name)
+
+ # Load the workflow to the object
+ workflow_model.load_model(workflow)
+
+ # Set the correct control source port of the workflow to active
+ set_workflow_control_source(workflow_model, od.get_name(ctrl_port), composite_linkage[activity_name])
+
+ # If a data port is linked, set the data of the artefact
+ set_workflow_artefacts(od, activity, workflow_model, composite_linkage[activity_name])
+
+ # Create an FtgPmPtRunner object with the FtgPmPt object
+ workflow_runner = FtgPmPtRunner(workflow_model)
+
+ # Set the packages if present
+ workflow_runner.set_packages(packages, is_path=False)
+
+    # Run the FtgPmPtRunner (is a subprocess necessary? Running in-process is simpler and gives us direct access to the object)
+ workflow_runner.run()
+
+    # 'ports' contains all the ports of the inner workflow -> map them back to the activity ports, so that we can
+    # set the correct control ports to active and also set the data artefacts correctly
+ ports = extract_inner_workflow(workflow_model.odapi)
+ start_act = None
+ end_act = None
+ for port in [port for port in ports if port]:
+ port_name = workflow_model.odapi.get_name(port)
+ activity_port_name = get_activity_port_from_inner_port(composite_linkage[activity_name], port_name)
+ activity_port = od.get(activity_port_name)
+ match workflow_model.odapi.get_type_name(port):
+ case "pm_CtrlSource":
+ start_act = handle_control_source(od, activity_port, matched("prev_trace_element"))
+ case "pm_CtrlSink":
+ end_act = handle_control_sink(od, activity_port, start_act, matched("end_trace"))
+ case "pm_DataSource":
+ handle_data_source(od, activity_port, start_act)
+ case "pm_DataSink":
+ handle_data_sink(od, workflow_model.odapi, activity_port, port, end_act)
+
+
+def handle_control_source(od: ODAPI, port, prev_trace_elem):
+ set_control_port_value(od, port, False)
+ start_activity = od.create_object(None, "pt_StartActivity")
+ create_activity_links(od, start_activity, prev_trace_elem, port)
+ return start_activity
+
+
+def handle_control_sink(od: ODAPI, port, start_act, end_trace):
+ set_control_port_value(od, port, True)
+ end_activity = od.create_object(None, "pt_EndActivity")
+ create_activity_links(od, end_activity, start_act, port, end_trace)
+ return end_activity
+
+
+def handle_data_source(od: ODAPI, port, start_activity):
+ pt_artefact = od.create_object(None, "pt_Artefact")
+ od.create_link(None, "pt_Consumes", pt_artefact, start_activity)
+
+ pm_artefact = get_source_incoming(od, port, "pm_DataFlowOut")
+ pm_artefact_data = get_artefact_data(od, pm_artefact)
+ set_artefact_data(od, pt_artefact, pm_artefact_data)
+ prev_pt_artefact = find_previous_artefact(od, od.get_incoming(pm_artefact, "pt_BelongsTo"))
+ if prev_pt_artefact:
+ od.create_link(None, "pt_PrevVersion", pt_artefact, prev_pt_artefact)
+ od.create_link(None, "pt_BelongsTo", pt_artefact, pm_artefact)
+
+
+def handle_data_sink(act_od: ODAPI, work_od: ODAPI, act_port, work_port, end_activity):
+ pt_artefact = act_od.create_object(None, "pt_Artefact")
+ act_od.create_link(None, "pt_Produces", end_activity, pt_artefact)
+
+ work_artefact = get_source_incoming(work_od, work_port, "pm_DataFlowOut")
+ work_artefact_data = get_artefact_data(work_od, work_artefact)
+
+ act_artefact = get_target_outgoing(act_od, act_port, "pm_DataFlowIn")
+
+ set_artefact_data(act_od, act_artefact, work_artefact_data)
+ set_artefact_data(act_od, pt_artefact, work_artefact_data)
+
+ prev_pt_artefact = find_previous_artefact(act_od, act_od.get_incoming(act_artefact, "pt_BelongsTo"))
+ if prev_pt_artefact:
+ act_od.create_link(None, "pt_PrevVersion", pt_artefact, prev_pt_artefact)
+ act_od.create_link(None, "pt_BelongsTo", pt_artefact, act_artefact)
+
+
+def extract_inner_workflow(workflow: ODAPI):
+ # Get the model, this should be only one
+    # Get the model; there should be only one
+
+ # Get the start of the process trace
+ start_trace = get_source_incoming(workflow, model, "pt_Starts")
+ # Get the end of the process trace
+ end_trace = get_source_incoming(workflow, model, "pt_Ends")
+
+ # Get the first started activity
+ first_activity = get_target_outgoing(workflow, start_trace, "pt_IsFollowedBy")
+ # Get the last ended activity
+ end_activity = get_source_incoming(workflow, end_trace, "pt_IsFollowedBy")
+
+ # Get the control port that started the activity
+ act_ctrl_in = get_target_outgoing(workflow, first_activity, "pt_RelatesTo")
+ # Get the control port that is activated when the activity is executed
+ act_ctrl_out = get_target_outgoing(workflow, end_activity, "pt_RelatesTo")
+
+ # Get the control source of the workflow
+ ports = []
+ for port in workflow.get_incoming(act_ctrl_in, "pm_CtrlFlow"):
+ source = workflow.get_source(port)
+ if workflow.get_type_name(source) == "pm_CtrlSource":
+ # Only one port can activate an activity
+ ports.append(source)
+ break
+
+ # Get the control sink of the workflow
+ for port in workflow.get_outgoing(act_ctrl_out, "pm_CtrlFlow"):
+ sink = workflow.get_target(port)
+ if workflow.get_type_name(sink) == "pm_CtrlSink":
+            # Only one port can be set to active once an activity is ended
+ ports.append(sink)
+ break
+
+ # Get the data port that the activity consumes (if used)
+ consumed_links = workflow.get_incoming(first_activity, "pt_Consumes")
+ if len(consumed_links) > 0:
+ pt_artefact = None
+ for link in consumed_links:
+ pt_artefact = workflow.get_source(link)
+            # Check if it is the first artefact -> it has no previous version
+ if len(workflow.get_outgoing(pt_artefact, "pt_PrevVersion")) == 0:
+ break
+
+ pm_artefact = get_target_outgoing(workflow, pt_artefact, "pt_BelongsTo")
+ # Find the data source port
+ for link in workflow.get_incoming(pm_artefact, "pm_DataFlowIn"):
+ source = workflow.get_source(link)
+ if workflow.get_type_name(source) == "pm_DataSource":
+ # An activity can only use one artefact as input
+ ports.append(source)
+ break
+
+    # Get all data sink ports that are connected to an artefact produced by an activity in the workflow,
+    # where the artefact is also part of the main workflow
+ for port_name, data_sink in workflow.get_all_instances("pm_DataSink"):
+ pm_art = get_source_incoming(workflow, data_sink, "pm_DataFlowOut")
+        # If the pm_artefact is linked to a produced process trace artefact, we can add the port
+ links = workflow.get_incoming(pm_art, "pt_BelongsTo")
+ if not len(links):
+ continue
+        # A data sink port linkage is only added to the process trace when an activity ends and an artefact
+        # is produced, so if a pt_BelongsTo link exists, a process trace artefact is linked to this data port
+ ports.append(data_sink)
+
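+    # ports now contains: the workflow's control source, its control sink, the data source feeding the
+    # first activity (if it consumes an artefact), and every data sink whose artefact has already been
+    # produced (i.e. has a pt_BelongsTo link)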
+ return ports
diff --git a/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_lhs.od b/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_lhs.od
new file mode 100644
index 0000000..b24c468
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_lhs.od
@@ -0,0 +1,2 @@
+# Match the model
+model:RAM_pm_Model
diff --git a/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_nac.od b/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_nac.od
new file mode 100644
index 0000000..fb604ba
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_nac.od
@@ -0,0 +1,7 @@
+model:RAM_pm_Model
+
+# NAC: the rule does not apply if the model is already connected to a process trace
+start_trace:RAM_pt_StartTrace
+ :RAM_pt_Starts (start_trace -> model)
+end_trace:RAM_pt_EndTrace
+ :RAM_pt_Ends (end_trace -> model)
diff --git a/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_rhs.od b/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_rhs.od
new file mode 100644
index 0000000..f81fb45
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_connect_process_trace_rhs.od
@@ -0,0 +1,12 @@
+# Keep the left hand side
+model:RAM_pm_Model
+
+# Connect a process trace to it
+start_trace:RAM_pt_StartTrace
+ starts:RAM_pt_Starts (start_trace -> model)
+
+end_trace:RAM_pt_EndTrace
+ ends:RAM_pt_Ends (end_trace -> model)
+
+# Connect the start with the end
+:RAM_pt_IsFollowedBy (start_trace -> end_trace)
diff --git a/examples/ftg_pm_pt/operational_semantics/r_exec_activity_lhs.od b/examples/ftg_pm_pt/operational_semantics/r_exec_activity_lhs.od
new file mode 100644
index 0000000..50460e0
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_exec_activity_lhs.od
@@ -0,0 +1,49 @@
+# When a control port is active and connected to an activity, we want to execute the activity.
+# However, if the activity uses input_and semantics (input_or = False), it can only be activated once all of its input ports are active.
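+# Example: with input_or = False and two pm_CtrlActivityIn ports, the condition below only evaluates to
+# True once both linked pm_CtrlPortState objects have active = True; with input_or = True the loop is
+# skipped and the single active port matched as ctrl_in is enough.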
+
+
+# Match the model
+model:RAM_pm_Model
+
+# Match a Python automated activity
+py_activity:RAM_pm_PythonAutomatedActivity {
+ # Check if all connected ports are active in case of input_and
+ condition = ```
+ all_active = True
+
+ # Check for or / and
+ if not get_slot_value(this, "input_or"):
+ # Get all the ctrl in ports
+ for has_ctrl_in in get_outgoing(this, "pm_HasCtrlIn"):
+ c_in_state = get_source(get_incoming(get_target(has_ctrl_in), "pm_Of")[0])
+ # Check if the port is active or not
+ if not get_slot_value(c_in_state, "active"):
+ all_active = False
+ break
+
+ all_active
+ ```;
+} model_to_activity:RAM_pm_Owns (model -> py_activity)
+
+
+# Match a control activity in port that is active
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+ RAM_active = `get_value(this)`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+# Match the activity link to the port
+activity_to_port:RAM_pm_HasCtrlIn (py_activity -> ctrl_in)
+
+# Match the end of the trace
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+# Match the previous trace element before the end trace
+prev_trace_element:RAM_pt_Event
+
+followed_by:RAM_pt_IsFollowedBy (prev_trace_element -> end_trace)
+
diff --git a/examples/ftg_pm_pt/operational_semantics/r_exec_activity_rhs.od b/examples/ftg_pm_pt/operational_semantics/r_exec_activity_rhs.od
new file mode 100644
index 0000000..27808eb
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_exec_activity_rhs.od
@@ -0,0 +1,42 @@
+model:RAM_pm_Model
+
+py_activity:RAM_pm_PythonAutomatedActivity {
+
+ condition = ```
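+        # Execute the activity: record a pt_StartActivity, run the activity's Python function on its
+        # input data, then record a pt_EndActivity, the consumed/produced artefacts, and finally
+        # propagate the resulting control-port states.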
+ start_activity = create_object(None, "pt_StartActivity")
+ create_activity_links(odapi, start_activity, matched("prev_trace_element"), matched("ctrl_in"))
+ input_data = extract_input_data(odapi, this)
+ result = execute_activity(odapi, globals()["packages"], this, input_data)
+ if len(result) == 3:
+ status_code, output_data, input_used = result
+ else:
+ status_code, output_data, input_used = *result, None
+ if input_used:
+ handle_artefact(odapi, start_activity, "pt_Artefact", "pt_Consumes", get(input_used), input_data[input_used], direction="DataFlowOut")
+ end_activity = create_object(None, "pt_EndActivity")
+ ctrl_out = get(status_code)
+ create_activity_links(odapi, end_activity, start_activity, ctrl_out, end_trace=matched("end_trace"))
+ if output_data:
+ port, data = output_data
+ handle_artefact(odapi, end_activity, "pt_Artefact", "pt_Produces", get(port), data, direction="DataFlowIn")
+ update_control_states(odapi, this, ctrl_out)
+ ```;
+}
+
+model_to_activity:RAM_pm_Owns
+
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+ RAM_active = `False`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+activity_to_port:RAM_pm_HasCtrlIn (py_activity -> ctrl_in)
+
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+prev_trace_element:RAM_pt_Event
+
diff --git a/examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_lhs.od b/examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_lhs.od
new file mode 100644
index 0000000..b472cd0
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_lhs.od
@@ -0,0 +1,36 @@
+# When a control port is active and connected to an activity, we want to execute the activity; if it is a composite activity, we execute its inner workflow.
+# However, if the activity uses input_and semantics (input_or = False), it can only be activated once all of its input ports are active.
+
+
+# Match the model
+model:RAM_pm_Model
+
+# Match a composite activity
+activity:RAM_pm_Activity {
+
+ RAM_composite = `True`;
+
+} model_to_activity:RAM_pm_Owns (model -> activity)
+
+
+# Match a control activity in port that is active
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+ RAM_active = `get_value(this)`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+# Match the activity link to the port
+activity_to_port:RAM_pm_HasCtrlIn (activity -> ctrl_in)
+
+# Match the end of the trace
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+# Match the previous trace element before the end trace
+prev_trace_element:RAM_pt_Event
+
+followed_by:RAM_pt_IsFollowedBy (prev_trace_element -> end_trace)
+
diff --git a/examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_rhs.od b/examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_rhs.od
new file mode 100644
index 0000000..dc5e1c0
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_exec_composite_activity_rhs.od
@@ -0,0 +1,29 @@
+model:RAM_pm_Model
+
+activity:RAM_pm_Activity {
+
+ RAM_composite = `True`;
+
+ condition = ```
+ # Execute inner workflow
+ execute_composite_workflow(odapi, this, matched("ctrl_in"), globals()["composite_linkage"], globals()["packages"], matched)
+ ```;
+}
+
+model_to_activity:RAM_pm_Owns
+
+ctrl_in:RAM_pm_CtrlActivityIn
+
+ctrl_in_state:RAM_pm_CtrlPortState {
+ RAM_active = `False`;
+}
+
+state_to_port:RAM_pm_Of (ctrl_in_state -> ctrl_in)
+
+activity_to_port:RAM_pm_HasCtrlIn (activity -> ctrl_in)
+
+end_trace:RAM_pt_EndTrace
+ends:RAM_pt_Ends (end_trace -> model)
+
+prev_trace_element:RAM_pt_Event
+
diff --git a/examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_lhs.od b/examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_lhs.od
new file mode 100644
index 0000000..66557e6
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_lhs.od
@@ -0,0 +1,20 @@
+# Match an active control output port
+out_state:RAM_pm_CtrlPortState {
+ RAM_active = `get_value(this)`;
+}
+
+out:RAM_pm_CtrlOut
+
+state_to_out:RAM_pm_Of (out_state -> out)
+
+# Match an inactive control input port
+in_state:RAM_pm_CtrlPortState {
+ RAM_active = `not get_value(this)`;
+}
+
+in:RAM_pm_CtrlIn
+
+state_to_in:RAM_pm_Of (in_state -> in)
+
+# Match the connection between those two ports
+flow:RAM_pm_CtrlFlow (out -> in)
diff --git a/examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_rhs.od b/examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_rhs.od
new file mode 100644
index 0000000..3861692
--- /dev/null
+++ b/examples/ftg_pm_pt/operational_semantics/r_trigger_ctrl_flow_rhs.od
@@ -0,0 +1,42 @@
+# Copy the left hand side
+
+out_state:RAM_pm_CtrlPortState {
+ # Only set the output port to inactive if all connected input ports are set to active
+ RAM_active = ```
+ set_to_active = False
+
+ output_port = matched("out")
+ outgoing_flows = get_outgoing(output_port, "pm_CtrlFlow")
+
+ # for each flow: pm_CtrlFlow -> pm_CtrlIn <- pm_Of <- pm_CtrlPortState == state
+ all_input_port_states = [get_source(get_incoming(get_target(flow), "pm_Of")[0]) for flow in outgoing_flows]
+ input_port_state = matched("in_state")
+
+ for state in all_input_port_states:
+ is_active = get_slot_value(state, "active")
+
+        # If some other input port state (not the one we matched and are about to set active) is still
+        # inactive, this output port cannot be deactivated yet and must remain active
+ if not is_active and state != input_port_state:
+ set_to_active = True
+ break
+
+ # Set the attribute to the assigned value
+ set_to_active
+ ```;
+}
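+# Example: if "out" feeds two input ports and only the matched one becomes active in this rewrite, the
+# other (still inactive) input keeps set_to_active True, so "out" stays active and the rule can fire
+# again for the remaining flow; once every connected input is active, "out" is set to inactive.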
+
+out:RAM_pm_CtrlOut
+
+state_to_out:RAM_pm_Of (out_state -> out)
+
+in_state:RAM_pm_CtrlPortState {
+ # Set the input port active
+ RAM_active = `True`;
+}
+
+in:RAM_pm_CtrlIn
+
+state_to_in:RAM_pm_Of (in_state -> in)
+
+flow:RAM_pm_CtrlFlow (out -> in)
diff --git a/examples/ftg_pm_pt/pm/metamodels/mm_design.od b/examples/ftg_pm_pt/pm/metamodels/mm_design.od
new file mode 100644
index 0000000..719347b
--- /dev/null
+++ b/examples/ftg_pm_pt/pm/metamodels/mm_design.od
@@ -0,0 +1,200 @@
+##################################################
+
+pm_Model:Class
+
+##################################################
+
+pm_Stateful:Class
+
+##################################################
+
+pm_ModelElement:Class {
+ abstract = True;
+}
+
+##################################################
+
+pm_Activity:Class
+ :Inheritance (pm_Activity -> pm_ModelElement)
+
+pm_Activity_name:AttributeLink (pm_Activity -> String) {
+ name = "name";
+ optional = False;
+}
+
+pm_Activity_composite:AttributeLink (pm_Activity -> Boolean) {
+ name = "composite";
+ optional = False;
+}
+
+pm_Activity_subworkflow_path:AttributeLink (pm_Activity -> String) {
+ name = "subworkflow_path";
+ optional = True;
+}
+
+
+pm_AutomatedActivity:Class {
+ abstract = True;
+} :Inheritance (pm_AutomatedActivity -> pm_Activity)
+
+pm_AutomatedActivity_input_or:AttributeLink (pm_AutomatedActivity -> Boolean) {
+ name = "input_or";
+ optional = False;
+}
+
+pm_PythonAutomatedActivity:Class
+ :Inheritance (pm_PythonAutomatedActivity -> pm_AutomatedActivity)
+
+pm_PythonAutomatedActivity_func:AttributeLink (pm_PythonAutomatedActivity -> ActionCode) {
+ name = "func";
+ optional = False;
+}
+
+##################################################
+
+pm_Artefact:Class
+ :Inheritance (pm_Artefact -> pm_ModelElement)
+ :Inheritance (pm_Artefact -> pm_Stateful)
+
+##################################################
+
+pm_CtrlPort:Class {
+ abstract = True;
+} :Inheritance (pm_CtrlPort -> pm_Stateful)
+
+pm_CtrlIn:Class {
+ abstract = True;
+} :Inheritance (pm_CtrlIn -> pm_CtrlPort)
+
+pm_CtrlSink:Class {
+ # 1) A control sink port must have at least one incoming control flow
+ # 2) A control sink port can't have any control flow output
+ constraint = ```
+ has_incoming = len(get_incoming(this, "pm_CtrlFlow")) > 0
+ no_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) == 0
+
+ # Return constraint
+ has_incoming and no_outgoing
+ ```;
+} :Inheritance (pm_CtrlSink -> pm_CtrlIn)
+
+pm_CtrlActivityIn:Class {
+ # 1) Must have at least one incoming control flow
+ constraint = ```
+ has_incoming = len(get_incoming(this, "pm_CtrlFlow")) > 0
+ # Return constraint
+ has_incoming
+ ```;
+} :Inheritance (pm_CtrlActivityIn -> pm_CtrlIn)
+
+pm_CtrlOut:Class {
+ abstract = True;
+} :Inheritance (pm_CtrlOut -> pm_CtrlPort)
+
+pm_CtrlSource:Class {
+ # 1) A control source port can't have any control flow inputs
+ # 2) A control source port must have at least one outgoing control flow
+ constraint = ```
+ no_incoming = len(get_incoming(this, "pm_CtrlFlow")) == 0
+ has_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) > 0
+
+ # Return constraint
+ no_incoming and has_outgoing
+ ```;
+} :Inheritance (pm_CtrlSource -> pm_CtrlOut)
+
+pm_CtrlActivityOut:Class {
+ # 1) Must have at least one outgoing control flow
+ constraint = ```
+ has_outgoing = len(get_outgoing(this, "pm_CtrlFlow")) > 0
+
+ # Return constraint
+ has_outgoing
+ ```;
+} :Inheritance (pm_CtrlActivityOut -> pm_CtrlOut)
+
+##################################################
+
+pm_DataPort:Class {
+ abstract = True;
+}
+
+pm_DataIn:Class {
+ abstract = True;
+} :Inheritance (pm_DataIn -> pm_DataPort)
+
+pm_DataSink:Class
+ :Inheritance (pm_DataSink -> pm_DataIn)
+
+pm_DataActivityIn:Class
+ :Inheritance (pm_DataActivityIn -> pm_DataIn)
+
+pm_DataOut:Class {
+ abstract = True;
+} :Inheritance (pm_DataOut -> pm_DataPort)
+
+pm_DataSource:Class
+ :Inheritance (pm_DataSource -> pm_DataOut)
+
+pm_DataActivityOut:Class
+ :Inheritance (pm_DataActivityOut -> pm_DataOut)
+
+##################################################
+##################################################
+
+pm_Owns:Association (pm_Model -> pm_ModelElement) {
+ source_lower_cardinality = 1;
+ source_upper_cardinality = 1;
+}
+
+##################################################
+
+pm_CtrlFlow:Association (pm_CtrlPort -> pm_CtrlPort)
+
+##################################################
+
+pm_HasCtrlIn:Association (pm_Activity -> pm_CtrlIn) {
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+}
+
+pm_HasCtrlOut:Association (pm_Activity -> pm_CtrlOut) {
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+}
+
+pm_HasDataIn:Association (pm_Activity -> pm_DataIn) {
+ source_upper_cardinality = 1;
+}
+
+pm_HasDataOut:Association (pm_Activity -> pm_DataOut) {
+ source_upper_cardinality = 1;
+}
+
+##################################################
+
+pm_DataFlowIn:Association (pm_DataOut -> pm_Artefact) {
+ source_lower_cardinality = 1;
+ target_lower_cardinality = 1;
+}
+
+pm_DataFlowOut:Association (pm_Artefact -> pm_DataIn) {
+ source_lower_cardinality = 1;
+ target_lower_cardinality = 1;
+}
+
+##################################################
+##################################################
+
+has_source_and_sink:GlobalConstraint {
+ # There should be at least one source and sink control port
+ constraint = ```
+ contains_source = len(get_all_instances("pm_CtrlSource")) > 0
+ contains_sink = len(get_all_instances("pm_CtrlSink")) > 0
+
+ # return constraint
+ contains_source and contains_sink
+ ```;
+}
+
+##################################################
diff --git a/examples/ftg_pm_pt/pm/metamodels/mm_runtime.od b/examples/ftg_pm_pt/pm/metamodels/mm_runtime.od
new file mode 100644
index 0000000..c45daef
--- /dev/null
+++ b/examples/ftg_pm_pt/pm/metamodels/mm_runtime.od
@@ -0,0 +1,38 @@
+##################################################
+
+pm_State:Class {
+ abstract = True;
+}
+
+##################################################
+
+pm_ArtefactState:Class
+ :Inheritance (pm_ArtefactState -> pm_State)
+
+pm_ArtefactState_data:AttributeLink (pm_ArtefactState -> Bytes) {
+ name = "data";
+ optional = False;
+}
+
+##################################################
+
+pm_CtrlPortState:Class
+ :Inheritance (pm_CtrlPortState -> pm_State)
+
+pm_CtrlPortState_active:AttributeLink (pm_CtrlPortState -> Boolean) {
+ name = "active";
+ optional = False;
+}
+
+##################################################
+##################################################
+
+pm_Of:Association (pm_State -> pm_Stateful) {
+ # one-to-one
+ source_lower_cardinality = 1;
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+##################################################
diff --git a/examples/ftg_pm_pt/pt/metamodels/mm_design.od b/examples/ftg_pm_pt/pt/metamodels/mm_design.od
new file mode 100644
index 0000000..c6fa85c
--- /dev/null
+++ b/examples/ftg_pm_pt/pt/metamodels/mm_design.od
@@ -0,0 +1,109 @@
+##################################################
+
+pt_Event:Class {
+ abstract = True;
+}
+
+##################################################
+
+pt_Activity:Class {
+ abstract = True;
+} :Inheritance (pt_Activity -> pt_Event)
+
+pt_StartActivity:Class {
+ # A start activity can only be related to a control in port
+ constraint = ```
+ correct_related = True
+
+ port = get_target(get_outgoing(this, "pt_RelatesTo")[0])
+ correct_related = port in [uid for _, uid in get_all_instances("pm_CtrlIn")]
+ correct_related
+ ```;
+
+} :Inheritance (pt_StartActivity -> pt_Activity)
+
+pt_EndActivity:Class {
+    # An end activity can only be related to a control out port
+ constraint = ```
+ correct_related = True
+
+ port = get_target(get_outgoing(this, "pt_RelatesTo")[0])
+ correct_related = port in [uid for _, uid in get_all_instances("pm_CtrlOut")]
+
+ correct_related
+ ```;
+
+} :Inheritance (pt_EndActivity -> pt_Activity)
+
+##################################################
+
+pt_StartTrace:Class
+ :Inheritance (pt_StartTrace -> pt_Event)
+
+pt_EndTrace:Class
+ :Inheritance (pt_EndTrace -> pt_Event)
+
+##################################################
+
+pt_Artefact:Class
+ :Inheritance (pt_Artefact -> pt_Event)
+
+pt_Artefact_data:AttributeLink (pt_Artefact -> Bytes) {
+ name = "data";
+ optional = False;
+}
+
+##################################################
+##################################################
+
+pt_IsFollowedBy:Association (pt_Event -> pt_Event) {
+ source_upper_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+##################################################
+
+pt_RelatesTo:Association (pt_Activity -> pm_CtrlPort) {
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+pt_Consumes:Association (pt_Artefact -> pt_StartActivity) {
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+pt_Produces:Association (pt_EndActivity -> pt_Artefact) {
+ source_lower_cardinality = 1;
+ source_upper_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+##################################################
+
+pt_Starts:Association (pt_StartTrace -> pm_Model) {
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+pt_Ends:Association (pt_EndTrace -> pm_Model) {
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+##################################################
+
+pt_PrevVersion:Association (pt_Artefact -> pt_Artefact) {
+ source_upper_cardinality = 1;
+ target_upper_cardinality = 1;
+}
+
+pt_BelongsTo:Association (pt_Artefact -> pm_Artefact) {
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
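+# Each pt_Artefact is a trace snapshot of the pm_Artefact it belongs to; successive
+# snapshots of the same pm_Artefact are chained via pt_PrevVersion.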
+
+##################################################
diff --git a/examples/ftg_pm_pt/runner.py b/examples/ftg_pm_pt/runner.py
new file mode 100644
index 0000000..282138c
--- /dev/null
+++ b/examples/ftg_pm_pt/runner.py
@@ -0,0 +1,162 @@
+import re
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from transformation.rule import RuleMatcherRewriter
+from transformation.ramify import ramify
+from concrete_syntax.graphviz import renderer as graphviz
+from concrete_syntax.graphviz.make_url import make_url
+from concrete_syntax.plantuml import renderer as plantuml
+from concrete_syntax.plantuml.make_url import make_url as plant_make_url
+from api.od import ODAPI
+import os
+from os import listdir
+from os.path import isfile, join
+import importlib.util
+from util.module_to_dict import module_to_dict
+from examples.ftg_pm_pt import help_functions
+
+from examples.ftg_pm_pt.ftg_pm_pt import FtgPmPt
+
+
+
+class FtgPmPtRunner:
+
+ def __init__(self, model: FtgPmPt, composite_linkage: dict | None = None):
+ self.model = model
+ self.ram_mm = ramify(self.model.state, self.model.meta_model)
+ self.rules = self.load_rules()
+ self.packages = None
+ self.composite_linkage = composite_linkage
+
+ def load_rules(self):
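+        # Each rule <name> is loaded from operational_semantics/r_<name>_<kind>.od,
+        # where <kind> is the rule part requested by the loader (e.g. lhs, rhs, nac).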
+ return loader.load_rules(
+ self.model.state,
+ lambda rule_name, kind: os.path.join(
+ os.path.dirname(__file__),
+ f"operational_semantics/r_{rule_name}_{kind}.od"
+ ),
+ self.ram_mm,
+ ["connect_process_trace", "trigger_ctrl_flow", "exec_activity", "exec_composite_activity"]
+ )
+
+ def set_packages(self, packages: str | dict, is_path: bool):
+ if not is_path:
+ self.packages = packages
+ return
+
+ self.packages = self.parse_packages(packages)
+
+ def parse_packages(self, packages_path: str) -> dict:
+ return self.collect_functions_from_packages(packages_path, packages_path)
+
+ def collect_functions_from_packages(self, base_path, current_path):
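+        # Recursively walk the package directory and register every function found in a .py file
+        # under "<module name>.<function name>", where the module name is derived from the file
+        # path relative to base_path.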
+ functions_dict = {}
+
+ for entry in listdir(current_path):
+ entry_path = join(current_path, entry)
+
+ if isfile(entry_path) and entry.endswith(".py"):
+ module_name = self.convert_path_to_module_name(base_path, entry_path)
+ module = self.load_module_from_file(entry_path)
+
+ for func_name, func in module_to_dict(module).items():
+ functions_dict[f"{module_name}.{func_name}"] = func
+
+ elif not isfile(entry_path):
+ nested_functions = self.collect_functions_from_packages(base_path, entry_path)
+ functions_dict.update(nested_functions)
+
+ return functions_dict
+
+ @staticmethod
+ def convert_path_to_module_name(base_path, file_path):
+ return file_path.replace(base_path, "").replace(".py", "").replace("/", "")
+
+ @staticmethod
+ def load_module_from_file(file_path):
+ spec = importlib.util.spec_from_file_location("", file_path)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ return module
+
+ def create_matcher(self):
+ packages = module_to_dict(help_functions)
+
+ if self.packages:
+ packages.update({ "packages": self.packages })
+
+ if self.composite_linkage:
+ packages.update({ "composite_linkage": self.composite_linkage })
+
+ matcher_rewriter = RuleMatcherRewriter(
+ self.model.state, self.model.meta_model, self.ram_mm, eval_context=packages
+ )
+ return matcher_rewriter
+
+ def visualize_model(self):
+ print(make_url(graphviz.render_object_diagram(self.model.state, self.model.model, self.model.meta_model)))
+ print(plant_make_url(plantuml.render_object_diagram(self.model.state, self.model.model, self.model.meta_model)))
+
+ @staticmethod
+ def __extract_artefact_info(od, pt_art):
+ """Extract artefact metadata and data."""
+ data = od.get_slot_value(pt_art, "data")
+ pm_art = od.get_name(od.get_target(od.get_outgoing(pt_art, "pt_BelongsTo")[0]))
+ has_prev_version = bool(od.get_outgoing(pt_art, "pt_PrevVersion"))
+ is_last_version = not od.get_incoming(pt_art, "pt_PrevVersion")
+ return {
+ "Artefact Name": pm_art,
+ "Data": data,
+ "Has previous version": has_prev_version,
+ "Is last version": is_last_version
+ }
+
+ def __extract_inputs(self, od, event_node):
+ """Extract all consumed artefacts for an event."""
+ return [
+ self.__extract_artefact_info(od, od.get_source(consumes))
+ for consumes in od.get_incoming(event_node, "pt_Consumes")
+ ]
+
+ def __extract_outputs(self, od, event_node):
+ """Extract all produced artefacts for an event."""
+ return [
+ self.__extract_artefact_info(od, od.get_target(produces))
+ for produces in od.get_outgoing(event_node, "pt_Produces")
+ ]
+
+ @staticmethod
+ def to_snake_case(experiment_type):
+ # Finds uppercase letters that are not at the start of the string.
+ # Example: AtomicExperiment -> atomic_experiment
+        return re.sub(r'(?<!^)(?=[A-Z])', '_', experiment_type).lower()
+ {% for source, target in planted %}
+ g{{ source }} -> p{{ target }};
+ {% endfor %}
+}
diff --git a/examples/geraniums/metamodels/mm.od b/examples/geraniums/metamodels/mm.od
new file mode 100644
index 0000000..f6f6962
--- /dev/null
+++ b/examples/geraniums/metamodels/mm.od
@@ -0,0 +1,9 @@
+class Geranium {
+ Boolean flowering;
+}
+
+class Pot {
+ Boolean cracked;
+}
+
+association Planted [0..*] Geranium -> Pot [1..1]
diff --git a/examples/geraniums/models/eval_context.py b/examples/geraniums/models/eval_context.py
new file mode 100644
index 0000000..d8dfcd8
--- /dev/null
+++ b/examples/geraniums/models/eval_context.py
@@ -0,0 +1,44 @@
+import os
+
+from jinja2 import Environment, FileSystemLoader
+
+from api.od import ODAPI
+from framework.conformance import eval_context_decorator
+
+
+@eval_context_decorator
+def _render_geraniums_dot(od: ODAPI, file: str) -> str:
+ __DIR__ = os.path.dirname(__file__)
+ env = Environment(
+ loader=FileSystemLoader(
+ __DIR__
+ )
+ )
+ env.trim_blocks = True
+ env.lstrip_blocks = True
+ template_dot = env.get_template("geraniums_renderer.j2")
+
+ id_count = 0
+ id_map = {}
+ render = {"geraniums": [], "pots": [], "planted": []}
+
+ for name, uuid in od.get_all_instances("Geranium"):
+ render["geraniums"].append((id_count, name, od.get_slot_value(uuid, "flowering")))
+ id_map[uuid] = id_count
+ id_count += 1
+
+ for name, uuid in od.get_all_instances("Pot"):
+ render["pots"].append((id_count, name, od.get_slot_value(uuid, "cracked")))
+ id_map[uuid] = id_count
+ id_count += 1
+
+ for name, uuid in od.get_all_instances("Planted"):
+ render["planted"].append((id_map[od.get_source(uuid)], id_map[od.get_target(uuid)]))
+
+ with open(file, "w", encoding="utf-8") as f_dot:
+ f_dot.write(template_dot.render(**render))
+ return ""
+
+eval_context = {
+ "render_geraniums_dot": _render_geraniums_dot,
+}
diff --git a/examples/geraniums/models/example1.od b/examples/geraniums/models/example1.od
new file mode 100644
index 0000000..db5bc32
--- /dev/null
+++ b/examples/geraniums/models/example1.od
@@ -0,0 +1,17 @@
+f1:Geranium {
+ flowering = True;
+}
+f2:Geranium {
+ flowering = False;
+}
+f3:Geranium {
+ flowering = True;
+}
+
+p1:Pot {
+ cracked = True;
+}
+
+:Planted (f1 -> p1)
+:Planted (f2 -> p1)
+:Planted (f3 -> p1)
\ No newline at end of file
diff --git a/examples/geraniums/models/example2.od b/examples/geraniums/models/example2.od
new file mode 100644
index 0000000..9c4e0f4
--- /dev/null
+++ b/examples/geraniums/models/example2.od
@@ -0,0 +1,47 @@
+f1:Geranium {
+ flowering = True;
+}
+f2:Geranium {
+ flowering = True;
+}
+f3:Geranium {
+ flowering = False;
+}
+
+p1:Pot {
+ cracked = True;
+}
+
+:Planted (f1 -> p1)
+:Planted (f2 -> p1)
+:Planted (f3 -> p1)
+
+
+
+
+f4:Geranium {
+ flowering = True;
+}
+p2:Pot {
+ cracked = True;
+}
+:Planted (f4 -> p2)
+
+
+
+f5:Geranium {
+ flowering = True;
+}
+p3:Pot {
+ cracked = False;
+}
+:Planted (f5 -> p3)
+
+
+f6:Geranium {
+ flowering = False;
+}
+p4:Pot {
+ cracked = True;
+}
+:Planted (f6 -> p4)
\ No newline at end of file
diff --git a/examples/geraniums/renderer.py b/examples/geraniums/renderer.py
new file mode 100644
index 0000000..3ac50f5
--- /dev/null
+++ b/examples/geraniums/renderer.py
@@ -0,0 +1,45 @@
+import os
+
+from jinja2 import Environment, FileSystemLoader
+
+from api.od import ODAPI
+from concrete_syntax.graphviz.make_url import show_graphviz
+from concrete_syntax.graphviz.renderer import make_graphviz_id
+
+try:
+ import graphviz
+ HAVE_GRAPHVIZ = True
+except ImportError:
+ HAVE_GRAPHVIZ = False
+
+def render_geraniums_dot(od: ODAPI, file: str) -> str:
+ __DIR__ = os.path.dirname(__file__)
+ env = Environment(
+ loader=FileSystemLoader(
+ __DIR__
+ )
+ )
+ env.trim_blocks = True
+ env.lstrip_blocks = True
+ template_dot = env.get_template("geraniums_renderer.j2")
+
+ id_count = 0
+ id_map = {}
+ render = {"geraniums": [], "pots": [], "planted": []}
+
+ for name, uuid in od.get_all_instances("Geranium"):
+ render["geraniums"].append((id_count, name, od.get_slot_value(uuid, "flowering")))
+ id_map[uuid] = id_count
+ id_count += 1
+
+ for name, uuid in od.get_all_instances("Pot"):
+ render["pots"].append((id_count, name, od.get_slot_value(uuid, "cracked")))
+ id_map[uuid] = id_count
+ id_count += 1
+
+ for name, uuid in od.get_all_instances("Planted"):
+ render["planted"].append((id_map[od.get_source(uuid)], id_map[od.get_target(uuid)]))
+
+ with open(file, "w", encoding="utf-8") as f_dot:
+ f_dot.write(template_dot.render(**render))
+ return ""
\ No newline at end of file
diff --git a/examples/geraniums/rules/cracked_pots.od b/examples/geraniums/rules/cracked_pots.od
new file mode 100644
index 0000000..61ef57f
--- /dev/null
+++ b/examples/geraniums/rules/cracked_pots.od
@@ -0,0 +1,3 @@
+pot:RAM_Pot {
+ RAM_cracked = `get_value(this)`;
+}
\ No newline at end of file
diff --git a/examples/geraniums/rules/create_pot.od b/examples/geraniums/rules/create_pot.od
new file mode 100644
index 0000000..c6ef5d0
--- /dev/null
+++ b/examples/geraniums/rules/create_pot.od
@@ -0,0 +1,3 @@
+pot:RAM_Pot {
+ RAM_cracked = `False`;
+}
\ No newline at end of file
diff --git a/examples/geraniums/rules/flowering_flowers_in_pot.od b/examples/geraniums/rules/flowering_flowers_in_pot.od
new file mode 100644
index 0000000..591c123
--- /dev/null
+++ b/examples/geraniums/rules/flowering_flowers_in_pot.od
@@ -0,0 +1,7 @@
+pot:RAM_Pot
+
+flower:RAM_Geranium {
+ RAM_flowering = `get_value(this)`;
+}
+
+:RAM_Planted (flower -> pot)
\ No newline at end of file
diff --git a/examples/geraniums/rules/repot_flower_in_pot.od b/examples/geraniums/rules/repot_flower_in_pot.od
new file mode 100644
index 0000000..134813f
--- /dev/null
+++ b/examples/geraniums/rules/repot_flower_in_pot.od
@@ -0,0 +1,8 @@
+pot:RAM_Pot
+new_pot:RAM_Pot
+
+flower:RAM_Geranium {
+ RAM_flowering = `get_value(this)`;
+}
+
+replant:RAM_Planted (flower -> new_pot)
\ No newline at end of file
diff --git a/examples/geraniums/runner.py b/examples/geraniums/runner.py
new file mode 100644
index 0000000..cd72db6
--- /dev/null
+++ b/examples/geraniums/runner.py
@@ -0,0 +1,48 @@
+from examples.geraniums.renderer import render_geraniums_dot
+from transformation.ramify import ramify
+
+from models.eval_context import eval_context
+
+from transformation.schedule.rule_scheduler import *
+
+if __name__ == "__main__":
+ import os
+ THIS_DIR = os.path.dirname(__file__)
+
+ # get file contents as string
+ def read_file(filename):
+ with open(THIS_DIR+'/'+filename) as file:
+ return file.read()
+
+
+ state = DevState()
+ scd_mmm = bootstrap_scd(state)
+
+ mm_cs = read_file('metamodels/mm.od')
+ m_cs = read_file('models/example2.od')
+
+ mm = parser_cd.parse_cd(
+ state,
+ m_text=mm_cs,
+ )
+ m = parser_od.parse_od(
+ state, m_text=m_cs, mm=mm
+ )
+ conf_err = Conformance(
+ state, m, mm
+ ).check_nominal()
+ print(render_conformance_check_result(conf_err))
+ mm_ramified = ramify(state, mm)
+
+ action_generator = RuleScheduler(state, mm, mm_ramified, verbose=True, directory="examples/geraniums", eval_context=eval_context)
+ od = ODAPI(state, m, mm)
+ render_geraniums_dot(od, f"{THIS_DIR}/geraniums.dot")
+
+ # if action_generator.load_schedule(f"petrinet.od"):
+ # if action_generator.load_schedule("schedules/combinatory.drawio"):
+ if action_generator.load_schedule("schedules/schedule.drawio"):
+
+ action_generator.generate_dot("../dot.dot")
+ code, message = action_generator.run(od)
+ print(f"{code}: {message}")
+ render_geraniums_dot(od, f"{THIS_DIR}/geraniums_final.dot")
\ No newline at end of file
diff --git a/examples/geraniums/schedules/schedule.drawio b/examples/geraniums/schedules/schedule.drawio
new file mode 100644
index 0000000..41437fa
--- /dev/null
+++ b/examples/geraniums/schedules/schedule.drawio
@@ -0,0 +1,645 @@
diff --git a/examples/model_transformation/woods.plantuml b/examples/model_transformation/woods.plantuml
deleted file mode 100644
index 0d52b6b..0000000
--- a/examples/model_transformation/woods.plantuml
+++ /dev/null
@@ -1,139 +0,0 @@
-package "DSL Meta-Model" {
-class "Bear" as 00000000_0000_0000_0000_00000000046d {
-}
-abstract class "Animal" as 00000000_0000_0000_0000_000000000474 {
-}
-class "Man" as 00000000_0000_0000_0000_000000000491 {
- weight : Integer
-}
-
-00000000_0000_0000_0000_000000000474 <|-- 00000000_0000_0000_0000_000000000491
-00000000_0000_0000_0000_000000000474 <|-- 00000000_0000_0000_0000_00000000046d
-
-00000000_0000_0000_0000_000000000491 " " --> "1 .. *" 00000000_0000_0000_0000_000000000474 : afraidOf
-}
-package "Int Meta-Model" {
-class "Integer" as 00000000_0000_0000_0000_000000000094 {
-}
-
-
-}
-package "RAMified DSL Meta-Model" {
-class "RAM_Bear" as 00000000_0000_0000_0000_0000000005bb {
-}
-class "RAM_Animal" as 00000000_0000_0000_0000_0000000005c5 {
-}
-class "RAM_Man" as 00000000_0000_0000_0000_0000000005cf {
- RAM_weight : ActionCode
-}
-
-00000000_0000_0000_0000_0000000005c5 <|-- 00000000_0000_0000_0000_0000000005cf
-00000000_0000_0000_0000_0000000005c5 <|-- 00000000_0000_0000_0000_0000000005bb
-
-00000000_0000_0000_0000_0000000005cf " " --> "0 .. *" 00000000_0000_0000_0000_0000000005c5 : RAM_afraidOf
-}
-package "RAMified Int Meta-Model" {
-class "RAM_Integer" as 00000000_0000_0000_0000_00000000064c {
-}
-
-
-}
-00000000_0000_0000_0000_0000000005bb ..> 00000000_0000_0000_0000_00000000046d #line:green;text:green : RAMifies
-00000000_0000_0000_0000_0000000005c5 ..> 00000000_0000_0000_0000_000000000474 #line:green;text:green : RAMifies
-00000000_0000_0000_0000_0000000005cf ..> 00000000_0000_0000_0000_000000000491 #line:green;text:green : RAMifies
-00000000_0000_0000_0000_0000000005cf::RAM_weight ..> 00000000_0000_0000_0000_000000000491::weight #line:green;text:green : RAMifies
-00000000_0000_0000_0000_00000000064c ..> 00000000_0000_0000_0000_000000000094 #line:green;text:green : RAMifies
-package "LHS" {
-map "scaryAnimal : RAM_Animal" as 00000000_0000_0000_0000_00000000068a {
-}
-map "man : RAM_Man" as 00000000_0000_0000_0000_00000000066d {
-RAM_weight => `v > 60`
-}
-
-00000000_0000_0000_0000_00000000066d -> 00000000_0000_0000_0000_00000000068a : :RAM_afraidOf
-}
-00000000_0000_0000_0000_00000000068a ..> 00000000_0000_0000_0000_0000000005c5 #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_00000000066d ..> 00000000_0000_0000_0000_0000000005cf #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_00000000066d::RAM_weight ..> 00000000_0000_0000_0000_0000000005cf::RAM_weight #line:blue;text:blue : instanceOf
-
-package "RHS" {
-map "man : RAM_Man" as 00000000_0000_0000_0000_000000000699 {
-RAM_weight => `v + 5`
-}
-map "bill : RAM_Man" as 00000000_0000_0000_0000_0000000006b6 {
-RAM_weight => `100`
-}
-
-00000000_0000_0000_0000_0000000006b6 -> 00000000_0000_0000_0000_000000000699 : :RAM_afraidOf
-}
-00000000_0000_0000_0000_000000000699 ..> 00000000_0000_0000_0000_0000000005cf #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000699::RAM_weight ..> 00000000_0000_0000_0000_0000000005cf::RAM_weight #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_0000000006b6 ..> 00000000_0000_0000_0000_0000000005cf #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_0000000006b6::RAM_weight ..> 00000000_0000_0000_0000_0000000005cf::RAM_weight #line:blue;text:blue : instanceOf
-
-package "Model (before rewrite)" {
-map "bear2 : Bear" as 00000000_0000_0000_0000_000000000597 {
-}
-map "bear1 : Bear" as 00000000_0000_0000_0000_000000000590 {
-}
-map "george : Man" as 00000000_0000_0000_0000_000000000573 {
-weight => 80
-}
-
-00000000_0000_0000_0000_000000000573 -> 00000000_0000_0000_0000_000000000590 : :afraidOf
-00000000_0000_0000_0000_000000000573 -> 00000000_0000_0000_0000_000000000597 : :afraidOf
-}
-00000000_0000_0000_0000_000000000597 ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000590 ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000573 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000573::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf
-
-00000000_0000_0000_0000_00000000068a ..> 00000000_0000_0000_0000_000000000590 #line:red;line.dotted;text:red : matchedWith
-00000000_0000_0000_0000_00000000066d ..> 00000000_0000_0000_0000_000000000573 #line:red;line.dotted;text:red : matchedWith
-00000000_0000_0000_0000_00000000066d::RAM_weight ..> 00000000_0000_0000_0000_000000000573::weight #line:red;line.dotted;text:red : matchedWith
-package "Model (after rewrite 0)" {
-map "bear2 : Bear" as 00000000_0000_0000_0000_0000000006db {
-}
-map "george : Man" as 00000000_0000_0000_0000_0000000006e9 {
-weight => 85
-}
-map "bill0 : Man" as 00000000_0000_0000_0000_000000000723 {
-weight => 100
-}
-
-00000000_0000_0000_0000_000000000723 -> 00000000_0000_0000_0000_0000000006e9 : :afraidOf
-00000000_0000_0000_0000_0000000006e9 -> 00000000_0000_0000_0000_0000000006db : :afraidOf
-}
-00000000_0000_0000_0000_000000000699 ..> 00000000_0000_0000_0000_0000000006e9 #line:red;line.dotted;text:red : matchedWith
-00000000_0000_0000_0000_000000000699::RAM_weight ..> 00000000_0000_0000_0000_0000000006e9::weight #line:red;line.dotted;text:red : matchedWith
-00000000_0000_0000_0000_0000000006b6 ..> 00000000_0000_0000_0000_000000000723 #line:red;line.dotted;text:red : matchedWith
-00000000_0000_0000_0000_0000000006db ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_0000000006e9 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_0000000006e9::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000723 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000723::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf
-
-00000000_0000_0000_0000_00000000068a ..> 00000000_0000_0000_0000_000000000597 #line:orange;line.dotted;text:orange : matchedWith
-00000000_0000_0000_0000_00000000066d ..> 00000000_0000_0000_0000_000000000573 #line:orange;line.dotted;text:orange : matchedWith
-00000000_0000_0000_0000_00000000066d::RAM_weight ..> 00000000_0000_0000_0000_000000000573::weight #line:orange;line.dotted;text:orange : matchedWith
-package "Model (after rewrite 1)" {
-map "bear1 : Bear" as 00000000_0000_0000_0000_000000000747 {
-}
-map "george : Man" as 00000000_0000_0000_0000_00000000074e {
-weight => 85
-}
-map "bill0 : Man" as 00000000_0000_0000_0000_000000000788 {
-weight => 100
-}
-
-00000000_0000_0000_0000_000000000788 -> 00000000_0000_0000_0000_00000000074e : :afraidOf
-00000000_0000_0000_0000_00000000074e -> 00000000_0000_0000_0000_000000000747 : :afraidOf
-}
-00000000_0000_0000_0000_000000000699 ..> 00000000_0000_0000_0000_00000000074e #line:orange;line.dotted;text:orange : matchedWith
-00000000_0000_0000_0000_000000000699::RAM_weight ..> 00000000_0000_0000_0000_00000000074e::weight #line:orange;line.dotted;text:orange : matchedWith
-00000000_0000_0000_0000_0000000006b6 ..> 00000000_0000_0000_0000_000000000788 #line:orange;line.dotted;text:orange : matchedWith
-00000000_0000_0000_0000_000000000747 ..> 00000000_0000_0000_0000_00000000046d #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_00000000074e ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_00000000074e::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000788 ..> 00000000_0000_0000_0000_000000000491 #line:blue;text:blue : instanceOf
-00000000_0000_0000_0000_000000000788::weight ..> 00000000_0000_0000_0000_000000000491::weight #line:blue;text:blue : instanceOf
diff --git a/examples/model_transformation/woods.py b/examples/model_transformation/woods.py
deleted file mode 100644
index f321037..0000000
--- a/examples/model_transformation/woods.py
+++ /dev/null
@@ -1,245 +0,0 @@
-# Model transformation experiment
-
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from uuid import UUID
-from services.scd import SCD
-from framework.conformance import Conformance
-from services.od import OD
-from transformation.matcher import match_od
-from transformation.ramify import ramify
-from transformation.cloner import clone_od
-from transformation import rewriter
-from services.bottom.V0 import Bottom
-from services.primitives.integer_type import Integer
-from concrete_syntax.plantuml import renderer as plantuml
-from concrete_syntax.plantuml.make_url import make_url as make_plantuml_url
-from concrete_syntax.textual_od import parser, renderer
-
-def main():
- state = DevState()
- root = state.read_root() # id: 0
-
- # Meta-meta-model: a class diagram that describes the language of class diagrams
- scd_mmm_id = bootstrap_scd(state)
- int_mm_id = UUID(state.read_value(state.read_dict(state.read_root(), "Integer")))
- string_mm_id = UUID(state.read_value(state.read_dict(state.read_root(), "String")))
-
- # conf = Conformance(state, scd_mmm_id, scd_mmm_id)
- # print("Conformance SCD_MM -> SCD_MM?", conf.check_nominal(log=True))
- # print("--------------------------------------")
- # print(renderer.render_od(state, scd_mmm_id, scd_mmm_id, hide_names=True))
- # print("--------------------------------------")
-
- # Create DSL MM with parser
- dsl_mm_cs = """
- # Integer:ModelRef
- Bear:Class
- Animal:Class {
- abstract = True;
- }
- Man:Class {
- lower_cardinality = 1;
- upper_cardinality = 2;
- constraint = ```
- get_value(get_slot(this, "weight")) > 20
- ```;
- }
- Man_weight:AttributeLink (Man -> Integer) {
- name = "weight";
- optional = False;
- constraint = ```
- # this is the same constraint as above, but this time, part of the attributelink itself (and thus shorter)
- tgt = get_target(this)
- tgt_type = get_type_name(tgt)
- get_value(tgt) > 20
- ```;
- }
- afraidOf:Association (Man -> Animal) {
- target_lower_cardinality = 1;
- }
- :Inheritance (Man -> Animal)
- :Inheritance (Bear -> Animal)
-
- not_too_fat:GlobalConstraint {
- constraint = ```
- # total weight of all men low enough
- total_weight = 0
- for man_name, man_id in get_all_instances("Man"):
- total_weight += get_value(get_slot(man_id, "weight"))
- total_weight < 85
- ```;
- }
- """
- dsl_mm_id = parser.parse_od(state, dsl_mm_cs, mm=scd_mmm_id)
-
- # Create DSL M with parser
- dsl_m_cs = """
- george:Man {
- weight = 80;
- }
- bear1:Bear
- bear2:Bear
- :afraidOf (george -> bear1)
- :afraidOf (george -> bear2)
- """
- dsl_m_id = parser.parse_od(state, dsl_m_cs, mm=dsl_mm_id)
-
- # print("DSL MM:")
- # print("--------------------------------------")
- # print(renderer.render_od(state, dsl_mm_id, scd_mmm_id, hide_names=True))
- # print("--------------------------------------")
-
- conf = Conformance(state, dsl_mm_id, scd_mmm_id)
- print("Conformance DSL_MM -> SCD_MM?", conf.check_nominal(log=True))
-
- # print("DSL M:")
- # print("--------------------------------------")
- # print(renderer.render_od(state, dsl_m_id, dsl_mm_id, hide_names=True))
- # print("--------------------------------------")
-
- conf = Conformance(state, dsl_m_id, dsl_mm_id)
- print("Conformance DSL_M -> DSL_MM?", conf.check_nominal(log=True))
-
- # RAMify MM
- prefix = "RAM_" # all ramified types can be prefixed to distinguish them a bit more
- ramified_mm_id = ramify(state, dsl_mm_id, prefix)
- ramified_int_mm_id = ramify(state, int_mm_id, prefix)
-
- # LHS - pattern to match
-
- # TODO: enable more powerful constraints
- lhs_cs = f"""
- # object to match
- man:{prefix}Man {{
- # match only men heavy enough
- {prefix}weight = ```
- get_value(this) > 60
- ```;
- }}
-
- # object to delete
- scaryAnimal:{prefix}Animal
-
- # link to delete
- manAfraidOfAnimal:{prefix}afraidOf (man -> scaryAnimal)
- """
- lhs_id = parser.parse_od(state, lhs_cs, mm=ramified_mm_id)
-
-
- conf = Conformance(state, lhs_id, ramified_mm_id)
- print("Conformance LHS_M -> RAM_DSL_MM?", conf.check_nominal(log=True))
-
- # RHS of our rule
-
- # TODO: enable more powerful actions
- rhs_cs = f"""
- # matched object
- man:{prefix}Man {{
- # man gains weight
- {prefix}weight = `get_value(this) + 5`;
- }}
-
- # object to create
- bill:{prefix}Man {{
- {prefix}weight = `100`;
- }}
-
- # link to create
- billAfraidOfMan:{prefix}afraidOf (bill -> man)
- """
- rhs_id = parser.parse_od(state, rhs_cs, mm=ramified_mm_id)
-
- conf = Conformance(state, rhs_id, ramified_mm_id)
- print("Conformance RHS_M -> RAM_DSL_MM?", conf.check_nominal(log=True))
-
- def render_ramification():
- uml = (""
- # Render original and RAMified meta-models
- + plantuml.render_package("DSL Meta-Model", plantuml.render_class_diagram(state, dsl_mm_id))
- + plantuml.render_package("Int Meta-Model", plantuml.render_class_diagram(state, int_mm_id))
- + plantuml.render_package("RAMified DSL Meta-Model", plantuml.render_class_diagram(state, ramified_mm_id))
- + plantuml.render_package("RAMified Int Meta-Model", plantuml.render_class_diagram(state, ramified_int_mm_id))
-
- # Render RAMification traceability links
- + plantuml.render_trace_ramifies(state, dsl_mm_id, ramified_mm_id)
- + plantuml.render_trace_ramifies(state, int_mm_id, ramified_int_mm_id)
- )
-
- return uml
-
- def render_lhs_rhs():
- uml = render_ramification()
- # Render pattern
- uml += plantuml.render_package("LHS", plantuml.render_object_diagram(state, lhs_id, ramified_mm_id))
- uml += plantuml.render_trace_conformance(state, lhs_id, ramified_mm_id)
-
- # Render pattern
- uml += plantuml.render_package("RHS", plantuml.render_object_diagram(state, rhs_id, ramified_mm_id))
- uml += plantuml.render_trace_conformance(state, rhs_id, ramified_mm_id)
- return uml
-
-
- def render_all_matches():
- uml = render_lhs_rhs()
- # Render host graph (before rewriting)
- uml += plantuml.render_package("Model (before rewrite)", plantuml.render_object_diagram(state, dsl_m_id, dsl_mm_id))
- # Render conformance
- uml += plantuml.render_trace_conformance(state, dsl_m_id, dsl_mm_id)
-
- print("matching...")
- generator = match_od(state, dsl_m_id, dsl_mm_id, lhs_id, ramified_mm_id)
- for match, color in zip(generator, ["red", "orange"]):
- print("\nMATCH:\n", match)
-
- # Render every match
- uml += plantuml.render_trace_match(state, match, lhs_id, dsl_m_id, color)
-
- print("DONE")
- return uml
-
- def render_rewrite():
- uml = render_lhs_rhs()
-
- # Render host graph (before rewriting)
- uml += plantuml.render_package("Model (before rewrite)", plantuml.render_object_diagram(state, dsl_m_id, dsl_mm_id))
- # Render conformance
- uml += plantuml.render_trace_conformance(state, dsl_m_id, dsl_mm_id)
-
- generator = match_od(state, dsl_m_id, dsl_mm_id, lhs_id, ramified_mm_id)
- for i, (match, color) in enumerate(zip(generator, ["red", "orange"])):
- uml += plantuml.render_trace_match(state, match, lhs_id, dsl_m_id, color)
-
- # rewrite happens in-place (which sucks), so we will only modify a clone:
- snapshot_dsl_m_id = clone_od(state, dsl_m_id, dsl_mm_id)
- rewriter.rewrite(state, lhs_id, rhs_id, ramified_mm_id, match, snapshot_dsl_m_id, dsl_mm_id)
-
- conf = Conformance(state, snapshot_dsl_m_id, dsl_mm_id)
- print(f"Conformance DSL_M (after rewrite {i}) -> DSL_MM?", conf.check_nominal(log=True))
-
- # Render host graph (after rewriting)
- uml += plantuml.render_package(f"Model (after rewrite {i})", plantuml.render_object_diagram(state, snapshot_dsl_m_id, dsl_mm_id))
- # Render match
- uml += plantuml.render_trace_match(state, match, rhs_id, snapshot_dsl_m_id, color)
- # Render conformance
- uml += plantuml.render_trace_conformance(state, snapshot_dsl_m_id, dsl_mm_id)
-
- return uml
-
- # plantuml_str = render_ramification()
- # plantuml_str = render_all_matches()
- plantuml_str = render_rewrite()
-
- print()
- print("==============================================")
- print("BEGIN PLANTUML")
- print("==============================================")
-
- print(make_plantuml_url(plantuml_str))
-
- print("==============================================")
- print("END PLANTUML")
- print("==============================================")
-
-if __name__ == "__main__":
- main()
diff --git a/examples/petrinet/models/m_example_simple.od b/examples/petrinet/models/m_example_simple.od
index a3eee8d..d7dd1ea 100644
--- a/examples/petrinet/models/m_example_simple.od
+++ b/examples/petrinet/models/m_example_simple.od
@@ -1,5 +1,8 @@
p0:PNPlace
p1:PNPlace
+p2:PNPlace
+p3:PNPlace
+p4:PNPlace
t0:PNTransition
:arc (p0 -> t0)
@@ -7,4 +10,12 @@ t0:PNTransition
t1:PNTransition
:arc (p1 -> t1)
-:arc (t1 -> p0)
\ No newline at end of file
+:arc (t1 -> p2)
+
+t2:PNTransition
+:arc (p2 -> t2)
+:arc (t2 -> p0)
+
+
+t3:PNTransition
+:arc (t3 -> p4)
\ No newline at end of file
diff --git a/examples/petrinet/models/m_example_simple_rt_initial.od b/examples/petrinet/models/m_example_simple_rt_initial.od
index fa93f4e..64fc3b7 100644
--- a/examples/petrinet/models/m_example_simple_rt_initial.od
+++ b/examples/petrinet/models/m_example_simple_rt_initial.od
@@ -9,3 +9,21 @@ p1s:PNPlaceState {
}
:pn_of (p1s -> p1)
+
+p2s:PNPlaceState {
+ numTokens = 0;
+}
+
+:pn_of (p2s -> p2)
+
+p3s:PNPlaceState {
+ numTokens = 0;
+}
+
+:pn_of (p3s -> p3)
+
+p4s:PNPlaceState {
+ numTokens = 0;
+}
+
+:pn_of (p4s -> p4)
diff --git a/examples/petrinet/models/rules/all_incoming.od b/examples/petrinet/models/rules/all_incoming.od
new file mode 100644
index 0000000..1b87f1d
--- /dev/null
+++ b/examples/petrinet/models/rules/all_incoming.od
@@ -0,0 +1,13 @@
+# Any place (no constraint on its token count):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
diff --git a/examples/petrinet/models/rules/all_incomming.od b/examples/petrinet/models/rules/all_incomming.od
new file mode 100644
index 0000000..1b87f1d
--- /dev/null
+++ b/examples/petrinet/models/rules/all_incomming.od
@@ -0,0 +1,13 @@
+# Any place (no constraint on its token count):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
diff --git a/examples/petrinet/models/rules/all_outgoing.od b/examples/petrinet/models/rules/all_outgoing.od
new file mode 100644
index 0000000..ab431cc
--- /dev/null
+++ b/examples/petrinet/models/rules/all_outgoing.od
@@ -0,0 +1,13 @@
+# Any place (no constraint on its token count):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/models/rules/increase_outgoing.od b/examples/petrinet/models/rules/increase_outgoing.od
new file mode 100644
index 0000000..1fa1acb
--- /dev/null
+++ b/examples/petrinet/models/rules/increase_outgoing.od
@@ -0,0 +1,13 @@
+# A place whose token count gets incremented:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `get_value(this) + 1`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/models/rules/input_without_token.od b/examples/petrinet/models/rules/input_without_token.od
new file mode 100644
index 0000000..9207ce2
--- /dev/null
+++ b/examples/petrinet/models/rules/input_without_token.od
@@ -0,0 +1,13 @@
+# A place with no tokens:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `get_value(this) == 0`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
diff --git a/examples/petrinet/models/rules/places.od b/examples/petrinet/models/rules/places.od
new file mode 100644
index 0000000..923fb03
--- /dev/null
+++ b/examples/petrinet/models/rules/places.od
@@ -0,0 +1,3 @@
+# Any place:
+
+p:RAM_PNPlace
\ No newline at end of file
diff --git a/examples/petrinet/models/rules/reduce_incoming.od b/examples/petrinet/models/rules/reduce_incoming.od
new file mode 100644
index 0000000..b85a2db
--- /dev/null
+++ b/examples/petrinet/models/rules/reduce_incoming.od
@@ -0,0 +1,13 @@
+# A place whose token count gets decremented:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+    RAM_numTokens = `get_value(this) - 1`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
\ No newline at end of file
diff --git a/examples/petrinet/models/rules/reduce_incomming.od b/examples/petrinet/models/rules/reduce_incomming.od
new file mode 100644
index 0000000..b85a2db
--- /dev/null
+++ b/examples/petrinet/models/rules/reduce_incomming.od
@@ -0,0 +1,13 @@
+# A place whose token count gets decremented:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+    RAM_numTokens = `get_value(this) - 1`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
\ No newline at end of file
diff --git a/examples/petrinet/models/rules/transition.od b/examples/petrinet/models/rules/transition.od
new file mode 100644
index 0000000..c3bd82c
--- /dev/null
+++ b/examples/petrinet/models/rules/transition.od
@@ -0,0 +1 @@
+t:RAM_PNTransition
\ No newline at end of file
diff --git a/examples/petrinet/models/schedules/combinatory.drawio b/examples/petrinet/models/schedules/combinatory.drawio
new file mode 100644
index 0000000..c22b5ce
--- /dev/null
+++ b/examples/petrinet/models/schedules/combinatory.drawio
@@ -0,0 +1,526 @@
diff --git a/examples/petrinet/models/schedules/petrinet.od b/examples/petrinet/models/schedules/petrinet.od
new file mode 100644
index 0000000..386c3ed
--- /dev/null
+++ b/examples/petrinet/models/schedules/petrinet.od
@@ -0,0 +1,66 @@
+start:Start
+end:End
+
+m:Match{
+ file = "operational_semantics/transition";
+}
+
+nac1:Match{
+ file = "operational_semantics/all_input_have_token";
+ n = "1";
+}
+
+inputs:Match{
+ file = "operational_semantics/all_inputs";
+}
+rinput:Rewrite{
+ file = "operational_semantics/all_inputs_reduced";
+}
+
+outputs:Match{
+ file = "operational_semantics/all_outputs";
+}
+routput:Rewrite{
+ file = "operational_semantics/all_outputs_increased";
+}
+
+p:Print{
+ event = True;
+}
+p2:Print{
+ event = False;
+    custom = `"successfully executed a Petri net transition"`;
+}
+
+l:Loop
+l2:Loop
+l3:Loop
+
+
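+# Execution flow (Conn_exec) between the schedule nodes: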
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> l) {from="success"; to="in";}
+:Conn_exec (l -> nac1) {from="it"; to="in";}
+:Conn_exec (l -> end) {from="out"; to="in";}
+:Conn_exec (nac1 -> l) {from="success"; to="in";}
+:Conn_exec (nac1 -> inputs) {from="fail"; to="in";}
+:Conn_exec (inputs -> l2) {from="success"; to="in";}
+:Conn_exec (inputs -> l2) {from="fail"; to="in";}
+:Conn_exec (l2 -> rinput) {from="it"; to="in";}
+:Conn_exec (rinput -> l2) {from="out"; to="in";}
+:Conn_exec (l2 -> outputs) {from="out"; to="in";}
+:Conn_exec (outputs -> l3) {from="success"; to="in";}
+:Conn_exec (outputs -> l3) {from="fail"; to="in";}
+:Conn_exec (l3 -> routput) {from="it"; to="in";}
+:Conn_exec (routput -> l3) {from="out"; to="in";}
+:Conn_exec (l3 -> p2) {from="out"; to="in";}
+:Conn_exec (p2 -> end) {from="out"; to="in";}
+
+
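+# Data flow (Conn_data): pass each match on to the node that consumes it: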
+:Conn_data (m -> l) {from="out"; to="in";}
+:Conn_data (l -> nac1) {from="out"; to="in";}
+:Conn_data (l -> inputs) {from="out"; to="in";}
+:Conn_data (inputs -> l2) {from="out"; to="in";}
+:Conn_data (l2 -> rinput) {from="out"; to="in";}
+:Conn_data (l -> outputs) {from="out"; to="in";}
+:Conn_data (outputs -> l3) {from="out"; to="in";}
+:Conn_data (l3 -> routput) {from="out"; to="in";}
\ No newline at end of file
diff --git a/examples/petrinet/models/schedules/petrinet2.drawio b/examples/petrinet/models/schedules/petrinet2.drawio
new file mode 100644
index 0000000..6294d7f
--- /dev/null
+++ b/examples/petrinet/models/schedules/petrinet2.drawio
@@ -0,0 +1,1160 @@
diff --git a/examples/petrinet/models/schedules/petrinet3.drawio b/examples/petrinet/models/schedules/petrinet3.drawio
new file mode 100644
index 0000000..a20ee2c
--- /dev/null
+++ b/examples/petrinet/models/schedules/petrinet3.drawio
@@ -0,0 +1,915 @@
diff --git a/examples/petrinet/models/schedules/recursion.drawio b/examples/petrinet/models/schedules/recursion.drawio
new file mode 100644
index 0000000..f82cabd
--- /dev/null
+++ b/examples/petrinet/models/schedules/recursion.drawio
@@ -0,0 +1,217 @@
diff --git a/examples/petrinet/models/schedules/schedule.od b/examples/petrinet/models/schedules/schedule.od
new file mode 100644
index 0000000..8c8f816
--- /dev/null
+++ b/examples/petrinet/models/schedules/schedule.od
@@ -0,0 +1,4 @@
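+# Minimal schedule: Start wired directly to End.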
+start: Start
+end: End
+
+:Conn_exec (start -> end) {from="out"; to="in";}
\ No newline at end of file
diff --git a/examples/petrinet/operational_semantics/all_inputs.od b/examples/petrinet/operational_semantics/all_inputs.od
new file mode 100644
index 0000000..1b87f1d
--- /dev/null
+++ b/examples/petrinet/operational_semantics/all_inputs.od
@@ -0,0 +1,13 @@
+# An input place (any number of tokens):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
diff --git a/examples/petrinet/operational_semantics/all_inputs_reduced.od b/examples/petrinet/operational_semantics/all_inputs_reduced.od
new file mode 100644
index 0000000..a6bfdd4
--- /dev/null
+++ b/examples/petrinet/operational_semantics/all_inputs_reduced.od
@@ -0,0 +1,13 @@
+# Decrement the token count of the matched input place:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+    RAM_numTokens = `get_value(this) - 1`;
+}
+:RAM_pn_of (ps -> p)
+
+# The arc connecting that place and our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/operational_semantics/all_output_places.od b/examples/petrinet/operational_semantics/all_output_places.od
new file mode 100644
index 0000000..ab431cc
--- /dev/null
+++ b/examples/petrinet/operational_semantics/all_output_places.od
@@ -0,0 +1,13 @@
+# An output place (any number of tokens):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/operational_semantics/all_output_places_update.od b/examples/petrinet/operational_semantics/all_output_places_update.od
new file mode 100644
index 0000000..8d2908e
--- /dev/null
+++ b/examples/petrinet/operational_semantics/all_output_places_update.od
@@ -0,0 +1,13 @@
+# Increment the token count of the matched output place:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `set_value(this, get_value(this) + 1)`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/operational_semantics/all_outputs.od b/examples/petrinet/operational_semantics/all_outputs.od
new file mode 100644
index 0000000..ce5efd0
--- /dev/null
+++ b/examples/petrinet/operational_semantics/all_outputs.od
@@ -0,0 +1,13 @@
+# An output place (any number of tokens):
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/operational_semantics/all_outputs_increased.od b/examples/petrinet/operational_semantics/all_outputs_increased.od
new file mode 100644
index 0000000..1fa1acb
--- /dev/null
+++ b/examples/petrinet/operational_semantics/all_outputs_increased.od
@@ -0,0 +1,13 @@
+# Increment the token count of the matched output place:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `get_value(this) + 1`;
+}
+:RAM_pn_of (ps -> p)
+
+# An outgoing arc from our transition to that place:
+
+t:RAM_PNTransition
+
+:RAM_arc (t -> p)
diff --git a/examples/petrinet/operational_semantics/delete_all.od b/examples/petrinet/operational_semantics/delete_all.od
new file mode 100644
index 0000000..e69de29
diff --git a/examples/petrinet/operational_semantics/input_without_token.od b/examples/petrinet/operational_semantics/input_without_token.od
new file mode 100644
index 0000000..9207ce2
--- /dev/null
+++ b/examples/petrinet/operational_semantics/input_without_token.od
@@ -0,0 +1,13 @@
+# A place with no tokens:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `get_value(this) == 0`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
diff --git a/examples/petrinet/operational_semantics/r_fire_transition_lhs.od b/examples/petrinet/operational_semantics/r_fire_transition_lhs.od
index c3bd82c..c05515b 100644
--- a/examples/petrinet/operational_semantics/r_fire_transition_lhs.od
+++ b/examples/petrinet/operational_semantics/r_fire_transition_lhs.od
@@ -1 +1 @@
-t:RAM_PNTransition
\ No newline at end of file
+t:RAM_PNTransition
diff --git a/examples/petrinet/operational_semantics/transition.od b/examples/petrinet/operational_semantics/transition.od
new file mode 100644
index 0000000..c3bd82c
--- /dev/null
+++ b/examples/petrinet/operational_semantics/transition.od
@@ -0,0 +1 @@
+t:RAM_PNTransition
\ No newline at end of file
diff --git a/examples/petrinet/petrinet_renderer.j2 b/examples/petrinet/petrinet_renderer.j2
new file mode 100644
index 0000000..0ace22b
--- /dev/null
+++ b/examples/petrinet/petrinet_renderer.j2
@@ -0,0 +1,12 @@
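+{# Graphviz template: renders every Petri net place as a small circle labeled "<name>_<numTokens>". #}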
+digraph G {
+ rankdir=LR;
+ center=true;
+ margin=1;
+ nodesep=1;
+ subgraph places {
+ node [fontname=Arial,fontsize=10,shape=circle,fixedsize=true,label="", height=.35,width=.35];
+ {% for place in places %}
+ {{ place[0] }} [label="{{ place[1] }}_{{ place[2] }}"]
+ {% endfor %}
+ }
+}
\ No newline at end of file
diff --git a/examples/petrinet/renderer.py b/examples/petrinet/renderer.py
index 278376a..3916311 100644
--- a/examples/petrinet/renderer.py
+++ b/examples/petrinet/renderer.py
@@ -1,3 +1,7 @@
+import os
+
+from jinja2 import Environment, FileSystemLoader
+
from api.od import ODAPI
from concrete_syntax.graphviz.make_url import show_graphviz
from concrete_syntax.graphviz.renderer import make_graphviz_id
@@ -16,13 +20,24 @@ def render_tokens(num_tokens: int):
return str(num_tokens)
def render_petri_net_to_dot(od: ODAPI) -> str:
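+    # Also render the places through the Jinja2 template (petrinet_renderer.j2) and write the result to test_pet.dot.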
+ env = Environment(
+ loader=FileSystemLoader(
+ os.path.dirname(__file__)
+ )
+ )
+ env.trim_blocks = True
+ env.lstrip_blocks = True
+ template_dot = env.get_template("petrinet_renderer.j2")
+ with open("test_pet.dot", "w", encoding="utf-8") as f_dot:
+ places = [(make_graphviz_id(place), place_name, render_tokens(od.get_slot_value(od.get_source(od.get_incoming(place, "pn_of")[0]), "numTokens"))) for place_name, place in od.get_all_instances("PNPlace")]
+ f_dot.write(template_dot.render({"places": places}))
dot = ""
- dot += "rankdir=LR;"
- dot += "center=true;"
- dot += "margin=1;"
- dot += "nodesep=1;"
- dot += "subgraph places {"
- dot += " node [fontname=Arial,fontsize=10,shape=circle,fixedsize=true,label=\"\", height=.35,width=.35];"
+ dot += "rankdir=LR;\n"
+ dot += "center=true;\n"
+ dot += "margin=1;\n"
+ dot += "nodesep=1;\n"
+ dot += "subgraph places {\n"
+ dot += " node [fontname=Arial,fontsize=10,shape=circle,fixedsize=true,label=\"\", height=.35,width=.35];\n"
for place_name, place in od.get_all_instances("PNPlace"):
# place_name = od.get_name(place)
try:
diff --git a/examples/petrinet/runner.py b/examples/petrinet/runner.py
index b2d0c51..75fd37f 100644
--- a/examples/petrinet/runner.py
+++ b/examples/petrinet/runner.py
@@ -1,14 +1,12 @@
-from state.devstate import DevState
-from api.od import ODAPI
+from icecream import ic
+
from concrete_syntax.textual_od.renderer import render_od
-# from concrete_syntax.textual_od.renderer_jinja2 import render_od_jinja2
-from bootstrap.scd import bootstrap_scd
+from transformation.schedule.Tests import Test_xmlparser
from util import loader
-from transformation.rule import RuleMatcherRewriter, ActionGenerator
from transformation.ramify import ramify
-from examples.semantics.operational import simulator
from examples.petrinet.renderer import show_petri_net
+from transformation.schedule.rule_scheduler import *
if __name__ == "__main__":
import os
@@ -30,35 +28,26 @@ if __name__ == "__main__":
# m_rt_initial_cs = m_cs + read_file('models/m_example_simple_rt_initial.od')
# m_cs = read_file('models/m_example_mutex.od')
# m_rt_initial_cs = m_cs + read_file('models/m_example_mutex_rt_initial.od')
- m_cs = read_file('models/m_example_inharc.od')
- m_rt_initial_cs = m_cs + read_file('models/m_example_inharc_rt_initial.od')
+ m_cs = read_file('models/m_example_simple.od')
+ m_rt_initial_cs = m_cs + read_file('models/m_example_simple_rt_initial.od')
# Parse them
mm = loader.parse_and_check(state, mm_cs, scd_mmm, "Petri-Net Design meta-model")
mm_rt = loader.parse_and_check(state, mm_rt_cs, scd_mmm, "Petri-Net Runtime meta-model")
m = loader.parse_and_check(state, m_cs, mm, "Example model")
m_rt_initial = loader.parse_and_check(state, m_rt_initial_cs, mm_rt, "Example model initial state")
-
mm_rt_ramified = ramify(state, mm_rt)
- rules = loader.load_rules(state,
- lambda rule_name, kind: f"{THIS_DIR}/operational_semantics/r_{rule_name}_{kind}.od",
- mm_rt_ramified,
- ["fire_transition"]) # only 1 rule :(
- matcher_rewriter = RuleMatcherRewriter(state, mm_rt, mm_rt_ramified)
- action_generator = ActionGenerator(matcher_rewriter, rules)
- def render_callback(od):
- show_petri_net(od)
- return render_od(state, od.m, od.mm)
- sim = simulator.Simulator(
- action_generator=action_generator,
- decision_maker=simulator.InteractiveDecisionMaker(auto_proceed=False),
- # decision_maker=simulator.RandomDecisionMaker(seed=0),
- renderer=render_callback,
- # renderer=lambda od: render_od(state, od.m, od.mm),
- )
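+    # Drive the operational semantics with the rule scheduler (replaces the interactive simulator).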
+ scheduler = RuleScheduler(state, mm_rt, mm_rt_ramified, verbose=True, directory="models")
- sim.run(ODAPI(state, m_rt_initial, mm_rt))
+    # if scheduler.load_schedule("petrinet.od"):
+ # if scheduler.load_schedule("schedules/combinatory.drawio"):
+ if scheduler.load_schedule("schedules/petrinet3.drawio"):
+ scheduler.generate_dot("../dot.dot")
+ code, message = scheduler.run(ODAPI(state, m_rt_initial, mm_rt))
+ print(f"{code}: {message}")
diff --git a/examples/petrinet/translational_semantics/tapaal/tapaal.jinja2 b/examples/petrinet/translational_semantics/tapaal/tapaal.jinja2
index 445dc99..7c2596c 100644
--- a/examples/petrinet/translational_semantics/tapaal/tapaal.jinja2
+++ b/examples/petrinet/translational_semantics/tapaal/tapaal.jinja2
@@ -12,11 +12,22 @@
nameOffsetY="0"
positionX="{{ i * 100 + 100 }}"
positionY="100"
- />
+ />
{% endfor %}
{% for i, (transition_name, transition) in enumerate(odapi.get_all_instances("PNTransition")) %}
-
+
{% endfor %}
{% for arc_name, arc in odapi.get_all_instances("arc") %}
diff --git a/examples/semantics/operational/port/assignment.py b/examples/semantics/operational/port/assignment.py
deleted file mode 100644
index a7ae221..0000000
--- a/examples/semantics/operational/port/assignment.py
+++ /dev/null
@@ -1,142 +0,0 @@
-import functools
-from concrete_syntax.common import indent
-from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time
-from examples.semantics.operational.simulator import make_actions_pure, filter_valid_actions
-
-
-def precondition_can_move_from(od, from_state):
-
- # TO IMPLEMENT
-
- # Function should return True if a ship can move out of 'from_state'
-
- return False
-
-def precondition_can_move_to(od, to_state):
-
- # TO IMPLEMENT
-
- # Function should return True if a ship can move into 'to_state'
-
- return False
-
-def precondition_all_successors_moved(od, conn):
-
- # TO IMPLEMENT
-
- # A move (or skip) can only be made along a connection after all subsequent connections have already made their move (or were skipped).
-
- return True
-
-def precondition_workers_available(od, workerset):
-
- # TO IMPLEMENT
-
- # A worker in a WorkerSet can only be allocated to a berth, if the number of 'isOperating'-links is smaller than the number of workers in the WorkerSet.
-
- return True
-
-def precondition_berth_unserved(od, berth):
-
- # TO IMPLEMENT
-
- # A worker can only be allocated to a berth, if the berth contains an 'unserved' ship.
-
- return True
-
-def action_skip(od, conn_name):
- # SERVES AS AN EXAMPLE - NO NEED TO EDIT THIS FUNCTION
- conn = od.get(conn_name)
- conn_state = design_to_state(od, conn)
- od.set_slot_value(conn_state, "moved", True)
- return [f"skip {conn_name}"]
-
-def action_move(od, conn_name):
- action_skip(od, conn_name) # flag the connection as 'moved'
-
- conn = od.get(conn_name)
- from_place = od.get_source(conn)
- to_place = od.get_target(conn)
-
- from_state = design_to_state(od, from_place) # beware: Generator does not have State
- to_state = design_to_state(od, to_place)
-
- # TO IMPLEMENT:
- # - move a ship along the connection
-
- return [f"unimplemented! nothing changed!"]
-
-def action_serve_berth(od, workerset_name, berth_name):
-
- # TO IMPLEMENT:
- # - A worker starts operating a berth
-
- return [f"unimplemented! nothing changed!"]
-
-def action_advance_time(od):
- _, clock = od.get_all_instances("Clock")[0]
- time = od.get_slot_value(clock, "time")
- new_time = time + 1
- od.set_slot_value(clock, "time", new_time)
-
- # TO IMPLEMENT:
- # - all 'moved'-attributes need to be reset (to False)
- # - if there is a worker operating a Berth, then:
- # (1) the Berth's status becomes 'served'
- # (2) the worker is no longer operating the Berth
-
- return [f"time is now {new_time}"]
-
-# This function is called to discover the possible steps that can be made.
-# It should not be necessary to edit this function
-def get_actions(od):
- actions = {}
-
- # Add move-actions (or skip-actions)
- for conn_name, conn in od.get_all_instances("connection"):
- already_moved = od.get_slot_value(design_to_state(od, conn), "moved")
- if already_moved or not precondition_all_successors_moved(od, conn):
- # a move was already made along this connection in the current time-step
- continue
-
- from_place = od.get_source(conn)
- to_place = od.get_target(conn)
- from_name = od.get_name(from_place)
- to_name = od.get_name(to_place)
- from_state = design_to_state(od, from_place)
- to_state = design_to_state(od, to_place)
-
- if (precondition_can_move_from(od, from_state)
- and precondition_can_move_to(od, to_state)):
- actions[f"move {conn_name} ({from_name} -> {to_name})"] = functools.partial(action_move, conn_name=conn_name)
- else:
- actions[f"skip {from_name} -> {to_name}"] = functools.partial(action_skip, conn_name=conn_name)
-
- # Add actions to assign workers
- for _, workerset in od.get_all_instances("WorkerSet"):
- if not precondition_workers_available(od, workerset):
- continue
- for lnk in od.get_outgoing(workerset, "canOperate"):
- berth = od.get_target(lnk)
- if precondition_berth_unserved(od, berth):
- berth_name = od.get_name(berth)
- workerset_name = od.get_name(workerset)
- actions[f"{workerset_name} operates {berth_name}"] = functools.partial(action_serve_berth, workerset_name=workerset_name, berth_name=berth_name)
-
- # Only when no other action can be performed, can time advance
- if len(actions) == 0:
- actions["advance time"] = action_advance_time
-
- # This wrapper turns our actions into pure functions: they will clone the model before modifying it. This is useful if we ever want to rollback an action.
- return make_actions_pure(actions.items(), od)
-
-
-# Called every time the runtime state changes.
-# When this function returns a string, the simulation ends.
-# The string should represent the reason for ending the simulation.
-# When this function returns None, the simulation continues.
-def termination_condition(od):
-
- # TO IMPLEMENT: terminate simulation when the place 'served' contains 2 ships.
-
- pass
diff --git a/examples/semantics/operational/port/helpers.py b/examples/semantics/operational/port/helpers.py
deleted file mode 100644
index 12ecdfc..0000000
--- a/examples/semantics/operational/port/helpers.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Some helper functions
-
-def get_num_ships(od, place):
- place_state = design_to_state(od, place)
- return od.get_slot_value(place_state, "numShips")
-
-def design_to_state(od, design):
- incoming = od.get_incoming(design, "of")
- if len(incoming) == 1:
- # not all design-objects have a state
- return od.get_source(incoming[0])
-
-def state_to_design(od, state):
- return od.get_target(od.get_outgoing(state, "of")[0])
-
-def get_time(od):
- _, clock = od.get_all_instances("Clock")[0]
- return clock, od.get_slot_value(clock, "time")
diff --git a/examples/semantics/operational/port/models.py b/examples/semantics/operational/port/models.py
deleted file mode 100644
index dd7e7a3..0000000
--- a/examples/semantics/operational/port/models.py
+++ /dev/null
@@ -1,407 +0,0 @@
-# Design meta-model
-port_mm_cs = """
- Source:Class {
- abstract = True;
- }
- Sink:Class {
- abstract = True;
- }
-
- Place:Class
- :Inheritance (Place -> Source)
- :Inheritance (Place -> Sink)
-
- connection:Association (Source -> Sink)
-
- CapacityConstraint:Class
-
- CapacityConstraint_shipCapacity:AttributeLink (CapacityConstraint -> Integer) {
- name = "shipCapacity";
- optional = False;
-
- # cannot have negative capacity:
- constraint = `get_value(get_target(this)) >= 0`; # non-negative
- }
-
- # Capacity
- capacityOf:Association (CapacityConstraint -> Place) {
- # must say something about at least one Place, otherwise what is the point of the constraint?
- target_lower_cardinality = 1;
- }
-
- Berth:Class
- :Inheritance (Berth -> Place)
-
- # Set of workers
- WorkerSet:Class
-
- WorkerSet_numWorkers:AttributeLink (WorkerSet -> Integer) {
- name = "numWorkers";
- optional = False;
- constraint = `get_value(get_target(this)) >= 0`; # non-negative
- }
- canOperate:Association (WorkerSet -> Berth) {
- target_lower_cardinality = 1;
- }
-
- Generator:Class
- :Inheritance (Generator -> Source)
-
-
- # Those classes to which we want to attach a runtime state object
- Stateful:Class {
- abstract = True;
- }
- :Inheritance (Place -> Stateful)
- :Inheritance (WorkerSet -> Stateful)
- :Inheritance (Berth -> Stateful)
- :Inheritance (connection -> Stateful)
-""";
-
-# Runtime meta-model
-port_rt_mm_cs = port_mm_cs + """
- State:Class
- of:Association (State -> Stateful) {
- source_lower_cardinality = 1;
- source_upper_cardinality = 1;
- target_lower_cardinality = 1;
- target_upper_cardinality = 1;
- }
-
- PlaceState:Class
- :Inheritance (PlaceState -> State)
-
- PlaceState_numShips:AttributeLink (PlaceState -> Integer) {
- # number of ships currently in the place
- name = "numShips";
- optional = False;
- constraint = `get_value(get_target(this)) >= 0`; # non-negative
- }
-
- shipCapacities:GlobalConstraint {
- constraint = ```
- errors = []
- for _, constr in get_all_instances("CapacityConstraint"):
- cap = get_slot_value(constr, "shipCapacity")
- total = 0
- place_names = [] # for debugging
- for lnk in get_outgoing(constr, "capacityOf"):
- place = get_target(lnk)
- place_names.append(get_name(place))
- place_state = get_source(get_incoming(place, "of")[0])
- total += get_slot_value(place_state, "numShips")
- if total > cap:
- errors.append(f"The number of ships in places {','.join(place_names)} ({total}) exceeds the capacity ({cap}) of CapacityConstraint {get_name(constr)}.")
- errors
- ```;
- }
-
- BerthState:Class {
- # status == empty <=> numShips == 0
- constraint = ```
- errors = []
- numShips = get_slot_value(this, "numShips")
- status = get_slot_value(this, "status")
- if (numShips == 0) != (status == "empty"):
- errors.append(f"Inconsistent: numShips = {numShips}, but status = {status}")
- errors
- ```;
- }
- :Inheritance (BerthState -> PlaceState)
-
- BerthState_status:AttributeLink (BerthState -> String) {
- name = "status";
- optional = False;
- constraint = `(
- get_value(get_target(this)) in { "empty", "unserved", "served" }
- )`;
- }
-
- WorkerSetState:Class
- :Inheritance (WorkerSetState -> State)
-
- isOperating:Association (WorkerSetState -> Berth) {
- constraint = ```
- errors = []
-
- # get status of Berth
- berth = get_target(this)
- berth_state = get_source(get_incoming(berth, "of")[0])
- status = get_slot_value(berth_state, "status")
- if status != "unserved":
- errors.append(f"Cannot operate {get_name(berth)} because there is no unserved ship there.")
-
- # only operate Berts that we can operate
- workerset = get_target(get_outgoing(get_source(this), "of")[0])
- can_operate = [get_target(lnk) for lnk in get_outgoing(workerset, "canOperate")]
- if berth not in can_operate:
- errors.append(f"Cannot operate {get_name(berth)}.")
-
- errors
- ```;
- }
-
- operatingCapacities:GlobalConstraint {
- constraint = ```
- errors = []
- for _, workersetstate in get_all_instances("WorkerSetState"):
- workerset = get_target(get_outgoing(workersetstate, "of")[0])
- num_operating = len(get_outgoing(workersetstate, "isOperating"))
- num_workers = get_slot_value(workerset, "numWorkers")
- if num_operating > num_workers:
- errors.append(f"WorkerSet {get_name(workerset)} is operating more berths ({num_operating}) than there are workers ({num_workers})")
- errors
- ```;
- }
-
- ConnectionState:Class
- :Inheritance (ConnectionState -> State)
- ConnectionState_moved:AttributeLink (ConnectionState -> Boolean) {
- name = "moved";
- optional = False;
- constraint = ```
- result = True
- all_successors_moved = True
- moved = get_value(get_target(this))
- conn_state = get_source(this)
- conn = get_target(get_outgoing(conn_state, "of")[0])
- tgt_place = get_target(conn)
- next_conns = get_outgoing(tgt_place, "connection")
- for next_conn in next_conns:
- next_conn_state = get_source(get_incoming(next_conn, "of")[0])
- if not get_slot_value(next_conn_state, "moved"):
- all_successors_moved = False
- if moved and not all_successors_moved:
- result = f"Connection {get_name(conn)} played before its turn."
- result
- ```;
- }
-
- Clock:Class {
- lower_cardinality = 1;
- upper_cardinality = 1;
- }
- Clock_time:AttributeLink (Clock -> Integer) {
- name = "time";
- optional = False;
- constraint = `get_value(get_target(this)) >= 0`;
- }
-"""
-
-# Design model: the part that doesn't change
-port_m_cs = """
- gen:Generator
-
- # newly arrived ships collect here
- waiting:Place
- c1:connection (gen -> waiting)
-
- inboundPassage:Place
- c2:connection (waiting -> inboundPassage)
-
- outboundPassage:Place
-
- # inboundPassage and outboundPassage cannot have more than 3 ships total
- passageCap:CapacityConstraint {
- shipCapacity = 3;
- }
- :capacityOf (passageCap -> inboundPassage)
- :capacityOf (passageCap -> outboundPassage)
-
-
- # Berth 1
-
- inboundBerth1:Place
- berth1:Berth
- outboundBerth1:Place
-
- inboundBerth1Cap:CapacityConstraint { shipCapacity = 1; }
- :capacityOf (inboundBerth1Cap -> inboundBerth1)
- outboundBerth1Cap:CapacityConstraint { shipCapacity = 1; }
- :capacityOf (outboundBerth1Cap -> outboundBerth1)
-
- berth1Cap:CapacityConstraint { shipCapacity = 1; }
- :capacityOf (berth1Cap -> berth1)
-
- c3:connection (inboundBerth1 -> berth1)
- c4:connection (berth1 -> outboundBerth1)
-
- # Berth 2
-
- inboundBerth2:Place
- berth2:Berth
- outboundBerth2:Place
-
- inboundBerth2Cap:CapacityConstraint { shipCapacity = 1; }
- :capacityOf (inboundBerth2Cap -> inboundBerth2)
- outboundBerth2Cap:CapacityConstraint { shipCapacity = 1; }
- :capacityOf (outboundBerth2Cap -> outboundBerth2)
-
- berth2Cap:CapacityConstraint { shipCapacity = 1; }
- :capacityOf (berth2Cap -> berth2)
-
- c5:connection (inboundBerth2 -> berth2)
- c6:connection (berth2 -> outboundBerth2)
-
-
- # can either go to Berth 1 or Berth 2
- c7:connection (inboundPassage -> inboundBerth1)
- c8:connection (inboundPassage -> inboundBerth2)
-
- c9:connection (outboundBerth1 -> outboundPassage)
- c10:connection (outboundBerth2 -> outboundPassage)
-
-
- # ships that have been served are counted here
- served:Place
- c11:connection (outboundPassage -> served)
-
-
- workers:WorkerSet {
- numWorkers = 1;
- }
- :canOperate (workers -> berth1)
- :canOperate (workers -> berth2)
-"""
-
-# Initial runtime model: the part that changes (every execution step)
-port_rt_m_cs = port_m_cs + """
- clock:Clock {
- time = 0;
- }
-
- waitingState:PlaceState { numShips = 0; } :of (waitingState -> waiting)
- inboundPassageState:PlaceState { numShips = 0; } :of (inboundPassageState -> inboundPassage)
- outboundPassageState:PlaceState { numShips = 0; } :of (outboundPassageState -> outboundPassage)
-
- inboundBerth1State:PlaceState { numShips = 0; } :of (inboundBerth1State -> inboundBerth1)
- outboundBerth1State:PlaceState { numShips = 0; } :of (outboundBerth1State -> outboundBerth1)
- inboundBerth2State:PlaceState { numShips = 0; } :of (inboundBerth2State -> inboundBerth2)
- outboundBerth2State:PlaceState { numShips = 0; } :of (outboundBerth2State -> outboundBerth2)
-
- berth1State:BerthState { status = "empty"; numShips = 0; } :of (berth1State -> berth1)
- berth2State:BerthState { status = "empty"; numShips = 0; } :of (berth2State -> berth2)
-
- servedState:PlaceState { numShips = 0; } :of (servedState -> served)
-
- workersState:WorkerSetState :of (workersState -> workers)
-
- c1S:ConnectionState { moved = False; } :of (c1S -> c1)
- c2S:ConnectionState { moved = False; } :of (c2S -> c2)
- c3S:ConnectionState { moved = False; } :of (c3S -> c3)
- c4S:ConnectionState { moved = False; } :of (c4S -> c4)
- c5S:ConnectionState { moved = False; } :of (c5S -> c5)
- c6S:ConnectionState { moved = False; } :of (c6S -> c6)
- c7S:ConnectionState { moved = False; } :of (c7S -> c7)
- c8S:ConnectionState { moved = False; } :of (c8S -> c8)
- c9S:ConnectionState { moved = False; } :of (c9S -> c9)
- c10S:ConnectionState { moved = False; } :of (c10S -> c10)
- c11S:ConnectionState { moved = False; } :of (c11S -> c11)
-"""
-
-###################################################
-
-# ┌─────────────────┐
-# │ shipCapacity=3 │
-# ┌───┐ ┌───────┐ │┌──────────────┐ │ ┌───────┐
-# │gen├────►│waiting├────►│inboundPassage├───►│turning│
-# └───┘ └───────┘ │└──────────────┘ │ └───┬───┘
-# │ │ │
-# ┌──────┐ │┌───────────────┐│ │
-# │served│◄────┼outboundPassage│◄──────┘
-# └──────┘ │└───────────────┘│
-# └─────────────────┘
-smaller_model_cs = """
- gen:Generator
- waiting:Place
- inboundPassage:Place
- turning:Place
- outboundPassage:Place
- served:Place
-
- gen2wait:connection (gen -> waiting)
- wait2inbound:connection (waiting -> inboundPassage)
- inbound2turning:connection (inboundPassage -> turning)
- turning2outbound:connection (turning -> outboundPassage)
- outbound2served:connection (outboundPassage -> served)
-
- # inboundPassage and outboundPassage cannot have more than 3 ships total
- passageCap:CapacityConstraint {
- shipCapacity = 3;
- }
- :capacityOf (passageCap -> inboundPassage)
- :capacityOf (passageCap -> outboundPassage)
-"""
-
-smaller_model_rt_cs = smaller_model_cs + """
- clock:Clock {
- time = 0;
- }
-
- waitingState:PlaceState { numShips = 1; } :of (waitingState -> waiting)
- inboundPassageState:PlaceState { numShips = 1; } :of (inboundPassageState -> inboundPassage)
- turningState:PlaceState { numShips = 1; } :of (turningState -> turning)
- outboundPassageState:PlaceState { numShips = 1; } :of (outboundPassageState -> outboundPassage)
- servedState:PlaceState { numShips = 0; } :of (servedState -> served)
-
- gen2waitState:ConnectionState { moved = False; } :of (gen2waitState -> gen2wait)
- wait2inboundState:ConnectionState { moved = False; } :of (wait2inboundState -> wait2inbound)
- inbound2turningState:ConnectionState { moved = False; } :of (inbound2turningState -> inbound2turning)
- turning2outboundState:ConnectionState { moved = False; } :of (turning2outboundState -> turning2outbound)
- outbound2servedState:ConnectionState { moved = False; } :of (outbound2servedState -> outbound2served)
-"""
-
-###################################################
-
-# ┌────────────┐
-# │ workerset │
-# │ │
-# │numWorkers=1│
-# └──────┬─────┘
-# │canOperate
-# │
-# ┌───▼────┐
-# ┌───┐ ┌───────┐ │┌─────┐ │ ┌──────┐
-# │gen├────►│waiting├────││berth├─┼───►│served│
-# └───┘ └───────┘ │└─────┘ │ └──────┘
-# │ship- │
-# │Capacity│
-# │ =1 │
-# └────────┘
-smaller_model2_cs = """
- gen:Generator
- waiting:Place
- berth:Berth
- served:Place
-
- gen2wait:connection (gen -> waiting)
- wait2berth:connection (waiting -> berth)
- berth2served:connection (berth -> served)
-
- # berth can only hold 1 ship
- passageCap:CapacityConstraint {
- shipCapacity = 1;
- }
- :capacityOf (passageCap -> berth)
-
- workers:WorkerSet {
- numWorkers = 1;
- }
- :canOperate (workers -> berth)
-"""
-
-smaller_model2_rt_cs = smaller_model2_cs + """
- clock:Clock {
- time = 0;
- }
-
- waitingState:PlaceState { numShips = 1; } :of (waitingState -> waiting)
- berthState:BerthState { numShips = 0; status = "empty"; } :of (berthState -> berth)
- servedState:PlaceState { numShips = 0; } :of (servedState -> served)
-
- gen2waitState:ConnectionState { moved = False; } :of (gen2waitState -> gen2wait)
- wait2berthState:ConnectionState { moved = False; } :of (wait2berthState -> wait2berth)
- berth2servedState:ConnectionState { moved = False; } :of (berth2servedState -> berth2served)
-
- workersState:WorkerSetState :of (workersState -> workers)
-"""
diff --git a/examples/semantics/operational/port/renderer.py b/examples/semantics/operational/port/renderer.py
deleted file mode 100644
index 63bebb3..0000000
--- a/examples/semantics/operational/port/renderer.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from concrete_syntax.common import indent
-from concrete_syntax.graphviz.make_url import make_url
-from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time, get_num_ships
-
-def render_port_to_dot(od,
- make_id=lambda name,obj: name # by default, we just use the object name for the graphviz node name
-):
- txt = ""
-
- def render_place(place):
- name = od.get_name(place)
- return f'"{make_id(name,place)}" [ label = "{name}\\n ships = {get_num_ships(od, place)}", style = filled, fillcolor = lightblue ]\n'
-
- for _, cap in od.get_all_instances("CapacityConstraint", include_subtypes=False):
- name = od.get_name(cap)
- capacity = od.get_slot_value(cap, "shipCapacity")
- txt += f'subgraph cluster_{name} {{\n label = "{name}\\n capacity = {capacity}";\n'
- for lnk in od.get_outgoing(cap, "capacityOf"):
- place = od.get_target(lnk)
- txt += f' {render_place(place)}'
- txt += f'}}\n'
-
- for _, place_state in od.get_all_instances("PlaceState", include_subtypes=False):
- place = state_to_design(od, place_state)
- if len(od.get_incoming(place, "capacityOf")) == 0:
- txt += render_place(place)
-
- for _, berth_state in od.get_all_instances("BerthState", include_subtypes=False):
- berth = state_to_design(od, berth_state)
- name = od.get_name(berth)
- txt += f'"{make_id(name,berth)}" [ label = "{name}\\n numShips = {get_num_ships(od, berth)}\\n status = {od.get_slot_value(berth_state, "status")}", fillcolor = yellow, style = filled]\n'
-
- for _, gen in od.get_all_instances("Generator", include_subtypes=False):
- txt += f'"{make_id(od.get_name(gen),gen)}" [ label = "+", shape = diamond, fillcolor = green, fontsize = 30, style = filled ]\n'
-
- for _, conn in od.get_all_instances("connection"):
- src = od.get_source(conn)
- tgt = od.get_target(conn)
- moved = od.get_slot_value(design_to_state(od, conn), "moved")
- src_name = od.get_name(src)
- tgt_name = od.get_name(tgt)
- txt += f"{make_id(src_name,src)} -> {make_id(tgt_name,tgt)} [color=deepskyblue3, penwidth={1 if moved else 2}];\n"
-
- for _, workers in od.get_all_instances("WorkerSet"):
- already_have = []
- name = od.get_name(workers)
- num_workers = od.get_slot_value(workers, "numWorkers")
- txt += f'{make_id(name,workers)} [label="{num_workers} worker(s)", shape=parallelogram, fillcolor=chocolate, style=filled];\n'
- for lnk in od.get_outgoing(design_to_state(od, workers), "isOperating"):
- berth = od.get_target(lnk)
- already_have.append(berth)
- txt += f"{make_id(name,workers)} -> {make_id(od.get_name(berth),berth)} [arrowhead=none, color=chocolate];\n"
- for lnk in od.get_outgoing(workers, "canOperate"):
- berth = od.get_target(lnk)
- if berth not in already_have:
- txt += f"{make_id(name,workers)} -> {make_id(od.get_name(berth),berth)} [style=dotted, arrowhead=none, color=chocolate];\n"
-
- return txt
-
-def render_port_graphviz(od):
- return make_url(render_port_to_dot(od))
-
-def render_port_textual(od):
- txt = ""
- for _, place_state in od.get_all_instances("PlaceState", include_subtypes=False):
- place = state_to_design(od, place_state)
- name = od.get_name(place)
- txt += f'place "{name}" {"🚢"*get_num_ships(od, place)}\n'
-
- for _, berth_state in od.get_all_instances("BerthState", include_subtypes=False):
- berth = state_to_design(od, berth_state)
- name = od.get_name(berth)
- operated_descr = ""
- if len(od.get_incoming(berth, "isOperating")):
- operated_descr = " and being operated"
- txt += f'berth "{name}" {"🚢"*get_num_ships(od, berth)} {od.get_slot_value(berth_state, "status")}{operated_descr}\n'
-
- return txt
diff --git a/examples/semantics/operational/port/rulebased_runner.py b/examples/semantics/operational/port/rulebased_runner.py
deleted file mode 100644
index ce73ca5..0000000
--- a/examples/semantics/operational/port/rulebased_runner.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import urllib.parse
-
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.textual_od import parser
-from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram
-from api.od import ODAPI
-
-from transformation.ramify import ramify
-
-from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker
-from examples.semantics.operational.port import models
-from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time
-from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz
-
-from examples.semantics.operational.port import rulebased_sem
-
-state = DevState()
-scd_mmm = bootstrap_scd(state) # Load meta-meta-model
-
-### Load (meta-)models ###
-
-def parse_and_check(m_cs: str, mm, descr: str):
- m = parser.parse_od(
- state,
- m_text=m_cs,
- mm=mm)
- conf = Conformance(state, m, mm)
- print(descr, "...", render_conformance_check_result(conf.check_nominal()))
- return m
-
-port_mm = parse_and_check(models.port_mm_cs, scd_mmm, "MM")
-port_m = parse_and_check(models.port_m_cs, port_mm, "M")
-port_rt_mm = parse_and_check(models.port_rt_mm_cs, scd_mmm, "RT-MM")
-port_rt_m = parse_and_check(models.port_rt_m_cs, port_rt_mm, "RT-M")
-
-print()
-
-# print(render_class_diagram(state, port_rt_mm))
-
-### Simulate ###
-
-port_rt_mm_ramified = ramify(state, port_rt_mm)
-
-rulebased_action_generator = rulebased_sem.get_action_generator(state, port_rt_mm, port_rt_mm_ramified)
-termination_condition = rulebased_sem.TerminationCondition(state, port_rt_mm_ramified)
-
-sim = Simulator(
- action_generator=rulebased_action_generator,
- # decision_maker=RandomDecisionMaker(seed=2),
- decision_maker=InteractiveDecisionMaker(),
- termination_condition=termination_condition,
- check_conformance=True,
- verbose=True,
- renderer=render_port_textual,
- # renderer=render_port_graphviz,
-)
-
-od = ODAPI(state, port_rt_m, port_rt_mm)
-
-sim.run(od)
diff --git a/examples/semantics/operational/port/rulebased_sem.py b/examples/semantics/operational/port/rulebased_sem.py
deleted file mode 100644
index 8ff41b5..0000000
--- a/examples/semantics/operational/port/rulebased_sem.py
+++ /dev/null
@@ -1,67 +0,0 @@
-### Operational Semantics - defined by rule-based model transformation ###
-
-from concrete_syntax.textual_od.parser import parse_od
-from transformation.rule import Rule, RuleMatcherRewriter, PriorityActionGenerator
-from transformation.matcher import match_od
-from util import loader
-
-import os
-THIS_DIR = os.path.dirname(__file__)
-
-# kind: lhs, rhs, nac
-get_filename = lambda rule_name, kind: f"{THIS_DIR}/rules/r_{rule_name}_{kind}.od"
-
-
-def get_action_generator(state, rt_mm, rt_mm_ramified):
- matcher_rewriter = RuleMatcherRewriter(state, rt_mm, rt_mm_ramified)
-
- #############################################################################
- # TO IMPLEMENT: Full semantics as a set of rule-based model transformations #
-
- rules0_dict = loader.load_rules(state, get_filename, rt_mm_ramified,
- ["ship_sinks"] # <- list of rule_name of equal priority
- )
- rules1_dict = loader.load_rules(state, get_filename, rt_mm_ramified,
- ["ship_appears_in_berth"]
- )
- # rules2_dict = ...
-
- generator = PriorityActionGenerator(matcher_rewriter, [
- rules0_dict, # highest priority
- rules1_dict, # lower priority
- # rules2_dict, # lowest priority
- ])
-
- # TO IMPLEMENT: Full semantics as a set of rule-based model transformations #
- #############################################################################
-
- return generator
-
-
-
-
-# The termination condition can also be specified as a pattern:
-class TerminationCondition:
- def __init__(self, state, rt_mm_ramified):
- self.state = state
- self.rt_mm_ramified = rt_mm_ramified
-
- # TO IMPLEMENT: terminate simulation when the place 'served' contains 2 ships.
-
- ########################################
- # You should only edit the pattern below
- pattern_cs = """
- # Placeholder to make the termination condition never hold:
- :GlobalCondition {
- condition = `False`;
- }
- """
- # You should only edit the pattern above
- ########################################
-
- self.pattern = parse_od(state, pattern_cs, rt_mm_ramified)
-
- def __call__(self, od):
- for match in match_od(self.state, od.m, od.mm, self.pattern, self.rt_mm_ramified):
- # stop after the first match (no need to look for more matches):
- return "There are 2 ships served." # Termination condition statisfied
diff --git a/examples/semantics/operational/port/rules/README.txt b/examples/semantics/operational/port/rules/README.txt
deleted file mode 100644
index 485ea73..0000000
--- a/examples/semantics/operational/port/rules/README.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-The names of the files in this directory are important.
-
-A rule must always be named:
- r__.od
-
-It is allowed to have more than one NAC. In this case, the NACs must be named:
- r__nac.od
- r__nac2.od
- r__nac3.od
- ...
-
-
-For the assignment, you can delete the existing rules (they are nonsense) and start fresh.
\ No newline at end of file
diff --git a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_lhs.od b/examples/semantics/operational/port/rules/r_ship_appears_in_berth_lhs.od
deleted file mode 100644
index 70bf95a..0000000
--- a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_lhs.od
+++ /dev/null
@@ -1,4 +0,0 @@
-berthState:RAM_BerthState {
- RAM_numShips = `get_value(this) == 0`;
- RAM_status = `get_value(this) == "empty"`;
-}
\ No newline at end of file
diff --git a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_rhs.od b/examples/semantics/operational/port/rules/r_ship_appears_in_berth_rhs.od
deleted file mode 100644
index e535053..0000000
--- a/examples/semantics/operational/port/rules/r_ship_appears_in_berth_rhs.od
+++ /dev/null
@@ -1,4 +0,0 @@
-berthState:RAM_BerthState {
- RAM_numShips = `1`;
- RAM_status = `"served"`;
-}
\ No newline at end of file
diff --git a/examples/semantics/operational/port/rules/r_ship_sinks_lhs.od b/examples/semantics/operational/port/rules/r_ship_sinks_lhs.od
deleted file mode 100644
index 270c9ef..0000000
--- a/examples/semantics/operational/port/rules/r_ship_sinks_lhs.od
+++ /dev/null
@@ -1,5 +0,0 @@
-# Find any place that has at least one ship:
-
-placeState:RAM_PlaceState {
- RAM_numShips = `get_value(this) > 0`;
-}
\ No newline at end of file
diff --git a/examples/semantics/operational/port/rules/r_ship_sinks_rhs.od b/examples/semantics/operational/port/rules/r_ship_sinks_rhs.od
deleted file mode 100644
index 828f3c8..0000000
--- a/examples/semantics/operational/port/rules/r_ship_sinks_rhs.od
+++ /dev/null
@@ -1,4 +0,0 @@
-placeState:RAM_PlaceState {
- # Decrement number of ships:
- RAM_numShips = `get_value(this) - 1`;
-}
\ No newline at end of file
diff --git a/examples/semantics/operational/port/runner.py b/examples/semantics/operational/port/runner.py
deleted file mode 100644
index bdb4a8d..0000000
--- a/examples/semantics/operational/port/runner.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import urllib.parse
-
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.textual_od import parser
-from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram
-from api.od import ODAPI
-
-from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker
-from examples.semantics.operational.port import models
-from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time
-from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz
-
-# from examples.semantics.operational.port.joeris_solution import termination_condition, get_actions
-from examples.semantics.operational.port.assignment import termination_condition, get_actions
-
-state = DevState()
-scd_mmm = bootstrap_scd(state) # Load meta-meta-model
-
-### Load (meta-)models ###
-
-def parse_and_check(m_cs: str, mm, descr: str):
- m = parser.parse_od(
- state,
- m_text=m_cs,
- mm=mm)
- conf = Conformance(state, m, mm)
- print(descr, "...", render_conformance_check_result(conf.check_nominal()))
- return m
-
-port_mm = parse_and_check(models.port_mm_cs, scd_mmm, "MM")
-port_m = parse_and_check(models.port_m_cs, port_mm, "M")
-port_rt_mm = parse_and_check(models.port_rt_mm_cs, scd_mmm, "RT-MM")
-port_rt_m = parse_and_check(models.port_rt_m_cs, port_rt_mm, "RT-M")
-
-print()
-
-# print(render_class_diagram(state, port_rt_mm))
-
-### Simulate ###
-
-sim = Simulator(
- action_generator=get_actions,
- # decision_maker=RandomDecisionMaker(seed=2),
- decision_maker=InteractiveDecisionMaker(),
- termination_condition=termination_condition,
- check_conformance=True,
- verbose=True,
- renderer=render_port_textual,
- # renderer=render_port_graphviz,
-)
-
-od = ODAPI(state, port_rt_m, port_rt_mm)
-
-sim.run(od)
diff --git a/examples/semantics/operational/simulator.py b/examples/semantics/operational/simulator.py
deleted file mode 100644
index 248feda..0000000
--- a/examples/semantics/operational/simulator.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import abc
-import random
-import math
-import functools
-import sys
-
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.common import indent
-from concrete_syntax.textual_od.renderer import render_od
-from transformation.cloner import clone_od
-from api.od import ODAPI
-
-from util.simulator import MinimalSimulator, DecisionMaker, RandomDecisionMaker, InteractiveDecisionMaker
-
-
-class Simulator(MinimalSimulator):
- def __init__(self,
- action_generator,
- decision_maker: DecisionMaker,
- termination_condition=lambda od: None,
- check_conformance=True,
- verbose=True,
- renderer=lambda od: render_od(od.state, od.m, od.mm),
- ):
- super().__init__(
- action_generator=action_generator,
- decision_maker=decision_maker,
- termination_condition=lambda od: self.check_render_termination_condition(od),
- verbose=verbose,
- )
- self.check_conformance = check_conformance
- self.actual_termination_condition = termination_condition
- self.renderer = renderer
-
- def check_render_termination_condition(self, od):
- # A termination condition checker that also renders the model, and performs conformance check
- self._print("--------------")
- self._print(indent(self.renderer(od), 2))
- self._print("--------------")
- if self.check_conformance:
- conf = Conformance(od.state, od.m, od.mm)
- self._print(render_conformance_check_result(conf.check_nominal()))
- self._print()
- return self.actual_termination_condition(od)
-
-def make_actions_pure(actions, od):
- # Copy model before modifying it
- def exec_pure(action, od):
- cloned_rt_m = clone_od(od.state, od.m, od.mm)
- new_od = ODAPI(od.state, cloned_rt_m, od.mm)
- msgs = action(new_od)
- return (new_od, msgs)
-
- for descr, action in actions:
- yield (descr, functools.partial(exec_pure, action, od))
-
-def filter_valid_actions(pure_actions):
- result = {}
- def make_tuple(new_od, msgs):
- return (new_od, msgs)
- for name, callback in pure_actions:
- # print(f"attempt '{name}' ...", end='\r')
- (new_od, msgs) = callback()
- conf = Conformance(new_od.state, new_od.m, new_od.mm)
- errors = conf.check_nominal()
- # erase current line:
- # print(" ", end='\r')
- if len(errors) == 0:
- # updated RT-M is conform, we have a valid action:
- yield (name, functools.partial(make_tuple, new_od, msgs))
diff --git a/examples/semantics/translational/.gitignore b/examples/semantics/translational/.gitignore
deleted file mode 100644
index dfcb5e1..0000000
--- a/examples/semantics/translational/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# Let's not accidently add the solution to assignment 5...
-r_*.od
-
-snapshot_after_*.od
\ No newline at end of file
diff --git a/examples/semantics/translational/merged_mm.od b/examples/semantics/translational/merged_mm.od
deleted file mode 100644
index 7ebee4a..0000000
--- a/examples/semantics/translational/merged_mm.od
+++ /dev/null
@@ -1,197 +0,0 @@
-# Auto-generated by /home/maestro/repos/MV2/examples/semantics/translational/regenerate_mm.py.
-
-# Merged run-time meta-models of 'Petri Net' and 'Port' formalisms.
-# An abstract 'Top'-class (superclass of everything else), and a 'generic_link'-association (which can connect everything with everything) have also been added.
-
-# PlantUML visualization: https://deemz.org/plantuml/pdf/hPTFYzim4CNl_XGYnqA27P8uDgM7tSEobsmWWHw3RCk9Y2CPMIcKThzxHyuViiMGPwCSzhJpqxoPfz4uo2lUD6pqockUI_lxLQl66YwLPIF66nPUVxkEF-ut2uk8_GaOQmwola5OojwL5NjXWi_WUi1wjQvuBZQMMm6ZborQdKzRVHIgwUB-rEOep4RW-POtw2MqazehJR4WucV0CrUvtB97HdckO4pHZT5dawEvH25l8RUkLZe_icWoYS3mQTmMnygJw2hBYp3sqASsqPnVt44nPrVfZJLIxJjaRdMDCkFuKMDhApGqcJs6thtJIrAIFJBQag2XVFeO-YQKCDng0uSdNuIljeQhHbgf5Kh8mawFhLTqxvN8BSygk0vPtErNgOueelZIZciE9ATNFyhB03hfNtI3KlQYTIMu-iyW_OZtkREXgTOv8AxZ32QMhT3WwN-wAV3zxtZyd3ahn7ESkoiOZkQuJnorrYTkFaDmTBl1xFZKPoleJG6oez4CPfS0Ojsh0-BAfLUZY8LNeuJSAsuQ-nLR-3GArDaUOZD0R0-Z91cGNG5VCaWipLeGDqUCak6r2_rUCg_ZarPVhnE59rvjZ8pF7gqeI-XbNB1Hn2OJHiliUFo3djuHjbMdJ2FpcV9ro1OTkdE-0NmNbJ9kSa00VNdS3uZW0sXdJ5dErKVjbaNapI_BGK92EaUgmmuIuxmtu10Q7YJclkSXHLiEwBehGSfgjOCQ7mzgVEmQltShlCnt5Iszo8AI3JcfTO1iBWPmNqz0rQ8XLalQxbm_uZ_AVm==
-
-
-CapacityConstraint:Class
-PNPlaceState:Class
-WorkerSet:Class
-State:Class
-Stateful:Class {
- abstract = True;
-}
-Source:Class {
- abstract = True;
-}
-Clock:Class {
- lower_cardinality = 1;
- upper_cardinality = 1;
-}
-BerthState:Class {
- constraint = ```
- errors = []
- numShips = get_slot_value(this, "numShips")
- status = get_slot_value(this, "status")
- if (numShips == 0) != (status == "empty"):
- errors.append(f"Inconsistent: numShips = {numShips}, but status = {status}")
- errors
- ```;
-}
-Top:Class {
- abstract = True;
-}
-Place:Class
-WorkerSetState:Class
-Berth:Class
-Generator:Class
-PNTransition:Class
-PNConnectable:Class {
- abstract = True;
-}
-Sink:Class {
- abstract = True;
-}
-ConnectionState:Class
-PlaceState:Class
-PNPlace:Class
-shipCapacities:GlobalConstraint {
- constraint = ```
- errors = []
- for _, constr in get_all_instances("CapacityConstraint"):
- cap = get_slot_value(constr, "shipCapacity")
- total = 0
- place_names = [] # for debugging
- for lnk in get_outgoing(constr, "capacityOf"):
- place = get_target(lnk)
- place_names.append(get_name(place))
- place_state = get_source(get_incoming(place, "of")[0])
- total += get_slot_value(place_state, "numShips")
- if total > cap:
- errors.append(f"The number of ships in places {','.join(place_names)} ({total}) exceeds the capacity ({cap}) of CapacityConstraint {get_name(constr)}.")
- errors
- ```;
-}
-operatingCapacities:GlobalConstraint {
- constraint = ```
- errors = []
- for _, workersetstate in get_all_instances("WorkerSetState"):
- workerset = get_target(get_outgoing(workersetstate, "of")[0])
- num_operating = len(get_outgoing(workersetstate, "isOperating"))
- num_workers = get_slot_value(workerset, "numWorkers")
- if num_operating > num_workers:
- errors.append(f"WorkerSet {get_name(workerset)} is operating more berths ({num_operating}) than there are workers ({num_workers})")
- errors
- ```;
-}
-WorkerSet_numWorkers:AttributeLink (WorkerSet -> Integer) {
- name = "numWorkers";
- constraint = `get_value(get_target(this)) >= 0`;
- optional = False;
-}
-PlaceState_numShips:AttributeLink (PlaceState -> Integer) {
- constraint = `get_value(get_target(this)) >= 0`;
- optional = False;
- name = "numShips";
-}
-ConnectionState_moved:AttributeLink (ConnectionState -> Boolean) {
- name = "moved";
- constraint = ```
- result = True
- all_successors_moved = True
- moved = get_value(get_target(this))
- conn_state = get_source(this)
- conn = get_target(get_outgoing(conn_state, "of")[0])
- tgt_place = get_target(conn)
- next_conns = get_outgoing(tgt_place, "connection")
- for next_conn in next_conns:
- next_conn_state = get_source(get_incoming(next_conn, "of")[0])
- if not get_slot_value(next_conn_state, "moved"):
- all_successors_moved = False
- if moved and not all_successors_moved:
- result = f"Connection {get_name(conn)} played before its turn."
- result
- ```;
- optional = False;
-}
-BerthState_status:AttributeLink (BerthState -> String) {
- optional = False;
- name = "status";
- constraint = ```
- (
- get_value(get_target(this)) in { "empty", "unserved", "served" }
- )
- ```;
-}
-PNPlaceState_numTokens:AttributeLink (PNPlaceState -> Integer) {
- name = "numTokens";
- constraint = `"numTokens cannot be negative" if get_value(get_target(this)) < 0 else None`;
- optional = False;
-}
-Clock_time:AttributeLink (Clock -> Integer) {
- optional = False;
- name = "time";
- constraint = `get_value(get_target(this)) >= 0`;
-}
-CapacityConstraint_shipCapacity:AttributeLink (CapacityConstraint -> Integer) {
- optional = False;
- name = "shipCapacity";
- constraint = `get_value(get_target(this)) >= 0`;
-}
-of:Association (State -> Stateful) {
- target_lower_cardinality = 1;
- source_upper_cardinality = 1;
- source_lower_cardinality = 1;
- target_upper_cardinality = 1;
-}
-arc:Association (PNConnectable -> PNConnectable)
-canOperate:Association (WorkerSet -> Berth) {
- target_lower_cardinality = 1;
-}
-inh_arc:Association (PNPlace -> PNTransition)
-connection:Association (Source -> Sink)
-pn_of:Association (PNPlaceState -> PNPlace) {
- source_upper_cardinality = 1;
- source_lower_cardinality = 1;
- target_upper_cardinality = 1;
- target_lower_cardinality = 1;
-}
-generic_link:Association (Top -> Top)
-isOperating:Association (WorkerSetState -> Berth) {
- constraint = ```
- errors = []
-
- # get status of Berth
- berth = get_target(this)
- berth_state = get_source(get_incoming(berth, "of")[0])
- status = get_slot_value(berth_state, "status")
- if status != "unserved":
- errors.append(f"Cannot operate {get_name(berth)} because there is no unserved ship there.")
-
- # only operate Berts that we can operate
- workerset = get_target(get_outgoing(get_source(this), "of")[0])
- can_operate = [get_target(lnk) for lnk in get_outgoing(workerset, "canOperate")]
- if berth not in can_operate:
- errors.append(f"Cannot operate {get_name(berth)}.")
-
- errors
- ```;
-}
-capacityOf:Association (CapacityConstraint -> Place) {
- target_lower_cardinality = 1;
-}
-:Inheritance (connection -> Stateful)
-:Inheritance (CapacityConstraint -> Top)
-:Inheritance (Sink -> Top)
-:Inheritance (generic_link -> Top)
-:Inheritance (Berth -> Place)
-:Inheritance (WorkerSet -> Stateful)
-:Inheritance (Place -> Source)
-:Inheritance (PlaceState -> State)
-:Inheritance (State -> Top)
-:Inheritance (Source -> Top)
-:Inheritance (Clock -> Top)
-:Inheritance (Stateful -> Top)
-:Inheritance (Place -> Stateful)
-:Inheritance (PNConnectable -> Top)
-:Inheritance (WorkerSetState -> State)
-:Inheritance (Place -> Sink)
-:Inheritance (BerthState -> PlaceState)
-:Inheritance (PNTransition -> PNConnectable)
-:Inheritance (ConnectionState -> State)
-:Inheritance (PNPlaceState -> Top)
-:Inheritance (Generator -> Source)
-:Inheritance (Berth -> Stateful)
-:Inheritance (PNPlace -> PNConnectable)
\ No newline at end of file
diff --git a/examples/semantics/translational/regenerate_mm.py b/examples/semantics/translational/regenerate_mm.py
deleted file mode 100644
index 0a9b6dc..0000000
--- a/examples/semantics/translational/regenerate_mm.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from concrete_syntax.textual_od import renderer
-from concrete_syntax.plantuml.renderer import render_class_diagram
-from concrete_syntax.plantuml.make_url import make_url
-from api.od import ODAPI
-
-from transformation.topify.topify import Topifier
-from transformation.merger import merge_models
-
-from util import loader
-
-from examples.semantics.operational.port import models
-
-import os
-THIS_DIR = os.path.dirname(__file__)
-
-# get file contents as string
-def read_file(filename):
- with open(THIS_DIR+'/'+filename) as file:
- return file.read()
-
-if __name__ == "__main__":
- state = DevState()
- scd_mmm = bootstrap_scd(state)
-
- # Load Petri Net meta-models
- pn_mm_cs = read_file('../../petrinet/metamodels/mm_design.od')
- pn_mm_rt_cs = pn_mm_cs + read_file('../../petrinet/metamodels/mm_runtime.od')
- pn_mm = loader.parse_and_check(state, pn_mm_cs, scd_mmm, "Petri-Net Design meta-model")
- pn_mm_rt = loader.parse_and_check(state, pn_mm_rt_cs, scd_mmm, "Petri-Net Runtime meta-model")
-
- # Load Port meta-models
- port_mm = loader.parse_and_check(state, models.port_mm_cs, scd_mmm, "Port-MM")
- port_mm_rt = loader.parse_and_check(state, models.port_rt_mm_cs, scd_mmm, "Port-MM-RT")
-
- # Merge Petri Net and Port meta-models
- print("merging...")
- merged_mm_rt = merge_models(state, mm=scd_mmm, models=[pn_mm_rt, port_mm_rt])
- print("done merging")
-
- print()
- print("topifying... (may take a while)")
- topifier = Topifier(state)
- top_merged_mm_rt = topifier.topify_cd(merged_mm_rt)
- print("done topifying")
-
- plantuml_url = make_url(render_class_diagram(state, top_merged_mm_rt))
-
- print()
- print(plantuml_url)
- print()
-
- txt = renderer.render_od(state, top_merged_mm_rt, scd_mmm)
-
- filename = THIS_DIR+"/merged_mm.od"
-
- with open(filename, "w") as file:
- file.write(f"# Auto-generated by {__file__}.\n\n")
- file.write(f"# Merged run-time meta-models of 'Petri Net' and 'Port' formalisms.\n")
- file.write(f"# An abstract 'Top'-class (superclass of everything else), and a 'generic_link'-association (which can connect everything with everything) have also been added.\n\n")
- file.write(f"# PlantUML visualization: {plantuml_url}\n\n")
- file.write(txt)
-
- print("Wrote file", filename)
diff --git a/examples/semantics/translational/renderer.py b/examples/semantics/translational/renderer.py
deleted file mode 100644
index 92a66d6..0000000
--- a/examples/semantics/translational/renderer.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from api.od import ODAPI
-from concrete_syntax.graphviz.renderer import render_object_diagram, make_graphviz_id
-from concrete_syntax.graphviz.make_url import show_graphviz
-from examples.petrinet.renderer import render_petri_net_to_dot
-from examples.semantics.operational.port.renderer import render_port_to_dot
-from examples.semantics.operational.port import helpers
-
-# COLORS
-PLACE_BG = "#DAE8FC" # fill color
-PLACE_FG = "#6C8EBF" # font, line, arrow
-BERTH_BG = "#FFF2CC"
-BERTH_FG = "#D6B656"
-CAPACITY_BG = "#F5F5F5"
-CAPACITY_FG = "#666666"
-WORKER_BG = "#D5E8D4"
-WORKER_FG = "#82B366"
-GENERATOR_BG = "#FFE6CC"
-GENERATOR_FG = "#D79B00"
-CLOCK_BG = "black"
-CLOCK_FG = "white"
-
-def graphviz_style_fg_bg(fg, bg):
- return f"style=filled,fillcolor=\"{bg}\",color=\"{fg}\",fontcolor=\"{fg}\""
-
-def render_port(state, m, mm):
- dot = render_object_diagram(state, m, mm,
- reify=True,
- only_render=[
- # Only render these types
- "Place", "Berth", "CapacityConstraint", "WorkerSet", "Generator", "Clock",
- "connection", "capacityOf", "canOperate", "generic_link",
- # Petri Net types not included (they are already rendered by other function)
- # Port-State-types not included to avoid cluttering the diagram, but if you need them, feel free to add them.
- ],
- # We can style nodes/edges according to their type:
- type_to_style={
- "Place": graphviz_style_fg_bg(PLACE_FG, PLACE_BG),
- "Berth": graphviz_style_fg_bg(BERTH_FG, BERTH_BG),
- "CapacityConstraint": graphviz_style_fg_bg(CAPACITY_FG, CAPACITY_BG),
- "WorkerSet": "shape=oval,"+graphviz_style_fg_bg(WORKER_FG, WORKER_BG),
- "Generator": "shape=parallelogram,"+graphviz_style_fg_bg(GENERATOR_FG, GENERATOR_BG),
- "Clock": graphviz_style_fg_bg(CLOCK_FG, CLOCK_BG),
-
- # same blue as Place, thick line:
- "connection": f"color=\"{PLACE_FG}\",fontcolor=\"{PLACE_FG}\",penwidth=2.0",
-
- # same grey as CapacityConstraint
- "capacityOf": f"color=\"{CAPACITY_FG}\",fontcolor=\"{CAPACITY_FG}\"",
-
- # same green as WorkerSet
- "canOperate": f"color=\"{WORKER_FG}\",fontcolor=\"{WORKER_FG}\"",
-
- # purple line
- "generic_link": "color=purple,fontcolor=purple,arrowhead=onormal",
- },
- # We have control over the node/edge labels that are rendered:
- type_to_label={
- "CapacityConstraint": lambda capconstr_name, capconstr, odapi: f"{capconstr_name}\\nshipCapacity={odapi.get_slot_value(capconstr, "shipCapacity")}",
-
- "Place": lambda place_name, place, odapi: f"{place_name}\\nnumShips={helpers.get_num_ships(odapi, place)}",
-
- "Berth": lambda berth_name, berth, odapi: f"{berth_name}\\nnumShips={helpers.get_num_ships(odapi, berth)}\\nstatus={odapi.get_slot_value(helpers.design_to_state(odapi, berth), "status")}",
-
- "Clock": lambda _, clock, odapi: f"Clock\\ntime={odapi.get_slot_value(clock, "time")}",
-
- "connection": lambda conn_name, conn, odapi: f"{conn_name}\\nmoved={odapi.get_slot_value(helpers.design_to_state(odapi, conn), "moved")}",
-
- # hide generic link labels
- "generic_link": lambda lnk_name, lnk, odapi: "",
-
- "WorkerSet": lambda ws_name, ws, odapi: f"{ws_name}\\nnumWorkers={odapi.get_slot_value(ws, "numWorkers")}",
-
- # hide the type (it's already clear enough)
- "Generator": lambda gen_name, gen, odapi: gen_name,
- },
- )
- return dot
-
-def render_port_and_petri_net(state, m, mm):
- od = ODAPI(state, m, mm)
- dot = ""
- dot += "// petri net:\n"
- dot += render_petri_net_to_dot(od)
- dot += "\n// the rest:\n"
- dot += render_port(state, m, mm)
- return dot
-
-
-def show_port_and_petri_net(state, m, mm, engine="dot"):
- show_graphviz(render_port_and_petri_net(state, m, mm), engine=engine)
diff --git a/examples/semantics/translational/rules/gen_pn/r_00_place2place_lhs.od b/examples/semantics/translational/rules/gen_pn/r_00_place2place_lhs.od
deleted file mode 100644
index 75a3c7c..0000000
--- a/examples/semantics/translational/rules/gen_pn/r_00_place2place_lhs.od
+++ /dev/null
@@ -1,5 +0,0 @@
-# Look for a Place and its PlaceState:
-
-port_place:RAM_Place
-port_place_state:RAM_PlaceState
-port_of:RAM_of (port_place_state -> port_place)
diff --git a/examples/semantics/translational/rules/gen_pn/r_00_place2place_nac.od b/examples/semantics/translational/rules/gen_pn/r_00_place2place_nac.od
deleted file mode 100644
index 5df9f6a..0000000
--- a/examples/semantics/translational/rules/gen_pn/r_00_place2place_nac.od
+++ /dev/null
@@ -1,14 +0,0 @@
-# Our LHS:
-
- port_place:RAM_Place
- port_place_state:RAM_PlaceState
- port_of:RAM_of (port_place_state -> port_place)
-
-
-# The elements from our RHS (this prevents the rule from firing forever):
-
- pn_place:RAM_PNPlace
- place2place:RAM_generic_link (pn_place -> port_place)
-
- pn_place_state:RAM_PNPlaceState
- :RAM_pn_of(pn_place_state -> pn_place)
diff --git a/examples/semantics/translational/rules/gen_pn/r_00_place2place_rhs.od b/examples/semantics/translational/rules/gen_pn/r_00_place2place_rhs.od
deleted file mode 100644
index 47e7b57..0000000
--- a/examples/semantics/translational/rules/gen_pn/r_00_place2place_rhs.od
+++ /dev/null
@@ -1,22 +0,0 @@
-# Our entire LHS:
-
- port_place:RAM_Place
- port_place_state:RAM_PlaceState
- port_of:RAM_of (port_place_state -> port_place)
-
-
-# To create: a Petri Net-place, and link it to our Port-place
-
- pn_place:RAM_PNPlace {
- # new feature: you can control the name of the object to be created:
- name = `f"pn_{get_name(matched("port_place"))}"`;
- }
- place2place:RAM_generic_link (pn_place -> port_place)
-
-
-# And also create: a Petri Net-PlaceState (indicating the amount of tokens in our newly created place)
-
- pn_place_state:RAM_PNPlaceState {
- RAM_numTokens = `get_slot_value(matched('port_place_state'), "numShips")`;
- }
- :RAM_pn_of(pn_place_state -> pn_place)
\ No newline at end of file
diff --git a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_lhs.od b/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_lhs.od
deleted file mode 100644
index f15843f..0000000
--- a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_lhs.od
+++ /dev/null
@@ -1,5 +0,0 @@
-# Just look for a connection:
-
-port_src:RAM_Source
-port_snk:RAM_Sink
-port_conn:RAM_connection (port_src -> port_snk)
diff --git a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_nac.od b/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_nac.od
deleted file mode 100644
index ff5b64c..0000000
--- a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_nac.od
+++ /dev/null
@@ -1,10 +0,0 @@
-# Our LHS:
-
-port_src:RAM_Source
-port_snk:RAM_Sink
-port_conn:RAM_connection (port_src -> port_snk)
-
-# There should not yet be a Petri Net transition linked to the connection:
-
-pn_transition:RAM_PNTransition
-:RAM_generic_link (pn_transition -> port_conn)
\ No newline at end of file
diff --git a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_rhs.od b/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_rhs.od
deleted file mode 100644
index edc7d07..0000000
--- a/examples/semantics/translational/rules/gen_pn/r_10_conn2trans_rhs.od
+++ /dev/null
@@ -1,14 +0,0 @@
-# Our LHS:
-
-port_src:RAM_Source
-port_snk:RAM_Sink
-port_conn:RAM_connection (port_src -> port_snk)
-
-# Create a Petri Net transition, and link it to our port-connection:
-
-pn_transition:RAM_PNTransition {
- name = `f"pn_{get_name(matched("port_conn"))}"`;
-}
-trans2conn:RAM_generic_link (pn_transition -> port_conn)
-
-# Note that we are not yet creating any incoming/outgoing petri net arcs! This will be done in another rule.
\ No newline at end of file
diff --git a/examples/semantics/translational/runner_exec_pn.py b/examples/semantics/translational/runner_exec_pn.py
deleted file mode 100644
index 6d43121..0000000
--- a/examples/semantics/translational/runner_exec_pn.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from concrete_syntax.textual_od import parser, renderer
-from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram
-from concrete_syntax.plantuml.make_url import make_url
-from api.od import ODAPI
-
-from transformation.ramify import ramify
-from transformation.topify.topify import Topifier
-from transformation.merger import merge_models
-from transformation.ramify import ramify
-from transformation.rule import RuleMatcherRewriter, ActionGenerator
-
-from util import loader
-
-from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker
-from examples.semantics.operational.port import models
-from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time
-from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz
-from examples.petrinet.renderer import show_petri_net
-from examples.semantics.operational import simulator
-
-import os
-import sys
-THIS_DIR = os.path.dirname(__file__)
-
-# get file contents as string
-def read_file(filename):
- with open(THIS_DIR+'/'+filename) as file:
- return file.read()
-
-if __name__ == "__main__":
- if len(sys.argv) != 2:
- print("Usage:")
- print(f" python {__file__} model.od")
- print("where `model.od` is a valid instance of Port+Petri-Net.")
- sys.exit(1)
-
- model_to_open = sys.argv[1]
-
- state = DevState()
- scd_mmm = bootstrap_scd(state)
-
- print('loading merged MM...')
- merged_mm = loader.parse_and_check(state, read_file("merged_mm.od"), scd_mmm, "merged_mm.od",
- check_conformance=False, # no need to check conformance every time
- )
-
- print('ramifying...')
- ramified_merged_mm = ramify(state, merged_mm)
-
- print('loading petri net rules...')
- rules = loader.load_rules(state,
- lambda rule_name, kind: f"{THIS_DIR}/../../petrinet/operational_semantics/r_{rule_name}_{kind}.od",
- ramified_merged_mm,
- ["fire_transition"])
-
- print('loading model...')
- filename = f"{THIS_DIR}/{model_to_open}"
- with open(filename, "r") as file:
- model = loader.parse_and_check(state, file.read(), merged_mm, "model",
- check_conformance=False, # no need to check conformance every time
- )
- print('loaded', filename)
-
- print('ready!')
-
- matcher_rewriter = RuleMatcherRewriter(state, merged_mm, ramified_merged_mm)
- action_generator = ActionGenerator(matcher_rewriter, rules)
-
- def render(od):
- show_petri_net(od) # graphviz in web browser
- return renderer.render_od(state, od.m, od.mm) # text in terminal
-
- sim = simulator.Simulator(
- action_generator=action_generator,
- decision_maker=simulator.InteractiveDecisionMaker(auto_proceed=False),
- # decision_maker=simulator.RandomDecisionMaker(seed=0),
- renderer=render,
- )
-
- sim.run(ODAPI(state, model, merged_mm))
diff --git a/examples/semantics/translational/runner_translate.py b/examples/semantics/translational/runner_translate.py
deleted file mode 100644
index 7a6bf6e..0000000
--- a/examples/semantics/translational/runner_translate.py
+++ /dev/null
@@ -1,140 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from concrete_syntax.textual_od import parser, renderer
-from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram
-from concrete_syntax.plantuml.make_url import make_url
-from api.od import ODAPI
-
-from transformation.ramify import ramify
-from transformation.rule import RuleMatcherRewriter
-
-from util import loader
-from util.module_to_dict import module_to_dict
-
-from examples.semantics.operational.port import models, helpers
-from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz
-from examples.semantics.translational.renderer import show_port_and_petri_net
-from examples.petrinet.renderer import render_petri_net
-
-import os
-THIS_DIR = os.path.dirname(__file__)
-
-# get file contents as string
-def read_file(filename):
- with open(THIS_DIR+'/'+filename) as file:
- return file.read()
-
-if __name__ == "__main__":
- state = DevState()
- scd_mmm = bootstrap_scd(state)
-
- print('loading merged MM...')
- merged_mm = loader.parse_and_check(state, read_file("merged_mm.od"), scd_mmm, "merged_mm.od",
- check_conformance=False, # no need to check conformance every time
- )
-
- print('ramifying...')
- ramified_merged_mm = ramify(state, merged_mm)
-
- ###################################
- # This is the main part you want to edit (by adding/changing the transformation rules)
- # | | |
- # V V V
- rule_names = [
- # high to low priority (the list-order here matters, the alphabetic-order of the names does not):
- "00_place2place",
- "10_conn2trans",
-
- # The above two rules create a bunch of PN places and PN transitions.
- # (with generic_links to the Port-elements)
- # One way to continue is to create PN arcs between the places and transitions.
- # Or you can also just start from scratch, if you have a better idea :)
- ]
- # The script below will keep executing the first rule until it no longer matches, then the second rule, etc.
- ###################################
-
-
- print('loading rules...')
- rules = loader.load_rules(state,
- lambda rule_name, kind: f"{THIS_DIR}/rules/gen_pn/r_{rule_name}_{kind}.od",
- ramified_merged_mm,
- rule_names)
-
- print('loading model...')
- port_m_rt_initial = loader.parse_and_check(state,
- m_cs=models.port_rt_m_cs, # <-- your final solution should work with the full model
- # m_cs=models.smaller_model_rt_cs, # <-- simpler model to try first
- # m_cs=models.smaller_model2_rt_cs, # <-- simpler model to try first
- mm=merged_mm,
- descr="initial model",
- check_conformance=False, # no need to check conformance every time
- )
-
- print('ready!')
-
- port_m_rt = port_m_rt_initial
- eval_context = {
- # make all the functions defined in 'helpers' module available to 'condition'-code in LHS/NAC/RHS:
- **module_to_dict(helpers),
- # another example: in all 'condition'-code, there will be a global variable 'meaning_of_life', equal to 42:
- 'meaning_of_life': 42, # just to demonstrate - feel free to remove this
- }
- print('The following additional globals are available:', ', '.join(list(eval_context.keys())))
- matcher_rewriter = RuleMatcherRewriter(state, merged_mm, ramified_merged_mm, eval_context=eval_context)
-
- ###################################
- # Because the matching of many different rules can be slow,
- # this script will store intermediate snapshots each time
- # after having 'exhausted' a rule.
- # When re-running the script, the stored snapshots will be loaded
- # from disk instead of re-running the rules.
- # You can force re-running the rules (e.g., because you changed the rules)
- # by deleting the `snapshot_after_*` files.
- ###################################
-
- ###################################
- # You are allowed to edit the script below, but you don't have to.
- # Changes you may want to make:
- # - comment out the 'render_petri_net'-call (to prevent popups)
- # - if you really want to do something crazy,
- # you can even write a script that uses the lower-level `match_od`/`rewrite` primitives...
- # - ??
- ###################################
-
- for i, rule_name in enumerate(rule_names):
- filename = f"{THIS_DIR}/snapshot_after_{rule_name}.od"
- print("rule =", rule_name)
- rule = rules[rule_name]
- try:
- with open(filename, "r") as file:
- port_m_rt = parser.parse_od(state, file.read(), merged_mm)
- print(f'skip rule (found {filename})')
- except FileNotFoundError:
- # Fire every rule until it cannot match any longer:
- while True:
- result = matcher_rewriter.exec_on_first_match(port_m_rt, rule, rule_name,
- in_place=True, # faster
- )
- if result == None:
- print(" no matches")
- break
- else:
- port_m_rt, lhs_match, _ = result
- print(" rewrote", lhs_match)
- txt = renderer.render_od(state, port_m_rt, merged_mm)
- with open(filename, "w") as file:
- file.write(txt)
- print('wrote', filename)
- render_petri_net(ODAPI(state, port_m_rt, merged_mm))
-
- # Uncomment to show also the port model:
- # show_port_and_petri_net(state, port_m_rt, merged_mm)
-
- # Uncomment to pause after each rendering:
- # input()
-
- ###################################
- # Once you have generated a Petri Net, you can execute the petri net:
- #
- # python runner_exec_pn.py snapshot_after_XX_name_of_my_last_rule.od
- ###################################
diff --git a/examples/woods/common.py b/examples/woods/common.py
deleted file mode 100644
index d301858..0000000
--- a/examples/woods/common.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Helpers
-def state_of(od, animal):
- return od.get_source(od.get_incoming(animal, "of")[0])
-def animal_of(od, state):
- return od.get_target(od.get_outgoing(state, "of")[0])
-def get_time(od):
- _, clock = od.get_all_instances("Clock")[0]
- return clock, od.get_slot_value(clock, "time")
-
-
-# Render our run-time state to a string
-def render_woods(od):
- txt = ""
- _, time = get_time(od)
- txt += f"T = {time}.\n"
- txt += "Bears:\n"
- def render_attacking(animal_state):
- attacking = od.get_outgoing(animal_state, "attacking")
- if len(attacking) == 1:
- whom_state = od.get_target(attacking[0])
- whom_name = od.get_name(animal_of(od, whom_state))
- return f" attacking {whom_name}"
- else:
- return ""
- def render_dead(animal_state):
- return 'dead' if od.get_slot_value(animal_state, 'dead') else 'alive'
- for _, bear_state in od.get_all_instances("BearState"):
- bear = animal_of(od, bear_state)
- hunger = od.get_slot_value(bear_state, "hunger")
- txt += f" 🐻 {od.get_name(bear)} (hunger: {hunger}, {render_dead(bear_state)}) {render_attacking(bear_state)}\n"
- txt += "Men:\n"
- for _, man_state in od.get_all_instances("ManState"):
- man = animal_of(od, man_state)
- attacked_by = od.get_incoming(man_state, "attacking")
- if len(attacked_by) == 1:
- whom_state = od.get_source(attacked_by[0])
- whom_name = od.get_name(animal_of(od, whom_state))
- being_attacked = f" being attacked by {whom_name}"
- else:
- being_attacked = ""
- txt += f" 👨 {od.get_name(man)} ({render_dead(man_state)}) {render_attacking(man_state)}{being_attacked}\n"
- return txt
-
-
-# When should simulation stop?
-def termination_condition(od):
- _, time = get_time(od)
- if time >= 10:
- return "Took too long"
-
- # End simulation when 2 animals are dead
- who_is_dead = []
- for _, animal_state in od.get_all_instances("AnimalState"):
- if od.get_slot_value(animal_state, "dead"):
- animal_name = od.get_name(animal_of(od, animal_state))
- who_is_dead.append(animal_name)
- if len(who_is_dead) >= 2:
- return f"{' and '.join(who_is_dead)} are dead"
diff --git a/examples/woods/models.py b/examples/woods/models.py
deleted file mode 100644
index 45edb15..0000000
--- a/examples/woods/models.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Design meta-model
-woods_mm_cs = """
- Animal:Class {
- abstract = True;
- }
-
- Bear:Class
- :Inheritance (Bear -> Animal)
-
- Man:Class {
- lower_cardinality = 1;
- upper_cardinality = 2;
- constraint = `get_value(get_slot(this, "weight")) > 20`;
- }
- :Inheritance (Man -> Animal)
-
-
- Man_weight:AttributeLink (Man -> Integer) {
- name = "weight";
- optional = False;
- }
-
- afraidOf:Association (Man -> Animal) {
- source_upper_cardinality = 6;
- target_lower_cardinality = 1;
- }
-"""
-# Runtime meta-model
-woods_rt_mm_cs = woods_mm_cs + """
- AnimalState:Class {
- abstract = True;
- }
- AnimalState_dead:AttributeLink (AnimalState -> Boolean) {
- name = "dead";
- optional = False;
- }
- of:Association (AnimalState -> Animal) {
- source_lower_cardinality = 1;
- source_upper_cardinality = 1;
- target_lower_cardinality = 1;
- target_upper_cardinality = 1;
- }
-
- BearState:Class {
- constraint = `get_type_name(get_target(get_outgoing(this, "of")[0])) == "Bear"`;
- }
- :Inheritance (BearState -> AnimalState)
- BearState_hunger:AttributeLink (BearState -> Integer) {
- name = "hunger";
- optional = False;
- constraint = ```
- val = get_value(get_target(this))
- val >= 0 and val <= 100
- ```;
- }
-
- ManState:Class {
- constraint = `get_type_name(get_target(get_outgoing(this, "of")[0])) == "Man"`;
- }
- :Inheritance (ManState -> AnimalState)
-
- attacking:Association (AnimalState -> ManState) {
- # Animal can only attack one Man at a time
- target_upper_cardinality = 1;
-
- # Man can only be attacked by one Animal at a time
- source_upper_cardinality = 1;
-
- constraint = ```
- attacker = get_source(this)
- if get_type_name(attacker) == "BearState":
- # only BearState has 'hunger' attribute
- hunger = get_value(get_slot(attacker, "hunger"))
- else:
- hunger = 100 # Man can always attack
- attacker_dead = get_value(get_slot(attacker, "dead"))
- attacked_state = get_target(this)
- attacked_dead = get_value(get_slot(attacked_state, "dead"))
- (
- hunger >= 50
- and not attacker_dead # cannot attack while dead
- and not attacked_dead # cannot attack whoever is dead
- )
- ```;
- }
-
- attacking_starttime:AttributeLink (attacking -> Integer) {
- name = "starttime";
- optional = False;
- constraint = ```
- val = get_value(get_target(this))
- _, clock = get_all_instances("Clock")[0]
- current_time = get_slot_value(clock, "time")
- val >= 0 and val <= current_time
- ```;
- }
-
- # Just a clock singleton for keeping the time
- Clock:Class {
- lower_cardinality = 1;
- upper_cardinality = 1;
- }
- Clock_time:AttributeLink (Clock -> Integer) {
- name = "time";
- optional = False;
- constraint = `get_value(get_target(this)) >= 0`;
- }
-"""
-
-# Our design model - the part that doesn't change
-woods_m_cs = """
- george:Man {
- weight = 80;
- }
- bill:Man {
- weight = 70;
- }
-
- teddy:Bear
- mrBrown:Bear
-
- # george is afraid of both bears
- :afraidOf (george -> teddy)
- :afraidOf (george -> mrBrown)
-
- # the men are afraid of each other
- :afraidOf (bill -> george)
- :afraidOf (george -> bill)
-"""
-
-# Our runtime model - the part that changes with every execution step
-woods_rt_initial_m_cs = woods_m_cs + """
- georgeState:ManState {
- dead = False;
- }
- :of (georgeState -> george)
-
- billState:ManState {
- dead = False;
- }
- :of (billState -> bill)
-
- teddyState:BearState {
- dead = False;
- hunger = 40;
- }
- :of (teddyState -> teddy)
-
- mrBrownState:BearState {
- dead = False;
- hunger = 80;
- }
- :of (mrBrownState -> mrBrown)
-
- clock:Clock {
- time = 0;
- }
-"""
diff --git a/examples/woods/opsem_python.py b/examples/woods/opsem_python.py
deleted file mode 100644
index 011ae00..0000000
--- a/examples/woods/opsem_python.py
+++ /dev/null
@@ -1,75 +0,0 @@
-### Operational Semantics - coded in Python ###
-
-import functools
-from examples.semantics.operational.simulator import make_actions_pure, filter_valid_actions
-from examples.woods.common import *
-
-# Action: Time advances, whoever is being attacked dies, bears become hungrier
-def action_advance_time(od):
- msgs = []
- clock, old_time = get_time(od)
- new_time = old_time + 1
- od.set_slot_value(clock, "time", new_time)
-
- for _, attacking_link in od.get_all_instances("attacking"):
- man_state = od.get_target(attacking_link)
- animal_state = od.get_source(attacking_link)
- if od.get_type_name(animal_state) == "BearState":
- od.set_slot_value(animal_state, "hunger", max(od.get_slot_value(animal_state, "hunger") - 50, 0))
- od.set_slot_value(man_state, "dead", True)
- od.delete(attacking_link)
- msgs.append(f"{od.get_name(animal_of(od, animal_state))} kills {od.get_name(animal_of(od, man_state))}.")
-
- for _, bear_state in od.get_all_instances("BearState"):
- if od.get_slot_value(bear_state, "dead"):
- continue # bear already dead
- old_hunger = od.get_slot_value(bear_state, "hunger")
- new_hunger = min(old_hunger + 10, 100)
- od.set_slot_value(bear_state, "hunger", new_hunger)
- bear = od.get_target(od.get_outgoing(bear_state, "of")[0])
- bear_name = od.get_name(bear)
- if new_hunger == 100:
- od.set_slot_value(bear_state, "dead", True)
- msgs.append(f"Bear {bear_name} dies of hunger.")
- else:
- msgs.append(f"Bear {bear_name}'s hunger level is now {new_hunger}.")
- return msgs
-
-# Action: Animal attacks Man
-# Note: We must use the names of the objects as parameters, because when cloning, the IDs of objects change!
-def action_attack(od, animal_name: str, man_name: str):
- msgs = []
- animal = od.get(animal_name)
- man = od.get(man_name)
- animal_state = state_of(od, animal)
- man_state = state_of(od, man)
- attack_link = od.create_link(None, # auto-generate link name
- "attacking", animal_state, man_state)
- _, clock = od.get_all_instances("Clock")[0]
- current_time = od.get_slot_value(clock, "time")
- od.set_slot_value(attack_link, "starttime", current_time)
- msgs.append(f"{animal_name} is now attacking {man_name}")
- return msgs
-
-# Get all actions that can be performed (including those that bring us to a non-conforming state)
-def get_all_actions(od):
- def _generate_actions(od):
- # can always advance time:
- yield ("advance time", action_advance_time)
-
- # if A is afraid of B, then B can attack A:
- for _, afraid_link in od.get_all_instances("afraidOf"):
- man = od.get_source(afraid_link)
- animal = od.get_target(afraid_link)
- animal_name = od.get_name(animal)
- man_name = od.get_name(man)
- man_state = state_of(od, man)
- animal_state = state_of(od, animal)
- descr = f"{animal_name} ({od.get_type_name(animal)}) attacks {man_name} ({od.get_type_name(man)})"
- yield (descr, functools.partial(action_attack, animal_name=animal_name, man_name=man_name))
-
- return make_actions_pure(_generate_actions(od), od)
-
-# Only get those actions that bring us to a conforming state
-def get_valid_actions(od):
- return filter_valid_actions(get_all_actions(od))
diff --git a/examples/woods/opsem_rulebased.py b/examples/woods/opsem_rulebased.py
deleted file mode 100644
index da38179..0000000
--- a/examples/woods/opsem_rulebased.py
+++ /dev/null
@@ -1,25 +0,0 @@
-### Operational Semantics - defined by rule-based model transformation ###
-
-from transformation.rule import Rule, RuleMatcherRewriter, PriorityActionGenerator
-from transformation.ramify import ramify
-from util import loader
-
-import os
-THIS_DIR = os.path.dirname(__file__)
-
-get_filename = lambda rule_name, kind: f"{THIS_DIR}/rules/r_{rule_name}_{kind}.od"
-
-def get_action_generator(state, rt_mm):
- rt_mm_ramified = ramify(state, rt_mm)
-
- matcher_rewriter = RuleMatcherRewriter(state, rt_mm, rt_mm_ramified)
-
- rules0_dict = loader.load_rules(state, get_filename, rt_mm_ramified, ["hungry_bear_dies"])
- rules1_dict = loader.load_rules(state, get_filename, rt_mm_ramified, ["advance_time", "attack"])
-
- generator = PriorityActionGenerator(matcher_rewriter, [
- rules0_dict, # highest priority
- rules1_dict, # lowest priority
- ])
-
- return generator
diff --git a/examples/woods/rules/r_advance_time_lhs.od b/examples/woods/rules/r_advance_time_lhs.od
deleted file mode 100644
index 8ed63fc..0000000
--- a/examples/woods/rules/r_advance_time_lhs.od
+++ /dev/null
@@ -1,4 +0,0 @@
-clock:RAM_Clock {
- RAM_time = `True`;
-}
-
diff --git a/examples/woods/rules/r_advance_time_rhs.od b/examples/woods/rules/r_advance_time_rhs.od
deleted file mode 100644
index 3c66ae0..0000000
--- a/examples/woods/rules/r_advance_time_rhs.od
+++ /dev/null
@@ -1,27 +0,0 @@
-clock:RAM_Clock {
- RAM_time = `get_value(this) + 1`;
-}
-
-# Advance time has a bunch of side-effects that we cannot easily model using NAC/LHS/RHS-kind of rules,
-# so we just do it in code:
-
-:GlobalCondition {
- condition = ```
- for _, attacking_link in get_all_instances("attacking"):
- man_state = get_target(attacking_link)
- animal_state = get_source(attacking_link)
- if get_type_name(animal_state) == "BearState":
- # Bear hunger decreases
- set_slot_value(animal_state, "hunger", max(get_slot_value(animal_state, "hunger") - 50, 0))
- set_slot_value(man_state, "dead", True)
- delete(attacking_link)
-
- # Bear hunger increases
- for _, bear_state in get_all_instances("BearState"):
- if get_slot_value(bear_state, "dead"):
- continue # bear already dead
- old_hunger = get_slot_value(bear_state, "hunger")
- new_hunger = min(old_hunger + 10, 100)
- set_slot_value(bear_state, "hunger", new_hunger)
- ```;
-}
diff --git a/examples/woods/rules/r_attack_lhs.od b/examples/woods/rules/r_attack_lhs.od
deleted file mode 100644
index bb5b4a6..0000000
--- a/examples/woods/rules/r_attack_lhs.od
+++ /dev/null
@@ -1,18 +0,0 @@
-# Some man is afraid of some animal:
-
-man:RAM_Man
-
-animal:RAM_Animal
-
-manAfraidOfAnimal:RAM_afraidOf (man -> animal)
-
-
-# Both man and animal have an associated state:
-
-manState:RAM_ManState
-
-man2State:RAM_of (manState -> man)
-
-animalState:RAM_AnimalState
-
-animal2State:RAM_of (animalState -> animal)
diff --git a/examples/woods/rules/r_attack_nac.od b/examples/woods/rules/r_attack_nac.od
deleted file mode 100644
index 0b3f7a8..0000000
--- a/examples/woods/rules/r_attack_nac.od
+++ /dev/null
@@ -1,7 +0,0 @@
-# Cannot attack if already attacking
-
-manState:RAM_ManState
-
-animalState:RAM_AnimalState
-
-:RAM_attacking(animalState -> manState)
\ No newline at end of file
diff --git a/examples/woods/rules/r_attack_nac2.od b/examples/woods/rules/r_attack_nac2.od
deleted file mode 100644
index 006b9bc..0000000
--- a/examples/woods/rules/r_attack_nac2.od
+++ /dev/null
@@ -1,7 +0,0 @@
-# Bear won't attack unless hungry
-
-animalState:RAM_AnimalState {
- condition = ```
- get_type_name(this) == "BearState" and get_slot_value(this, "hunger") < 50
- ```;
-}
diff --git a/examples/woods/rules/r_attack_nac3.od b/examples/woods/rules/r_attack_nac3.od
deleted file mode 100644
index 67b0e97..0000000
--- a/examples/woods/rules/r_attack_nac3.od
+++ /dev/null
@@ -1,5 +0,0 @@
-# If dead, cannot be attacked
-
-manState:RAM_ManState {
- RAM_dead = `get_value(this)`;
-}
\ No newline at end of file
diff --git a/examples/woods/rules/r_attack_nac4.od b/examples/woods/rules/r_attack_nac4.od
deleted file mode 100644
index 383d396..0000000
--- a/examples/woods/rules/r_attack_nac4.od
+++ /dev/null
@@ -1,5 +0,0 @@
-# If dead, cannot attack
-
-animalState:RAM_AnimalState {
- RAM_dead = `get_value(this)`;
-}
diff --git a/examples/woods/rules/r_attack_nac5.od b/examples/woods/rules/r_attack_nac5.od
deleted file mode 100644
index 3568dc2..0000000
--- a/examples/woods/rules/r_attack_nac5.od
+++ /dev/null
@@ -1,7 +0,0 @@
-# Not already attacking someone else:
-
-animalState:RAM_AnimalState
-
-other:RAM_ManState
-
-:RAM_attacking(animalState -> other)
\ No newline at end of file
diff --git a/examples/woods/rules/r_attack_nac6.od b/examples/woods/rules/r_attack_nac6.od
deleted file mode 100644
index 5d0c431..0000000
--- a/examples/woods/rules/r_attack_nac6.od
+++ /dev/null
@@ -1,7 +0,0 @@
-# Not already being attacked by someone else:
-
-manState:RAM_ManState
-
-other:RAM_AnimalState
-
-:RAM_attacking(other -> manState)
\ No newline at end of file
diff --git a/examples/woods/rules/r_attack_rhs.od b/examples/woods/rules/r_attack_rhs.od
deleted file mode 100644
index 1b7d27f..0000000
--- a/examples/woods/rules/r_attack_rhs.od
+++ /dev/null
@@ -1,28 +0,0 @@
-# Our entire LHS (don't delete anything)
-
- # Some man is afraid of some animal:
-
- man:RAM_Man
-
- animal:RAM_Animal
-
- manAfraidOfAnimal:RAM_afraidOf (man -> animal)
-
-
- # Both man and animal have an associated state:
-
- manState:RAM_ManState
-
- man2State:RAM_of (manState -> man)
-
- animalState:RAM_AnimalState
-
- animal2State:RAM_of (animalState -> animal)
-
-
-
-# Animal attacks man:
-
- :RAM_attacking(animalState -> manState) {
- RAM_starttime = `get_slot_value(get_all_instances("Clock")[0][1], "time")`;
- }
diff --git a/examples/woods/rules/r_hungry_bear_dies_lhs.od b/examples/woods/rules/r_hungry_bear_dies_lhs.od
deleted file mode 100644
index 3ec65ab..0000000
--- a/examples/woods/rules/r_hungry_bear_dies_lhs.od
+++ /dev/null
@@ -1,8 +0,0 @@
-bearState:RAM_BearState {
- RAM_hunger = ```
- get_value(this) == 100
- ```;
- RAM_dead = ```
- not get_value(this)
- ```;
-}
diff --git a/examples/woods/rules/r_hungry_bear_dies_rhs.od b/examples/woods/rules/r_hungry_bear_dies_rhs.od
deleted file mode 100644
index 874f881..0000000
--- a/examples/woods/rules/r_hungry_bear_dies_rhs.od
+++ /dev/null
@@ -1,4 +0,0 @@
-bearState:RAM_BearState {
- RAM_hunger = `get_value(this)`; # unchanged
- RAM_dead = `True`;
-}
diff --git a/examples/woods/woods_runner.py b/examples/woods/woods_runner.py
deleted file mode 100644
index 2027fcd..0000000
--- a/examples/woods/woods_runner.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from state.devstate import DevState
-from bootstrap.scd import bootstrap_scd
-from framework.conformance import Conformance, render_conformance_check_result
-from concrete_syntax.textual_od import parser, renderer
-from concrete_syntax.plantuml import renderer as plantuml
-from api.od import ODAPI
-
-from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker
-from examples.woods import models, opsem_python, opsem_rulebased
-from examples.woods.common import termination_condition, render_woods
-
-from util import loader
-
-state = DevState()
-scd_mmm = bootstrap_scd(state) # Load meta-meta-model
-
-### Load (meta-)models ###
-
-woods_mm = loader.parse_and_check(state, models.woods_mm_cs, scd_mmm, "MM")
-woods_rt_mm = loader.parse_and_check(state, models.woods_rt_mm_cs, scd_mmm, "RT-MM")
-woods_m = loader.parse_and_check(state, models.woods_m_cs, woods_mm, "M")
-woods_rt_m = loader.parse_and_check(state, models.woods_rt_initial_m_cs, woods_rt_mm, "RT-M")
-
-print()
-
-rulebased_action_generator = opsem_rulebased.get_action_generator(state, woods_rt_mm)
-
-sim = Simulator(
- # action_generator=opsem_python.get_valid_actions,
- # action_generator=opsem_python.get_all_actions,
- action_generator=rulebased_action_generator,
- # decision_maker=RandomDecisionMaker(seed=3),
- decision_maker=InteractiveDecisionMaker(),
- termination_condition=termination_condition,
- check_conformance=True,
- verbose=True,
- renderer=render_woods,
-)
-
-od = ODAPI(state, woods_rt_m, woods_rt_mm)
-
-sim.run(od)
diff --git a/framework/conformance.py b/framework/conformance.py
index df5a4bf..d3e71ac 100644
--- a/framework/conformance.py
+++ b/framework/conformance.py
@@ -15,6 +15,30 @@ from api.od import ODAPI, bind_api_readonly
import functools
+def eval_context_decorator(func):
+ """
+ Used to mark functions that can be called inside the evaluation context.
+ The base API functions, as well as the evaluation context itself, are injected
+ into the decorated function's globals. This happens at runtime, so static type
+ checking will not be happy about the injected names.
+ Important: using the function's own name as a key in the evaluation context
+ leads to a naming conflict in which the function takes priority, resulting in missing-argument errors.
+
+ from typing import TYPE_CHECKING
+ if TYPE_CHECKING:
+ from api.od_stub import *
+ ...
+
+ Use this to partially fix the type checking.
+ Optionally, define a stub for your own evaluation context and include it.
+ """
+ def wrapper(*args, api_context, eval_context, **kwargs):
+ for key, value in api_context.items():
+ func.__globals__[key] = value
+ for key, value in eval_context.items():
+ func.__globals__[key] = value
+ return func(*args, **kwargs)
+ return wrapper
+
def render_conformance_check_result(error_list):
if len(error_list) == 0:
return "CONFORM"
@@ -25,7 +49,7 @@ def render_conformance_check_result(error_list):
class Conformance:
# Parameter 'constraint_check_subtypes': whether to check local type-level constraints also on subtypes.
- def __init__(self, state: State, model: UUID, type_model: UUID, constraint_check_subtypes=True):
+ def __init__(self, state: State, model: UUID, type_model: UUID, constraint_check_subtypes=True, *, eval_context = None):
self.state = state
self.bottom = Bottom(state)
self.model = model
@@ -51,6 +75,9 @@ class Conformance:
self.structures = {}
self.candidates = {}
+ # add user defined functions to constraints
+ self.eval_context = eval_context if eval_context else {}
+
def check_nominal(self, *, log=False):
"""
@@ -248,6 +275,13 @@ class Conformance:
raise Exception(f"{description} evaluation result should be boolean or string! Instead got {result}")
# local constraints
+ _api_context = bind_api_readonly(self.odapi)
+ _global_binds = {**_api_context}
+ _eval_context = {**self.eval_context}
+ for key, code in _eval_context.items():
+ _f = functools.partial(code, **{"api_context" :_api_context, "eval_context":_eval_context})
+ _global_binds[key] = _f
+ _eval_context[key] = _f
for type_name in self.bottom.read_keys(self.type_model):
code = get_code(type_name)
if code != None:
@@ -256,7 +290,7 @@ class Conformance:
description = f"Local constraint of \"{type_name}\" in \"{obj_name}\""
# print(description)
try:
- result = exec_then_eval(code, _globals=bind_api_readonly(self.odapi), _locals={'this': obj_id}) # may raise
+ result = exec_then_eval(code, _globals=_global_binds, _locals={'this': obj_id}) # may raise
check_result(result, description)
except:
errors.append(f"Runtime error during evaluation of {description}:\n{indent(traceback.format_exc(), 6)}")
@@ -278,7 +312,7 @@ class Conformance:
if code != None:
description = f"Global constraint \"{tm_name}\""
try:
- result = exec_then_eval(code, _globals=bind_api_readonly(self.odapi)) # may raise
+ result = exec_then_eval(code, _globals=_global_binds) # may raise
check_result(result, description)
except:
errors.append(f"Runtime error during evaluation of {description}:\n{indent(traceback.format_exc(), 6)}")
diff --git a/framework/interactive_prompt.py b/framework/interactive_prompt.py
deleted file mode 100644
index 7ecd3c9..0000000
--- a/framework/interactive_prompt.py
+++ /dev/null
@@ -1,98 +0,0 @@
-from framework.manager import Manager
-from state.devstate import DevState
-from InquirerPy import prompt, separator
-from pprint import pprint
-import prompt_questions as questions
-from inspect import signature
-from uuid import UUID
-from ast import literal_eval
-
-
-def generate_context_question(ctx_type, services):
- """
- Converts service names to human readable form
- """
- choices = [
- s.__name__.replace('_', ' ') for s in services
- ]
- choices = sorted(choices)
- choices.append(separator.Separator())
- choices.append("close context")
- ctx_question = [
- {
- 'type': 'list',
- 'name': 'op',
- 'message': f'Currently in context {ctx_type.__name__}, which operation would you like to perform?',
- 'choices': choices,
- 'filter': lambda x: x.replace(' ', '_')
- }
- ]
- return ctx_question
-
-
-def main():
- state = DevState()
- man = Manager(state)
-
- while True:
- if man.current_model != None and man.current_context == None:
- # we have selected a model, so we display typing questions
- answer = prompt(questions.MODEL_SELECTED)
- ctx = man
- elif man.current_model != None and man.current_context != None:
- # we have selected both a model and a context, so we display available services
- qs = generate_context_question(type(man.current_context), man.get_services())
- answer = prompt(qs)
- if answer['op'] == 'close_context':
- man.close_context()
- continue
- else:
- ctx = man.current_context
- else:
- answer = prompt(questions.MODEL_MGMT)
- ctx = man
-
- if answer['op'] == 'exit':
- break
- else:
- method = getattr(ctx, answer['op'])
- args_questions = []
- types = {}
- for p in signature(method).parameters.values():
- types[p.name] = p.annotation if p.annotation else literal_eval # can't use filter in question dict, doesn't work for some reason...
- if p.annotation == UUID:
- args_questions.append({
- 'type': 'list',
- 'name': p.name,
- 'message': f'{p.name.replace("_", " ")}?',
- 'choices': list(man.get_models()),
- 'filter': lambda x: state.read_value(state.read_dict(state.read_root(), x))
- })
- else:
- args_questions.append({
- 'type': 'input',
- 'name': p.name,
- 'message': f'{p.name.replace("_", " ")}?',
- 'filter': lambda x: '' if x.lower() == 'false' else x
- })
- args = prompt(args_questions)
- args = {k: types[k](v) if len(v) > 0 else None for k, v in args.items()}
- try:
- output = method(**args)
- if output != None:
- try:
- if isinstance(output, str):
- raise TypeError
- output = list(output)
- if len(output) > 0:
- for o in sorted(output):
- print(f"\u2022 {o}")
- except TypeError:
- print(f"\u2022 {output}")
- except RuntimeError as e:
- print(e)
-
-
-if __name__ == '__main__':
- print("""Welcome to...\r\n __ ____ _____ \r\n | \\/ \\ \\ / /__ \\ \r\n | \\ / |\\ \\ / / ) |\r\n | |\\/| | \\ \\/ / / / \r\n | | | | \\ / / /_ \r\n |_| |_| \\/ |____| """)
- main()
diff --git a/framework/manager.py b/framework/manager.py
deleted file mode 100644
index a320acb..0000000
--- a/framework/manager.py
+++ /dev/null
@@ -1,225 +0,0 @@
-from state.base import State
-from bootstrap.scd import bootstrap_scd
-from bootstrap.pn import bootstrap_pn
-from services import implemented as services
-from framework.conformance import Conformance
-from uuid import UUID
-
-
-class Manager:
- def __init__(self, state: State):
- self.current_model = None
- self.current_context = None
- self.state = state
- bootstrap_scd(state)
- # bootstrap_pn(state, "PN")
- scd_node = self.state.read_dict(self.state.read_root(), "SCD")
- for key_node in self.state.read_dict_keys(self.state.read_root()):
- model_node = self.state.read_dict_node(self.state.read_root(), key_node)
- self.state.create_edge(model_node, scd_node)
-
- def get_models(self):
- """
- Retrieves all existing models
-
- Returns:
- Names of existing models
- """
- for key_node in self.state.read_dict_keys(self.state.read_root()):
- yield self.state.read_value(key_node)
-
- def instantiate_model(self, type_model_name: str, name: str):
- """
- Creates a new (empty) model that is an instance of the given type model
-
- Args:
- type_model_name: name of the type model we want to instantiate
- name: name of the instance model to be created
-
- Returns:
- Nothing
- """
- root = self.state.read_root()
- type_model_node = self.state.read_dict(root, type_model_name)
- if type_model_node == None:
- raise RuntimeError(f"No type model with name {type_model_name} found.")
- else:
- # check if model is a linguistic type model
- scd_node = self.state.read_dict(self.state.read_root(), "SCD")
- incoming = self.state.read_incoming(scd_node)
- incoming = [self.state.read_edge(e)[0] for e in incoming]
- if type_model_node not in incoming:
- raise RuntimeError(f"Model with name {type_model_name} is not a type model.")
- if name in map(self.state.read_value, self.state.read_dict_keys(root)):
- raise RuntimeError(f"Model with name {name} already exists.")
- new_model_root = self.state.create_node()
- new_model_node = self.state.create_nodevalue(str(new_model_root))
- self.state.create_dict(root, name, new_model_node)
- self.state.create_edge(new_model_node, type_model_node)
- self.current_model = (name, new_model_root)
- if type_model_name not in services:
- raise RuntimeError(f"Services for type {type_model_name} not implemented.")
- self.current_context = services[type_model_name](self.current_model[1], self.state)
-
- def select_model(self, name: str):
- """
- Select a model to interact with
-
- Args:
- name: name of the model we want to interact with
-
- Returns:
- Nothing
- """
- root = self.state.read_root()
- model_node = self.state.read_dict(root, name)
- if model_node == None:
- raise RuntimeError(f"No model with name {name} found.")
- model_root = UUID(self.state.read_value(model_node))
- self.current_model = (name, model_root)
-
- def close_model(self):
- """
- Clear the currently selected model
-
- Returns:
- Nothing
- """
- self.current_model = None
- self.current_context = None
-
- def get_types(self):
- """
- Retrieve the types of the currently selected model
-
- Returns:
- Names of the model's types
- """
- root = self.state.read_root()
- if self.current_model == None:
- raise RuntimeError(f"No model currently selected.")
- name, model = self.current_model
- model_id = self.state.read_dict(root, name)
- outgoing = self.state.read_outgoing(model_id)
- outgoing = [e for e in outgoing if len(self.state.read_outgoing(e)) == 0]
- elements = [self.state.read_edge(e)[1] for e in outgoing]
- for e in elements:
- incoming = self.state.read_incoming(e)
- label_edge, = [e for e in incoming if len(self.state.read_outgoing(e)) == 1]
- label_edge, = self.state.read_outgoing(label_edge)
- _, label_node = self.state.read_edge(label_edge)
- yield self.state.read_value(label_node)
-
- def select_context(self, name: str):
- """
- Select a type to set as the current context
-
- Args:
- name: name of the type/context
-
- Returns:
- Nothing
- """
- if name not in self.get_types():
- raise RuntimeError(f"No type {name} that currently selected model conforms to.")
- if name not in services:
- raise RuntimeError(f"Services for type {name} not implemented.")
- self.current_context = services[name](self.current_model[1], self.state)
- self.current_context.from_bottom()
-
- def close_context(self):
- """
- Exit the current (type) context
-
- Returns:
- Nothing
- """
- self.current_context.to_bottom()
- self.current_context = None
-
- def get_services(self):
- """
- Retrieve the services available in the current context
-
- Returns:
- Functions exposed by the current context's implementation
- """
- if self.current_model == None:
- raise RuntimeError(f"No model currently selected.")
- if self.current_context == None:
- raise RuntimeError(f"No context currently selected.")
- yield from [
- getattr(self.current_context, func)
- for func in dir(self.current_context)
- if callable(getattr(self.current_context, func))
- and not func.startswith("__")
- and not func == "from_bottom"
- and not func == "to_bottom"
- ]
-
- def check_conformance(self, type_model_name: str, model_name: str):
- """
- If there are existing morphisms between the model and type model
- check nominal conformance
- Else
- find conformance using structural conformance check
-
- Args:
- type_model_name: name of the type model to check conformance against
- model_name: name of the instance model
-
- Returns:
- Boolean indicating whether conformance was found
- """
- root = self.state.read_root()
- type_model_node = self.state.read_dict(root, type_model_name)
- if type_model_node == None:
- raise RuntimeError(f"No type model with name {type_model_name} found.")
- model_node = self.state.read_dict(root, model_name)
- if model_node == None:
- raise RuntimeError(f"No model with name {model_node} found.")
- types = self.state.read_outgoing(model_node)
- types = [self.state.read_edge(e)[1] for e in types]
- # if type_model_node not in types:
- if True:
- print("checking structural conformance")
- conf = Conformance(self.state,
- UUID(self.state.read_value(model_node)),
- UUID(self.state.read_value(type_model_node))).check_structural(log=True)
- if conf:
- self.state.create_edge(model_node, type_model_node)
- return conf
- else:
- print("checking nominal conformance")
- return Conformance(self.state,
- UUID(self.state.read_value(model_node)),
- UUID(self.state.read_value(type_model_node))).check_nominal(log=True)
-
- def dump_state(self):
- """
- Dumps the current state of the Modelverse to a pickle file
- """
- import pickle
- with open("state.p", "wb") as file:
- pickle.dump(self.state, file)
-
- def load_state(self):
- """
- Loads a state of the Modelverse from a pickle file
- """
- import pickle
- with open("state.p", "rb") as file:
- self.state = pickle.load(file)
-
- def to_graphviz(self):
- self.state.dump("state.dot")
-
-
-if __name__ == '__main__':
- from state.devstate import DevState
- s = DevState()
- m = Manager(s)
- m.select_model("SCD")
- m.select_context("SCD")
- for f in m.get_services():
- print(f)
diff --git a/framework/prompt_questions.py b/framework/prompt_questions.py
deleted file mode 100644
index c4b8cb0..0000000
--- a/framework/prompt_questions.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from InquirerPy.separator import Separator
-
-MODEL_SELECTED = [
- {
- 'type': 'list',
- 'name': 'op',
- 'message': 'Model selected... Which operation would you like to perform?',
- 'choices': [
- 'get types',
- 'select context',
- Separator(),
- 'close model'
- ],
- 'filter': lambda x: x.replace(' ', '_')
- }
-]
-
-MODEL_MGMT = [
- {
- 'type': 'list',
- 'name': 'op',
- 'message': 'Which model management operation would you like to perform?',
- 'choices': [
- 'get models',
- 'select model',
- 'instantiate model',
- 'check conformance',
- Separator(),
- 'load state',
- 'dump state',
- 'to graphviz',
- Separator(),
- 'exit'
- ],
- 'filter': lambda x: x.replace(' ', '_')
- }
-]
diff --git a/requirements.txt b/requirements.txt
index 2105b38..179f66d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
lark==1.1.9
-jinja2==3.1.4
\ No newline at end of file
+jinja2==3.1.4
+git+https://msdl.uantwerpen.be/git/jexelmans/drawio2py
\ No newline at end of file
diff --git a/services/od.py b/services/od.py
index 3b8700a..a9cb583 100644
--- a/services/od.py
+++ b/services/od.py
@@ -5,6 +5,7 @@ from services.primitives.integer_type import Integer
from services.primitives.string_type import String
from services.primitives.boolean_type import Boolean
from services.primitives.actioncode_type import ActionCode
+from services.primitives.bytes_type import Bytes
from api.cd import CDAPI
from typing import Optional
@@ -147,6 +148,13 @@ class OD:
actioncode_t.create(value)
return self.create_model_ref(name, "ActionCode", actioncode_node)
+ def create_bytes_value(self, name: str, value: bytes):
+ from services.primitives.bytes_type import Bytes
+ bytes_node = self.bottom.create_node()
+ bytes_t = Bytes(bytes_node, self.bottom.state)
+ bytes_t.create(value)
+ return self.create_model_ref(name, "Bytes", bytes_node)
+
# Identical to the same SCD method:
def create_model_ref(self, name: str, type_name: str, model: UUID):
# create element + morphism links
@@ -389,6 +397,8 @@ def read_primitive_value(bottom, modelref: UUID, mm: UUID):
return Boolean(referred_model, bottom.state).read(), typ_name
elif typ_name == "ActionCode":
return ActionCode(referred_model, bottom.state).read(), typ_name
+ elif typ_name == "Bytes":
+ return Bytes(referred_model, bottom.state).read(), typ_name
else:
raise Exception("Unimplemented type:", typ_name)
diff --git a/services/primitives/bytes_type.py b/services/primitives/bytes_type.py
new file mode 100644
index 0000000..04f001d
--- /dev/null
+++ b/services/primitives/bytes_type.py
@@ -0,0 +1,24 @@
+from uuid import UUID
+from state.base import State
+from services.bottom.V0 import Bottom
+
+
+class Bytes:
+ def __init__(self, model: UUID, state: State):
+ self.model = model
+ self.bottom = Bottom(state)
+ type_model_id_node, = self.bottom.read_outgoing_elements(state.read_root(), "Bytes")
+ self.type_model = UUID(self.bottom.read_value(type_model_id_node))
+
+ def create(self, value: bytes):
+ if "bytes" in self.bottom.read_keys(self.model):
+ instance, = self.bottom.read_outgoing_elements(self.model, "bytes")
+ self.bottom.delete_element(instance)
+ _instance = self.bottom.create_node(value)
+ self.bottom.create_edge(self.model, _instance, "bytes")
+ _type, = self.bottom.read_outgoing_elements(self.type_model, "Bytes")
+ self.bottom.create_edge(_instance, _type, "Morphism")
+
+ def read(self):
+ instance, = self.bottom.read_outgoing_elements(self.model, "bytes")
+ return self.bottom.read_value(instance)
\ No newline at end of file
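With `bytes` added to `primitive_types` and a `("Bytes",)` type value registered in the `state/base.py` hunk below, raw byte strings become storable at the state level. A minimal sketch, assuming it is run from the repository root (like the deleted runner scripts above) and that `create_nodevalue` accepts any value whose type is in `primitive_types`:

```python
from state.devstate import DevState

state = DevState()

# Store a raw payload (e.g. an embedded image) now that bytes is a primitive type:
blob = state.create_nodevalue(b"\x89PNG\r\n")
print(state.read_value(blob) == b"\x89PNG\r\n")  # expected: True

# The ("Bytes",) tuple is the *type* value, as exercised by the new test below:
bytes_type_node = state.create_nodevalue(("Bytes",))
print(state.read_value(bytes_type_node))  # expected: ('Bytes',)
```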
diff --git a/state/base.py b/state/base.py
index 614ae9e..78c4307 100644
--- a/state/base.py
+++ b/state/base.py
@@ -2,13 +2,14 @@ from abc import ABC, abstractmethod
from typing import Any, List, Tuple, Optional, Union
from uuid import UUID, uuid4
-primitive_types = (int, float, str, bool)
+primitive_types = (int, float, str, bool, bytes)
INTEGER = ("Integer",)
FLOAT = ("Float",)
STRING = ("String",)
BOOLEAN = ("Boolean",)
TYPE = ("Type",)
-type_values = (INTEGER, FLOAT, STRING, BOOLEAN, TYPE)
+BYTES = ("Bytes",)
+type_values = (INTEGER, FLOAT, STRING, BOOLEAN, TYPE, BYTES)
Node = UUID
diff --git a/state/test/test_create_nodevalue.py b/state/test/test_create_nodevalue.py
index 2fddbb5..ca4e8b9 100644
--- a/state/test/test_create_nodevalue.py
+++ b/state/test/test_create_nodevalue.py
@@ -171,3 +171,12 @@ def test_create_nodevalue_string_type(state):
def test_create_nodevalue_invalid_type(state):
id1 = state.create_nodevalue(("Class",))
assert id1 == None
+
+
+@pytest.mark.usefixtures("state")
+def test_create_nodevalue_bytes_type(state):
+ id1 = state.create_nodevalue(("Bytes",))
+ assert id1 != None
+
+ v = state.read_value(id1)
+ assert v == ("Bytes",)
\ No newline at end of file
diff --git a/transformation/merger.py b/transformation/merger.py
index 6caecb1..bc4033b 100644
--- a/transformation/merger.py
+++ b/transformation/merger.py
@@ -4,7 +4,7 @@ from concrete_syntax.textual_od import parser, renderer
from services.scd import SCD
from util.timer import Timer
-PRIMITIVE_TYPES = set(["Integer", "String", "Boolean", "ActionCode"])
+PRIMITIVE_TYPES = set(["Integer", "String", "Boolean", "ActionCode", "Bytes"])
# Merges N models. The models must have the same meta-model.
# Care should be taken to avoid naming collisions before calling this function.
@@ -12,7 +12,7 @@ def merge_models(state, mm, models: list[UUID]):
with Timer("merge_models"):
primitive_types = {
type_name : UUID(state.read_value(state.read_dict(state.read_root(), type_name)))
- for type_name in ["Integer", "String", "Boolean", "ActionCode"]
+ for type_name in ["Integer", "String", "Boolean", "ActionCode", "Bytes"]
}
merged = state.create_node()
@@ -52,7 +52,7 @@ def merge_models(state, mm, models: list[UUID]):
model = state.read_value(obj)
scd = SCD(merged, state)
created_obj = scd.create_model_ref(prefixed_obj_name, model)
- merged_odapi._ODAPI__recompute_mappings() # dirty!!
+ merged_odapi.recompute_mappings() # dirty!!
else:
# create node or edge
if state.is_edge(obj):
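The `merged_odapi._ODAPI__recompute_mappings()` call replaced here (and in the rewriter hunk below) only worked because of Python name mangling: a method whose name starts with two underscores is stored under a class-qualified name. A tiny standalone sketch of that mechanism, assuming ODAPI now exposes a public `recompute_mappings()` as these call sites expect (the class below is a stand-in, not the real `api.od.ODAPI`):

```python
class ODAPI:  # minimal stand-in, not the real api.od.ODAPI
    def __recompute_mappings(self):
        return "recomputed"

    def recompute_mappings(self):
        # Public entry point, as the updated call sites expect.
        return self.__recompute_mappings()

odapi = ODAPI()
print(odapi._ODAPI__recompute_mappings())  # works, but leans on name mangling ("dirty!!")
print(odapi.recompute_mappings())          # the cleaner public call
```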
diff --git a/transformation/rewriter.py b/transformation/rewriter.py
index 100073f..3b3b443 100644
--- a/transformation/rewriter.py
+++ b/transformation/rewriter.py
@@ -22,7 +22,6 @@ class TryAgainNextRound(Exception):
# Rewrite is performed in-place (modifying `host_m`)
def rewrite(state,
- lhs_m: UUID, # LHS-pattern
rhs_m: UUID, # RHS-pattern
pattern_mm: UUID, # meta-model of both patterns (typically the RAMified host_mm)
lhs_match: dict, # a match, morphism, from lhs_m to host_m (mapping pattern name -> host name), typically found by the 'match_od'-function.
@@ -150,13 +149,13 @@ def rewrite(state,
if od.is_typed_by(bottom, rhs_type, class_type):
obj_name = first_available_name(suggested_name)
host_od._create_object(obj_name, host_type)
- host_odapi._ODAPI__recompute_mappings()
+ host_odapi.recompute_mappings()
rhs_match[rhs_name] = obj_name
elif od.is_typed_by(bottom, rhs_type, assoc_type):
_, _, host_src, host_tgt = get_src_tgt()
link_name = first_available_name(suggested_name)
host_od._create_link(link_name, host_type, host_src, host_tgt)
- host_odapi._ODAPI__recompute_mappings()
+ host_odapi.recompute_mappings()
rhs_match[rhs_name] = link_name
elif od.is_typed_by(bottom, rhs_type, attr_link_type):
host_src_name, _, host_src, host_tgt = get_src_tgt()
@@ -164,7 +163,7 @@ def rewrite(state,
host_attr_name = host_mm_odapi.get_slot_value(host_attr_link, "name")
link_name = f"{host_src_name}_{host_attr_name}" # must follow naming convention here
host_od._create_link(link_name, host_type, host_src, host_tgt)
- host_odapi._ODAPI__recompute_mappings()
+ host_odapi.recompute_mappings()
rhs_match[rhs_name] = link_name
elif rhs_type == rhs_mm_odapi.get("ActionCode"):
# If we encounter ActionCode in our RHS, we assume that the code computes the value of an attribute...
@@ -223,7 +222,12 @@ def rewrite(state,
result = exec_then_eval(python_expr,
_globals=eval_globals,
_locals={'this': host_obj}) # 'this' can be used to read the previous value of the slot
- host_odapi.overwrite_primitive_value(host_obj_name, result, is_code=False)
+ # print("EVAL", common_name, python_expr, "RESULT", result, host_obj_name)
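+ # Note: Exception.add_note (used below) requires Python 3.11 or newer.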
+ try:
+ host_odapi.overwrite_primitive_value(host_obj_name, result, is_code=False)
+ except Exception as e:
+ e.add_note(f"while evaluating attribute {common_name}")
+ raise
else:
msg = f"Don't know what to do with element '{common_name}' -> '{host_obj_name}:{host_type}')"
# print(msg)
diff --git a/transformation/rule.py b/transformation/rule.py
index 81436ad..7db576e 100644
--- a/transformation/rule.py
+++ b/transformation/rule.py
@@ -117,7 +117,6 @@ class RuleMatcherRewriter:
try:
rhs_match = rewrite(self.state,
- lhs_m=lhs,
rhs_m=rhs,
pattern_mm=self.mm_ramified,
lhs_match=lhs_match,
diff --git a/transformation/schedule/Tests/Test_meta_model.py b/transformation/schedule/Tests/Test_meta_model.py
new file mode 100644
index 0000000..a0ef942
--- /dev/null
+++ b/transformation/schedule/Tests/Test_meta_model.py
@@ -0,0 +1,502 @@
+import io
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../"))
+)
+
+from api.od import ODAPI
+from bootstrap.scd import bootstrap_scd
+from transformation.schedule.rule_scheduler import RuleScheduler
+from state.devstate import DevState
+from transformation.ramify import ramify
+from util import loader
+
+
+class Test_Meta_Model(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.dir = os.path.dirname(__file__)
+ state = DevState()
+ scd_mmm = bootstrap_scd(state)
+ with open(f"{cls.dir}/models/mm_petrinet.od") as file:
+ mm_s = file.read()
+ with open(f"{cls.dir}/models/m_petrinet.od") as file:
+ m_s = file.read()
+ mm = loader.parse_and_check(state, mm_s, scd_mmm, "mm")
+ m = loader.parse_and_check(state, m_s, mm, "m")
+ mm_rt_ramified = ramify(state, mm)
+ cls.model_param = (state, m, mm)
+ cls.generator_param = (state, mm, mm_rt_ramified)
+
+ def setUp(self):
+ self.model = ODAPI(*self.model_param)
+ self.out = io.StringIO()
+ self.generator = RuleScheduler(
+ *self.generator_param,
+ directory=self.dir + "/models",
+ verbose=True,
+ outstream=self.out,
+ )
+
+ def _test_conformance(
+ self, file: str, expected_substr_err: dict[tuple[str, str], list[list[str]]]
+ ) -> None:
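+ # Loads the given schedule and compares the conformance errors written to `self.out`
+ # against `expected_substr_err`: each key is a pair of substrings identifying one
+ # error header; its value is a list of reasons, and every detail line of that error
+ # must contain all substrings of (at least) one expected reason.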
+ try:
+ self.generator.load_schedule(f"schedule/{file}")
+ errors = self.out.getvalue().split("\u25b8")[1:]
+ if len(errors) != len(expected_substr_err.keys()):
+ assert len(errors) == len(expected_substr_err.keys())
+ for err in errors:
+ error_lines = err.strip().split("\n")
+ line = error_lines[0]
+ for key_pattern in expected_substr_err.keys():
+ if (key_pattern[0] in line) and (key_pattern[1] in line):
+ key = key_pattern
+ break
+ else:
+ assert False
+ expected = expected_substr_err[key]
+ if (len(error_lines) - 1) != len(expected):
+ assert (len(error_lines) - 1) == len(expected)
+ it = iter(error_lines)
+ next(it)  # skip the first line (the error header itself)
+ for err_line in it:
+ if not any(
+ all(exp in err_line for exp in line_exp)
+ for line_exp in expected
+ ):
+ assert False
+ expected_substr_err.pop(key)
+ except AssertionError:
+ raise
+ except Exception as e:
+ assert False, f"loading the schedule raised an unexpected exception: {e}"
+
+ def test_no_start(self):
+ self._test_conformance("no_start.od", {("Start", "Cardinality"): []})
+
+ def test_no_end(self):
+ self._test_conformance("no_end.od", {("End", "Cardinality"): []})
+
+ def test_multiple_start(self):
+ self._test_conformance("multiple_start.od", {("Start", "Cardinality"): []})
+
+ def test_multiple_end(self):
+ self._test_conformance("multiple_end.od", {("End", "Cardinality"): []})
+
+ def test_connections_start(self):
+ # Try to load the following schedule.
+ # The schedule contains happy-day nodes as well as faulty nodes.
+ # Use the error messages to locate each error and to validate the multiple reasons for failure.
+ self._test_conformance(
+ "connections_start.od",
+ {
+ ("Start", "start"): [ # locate the failure (the header contains these two substrings); make sure the others do not fully overlap -> otherwise a flaky test
+ ["input exec", "foo_in", "exist"], # 4 reasons in total; a reason contains these three substrings
+ ["output exec", "out", "multiple"], # each reason must match exactly one substring list
+ ["output exec", "foo_out", "exist"],
+ ["input data", "in", "exist"],
+ ]
+ },
+ )
+
+ def test_connections_end(self):
+ self._test_conformance(
+ "connections_end.od",
+ {
+ ("End", "end"): [
+ ["input exec", "foo_in", "exist"],
+ ["output exec", "foo_out", "exist"],
+ ["input data", "in", "multiple"],
+ ["input data", "out2", "exist"],
+ ["output data", "out", "exist"],
+ ]
+ },
+ )
+
+ def test_connections_match(self):
+ self._test_conformance(
+ "connections_match.od",
+ {
+ ("Match", "m_foo"): [
+ ["input exec", "foo_in", "exist"],
+ ["output exec", "foo", "exist"],
+ ["output exec", "fail", "multiple"],
+ ["input data", "foo_in", "exist"],
+ ["input data", "in", "multiple"],
+ ["output data", "foo_out", "exist"],
+ ]
+ },
+ )
+
+ def test_connections_rewrite(self):
+ self._test_conformance(
+ "connections_rewrite.od",
+ {
+ ("Rewrite", "r_foo1"): [
+ ["input exec", "foo_in", "exist"],
+ ["output exec", "foo", "exist"],
+ ],
+ ("Rewrite", "r_foo2"): [
+ ["output exec", "out", "multiple"],
+ ["input data", "foo_in", "exist"],
+ ["input data", "in", "multiple"],
+ ["output data", "foo_out", "exist"],
+ ],
+ },
+ )
+
+ def test_connections_action(self):
+ self._test_conformance(
+ "connections_action.od",
+ {
+ ("Action", "a_foo1"): [
+ ["input exec", "foo_in", "exist"],
+ ["output exec", "out", "multiple"],
+ ["output exec", "foo", "exist"],
+ ["input data", "in1", "multiple"],
+ ],
+ ("Action", "a_foo2"): [
+ ["input exec", "in", "exist"],
+ ["output exec", "out3", "multiple"],
+ ["output exec", "out", "exist"],
+ ["input data", "in", "exist"],
+ ["output data", "out", "exist"],
+ ],
+ },
+ )
+
+ def test_connections_modify(self):
+ #TODO:
+ # see test_connections_merge
+ self._test_conformance(
+ "connections_modify.od",
+ {
+ ("Invalid source", "Conn_exec"): [],
+ ("Invalid target", "Conn_exec"): [],
+ ("Modify", "m_foo"): [
+ ["input data", "foo_in", "exist"],
+ ["output data", "foo_out", "exist"],
+ ["input data", "in", "multiple"],
+ ],
+ ("Modify", "m_exec"): [
+ ["input exec", "in", "exist"],
+ ["input exec", "in", "exist"],
+ ["output exec", "out", "exist"],
+ ]
+ },
+ )
+
+ def test_connections_merge(self):
+ #TODO:
+ # mm:
+ # association Conn_exec [0..*] Exec -> Exec [0..*] {
+ # ...;
+ # }
+ # m:
+ # Conn_exec ( Data -> Exec) {...;} -> Invalid source type 'Merge' for link '__Conn_exec_3:Conn_exec' (1)
+ # -> Invalid target type 'End' for link '__Conn_exec_3:Conn_exec' (2)
+ # Conn_exec ( Exec -> Data) {...;} -> No error at all, inconsistent and unexpected behaviour (3)
+ # different combinations behave unexpectedly
+
+ self._test_conformance(
+ "connections_merge.od",
+ {
+ ("Invalid source", "Conn_exec"): [], # (1), expected
+ ("Invalid target", "Conn_exec"): [], # (2), invalid error, should not be shown
+ ("Merge", "m_foo"): [
+ ["input data", "foo_in", "exist"],
+ ["input data", "in2", "multiple"],
+ ["output data", "foo_out", "exist"],
+ ],
+ ("Merge", "m_exec"): [ # (3), checked in Merge itself
+ ["input exec", "in", "exist"],
+ ["output exec", "out", "exist"],
+ ],
+ },
+ )
+
+ def test_connections_store(self):
+ self._test_conformance(
+ "connections_store.od",
+ {
+ ("Store", "s_foo"): [
+ ["input exec", "foo", "exist"],
+ ["output exec", "out", "multiple"],
+ ["output exec", "foo", "exist"],
+ ["input data", "foo_in", "exist"],
+ ["output data", "foo_out", "exist"],
+ ["input data", "2", "multiple"],
+ ],
+ },
+ )
+
+ def test_connections_schedule(self):
+ self._test_conformance(
+ "connections_schedule.od",
+ {
+ ("Schedule", "s_foo"): [
+ ["output exec", "out", "multiple"],
+ ["input data", "in2", "multiple"],
+ ]
+ },
+ )
+
+ def test_connections_loop(self):
+ self._test_conformance(
+ "connections_loop.od",
+ {
+ ("Loop", "l_foo"): [
+ ["input exec", "foo_in", "exist"],
+ ["output exec", "out", "multiple"],
+ ["output exec", "foo", "exist"],
+ ["input data", "foo_in", "exist"],
+ ["output data", "foo_out", "exist"],
+ ["input data", "in", "multiple"],
+ ]
+ },
+ )
+
+ def test_connections_print(self):
+ self._test_conformance(
+ "connections_print.od",
+ {
+ ("Print", "p_foo"): [
+ ["input exec", "foo_in", "exist"],
+ ["output exec", "out", "multiple"],
+ ["output exec", "foo", "exist"],
+ ["input data", "foo_in", "exist"],
+ ["output data", "out", "exist"],
+ ["input data", "in", "multiple"],
+ ]
+ },
+ )
+
+ def test_fields_start(self):
+ self._test_conformance(
+ "fields_start.od",
+ {
+ ("Start", "Cardinality"): [],
+ ("Start", "string"): [
+ ["Unexpected type", "ports_exec_out", "str"],
+ ["Unexpected type", "ports_data_out", "str"],
+ ],
+ ("Start", '"int"'): [ # quotes included to avoid a flaky test
+ ["Unexpected type", "ports_exec_out", "int"],
+ ["Unexpected type", "ports_data_out", "int"],
+ ],
+ ("Start", "tuple"): [
+ ["Unexpected type", "ports_exec_out", "tuple"],
+ ["Unexpected type", "ports_data_out", "tuple"],
+ ],
+ ("Start", "dict"): [
+ ["Unexpected type", "ports_exec_out", "dict"],
+ ["Unexpected type", "ports_data_out", "dict"],
+ ],
+ ("Start", "none"): [
+ ["Unexpected type", "ports_exec_out", "NoneType"],
+ ["Unexpected type", "ports_data_out", "NoneType"],
+ ],
+ ("Start", "invalid"): [
+ ["Invalid python", "ports_exec_out"],
+ ["Invalid python", "ports_data_out"],
+ ],
+ ("Start", "subtype"): [
+ ["Unexpected type", "ports_exec_out", "list"],
+ ["Unexpected type", "ports_data_out", "list"],
+ ],
+ ("Start", "code"): [
+ ["Unexpected type", "ports_exec_out"],
+ ["Unexpected type", "ports_data_out"],
+ ],
+ },
+ )
+
+ def test_fields_end(self):
+ self._test_conformance(
+ "fields_end.od",
+ {
+ ("End", "Cardinality"): [],
+ ("End", "string"): [
+ ["Unexpected type", "ports_exec_in", "str"],
+ ["Unexpected type", "ports_data_in", "str"],
+ ],
+ ("End", '"int"'): [
+ ["Unexpected type", "ports_exec_in", "int"],
+ ["Unexpected type", "ports_data_in", "int"],
+ ],
+ ("End", "tuple"): [
+ ["Unexpected type", "ports_exec_in", "tuple"],
+ ["Unexpected type", "ports_data_in", "tuple"],
+ ],
+ ("End", "dict"): [
+ ["Unexpected type", "ports_exec_in", "dict"],
+ ["Unexpected type", "ports_data_in", "dict"],
+ ],
+ ("End", "none"): [
+ ["Unexpected type", "ports_exec_in", "NoneType"],
+ ["Unexpected type", "ports_data_in", "NoneType"],
+ ],
+ ("End", "invalid"): [
+ ["Invalid python", "ports_exec_in"],
+ ["Invalid python", "ports_data_in"],
+ ],
+ ("End", "subtype"): [
+ ["Unexpected type", "ports_exec_in", "list"],
+ ["Unexpected type", "ports_data_in", "list"],
+ ],
+ ("End", "code"): [
+ ["Unexpected type", "ports_exec_in"],
+ ["Unexpected type", "ports_data_in"],
+ ],
+ },
+ )
+
+ def test_fields_action(self):
+ self._test_conformance(
+ "fields_action.od",
+ {
+ ("cardinality", "Action_action"): [],
+ ("Action", "string"): [
+ ["Unexpected type", "ports_exec_out", "str"],
+ ["Unexpected type", "ports_exec_in", "str"],
+ ["Unexpected type", "ports_data_out", "str"],
+ ["Unexpected type", "ports_data_in", "str"],
+ ],
+ ("Action", '"int"'): [
+ ["Unexpected type", "ports_exec_out", "int"],
+ ["Unexpected type", "ports_exec_in", "int"],
+ ["Unexpected type", "ports_data_out", "int"],
+ ["Unexpected type", "ports_data_in", "int"],
+ ],
+ ("Action", "tuple"): [
+ ["Unexpected type", "ports_exec_out", "tuple"],
+ ["Unexpected type", "ports_exec_in", "tuple"],
+ ["Unexpected type", "ports_data_out", "tuple"],
+ ["Unexpected type", "ports_data_in", "tuple"],
+ ],
+ ("Action", "dict"): [
+ ["Unexpected type", "ports_exec_out", "dict"],
+ ["Unexpected type", "ports_exec_in", "dict"],
+ ["Unexpected type", "ports_data_out", "dict"],
+ ["Unexpected type", "ports_data_in", "dict"],
+ ],
+ ("Action", "none"): [
+ ["Unexpected type", "ports_exec_out", "NoneType"],
+ ["Unexpected type", "ports_exec_in", "NoneType"],
+ ["Unexpected type", "ports_data_out", "NoneType"],
+ ["Unexpected type", "ports_data_in", "NoneType"],
+ ],
+ ('"Action"', '"invalid"'): [
+ ["Invalid python", "ports_exec_out"],
+ ["Invalid python", "ports_exec_in"],
+ ["Invalid python", "ports_data_out"],
+ ["Invalid python", "ports_data_in"],
+ ],
+ ('"Action_action"', '"invalid_action"'): [
+ ["Invalid python code"],
+ ["line"],
+ ],
+ ("Action", "subtype"): [
+ ["Unexpected type", "ports_exec_out", "list"],
+ ["Unexpected type", "ports_exec_in", "list"],
+ ["Unexpected type", "ports_data_out", "list"],
+ ["Unexpected type", "ports_data_in", "list"],
+ ],
+ ("Action", "code"): [
+ ["Unexpected type", "ports_exec_out"],
+ ["Unexpected type", "ports_exec_in"],
+ ["Unexpected type", "ports_data_out"],
+ ["Unexpected type", "ports_data_in"],
+ ],
+ },
+ )
+
+ def test_fields_modify(self):
+ self._test_conformance(
+ "fields_modify.od",
+ {
+ ("Modify", "string"): [
+ ["Unexpected type", "rename", "str"],
+ ["Unexpected type", "delete", "str"],
+ ],
+ ("Modify", "list"): [["Unexpected type", "rename", "list"]],
+ ("Modify", "set"): [["Unexpected type", "rename", "set"]],
+ ("Modify", "tuple"): [
+ ["Unexpected type", "rename", "tuple"],
+ ["Unexpected type", "delete", "tuple"],
+ ],
+ ("Modify", "dict"): [["Unexpected type", "delete", "dict"]],
+ ("Modify", "none"): [
+ ["Unexpected type", "rename", "NoneType"],
+ ["Unexpected type", "delete", "NoneType"],
+ ],
+ ("Modify", "invalid"): [
+ ["Invalid python", "rename"],
+ ["Invalid python", "delete"],
+ ],
+ ("Modify", "subtype"): [
+ ["Unexpected type", "rename", "dict"],
+ ["Unexpected type", "delete", "list"],
+ ],
+ ("Modify", "code"): [
+ ["Unexpected type", "rename"],
+ ["Unexpected type", "delete"],
+ ],
+ ("Modify", "joined"): [["rename", "delete", "disjoint"]],
+ },
+ )
+
+ def test_fields_merge(self):
+ self._test_conformance(
+ "fields_merge.od",
+ {
+ ("cardinality", "Merge_ports_data_in"): [],
+ ("Merge", "string"): [["Unexpected type", "ports_data_in", "str"]],
+ ("Merge", "tuple"): [["Unexpected type", "ports_data_in", "tuple"]],
+ ("Merge", "dict"): [["Unexpected type", "ports_data_in", "dict"]],
+ ("Merge", "none"): [["Unexpected type", "ports_data_in", "NoneType"]],
+ ("Merge", "invalid"): [["Invalid python", "ports_data_in"]],
+ ("Merge", "subtype"): [["Unexpected type", "ports_data_in", "list"]],
+ ("Merge", "code"): [["Unexpected type", "ports_data_in"]],
+ ("Merge", "no"): [["Missing", "slot", "ports_data_in"]],
+ },
+ )
+
+ def test_fields_store(self):
+ self._test_conformance(
+ "fields_store.od",
+ {
+ ("cardinality", "Store_ports"): [],
+ ("Store", "string"): [["Unexpected type", "ports", "str"]],
+ ("Store", "tuple"): [["Unexpected type", "ports", "tuple"]],
+ ("Store", "dict"): [["Unexpected type", "ports", "dict"]],
+ ("Store", "none"): [["Unexpected type", "ports", "NoneType"]],
+ ("Store", "invalid"): [["Invalid python", "ports"]],
+ ("Store", "subtype"): [["Unexpected type", "ports", "list"]],
+ ("Store", "code"): [["Unexpected type", "ports"]],
+ ("Store", "no"): [["Missing", "slot", "ports"]],
+ },
+ )
+
+ def test_fields_print(self):
+ self._test_conformance(
+ "fields_print.od",
+ {
+ ("Print_custom", "list_custom"): [["Unexpected type", "custom", "list"]],
+ ("Print_custom", "set_custom"): [["Unexpected type", "custom", "set"]],
+ ("Print_custom", "tuple_custom"): [["Unexpected type", "custom", "tuple"]],
+ ("Print_custom", "dict_custom"): [["Unexpected type", "custom", "dict"]],
+ ("Print_custom", "none_custom"): [["Unexpected type", "custom", "NoneType"]],
+ ("Print_custom", "invalid_custom"): [["Invalid python", "custom"]],
+ ("Print_custom", "subtype_custom"): [["Unexpected type", "custom", "list"]],
+ ("Print_custom", "code_custom"): [["Unexpected type", "custom"]],
+ },
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/transformation/schedule/Tests/Test_xmlparser.py b/transformation/schedule/Tests/Test_xmlparser.py
new file mode 100644
index 0000000..b3b8a08
--- /dev/null
+++ b/transformation/schedule/Tests/Test_xmlparser.py
@@ -0,0 +1,43 @@
+import os
+import unittest
+
+from transformation.schedule.rule_scheduler import RuleScheduler
+from state.devstate import DevState
+
+
+class MyTestCase(unittest.TestCase):
+ def setUp(self):
+ state = DevState()
+ self.generator = RuleScheduler(state, "", "")
+
+ def test_empty(self):
+ try:
+ self.generator.generate_schedule(
+ f"{os.path.dirname(__file__)}/drawio/Empty.drawio"
+ )
+ # buffer = io.BytesIO()
+ # self.generator.generate_dot(buffer)
+ except Exception as e:
+ assert False, f"generating the schedule raised an unexpected exception: {e}"
+
+ def test_simple(self):
+ try:
+ self.generator.generate_schedule(
+ f"{os.path.dirname(__file__)}/drawio/StartToEnd.drawio"
+ )
+ # buffer = io.BytesIO()
+ # self.generator.generate_dot(buffer)
+ except Exception as e:
+ assert False, f"generating the schedule raised an unexpected exception: {e}"
+
+ # def test_unsupported(self):
+ # try:
+ # self.generator.generate_schedule("Tests/drawio/Unsupported.drawio")
+ # # buffer = io.BytesIO()
+ # # self.generator.generate_dot(buffer)
+ # except Exception as e:
+ # assert(False)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/transformation/schedule/Tests/drawio/Empty.drawio b/transformation/schedule/Tests/drawio/Empty.drawio
new file mode 100644
index 0000000..b025fbc
--- /dev/null
+++ b/transformation/schedule/Tests/drawio/Empty.drawio
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/transformation/schedule/Tests/drawio/StartToEnd.drawio b/transformation/schedule/Tests/drawio/StartToEnd.drawio
new file mode 100644
index 0000000..c381120
--- /dev/null
+++ b/transformation/schedule/Tests/drawio/StartToEnd.drawio
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/transformation/schedule/Tests/drawio/Unsupported.drawio b/transformation/schedule/Tests/drawio/Unsupported.drawio
new file mode 100644
index 0000000..a9cf0fb
--- /dev/null
+++ b/transformation/schedule/Tests/drawio/Unsupported.drawio
@@ -0,0 +1,75 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/transformation/schedule/Tests/models/m_petrinet.od b/transformation/schedule/Tests/models/m_petrinet.od
new file mode 100644
index 0000000..f93a58b
--- /dev/null
+++ b/transformation/schedule/Tests/models/m_petrinet.od
@@ -0,0 +1,22 @@
+p0:PNPlace
+p1:PNPlace
+
+t0:PNTransition
+:arc (p0 -> t0)
+:arc (t0 -> p1)
+
+t1:PNTransition
+:arc (p1 -> t1)
+:arc (t1 -> p0)
+
+p0s:PNPlaceState {
+ numTokens = 1;
+}
+
+:pn_of (p0s -> p0)
+
+p1s:PNPlaceState {
+ numTokens = 0;
+}
+
+:pn_of (p1s -> p1)
diff --git a/transformation/schedule/Tests/models/mm_petrinet.od b/transformation/schedule/Tests/models/mm_petrinet.od
new file mode 100644
index 0000000..22986c3
--- /dev/null
+++ b/transformation/schedule/Tests/models/mm_petrinet.od
@@ -0,0 +1,31 @@
+# Places, transitions, arcs (and only one kind of arc)
+
+PNConnectable:Class { abstract = True; }
+
+arc:Association (PNConnectable -> PNConnectable)
+
+PNPlace:Class
+PNTransition:Class
+
+# inhibitor arc
+inh_arc:Association (PNPlace -> PNTransition)
+
+:Inheritance (PNPlace -> PNConnectable)
+:Inheritance (PNTransition -> PNConnectable)
+
+# A place has a number of tokens, and that's it.
+
+PNPlaceState:Class
+PNPlaceState_numTokens:AttributeLink (PNPlaceState -> Integer) {
+ name = "numTokens";
+ optional = False;
+ constraint = `"numTokens cannot be negative" if get_value(get_target(this)) < 0 else None`;
+}
+
+pn_of:Association (PNPlaceState -> PNPlace) {
+ # one-to-one
+ source_lower_cardinality = 1;
+ source_upper_cardinality = 1;
+ target_lower_cardinality = 1;
+ target_upper_cardinality = 1;
+}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/rules/transitions.od b/transformation/schedule/Tests/models/rules/transitions.od
new file mode 100644
index 0000000..1b87f1d
--- /dev/null
+++ b/transformation/schedule/Tests/models/rules/transitions.od
@@ -0,0 +1,13 @@
+# A place with no tokens:
+
+p:RAM_PNPlace
+ps:RAM_PNPlaceState {
+ RAM_numTokens = `True`;
+}
+:RAM_pn_of (ps -> p)
+
+# An incoming arc from that place to our transition:
+
+t:RAM_PNTransition
+
+:RAM_arc (p -> t)
diff --git a/transformation/schedule/Tests/models/schedule/connections_action.od b/transformation/schedule/Tests/models/schedule/connections_action.od
new file mode 100644
index 0000000..02cb3cf
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_action.od
@@ -0,0 +1,62 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+a_void:Action{
+ ports_data_in = `["in1", "in2"]`;
+ ports_data_out = `["out1", "out2"]`;
+ action=`print("hello foo1")`;
+}
+
+a_foo1:Action{
+ ports_data_in = `["in1", "in2"]`;
+ ports_data_out = `["out1", "out2"]`;
+ action=`print("hello foo1")`;
+}
+
+a_foo2:Action{
+ ports_exec_in = `["in2"]`;
+ ports_exec_out = `["out2", "out3"]`;
+ action=`print("hello foo2")`;
+}
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> a_foo1) {from="success";to="in";}
+:Conn_exec (m2 -> a_foo1) {from="fail";to="in";}
+:Conn_exec (m3 -> a_foo1) {from="success";to="foo_in";}
+:Conn_exec (m3 -> a_foo2) {from="fail";to="in2";}
+
+:Conn_exec (a_foo1 -> a_foo2) {from="out";to="in";}
+:Conn_exec (a_foo1 -> a_foo2) {from="out";to="in2";}
+:Conn_exec (a_foo1 -> a_foo2) {from="foo";to="in2";}
+:Conn_exec (a_foo2 -> end) {from="out";to="in";}
+:Conn_exec (a_foo2 -> end) {from="out2";to="in";}
+:Conn_exec (a_foo2 -> end) {from="out3";to="in";}
+:Conn_exec (a_foo2 -> end) {from="out3";to="in";}
+
+:Conn_data (start -> a_foo2) {from="1";to="in";}
+:Conn_data (a_foo2-> m2) {from="out";to="in";}
+
+:Conn_data (start -> a_foo1) {from="1";to="in1";}
+:Conn_data (start -> a_foo1) {from="2";to="in1";}
+:Conn_data (start -> a_foo1) {from="3";to="in2";}
+:Conn_data (a_foo1 -> end) {from="out1";to="1";}
+:Conn_data (a_foo1 -> end) {from="out1";to="2";}
+:Conn_data (a_foo1 -> end) {from="out2";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_end.od b/transformation/schedule/Tests/models/schedule/connections_end.od
new file mode 100644
index 0000000..0bc355e
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_end.od
@@ -0,0 +1,31 @@
+start:Start
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+end:End {
+ ports_exec_in = `["out", "in"]`;
+ ports_data_in = `["out", "in"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> end) {from="success";to="in";}
+:Conn_exec (m2 -> end) {from="fail";to="out";}
+:Conn_exec (m3 -> end) {from="success";to="out";}
+:Conn_exec (m3 -> end) {from="fail";to="foo_in";}
+:Conn_exec (end -> m) {from="foo_out";to="in";}
+
+:Conn_data (m -> end) {from="out";to="in";}
+:Conn_data (m2 -> end) {from="out";to="in";}
+:Conn_data (m3 -> end) {from="out";to="out";}
+:Conn_data (m3 -> end) {from="out";to="out2";}
+:Conn_data (end -> m) {from="out";to="in";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_loop.od b/transformation/schedule/Tests/models/schedule/connections_loop.od
new file mode 100644
index 0000000..922281a
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_loop.od
@@ -0,0 +1,44 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+l:Loop
+l_foo:Loop
+l_void:Loop
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> l_foo) {from="success";to="in";}
+:Conn_exec (m2 -> l_foo) {from="fail";to="in";}
+:Conn_exec (m3 -> l_foo) {from="success";to="foo_in";}
+
+:Conn_exec (l_foo -> l_foo) {from="out";to="in";}
+:Conn_exec (l_foo -> end) {from="out";to="in";}
+:Conn_exec (l_foo -> end) {from="it";to="in";}
+:Conn_exec (l_foo -> end) {from="foo";to="in";}
+
+:Conn_data (start -> l) {from="1";to="in";}
+:Conn_data (l -> m2) {from="out";to="in";}
+
+:Conn_data (start -> l_foo) {from="1";to="in";}
+:Conn_data (start -> l_foo) {from="2";to="in";}
+:Conn_data (start -> l_foo) {from="3";to="foo_in";}
+:Conn_data (l_foo -> end) {from="out";to="1";}
+:Conn_data (l_foo -> end) {from="out";to="2";}
+:Conn_data (l_foo -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_match.od b/transformation/schedule/Tests/models/schedule/connections_match.od
new file mode 100644
index 0000000..63a7f44
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_match.od
@@ -0,0 +1,49 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+m_foo:Match{
+ file="rules/transition.od";
+}
+
+m_void:Match{
+ file="rules/transition.od";
+}
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> m_foo) {from="success";to="in";}
+:Conn_exec (m2 -> m_foo) {from="fail";to="in";}
+:Conn_exec (m3 -> m_foo) {from="success";to="foo_in";}
+:Conn_exec (m3 -> m_foo) {from="fail";to="in";}
+
+:Conn_exec (m_foo -> end) {from="fail";to="in";}
+:Conn_exec (m_foo -> end) {from="success";to="in";}
+:Conn_exec (m_foo -> end) {from="fail";to="in";}
+:Conn_exec (m_foo -> end) {from="foo";to="in";}
+
+:Conn_data (start -> m) {from="1";to="in";}
+:Conn_data (m -> m2) {from="out";to="in";}
+
+:Conn_data (start -> m_foo) {from="1";to="in";}
+:Conn_data (start -> m_foo) {from="2";to="in";}
+:Conn_data (start -> m_foo) {from="3";to="foo_in";}
+:Conn_data (m_foo -> end) {from="out";to="1";}
+:Conn_data (m_foo -> end) {from="out";to="2";}
+:Conn_data (m_foo -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_merge.od b/transformation/schedule/Tests/models/schedule/connections_merge.od
new file mode 100644
index 0000000..8144496
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_merge.od
@@ -0,0 +1,44 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+m_exec:Merge {
+ ports_data_in = `["in1", "in2"]`;
+}
+
+m_foo:Merge {
+ ports_data_in = `["in1", "in2"]`;
+}
+
+m_void:Merge {
+ ports_data_in = `["in1", "in2"]`;
+}
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> m_exec) {from="success";to="in";}
+:Conn_exec (m_exec -> end) {from="out";to="in";}
+
+:Conn_data (start -> m_foo) {from="1";to="in1";}
+:Conn_data (start -> m_foo) {from="1";to="in2";}
+:Conn_data (start -> m_foo) {from="2";to="in2";}
+:Conn_data (start -> m_foo) {from="3";to="foo_in";}
+:Conn_data (m_foo -> end) {from="out";to="1";}
+:Conn_data (m_foo -> end) {from="out";to="2";}
+:Conn_data (m_foo -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_modify.od b/transformation/schedule/Tests/models/schedule/connections_modify.od
new file mode 100644
index 0000000..9027d0c
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_modify.od
@@ -0,0 +1,42 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+m_exec:Modify
+m_foo:Modify
+m_void:Modify
+
+mo:Modify
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> m_exec) {from="success";to="in";}
+:Conn_exec (m2 -> m_exec) {from="fail";to="in";}
+
+:Conn_exec (m_exec -> end) {from="out";to="in";}
+
+:Conn_data (start -> mo) {from="1";to="in";}
+:Conn_data (mo -> m2) {from="out";to="in";}
+
+:Conn_data (start -> m_foo) {from="1";to="in";}
+:Conn_data (start -> m_foo) {from="2";to="in";}
+:Conn_data (start -> m_foo) {from="3";to="foo_in";}
+:Conn_data (m_foo -> end) {from="out";to="1";}
+:Conn_data (m_foo -> end) {from="out";to="2";}
+:Conn_data (m_foo -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_print.od b/transformation/schedule/Tests/models/schedule/connections_print.od
new file mode 100644
index 0000000..9bf9126
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_print.od
@@ -0,0 +1,41 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+p_foo:Print
+p_void:Print
+
+p:Print
+
+end:End
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> p_foo) {from="success";to="in";}
+:Conn_exec (m2 -> p_foo) {from="fail";to="in";}
+:Conn_exec (m3 -> p_foo) {from="success";to="foo_in";}
+:Conn_exec (m3 -> p) {from="fail";to="in";}
+:Conn_exec (p -> end) {from="out";to="in";}
+
+:Conn_exec (p_foo -> p_foo) {from="out";to="in";}
+:Conn_exec (p_foo -> end) {from="out";to="in";}
+:Conn_exec (p_foo -> end) {from="foo";to="in";}
+
+:Conn_data (start -> p) {from="1";to="in";}
+
+:Conn_data (start -> p_foo) {from="1";to="in";}
+:Conn_data (start -> p_foo) {from="2";to="in";}
+:Conn_data (start -> p_foo) {from="3";to="foo_in";}
+:Conn_data (p_foo -> m2) {from="out";to="in";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_rewrite.od b/transformation/schedule/Tests/models/schedule/connections_rewrite.od
new file mode 100644
index 0000000..7e1b018
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_rewrite.od
@@ -0,0 +1,52 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+r_foo1:Rewrite{
+ file="rules/transition.od";
+}
+
+r_foo2:Rewrite{
+ file="rules/transition.od";
+}
+r_void:Rewrite{
+ file="rules/transition.od";
+}
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> r_foo1) {from="success";to="in";}
+:Conn_exec (m2 -> r_foo1) {from="fail";to="in";}
+:Conn_exec (m3 -> r_foo1) {from="success";to="foo_in";}
+:Conn_exec (m3 -> r_foo1) {from="fail";to="in";}
+
+:Conn_exec (r_foo1 -> r_foo2) {from="out";to="in";}
+:Conn_exec (r_foo1 -> end) {from="foo";to="in";}
+:Conn_exec (r_foo2 -> end) {from="out";to="in";}
+:Conn_exec (r_foo2 -> end) {from="out";to="in";}
+
+:Conn_data (start -> r_foo1) {from="1";to="in";}
+:Conn_data (r_foo1-> m2) {from="out";to="in";}
+
+:Conn_data (start -> r_foo2) {from="1";to="in";}
+:Conn_data (start -> r_foo2) {from="2";to="in";}
+:Conn_data (start -> r_foo2) {from="3";to="foo_in";}
+:Conn_data (r_foo2 -> end) {from="out";to="1";}
+:Conn_data (r_foo2 -> end) {from="out";to="2";}
+:Conn_data (r_foo2 -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_schedule.od b/transformation/schedule/Tests/models/schedule/connections_schedule.od
new file mode 100644
index 0000000..a2e3c25
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_schedule.od
@@ -0,0 +1,50 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+s_foo:Schedule{
+ file="hello.od";
+}
+
+s_void:Schedule{
+ file="hello.od";
+}
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> s_foo) {from="success";to="in";}
+:Conn_exec (m2 -> s_foo) {from="fail";to="in";}
+:Conn_exec (m3 -> s_foo) {from="success";to="foo";}
+:Conn_exec (m3 -> s_foo) {from="fail";to="foo2";}
+
+:Conn_exec (s_foo -> s_foo) {from="out";to="in";}
+:Conn_exec (s_foo -> s_foo) {from="out";to="in2";}
+:Conn_exec (s_foo -> s_foo) {from="foo";to="foo3";}
+:Conn_exec (s_foo -> end) {from="out4";to="in";}
+:Conn_exec (s_foo -> end) {from="out2";to="in";}
+:Conn_exec (s_foo -> end) {from="out5";to="in";}
+:Conn_exec (s_foo -> end) {from="out3";to="in";}
+
+:Conn_data (start -> s_foo) {from="1";to="in1";}
+:Conn_data (start -> s_foo) {from="1";to="in2";}
+:Conn_data (start -> s_foo) {from="2";to="in2";}
+:Conn_data (start -> s_foo) {from="3";to="foo_in";}
+:Conn_data (s_foo -> end) {from="out";to="1";}
+:Conn_data (s_foo -> end) {from="out";to="2";}
+:Conn_data (s_foo -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_start.od b/transformation/schedule/Tests/models/schedule/connections_start.od
new file mode 100644
index 0000000..2ade389
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_start.od
@@ -0,0 +1,27 @@
+start:Start {
+ ports_exec_out = `["out", "in"]`;
+ ports_data_out = `["out", "in"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+end:End
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (start -> m) {from="in";to="in";}
+:Conn_exec (start -> m) {from="foo_out";to="in";}
+:Conn_exec (m -> start) {from="fail";to="foo_in";}
+:Conn_exec (m -> end) {from="success";to="in";}
+
+:Conn_data (start -> m) {from="out";to="in";}
+:Conn_data (start -> m2) {from="out";to="in";}
+:Conn_data (start -> m3) {from="in";to="in";}
+:Conn_data (m -> start) {from="out";to="in";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/connections_store.od b/transformation/schedule/Tests/models/schedule/connections_store.od
new file mode 100644
index 0000000..a3e4477
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/connections_store.od
@@ -0,0 +1,47 @@
+start:Start {
+ ports_data_out = `["1", "2", "3"]`;
+}
+
+m:Match{
+ file="rules/transition.od";
+}
+m2:Match{
+ file="rules/transition.od";
+}
+m3:Match{
+ file="rules/transition.od";
+}
+
+s_foo:Store {
+ ports = `["1", "2", "3"]`;
+}
+
+s_void:Store {
+ ports = `["1", "2", "3"]`;
+}
+
+end:End {
+ ports_data_in = `["1", "2", "3"]`;
+}
+
+:Conn_exec (start -> m) {from="out";to="in";}
+:Conn_exec (m -> m2) {from="fail";to="in";}
+:Conn_exec (m -> m3) {from="success";to="in";}
+
+:Conn_exec (m2 -> s_foo) {from="success";to="in";}
+:Conn_exec (m2 -> s_foo) {from="fail";to="in";}
+:Conn_exec (m3 -> s_foo) {from="success";to="1";}
+:Conn_exec (m3 -> s_foo) {from="fail";to="foo";}
+
+:Conn_exec (s_foo -> end) {from="out";to="in";}
+:Conn_exec (s_foo -> s_foo) {from="1";to="2";}
+:Conn_exec (s_foo -> end) {from="out";to="in";}
+:Conn_exec (s_foo -> s_foo) {from="foo";to="2";}
+
+:Conn_data (start -> s_foo) {from="1";to="1";}
+:Conn_data (start -> s_foo) {from="1";to="2";}
+:Conn_data (start -> s_foo) {from="2";to="2";}
+:Conn_data (start -> s_foo) {from="3";to="foo_in";}
+:Conn_data (s_foo -> end) {from="out";to="1";}
+:Conn_data (s_foo -> end) {from="out";to="2";}
+:Conn_data (s_foo -> end) {from="foo_out";to="3";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_action.od b/transformation/schedule/Tests/models/schedule/fields_action.od
new file mode 100644
index 0000000..6770059
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_action.od
@@ -0,0 +1,83 @@
+string:Action {
+ ports_exec_in = `'["out", "in"]'`;
+ ports_exec_out = `'["out", "in"]'`;
+ ports_data_in = `'["out", "in"]'`;
+ ports_data_out = `'["out", "in"]'`;
+ action = `'["out", "in"]'`;
+}
+
+int:Action {
+ ports_exec_in = `123`;
+ ports_exec_out = `123`;
+ ports_data_in = `123`;
+ ports_data_out = `123`;
+ action = `123`;
+}
+
+list:Action {
+ ports_exec_out = `["out", "in"]`;
+ ports_exec_in = `["out", "in"]`;
+ ports_data_out = `["out", "in"]`;
+ ports_data_in = `["out", "in"]`;
+ action = `["out", "in"]`;
+}
+set:Action {
+ ports_exec_in = `{"out", "in"}`;
+ ports_exec_out = `{"out", "in"}`;
+ ports_data_in = `{"out", "in"}`;
+ ports_data_out = `{"out", "in"}`;
+ action = `{"out", "in"}`;
+}
+
+tuple:Action {
+ ports_exec_in = `("out", "in")`;
+ ports_exec_out = `("out", "in")`;
+ ports_data_in = `("out", "in")`;
+ ports_data_out = `("out", "in")`;
+ action = `("out", "in")`;
+}
+
+dict:Action {
+ ports_exec_in = `{"out": "in"}`;
+ ports_exec_out = `{"out": "in"}`;
+ ports_data_in = `{"out": "in"}`;
+ ports_data_out = `{"out": "in"}`;
+ action = `{"out": "in"}`;
+}
+
+none:Action {
+ ports_exec_in = `None`;
+ ports_exec_out = `None`;
+ ports_data_in = `None`;
+ ports_data_out = `None`;
+ action = `None`;
+}
+
+invalid:Action {
+ ports_exec_in = `[{a(0)['qkja("fyvka`;
+ ports_exec_out = `[{a(0)['qkja("fyvka`;
+ ports_data_in = `["", [{]]`;
+ ports_data_out = `["", [{]]`;
+ action = `hu(ja&{]8}]`;
+}
+
+subtype:Action {
+ ports_exec_in = `[1, 2]`;
+ ports_exec_out = `[1, 2]`;
+ ports_data_in = `[1, 2]`;
+ ports_data_out = `[1, 2]`;
+ action = `[1, 2]`;
+}
+
+code:Action {
+ ports_exec_in = `print("hello world")`;
+ ports_exec_out = `print("hello world")`;
+ ports_data_in = `print("hello world")`;
+ ports_data_out = `print("hello world")`;
+ action = `print("hello world")`;
+}
+
+no:Action
+
+start:Start
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_end.od b/transformation/schedule/Tests/models/schedule/fields_end.od
new file mode 100644
index 0000000..22a26ee
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_end.od
@@ -0,0 +1,52 @@
+start:Start
+
+string:End {
+ ports_exec_in = `'["out", "in"]'`;
+ ports_data_in = `'["out", "in"]'`;
+}
+
+int:End {
+ ports_exec_in = `123`;
+ ports_data_in = `123`;
+}
+
+list:End {
+ ports_exec_in = `["out", "in"]`;
+ ports_data_in = `["out", "in"]`;
+}
+set:End {
+ ports_exec_in = `{"out", "in"}`;
+ ports_data_in = `{"out", "in"}`;
+}
+
+tuple:End {
+ ports_exec_in = `("out", "in")`;
+ ports_data_in = `("out", "in")`;
+}
+
+dict:End {
+ ports_exec_in = `{"out": "in"}`;
+ ports_data_in = `{"out": "in"}`;
+}
+
+none:End {
+ ports_exec_in = `None`;
+ ports_data_in = `None`;
+}
+
+invalid:End {
+ ports_exec_in = `[{a(0)['qkja("fyvka`;
+ ports_data_in = `["", [{]]`;
+}
+
+subtype:End {
+ ports_exec_in = `[1, 2]`;
+ ports_data_in = `[1, 2]`;
+}
+
+code:End {
+ ports_exec_in = `print("hello world")`;
+ ports_data_in = `print("hello world")`;
+}
+
+no:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_merge.od b/transformation/schedule/Tests/models/schedule/fields_merge.od
new file mode 100644
index 0000000..18e3307
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_merge.od
@@ -0,0 +1,39 @@
+string:Merge {
+ ports_data_in = `'["out", "in"]'`;
+}
+
+list:Merge {
+ ports_data_in = `["out", "in"]`;
+}
+set:Merge {
+ ports_data_in = `{"out", "in"}`;
+}
+
+tuple:Merge {
+ ports_data_in = `("out", "in")`;
+}
+
+dict:Merge {
+ ports_data_in = `{"out": "in"}`;
+}
+
+none:Merge {
+ ports_data_in = `None`;
+}
+
+invalid:Merge {
+ ports_data_in = `["", [{]]`;
+}
+
+subtype:Merge {
+ ports_data_in = `[1, 2]`;
+}
+
+code:Merge {
+ ports_data_in = `print("hello world")`;
+}
+
+no:Merge
+
+start:Start
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_modify.od b/transformation/schedule/Tests/models/schedule/fields_modify.od
new file mode 100644
index 0000000..5730efb
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_modify.od
@@ -0,0 +1,51 @@
+string:Modify {
+ rename = `'["out", "in"]'`;
+ delete = `'["out", "in"]'`;
+}
+
+list:Modify {
+ rename = `["out", "in"]`;
+ delete = `["out", "in"]`;
+}
+set:Modify {
+ rename = `{"out", "in"}`;
+ delete = `{"out", "in"}`;
+}
+
+tuple:Modify {
+ rename = `("out", "in")`;
+ delete = `("out", "in")`;
+}
+
+dict:Modify {
+ rename = `{"out": "in"}`;
+ delete = `{"out": "in"}`;
+}
+
+none:Modify {
+ rename = `None`;
+ delete = `None`;
+}
+
+invalid:Modify {
+ rename = `[{a(0)['qkja("fyvka`;
+ delete = `["", [{]]`;
+}
+
+subtype:Modify {
+ rename = `{1: 2}`;
+ delete = `[1, 2]`;
+}
+
+code:Modify {
+ rename = `print("hello world")`;
+ delete = `print("hello world")`;
+}
+
+joined:Modify {
+ rename = `{"a":"1", "b":"2", "c":"3"}`;
+ delete = `{"a", "d"}`;
+}
+
+start:Start
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_print.od b/transformation/schedule/Tests/models/schedule/fields_print.od
new file mode 100644
index 0000000..d520e44
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_print.od
@@ -0,0 +1,39 @@
+string:Print {
+ custom = `'["port_out", "port_in"]'`;
+}
+
+list:Print {
+ custom = `["port_out", "port_in"]`;
+}
+set:Print {
+ custom = `{"port_out", "port_in"}`;
+}
+
+tuple:Print {
+ custom = `("port_out", "port_in")`;
+}
+
+dict:Print {
+ custom = `{"port_out": "port_in"}`;
+}
+
+none:Print {
+ custom = `None`;
+}
+
+invalid:Print {
+ custom = `["", [{]]`;
+}
+
+subtype:Print {
+ custom = `[1, 2]`;
+}
+
+code:Print {
+ custom = `print("hello world")`;
+}
+
+no:Print
+
+start:Start
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_start.od b/transformation/schedule/Tests/models/schedule/fields_start.od
new file mode 100644
index 0000000..c82ea91
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_start.od
@@ -0,0 +1,52 @@
+string:Start {
+ ports_exec_out = `'["out", "in"]'`;
+ ports_data_out = `'["out", "in"]'`;
+}
+
+int:Start {
+ ports_exec_out = `123`;
+ ports_data_out = `123`;
+}
+
+list:Start {
+ ports_exec_out = `["out", "in"]`;
+ ports_data_out = `["out", "in"]`;
+}
+set:Start {
+ ports_exec_out = `{"out", "in"}`;
+ ports_data_out = `{"out", "in"}`;
+}
+
+tuple:Start {
+ ports_exec_out = `("out", "in")`;
+ ports_data_out = `("out", "in")`;
+}
+
+dict:Start {
+ ports_exec_out = `{"out": "in"}`;
+ ports_data_out = `{"out": "in"}`;
+}
+
+none:Start {
+ ports_exec_out = `None`;
+ ports_data_out = `None`;
+}
+
+invalid:Start {
+ ports_exec_out = `[{a(0)['qkja("fyvka`;
+ ports_data_out = `["", [{]]`;
+}
+
+subtype:Start {
+ ports_exec_out = `[1, 2]`;
+ ports_data_out = `[1, 2]`;
+}
+
+code:Start {
+ ports_exec_out = `print("hello world")`;
+ ports_data_out = `print("hello world")`;
+}
+
+no:Start
+
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/fields_store.od b/transformation/schedule/Tests/models/schedule/fields_store.od
new file mode 100644
index 0000000..ec1f38c
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/fields_store.od
@@ -0,0 +1,39 @@
+string:Store {
+ ports = `'["port_out", "port_in"]'`;
+}
+
+list:Store {
+ ports = `["port_out", "port_in"]`;
+}
+set:Store {
+ ports = `{"port_out", "port_in"}`;
+}
+
+tuple:Store {
+ ports = `("port_out", "port_in")`;
+}
+
+dict:Store {
+ ports = `{"port_out": "port_in"}`;
+}
+
+none:Store {
+ ports = `None`;
+}
+
+invalid:Store {
+ ports = `["", [{]]`;
+}
+
+subtype:Store {
+ ports = `[1, 2]`;
+}
+
+code:Store {
+ ports = `print("hello world")`;
+}
+
+no:Store
+
+start:Start
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/multiple_end.od b/transformation/schedule/Tests/models/schedule/multiple_end.od
new file mode 100644
index 0000000..ae3651f
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/multiple_end.od
@@ -0,0 +1,5 @@
+start:Start
+end:End
+end2:End
+
+:Conn_exec (start -> end) {from="out";to="in";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/multiple_start.od b/transformation/schedule/Tests/models/schedule/multiple_start.od
new file mode 100644
index 0000000..0a869c8
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/multiple_start.od
@@ -0,0 +1,5 @@
+start:Start
+start2:Start
+end:End
+
+:Conn_exec (start -> end) {from="out";to="in";}
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/no_end.od b/transformation/schedule/Tests/models/schedule/no_end.od
new file mode 100644
index 0000000..e58e470
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/no_end.od
@@ -0,0 +1 @@
+start:Start
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/no_start.od b/transformation/schedule/Tests/models/schedule/no_start.od
new file mode 100644
index 0000000..36a7d96
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/no_start.od
@@ -0,0 +1 @@
+end:End
\ No newline at end of file
diff --git a/transformation/schedule/Tests/models/schedule/start_end.od b/transformation/schedule/Tests/models/schedule/start_end.od
new file mode 100644
index 0000000..bf51e88
--- /dev/null
+++ b/transformation/schedule/Tests/models/schedule/start_end.od
@@ -0,0 +1,3 @@
+start:Start
+end:End
+:Conn_exec (start -> end) {from="out";to="in";}
\ No newline at end of file
diff --git a/transformation/schedule/__init__.py b/transformation/schedule/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/transformation/schedule/doc/images/example_1.png b/transformation/schedule/doc/images/example_1.png
new file mode 100644
index 0000000..8ea0451
Binary files /dev/null and b/transformation/schedule/doc/images/example_1.png differ
diff --git a/transformation/schedule/doc/images/example_2.png b/transformation/schedule/doc/images/example_2.png
new file mode 100644
index 0000000..40994fd
Binary files /dev/null and b/transformation/schedule/doc/images/example_2.png differ
diff --git a/transformation/schedule/doc/images/example_3.png b/transformation/schedule/doc/images/example_3.png
new file mode 100644
index 0000000..d3092bb
Binary files /dev/null and b/transformation/schedule/doc/images/example_3.png differ
diff --git a/transformation/schedule/doc/images/geraniums-main.png b/transformation/schedule/doc/images/geraniums-main.png
new file mode 100644
index 0000000..42c7174
Binary files /dev/null and b/transformation/schedule/doc/images/geraniums-main.png differ
diff --git a/transformation/schedule/doc/images/geraniums-repot_flowers.png b/transformation/schedule/doc/images/geraniums-repot_flowers.png
new file mode 100644
index 0000000..4a89de0
Binary files /dev/null and b/transformation/schedule/doc/images/geraniums-repot_flowers.png differ
diff --git a/transformation/schedule/doc/schedule.md b/transformation/schedule/doc/schedule.md
new file mode 100644
index 0000000..8a1c6a6
--- /dev/null
+++ b/transformation/schedule/doc/schedule.md
@@ -0,0 +1,260 @@
+# Schedule Module
+
+This module is used to define and execute model transformations using a schedule in the muMLE framework.
+The development of this module is part of a research project by Robbe Teughels, together with Joeri Exelmans and Hans Vangheluwe.
+
+## Module Structure
+
+The entire module is wrapped in a single interface, [rule_scheduler.py](../rule_scheduler.py), responsible for loading and executing schedules, as well as optional functionality such as generating dot files.
+Loading schedules in the .od and .drawio formats requires compilation; all these transformations are grouped together in [generator.py](../generator.py).
+Interaction with the muMLE framework goes through a custom interface, [rule_executor.py](../rule_executor.py), which reduces the coupling between the module and the framework.
+
+Schedules are compiled to Python files. These files have a fixed interface, defined in [schedule.pyi](../schedule.pyi).
+This interface provides the functionality to set up the schedule structure and to link patterns or other schedules from the module interface to the nodes.
+The compiled files contain no functional implementation, which keeps them small and fast to compile. Instead they link against a library, [schedule_lib](../schedule_lib), that provides an implementation for each node type.
+Nodes can therefore be treated as black boxes by the schedule. This architecture also makes the library easier to test, as code generation is fully independent of the core implementation.
+
+The inheritance hierarchy of a node's implementation mirrors that of the original meta-model, increasing traceability between the original instance and the compiled instance.
+
+## Usage
+
+### Running Module
+
+```python
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from transformation.ramify import ramify
+from api.od import ODAPI
+from transformation.schedule.rule_scheduler import RuleScheduler
+
+state = DevState()
+scd_mmm = bootstrap_scd(state)
+
+# load model and meta-model
+metamodel_cs = open('your_metamodel.od', 'r', encoding="utf-8").read()
+model_cs = open('your_model.od', 'r', encoding="utf-8").read()
+
+# Parse them
+metamodel = loader.parse_and_check(state, metamodel_cs, scd_mmm, "your_metamodel")
+model = loader.parse_and_check(state, model_cs, metamodel, "Example model")
+
+# Ramified model
+metamodel_ramified = ramify(state, metamodel)
+
+# scheduler
+scheduler = RuleScheduler(state, metamodel, metamodel_ramified)
+
+# load schedule
+scheduler.load_schedule("your_schedule.od")
+# scheduler.load_schedule("your_schedule.py") # compiled version (without conformance checking)
+# scheduler.load_schedule("your_schedule.drawio") # main page will be executed
+
+# execute model transformation
+api = ODAPI(state, model, metamodel)
+scheduler.run(api)
+```
+
+#### Simple example schedules (.od format)
+
+A schedule executes from its Start node until it reaches the End node or a NullNode (reachable only through unconnected exec-gates).
+In the following basic schedule (an ARule without NAC), the first match of the pre-condition pattern is used to rewrite the host graph.
+This schedule expects at least one match, as the `fail` exec-gate of the match node is not connected.
+Zero matches lead to the NullNode, resulting in early termination.
+
+```markdown
+start:Start
+end:End
+
+# match once
+m:Match{
+ file = "your_pre-condition_pattern.od";
+ n = 1;
+}
+
+# rewrite
+r:Rewrite{
+ file = "your_post-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> r) {from="success"; to="in";}
+:Conn_exec (r -> end) {from="out"; to="in";}
+
+:Conn_data (m -> r) {from="out"; to="in";}
+```
+
+
+With some small adjustments, all matches can be rewritten (an FRule without NAC):
+
+```markdown
+start:Start
+end:End
+
+# match all
+m:Match{
+ file = "your_pre-condition_pattern.od";
+ # n = +INF (if missing: all matches)
+}
+
+l:Loop
+
+# rewrite
+r:Rewrite{
+ file = "your_post-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> l) {from="success"; to="in";}
+:Conn_exec (l -> r) {from="it"; to="in";}
+:Conn_exec (r -> l) {from="out"; to="in";}
+:Conn_exec (l -> end) {from="out"; to="in";}
+
+:Conn_data (m -> l) {from="out"; to="in";}
+:Conn_data (l -> r) {from="out"; to="in";}
+```
+
+
+Adding a NAC to this example means adding a second match node that uses the previous match as a pivot and is expected to fail (an FRule with NAC):
+
+```markdown
+start:Start
+end:End
+
+# match all
+m:Match{
+ file = "your_pre-condition_pattern.od";
+ # n = +INF (if missing: all matches)
+}
+
+l:Loop
+
+# NAC
+n:Match{
+ file = "your_NAC_pre-condition_pattern.od";
+ n = 1; # one fail is enough
+}
+
+# rewrite
+r:Rewrite{
+ file = "your_post-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> l) {from="success"; to="in";}
+:Conn_exec (l -> n) {from="it"; to="in";}
+:Conn_exec (n -> r) {from="fail"; to="in";}
+:Conn_exec (r -> l) {from="out"; to="in";}
+:Conn_exec (l -> end) {from="out"; to="in";}
+
+:Conn_data (m -> l) {from="out"; to="in";}
+:Conn_data (l -> n) {from="out"; to="in";}
+:Conn_data (l -> r) {from="out"; to="in";}
+```
+
+
+## Node Types
+
+### Start
+This node indicates the start of a schedule.
+Its signature (additional ports) can be used to insert match sets or alternative exec-paths into the schedule, increasing reusability (see the sketch after the End node below).
+
+[Start](schedule_lib/start.md)
+
+### End
+Counterpart to the Start node. Reaching this node results in successful termination of the schedule.
+Its signature (additional ports) can be used to extract match sets or alternative exec-paths, increasing reusability.
+
+[End](schedule_lib/end.md)
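+
+A minimal sketch of a reusable sub-schedule signature, assuming the port-list attributes used in this module's conformance tests (`ports_data_out` on Start, `ports_data_in` on End); the port names `pivot` and `result` are hypothetical:
+
+```markdown
+start:Start {
+ ports_data_out = `["pivot"]`;
+}
+end:End {
+ ports_data_in = `["result"]`;
+}
+
+m:Match{
+ file = "your_pre-condition_pattern.od";
+}
+
+:Conn_exec (start -> m) {from="out"; to="in";}
+:Conn_exec (m -> end) {from="success"; to="in";}
+
+:Conn_data (start -> m) {from="pivot"; to="in";}
+:Conn_data (m -> end) {from="out"; to="result";}
+```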
+
+### Match
+Matches a pre-condition pattern on the host graph. This is a primitive defined in T-Core.
+
+[Match](schedule_lib/match.md)
+
+### Rewrite
+Rewrites the host graph using a post-condition pattern. This is a primitive defined in T-Core.
+
+[Rewrite](schedule_lib/rewrite.md)
+
+### Modify
+Modifies a match set. Patterns name the elements they match, and those names must line up when a match is reused as a pivot.
+This node renames or deletes elements of a match so that it can be used as a pivot in another pattern that uses different names.
+An example usage can be found in [examples/geraniums](../../../examples/geraniums).
+
+In the following schedule, a cracked field was matched and is no longer needed.
+The Modify node deletes it, allowing the flowering-flower match node to use a pattern without this element, reducing its size and making it more general.
+ 
+
+[Modify](schedule_lib/modify.md)
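+
+A hypothetical sketch of a Modify node, assuming the slots exercised by the conformance tests (`rename` maps old element names to new ones, `delete` lists names to drop). The element names are made up, and the node is inserted between the `l` (Loop) and `n` (NAC match) nodes of the FRule example above, replacing their direct data connection:
+
+```markdown
+# rename "cracked_pot" to "pot" and drop "crack" from each match
+mo:Modify {
+ rename = `{"cracked_pot": "pot"}`;
+ delete = `{"crack"}`;
+}
+
+:Conn_data (l -> mo) {from="out"; to="in";}
+:Conn_data (mo -> n) {from="out"; to="in";}
+```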
+
+### Merge
+Combines multiple matches into one.
+This allows patterns to be split into different parts, or a specific part to be reused with another match without recalculating it.
+An example usage can be found in [examples/geraniums](../../../examples/geraniums).
+
+In the following sub-schedule, a new pot and a flower with its old pot (and their connection) are combined so that a single rewrite can move the flower.
+Without this node, replanting multiple flowers into one new pot would require markers to combine these elements, making the matching harder.
+
+
+
+[Merge](schedule_lib/merge.md)
+
+### Store
+Combines individual matches (or match sets) into a new match set.
+Triggering an exec port inserts the data currently on the associated data port into the set.
+
+The direct usage of this node is limited, but it is invaluable for libraries.
+An example usage is Petri net execution with a user interface, which requires a list of all transitions that can fire.
+Matching "all transitions" followed by a loop that checks the NAC leaves only single matches;
+this node allows these matches to be recombined into a set from which a transition can be chosen.
+
+[Store](schedule_lib/store.md)
+
+### Loop
+Iterates over a given match set.
+Nodes such as Match or Rewrite use a single match as a pivot;
+this node makes it possible to execute them for every element of the set.
+See the schedules in [Modify](#modify) and [Merge](#merge) for examples.
+
+[Loop](schedule_lib/loop.md)
+
+### Print
+Prints the input data. This is mainly used as a debugging/testing tool to validate intermediate information or state.
+
+[Print](schedule_lib/print.md)
+
+### Action
+This node allows arbitrary code to be injected into the schedule.
+It is a general-purpose node and can even recreate the behaviour of all other nodes (except Start and End).
+Not every functionality can be expressed with the current nodes; for Petri nets, an example is generating a visual overview of the Petri net system.
+
+[Action](schedule_lib/action.md)
+
+## Edge Types
+Nodes can be connected using two kinds of edges.
+Execution edges define the execution flow of the schedule and can only connect nodes that inherit from [ExecNode](schedule_lib/exec_node.md);
+they connect the execution gates defined by the nodes in a "one to many" fashion.
+Data edges distribute information to other [DataNode](schedule_lib/data_node.md)s and work the opposite way, "many to one" on data gates.
+A data change on a gate notifies all connected nodes of the change, allowing propagation through the system.
+Note: the data received is immutable, to ensure consistent and reliable execution of the schedule.
+
+
+## File Formats
+
+### .od
+This is the original textual file format used by the framework. Its main advantage is the integration with the framework, which allows conformance checking against the scheduling language.
+All other formats are therefore converted to this format for conformance checking before being compiled.
+
+### .py
+All schedules are compiled to Python after conformance checking. Accepting this format directly makes it possible to load schedules without the expensive compilation and conformance-checking steps, reducing computational cost.
+It is recommended when deploying applications in which the schedule will not change.
+Writing schedules directly in this format is not advisable, as it bypasses the conformance checking that guarantees a well-formed schedule module.
+
+### .drawio
+A visual format for the drawio application.
+A drawio [library](../schedule_lib/Schedule_lib.xml) is included, providing node shapes with the additional fields needed for easy integration with the application.
+The main advantage of this format is its use of pages, which allows sub-schedules to be created and organised easily within one schedule (layers are not allowed).
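+
+Regardless of the format, a schedule is loaded and executed through the `RuleScheduler` class. The sketch below is illustrative only: the state, the (RAMified) runtime meta-models, the host model, the directory name and the file name are assumptions, not part of the library documentation.
+
+```python
+from transformation.schedule.rule_scheduler import RuleScheduler
+
+
+def run_schedule(state, mm_rt, mm_rt_ramified, model):
+    # `state`, the meta-models and `model` are assumed to be prepared with the
+    # usual muMLE utilities; only the scheduler calls are illustrated here.
+    scheduler = RuleScheduler(
+        state, mm_rt, mm_rt_ramified,
+        verbose=True,                   # log parsing and conformance-check progress
+        directory="my_transformation",  # hypothetical folder containing the schedule files
+    )
+    # .drawio and .od schedules are converted and compiled to .py on the fly;
+    # a .py schedule is loaded directly, skipping conformance checking.
+    if not scheduler.load_schedule("main.od"):
+        raise RuntimeError("schedule failed to load (see verbose output)")
+    return scheduler.run(model)
+```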
+
diff --git a/transformation/schedule/doc/schedule_lib/action.md b/transformation/schedule/doc/schedule_lib/action.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/action.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/data_node.md b/transformation/schedule/doc/schedule_lib/data_node.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/data_node.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/end.md b/transformation/schedule/doc/schedule_lib/end.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/end.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/exec_node.md b/transformation/schedule/doc/schedule_lib/exec_node.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/exec_node.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/loop.md b/transformation/schedule/doc/schedule_lib/loop.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/loop.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/match.md b/transformation/schedule/doc/schedule_lib/match.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/match.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/merge.md b/transformation/schedule/doc/schedule_lib/merge.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/merge.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/modify.md b/transformation/schedule/doc/schedule_lib/modify.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/modify.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/node.md b/transformation/schedule/doc/schedule_lib/node.md
new file mode 100644
index 0000000..078e00f
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/node.md
@@ -0,0 +1,41 @@
+## Node Module
+
+Defines the abstract base Node class for graph-based structures. Each Node is assigned
+a unique identifier via an external IdGenerator. The class provides an interface for
+managing execution state and generating DOT graph representations using Jinja2 templates.
+
+### Class: `Node`
+
+- **Attributes**
+ - `id: int`: A unique identifier assigned to each instance upon initialization.
+
+- **Methods**
+ - `get_id`
+ - returns: `int`, The unique node ID
+
+ Retrieves the unique identifier of the node.
+
+ - `generate_stack_frame`
+ - exec_id: `int`, The ID of the execution context.
+ - returns: `None`
+
+ Initializes a new state frame for a specific execution context.
+ Designed to be overridden in subclasses that use execution state.
+
+ - `delete_stack_frame`
+ - exec_id: `int`, The ID of the execution context.
+ - returns: `None`
+
+ Deletes the state frame for a specific execution context.
+ Designed to be overridden in subclasses that use execution state.
+
+ - `generate_dot`
+ - nodes: `list[str]`, A list to append DOT node definitions to.
+    - edges: `list[str]`, A list to append DOT edge definitions to.
+    - visited: `set[str]`, A set of already visited node IDs to avoid duplicates or recursion.
+    - template: `Template`, The Jinja2 template used to format the node's DOT representation.
+ - returns: `None`
+
+ Generates the DOT graph representation for this node and its relationships.
+ Must be implemented in subclasses.
+
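+As an illustration only, a minimal subclass might look as follows. The import path, the argument-free base constructor and the use of a raw DOT string instead of the template's macros are assumptions made to keep the sketch self-contained.
+
+```python
+from transformation.schedule.schedule_lib.node import Node  # assumed module path
+
+
+class LabelNode(Node):
+    """Hypothetical node that only knows how to draw itself in the DOT output."""
+
+    def __init__(self, label: str):
+        super().__init__()  # the base class assigns a unique id via the IdGenerator
+        self.label = label
+
+    def generate_dot(self, nodes, edges, visited, template):
+        node_id = str(self.get_id())
+        if node_id in visited:
+            return
+        visited.add(node_id)
+        # The real nodes render their DOT snippet through `template`; a raw DOT
+        # string is used here instead.
+        nodes.append(f'"{node_id}" [label="{self.label}"];')
+```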
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/print.md b/transformation/schedule/doc/schedule_lib/print.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/print.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/rewrite.md b/transformation/schedule/doc/schedule_lib/rewrite.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/rewrite.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/rule.md b/transformation/schedule/doc/schedule_lib/rule.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/rule.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/schedule.md b/transformation/schedule/doc/schedule_lib/schedule.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/schedule.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/start.md b/transformation/schedule/doc/schedule_lib/start.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/start.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/doc/schedule_lib/store.md b/transformation/schedule/doc/schedule_lib/store.md
new file mode 100644
index 0000000..9805841
--- /dev/null
+++ b/transformation/schedule/doc/schedule_lib/store.md
@@ -0,0 +1 @@
+# Under construction
\ No newline at end of file
diff --git a/transformation/schedule/generator.py b/transformation/schedule/generator.py
new file mode 100644
index 0000000..9fd08a0
--- /dev/null
+++ b/transformation/schedule/generator.py
@@ -0,0 +1,197 @@
+import sys
+import os
+from uuid import UUID
+
+from typing import Callable
+from jinja2.runtime import Macro
+
+from api.od import ODAPI
+from jinja2 import Environment, FileSystemLoader
+
+
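+# Translates a scheduling model (accessed through ODAPI) into executable Python code,
+# using the Jinja2 templates in the local "templates" directory.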
+class schedule_generator:
+ def __init__(self, odApi: ODAPI):
+ self.env = Environment(
+ loader=FileSystemLoader(
+ os.path.join(os.path.dirname(__file__), "templates")
+ )
+ )
+ self.env.trim_blocks = True
+ self.env.lstrip_blocks = True
+ self.template = self.env.get_template("schedule_template.j2")
+ self.template_wrap = self.env.get_template("schedule_template_wrap.j2")
+ self.api = odApi
+
+
+ def _get_slot_value_default(item: UUID, slot: str, default):
+ if slot in self.api.get_slots(item):
+ return self.api.get_slot_value(item, slot)
+ return default
+
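+        # Per target node type: whether a data connection into that node is generated as event-triggering.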
+ conn_data_event = {
+ "Match": lambda item: False,
+ "Rewrite": lambda item: False,
+ "Modify": lambda item: True,
+ "Merge": lambda item: True,
+ "Loop": lambda item: True,
+ "Action": lambda item: _get_slot_value_default(item, "event", False),
+ "Print": lambda item: _get_slot_value_default(item, "event", False),
+ "Store": lambda item: False,
+ "Schedule": lambda item: False,
+ "End": lambda item: False,
+ }
+
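+        # Per node/edge type: a callable that builds the keyword arguments for the corresponding template macro.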
+ arg_map = {
+ "Loop": (name_dict := lambda item: {"name": self.api.get_name(item)}),
+ "Start": lambda item: {
+ **name_dict(item),
+ "ports_exec_out": eval(
+ self.api.get_slot_value_default(item, "ports_exec_out", "['out']")
+ ),
+ "ports_data_out": eval(
+ self.api.get_slot_value_default(item, "ports_data_out", "[]")
+ ),
+ },
+ "End": lambda item: {
+ **name_dict(item),
+ "ports_exec_in": eval(
+ self.api.get_slot_value_default(item, "ports_exec_in", "['in']")
+ ),
+ "ports_data_in": eval(
+ self.api.get_slot_value_default(item, "ports_data_in", "[]")
+ ),
+ },
+ "Rewrite": (
+ file_dict := lambda item: {
+ **name_dict(item),
+ "file": self.api.get_slot_value(item, "file"),
+ }
+ ),
+ "Match": lambda item: {
+ **file_dict(item),
+ "n": self.api.get_slot_value_default(item, "n", 'float("inf")'),
+ },
+ "Action": lambda item: {
+ **name_dict(item),
+ "ports_exec_in": self.api.get_slot_value_default(item, "ports_exec_in", ["in"]),
+ "ports_exec_out": self.api.get_slot_value_default(item, "ports_exec_out", ["out"]),
+ "ports_data_in": self.api.get_slot_value_default(item, "ports_data_in", []),
+ "ports_data_out": self.api.get_slot_value_default(item, "ports_data_out", []),
+ "action": repr(self.api.get_slot_value(item, "action")),
+ "init": repr(
+ self.api.get_slot_value_default(item, "init", "")
+ ),
+ },
+ "Modify": lambda item: {
+ **name_dict(item),
+ "rename": eval(self.api.get_slot_value_default(item, "rename", "{}")),
+ "delete": eval(self.api.get_slot_value_default(item, "delete", "{}")),
+ },
+ "Merge": lambda item: {
+ **name_dict(item),
+ "ports_data_in": eval(
+ self.api.get_slot_value_default(item, "ports_data_in", "[]")
+ ),
+ },
+ "Store": lambda item: {
+ **name_dict(item),
+ "ports": eval(self.api.get_slot_value_default(item, "ports", "[]")),
+ },
+ "Schedule": file_dict,
+ "Print": lambda item: {
+ **name_dict(item),
+ "label": self.api.get_slot_value_default(item, "label", ""),
+ "custom": self.api.get_slot_value_default(item, "custom", ""),
+ },
+ "Conn_exec": (
+ conn_dict := lambda item: {
+ "name_from": self.api.get_name(self.api.get_source(item)),
+ "name_to": self.api.get_name(self.api.get_target(item)),
+ "from": self.api.get_slot_value_default(item, "from", 0),
+ "to": self.api.get_slot_value_default(item, "to", 0),
+ }
+ ),
+ "Conn_data": lambda item: {
+ **conn_dict(item),
+ "event": conn_data_event[
+ self.api.get_type_name(target := self.api.get_target(item))
+ ](target),
+ },
+ }
+ self.macro_args = {
+ tp: (macro, arg_map.get(tp))
+ for tp, macro in self.template.module.__dict__.items()
+ if type(macro) == Macro
+ }
+
+ def _render(self, item):
+ type_name = self.api.get_type_name(item)
+ macro, arg_gen = self.macro_args[type_name]
+ return macro(**arg_gen(item))
+
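+    # Generic traversal: starting from the nodes on the stack, follow the connections returned
+    # by get_links; returns the set of visited nodes and the list of connections walked.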
+ def _dfs(
+ self, stack: list[UUID], get_links: Callable, get_next_node: Callable
+ ) -> tuple[set[UUID], list[UUID]]:
+ visited = set()
+ connections = list()
+ while len(stack) > 0:
+ obj = stack.pop()
+ if obj in visited:
+ continue
+ visited.add(obj)
+ for conn in get_links(self.api, obj):
+ connections.append(conn)
+ stack.append(get_next_node(self.api, conn))
+ return visited, connections
+
+ def generate_schedule(self, stream=sys.stdout):
+ start = self.api.get_all_instances("Start")[0][1]
+ end = self.api.get_all_instances("End")[0][1]
+ out = {
+ "blocks": [],
+ "blocks_name": [],
+ "blocks_start_end": [],
+ "exec_conn": [],
+ "data_conn": [],
+ "match_files": set(),
+ "matchers": [],
+ "start": self.api.get_name(start),
+ "end": self.api.get_name(end),
+ }
+
+ stack = [start, end]
+ exec_blocks, conn_exec = self._dfs(
+ stack,
+ lambda api, node: api.get_outgoing(node, "Conn_exec"),
+ lambda api, conn: api.get_target(conn),
+ )
+
+ for name, p in self.api.get_all_instances("Print"):
+ if self.api.has_slot(p, "event") and self.api.get_slot_value(p, "event"):
+ exec_blocks.add(p)
+
+ stack = list(exec_blocks)
+ blocks, conn_data = self._dfs(
+ stack,
+ lambda api, node: api.get_incoming(node, "Conn_data"),
+ lambda api, conn: api.get_source(conn),
+ )
+
+ for exec_c in conn_exec:
+ out["exec_conn"].append(self._render(exec_c))
+
+ for data_c in conn_data:
+ out["data_conn"].append(self._render(data_c))
+
+ for block in blocks:
+ out["blocks_name"].append(self.api.get_name(block))
+ if block in [start, end]:
+ out["blocks_start_end"].append(self._render(block))
+ continue
+ out["blocks"].append(self._render(block))
+ if self.api.is_instance(block, "Rule"):
+ d = self.macro_args[self.api.get_type_name(block)][1](block)
+ out["match_files"].add(d["file"])
+ out["matchers"].append(d)
+
+ print(self.template_wrap.render(out), file=stream)
diff --git a/transformation/schedule/models/eval_context.py b/transformation/schedule/models/eval_context.py
new file mode 100644
index 0000000..061b4f6
--- /dev/null
+++ b/transformation/schedule/models/eval_context.py
@@ -0,0 +1,151 @@
+from typing import TYPE_CHECKING, get_origin, get_args
+from types import UnionType
+from uuid import UUID
+
+from jinja2 import Template
+
+from framework.conformance import eval_context_decorator
+from services.primitives.string_type import String
+
+if TYPE_CHECKING:
+ from api.od_stub_readonly import get_outgoing, get_incoming, get_slot_value, get_value, get_target, has_slot
+ from eval_context_stub import *
+
+
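+# Helper checks used by the constraint code in the scheduling meta-model (scheduling_MM.od);
+# they are exposed to the conformance checker via the mm_eval_context mapping at the bottom of this file.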
+@eval_context_decorator
+def _check_all_connections(this, labels: list[list[str] | str]) -> list[str]:
+ err = []
+ check_incoming_exec(this, err, labels[0])
+ check_outgoing_exec(this, err, labels[1])
+ check_incoming_data(this, err, labels[2])
+ check_outgoing_data(this, err, labels[3])
+ return err
+
+@eval_context_decorator
+def _check_outgoing_exec(this, err: list[str], labels: list[str]) -> None:
+ l = set(labels)
+ gates = set()
+ for y in get_outgoing(this, "Conn_exec"):
+ if (x := get_slot_value(y, "from")) not in l:
+ err.append(f"output exec gate '{x}' does not exist. Gates: {', '.join(labels)}.")
+ if x in gates:
+ err.append(f"output exec gate '{x}' is connected to multiple gates.")
+ gates.add(x)
+
+
+@eval_context_decorator
+def _check_incoming_exec(this, err: list[str], labels: list[str]) -> None:
+ l = set(labels)
+ for y in get_incoming(this, "Conn_exec"):
+ if (x := get_slot_value(y, "to")) not in l:
+ err.append(f"input exec gate gate '{x}' does not exist. Gates: {', '.join(labels)}.")
+
+
+@eval_context_decorator
+def _check_outgoing_data(this, err: list[str], labels: list[str]) -> None:
+ l = set(labels)
+ for y in get_outgoing(this, "Conn_data"):
+ if (x := get_slot_value(y, "from")) not in l:
+ err.append(f"output data gate '{x}' does not exist. Gates: {', '.join(labels)}.")
+
+
+@eval_context_decorator
+def _check_incoming_data(this, err: list[str], labels: list[str]) -> None:
+ l = set(labels)
+ gates = set()
+ for y in get_incoming(this, "Conn_data"):
+ if (x := get_slot_value(y, "to")) not in l:
+ err.append(f"input data gate '{x}' does not exist. Gates: {', '.join(labels)}.")
+ if x in gates:
+ err.append(f"input data gate '{x}' is connected to multiple gates.")
+ gates.add(x)
+
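+# Recursively checks a value against a type annotation, including unions and
+# parameterised containers (list, set, tuple, dict).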
+def check_type(x: any, typ2: any) -> bool:
+ origin = get_origin(typ2)
+ if origin is None:
+ return isinstance(x, typ2)
+ args = get_args(typ2)
+ if origin is UnionType:
+ for tp in args:
+ if check_type(x, tp):
+ return True
+ return False
+ if not isinstance(x, origin):
+ return False
+ if origin in [list, set]:
+ for value in x:
+ if not check_type(value, args[0]):
+ return False
+ elif origin is tuple:
+ if len(args) != len(x):
+ return False
+ for i, value in enumerate(x):
+ if not check_type(value, args[i]):
+ return False
+ elif origin is dict:
+ for key, value in x.items():
+ if not (check_type(key, args[0]) and check_type(value, args[1])):
+ return False
+ return True
+
+@eval_context_decorator
+def _check_slot_code_type(this: UUID, slot: str, typ: type, unique = False, *, mandatory: bool = False, blacklist: list[str] | None = None) -> list[str]:
+ err = []
+ if not (has_slot(this, slot)):
+ if mandatory:
+ err.append(f"Missing mandatory slot: '{slot}'.")
+ return err
+ try:
+ try:
+ x = eval(get_slot_value(this, slot))
+ except Exception as _:
+ err.append(f"Invalid python code for {slot}: {get_slot_value(this, slot)}")
+ return err
+
+ if not check_type(x, typ):
+ try:
+ typ_rep = typ.__name__
+ except AttributeError:
+ typ_rep = str(typ)
+ err.append(f"Unexpected type for {slot}: {type(x).__name__}, expected type: {typ_rep}")
+ return err
+
+ if unique and len(set(x)) != len(x):
+ err.append(f"elements must be unique")
+ return err
+ except Exception as e:
+ err.append(f"Unexpected error: {e}")
+ return err
+
+
+@eval_context_decorator
+def _check_jinja2_code(this: UUID, slot: str) -> list[str]:
+ if len(err:= check_slot_code_type(this, slot, str, mandatory=True)) != 0:
+ return err
+ s = eval(get_slot_value(this, slot))
+ try:
+ template = Template(s)
+ template.render(**{"data":[{}]})
+ return []
+ except Exception as e:
+ return [f"Invalid Jinja2 syntax for '{slot}':\n{e}\n{s}"]
+
+
+@eval_context_decorator
+def _check_code_syntax(code) -> list[str]:
+ try:
+ compile(code, "", "exec")
+ return []
+ except SyntaxError as e:
+ return [f"Invalid python code for: `{code}` :\n{e}"]
+
+mm_eval_context = {
+ "check_all_connections": _check_all_connections,
+ "check_outgoing_exec": _check_outgoing_exec,
+ "check_incoming_exec": _check_incoming_exec,
+ "check_outgoing_data": _check_outgoing_data,
+ "check_incoming_data": _check_incoming_data,
+ "check_slot_code_type": _check_slot_code_type,
+ "check_code_syntax": _check_code_syntax,
+ "check_jinja2_code": _check_jinja2_code,
+}
diff --git a/transformation/schedule/models/eval_context_stub.pyi b/transformation/schedule/models/eval_context_stub.pyi
new file mode 100644
index 0000000..9811909
--- /dev/null
+++ b/transformation/schedule/models/eval_context_stub.pyi
@@ -0,0 +1,6 @@
+def check_outgoing_exec(this, err: list[str], labels: list[str]) -> bool: ...
+def check_incoming_exec(this, err: list[str], labels: list[str]) -> bool: ...
+def check_outgoing_data(this, err: list[str], labels: list[str]) -> bool: ...
+def check_incoming_data(this, err: list[str], labels: list[str]) -> bool: ...
+def check_is_type(s: str, typ: any) -> bool: ...
+def check_code_syntax(code) -> bool: ...
diff --git a/transformation/schedule/models/scheduling_MM.od b/transformation/schedule/models/scheduling_MM.od
new file mode 100644
index 0000000..73f5131
--- /dev/null
+++ b/transformation/schedule/models/scheduling_MM.od
@@ -0,0 +1,194 @@
+abstract class Exec
+
+association Conn_exec [0..*] Exec -> Exec [0..*] {
+ String from;
+ String to;
+}
+
+abstract class Data
+association Conn_data [0..*] Data -> Data [0..*] {
+ String from;
+ String to;
+}
+
+class Start [1..1] (Exec, Data) {
+ optional ActionCode ports_exec_out;
+ optional ActionCode ports_data_out;
+ ```
+ err = check_slot_code_type(this, "ports_exec_out", list[str] | set[str], True)
+ err.extend(check_slot_code_type(this, "ports_data_out", list[str] | set[str], True))
+ if len(err) == 0:
+ err = check_all_connections(this, [
+ [],
+ eval(get_slot_value_default(this, "ports_exec_out", "['out']")),
+ [],
+ eval(get_slot_value_default(this, "ports_data_out", "[]"))
+ ])
+ err
+ ```;
+}
+class End [1..1] (Exec, Data) {
+ optional ActionCode ports_exec_in;
+ optional ActionCode ports_data_in;
+ ```
+ err = check_slot_code_type(this, "ports_exec_in", list[str] | set[str], True)
+ err.extend(check_slot_code_type(this, "ports_data_in", list[str] | set[str], True))
+ if len(err) == 0:
+ err = check_all_connections(this, [
+ eval(get_slot_value_default(this, "ports_exec_in", "['in']")),
+ [],
+ eval(get_slot_value_default(this, "ports_data_in", "[]")),
+ []
+ ])
+ err
+ ```;
+}
+
+abstract class Rule (Exec, Data)
+{
+ String file;
+}
+
+class Match (Rule)
+{
+ optional Integer n;
+ ```
+ check_all_connections(this, [
+ ["in"],
+ ["success", "fail"],
+ ["in"],
+ ["out"]
+ ])
+ ```;
+}
+
+class Rewrite (Rule)
+{
+ ```
+ check_all_connections(this, [
+ ["in"],
+ ["out"],
+ ["in"],
+ ["out"]
+ ])
+ ```;
+}
+
+class Action (Exec, Data)
+{
+ optional ActionCode ports_exec_in;
+ optional ActionCode ports_exec_out;
+ optional ActionCode ports_data_in;
+ optional ActionCode ports_data_out;
+ optional ActionCode init `check_code_syntax(get_value(get_target(this)))`;
+ ActionCode action `check_code_syntax(get_value(get_target(this)))`;
+ ```
+ err = check_slot_code_type(this, "ports_exec_in", list[str] | set[str], True)
+ err.extend(check_slot_code_type(this, "ports_exec_out", list[str] | set[str], True))
+ err.extend(check_slot_code_type(this, "ports_data_in", list[str] | set[str], True))
+ err.extend(check_slot_code_type(this, "ports_data_out", list[str] | set[str], True))
+ if len(err) == 0:
+ err = check_all_connections(this, [
+ eval(get_slot_value_default(this, "ports_exec_in", "['in']")),
+ eval(get_slot_value_default(this, "ports_exec_out", "['out']")),
+ eval(get_slot_value_default(this, "ports_data_in", "[]")),
+ eval(get_slot_value_default(this, "ports_data_out", "[]"))
+ ])
+ err
+ ```;
+
+}
+
+class Modify (Data)
+{
+ optional ActionCode rename;
+ optional ActionCode delete;
+ ```
+ err = check_slot_code_type(this, "rename", dict[str,str])
+ err.extend(check_slot_code_type(this, "delete", list[str] | set[str]))
+ if len(err) == 0:
+ if not (eval(get_slot_value_default(this, "rename", "dict()")).keys().isdisjoint(
+ eval(get_slot_value_default(this, "delete", "set()")))
+ ):
+ err.append("rename and delete should be disjoint.")
+ err.extend(check_all_connections(this, [
+ [],
+ [],
+ ["in"],
+ ["out"]
+ ]))
+ err
+ ```;
+}
+
+class Merge (Data)
+{
+ ActionCode ports_data_in;
+ ```
+ err = check_slot_code_type(this, "ports_data_in", list[str] | set[str], True, mandatory = True)
+ if len(err) == 0:
+ err = check_all_connections(this, [
+ [],
+ [],
+ eval(get_slot_value(this, "ports_data_in")),
+ ["out"]
+ ])
+ err
+ ```;
+}
+
+class Store (Exec, Data)
+{
+ ActionCode ports;
+ ```
+ err = check_slot_code_type(this, "ports", list[str] | set[str], True, mandatory = True, blacklist = ["in", "out"])
+ if len(err) == 0:
+ err = check_all_connections(this, [
+ [*(ports:= eval(get_slot_value(this, "ports"))), "in"],
+ [*ports, "out"],
+ ports,
+ ["out"]
+ ])
+ err
+ ```;
+}
+
+class Schedule (Exec, Data)
+{
+ String file;
+ ```
+ check_all_connections(this, [
+ {get_slot_value(conn, "to") for conn in get_incoming(this, "Conn_exec")},
+ {get_slot_value(conn, "from") for conn in get_outgoing(this, "Conn_exec")},
+ {get_slot_value(conn, "to") for conn in get_incoming(this, "Conn_data")},
+ {get_slot_value(conn, "from") for conn in get_outgoing(this, "Conn_data")}
+ ])
+ ```;
+}
+
+class Loop(Exec, Data)
+{
+ ```
+ check_all_connections(this, [
+ ["in"],
+ ["it", "out"],
+ ["in"],
+ ["out"]
+ ])
+ ```;
+}
+
+class Print(Exec, Data)
+{
+ optional Boolean event;
+ optional String label;
+ optional ActionCode custom `check_jinja2_code(get_source(this), "custom")`;
+ ```
+ check_all_connections(this, [
+ ["in"],
+ ["out"],
+ ["in"],
+ []
+ ])
+ ```;
+}
\ No newline at end of file
diff --git a/transformation/schedule/rule_executor.py b/transformation/schedule/rule_executor.py
new file mode 100644
index 0000000..da97b2f
--- /dev/null
+++ b/transformation/schedule/rule_executor.py
@@ -0,0 +1,46 @@
+from typing import Any
+from uuid import UUID
+
+from api.od import ODAPI
+from transformation.matcher import match_od
+from transformation.rewriter import rewrite
+from util.loader import parse_and_check
+
+
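+# Thin wrapper around the generic matcher/rewriter: loads RAMified rule patterns
+# and applies them to a host model.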
+class RuleExecutor:
+ def __init__(self, state, mm: UUID, mm_ramified: UUID, eval_context={}):
+ self.state = state
+ self.mm = mm
+ self.mm_ramified = mm_ramified
+ self.eval_context = eval_context
+
+ # Generates matches.
+ # Every match is a dictionary with entries LHS_element_name -> model_element_name
+ def match_rule(self, m: UUID, lhs: UUID, *, pivot: dict[Any, Any]):
+ lhs_matcher = match_od(
+ self.state,
+ host_m=m,
+ host_mm=self.mm,
+ pattern_m=lhs,
+ pattern_mm=self.mm_ramified,
+ eval_context=self.eval_context,
+ pivot=pivot,
+ )
+ return lhs_matcher
+
+ def rewrite_rule(self, od: ODAPI, rhs: UUID, *, pivot: dict[Any, Any]):
+ rhs = rewrite(
+ self.state,
+ rhs_m=rhs,
+ pattern_mm=self.mm_ramified,
+ lhs_match=pivot,
+ host_m=od.m,
+ host_mm=od.mm,
+ eval_context=self.eval_context,
+ )
+ od.recompute_mappings()
+ yield rhs
+
+ def load_match(self, file: str):
+ with open(file, "r") as f:
+ return parse_and_check(self.state, f.read(), self.mm_ramified, file)
diff --git a/transformation/schedule/rule_scheduler.py b/transformation/schedule/rule_scheduler.py
new file mode 100644
index 0000000..2b2e133
--- /dev/null
+++ b/transformation/schedule/rule_scheduler.py
@@ -0,0 +1,338 @@
+from __future__ import annotations
+
+import importlib.util
+import io
+import os
+import re
+import sys
+
+from pathlib import Path
+from time import time
+from typing import cast, TYPE_CHECKING
+
+from jinja2 import FileSystemLoader, Environment
+
+from concrete_syntax.textual_od import parser as parser_od
+from concrete_syntax.textual_cd import parser as parser_cd
+from api.od import ODAPI
+from bootstrap.scd import bootstrap_scd
+from transformation.schedule.rule_executor import RuleExecutor
+from transformation.schedule.generator import schedule_generator
+from transformation.schedule.models.eval_context import mm_eval_context
+from transformation.schedule.schedule_lib import ExecNode, Start
+from framework.conformance import Conformance, render_conformance_check_result, eval_context_decorator
+from state.devstate import DevState
+from examples.petrinet.renderer import render_petri_net_to_dot
+
+from drawio2py import parser
+from drawio2py.abstract_syntax import DrawIOFile, Edge, Vertex, Cell
+from icecream import ic
+
+from transformation.schedule.schedule_lib.funcs import IdGenerator
+
+if TYPE_CHECKING:
+ from transformation.schedule.schedule import Schedule
+
+
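+# Loads a schedule (.od, .py or .drawio), optionally checks conformance, compiles it to
+# Python and runs the generated schedule against a host model.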
+class RuleScheduler:
+ __slots__ = (
+ "rule_executor",
+ "schedule_main",
+ "loaded",
+ "out",
+ "verbose",
+ "conformance",
+ "directory",
+ "eval_context",
+ "_state",
+ "_mmm_cs",
+ "sub_schedules",
+ "end_time",
+ )
+
+ def __init__(
+ self,
+ state,
+ mm_rt,
+ mm_rt_ramified,
+ *,
+ outstream=sys.stdout,
+ verbose: bool = False,
+ conformance: bool = True,
+ directory: str = "",
+ eval_context: dict[str, any] = None,
+ ):
+ self.rule_executor: RuleExecutor = RuleExecutor(state, mm_rt, mm_rt_ramified)
+ self.schedule_main: Schedule | None = None
+ self.out = outstream
+ self.verbose: bool = verbose
+ self.conformance: bool = conformance
+ self.directory: Path = Path.cwd() / directory
+ if eval_context is None:
+ eval_context = {}
+ self.eval_context: dict[str, any] = eval_context
+
+ self.loaded: dict[str, dict[str, any]] = {"od": {}, "py": {}, "drawio": {}, "rules": {}}
+
+
+ self._state = DevState()
+ self._mmm_cs = bootstrap_scd(self._state)
+
+ self.end_time = float("inf")
+ self.sub_schedules = float("inf")
+
+ def load_schedule(self, filename):
+ return self._load_schedule(filename, _main=True) is not None
+
+
+ def _load_schedule(self, filename: str, *, _main = True) -> Schedule | None:
+ if filename.endswith(".drawio"):
+ if (filename := self._generate_schedule_drawio(filename)) is None:
+ return None
+
+ if filename.endswith(".od"):
+ if (filename := self._generate_schedule_od(filename)) is None:
+ return None
+ if filename.endswith(".py"):
+ s = self._load_schedule_py(filename, _main=_main)
+ return s
+
+ raise Exception(f"Error unknown file: {filename}")
+
+ def _load_schedule_py(self, filename: str, *, _main = True) -> "Schedule":
+ if (s:= self.loaded["py"].get(filename, None)) is not None:
+ return s
+
+ spec = importlib.util.spec_from_file_location(filename, str(self.directory / filename))
+ schedule_module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(schedule_module)
+ self.loaded["py"][filename] = (s:= schedule_module.Schedule())
+ if _main:
+ self.schedule_main = s
+ self.load_matchers(s)
+ return s
+
+ def _generate_schedule_od(self, filename: str) -> str | None:
+ if (s:= self.loaded.get(("od", filename), None)) is not None:
+ return s
+ file = str(self.directory / filename)
+ self._print("Generating schedule ...")
+ with open(f"{os.path.dirname(__file__)}/models/scheduling_MM.od", "r") as f_MM:
+ mm_cs = f_MM.read()
+ try:
+ with open(file, "r") as f_M:
+ m_cs = f_M.read()
+ except FileNotFoundError:
+ self._print(f"File not found: {file}")
+ return None
+
+ self._print("OK\n\nParsing models\n\tParsing meta model")
+ try:
+ scheduling_mm = parser_cd.parse_cd(
+ self._state,
+ m_text=mm_cs,
+ )
+ except Exception as e:
+ self._print(
+ f"Error while parsing meta-model: scheduling_MM.od\n\t{e}"
+ )
+ return None
+ self._print(f"\tParsing '{filename}' model")
+ try:
+ scheduling_m = parser_od.parse_od(
+ self._state, m_text=m_cs, mm=scheduling_mm
+ )
+ except Exception as e:
+ self._print(f"\033[91mError while parsing model: {filename}\n\t{e}\033[0m")
+ return None
+ if self.conformance:
+ success = True
+ self._print("OK\n\tmeta-meta-model a valid class diagram")
+ conf_err = Conformance(
+ self._state, self._mmm_cs, self._mmm_cs
+ ).check_nominal()
+ b = len(conf_err)
+ success = success and not b
+ self._print(
+ f"\t\t{'\033[91m' if b else ''}{render_conformance_check_result(conf_err)}{'\033[0m' if b else ''}"
+ )
+ self._print(
+ f"Is our '{filename}' model a valid 'scheduling_MM.od' diagram?"
+ )
+ conf_err = Conformance(
+ self._state, scheduling_m, scheduling_mm, eval_context=mm_eval_context
+ ).check_nominal()
+ b = len(conf_err)
+ success = success and not b
+ self._print(
+ f"\t\t{'\033[91m' if b else ''}{render_conformance_check_result(conf_err)}{'\033[0m' if b else ''}"
+ )
+ if not success:
+ return None
+ od = ODAPI(self._state, scheduling_m, scheduling_mm)
+ g = schedule_generator(od)
+
+ output_buffer = io.StringIO()
+ g.generate_schedule(output_buffer)
+ outfilename = f"{".".join(filename.split(".")[:-1])}.py"
+ open(self.directory / outfilename, "w", encoding='utf-8').write(output_buffer.getvalue())
+ self._print("Schedule generated")
+ self.loaded[("od", filename)] = outfilename
+ return outfilename
+
+ def _print(self, *args) -> None:
+ if self.verbose:
+ print(*args, file=self.out)
+
+ def load_matchers(self, schedule: "Schedule") -> None:
+ matchers = dict()
+ for file in schedule.get_matchers():
+ if (r:= self.loaded.get(("rule", file), None)) is None:
+ self.loaded[("rule", file)] = (r:= self.rule_executor.load_match(self.directory / file))
+ matchers[file] = r
+ schedule.init_schedule(self, self.rule_executor, matchers)
+
+ def generate_dot(self, file: str) -> None:
+ env = Environment(
+ loader=FileSystemLoader(
+ os.path.join(os.path.dirname(__file__), "templates")
+ )
+ )
+ env.trim_blocks = True
+ env.lstrip_blocks = True
+ template_dot = env.get_template("schedule_dot.j2")
+
+ nodes = []
+ edges = []
+ visit = set()
+ for schedule in self.loaded["py"].values():
+ schedule.generate_dot(nodes, edges, visit, template_dot)
+ with open(self.directory / file, "w") as f_dot:
+ f_dot.write(template_dot.render(nodes=nodes, edges=edges))
+
+ def run(self, model) -> tuple[int, str]:
+ self._print("Start simulation")
+ if 'pydevd' in sys.modules:
+ self.end_time = time() + 1000
+ else:
+ self.end_time = time() + 10000
+ return self._runner(model, self.schedule_main, "out", IdGenerator.generate_exec_id(), {})
+
+ def _runner(self, model, schedule: Schedule, exec_port: str, exec_id: int, data: dict[str, any]) -> tuple[int, any]:
+ self._generate_stackframe(schedule, exec_id)
+ cur_node = schedule.start
+ cur_node.run_init(exec_port, exec_id, data)
+ while self.end_time > time():
+ cur_node, port = cur_node.nextState(exec_id)
+ termination_reason = cur_node.execute(port, exec_id, model)
+ if termination_reason is not None:
+ self._delete_stackframe(schedule, exec_id)
+ return termination_reason
+
+ self._delete_stackframe(schedule, exec_id)
+ return -1, "limit reached"
+
+
+ def _generate_stackframe(self, schedule: Schedule, exec_id: int) -> None:
+ for node in schedule.nodes:
+ node.generate_stack_frame(exec_id)
+
+ def _delete_stackframe(self, schedule: Schedule, exec_id: int) -> None:
+ for node in schedule.nodes:
+ node.delete_stack_frame(exec_id)
+
+
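+    # Converts a .drawio file into one .od model per page (a "main" page is required),
+    # written to a directory named after the file.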
+ def _generate_schedule_drawio(self, filename:str) -> str | None:
+ if (s:= self.loaded["drawio"].get(filename, None)) is not None:
+ return s
+ env = Environment(
+ loader=FileSystemLoader(
+ os.path.join(os.path.dirname(__file__), "templates")
+ )
+ )
+ env.trim_blocks = True
+ env.lstrip_blocks = True
+ template = env.get_template("schedule_muMLE.j2")
+ main: bool = False
+
+ node_map: dict[str, list[str | dict[str,str]]]
+ id_counter: int
+ def _get_node_id_map(elem: Cell) -> list[str | dict[str,str]]:
+ nonlocal node_map, id_counter
+ if (e_id := node_map.get(elem.id, None)) is None:
+ e_id = [f"{re.sub(r'[^a-zA-Z1-9_]', '', elem.properties["name"])}_{id_counter}", {}]
+ id_counter += 1
+ node_map[elem.id] = e_id
+ return e_id
+
+ edges: list[tuple[tuple[str, str, str, str], tuple[str,str,str,str]]] = []
+ def _parse_edge(elem: Edge):
+ nonlocal edges
+ try:
+ edges.append((
+ (
+ _get_node_id_map(elem.source.parent.parent.parent)[0],
+ elem.source.properties["label"],
+ elem.source.properties["type"],
+ elem.source.parent.value
+ ),
+ (
+ _get_node_id_map(elem.target.parent.parent.parent)[0],
+ elem.target.properties["label"],
+ elem.target.properties["type"],
+ elem.target.parent.value
+ )
+ ))
+ except AttributeError as e:
+ raise Exception(f"Missing attribute {e}")
+ return
+
+ def _parse_vertex(elem: Vertex):
+ nonlocal edges
+ try:
+ elem_map = _get_node_id_map(elem)
+ elem_map[1] = elem.properties
+ properties = elem_map[1]
+ properties.pop("label")
+ properties.pop("name")
+ properties.pop("placeholders")
+ if properties.get("type") == "Schedule":
+ if not re.search(r'\.(py|od)$', properties["file"]):
+ properties["file"] = f"{filename}/{properties["file"]}.od"
+ except AttributeError as e:
+ raise Exception(f"Missing attribute {e}")
+ return
+
+
+ abstract_syntax: DrawIOFile = parser.Parser.parse(str(self.directory / filename))
+ filename = filename.removesuffix(".drawio")
+ (self.directory / filename).mkdir(parents=False, exist_ok=True)
+ for page in abstract_syntax.pages:
+ if page.name == "main":
+ main = True
+ if len(page.root.children) != 1:
+ raise Exception(f"Only 1 layer allowed (keybind: ctr+shift+L)")
+ edges = []
+ id_counter = 1
+ node_map = {}
+
+ for element in page.root.children[0].children:
+ match element.__class__.__name__:
+ case "Edge":
+ _parse_edge(cast(Edge, element))
+ case "Vertex":
+ _parse_vertex(cast(Vertex, element))
+ for elem in element.children[0].children:
+ if elem.__class__.__name__ == "Edge":
+ _parse_edge(cast(Edge, elem))
+ continue
+ case _:
+ raise Exception(f"Unexpected element: {element}")
+ with open(self.directory / f"{filename}/{page.name}.od", "w", encoding="utf-8") as f:
+ f.write(template.render(nodes=node_map, edges=edges))
+ if main:
+ self.loaded["drawio"][filename] = (filename_out := f"{filename}/main.od")
+ return filename_out
+
+ self._print("drawio schedule requires main page to automatically load.")
+ return None
diff --git a/transformation/schedule/schedule.pyi b/transformation/schedule/schedule.pyi
new file mode 100644
index 0000000..0edc014
--- /dev/null
+++ b/transformation/schedule/schedule.pyi
@@ -0,0 +1,18 @@
+from typing import TYPE_CHECKING
+from transformation.schedule.schedule_lib import *
+if TYPE_CHECKING:
+ from transformation.schedule.rule_executor import RuleExecutor
+ from rule_scheduler import RuleScheduler
+
+class Schedule:
+ __slots__ = {
+ "start",
+ "end",
+ "nodes"
+ }
+ def __init__(self): ...
+
+ @staticmethod
+ def get_matchers(): ...
+ def init_schedule(self, scheduler: RuleScheduler, rule_executor: RuleExecutor, matchers): ...
+ def generate_dot(self, *args, **kwargs): ...
\ No newline at end of file
diff --git a/transformation/schedule/schedule_lib/Schedule_lib.xml b/transformation/schedule/schedule_lib/Schedule_lib.xml
new file mode 100644
index 0000000..5dd1480
--- /dev/null
+++ b/transformation/schedule/schedule_lib/Schedule_lib.xml
@@ -0,0 +1,93 @@
+[
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"start_name\" type=\"Start\" ports_exec_out=\"["out"]\" ports_data_out=\"[]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><mxCell id=\"5\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"5\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 100,
+ "aspect": "fixed",
+ "title": "Start Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"end_name\" type=\"End\" ports_exec_in=\"["in"]\" ports_data_in=\"[]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell></root></mxGraphModel>",
+ "w": 160,
+ "h": 100,
+ "aspect": "fixed",
+ "title": "End Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type% %file% matches: %n%\" placeholders=\"1\" name=\"match_name\" type=\"Match\" file=\"rule_filename.od\" n=\"1\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=60;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"220\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"60\" width=\"160\" height=\"160\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"7\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"success\" type=\"exec\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"fail\" type=\"exec\" id=\"10\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 220,
+ "aspect": "fixed",
+ "title": "Match Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type% %file%\" placeholders=\"1\" name=\"rewrite_name\" type=\"Rewrite\" file=\"rule_filename.od\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry y=\"1.1368683772161603e-13\" width=\"160\" height=\"150\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"110\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"-70\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"out\" type=\"data\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 150,
+ "aspect": "fixed",
+ "title": "Rewrite Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"action_name\" type=\"Action\" ports_exec_in=\"["in"]\" ports_exec_out=\"["out"]\" ports_data_in=\"[]\" ports_data_out=\"[]\" action=\"print("hello world")\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 100,
+ "aspect": "fixed",
+ "title": "Action Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"modify_name\" type=\"Modify\" rename=\"{"t":"transition"}\" delete=\"[]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 100,
+ "aspect": "fixed",
+ "title": "Modify Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"merge_name\" type=\"Merge\" ports_data_in=\"["input1", "input2"]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"150\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"110\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"input1\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input2\" type=\"data\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"7\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 150,
+ "aspect": "fixed",
+ "title": "Merge Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"store_name\" type=\"Store\" ports=\"["input1"]\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"200\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"160\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input1\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input1\" type=\"data\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"8\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"8\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"out\" type=\"exec\" id=\"10\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"8\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"input1\" type=\"exec\" id=\"11\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"8\"><mxGeometry x=\"10\" y=\"60\" 
width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 200,
+ "aspect": "fixed",
+ "title": "Store Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"loop_name\" type=\"Loop\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"200\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"160\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"data\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"exec\" id=\"6\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"7\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"160\" as=\"geometry\"><mxRectangle width=\"80\" height=\"160\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"110\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"it\" type=\"exec\" id=\"9\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"out\" type=\"exec\" id=\"10\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"7\"><mxGeometry x=\"10\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 200,
+ "aspect": "fixed",
+ "title": "Loop Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type% %file%\" placeholders=\"1\" name=\"schedule_name\" type=\"Schedule\" file=\"schedule_page-name\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"100\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"60\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"60\" as=\"geometry\"><mxRectangle width=\"80\" height=\"60\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 100,
+ "aspect": "fixed",
+ "title": "Schedule Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"%name%: %type%\" placeholders=\"1\" name=\"print_name\" type=\"Print\" event=\"False\" custom=\"{{ data }}\" id=\"2\"><mxCell style=\"shape=table;childLayout=tableLayout;startSize=40;collapsible=0;recursiveResize=1;expand=0;fontStyle=1;editable=1;movable=1;resizable=1;rotatable=0;deletable=1;locked=0;connectable=0;allowArrows=0;pointerEvents=0;perimeter=rectanglePerimeter;rounded=1;container=1;dropTarget=0;swimlaneHead=1;swimlaneBody=1;top=1;noLabel=0;autosize=0;resizeHeight=0;spacing=2;metaEdit=1;resizeWidth=0;arcSize=10;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"160\" height=\"150\" as=\"geometry\"/></mxCell></object><mxCell id=\"3\" value=\"\" style=\"shape=tableRow;horizontal=0;swimlaneHead=0;swimlaneBody=0;top=0;left=0;strokeColor=inherit;bottom=0;right=0;dropTarget=0;fontStyle=0;fillColor=none;points=[[0,0.5],[1,0.5]];startSize=0;collapsible=0;recursiveResize=1;expand=0;rounded=0;allowArrows=0;connectable=0;autosize=1;resizeHeight=1;rotatable=0;\" vertex=\"1\" parent=\"2\"><mxGeometry y=\"40\" width=\"160\" height=\"110\" as=\"geometry\"/></mxCell><mxCell id=\"4\" value=\"Input\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=60;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"in\" type=\"exec\" id=\"5\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"4\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><mxCell id=\"6\" value=\"Output\" style=\"swimlane;swimlaneHead=0;swimlaneBody=0;fontStyle=0;strokeColor=inherit;connectable=0;fillColor=none;startSize=40;collapsible=0;recursiveResize=1;expand=0;allowArrows=0;autosize=1;rotatable=0;noLabel=1;overflow=hidden;swimlaneLine=0;editable=0;\" vertex=\"1\" parent=\"3\"><mxGeometry x=\"80\" width=\"80\" height=\"110\" as=\"geometry\"><mxRectangle width=\"80\" height=\"110\" as=\"alternateBounds\"/></mxGeometry></mxCell><object label=\"out\" type=\"exec\" id=\"7\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"10\" y=\"10\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object><object label=\"in\" type=\"data\" id=\"8\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"6\"><mxGeometry x=\"-70\" y=\"60\" width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 160,
+ "h": 150,
+ "aspect": "fixed",
+ "title": "Print Node"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"out\" type=\"exec\" id=\"2\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 60,
+ "h": 40,
+ "aspect": "fixed",
+ "title": "Exec Gate"
+ },
+ {
+ "xml": "<mxGraphModel><root><mxCell id=\"0\"/><mxCell id=\"1\" parent=\"0\"/><object label=\"in\" type=\"data\" id=\"2\"><mxCell style=\"rounded=0;whiteSpace=wrap;html=1;fillColor=#d5e8d4;strokeColor=#82b366;\" vertex=\"1\" parent=\"1\"><mxGeometry width=\"60\" height=\"40\" as=\"geometry\"/></mxCell></object></root></mxGraphModel>",
+ "w": 60,
+ "h": 40,
+ "aspect": "fixed",
+ "title": "Data Gate"
+ }
+]
\ No newline at end of file
diff --git a/transformation/schedule/schedule_lib/__init__.py b/transformation/schedule/schedule_lib/__init__.py
new file mode 100644
index 0000000..4df5a3d
--- /dev/null
+++ b/transformation/schedule/schedule_lib/__init__.py
@@ -0,0 +1,31 @@
+from .action import Action
+from .data_node import DataNode
+from .end import End
+from .exec_node import ExecNode
+from .loop import Loop
+from .match import Match
+from .merge import Merge
+from .modify import Modify
+from .null_node import NullNode
+from .print import Print
+from .rewrite import Rewrite
+from .start import Start
+from .store import Store
+from .sub_schedule import SubSchedule
+
+__all__ = [
+ "Action",
+ "DataNode",
+ "End",
+ "ExecNode",
+ "Loop",
+ "Match",
+ "Merge",
+ "Modify",
+ "NullNode",
+ "Rewrite",
+ "Print",
+ "Start",
+ "Store",
+ "SubSchedule",
+]
diff --git a/transformation/schedule/schedule_lib/action.py b/transformation/schedule/schedule_lib/action.py
new file mode 100644
index 0000000..9f10406
--- /dev/null
+++ b/transformation/schedule/schedule_lib/action.py
@@ -0,0 +1,106 @@
+from typing import List, override, Type
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from .funcs import not_visited, generate_dot_node
+from .exec_node import ExecNode
+from .data_node import DataNode
+
+class ActionState:
+ def __init__(self):
+ self.var = {"output_gate": "out"}
+
+class Action(ExecNode, DataNode):
+ def __init__(
+ self,
+ ports_exec_in: list[str],
+ ports_exec_out: list[str],
+ ports_data_in: list[str],
+ ports_data_out: list[str],
+ code: str = "",
+ init: str = "",
+ ) -> None:
+ self.gates: tuple[list[str], list[str], list[str], list[str]] = (ports_exec_in, ports_exec_out, ports_data_in, ports_data_out)
+ super().__init__()
+ self.state: dict[int, ActionState] = {}
+ self.var_globals = {}
+ self.code = code
+ self.init = init
+
+ @override
+ def get_exec_input_gates(self) -> list[str]:
+ return self.gates[0]
+
+ @override
+ def get_exec_output_gates(self) -> list[str]:
+ return self.gates[1]
+
+ @override
+ def get_data_input_gates(self) -> list[str]:
+ return self.gates[2]
+
+ @override
+ def get_data_output_gates(self) -> list[str]:
+ return self.gates[3]
+
+ @override
+ def nextState(self, exec_id: int) -> tuple["ExecNode", str]:
+ state = self.get_state(exec_id)
+ return self.next_node[state.var["output_gate"]]
+
+ def get_state(self, exec_id) -> ActionState:
+ return self.state[exec_id]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ self.state[exec_id] = (state := ActionState())
+ if self.init:
+            exec(self.init, {"var": state.var}, {"globals": self.var_globals})
+
+    @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+        super().delete_stack_frame(exec_id)
+ self.state.pop(exec_id)
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
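+        # Run the user-supplied code block with these names in scope: `api` (ODAPI of
+        # the host model), `var` (per-execution variables; setting var["output_gate"]
+        # selects the exec gate to leave through), `data_in`/`data_out` (connected
+        # data buffers) and `globals` (shared across executions of this node).
+        # Afterwards, subscribers of every output gate are notified.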
+ state = self.get_state(exec_id)
+ exec(
+ self.code,
+ {
+ "api": od,
+ "var": state.var,
+ "data_in": {port: value.get_data(exec_id) for port, value in self.data_in.items() if value is not None},
+ "data_out": {port: value.get_data(exec_id) for port, value in self.data_out.items() if value is not None},
+ "globals": self.var_globals,
+ },
+ )
+ for gate, d in self.data_out.items():
+ DataNode.input_event(self, gate, exec_id)
+ return None
+
+ def input_event(self, gate: str, exec_id: int) -> None:
+ return
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"action",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ ExecNode.generate_dot(self, nodes, edges, visited, template)
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/data.py b/transformation/schedule/schedule_lib/data.py
new file mode 100644
index 0000000..7cafc5b
--- /dev/null
+++ b/transformation/schedule/schedule_lib/data.py
@@ -0,0 +1,83 @@
+from typing import Any, Generator, Iterator, TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from transformation.schedule.schedule_lib import DataNode
+
+
+class DataState:
+    def __init__(self) -> None:
+ self.data: list[dict[Any, Any]] = []
+
+class Data:
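+    """Per-execution buffer of matches owned by a DataNode.
+
+    Every execution context (exec_id) gets its own DataState frame, so nested
+    runs (e.g. via SubSchedule) do not interfere with each other.
+    """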
+ __slots__ = ("state", "_parent")
+
+ def __init__(self, parent: "DataNode") -> None:
+ self.state: dict[int, DataState] = dict()
+ self._parent = parent
+
+ def __dir__(self):
+ return [attr for attr in super().__dir__() if attr != "_super"]
+
+ def get_data(self, exec_id: int) -> list[dict[str, str]]:
+ state = self.get_state(exec_id)
+ return state.data
+
+ def get_state(self, exec_id) -> DataState:
+ return self.state[exec_id]
+
+ def store_data(self, exec_id: int, data_gen: Generator, n: int) -> bool:
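+        # Pull up to `n` matches from `data_gen` into this frame. n == 0 stores
+        # nothing and succeeds; n == float("inf") drains the generator and succeeds
+        # if at least one match was found; any other n requires exactly n matches,
+        # otherwise the partial result is discarded and False is returned.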
+ state = self.get_state(exec_id)
+ state.data.clear()
+ if n == 0:
+ return True
+ i: int = 0
+ while (match := next(data_gen, None)) is not None:
+ state.data.append(match)
+ i += 1
+ if i >= n:
+ break
+ else:
+ if n == float("inf"):
+ return bool(len(state.data))
+ state.data.clear()
+ return False
+ return True
+
+ def get_parent(self) -> "DataNode":
+ return self._parent
+
+ def replace(self, exec_id: int, data: list[dict[str, str]]) -> None:
+ state = self.get_state(exec_id)
+ state.data.clear()
+ state.data.extend(data)
+
+ def append(self, exec_id: int, data: dict[str, str]) -> None:
+ self.get_state(exec_id).data.append(data)
+
+ def extend(self, exec_id: int, data: list[dict[str, str]]) -> None:
+ self.get_state(exec_id).data.extend(data)
+
+ def clear(self, exec_id: int) -> None:
+ self.get_state(exec_id).data.clear()
+
+ def pop(self, exec_id: int, index: int =-1) -> Any:
+ return self.get_state(exec_id).data.pop(index)
+
+ def empty(self, exec_id: int) -> bool:
+ return len(self.get_state(exec_id).data) == 0
+
+ def __getitem__(self, index):
+ raise NotImplementedError
+
+ def __iter__(self, exec_id: int) -> Iterator[dict[str, str]]:
+ return self.get_state(exec_id).data.__iter__()
+
+ def __len__(self, exec_id: int) -> int:
+ return self.get_state(exec_id).data.__len__()
+
+ def generate_stack_frame(self, exec_id: int) -> None:
+        self.state[exec_id] = DataState()
+
+ def delete_stack_frame(self, exec_id: int) -> None:
+ self.state.pop(exec_id)
\ No newline at end of file
diff --git a/transformation/schedule/schedule_lib/data_node.py b/transformation/schedule/schedule_lib/data_node.py
new file mode 100644
index 0000000..01e9b76
--- /dev/null
+++ b/transformation/schedule/schedule_lib/data_node.py
@@ -0,0 +1,101 @@
+from abc import abstractmethod
+from typing import Any, Generator, List, override
+
+from jinja2 import Template
+
+from .data import Data
+from .funcs import generate_dot_edge
+from .node import Node
+
+
+class DataNodeState:
+ def __init__(self) -> None:
+ super().__init__()
+
+
+class DataNode(Node):
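+    """Mixin for nodes that produce and/or consume match data.
+
+    Every output gate owns a Data buffer (data_out); an input gate holds a
+    reference to the producing node's buffer (data_in) once connected. eventsub
+    records, per output gate, the (node, gate) pairs to notify via input_event
+    whenever new data is stored on that gate.
+    """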
+ def __init__(self) -> None:
+ super().__init__()
+ self.eventsub: dict[str, list[tuple[DataNode, str]]] = {
+ gate: [] for gate in self.get_data_output_gates()
+ }
+ self.data_out: dict[str, Data] = {
+ name: Data(self) for name in self.get_data_output_gates()
+ }
+ self.data_in: dict[str, Data | None] = {
+ name: None for name in self.get_data_input_gates()
+ }
+
+ @staticmethod
+ def get_data_input_gates() -> List[str]:
+ return ["in"]
+
+ @staticmethod
+ def get_data_output_gates() -> List[str]:
+ return ["out"]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ for d in self.data_out.values():
+ d.generate_stack_frame(exec_id)
+
+ @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+ super().delete_stack_frame(exec_id)
+ for d in self.data_out.values():
+ d.delete_stack_frame(exec_id)
+
+ def connect_data(
+ self, data_node: "DataNode", from_gate: str, to_gate: str, eventsub=True
+ ) -> None:
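+        # Wire this node's `from_gate` output buffer to `data_node`'s `to_gate` input;
+        # with eventsub=True the consumer is also notified (input_event) whenever new
+        # data is stored on `from_gate`.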
+ if from_gate not in self.get_data_output_gates():
+ raise Exception(f"from_gate {from_gate} is not a valid port")
+ if to_gate not in data_node.get_data_input_gates():
+ raise Exception(f"to_gate {to_gate} is not a valid port")
+ data_node.data_in[to_gate] = self.data_out[from_gate]
+ if eventsub:
+ self.eventsub[from_gate].append((data_node, to_gate))
+
+ def store_data(self, exec_id, data_gen: Generator, port: str, n: int) -> None:
+ self.data_out[port].store_data(exec_id, data_gen, n)
+ for sub, gate in self.eventsub[port]:
+ sub.input_event(gate, exec_id)
+
+ def get_input_data(self, gate: str, exec_id: int) -> list[dict[Any, Any]]:
+ data = self.data_in[gate]
+ if data is None:
+ return [{}]
+ return data.get_data(exec_id)
+
+ @abstractmethod
+ def input_event(self, gate: str, exec_id: int) -> None:
+ for sub, gate_sub in self.eventsub[gate]:
+ sub.input_event(gate_sub, exec_id)
+
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ for port, data in self.data_in.items():
+ if data is not None:
+ source = data.get_parent()
+ generate_dot_edge(
+ source,
+ self,
+ edges,
+ template,
+ kwargs={
+ "prefix": "d",
+ "from_gate": [
+ port
+ for port, value in source.data_out.items()
+ if value == data
+ ][0],
+ "to_gate": port,
+ "color": "green",
+ },
+ )
+ data.get_parent().generate_dot(nodes, edges, visited, template)
+        for _gate, subs in self.eventsub.items():
+ for sub, gate in subs:
+ sub.generate_dot(nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/end.py b/transformation/schedule/schedule_lib/end.py
new file mode 100644
index 0000000..a0218d8
--- /dev/null
+++ b/transformation/schedule/schedule_lib/end.py
@@ -0,0 +1,80 @@
+from typing import List, override, Type
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from . import DataNode
+from .exec_node import ExecNode
+from .funcs import not_visited, generate_dot_node
+
+class EndState:
+ def __init__(self) -> None:
+ self.end_gate: str = ""
+
+class End(ExecNode, DataNode):
+ @override
+ def input_event(self, gate: str, exec_id: int) -> None:
+ pass
+
+ def __init__(self, ports_exec: List[str], ports_data: List[str]) -> None:
+ self.ports_exec = ports_exec
+ self.ports_data = ports_data
+ super().__init__()
+ self.state: dict[int, EndState] = {}
+
+ @override
+ def get_exec_input_gates(self):
+ return self.ports_exec
+
+ @staticmethod
+ @override
+ def get_exec_output_gates():
+ return []
+
+ @override
+ def get_data_input_gates(self):
+ return self.ports_data
+
+ @staticmethod
+ @override
+ def get_data_output_gates():
+ return []
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
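+        # Status 1 signals a completed (sub)schedule run; the result records which end
+        # gate was reached and the data collected on each connected input port.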
+ state = self.get_state(exec_id)
+ state.end_gate = port
+ return 1, {"exec_gate": state.end_gate, "data_out": {port: data.get_data(exec_id) for port, data in self.data_in.items()}}
+
+ def get_state(self, exec_id) -> EndState:
+ return self.state[exec_id]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ self.state[exec_id] = EndState()
+
+ @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+ super().delete_stack_frame(exec_id)
+ self.state.pop(exec_id)
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": "end",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ }
+ )
diff --git a/transformation/schedule/schedule_lib/exec_node.py b/transformation/schedule/schedule_lib/exec_node.py
new file mode 100644
index 0000000..ea1cc8b
--- /dev/null
+++ b/transformation/schedule/schedule_lib/exec_node.py
@@ -0,0 +1,61 @@
+from abc import abstractmethod
+from typing import override
+from jinja2 import Template
+
+from api.od import ODAPI
+from .funcs import generate_dot_edge
+from .node import Node
+
+
+class ExecNode(Node):
+ def __init__(self) -> None:
+ super().__init__()
+
+ from .null_node import NullNode
+ self.next_node: dict[str, tuple[ExecNode, str]] = {}
+ for port in self.get_exec_output_gates():
+ self.next_node[port] = (NullNode(), "in")
+
+ def nextState(self, exec_id: int) -> tuple["ExecNode", str]:
+ return self.next_node["out"]
+
+ @staticmethod
+ def get_exec_input_gates():
+ return ["in"]
+
+ @staticmethod
+ def get_exec_output_gates():
+ return ["out"]
+
+ def connect(self, next_state: "ExecNode", from_gate: str, to_gate: str) -> None:
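+        # Route control flow: when execution leaves this node via `from_gate`, it
+        # continues at `next_state`'s `to_gate` (replacing the default NullNode).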
+ if from_gate not in self.get_exec_output_gates():
+ raise Exception(f"from_gate {from_gate} is not a valid port")
+ if to_gate not in next_state.get_exec_input_gates():
+ raise Exception(f"to_gate {to_gate} is not a valid port")
+ self.next_node[from_gate] = (next_state, to_gate)
+
+ @abstractmethod
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
+ return None
+
+ @override
+ def generate_dot(
+ self, nodes: list[str], edges: list[str], visited: set[int], template: Template
+ ) -> None:
+ for out_port, edge in self.next_node.items():
+ generate_dot_edge(
+ self,
+ edge[0],
+ edges,
+ template,
+ kwargs={
+ "prefix": "e",
+ "from_gate": out_port,
+ "to_gate": edge[1],
+ "color": "darkblue",
+ },
+ )
+
+ for edge in self.next_node.values():
+ edge[0].generate_dot(nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/funcs.py b/transformation/schedule/schedule_lib/funcs.py
new file mode 100644
index 0000000..6a01eb0
--- /dev/null
+++ b/transformation/schedule/schedule_lib/funcs.py
@@ -0,0 +1,56 @@
+from typing import Callable, List
+
+from jinja2 import Template
+
+from .singleton import Singleton
+
+
+class IdGenerator(metaclass=Singleton):
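+    # Process-wide counters (singleton): node_id numbers the nodes used in the DOT
+    # output, exec_id identifies a single run of a (sub)schedule.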
+ exec_id = -1
+ node_id = -1
+
+ @classmethod
+ def generate_node_id(cls) -> int:
+ cls.node_id +=1
+ return cls.node_id
+
+ @classmethod
+ def generate_exec_id(cls) -> int:
+ cls.exec_id += 1
+ return cls.exec_id
+
+def generate_dot_wrap(func) -> Callable:
+ def wrapper(self, *args, **kwargs) -> str:
+ nodes = []
+ edges = []
+ self.reset_visited()
+ func(self, nodes, edges, *args, **kwargs)
+ return f"digraph G {{\n\t{"\n\t".join(nodes)}\n\t{"\n\t".join(edges)}\n}}"
+
+ return wrapper
+
+
+def not_visited(func) -> Callable:
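+    # Decorator for generate_dot implementations: render each node at most once by
+    # recording it in `visited` and skipping nodes that were already emitted.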
+ def wrapper(
+ self, nodes: List[str], edges: List[str], visited: set[int], *args, **kwargs
+ ) -> None:
+ if self in visited:
+ return
+ visited.add(self)
+ func(self, nodes, edges, visited, *args, **kwargs)
+
+ return wrapper
+
+
+def generate_dot_node(self, nodes: List[str], template: Template, **kwargs) -> None:
+    nodes.append(getattr(template.module, "Node")(**{**kwargs, "id": self.id}))
+
+
+def generate_dot_edge(
+ self, target, edges: List[str], template: Template, kwargs
+) -> None:
+ edges.append(
+ template.module.__getattribute__("Edge")(
+ **{**kwargs, "from_id": self.id, "to_id": target.id}
+ )
+ )
diff --git a/transformation/schedule/schedule_lib/loop.py b/transformation/schedule/schedule_lib/loop.py
new file mode 100644
index 0000000..8837080
--- /dev/null
+++ b/transformation/schedule/schedule_lib/loop.py
@@ -0,0 +1,74 @@
+import functools
+from typing import List, Generator, override, Type
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from .exec_node import ExecNode
+from .data_node import DataNode
+from .data_node import Data
+from .funcs import not_visited, generate_dot_node
+
+class Loop(ExecNode, DataNode):
+ def __init__(self) -> None:
+ super().__init__()
+ self.cur_data: Data = Data(self)
+
+ @staticmethod
+ @override
+ def get_exec_output_gates():
+ return ["it", "out"]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ self.cur_data.generate_stack_frame(exec_id)
+
+ @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+ super().delete_stack_frame(exec_id)
+ self.cur_data.delete_stack_frame(exec_id)
+
+ @override
+ def nextState(self, exec_id: int) -> tuple[ExecNode, str]:
+ return self.next_node["out" if self.data_out["out"].empty(exec_id) else "it"]
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
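+        # One loop iteration: publish the next buffered match on "out" (firing the data
+        # event); nextState then routes to "it" while matches remain and to "out" once
+        # the buffer is exhausted.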
+ self.data_out["out"].clear(exec_id)
+
+ if not self.cur_data.empty(exec_id):
+ self.data_out["out"].append(exec_id, self.cur_data.pop(exec_id,0))
+ DataNode.input_event(self, "out", exec_id)
+ return None
+
+ def input_event(self, gate: str, exec_id: int) -> None:
+ self.cur_data.replace(exec_id, self.get_input_data(gate, exec_id))
+ data_o = self.data_out["out"]
+ if data_o.empty(exec_id):
+ return
+ data_o.clear(exec_id)
+ DataNode.input_event(self, "out", exec_id)
+
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"loop",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ ExecNode.generate_dot(self, nodes, edges, visited, template)
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/match.py b/transformation/schedule/schedule_lib/match.py
new file mode 100644
index 0000000..e0b097f
--- /dev/null
+++ b/transformation/schedule/schedule_lib/match.py
@@ -0,0 +1,67 @@
+from typing import List, override, Type
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from transformation.schedule.rule_executor import RuleExecutor
+from .exec_node import ExecNode
+from .data_node import DataNode
+from .funcs import not_visited, generate_dot_node
+
+class Match(ExecNode, DataNode):
+ def input_event(self, gate: str, exec_id: int) -> None:
+ pass
+
+ def __init__(self, label: str, n: int | float) -> None:
+ super().__init__()
+ self.label: str = label
+        self.n: int | float = n
+ self.rule = None
+ self.rule_executer: RuleExecutor | None = None
+
+ @override
+ def nextState(self, exec_id: int) -> tuple[ExecNode, str]:
+ return self.next_node["fail" if self.data_out["out"].empty(exec_id) else "success"]
+
+ @staticmethod
+ @override
+ def get_exec_output_gates():
+ return ["success", "fail"]
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
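+        # Match self.rule against the host model, pivoting on the first entry of the
+        # "in" data (if connected); the matches are buffered on "out" (see
+        # Data.store_data for how self.n bounds the number of matches).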
+        pivot = {}
+        if self.data_in.get("in") is not None:
+            pivot = self.get_input_data("in", exec_id)[0]
+ # TODO: remove this print
+ print(f"matching: {self.label}\n\tpivot: {pivot}")
+ self.store_data( exec_id,
+ self.rule_executer.match_rule(od.m, self.rule, pivot=pivot), "out", self.n
+ )
+ return None
+
+ def init_rule(self, rule, rule_executer):
+ self.rule = rule
+ self.rule_executer = rule_executer
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"match\n{self.label}\nn = {self.n}",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ ExecNode.generate_dot(self, nodes, edges, visited, template)
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/merge.py b/transformation/schedule/schedule_lib/merge.py
new file mode 100644
index 0000000..d31b809
--- /dev/null
+++ b/transformation/schedule/schedule_lib/merge.py
@@ -0,0 +1,57 @@
+from typing import List, override
+
+from jinja2 import Template
+
+from .data_node import DataNode
+from .funcs import not_visited, generate_dot_node
+
+class Merge(DataNode):
+ def __init__(self, ports: list[str]) -> None:
+ self.in_data_ports = ports # ports must be defined before super.__init__
+ super().__init__()
+ self.in_data_ports.reverse()
+
+ @override
+ def get_data_input_gates(self) -> list[str]:
+ return self.in_data_ports
+
+ @override
+ def input_event(self, gate: str, exec_id: int) -> None:
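+        # Combine the first match of every connected input into a single dict and
+        # publish it on "out"; if the triggering input just became empty, the stale
+        # output is cleared and subscribers are notified instead.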
+ out = self.data_out["out"]
+ b = (not out.empty(exec_id)) and (self.data_in[gate].empty(exec_id))
+ out.clear(exec_id)
+ if b:
+ DataNode.input_event(self, "out", exec_id)
+ return
+
+ # TODO: only first element or all?
+ if any(data.empty(exec_id) for data in self.data_in.values()):
+ return
+ d: dict[str, str] = dict()
+ for gate in self.in_data_ports:
+ for key, value in self.data_in[gate].get_data(exec_id)[0].items():
+ d[key] = value
+ out.append(exec_id, d)
+ DataNode.input_event(self, "out", exec_id)
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"merge",
+ "ports_data": (
+ self.get_data_input_gates()[::-1],
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/modify.py b/transformation/schedule/schedule_lib/modify.py
new file mode 100644
index 0000000..ad4859e
--- /dev/null
+++ b/transformation/schedule/schedule_lib/modify.py
@@ -0,0 +1,49 @@
+from typing import List, override
+
+from jinja2 import Template
+
+from transformation.schedule.schedule_lib.funcs import not_visited, generate_dot_node
+from .data_node import DataNode
+
+
+class Modify(DataNode):
+ def __init__(self, rename: dict[str, str], delete: dict[str, str]) -> None:
+ super().__init__()
+ self.rename: dict[str, str] = rename
+ self.delete: set[str] = set(delete)
+
+ @override
+ def input_event(self, gate: str, exec_id: int) -> None:
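+        # Re-emit every incoming match with keys renamed via self.rename and keys in
+        # self.delete dropped, then notify the "out" subscribers.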
+ data_i = self.get_input_data(gate, exec_id)
+ if len(data_i):
+ self.data_out["out"].clear(exec_id)
+ for data in data_i:
+ self.data_out["out"].append(exec_id,
+ {
+ self.rename.get(key, key): value
+ for key, value in data.items()
+ if key not in self.delete
+ }
+ )
+ else:
+ if self.data_out["out"].empty(exec_id):
+ return
+ super().input_event("out", exec_id)
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"modify",
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/node.py b/transformation/schedule/schedule_lib/node.py
new file mode 100644
index 0000000..022c73c
--- /dev/null
+++ b/transformation/schedule/schedule_lib/node.py
@@ -0,0 +1,70 @@
+"""
+node.py
+
+Defines the abstract base Node class for graph-based structures. Each Node is assigned
+a unique identifier via an external IdGenerator. The class provides an interface for
+managing execution state and generating DOT graph representations.
+"""
+
+from abc import abstractmethod
+from jinja2 import Template
+from .funcs import IdGenerator
+
+
+class Node:
+ """
+ Abstract base class for graph nodes. Each Node has a unique ID and supports
+ context-dependent state management for execution scenarios. Subclasses must
+ implement the DOT graph generation logic.
+ """
+
+ @abstractmethod
+ def __init__(self) -> None:
+ """
+ Initializes the Node instance with a unique ID.
+
+ Attributes:
+ id (int): A unique identifier assigned by IdGenerator.
+ """
+ self.id: int = IdGenerator.generate_node_id()
+
+ def get_id(self) -> int:
+ """
+ Retrieves the unique identifier of the node.
+
+ Returns:
+ int: The unique node ID.
+ """
+ return self.id
+
+ def generate_stack_frame(self, exec_id: int) -> None:
+ """
+ Initializes a new state frame for a specific execution context.
+ Designed to be overridden in subclasses that use execution state.
+
+ Args:
+ exec_id (int): The ID of the execution context.
+ """
+
+ def delete_stack_frame(self, exec_id: int) -> None:
+ """
+ Deletes the state frame for a specific execution context.
+ Designed to be overridden in subclasses that use execution state.
+
+ Args:
+ exec_id (int): The ID of the execution context.
+ """
+
+ @abstractmethod
+ def generate_dot(
+ self, nodes: list[str], edges: list[str], visited: set[int], template: Template
+ ) -> None:
+ """
+ Generates the DOT graph representation for this node and its relationships.
+
+ Args:
+ nodes (list[str]): A list to append DOT node definitions to.
+ edges (list[str]): A list to append DOT edge definitions to.
+ visited (set[int]): A set of already visited node IDs to avoid duplicates or recursion.
+ template (Template): A Jinja2 template used to format the node's DOT representation.
+ """
diff --git a/transformation/schedule/schedule_lib/null_node.py b/transformation/schedule/schedule_lib/null_node.py
new file mode 100644
index 0000000..f7c44ad
--- /dev/null
+++ b/transformation/schedule/schedule_lib/null_node.py
@@ -0,0 +1,80 @@
+"""
+null_node.py
+
+Defines the NullNode class, a no-op singleton execution node used for open execution pins
+in the object diagram execution graph.
+"""
+
+from abc import ABC
+from typing import List, Type
+from jinja2 import Template
+from api.od import ODAPI
+from .funcs import generate_dot_node
+from .singleton import Singleton
+from .exec_node import ExecNode
+
+class NullNode(ExecNode, metaclass=Singleton):
+ """
+ A no-op execution node representing a null operation.
+
+ This node is typically used to represent a placeholder or open execution pin.
+ It always returns a fixed result and does not perform any operation.
+ """
+
+ def __init__(self):
+ """
+ Initializes the NullNode instance.
+ Inherits unique ID and state behavior from ExecNode.
+ """
+ super().__init__()
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
+ """
+ Simulates execution by returning a static result indicating an open pin.
+
+ Args:
+ port (str): The name of the input port.
+ exec_id (int): The current execution ID.
+ od (ODAPI): The Object Diagram API instance providing execution context.
+
+ Returns:
+ tuple[int, str] | None: A tuple (-1, "open pin reached") indicating a no-op.
+ """
+ return -1, "open pin reached"
+
+ @staticmethod
+ def get_exec_output_gates():
+ """
+ Returns the list of output gates for execution.
+
+ Returns:
+ list: An empty list, as NullNode has no output gates.
+ """
+ return []
+
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ """
+ Generates DOT graph representation for this node if it hasn't been visited.
+
+ Args:
+ nodes (List[str]): A list to accumulate DOT node definitions.
+ edges (List[str]): A list to accumulate DOT edge definitions.
+ visited (set[int]): Set of already visited node IDs to avoid cycles.
+ template (Template): A Jinja2 template used to render the node's DOT representation.
+ """
+        if self in visited:
+            return
+        visited.add(self)
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": "null",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ }
+ )
diff --git a/transformation/schedule/schedule_lib/print.py b/transformation/schedule/schedule_lib/print.py
new file mode 100644
index 0000000..3b237a2
--- /dev/null
+++ b/transformation/schedule/schedule_lib/print.py
@@ -0,0 +1,60 @@
+from typing import List, override
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from transformation.schedule.schedule_lib.funcs import not_visited, generate_dot_node
+from .exec_node import ExecNode
+from .data_node import DataNode
+
+
+class Print(ExecNode, DataNode):
+ def __init__(self, label: str = "", custom: str = "") -> None:
+ super().__init__()
+ self.label = label
+
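+        # A non-empty `custom` string is treated as a Jinja2 template and replaces the
+        # default _print; it is rendered with `data` bound to this node's "in" input.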
+ if custom:
+ template = Template(custom, trim_blocks=True, lstrip_blocks=True)
+            self._print = lambda exec_id: print(
+                template.render(data=self.get_input_data("in", exec_id))
+            )
+
+ @staticmethod
+ @override
+ def get_data_output_gates():
+ return []
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
+ self._print(exec_id)
+ return
+
+ @override
+ def input_event(self, gate: str, exec_id: int) -> None:
+ if not self.data_in[gate].empty(exec_id):
+ self._print(exec_id)
+
+ def _print(self, exec_id: int) -> None:
+ print(f"{self.label}{self.get_input_data("in", exec_id)}")
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"print",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ ExecNode.generate_dot(self, nodes, edges, visited, template)
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/rewrite.py b/transformation/schedule/schedule_lib/rewrite.py
new file mode 100644
index 0000000..2196d1d
--- /dev/null
+++ b/transformation/schedule/schedule_lib/rewrite.py
@@ -0,0 +1,56 @@
+import functools
+from typing import List, Type
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from .exec_node import ExecNode
+from .data_node import DataNode
+from .funcs import not_visited, generate_dot_node
+from ..rule_executor import RuleExecutor
+
+class Rewrite(ExecNode, DataNode):
+
+ def __init__(self, label: str) -> None:
+ super().__init__()
+ self.label = label
+ self.rule = None
+ self.rule_executor: RuleExecutor | None = None
+
+ def init_rule(self, rule, rule_executer):
+ self.rule = rule
+ self.rule_executor = rule_executer
+
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
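+        # Apply self.rule once, pivoting on the first entry of the "in" data (if
+        # connected), and buffer the resulting binding on "out".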
+        pivot = {}
+        if self.data_in.get("in") is not None:
+            pivot = self.get_input_data("in", exec_id)[0]
+ # TODO: remove print
+ print(f"rewrite: {self.label}\n\tpivot: {pivot}")
+ self.store_data( exec_id,
+ self.rule_executor.rewrite_rule(od, self.rule, pivot=pivot), "out", 1
+ )
+ return None
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"rewrite\n{self.label}",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ ExecNode.generate_dot(self, nodes, edges, visited, template)
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/singleton.py b/transformation/schedule/schedule_lib/singleton.py
new file mode 100644
index 0000000..91ac5cf
--- /dev/null
+++ b/transformation/schedule/schedule_lib/singleton.py
@@ -0,0 +1,9 @@
+from abc import ABCMeta
+
+class Singleton(ABCMeta):
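+    # Metaclass that caches one instance per class; later calls return the cached
+    # instance (used by NullNode and IdGenerator).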
+ _instances = {}
+
+ def __call__(cls, *args, **kwargs):
+ if cls not in cls._instances:
+ cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+ return cls._instances[cls]
diff --git a/transformation/schedule/schedule_lib/start.py b/transformation/schedule/schedule_lib/start.py
new file mode 100644
index 0000000..441e95f
--- /dev/null
+++ b/transformation/schedule/schedule_lib/start.py
@@ -0,0 +1,83 @@
+from typing import List, override
+
+from jinja2 import Template
+
+from . import DataNode
+from .exec_node import ExecNode
+from .funcs import not_visited, generate_dot_node
+
+class StartState:
+ def __init__(self) -> None:
+ super().__init__()
+ self.start_gate: str = ""
+
+class Start(ExecNode, DataNode):
+ def __init__(self, ports_exec: List[str], ports_data: List[str]) -> None:
+ self.state: dict[int, StartState] = {}
+ self.ports_exec = ports_exec
+ self.ports_data = ports_data
+ super().__init__()
+
+ def run_init(self, gate: str, exec_id: int, data: dict[str, any]) -> None:
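+        # Enter the schedule through `gate`: remember it for nextState and publish the
+        # supplied initial data on the matching data output gates (notifying
+        # subscribers).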
+ state = self.get_state(exec_id)
+ state.start_gate = gate
+ for port, d in data.items():
+ self.data_out[port].replace(exec_id, d)
+ DataNode.input_event(self, port, exec_id)
+
+ def nextState(self, exec_id: int) -> tuple["ExecNode", str]:
+ state = self.get_state(exec_id)
+ return self.next_node[state.start_gate]
+
+ def get_state(self, exec_id) -> StartState:
+ return self.state[exec_id]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ self.state[exec_id] = StartState()
+
+ @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+        super().delete_stack_frame(exec_id)
+ self.state.pop(exec_id)
+
+ @staticmethod
+ @override
+ def get_exec_input_gates():
+ return []
+
+ @override
+ def get_exec_output_gates(self):
+ return self.ports_exec
+
+ @staticmethod
+ @override
+ def get_data_input_gates():
+ return []
+
+ @override
+ def get_data_output_gates(self):
+ return self.ports_data
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": "start",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ }
+ )
+ super().generate_dot(nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/store.py b/transformation/schedule/schedule_lib/store.py
new file mode 100644
index 0000000..4aced26
--- /dev/null
+++ b/transformation/schedule/schedule_lib/store.py
@@ -0,0 +1,92 @@
+from typing import List, override
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from .data import Data
+from .exec_node import ExecNode
+from .data_node import DataNode
+from .funcs import not_visited, generate_dot_node
+
+class StoreState:
+ def __init__(self) -> None:
+ self.last_port: str = "in"
+
+class Store(ExecNode, DataNode):
+ def __init__(self, ports: list[str]) -> None:
+ self.ports = ports
+ super().__init__()
+ self.state: dict[int, StoreState] = {}
+ self.cur_data: Data = Data(self)
+
+ @override
+ def get_exec_input_gates(self) -> list[str]:
+ return [*self.ports, "in"]
+
+ @override
+ def get_exec_output_gates(self) -> list[str]:
+ return [*self.ports, "out"]
+
+ @override
+ def get_data_input_gates(self) -> list[str]:
+ return self.ports
+
+ @override
+ def nextState(self, exec_id: int) -> tuple[ExecNode, str]:
+ return self.next_node[self.get_state(exec_id).last_port]
+
+ @override
+ def input_event(self, gate: str, exec_id: int) -> None:
+ return
+
+ def get_state(self, exec_id) -> StoreState:
+ return self.state[exec_id]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ self.state[exec_id] = StoreState()
+ self.cur_data.generate_stack_frame(exec_id)
+
+ @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+        super().delete_stack_frame(exec_id)
+ self.state.pop(exec_id)
+ self.cur_data.delete_stack_frame(exec_id)
+
+
+ @override
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
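+        # Exec on a named port appends that port's input data to the internal buffer;
+        # exec on "in" flushes the buffer to the "out" data gate (notifying
+        # subscribers) and clears it. Control then continues via the gate recorded in
+        # last_port ("out" after a flush, otherwise the triggering port).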
+ state = self.get_state(exec_id)
+ if port == "in":
+ self.data_out["out"].replace(exec_id, self.cur_data.get_data(exec_id))
+ self.cur_data.clear(exec_id)
+ DataNode.input_event(self, "out", True)
+ state.last_port = "out"
+ return None
+ self.cur_data.extend(exec_id, self.get_input_data(port, exec_id))
+ state.last_port = port
+ return None
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": f"store",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ },
+ )
+ ExecNode.generate_dot(self, nodes, edges, visited, template)
+ DataNode.generate_dot(self, nodes, edges, visited, template)
diff --git a/transformation/schedule/schedule_lib/sub_schedule.py b/transformation/schedule/schedule_lib/sub_schedule.py
new file mode 100644
index 0000000..048658c
--- /dev/null
+++ b/transformation/schedule/schedule_lib/sub_schedule.py
@@ -0,0 +1,107 @@
+from typing import List, override, TYPE_CHECKING
+
+from jinja2 import Template
+
+from api.od import ODAPI
+from . import DataNode
+from .exec_node import ExecNode
+from .funcs import not_visited, generate_dot_node, IdGenerator
+
+if TYPE_CHECKING:
+ from ..rule_scheduler import RuleScheduler
+
+
+class ScheduleState:
+ def __init__(self) -> None:
+ self.end_gate: str = ""
+
+class SubSchedule(ExecNode, DataNode):
+ def __init__(self, scheduler: "RuleScheduler", file: str) -> None:
+ self.schedule = scheduler._load_schedule(file, _main=False)
+ self.scheduler = scheduler
+ super().__init__()
+ self.state: dict[int, ScheduleState] = {}
+
+ @override
+ def nextState(self, exec_id: int) -> tuple["ExecNode", str]:
+ return self.next_node[self.get_state(exec_id).end_gate]
+
+ @override
+ def get_exec_input_gates(self) -> "List[ExecNode]":
+ return self.schedule.start.get_exec_output_gates()
+
+ @override
+ def get_exec_output_gates(self) -> "List[ExecNode]":
+ return [*self.schedule.end.get_exec_input_gates()]
+
+ @override
+ def get_data_input_gates(self) -> "List[ExecNode]":
+ return self.schedule.start.get_data_output_gates()
+
+ @override
+ def get_data_output_gates(self) -> "List[ExecNode]":
+ return self.schedule.end.get_data_input_gates()
+
+ def get_state(self, exec_id) -> ScheduleState:
+ return self.state[exec_id]
+
+ @override
+ def generate_stack_frame(self, exec_id: int) -> None:
+ super().generate_stack_frame(exec_id)
+ self.state[exec_id] = ScheduleState()
+
+ @override
+ def delete_stack_frame(self, exec_id: int) -> None:
+ super().delete_stack_frame(exec_id)
+ self.state.pop(exec_id)
+
+
+ @override
+ def execute(self, port: str, exec_id: int, od: ODAPI) -> tuple[int, any] | None:
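+        # Run the nested schedule with a fresh exec_id, entering it through `port` and
+        # seeding it with this node's non-empty inputs; afterwards copy the
+        # sub-schedule's outputs onto this node's data gates and remember which end
+        # gate was reached so nextState follows the matching exec connection.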
+ runstatus, result = self.scheduler._runner(
+ od,
+ self.schedule,
+ port,
+ IdGenerator.generate_exec_id(),
+ {
+ port: self.get_input_data(port, exec_id)
+ for port, value in self.data_in.items()
+ if value is not None and not value.empty(exec_id)
+ },
+ )
+ if runstatus != 1:
+ return runstatus, result
+ self.get_state(exec_id).end_gate = result["exec_gate"]
+ results_data = result["data_out"]
+ for port, data in self.data_out.items():
+ if port in results_data:
+ self.data_out[port].replace(exec_id, results_data[port])
+ DataNode.input_event(self, port, exec_id)
+ continue
+
+ if not data.empty(exec_id):
+ data.clear(exec_id)
+ DataNode.input_event(self, port, exec_id)
+ return None
+
+ @not_visited
+ def generate_dot(
+ self, nodes: List[str], edges: List[str], visited: set[int], template: Template
+ ) -> None:
+ generate_dot_node(
+ self,
+ nodes,
+ template,
+ **{
+ "label": "rrrrrrrrrr",
+ "ports_exec": (
+ self.get_exec_input_gates(),
+ self.get_exec_output_gates(),
+ ),
+ "ports_data": (
+ self.get_data_input_gates(),
+ self.get_data_output_gates(),
+ ),
+ }
+ )
+ super().generate_dot(nodes, edges, visited, template)
diff --git a/transformation/schedule/templates/schedule_dot.j2 b/transformation/schedule/templates/schedule_dot.j2
new file mode 100644
index 0000000..ca715dc
--- /dev/null
+++ b/transformation/schedule/templates/schedule_dot.j2
@@ -0,0 +1,65 @@
+digraph G {
+ rankdir=LR;
+ compound=true;
+ node [shape=rect];
+{% for node in nodes %}
+ {{ node }}
+{% endfor %}
+
+{% for edge in edges %}
+ {{ edge }}
+{% endfor %}
+}
+
+{% macro Node(label, id, ports_exec=[], ports_data=[], debug = False) %}
+subgraph cluster_{{ id }} {
+ label = "
+ {%- if debug %}
+ {{ id }}_
+ {%- endif -%}
+ {{ label }}"
+
+ style = rounded;
+ input_{{ id }} [
+ shape=rect;
+ label= {{ Gate_Table(ports_exec[0], ports_data[0]) }}
+ ];
+ output_{{ id }} [
+ shape=rect;
+ label= {{ Gate_Table(ports_exec[1], ports_data[1]) }}
+ ];
+ input_{{ id }}->output_{{ id }} [style=invis];
+ }
+{%- endmacro %}
+
+{%- macro Edge(from_id, to_id, from_gate, to_gate, prefix, color) %}
+output_{{ from_id }}:{{ prefix }}_{{ from_gate }} -> input_{{ to_id }}:{{ prefix }}_{{ to_gate }} [color = {{ color }}]
+{%- endmacro %}
+
+{%- macro Gate_Table(ports_exec, ports_data) %}
+
+ <
+ {% if ports_exec or ports_data %}
+ {% if ports_exec %}
+
+
+ {% for port_e in ports_exec %}
+
{{ port_e }}
+ {% endfor %}
+
+
+ {% endif %}
+ {% if ports_data %}
+
+
+ {% for port_d in ports_data %}
+
{{ port_d }}
+ {% endfor %}
+
+
+ {% endif %}
+ {% else %}
+
+ {% endif %}
+
>
+{%- endmacro %}
\ No newline at end of file
diff --git a/transformation/schedule/templates/schedule_muMLE.j2 b/transformation/schedule/templates/schedule_muMLE.j2
new file mode 100644
index 0000000..624b203
--- /dev/null
+++ b/transformation/schedule/templates/schedule_muMLE.j2
@@ -0,0 +1,28 @@
+{% for id, param in nodes.items() -%}
+ {{ param[0] }}:{{ param[1].pop("type") }}
+ {%- if param[1] %}
+ {
+ {% for key, value in param[1].items() %}
+ {% if value %}
+ {% if key in ["file"] %}
+ {% set value = '"' ~ value ~ '"' %}
+ {% elif key in ["custom"] %}
+ {% set value = '`"' ~ value.replace('\n', '\\n') ~ '"`' %}
+ {% elif key in ["action", "init"] %}
+ {% set value = '\n```\n' ~ value ~ '\n```' %}
+ {% elif key in ["ports", "ports_exec_in", "ports_exec_out", "ports_data_in", "ports_data_out", "rename", "delete"] %}
+ {% set value = '`' ~ value.replace('\n', '\\n') ~ '`' %}
+ {% endif %}
+ {{ key }} = {{ value }};
+ {% endif %}
+ {% endfor %}
+}
+ {% endif %}
+
+{% endfor %}
+
+{%- for edge in edges %}
+ {% set source = edge[0] %}
+ {% set target = edge[1] %}
+:Conn_{{ source[2] }} ({{ source[0] }} -> {{ target[0] }}) {from="{{ source[1] }}"; to="{{ target[1] }}";}
+{% endfor -%}
\ No newline at end of file
diff --git a/transformation/schedule/templates/schedule_template.j2 b/transformation/schedule/templates/schedule_template.j2
new file mode 100644
index 0000000..e696681
--- /dev/null
+++ b/transformation/schedule/templates/schedule_template.j2
@@ -0,0 +1,51 @@
+{% macro Start(name, ports_exec_out, ports_data_out) %}
+{{ name }} = Start({{ ports_exec_out }}, {{ ports_data_out }})
+{%- endmacro %}
+
+{% macro End(name, ports_exec_in, ports_data_in) %}
+{{ name }} = End({{ ports_exec_in }}, {{ ports_data_in }})
+{%- endmacro %}
+
+{% macro Match(name, file, n) %}
+{{ name }} = Match("{{ file }}", {{ n }})
+{%- endmacro %}
+
+{% macro Rewrite(name, file) %}
+{{ name }} = Rewrite("{{ file }}")
+{%- endmacro %}
+
+{% macro Action(name, ports_exec_in, ports_exec_out, ports_data_in, ports_data_out, action, init) %}
+{{ name }} = Action({{ ports_exec_in }}, {{ ports_exec_out }}, {{ ports_data_in }}, {{ ports_data_out }}, {{ action }}, {{ init }})
+{%- endmacro %}
+
+{% macro Modify(name, rename, delete) %}
+{{ name }} = Modify({{ rename }}, {{ delete }})
+{%- endmacro %}
+
+{% macro Merge(name, ports_data_in) %}
+{{ name }} = Merge({{ ports_data_in }})
+{%- endmacro %}
+
+{% macro Store(name, ports) %}
+{{ name }} = Store({{ ports }})
+{%- endmacro %}
+
+{% macro Schedule(name, file) %}
+{{ name }} = SubSchedule(scheduler, "{{ file }}")
+{%- endmacro %}
+
+{% macro Loop(name) %}
+{{ name }} = Loop()
+{%- endmacro %}
+
+{% macro Print(name, label, custom) %}
+{{ name }} = Print("{{ label }}", {{ custom }})
+{%- endmacro %}
+
+{% macro Conn_exec(name_from, name_to, from, to) %}
+{{ name_from }}.connect({{ name_to }},"{{ from }}","{{ to }}")
+{%- endmacro %}
+
+{% macro Conn_data(name_from, name_to, from, to, event) %}
+{{ name_from }}.connect_data({{ name_to }}, "{{ from }}", "{{ to }}", {{ event }})
+{%- endmacro %}
\ No newline at end of file
diff --git a/transformation/schedule/templates/schedule_template_wrap.j2 b/transformation/schedule/templates/schedule_template_wrap.j2
new file mode 100644
index 0000000..d1e8dfc
--- /dev/null
+++ b/transformation/schedule/templates/schedule_template_wrap.j2
@@ -0,0 +1,48 @@
+# Code generated from the schedule_template_wrap.j2 template.
+
+from transformation.schedule.schedule_lib import *
+
+class Schedule:
+ def __init__(self):
+ self.start: Start | None = None
+ self.end: End | None = None
+ self.nodes: list[DataNode] = []
+
+ @staticmethod
+ def get_matchers():
+ return [
+ {% for file in match_files %}
+ "{{ file }}",
+ {% endfor %}
+ ]
+
+ def init_schedule(self, scheduler, rule_executer, matchers):
+ {% for block in blocks_start_end%}
+ {{ block }}
+ {% endfor %}
+ self.start = {{ start }}
+ self.end = {{ end }}
+ {% for block in blocks%}
+ {{ block }}
+ {% endfor %}
+
+ {% for conn in exec_conn%}
+ {{ conn }}
+ {% endfor %}
+ {% for conn_d in data_conn%}
+ {{ conn_d }}
+ {% endfor %}
+
+ {% for match in matchers %}
+ {{ match["name"] }}.init_rule(matchers["{{ match["file"] }}"], rule_executer)
+ {% endfor %}
+
+ self.nodes = [
+ {% for name in blocks_name%}
+ {{ name }},
+ {% endfor %}
+ ]
+ return None
+
+ def generate_dot(self, *args, **kwargs):
+ return self.start.generate_dot(*args, **kwargs)
\ No newline at end of file
diff --git a/tutorial/00_metamodeling.py b/tutorial/00_metamodeling.py
new file mode 100644
index 0000000..6b12fe0
--- /dev/null
+++ b/tutorial/00_metamodeling.py
@@ -0,0 +1,157 @@
+# Before we can create a model in muMLE, we have to create a meta-model.
+
+# Here's an example of a (silly) meta-model.
+# We use a textual concrete syntax:
+
+mm_cs = """
+ # A class named 'A':
+ A:Class
+
+ # A class named 'B':
+ B:Class
+
+ # An association from 'A' to 'B':
+ a2b:Association (A -> B) {
+ # Every 'A' must be associated with at least one 'B'
+ target_lower_cardinality = 1;
+ }
+"""
+
+# Now, we create a model that is an instance of our meta-model:
+
+m_cs = """
+ myA:A
+
+ myB:B
+
+ myLnk:a2b (myA -> myB)
+"""
+
+# Notice that the syntax for meta-model and model is the same: We always declare a named object/link, followed by a colon (:) and the name of the type. The type name refers to the name of an object/link in the meta-model of our model.
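+# (In general: `objectName:TypeName` declares an object, and `linkName:LinkType (source -> target)` declares a link. For example, 'myLnk:a2b (myA -> myB)' above is a link of type 'a2b' from 'myA' to 'myB'.)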
+
+
+# So far we've only created text strings in Python. To parse them as models, we first create our 'state', which is a mutable graph that will contain our models and meta-models:
+
+
+from state.devstate import DevState
+
+state = DevState()
+
+
+# Next, we must load the Simple Class Diagrams (SCD) meta-meta-model into our 'state'. The SCD meta-meta-model is a meta-model for our meta-model, and it is also a meta-model for itself.
+
+# The meta-meta-model is not specified in textual syntax because it is typed by itself. In textual syntax, it would contain things like:
+# Class:Class
+# which is an object typed by itself. The parser cannot handle this (or circular dependencies in general). Therefore, we load the meta-meta-model by mutating the 'state' directly at a very low level:
+
+from bootstrap.scd import bootstrap_scd
+
+print("Loading meta-meta-model...")
+mmm = bootstrap_scd(state)
+print("OK")
+
+# Now that the meta-meta-model has been loaded, we can parse our meta-model:
+
+from concrete_syntax.textual_od import parser
+
+print()
+print("Parsing meta-model...")
+mm = parser.parse_od(
+ state,
+ m_text=mm_cs, # the string of text to parse
+ mm=mmm, # the meta-model of class diagrams (= our meta-meta-model)
+)
+print("OK")
+
+
+# And we can parse our model, the same way:
+
+print()
+print("Parsing model...")
+m = parser.parse_od(
+ state,
+ m_text=m_cs,
+ mm=mm, # this time, the meta-model is the previous model we parsed
+)
+print("OK")
+
+
+# Now we can do a conformance check:
+
+from framework.conformance import Conformance, render_conformance_check_result
+
+print()
+print("Is our model a valid instance of our meta model?")
+conf = Conformance(state, m, mm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# Looks like it is OK!
+
+
+# We can also check if our meta-model is a valid class diagram:
+
+print()
+print("Is our meta-model a valid class diagram?")
+conf = Conformance(state, mm, mmm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# Also good.
+
+
+# Finally, we can even check if the meta-meta-model is a valid instance of itself (it should be):
+
+print()
+print("Is our meta-model a valid class diagram?")
+conf = Conformance(state, mmm, mmm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# All good!
+
+
+# Now let's make things a bit more interesting and introduce non-conformance:
+
+m2_cs = """
+ myA:A
+ myA2:A
+
+ myB:B
+
+ myLnk:a2b (myA -> myB)
+"""
+
+# Parse it:
+
+m2 = parser.parse_od(
+ state,
+ m_text=m2_cs,
+ mm=mm,
+)
+
+# The above model is non-conformant because 'myA2' should have at least one outgoing link of type 'a2b', but it doesn't.
+
+print()
+print("Is model 'm2' a valid instance of our meta-model? (it should not be)")
+conf = Conformance(state, m2, mm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# It should be non-conformant.
+
+
+# Finally, let's render everything as PlantUML:
+
+from concrete_syntax.plantuml import renderer as plantuml
+from concrete_syntax.plantuml.make_url import make_url
+
+uml = (""
+ + plantuml.render_package("Meta-model", plantuml.render_class_diagram(state, mm))
+ + plantuml.render_package("Model", plantuml.render_object_diagram(state, m, mm))
+ + plantuml.render_trace_conformance(state, m, mm)
+ # + plantuml.render_package("Meta-meta-model", plantuml.render_class_diagram(state, mmm))
+ # + plantuml.render_trace_conformance(state, mm, mmm)
+)
+
+print()
+print("PlantUML output:", make_url(uml))
+
+
+# On to the next tutorial...
diff --git a/tutorial/01_constraints.py b/tutorial/01_constraints.py
new file mode 100644
index 0000000..fd0a193
--- /dev/null
+++ b/tutorial/01_constraints.py
@@ -0,0 +1,92 @@
+# We now make our meta-model more interesting by adding a 'price' attribute to B, along with some constraints.
+
+mm_cs = """
+ # class named 'A':
+ A:Class
+
+ # class named 'B':
+ B:Class {
+ constraint = ```
+ # Price must be less than 100
+ get_value(get_slot(this, "price")) < 100
+ ```;
+ }
+
+ # 'B' has an attribute 'price':
+ B_price:AttributeLink (B -> Integer) {
+ name = "price";
+ optional = False;
+ }
+
+ # An association from 'A' to 'B':
+ a2b:Association (A -> B) {
+ # Every 'A' must be associated with at least one 'B'
+ target_lower_cardinality = 1;
+ }
+
+ totalPriceLessThan500:GlobalConstraint {
+ constraint = ```
+ total_price = 0;
+ for b_name, b_id in get_all_instances("B"):
+ total_price += get_value(get_slot(b_id, "price"))
+ total_price < 500
+ ```;
+ }
+"""
+
+####
+# Note: The name 'B_price' follows a fixed format: <class name>_<attribute name>.
+# This format must be followed!
+####
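+# For example (illustration only), a hypothetical Integer attribute 'age' on class 'A' would be declared as:
+#   A_age:AttributeLink (A -> Integer) {
+#     name = "age";
+#     optional = False;
+#   }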
+
+# We update our model to include a price:
+
+m_cs = """
+ myA:A
+
+ myB:B {
+ price = 1000;
+ }
+
+ myLnk:a2b (myA -> myB)
+"""
+
+
+# And do a conformance check:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from concrete_syntax.textual_od import parser
+from framework.conformance import Conformance, render_conformance_check_result
+
+state = DevState()
+print("Loading meta-meta-model...")
+mmm = bootstrap_scd(state)
+print("OK")
+
+print()
+print("Parsing meta-model...")
+mm = parser.parse_od(
+ state,
+ m_text=mm_cs, # the string of text to parse
+ mm=mmm, # the meta-model of class diagrams (= our meta-meta-model)
+)
+print("OK")
+
+print()
+print("Parsing model...")
+m = parser.parse_od(
+ state,
+ m_text=m_cs,
+ mm=mm, # this time, the meta-model is the previous model we parsed
+)
+print("OK")
+
+print()
+print("Is our model a valid instance of our meta model?")
+conf = Conformance(state, m, mm)
+print(render_conformance_check_result(conf.check_nominal()))
+
+# Can you fix the constraint violation?
+
+
diff --git a/tutorial/02_inheritance.py b/tutorial/02_inheritance.py
new file mode 100644
index 0000000..9de434f
--- /dev/null
+++ b/tutorial/02_inheritance.py
@@ -0,0 +1,61 @@
+# The following meta-model has an inheritance relation:
+
+mm_cs = """
+ MyAbstractClass:Class {
+ abstract = True;
+ }
+
+ MyConcreteClass:Class
+
+ :Inheritance (MyConcreteClass -> MyAbstractClass)
+
+ Z:Class
+
+ myZ:Association (MyAbstractClass -> Z) {
+ target_lower_cardinality = 1;
+ }
+
+"""
+
+# Note that we didn't give our inheritance link a name. A unique name will be auto-generated by the parser.
+
+
+# A (non-conforming) instance:
+
+m_nonconform_cs = """
+ cc:MyConcreteClass
+ z:Z
+"""
+
+
+# Check conformance:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+
+state = DevState()
+mmm = bootstrap_scd(state)
+
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+
+print("should be non-conform:")
+m_nonconform = loader.parse_and_check(state, m_nonconform_cs, mm, "m_nonconform")
+
+
+# The reason for the non-conformance is that all cardinalities and constraints are inherited. Therefore 'MyConcreteClass' must have at least one outgoing 'myZ' link as well.
+
+# We fix the non-conformance by adding this link:
+
+m_conform_cs = m_nonconform_cs + """
+ :myZ (cc -> z)
+"""
+
+# Now everything will be fine
+
+print("should be conform:")
+m_conform = loader.parse_and_check(state, m_conform_cs, mm, "m_conform")
+print("OK")
+
+
+# On to the next tutorial...
\ No newline at end of file
diff --git a/tutorial/03_api.py b/tutorial/03_api.py
new file mode 100644
index 0000000..7cdd3ea
--- /dev/null
+++ b/tutorial/03_api.py
@@ -0,0 +1,71 @@
+# We reuse our (meta-)model from the previous tutorial. For this tutorial, it doesn't really matter what the models look like.
+
+mm_cs = """
+ MyAbstractClass:Class {
+ abstract = True;
+ }
+
+ MyConcreteClass:Class
+
+ :Inheritance (MyConcreteClass -> MyAbstractClass)
+
+ Z:Class
+
+ myZ:Association (MyAbstractClass -> Z) {
+ target_lower_cardinality = 1;
+ }
+"""
+
+m_cs = """
+ cc:MyConcreteClass
+ z:Z
+ :myZ (cc -> z)
+"""
+
+
+# We parse everything:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+
+state = DevState()
+mmm = bootstrap_scd(state)
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+m = loader.parse_and_check(state, m_cs, mm, "m")
+
+
+# We can query the model via an API called ODAPI (Object Diagram API):
+
+from api.od import ODAPI
+
+odapi = ODAPI(state, m, mm)
+
+ls = odapi.get_all_instances("MyAbstractClass", include_subtypes=True)
+
+print("result of get_all_instances:")
+print(ls)
+
+# Observing the output above, we see that we got a list of tuples (object_name, UUID).
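+# For instance, a small sketch that just loops over this list:
+for obj_name, obj_id in ls:
+    print("  instance:", obj_name, "->", obj_id)
+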
+# We can also modify the model via the same API:
+
+(cc_name, cc_id) = ls[0]
+z2 = odapi.create_object("z2", "Z")
+odapi.create_link("lnk", "myZ", cc_id, z2)
+
+# And we can observe the modified model:
+
+from concrete_syntax.textual_od.renderer import render_od
+from concrete_syntax.common import indent
+
+print()
+print("the modified model:")
+print(indent(render_od(state, m, mm, hide_names=False), 2))
+
+# BTW, notice that the anonymous link of type 'myZ' from the original model was automatically given a unique name (starting with two underscores).
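+# A quick sketch to list those links (assuming get_all_instances also accepts link types such as 'myZ'):
+for lnk_name, lnk_id in odapi.get_all_instances("myZ"):
+    print("  link of type 'myZ':", lnk_name)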
+
+# The full ODAPI is documented on page 6 of this PDF:
+# http://msdl.uantwerpen.be/people/hv/teaching/MSBDesign/202425/assignments/assignment6.pdf
+
+
+# On to the next tutorial...
diff --git a/tutorial/04_transformation.py b/tutorial/04_transformation.py
new file mode 100644
index 0000000..2a50af4
--- /dev/null
+++ b/tutorial/04_transformation.py
@@ -0,0 +1,167 @@
+# We now get to the interesting part: model transformation.
+
+# We start with a meta-model and a model, and parse them:
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from concrete_syntax.textual_od.renderer import render_od
+from concrete_syntax.common import indent
+from concrete_syntax.plantuml import renderer as plantuml
+from concrete_syntax.plantuml.make_url import make_url as make_plantuml_url
+from framework.conformance import Conformance, render_conformance_check_result
+
+mm_cs = """
+ Bear:Class
+ Animal:Class {
+ abstract = True;
+ }
+ Man:Class {
+ lower_cardinality = 1;
+ upper_cardinality = 2;
+ }
+ Man_weight:AttributeLink (Man -> Integer) {
+ name = "weight";
+ optional = False;
+ }
+ afraidOf:Association (Man -> Animal) {
+ # Every Man afraid of at least one Animal
+ target_lower_cardinality = 1;
+ }
+ :Inheritance (Man -> Animal)
+ :Inheritance (Bear -> Animal)
+"""
+
+m_cs = """
+ george:Man {
+ weight = 80;
+ }
+ mrBrown:Bear
+ teddy:Bear
+ :afraidOf (george -> mrBrown)
+ :afraidOf (george -> teddy)
+"""
+
+state = DevState()
+mmm = bootstrap_scd(state)
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+m = loader.parse_and_check(state, m_cs, mm, "m")
+
+
+# We will perform a simple model transformation, where we specify a Left Hand Side (LHS) and Right Hand Side (RHS) pattern. As we will see, both the LHS- and RHS-patterns are models too, and thus we need a meta-model for them. This meta-model can be auto-generated as follows:
+
+from transformation.ramify import ramify
+
+ramified_mm = ramify(state, mm)
+
+# Let's see what it looks like:
+
+print("RAMified meta-model:")
+print(indent(render_od(state, ramified_mm, mmm), 2))
+
+# Note that our RAMified meta-model is also a valid class diagram:
+
+print()
+print("Is valid class diagram?")
+print(render_conformance_check_result(Conformance(state, ramified_mm, mmm).check_nominal()))
+
+# We now specify our patterns.
+# We create a rule that looks for a Man with weight > 60, who is afraid of an animal:
+
+lhs_cs = """
+ # object to match
+ man:RAM_Man {
+ # match only men heavy enough
+ RAM_weight = `get_value(this) > 60`;
+ }
+
+ scaryAnimal:RAM_Animal
+ manAfraidOfAnimal:RAM_afraidOf (man -> scaryAnimal)
+"""
+
+lhs = loader.parse_and_check(state, lhs_cs, ramified_mm, "lhs")
+
+# As you can see, in our pattern-language, the names of the types have been prefixed with 'RAM_'. This is to distinguish them from the original types.
+# Further, the type of the 'weight'-attribute has changed: it used to be Integer, but now it is ActionCode, meaning we can write Python expressions in it. In an LHS-pattern, we write an expression that evaluates to a (Python) boolean. In our example, the expression is evaluated on every Man-object: if the result is True, the object can be matched; otherwise it cannot.
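+# For illustration only (not used below), a condition such as:
+#     RAM_weight = `60 < get_value(this) < 100`;
+# would match only men whose weight lies strictly between 60 and 100.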
+
+
+# Let's see what happens if we match our LHS-pattern with our model:
+
+from transformation.matcher import match_od
+
+generator = match_od(state, m, mm, lhs, ramified_mm)
+
+# Matching is lazy: 'match_od' returns a generator object, so it only looks for the next match when you ask it to. This matters because sometimes we are only interested in the first match, and producing all matches can take a long time on big models (the number of matches can also be very large). Our example is small, though, so let's just generate all the matches:
+
+all_matches = list(generator) # generate all matches
+
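+# If we only cared about the first match, a minimal sketch (re-running the same match_od call as above):
+first_match = next(match_od(state, m, mm, lhs, ramified_mm), None)  # None when there is no match at all
+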
+import pprint
+
+print()
+print("All matches:\n", pprint.pformat(all_matches))
+
+# A match is just a Python dictionary mapping names of our LHS-pattern to names of our model.
+# There should be 2 matches: 'man' will always be matched with 'george', but 'scaryAnimal' can be matched with either 'mrBrown' or 'teddy'.
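+# For illustration, one of them will look roughly like:
+#   {"man": "george", "scaryAnimal": "teddy", "manAfraidOfAnimal": <auto-generated name of the matched afraidOf link>}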
+
+
+# So far we've only queried our model. We can modify the model by specifying a RHS-pattern:
+# Objects/links that occur in RHS but not in LHS are CREATED
+# Objects/links that occur in LHS but not in RHS are DELETED
+# Objects/links that occur in both LHS and RHS remain, but we can still UPDATE their attributes.
+
+# Here's a RHS-pattern:
+
+rhs_cs = """
+ man:RAM_Man {
+ # man gains weight
+ RAM_weight = `get_value(this) + 5`;
+ }
+
+ # to create:
+ bill:RAM_Man {
+ RAM_weight = `100`;
+ }
+ billAfraidOfMan:RAM_afraidOf (bill -> man)
+"""
+
+rhs = loader.parse_and_check(state, rhs_cs, ramified_mm, "rhs")
+
+
+# Our RHS-pattern does not contain the LHS elements 'scaryAnimal' and 'manAfraidOfAnimal', so these will be deleted. The elements 'bill' and 'billAfraidOfMan' will be created. The attribute 'weight' of 'man' (matched with 'george' in our example) will be incremented by 5.
+
+# Notice that the weight of the new object 'bill' is the Python-expression `100` (in backticks), not the Integer 100.
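+# (This is because the RAMified 'weight' attribute has type ActionCode, as explained above: its value is an expression that is evaluated when the rule is applied, yielding the actual Integer.)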
+
+# Let's rewrite our model:
+
+from transformation.cloner import clone_od
+from transformation import rewriter
+
+m_rewritten = clone_od(state, m, mm) # copy our model before rewriting (this is optional - we do this so we can later render the model before and after rewrite in a single PlantUML diagram)
+
+lhs_match = all_matches[0] # select one match
+rhs_match = rewriter.rewrite(state, rhs, ramified_mm, lhs_match, m_rewritten, mm)
+
+# Let's render everything as PlantUML:
+
+uml = (""
+ + plantuml.render_package("MM", plantuml.render_class_diagram(state, mm))
+ + plantuml.render_package("RAMified MM", plantuml.render_class_diagram(state, ramified_mm))
+ + plantuml.render_package("LHS", plantuml.render_object_diagram(state, lhs, ramified_mm))
+ + plantuml.render_package("RHS", plantuml.render_object_diagram(state, rhs, ramified_mm))
+ + plantuml.render_package("M (before rewrite)", plantuml.render_object_diagram(state, m, mm))
+ + plantuml.render_package("M (after rewrite)", plantuml.render_object_diagram(state, m_rewritten, mm))
+
+ + plantuml.render_trace_ramifies(state, mm, ramified_mm)
+
+ + plantuml.render_trace_match(state, lhs_match, lhs, m, "orange")
+ + plantuml.render_trace_match(state, rhs_match, rhs, m_rewritten, "red")
+
+ + plantuml.render_trace_conformance(state, lhs, ramified_mm)
+ + plantuml.render_trace_conformance(state, rhs, ramified_mm)
+ + plantuml.render_trace_conformance(state, m, mm)
+ + plantuml.render_trace_conformance(state, m_rewritten, mm)
+)
+
+print()
+print("PlantUML:", make_plantuml_url(uml))
+
diff --git a/tutorial/05_advanced_transformation.py b/tutorial/05_advanced_transformation.py
new file mode 100644
index 0000000..deaf376
--- /dev/null
+++ b/tutorial/05_advanced_transformation.py
@@ -0,0 +1,214 @@
+# In this tutorial, we implement the semantics of Petri Nets by means of model transformation.
+# Compared to the previous tutorial, it only introduces one more feature: pivots.
+
+# Consider the following Petri Net language meta-model:
+
+mm_cs = """
+ Place:Class
+ Transition:Class
+
+ Place_tokens:AttributeLink (Place -> Integer) {
+ optional = False;
+ name = "tokens";
+ constraint = `get_value(get_target(this)) >= 0`;
+ }
+
+ P2T:Association (Place -> Transition)
+ T2P:Association (Transition -> Place)
+
+ P2T_weight:AttributeLink (P2T -> Integer) {
+ optional = False;
+ name = "weight";
+ constraint = `get_value(get_target(this)) >= 0`;
+ }
+
+ T2P_weight:AttributeLink (T2P -> Integer) {
+ optional = False;
+ name = "weight";
+ constraint = `get_value(get_target(this)) >= 0`;
+ }
+"""
+
+# We now create the following Petri Net:
+# https://upload.wikimedia.org/wikipedia/commons/4/4d/Two-boundedness-cb.png
+
+m_cs = """
+ p1:Place { tokens = 0; }
+ p2:Place { tokens = 0; }
+ cp1:Place { tokens = 2; }
+ cp2:Place { tokens = 2; }
+
+ t1:Transition
+ t2:Transition
+ t3:Transition
+
+ :T2P (t1 -> p1) { weight = 1; }
+ :P2T (p1 -> t2) { weight = 1; }
+ :T2P (t2 -> cp1) { weight = 1; }
+ :P2T (cp1 -> t1) { weight = 1; }
+
+ :T2P (t2 -> p2) { weight = 1; }
+ :P2T (p2 -> t3) { weight = 1; }
+ :T2P (t3 -> cp2) { weight = 1; }
+ :P2T (cp2 -> t2) { weight = 1; }
+"""
+
+# The usual...
+
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from util import loader
+from transformation.ramify import ramify
+from transformation.matcher import match_od
+from transformation.cloner import clone_od
+from transformation import rewriter
+from concrete_syntax.textual_od.renderer import render_od
+from concrete_syntax.common import indent
+from api.od import ODAPI
+
+state = DevState()
+mmm = bootstrap_scd(state)
+mm = loader.parse_and_check(state, mm_cs, mmm, "mm")
+m = loader.parse_and_check(state, m_cs, mm, "m")
+
+mm_ramified = ramify(state, mm)
+
+
+# We will now implement Petri Net operational semantics by means of model transformation.
+
+
+# Look for any transition:
+
+lhs_transition_cs = """
+ t:RAM_Transition
+"""
+
+# But if that transition has an incoming arc (P2T) from a place that does not have enough tokens, the transition cannot fire. We can express this as a pattern:
+
+lhs_transition_disabled_cs = """
+ t:RAM_Transition
+ p:RAM_Place
+ :RAM_P2T (p -> t) {
+ condition = ```
+ place = get_source(this)
+ tokens = get_slot_value(place, "tokens")
+ weight = get_slot_value(this, "weight")
+ tokens < weight # True means: cannot fire
+ ```;
+ }
+"""
+
+# Parse these patterns:
+lhs_transition = loader.parse_and_check(state, lhs_transition_cs, mm_ramified, "lhs_transition")
+lhs_transition_disabled = loader.parse_and_check(state, lhs_transition_disabled_cs, mm_ramified, "lhs_transition_disabled")
+
+# To find enabled transitions, we first match our first pattern (looking for a transition), and then we try to 'grow' that match with our second, "Negative Application Condition" (NAC) pattern. If growing the match with the second pattern is possible, we abort and look for another transition.
+# To grow a match, we use the 'pivot'-argument of the match-function. A pivot is a partial match that needs to be grown.
+# This results in the following generator function:
+
+def find_enabled_transitions(m):
+ for match in match_od(state, m, mm, lhs_transition, mm_ramified):
+ for match_nac in match_od(state, m, mm, lhs_transition_disabled, mm_ramified, pivot=match): # <-- notice the pivot :)
+ # transition is disabled
+ break # find next transition
+ else:
+ # we've found an enabled transition:
+ yield match
+
+# Let's see if it works:
+
+enabled = list(find_enabled_transitions(m))
+print("enabled PN transitions:", enabled)
+
+
+# Next, to fire a transition:
+# - we decrement the number of tokens of every incoming place
+# - we increment the number of tokens of every outgoing place
+# We also do this by growing our match: given an enabled transition (already matched), we match *any* incoming place and rewrite it to decrement its tokens. Next, we match *any* outgoing place and increment its tokens.
+
+# Decrement incoming
+lhs_incoming_cs = """
+ t:RAM_Transition # <-- we already know this transition is enabled
+ inplace:RAM_Place {
+        RAM_tokens = `True`; # this must be present; otherwise the rewriter will try to create a new attribute rather than update the existing one
+ }
+ inarc:RAM_P2T (inplace -> t)
+"""
+rhs_incoming_cs = """
+ t:RAM_Transition
+ inplace:RAM_Place {
+ RAM_tokens = ```
+ weight = get_slot_value(matched("inarc"), "weight")
+            print("removing", weight, "tokens from", get_name(this))
+ get_value(this) - weight
+ ```;
+ }
+ inarc:RAM_P2T (inplace -> t)
+"""
+
+# Increment outgoing
+lhs_outgoing_cs = """
+ t:RAM_Transition
+ outplace:RAM_Place {
+        RAM_tokens = `True`; # this must be present; otherwise the rewriter will try to create a new attribute rather than update the existing one
+ }
+ outarc:RAM_T2P (t -> outplace)
+"""
+rhs_outgoing_cs = """
+ t:RAM_Transition
+ outplace:RAM_Place {
+ RAM_tokens = ```
+ weight = get_slot_value(matched("outarc"), "weight")
+            print("adding", weight, "tokens to", get_name(this))
+ get_value(this) + weight
+ ```;
+ }
+ outarc:RAM_T2P (t -> outplace)
+"""
+
+# Parse all the patterns
+lhs_incoming = loader.parse_and_check(state, lhs_incoming_cs, mm_ramified, "lhs_incoming")
+rhs_incoming = loader.parse_and_check(state, rhs_incoming_cs, mm_ramified, "rhs_incoming")
+lhs_outgoing = loader.parse_and_check(state, lhs_outgoing_cs, mm_ramified, "lhs_outgoing")
+rhs_outgoing = loader.parse_and_check(state, rhs_outgoing_cs, mm_ramified, "rhs_outgoing")
+
+# Firing is really simple:
+def fire_transition(m, transition_match):
+ for match_incoming in match_od(state, m, mm, lhs_incoming, mm_ramified, pivot=transition_match):
+ rewriter.rewrite(state, rhs_incoming, mm_ramified, match_incoming, m, mm)
+ for match_outgoing in match_od(state, m, mm, lhs_outgoing, mm_ramified, pivot=transition_match):
+ rewriter.rewrite(state, rhs_outgoing, mm_ramified, match_outgoing, m, mm)
+
+def show_petri_net(m):
+ odapi = ODAPI(state, m, mm)
+ p1 = odapi.get_slot_value(odapi.get("p1"), "tokens")
+ p2 = odapi.get_slot_value(odapi.get("p2"), "tokens")
+ cp1 = odapi.get_slot_value(odapi.get("cp1"), "tokens")
+ cp2 = odapi.get_slot_value(odapi.get("cp2"), "tokens")
+ return f"""
+ t1 t2 t3
+ ███ p1 ███ p2 ███
+ ███ ███ ███
+ ███─────► ( {p1} )─────►███─────► ( {p2} )─────►███
+ ███ ███ ███
+ ▲ │ ▲ │
+ │ │ │ │
+ │ │ │ │
+ │ │ │ │
+ │ │ │ │
+ └───────( {cp1} )◄──────┘ └──────( {cp2} )◄───────┘
+
+ cp1 cp2 """
+
+# Let's see if it works:
+while len(enabled) > 0:
+ print(show_petri_net(m))
+ print("\nenabled PN transitions:", enabled)
+ to_fire = enabled[0]['t']
+ print("press ENTER to fire", to_fire)
+ input()
+ print("firing transition:", to_fire)
+ fire_transition(m, enabled[0])
+ enabled = list(find_enabled_transitions(m))
+
+# That's it!
diff --git a/util/loader.py b/util/loader.py
index 4a29d63..3b5112b 100644
--- a/util/loader.py
+++ b/util/loader.py
@@ -39,8 +39,11 @@ KINDS = ["nac", "lhs", "rhs"]
# Phony name generator that raises an error if you try to use it :)
class LHSNameGenerator:
def __call__(self, type_name):
+ if type_name == "GlobalCondition":
+ return parser.DefaultNameGenerator()(type_name)
raise Exception(f"Error: Object or link of type '{type_name}' does not have a name.\nAnonymous objects/links are not allowed in the LHS of a rule, because they can have unintended consequences. Please give all of the elements in the LHS explicit names.")
+
# load model transformation rules
def load_rules(state, get_filename, rt_mm_ramified, rule_names, check_conformance=True):
rules = {}
diff --git a/util/simulator.py b/util/simulator.py
index cdbe6a6..c967bbd 100644
--- a/util/simulator.py
+++ b/util/simulator.py
@@ -27,6 +27,8 @@ class RandomDecisionMaker(DecisionMaker):
def __call__(self, actions):
arr = [action for descr, action in actions]
+ if len(arr) == 0:
+ return
i = math.floor(self.r.random()*len(arr))
return arr[i]
@@ -91,7 +93,7 @@ class MinimalSimulator:
self._print("Start simulation")
self._print(f"Decision maker: {self.decision_maker}")
step_counter = 0
- while True:
+ while step_counter < 10:
termination_reason = self.termination_condition(model)
if termination_reason != None:
self._print(f"Termination condition satisfied.\nReason: {termination_reason}.")