diff --git a/api/od.py b/api/od.py index 3060974..85ea368 100644 --- a/api/od.py +++ b/api/od.py @@ -7,6 +7,7 @@ from services.primitives.string_type import String from services.primitives.actioncode_type import ActionCode from uuid import UUID from typing import Optional +from util.timer import Timer NEXT_ID = 0 @@ -154,10 +155,19 @@ class ODAPI: class_name = self.get_name(self.get_type(obj)) return self.od.get_attr_link_name(class_name, attr_name) != None + def get_slots(self, obj: UUID) -> list[str]: + return [attr_name for attr_name, _ in self.od.get_slots(obj)] + def get_slot_value(self, obj: UUID, attr_name: str): slot = self.get_slot(obj, attr_name) return self.get_value(slot) + # does the given slot contain code? + # this complements `get_slot_value` which will return code as a string + def slot_has_code(self, obj: UUID, attr_name: str): + slot = self.get_slot(obj, attr_name) + return self.get_type_name(slot) == "ActionCode" + # Returns the given default value if the slot does not exist on the object. # The attribute must exist in the object's class, or an exception will be thrown. # The slot may not exist however, if the attribute is defined as 'optional' in the class. @@ -238,7 +248,6 @@ class ODAPI: self.__recompute_mappings() return obj - # internal use # Get API methods as bound functions, to pass as globals to 'eval' # Readonly version is used for: diff --git a/concrete_syntax/textual_od/parser.py b/concrete_syntax/textual_od/parser.py index 5dda904..33e7b5e 100644 --- a/concrete_syntax/textual_od/parser.py +++ b/concrete_syntax/textual_od/parser.py @@ -30,8 +30,10 @@ BOOL: "True" | "False" CODE: /`[^`]*`/ INDENTED_CODE: /```[^`]*```/ +type_name: IDENTIFIER + # name (optional) type -object: [IDENTIFIER] ":" IDENTIFIER [link_spec] ["{" slot* "}"] +object: [IDENTIFIER] ":" type_name [link_spec] ["{" slot* "}"] link_spec: "(" IDENTIFIER "->" IDENTIFIER ")" @@ -41,7 +43,8 @@ slot: IDENTIFIER "=" literal ";" parser = Lark(grammar, parser='lalr') # given a concrete syntax text string, and a meta-model, parses the CS -def parse_od(state, m_text, mm): +# Parameter 'type_transform' is useful for adding prefixes to the type names, when parsing a model and pretending it is an instance of a prefixed meta-model. +def parse_od(state, m_text, mm, type_transform=lambda type_name: type_name): tree = parser.parse(m_text) m = state.create_node() @@ -60,6 +63,13 @@ def parse_od(state, m_text, mm): def link_spec(self, el): [src, tgt] = el return (src, tgt) + + def type_name(self, el): + type_name = el[0] + if type_name in primitive_types: + return type_name + else: + return type_transform(el[0]) def slot(self, el): [attr_name, value] = el @@ -69,7 +79,10 @@ def parse_od(state, m_text, mm): [obj_name, type_name, link] = el[0:3] slots = el[3:] if state.read_dict(m, obj_name) != None: - raise Exception(f"Element '{obj_name}:{type_name}': name '{obj_name}' already in use. Object names must be unique.") + msg = f"Element '{obj_name}:{type_name}': name '{obj_name}' already in use." 
+                # raise Exception(msg + " Names must be unique")
+                print(msg + " Ignoring.")
+                return
             if obj_name == None:
                 # object/link names are optional
                 # generate a unique name if no name given
diff --git a/concrete_syntax/textual_od/renderer.py b/concrete_syntax/textual_od/renderer.py
index 922fc55..a3fc030 100644
--- a/concrete_syntax/textual_od/renderer.py
+++ b/concrete_syntax/textual_od/renderer.py
@@ -9,7 +9,7 @@ def render_od(state, m_id, mm_id, hide_names=True):
 
     m_od = od.OD(mm_id, m_id, state)
 
-    serialized = set()
+    serialized = set(["Integer", "String", "Boolean", "ActionCode"]) # assume these types always already exist
 
     def display_name(name: str):
         # object names that start with "__" are hidden
@@ -28,15 +28,17 @@ def render_od(state, m_id, mm_id, hide_names=True):
 
     for class_name, objects in m_od.get_all_objects().items():
         for object_name, object_node in objects.items():
+            if class_name == "ModelRef":
+                continue # skip ModelRefs, they break the rendered output
             output += f"\n{display_name(object_name)}:{class_name}"
             output += write_attributes(object_node)
             serialized.add(object_name)
 
-    links = m_od.get_all_links()
+    todo_links = m_od.get_all_links()
 
-    while len(links) != 0:
+    while len(todo_links) != 0:
         postponed = {}
-        for assoc_name, links in links.items():
+        for assoc_name, links in todo_links.items():
             for link_name, (link_edge, src_name, tgt_name) in links.items():
                 if link_name in serialized:
                     continue
@@ -47,6 +49,8 @@ def render_od(state, m_id, mm_id, hide_names=True):
                 # links can also have slots:
                 output += write_attributes(link_edge)
                 serialized.add(link_name)
-        links = postponed
+        if len(postponed) == len(todo_links):
+            raise Exception(f"We got stuck! Links = {postponed}")
+        todo_links = postponed
 
     return output
\ No newline at end of file
diff --git a/examples/petrinet/renderer.py b/examples/petrinet/renderer.py
index a7d4777..3cd0d64 100644
--- a/examples/petrinet/renderer.py
+++ b/examples/petrinet/renderer.py
@@ -39,5 +39,5 @@ def render_petri_net(od: ODAPI):
         src_name = od.get_name(od.get_source(arc))
         tgt_name = od.get_name(od.get_target(arc))
         dot += f"{src_name} -> {tgt_name};"
-    show_graphviz(dot, engine="circo")
+    show_graphviz(dot, engine="dot")
     return ""
diff --git a/examples/semantics/operational/port/models.py b/examples/semantics/operational/port/models.py
index 0055542..0ef84b1 100644
--- a/examples/semantics/operational/port/models.py
+++ b/examples/semantics/operational/port/models.py
@@ -192,7 +192,7 @@ port_rt_mm_cs = port_mm_cs + """
 
 port_m_cs = """
     gen:Generator
 
-    # newly arrive ships collect here
+    # newly arrived ships collect here
     waiting:Place
     c1:connection (gen -> waiting)
diff --git a/examples/semantics/translational/merged_mm.od b/examples/semantics/translational/merged_mm.od
new file mode 100644
index 0000000..f8e8c44
--- /dev/null
+++ b/examples/semantics/translational/merged_mm.od
@@ -0,0 +1,193 @@
+# Auto-generated by /home/maestro/repos/MV2/examples/semantics/translational/regenerate_mm.py
+
+# PlantUML visualization: 
https://deemz.org/plantuml/pdf/hPT1Zzem48Nl-HKMnqeWqcG1HLKFkuUg5nO9f1wHSSPiHB2HOofLxVxtsYOWTXKfByWSkCtdcNdFuqaYQjuqRFJ2JrnKzi-BLeqrl5AMZHXlsBJzRJl-_2-ZajZVXB7chJfT8QnWFvMbFPdaFMaFM2rNDHUqjjmIYgQdW5RduqOVI3LTt5_Q7CYi2SwNn1Lw2Usa3afJPexudl2Txvomx9uXppMCuTqOVJO2pKMcym2vgbfhSM3fP9A2uLaUccEh8tMrvPcCVHlI6pcRNzpXOiw-qsjhAhNlA7EZJoXpaT_N66o5XlBqFlJcdK4bySKz8xG43fNteJz8aU5M6pHyD_jG-79Zk6egMsa54yfEZwsMxjuh4fRlQhWF8k_sQwKEA88-oD7cuCePf8Uy3A2Z_asbSzYprZLnzSaW0uZvT7gREsitrJe7H3lEOA88XIQzrVWfyEtVUDpF_4fvFyxV5GZdvtXCd9CMmBdh2EAuZDWx_mW0sRbPYcO75EkV2KnPvx-eoieighBfF2ekYsjZoMCQ1L9sGB42A1OsYd-AWEm8lsJznORX2E9cCOsIPpcWh7-KmEnsPLGfPDJnwLRVg0DgDujxAu1f34lXNqVePSpOA6MJ2NFBx7ZytJsz8sohBfW6y_N8W9xwSxu0V1zLC6w0zyH_UTmEIE43tCvOCC7LwyalYuZBd2qUACID2NVERGL3wdaQBaXOXGfsKbj8ayL3cYoy9dk_NLTYMxjz55b55e-S5CIfceis_iEclsibFUI2e4xxVVqg_mC= + + +CapacityConstraint:Class +PNPlaceState:Class +WorkerSet:Class +State:Class +Stateful:Class { + abstract = True; +} +Source:Class { + abstract = True; +} +Clock:Class { + upper_cardinality = 1; + lower_cardinality = 1; +} +BerthState:Class { + constraint = ``` + errors = [] + numShips = get_slot_value(this, "numShips") + status = get_slot_value(this, "status") + if (numShips == 0) != (status == "empty"): + errors.append(f"Inconsistent: numShips = {numShips}, but status = {status}") + errors + ```; +} +Top:Class { + abstract = True; +} +Place:Class +WorkerSetState:Class +Berth:Class +Generator:Class +PNTransition:Class +PNConnectable:Class { + abstract = True; +} +Sink:Class { + abstract = True; +} +ConnectionState:Class +PlaceState:Class +PNPlace:Class +shipCapacities:GlobalConstraint { + constraint = ``` + errors = [] + for _, constr in get_all_instances("CapacityConstraint"): + cap = get_slot_value(constr, "shipCapacity") + total = 0 + place_names = [] # for debugging + for lnk in get_outgoing(constr, "capacityOf"): + place = get_target(lnk) + place_names.append(get_name(place)) + place_state = get_source(get_incoming(place, "of")[0]) + total += get_slot_value(place_state, "numShips") + if total > cap: + errors.append(f"The number of ships in places {','.join(place_names)} ({total}) exceeds the capacity ({cap}) of CapacityConstraint {get_name(constr)}.") + errors + ```; +} +operatingCapacities:GlobalConstraint { + constraint = ``` + errors = [] + for _, workersetstate in get_all_instances("WorkerSetState"): + workerset = get_target(get_outgoing(workersetstate, "of")[0]) + num_operating = len(get_outgoing(workersetstate, "isOperating")) + num_workers = get_slot_value(workerset, "numWorkers") + if num_operating > num_workers: + errors.append(f"WorkerSet {get_name(workerset)} is operating more berths ({num_operating}) than there are workers ({num_workers})") + errors + ```; +} +WorkerSet_numWorkers:AttributeLink (WorkerSet -> Integer) { + optional = False; + name = "numWorkers"; + constraint = `get_value(get_target(this)) >= 0`; +} +PlaceState_numShips:AttributeLink (PlaceState -> Integer) { + name = "numShips"; + constraint = `get_value(get_target(this)) >= 0`; + optional = False; +} +ConnectionState_moved:AttributeLink (ConnectionState -> Boolean) { + constraint = ``` + result = True + all_successors_moved = True + moved = get_value(get_target(this)) + conn_state = get_source(this) + conn = get_target(get_outgoing(conn_state, "of")[0]) + tgt_place = get_target(conn) + next_conns = get_outgoing(tgt_place, "connection") + for next_conn in next_conns: + next_conn_state = get_source(get_incoming(next_conn, "of")[0]) + if not get_slot_value(next_conn_state, "moved"): + all_successors_moved 
= False
+        if moved and not all_successors_moved:
+            result = f"Connection {get_name(conn)} played before its turn."
+        result
+    ```;
+    optional = False;
+    name = "moved";
+}
+BerthState_status:AttributeLink (BerthState -> String) {
+    constraint = ```
+        (
+            get_value(get_target(this)) in { "empty", "unserved", "served" }
+        )
+    ```;
+    optional = False;
+    name = "status";
+}
+PNPlaceState_numTokens:AttributeLink (PNPlaceState -> Integer) {
+    constraint = `"numTokens cannot be negative" if get_value(get_target(this)) < 0 else None`;
+    optional = False;
+    name = "numTokens";
+}
+Clock_time:AttributeLink (Clock -> Integer) {
+    constraint = `get_value(get_target(this)) >= 0`;
+    optional = False;
+    name = "time";
+}
+CapacityConstraint_shipCapacity:AttributeLink (CapacityConstraint -> Integer) {
+    constraint = `get_value(get_target(this)) >= 0`;
+    optional = False;
+    name = "shipCapacity";
+}
+of:Association (State -> Stateful) {
+    source_upper_cardinality = 1;
+    source_lower_cardinality = 1;
+    target_upper_cardinality = 1;
+    target_lower_cardinality = 1;
+}
+arc:Association (PNConnectable -> PNConnectable)
+canOperate:Association (WorkerSet -> Berth) {
+    target_lower_cardinality = 1;
+}
+connection:Association (Source -> Sink)
+pn_of:Association (PNPlaceState -> PNPlace) {
+    source_lower_cardinality = 1;
+    target_upper_cardinality = 1;
+    target_lower_cardinality = 1;
+    source_upper_cardinality = 1;
+}
+generic_link:Association (Top -> Top)
+isOperating:Association (WorkerSetState -> Berth) {
+    constraint = ```
+        errors = []
+
+        # get status of Berth
+        berth = get_target(this)
+        berth_state = get_source(get_incoming(berth, "of")[0])
+        status = get_slot_value(berth_state, "status")
+        if status != "unserved":
+            errors.append(f"Cannot operate {get_name(berth)} because there is no unserved ship there.")
+
+        # only operate Berths that this WorkerSet can operate
+        workerset = get_target(get_outgoing(get_source(this), "of")[0])
+        can_operate = [get_target(lnk) for lnk in get_outgoing(workerset, "canOperate")]
+        if berth not in can_operate:
+            errors.append(f"Cannot operate {get_name(berth)}.")
+
+        errors
+    ```;
+}
+capacityOf:Association (CapacityConstraint -> Place) {
+    target_lower_cardinality = 1;
+}
+:Inheritance (connection -> Stateful)
+:Inheritance (CapacityConstraint -> Top)
+:Inheritance (Sink -> Top)
+:Inheritance (Berth -> Place)
+:Inheritance (WorkerSet -> Stateful)
+:Inheritance (Place -> Source)
+:Inheritance (PlaceState -> State)
+:Inheritance (State -> Top)
+:Inheritance (Source -> Top)
+:Inheritance (Clock -> Top)
+:Inheritance (Stateful -> Top)
+:Inheritance (Place -> Stateful)
+:Inheritance (PNConnectable -> Top)
+:Inheritance (WorkerSetState -> State)
+:Inheritance (Place -> Sink)
+:Inheritance (BerthState -> PlaceState)
+:Inheritance (generic_link -> Top)
+:Inheritance (PNTransition -> PNConnectable)
+:Inheritance (ConnectionState -> State)
+:Inheritance (PNPlaceState -> Top)
+:Inheritance (Generator -> Source)
+:Inheritance (Berth -> Stateful)
+:Inheritance (PNPlace -> PNConnectable)
\ No newline at end of file
diff --git a/examples/semantics/translational/regenerate_mm.py b/examples/semantics/translational/regenerate_mm.py
new file mode 100644
index 0000000..b76eae1
--- /dev/null
+++ b/examples/semantics/translational/regenerate_mm.py
@@ -0,0 +1,63 @@
+from state.devstate import DevState
+from bootstrap.scd import bootstrap_scd
+from concrete_syntax.textual_od import renderer
+from concrete_syntax.plantuml.renderer import render_class_diagram
+from concrete_syntax.plantuml.make_url import make_url
+from api.od import ODAPI
+
+from transformation.topify.topify import Topifier
+from transformation.merger import merge_models
+
+from util import loader
+
+from examples.semantics.operational.port import models
+
+import os
+THIS_DIR = os.path.dirname(__file__)
+
+# get file contents as string
+def read_file(filename):
+    with open(THIS_DIR+'/'+filename) as file:
+        return file.read()
+
+if __name__ == "__main__":
+    state = DevState()
+    scd_mmm = bootstrap_scd(state)
+
+    # Load Petri Net meta-models
+    pn_mm_cs = read_file('../../petrinet/metamodels/mm_design.od')
+    pn_mm_rt_cs = pn_mm_cs + read_file('../../petrinet/metamodels/mm_runtime.od')
+    pn_mm = loader.parse_and_check(state, pn_mm_cs, scd_mmm, "Petri-Net Design meta-model")
+    pn_mm_rt = loader.parse_and_check(state, pn_mm_rt_cs, scd_mmm, "Petri-Net Runtime meta-model")
+
+    # Load Port meta-models
+    port_mm = loader.parse_and_check(state, models.port_mm_cs, scd_mmm, "Port-MM")
+    port_mm_rt = loader.parse_and_check(state, models.port_rt_mm_cs, scd_mmm, "Port-MM-RT")
+
+    # Merge Petri Net and Port meta-models
+    print("merging...")
+    merged_mm_rt = merge_models(state, mm=scd_mmm, models=[pn_mm_rt, port_mm_rt])
+    print("done merging")
+
+    print()
+    print("topifying... (may take a while)")
+    topifier = Topifier(state)
+    top_merged_mm_rt = topifier.topify_cd(merged_mm_rt)
+    print("done topifying")
+
+    plantuml_url = make_url(render_class_diagram(state, top_merged_mm_rt))
+
+    print()
+    print(plantuml_url)
+    print()
+
+    txt = renderer.render_od(state, top_merged_mm_rt, scd_mmm)
+
+    filename = THIS_DIR+"/merged_mm.od"
+
+    with open(filename, "w") as file:
+        file.write(f"# Auto-generated by {__file__}\n\n")
+        file.write(f"# PlantUML visualization: {plantuml_url}\n\n")
+        file.write(txt)
+
+    print("Wrote file", filename)
diff --git a/examples/semantics/translational/rules/.gitignore b/examples/semantics/translational/rules/.gitignore
new file mode 100644
index 0000000..d5877da
--- /dev/null
+++ b/examples/semantics/translational/rules/.gitignore
@@ -0,0 +1,2 @@
+# Let's not accidentally add the solution to assignment 5...
+r_*.od \ No newline at end of file diff --git a/examples/semantics/translational/runner.py b/examples/semantics/translational/runner.py new file mode 100644 index 0000000..a156785 --- /dev/null +++ b/examples/semantics/translational/runner.py @@ -0,0 +1,35 @@ +from state.devstate import DevState +from bootstrap.scd import bootstrap_scd +from concrete_syntax.textual_od import parser, renderer +from concrete_syntax.plantuml.renderer import render_object_diagram, render_class_diagram +from concrete_syntax.plantuml.make_url import make_url +from api.od import ODAPI + +from transformation.ramify import ramify +from transformation.topify.topify import Topifier +from transformation.merger import merge_models + +from util import loader + +from examples.semantics.operational.simulator import Simulator, RandomDecisionMaker, InteractiveDecisionMaker +from examples.semantics.operational.port import models +from examples.semantics.operational.port.helpers import design_to_state, state_to_design, get_time +from examples.semantics.operational.port.renderer import render_port_textual, render_port_graphviz + +import os +THIS_DIR = os.path.dirname(__file__) + +# get file contents as string +def read_file(filename): + with open(THIS_DIR+'/'+filename) as file: + return file.read() + +if __name__ == "__main__": + state = DevState() + scd_mmm = bootstrap_scd(state) + + # Load merged Petri Net and Port meta-model: + merged_mm = loader.parse_and_check(state, read_file("merged_mm.od"), scd_mmm, "merged_mm.od") + + # Load Port initial runtime model: + port_m_rt_initial = loader.parse_and_check(state, models.port_rt_m_cs, merged_mm, "Port-M-RT-initial") diff --git a/services/od.py b/services/od.py index 25116af..3b8700a 100644 --- a/services/od.py +++ b/services/od.py @@ -59,7 +59,6 @@ class OD: object_node = self.bottom.create_node() self.bottom.create_edge(self.model, object_node, name) # attach to model self.bottom.create_edge(object_node, class_node, "Morphism") # typed-by link - return object_node def get_class_of_object(self, object_name: str): @@ -191,9 +190,15 @@ class OD: # used for attribute-links and association-links def _create_link(self, link_name: str, type_edge: UUID, src_obj_node: UUID, tgt_obj_node: UUID): # print('create_link', link_name, type_edge, src_obj_node, tgt_obj_node) - + if not isinstance(src_obj_node, UUID): + raise Exception("Expected source object to be UUID") + if not isinstance(tgt_obj_node, UUID): + raise Exception("Expected target object to be UUID") # the link itself is unlabeled: link_edge = self.bottom.create_edge(src_obj_node, tgt_obj_node) + if link_edge == None: + # Why does the above call silently fail?????? + raise Exception("Could not create link") # it is only in the context of the model, that the link has a name: self.bottom.create_edge(self.model, link_edge, link_name) # add to model self.bottom.create_edge(link_edge, type_edge, "Morphism") diff --git a/state/pystate.py b/state/pystate.py index 3afc130..c28ccf4 100644 --- a/state/pystate.py +++ b/state/pystate.py @@ -33,6 +33,7 @@ class PyState(State): return new_id def create_edge(self, source: Element, target: Element) -> Optional[Edge]: + # TODO: why does this call SILENTLY fail if source/target does not exist ??????????? 
if source not in self.edges and source not in self.nodes: return None elif target not in self.edges and target not in self.nodes: diff --git a/transformation/cloner.py b/transformation/cloner.py index 79636ba..e2c4505 100644 --- a/transformation/cloner.py +++ b/transformation/cloner.py @@ -6,5 +6,4 @@ from concrete_syntax.common import indent def clone_od(state, m: UUID, mm: UUID): # cheap-ass implementation: render and parse cs = renderer.render_od(state, m, mm, hide_names=False) - # print(indent(cs, 6)) return parser.parse_od(state, cs, mm) \ No newline at end of file diff --git a/transformation/merger.py b/transformation/merger.py new file mode 100644 index 0000000..6caecb1 --- /dev/null +++ b/transformation/merger.py @@ -0,0 +1,78 @@ +from api.od import ODAPI +from uuid import UUID +from concrete_syntax.textual_od import parser, renderer +from services.scd import SCD +from util.timer import Timer + +PRIMITIVE_TYPES = set(["Integer", "String", "Boolean", "ActionCode"]) + +# Merges N models. The models must have the same meta-model. +# Care should be taken to avoid naming collisions before calling this function. +def merge_models(state, mm, models: list[UUID]): + with Timer("merge_models"): + primitive_types = { + type_name : UUID(state.read_value(state.read_dict(state.read_root(), type_name))) + for type_name in ["Integer", "String", "Boolean", "ActionCode"] + } + + merged = state.create_node() + merged_odapi = ODAPI(state, m=merged, mm=mm) + + scd_mmm = UUID(state.read_value(state.read_dict(state.read_root(), "SCD"))) + + mm_odapi = ODAPI(state, m=mm, mm=scd_mmm) + types = mm_odapi.get_all_instances("Class", include_subtypes=True) + all_objs = [] + for type_name, type_obj in types: + for model in models: + m_odapi = ODAPI(state, m=model, mm=mm) + for obj_name, obj in m_odapi.get_all_instances(type_name, include_subtypes=False): + all_objs.append((obj_name, obj, type_name, m_odapi)) + todo = all_objs + + have = {} + + mapping = {} + while len(todo) > 0: + next_round = [] + # if 'mm' is SCD, class_name will be 'Class', 'Association', ... + for tup in todo: + obj_name, obj, type_name, m_odapi = tup + prefixed_obj_name = obj_name + if obj_name in PRIMITIVE_TYPES: + if prefixed_obj_name in have: + # Don't rename primitive types. Instead, merge them. + mapping[obj] = mapping[have[prefixed_obj_name]] + continue + while prefixed_obj_name in have: + prefixed_obj_name = prefixed_obj_name + '_bis' # make name unique + if prefixed_obj_name != obj_name: + print(f"Warning: renaming {obj_name} to {prefixed_obj_name} to avoid naming collision.") + if type_name == "ModelRef": + model = state.read_value(obj) + scd = SCD(merged, state) + created_obj = scd.create_model_ref(prefixed_obj_name, model) + merged_odapi._ODAPI__recompute_mappings() # dirty!! + else: + # create node or edge + if state.is_edge(obj): + source, target = state.read_edge(obj) + if source not in mapping or target not in mapping: + next_round.append(tup) + continue # try again later... 
+ else: + created_obj = merged_odapi.create_link(prefixed_obj_name, type_name, mapping[source], mapping[target]) + else: + created_obj = merged_odapi.create_object(prefixed_obj_name, type_name) + mapping[obj] = created_obj + have[obj_name] = obj + # copy slots + for attr_name in m_odapi.get_slots(obj): + value = m_odapi.get_slot_value(obj, attr_name) + is_code = m_odapi.slot_has_code(obj, attr_name) + merged_odapi.set_slot_value(created_obj, attr_name, value, is_code=is_code) + if len(next_round) == len(todo): + raise Exception("We got stuck!") + todo = next_round + + return merged \ No newline at end of file diff --git a/transformation/rule.py b/transformation/rule.py index a8a4b2d..b0c633b 100644 --- a/transformation/rule.py +++ b/transformation/rule.py @@ -1,3 +1,5 @@ +from concrete_syntax.textual_od.renderer import render_od + import pprint from typing import Generator, Callable from uuid import UUID @@ -93,8 +95,18 @@ class RuleMatcherRewriter: e.add_note(f"while matching LHS of '{rule_name}'") raise - def exec_rule(self, m: UUID, lhs: UUID, rhs: UUID, lhs_match: dict, rule_name: str): - cloned_m = clone_od(self.state, m, self.mm) + def exec_rule(self, m: UUID, lhs: UUID, rhs: UUID, lhs_match: dict, rule_name: str, in_place=False): + if in_place: + # dangerous + cloned_m = m + else: + cloned_m = clone_od(self.state, m, self.mm) + + # print('before clone:') + # print(render_od(self.state, m, self.mm)) + # print('after clone:') + # print(render_od(self.state, cloned_m, self.mm)) + try: rhs_match = rewrite(self.state, lhs_m=lhs, diff --git a/transformation/topify/rules/r_create_top_rhs.od b/transformation/topify/rules/r_create_top_rhs.od index b424a6c..bff25c0 100644 --- a/transformation/topify/rules/r_create_top_rhs.od +++ b/transformation/topify/rules/r_create_top_rhs.od @@ -4,5 +4,8 @@ condition = ``` top = create_object("Top", "Class") set_slot_value(top, "abstract", True) + lnk = create_link("generic_link", "Association", top, top) + # lnk also inherits top: + create_link(None, "Inheritance", lnk, top) ```; } \ No newline at end of file diff --git a/transformation/topify/topify.py b/transformation/topify/topify.py index b1b020e..b83624f 100644 --- a/transformation/topify/topify.py +++ b/transformation/topify/topify.py @@ -2,41 +2,51 @@ from uuid import UUID from transformation.rule import RuleMatcherRewriter from transformation.ramify import ramify from util.loader import load_rules +from util.timer import Timer +from concrete_syntax.textual_od.renderer import render_od import os THIS_DIR = os.path.dirname(__file__) -# Given a class diagram, extend it (in-place) with a "Top"-type, i.e., an (abstract) supertype of all types. The set of instances of the "Top" is always the set of all objects in the diagram. 
-def topify_cd(state, cd: UUID): - # meta-meta-model - scd_mmm = UUID(state.read_value(state.read_dict(state.read_root(), "SCD"))) +class Topifier: + def __init__(self, state): + self.state = state + # meta-meta-model + self.scd_mmm = UUID(state.read_value(state.read_dict(state.read_root(), "SCD"))) + self.scd_mmm_ramified = ramify(state, self.scd_mmm) + self.matcher_rewriter = RuleMatcherRewriter(state, self.scd_mmm, self.scd_mmm_ramified) - scd_mmm_ramified = ramify(state, scd_mmm) + # topification is implemented via model transformation + self.rules = load_rules(state, + lambda rule_name, kind: f"{THIS_DIR}/rules/r_{rule_name}_{kind}.od", + self.scd_mmm_ramified, ["create_top", "create_inheritance"], + check_conformance=False, + ) - matcher_rewriter = RuleMatcherRewriter(state, scd_mmm, scd_mmm_ramified) - - # topification is implemented via model transformation - rules = load_rules(state, - lambda rule_name, kind: f"{THIS_DIR}/rules/r_{rule_name}_{kind}.od", - scd_mmm_ramified, ["create_top", "create_inheritance"]) + # Given a class diagram, extend it with a "Top"-type, i.e., an (abstract) supertype of all types. The set of instances of the "Top" is always the set of all objects in the diagram. + def topify_cd(self, cd: UUID): + with Timer("topify_cd"): + # 1. Execute rule 'create_top' once + rule = self.rules["create_top"] + match_set = list(self.matcher_rewriter.match_rule(cd, rule.lhs, rule.nacs, "create_top")) + if len(match_set) != 1: + raise Exception(f"Expected rule 'create_top' to match only once, instead got {len(match_set)} matches") + lhs_match = match_set[0] + cd, rhs_match = self.matcher_rewriter.exec_rule(cd, rule.lhs, rule.rhs, lhs_match, "create_top") - # 1. Execute rule 'create_top' once - rule = rules["create_top"] - match_set = list(matcher_rewriter.match_rule(cd, rule.lhs, rule.nacs, "create_top")) - if len(match_set) != 1: - raise Exception(f"Expected rule 'create_top' to match only once, instead got {len(match_set)} matches") - lhs_match = match_set[0] - cd, rhs_match = matcher_rewriter.exec_rule(cd, rule.lhs, rule.rhs, lhs_match, "create_top") + # 2. Execute rule 'create_inheritance' as many times as possible + rule = self.rules["create_inheritance"] - # 2. 
Execute rule 'create_inheritance' as many times as possible - rule = rules["create_inheritance"] - while True: - iterator = matcher_rewriter.match_rule(cd, rule.lhs, rule.nacs, "create_inheritance") - # find first match, and re-start matching - try: - lhs_match = iterator.__next__() # may throw StopIteration - cd, rhs_match = matcher_rewriter.exec_rule(cd, rule.lhs, rule.rhs, lhs_match, "create_inheritance") - except StopIteration: - break # no more matches + # for match in self.matcher_rewriter.match_rule(cd, rule.lhs, rule.nacs, "create_inheritance"): + # self.matcher_rewriter.exec_rule(cd, rule.lhs, rule.rhs, match, "create_inheritance", in_place=True) + # render_od(self.state, cd, self.scd_mmm) - return cd \ No newline at end of file + while True: + iterator = self.matcher_rewriter.match_rule(cd, rule.lhs, rule.nacs, "create_inheritance") + # find first match, and re-start matching + try: + lhs_match = iterator.__next__() # may throw StopIteration + cd, rhs_match = self.matcher_rewriter.exec_rule(cd, rule.lhs, rule.rhs, lhs_match, "create_inheritance") + except StopIteration: + break # no more matches + return cd \ No newline at end of file diff --git a/util/loader.py b/util/loader.py index 5b6bbaf..db48422 100644 --- a/util/loader.py +++ b/util/loader.py @@ -4,12 +4,13 @@ from concrete_syntax.textual_od import parser from transformation.rule import Rule # parse model and check conformance -def parse_and_check(state, m_cs, mm, descr: str, check_conformance=True): +def parse_and_check(state, m_cs, mm, descr: str, check_conformance=True, type_transform=lambda type_name: type_name): try: m = parser.parse_od( state, m_text=m_cs, mm=mm, + type_transform=type_transform, ) except Exception as e: e.add_note("While parsing model " + descr)