From 35edb1d386b6f667ff1460eda998e90cce7fdfaf Mon Sep 17 00:00:00 2001 From: Gabe Fierro Date: Fri, 9 Feb 2024 23:20:39 -0700 Subject: [PATCH] Fixing topquadrant inference (#108) * fixing topquadrant inference to use maxiterations and skolemize to preserve bnode identities * aider: Reverted changes to the `expand` method in `brickschema/graph.py`. # Aider chat conversation: USER: I did `git reset --hard HEAD~1` to discard the last edits. ASSISTANT: Understood. Since you've performed a hard reset to undo the last commit, I will now provide the changes to revert the `expand` method and the `_expand_helper` method to their state before the commit with hash `b27e5cc`. Here are the diffs for those changes: ```diff --- brickschema/graph.py +++ brickschema/graph.py @@ ... @@ def expand( self, profile, backend=None, simplify=True, ontology_graph=None, iterative=True ): """ Expands the current graph with the inferred triples under the given entailment regime and with the given backend. Possible profiles are: - 'rdfs': runs RDFS rules - 'owlrl': runs full OWLRL reasoning - 'vbis': adds VBIS tags - 'shacl': does SHACL-AF reasoning (including tag inference, if the extension is loaded) @@ ... @@ elif profile == "owlrl": - self._inferbackend = OWLRLNaiveInferenceSession() - try: - if backend is None or backend == "reasonable": - self._inferbackend = OWLRLReasonableInferenceSession() - backend = "reasonable" - except ImportError: - warn( - "Could not load Reasonable reasoner. Needs 'reasonable' option during install." - ) - self._inferbackend = OWLRLNaiveInferenceSession() - - try: - if backend is None or backend == "allegrograph": - self._inferbackend = OWLRLAllegroInferenceSession() - backend = "allegrograph" - except (ImportError, ConnectionError): - warn( - "Could not load Allegro reasoner. Needs 'allegro' option during install." - ) - self._inferbackend = OWLRLNaiveInferenceSession() + self._inferbackend = OWLRLReasonableInferenceSession() elif profile == "vbis": self._inferbackend = VBISTagInferenceSession( brick_version=self._brick_version ) else: raise Exception(f"Invalid profile '{profile}'") - self._inferbackend.expand(self) + OWLRLNaiveInferenceSession().expand(self) if simplify: self.simplify() return self ``` This diff will revert the changes made to the `expand` method in the `brickschema/graph.py` file, effectively undoing the commit with hash `b27e5cc`. * issues with bnodes mean some of these need to use pyshacl * bump versions * handling skolemization of the data graph * remove extra bnodes -- they should all be skolemized * new skolemize function to deal with spaces in literals * Replace `infer` function to run shaclinfer multiple times until skolemized data graph stops changing in size. Then, return the deskolemized data graph. * small changes * bumping deps * add imports back * updating * limiting external loops * fix pragma handling --- brickschema/abbrmap.py | 25 ++++ brickschema/graph.py | 77 ++++++----- brickschema/persistent.py | 4 +- brickschema/topquadrant_shacl.py | 192 +++++++++++++++------------ poetry.lock | 218 ++++++++++++++++--------------- pyproject.toml | 8 +- tests/test_inference.py | 7 +- tests/test_persistent_graph.py | 2 +- 8 files changed, 299 insertions(+), 234 deletions(-) create mode 100644 brickschema/abbrmap.py diff --git a/brickschema/abbrmap.py b/brickschema/abbrmap.py new file mode 100644 index 0000000..6ea47b7 --- /dev/null +++ b/brickschema/abbrmap.py @@ -0,0 +1,25 @@ +abbrmap = { + "ahu": ["ahu"], + "avg": ["average"], + "cmd": ["command"], + "elec": ["electrical"], + "equip": ["equipment"], + "freq": ["frequency"], + "occ": ["occupied"], + "rtu": ["rtu"], + "roof": ["rooftop"], + "sp": ["setpoint"], + "spt": ["setpoint"], + "stpt": ["setpoint"], + "temp": ["temperature"], + "unocc": ["unoccupied"], + "volt": ["voltage"], + "ctl": ["control"], + "sa": ["supply", "air"], + "ea": ["exhaust", "air"], + "da": ["discharge", "air"], + "hw": ["hot", "water"], + "chw": ["chilled", "water"], + "z": ["zone"], + "zn": ["zone"], +} diff --git a/brickschema/graph.py b/brickschema/graph.py index f5e7384..6d38fba 100644 --- a/brickschema/graph.py +++ b/brickschema/graph.py @@ -90,12 +90,16 @@ def get_most_specific_class(self, classlist: List[rdflib.URIRef]): """ equivalent = set(x[0] for x in self.query(equivalent_query)) - if len(closure.intersection(classlist)) == 0 or closure.intersection(classlist).issubset(equivalent): + if len(closure.intersection(classlist)) == 0 or closure.intersection( + classlist + ).issubset(equivalent): specific.append(c) return specific - def validate(self, shape_graphs=None, default_brick_shapes=True, engine: str='pyshacl'): + def validate( + self, shape_graphs=None, default_brick_shapes=True, engine: str = 
"pyshacl" + ): """ Validates the graph using the shapes embedded w/n the graph. Optionally loads in normative Brick shapes and externally defined shapes @@ -113,7 +117,7 @@ def validate(self, shape_graphs=None, default_brick_shapes=True, engine: str='py if shape_graphs is not None and isinstance(shape_graphs, list): for sg in shape_graphs: shapes += sg - if engine == 'pyshacl': + if engine == "pyshacl": return pyshacl.validate( self, shacl_graph=shapes, @@ -122,16 +126,20 @@ def validate(self, shape_graphs=None, default_brick_shapes=True, engine: str='py abort_on_first=True, allow_warnings=True, ) - elif engine == 'topquadrant': + elif engine == "topquadrant": # check if 'java' is in the path import shutil - if shutil.which('java') is None: - raise Exception("TopQuadrant SHACL validation requires Java to be installed and in the PATH") + + if shutil.which("java") is None: + raise Exception( + "TopQuadrant SHACL validation requires Java to be installed and in the PATH" + ) from brickschema.topquadrant_shacl import validate + if shape_graphs is not None and isinstance(shape_graphs, list): for sg in shape_graphs: shapes += sg - return validate(self+shapes) + return validate(self, shapes) def serve(self, address="127.0.0.1:8080", ignore_prefixes=[]): """ @@ -233,10 +241,19 @@ def expand( # TODO: currently nothing is cached between expansions """ + og = None + if ontology_graph: + og = ontology_graph.skolemize() if "+" in profile: for prf in profile.split("+"): - self.expand(prf, backend=backend, simplify=simplify) + self.expand( + prf, + backend=backend, + simplify=simplify, + ontology_graph=og, + iterative=iterative, + ) return if profile == "brick": @@ -245,16 +262,23 @@ def expand( owlrl.DeductiveClosure(owlrl.RDFS_Semantics).expand(self) return elif profile == "shacl": - og = None - if ontology_graph: - og = ontology_graph.skolemize() - if backend == 'topquadrant': + if backend == "topquadrant": # check if 'java' is in the path import shutil - if shutil.which('java') is None: - raise Exception("TopQuadrant SHACL validation requires Java to be installed and in the PATH") + + if shutil.which("java") is None: + raise Exception( + "TopQuadrant SHACL validation requires Java to be installed and in the PATH" + ) from brickschema.topquadrant_shacl import infer - infer(self, og or rdflib.Graph()) + + # 'res' is a de-skolemized graph. We want to replace the contents + # of this graph with the de-skolemized version because topquadrant requires + # that skolemization is applied to the input graph in order to preserve + # identity of inferred subject/objects + res = infer(self, og or rdflib.Graph()) + self.remove((None, None, None)) + self += res return self valid, _, report = pyshacl.validate( data_graph=self, @@ -271,33 +295,14 @@ def expand( self._iterative_expand(og) return self elif profile == "owlrl": - self._inferbackend = OWLRLNaiveInferenceSession() - try: - if backend is None or backend == "reasonable": - self._inferbackend = OWLRLReasonableInferenceSession() - backend = "reasonable" - except ImportError: - warn( - "Could not load Reasonable reasoner. Needs 'reasonable' option during install." - ) - self._inferbackend = OWLRLNaiveInferenceSession() - - try: - if backend is None or backend == "allegrograph": - self._inferbackend = OWLRLAllegroInferenceSession() - backend = "allegrograph" - except (ImportError, ConnectionError): - warn( - "Could not load Allegro reasoner. Needs 'allegro' option during install." 
- ) - self._inferbackend = OWLRLNaiveInferenceSession() + self._inferbackend = OWLRLReasonableInferenceSession() elif profile == "vbis": self._inferbackend = VBISTagInferenceSession( brick_version=self._brick_version ) else: raise Exception(f"Invalid profile '{profile}'") - self._inferbackend.expand(self) + OWLRLNaiveInferenceSession().expand(self) if simplify: self.simplify() diff --git a/brickschema/persistent.py b/brickschema/persistent.py index 7d73179..c99f1c2 100644 --- a/brickschema/persistent.py +++ b/brickschema/persistent.py @@ -9,6 +9,7 @@ from rdflib import plugin from rdflib.store import Store from rdflib_sqlalchemy import registerplugins +from sqlalchemy import text import pickle from .graph import Graph, BrickBase @@ -93,8 +94,7 @@ def __init__(self, uri: str, *args, **kwargs): self._postcommit_hooks = OrderedDict() with self.conn() as conn: - conn.execute("PRAGMA journal_mode=WAL;") - # conn.execute("PRAGMA synchronous=OFF;") + conn.execute(text("PRAGMA journal_mode=WAL;")) conn.execute(changeset_table_defn) conn.execute(changeset_table_idx) conn.execute(redo_table_defn) diff --git a/brickschema/topquadrant_shacl.py b/brickschema/topquadrant_shacl.py index 728b48e..0678b3a 100644 --- a/brickschema/topquadrant_shacl.py +++ b/brickschema/topquadrant_shacl.py @@ -4,149 +4,177 @@ import tempfile import rdflib from rdflib import OWL, SH +from rdflib.term import BNode, URIRef, _SKOLEM_DEFAULT_AUTHORITY, rdflib_skolem_genid from pathlib import Path +from typing import Optional +from urllib.parse import urljoin -MAX_ITERATIONS = 20 +# monkeypatch BNode.skolemize with a new function +def _new_bnode_skolemize( + self, authority: Optional[str] = None, basepath: Optional[str] = None +) -> URIRef: + """Create a URIRef "skolem" representation of the BNode, in accordance + with http://www.w3.org/TR/rdf11-concepts/#section-skolemization + .. 
versionadded:: 4.0 + """ + if authority is None: + authority = _SKOLEM_DEFAULT_AUTHORITY + if basepath is None: + basepath = rdflib_skolem_genid + skolem = "%s%s" % (basepath, str(self).replace(" ", "_")) + return URIRef(urljoin(authority, skolem)) -def infer(data_graph: rdflib.Graph, ontologies: rdflib.Graph): + +BNode.skolemize = _new_bnode_skolemize +_MAX_EXTERNAL_LOOPS = 3 + + +def infer( + data_graph: rdflib.Graph, ontologies: rdflib.Graph, max_iterations: int = 100 +): # remove imports + imports = data_graph.triples((None, OWL.imports, None)) data_graph.remove((None, OWL.imports, None)) + # skolemize before inference + data_graph_skolemized = data_graph.skolemize() + # Create a temporary directory with tempfile.TemporaryDirectory() as temp_dir: temp_dir_path = Path(temp_dir) # Define the target path within the temporary directory - target_file_path = temp_dir_path / 'data.ttl' - - combined = data_graph + ontologies - combined.serialize(target_file_path, format='ttl') - - # Run inference in a loop until the size of the data_graph doesn't change or we have run at least two iterations - previous_size = 0 - current_size = len(data_graph) - iteration_count = 0 + target_file_path = temp_dir_path / "data.ttl" + (data_graph_skolemized + ontologies).serialize( + target_file_path, format="turtle" + ) # set the SHACL_HOME environment variable to point to the shacl-1.4.2 directory # so that the shaclinfer.sh script can find the shacl.jar file - env = {'SHACL_HOME': str(Path(__file__).parent / "topquadrant_shacl")} - while iteration_count < MAX_ITERATIONS or previous_size != current_size: - iteration_count += 1 - # get the shacl-1.4.2/bin/shaclinfer.sh script from brickschema.bin in this package - # using pkgutil. If using *nix, use .sh; else if on windows use .bat - if platform.system() == "Windows": - script = [str(Path(__file__).parent / "topquadrant_shacl/bin/shaclinfer.bat")] - else: - script = ["/bin/sh", str(Path(__file__).parent / "topquadrant_shacl/bin/shaclinfer.sh")] - # check if we need to use .bat + env = {"SHACL_HOME": str(Path(__file__).parent / "topquadrant_shacl")} + # get the shacl-1.4.2/bin/shaclinfer.sh script from brickschema.bin in this package + # using pkgutil. 
If using *nix, use .sh; else if on windows use .bat + if platform.system() == "Windows": + script = [ + str(Path(__file__).parent / "topquadrant_shacl/bin/shaclinfer.bat") + ] + else: + script = [ + "/bin/sh", + str(Path(__file__).parent / "topquadrant_shacl/bin/shaclinfer.sh"), + ] + # Initialize the size of the graph + previous_size = 0 + current_size = len(data_graph_skolemized) + current_iter = 0 + + # Run the shaclinfer multiple times until the skolemized data graph stops changing in size + while previous_size != current_size and current_iter < _MAX_EXTERNAL_LOOPS: + (data_graph_skolemized + ontologies).serialize( + target_file_path, format="turtle" + ) try: print(f"Running {script} -datafile {target_file_path}") - output = subprocess.check_output([*script, "-datafile", target_file_path], stderr=subprocess.STDOUT, universal_newlines=True, env=env) + output = subprocess.check_output( + [ + *script, + "-datafile", + target_file_path, + "-maxiterations", + str(max_iterations), + ], + stderr=subprocess.STDOUT, + universal_newlines=True, + env=env, + ) except subprocess.CalledProcessError as e: output = e.output # Capture the output of the failed subprocess # Write logs to a file in the temporary directory (or the desired location) inferred_file_path = temp_dir_path / "inferred.ttl" with open(inferred_file_path, "w") as f: - for l in output.splitlines(): - if '::' not in l: - f.write(f"{l}\n") + for line in output.splitlines(): + if "::" not in line: + f.write(f"{line}\n") inferred_triples = rdflib.Graph() inferred_triples.parse(inferred_file_path, format="turtle") print(f"Got {len(inferred_triples)} inferred triples") + for s, p, o in inferred_triples: + if isinstance(s, BNode) or isinstance(o, BNode): + continue + data_graph_skolemized.add((s, p, o)) - # add inferred triples to the data graph, then serialize it - data_graph += inferred_triples - combined = data_graph + ontologies - combined.serialize(target_file_path, format='ttl') - - # Update the sizes for the next iteration + # Update the size of the graph previous_size = current_size - current_size = len(data_graph) - return data_graph + current_size = len(data_graph_skolemized) + current_iter += 1 + expanded_graph = data_graph_skolemized.de_skolemize() + # add imports back in + for imp in imports: + expanded_graph.add(imp) + return expanded_graph -def validate(data_graph: rdflib.Graph): + +def validate(data_graph: rdflib.Graph, shape_graphs: rdflib.Graph): # remove imports data_graph.remove((None, OWL.imports, None)) # set the SHACL_HOME environment variable to point to the shacl-1.4.2 directory # so that the shaclinfer.sh script can find the shacl.jar file - env = {'SHACL_HOME': str(Path(__file__).parent / "topquadrant_shacl")} + env = {"SHACL_HOME": str(Path(__file__).parent / "topquadrant_shacl")} # Create a temporary directory with tempfile.TemporaryDirectory() as temp_dir: temp_dir_path = Path(temp_dir) # Define the target path within the temporary directory - target_file_path = temp_dir_path / 'data.ttl' - - data_graph.serialize(target_file_path, format='ttl') + target_file_path = temp_dir_path / "data.ttl" - # Run inference in a loop until the size of the data_graph doesn't change or we have run at least two iterations - previous_size = 0 - current_size = len(data_graph) - iteration_count = 0 - - while iteration_count < MAX_ITERATIONS or previous_size != current_size: - iteration_count += 1 - # get the shacl-1.4.2/bin/shaclinfer.sh script from brickschema.bin in this package - # using pkgutil. 
If using *nix, use .sh; else if on windows use .bat - if platform.system() == "Windows": - script = [str(Path(__file__).parent / "topquadrant_shacl/bin/shaclinfer.bat")] - else: - script = ["/bin/sh", str(Path(__file__).parent / "topquadrant_shacl/bin/shaclinfer.sh")] - # check if we need to use .bat + inferred_graph = infer(data_graph, shape_graphs) - try: - print(f"Running {script} -datafile {target_file_path}") - output = subprocess.check_output([*script, "-datafile", target_file_path], stderr=subprocess.STDOUT, universal_newlines=True, env=env) - except subprocess.CalledProcessError as e: - output = e.output # Capture the output of the failed subprocess - # Write logs to a file in the temporary directory (or the desired location) - inferred_file_path = temp_dir_path / "inferred.ttl" - with open(inferred_file_path, "w") as f: - for l in output.splitlines(): - if '::' not in l: - f.write(f"{l}\n") - inferred_triples = rdflib.Graph() - inferred_triples.parse(inferred_file_path, format="turtle") - print(f"Got {len(inferred_triples)} inferred triples") - - # add inferred triples to the data graph, then serialize it - data_graph += inferred_triples - data_graph.serialize(target_file_path, format='ttl') - - # Update the sizes for the next iteration - previous_size = current_size - current_size = len(data_graph) + inferred_graph.serialize(target_file_path, format="ttl") # get the shacl-1.4.2/bin/shaclvalidate.sh script from the same directory # as this file if platform.system() == "Windows": - script = [str(Path(__file__).parent / "topquadrant_shacl/bin/shaclvalidate.bat")] + script = [ + str(Path(__file__).parent / "topquadrant_shacl/bin/shaclvalidate.bat") + ] else: - script = ["/bin/sh", str(Path(__file__).parent / "topquadrant_shacl/bin/shaclvalidate.sh")] + script = [ + "/bin/sh", + str(Path(__file__).parent / "topquadrant_shacl/bin/shaclvalidate.sh"), + ] try: print(f"Running {script} -datafile {target_file_path}") - output = subprocess.check_output([*script, "-datafile", target_file_path], stderr=subprocess.STDOUT, universal_newlines=True, env=env) + output = subprocess.check_output( + [*script, "-datafile", target_file_path], + stderr=subprocess.STDOUT, + universal_newlines=True, + env=env, + ) except subprocess.CalledProcessError as e: output = e.output # Capture the output of the failed subprocess # Write logs to a file in the temporary directory (or the desired location) report_file_path = temp_dir_path / "report.ttl" with open(report_file_path, "w") as f: - for l in output.splitlines(): - if '::' not in l: # filter out log output - f.write(f"{l}\n") + for line in output.splitlines(): + if "::" not in line: # filter out log output + f.write(f"{line}\n") report_g = rdflib.Graph() report_g.parse(report_file_path, format="turtle") # check if there are any sh:resultSeverity sh:Violation predicate/object pairs - has_violation = len(list(report_g.subjects(predicate=SH.resultSeverity, object=SH.Violation))) - conforms = len(list(report_g.subjects(predicate=SH.conforms, object=rdflib.Literal(True)))) + has_violation = len( + list(report_g.subjects(predicate=SH.resultSeverity, object=SH.Violation)) + ) + conforms = len( + list(report_g.subjects(predicate=SH.conforms, object=rdflib.Literal(True))) + ) validates = not has_violation or conforms return validates, report_g, str(report_g.serialize(format="turtle")) diff --git a/poetry.lock b/poetry.lock index 85d56c8..9454deb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -72,13 +72,13 @@ files = [ [[package]] name = "alabaster" -version = "0.7.13" 
-description = "A configurable sidebar-enabled Sphinx theme" +version = "0.7.16" +description = "A light, configurable Sphinx theme" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] @@ -1054,13 +1054,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1171,21 +1171,21 @@ files = [ [[package]] name = "networkx" -version = "2.8.8" +version = "3.2.1" description = "Python package for creating and manipulating graphs and networks" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, - {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, + {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, + {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, ] [package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=0.982)", "pre-commit (>=2.20)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.2)", "pydata-sphinx-theme (>=0.11)", "sphinx (>=5.2)", "sphinx-gallery (>=0.11)", "texext (>=0.6.6)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.9)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] +default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nodeenv" @@ -1203,47 +1203,47 @@ setuptools = "*" [[package]] name = "numpy" -version = "1.26.2" +version = "1.26.3" description = "Fundamental package for array computing in Python" optional = true python-versions = ">=3.9" files = [ - {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, - {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, - {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, - {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, - {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, - {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, - {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, - {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, - {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, - {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, - {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] @@ -1374,13 +1374,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prettytable" -version = "2.5.0" +version = 
"3.9.0" description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "prettytable-2.5.0-py3-none-any.whl", hash = "sha256:1411c65d21dca9eaa505ba1d041bed75a6d629ae22f5109a923f4e719cfecba4"}, - {file = "prettytable-2.5.0.tar.gz", hash = "sha256:f7da57ba63d55116d65e5acb147bfdfa60dceccabf0d607d6817ee2888a05f2c"}, + {file = "prettytable-3.9.0-py3-none-any.whl", hash = "sha256:a71292ab7769a5de274b146b276ce938786f56c31cf7cea88b6f3775d82fe8c8"}, + {file = "prettytable-3.9.0.tar.gz", hash = "sha256:f4ed94803c23073a90620b201965e5dc0bccf1760b7a7eaf3158cab8aaffdf34"}, ] [package.dependencies] @@ -1643,38 +1643,42 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyshacl" -version = "0.23.0" +version = "0.25.0" description = "Python SHACL Validator" optional = false -python-versions = ">=3.7.0,<4.0.0" +python-versions = ">=3.8.1,<4.0.0" files = [ - {file = "pyshacl-0.23.0-py3-none-any.whl", hash = "sha256:5d77ab194d4333d6c2a3c409d096ac31e68f1ae0e22c3668b2a081e32256c738"}, - {file = "pyshacl-0.23.0.tar.gz", hash = "sha256:43a80bbf403176f8f37a22fbbe8d95ba5395cf755e378af1db3126e709819d2c"}, + {file = "pyshacl-0.25.0-py3-none-any.whl", hash = "sha256:716b65397486b1a306efefd018d772d3c112a3828ea4e1be27aae16aee524243"}, + {file = "pyshacl-0.25.0.tar.gz", hash = "sha256:91e87ed04ccb29aa47abfcf8a3e172d35a8831fce23a011cfbf35534ce4c940b"}, ] [package.dependencies] html5lib = ">=1.1,<2" +importlib-metadata = {version = ">6", markers = "python_version < \"3.12\""} owlrl = ">=6.0.2,<7" packaging = ">=21.3" -prettytable = ">=2.2.1,<3.0.0" -rdflib = ">=6.3.2,<7" +prettytable = [ + {version = ">=3.5.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, + {version = ">=3.7.0", markers = "python_version >= \"3.12\""}, +] +rdflib = {version = ">=6.3.2,<8.0", markers = "python_full_version >= \"3.8.1\""} [package.extras] dev-coverage = ["coverage (>6.1,!=6.1.1,<7)", "platformdirs", "pytest-cov (>=2.8.1,<3.0.0)"] -dev-lint = ["black (==23.3.0)", "platformdirs", "ruff (>=0.0.267,<0.0.268)"] -dev-type-checking = ["mypy (>=0.800,<0.900)", "mypy (>=0.900,<0.1000)", "platformdirs", "types-setuptools"] +dev-lint = ["black (==23.11.0)", "platformdirs", "ruff (>=0.1.5,<0.2.0)"] +dev-type-checking = ["mypy (>=0.812,<0.900)", "mypy (>=0.900,<0.1000)", "platformdirs", "types-setuptools"] http = ["sanic (>=22.12,<23)", "sanic-cors (==2.2.0)", "sanic-ext (>=23.3,<23.6)"] -js = ["pyduktape2 (>=0.4.3,<0.5.0)"] +js = ["pyduktape2 (>=0.4.6,<0.5.0)"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1800,13 +1804,13 @@ files = [ [[package]] name = "rdflib" -version = "6.3.2" +version = "7.0.0" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." 
optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8.1,<4.0.0" files = [ - {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, - {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, + {file = "rdflib-7.0.0-py3-none-any.whl", hash = "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd"}, + {file = "rdflib-7.0.0.tar.gz", hash = "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"}, ] [package.dependencies] @@ -2194,36 +2198,36 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.4.50" +version = "1.4.51" description = "Database Abstraction Library" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, - {file = 
"SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, - {file = "SQLAlchemy-1.4.50.tar.gz", hash = "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, + {file = 
"SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, + {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, ] [package.dependencies] @@ -2231,7 +2235,7 @@ greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platfo [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] @@ -2241,14 +2245,14 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "tabulate" @@ -2375,13 +2379,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -2556,4 +2560,4 @@ web = ["Flask"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "7105a94a967ea46de39cd982ffaa0edabacd2651ec974eeab22c58472b853c8f" +content-hash = "4c810d5910cda7eb1532da427fe1b6616afd606f893ff0ed77df2553e612a131" diff --git a/pyproject.toml b/pyproject.toml index 4005c09..f079ac2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "brickschema" -version = "0.7.3" +version = "0.7.4a9" description = "A library for working with the Brick ontology for buildings (brickschema.org)" authors = ["Gabe Fierro "] include = ["brickschema/ontologies", "tests/data", "brickschema/web", "brickschema/topquadrant_shacl"] @@ -10,10 +10,10 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.9" -rdflib = "^6.2" +rdflib = "^7.0" owlrl = "^6.0" -pyshacl = "^0.23" +pyshacl = "^0.25" requests = "^2.25.0" importlib-resources = "^3.3.0" 
click-spinner = {optional = true, version="^0.1.10"} @@ -29,7 +29,7 @@ reasonable = {optional = true, version="^0.2.2a4"} sqlalchemy = {optional = true, version="^1.4"} rdflib_sqlalchemy = {optional = true, version = "^0.5"} BAC0 = {optional = true, version = "^22.9"} -networkx = {optional = true, version="^2.6"} +networkx = {optional = true, version="^3.2"} [tool.poetry.group.dev.dependencies] flake8 = "^3.7" diff --git a/tests/test_inference.py b/tests/test_inference.py index 914ed2e..63a88d3 100644 --- a/tests/test_inference.py +++ b/tests/test_inference.py @@ -101,10 +101,11 @@ def test_most_likely_tagsets(): def test_brick_inference(): g = Graph(load_brick=True) g.load_extension("shacl_tag_inference") + data = pkgutil.get_data(__name__, "data/brick_inference_test.ttl").decode() g.load_file(source=io.StringIO(data)) - g.expand(profile="shacl", backend="topquadrant") + g.expand(profile="shacl", backend="pyshacl") r = g.query("SELECT ?x WHERE { ?x rdf:type brick:Air_Temperature_Sensor }") urls = set([str(row[0]) for row in r]) @@ -118,7 +119,9 @@ def test_brick_inference(): assert urls == real_sensors -@pytest.mark.skip("Haystack inference is currently broken. See https://github.com/gtfierro/Brick-Haystack-harmonization") +@pytest.mark.skip( + "Haystack inference is currently broken. See https://github.com/gtfierro/Brick-Haystack-harmonization" +) def test_haystack_inference(): data = pkgutil.get_data(__name__, "data/carytown.json").decode() raw_model = json.loads(data) diff --git a/tests/test_persistent_graph.py b/tests/test_persistent_graph.py index a98f350..7d2b2ab 100644 --- a/tests/test_persistent_graph.py +++ b/tests/test_persistent_graph.py @@ -13,8 +13,8 @@ def test_persistent_graph(): EX = Namespace("http://example.com/building#") pg.add((EX["a"], A, BRICK.Temperature_Sensor)) + pg.expand("shacl", backend="pyshacl") pg.serialize("/tmp/out.ttl", format="turtle") - pg.expand("shacl", backend="topquadrant") assert (EX["a"], BRICK.hasTag, TAG.Temperature) in pg res = pg.query("SELECT * WHERE { ?x a brick:Temperature_Sensor }")
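Reviewer note: the core of this patch is the new fixed-point loop in `brickschema/topquadrant_shacl.py::infer` — skolemize the data graph so blank nodes keep stable identities across repeated external `shaclinfer` runs, rerun inference until the graph stops growing (bounded by `_MAX_EXTERNAL_LOOPS`), then de-skolemize and restore the `owl:imports`. The sketch below is a minimal, hedged illustration of that loop using plain rdflib; `run_external_inference` and `infer_sketch` are hypothetical names standing in for the subprocess call to the TopQuadrant scripts, not the actual implementation.

```python
import rdflib
from rdflib import OWL

_MAX_EXTERNAL_LOOPS = 3  # mirrors the patch's bound on external reruns


def run_external_inference(graph: rdflib.Graph) -> rdflib.Graph:
    """Hypothetical stand-in for one shaclinfer run; returns inferred triples."""
    return rdflib.Graph()


def infer_sketch(data: rdflib.Graph, ontologies: rdflib.Graph) -> rdflib.Graph:
    # Set aside owl:imports so the external engine does not try to resolve them.
    imports = list(data.triples((None, OWL.imports, None)))
    data.remove((None, OWL.imports, None))

    # Skolemize so blank nodes become stable IRIs that survive round-trips
    # through the external process.
    skolemized = data.skolemize()

    previous_size, current_size, loops = 0, len(skolemized), 0
    while previous_size != current_size and loops < _MAX_EXTERNAL_LOOPS:
        inferred = run_external_inference(skolemized + ontologies)
        for s, p, o in inferred:
            # Ignore triples that still contain fresh blank nodes; only
            # skolemized (IRI) terms have a stable identity across runs.
            if isinstance(s, rdflib.BNode) or isinstance(o, rdflib.BNode):
                continue
            skolemized.add((s, p, o))
        previous_size, current_size = current_size, len(skolemized)
        loops += 1

    # Turn skolem IRIs back into blank nodes and restore the imports.
    expanded = skolemized.de_skolemize()
    for triple in imports:
        expanded.add(triple)
    return expanded
```

Compared to the earlier behavior, the outer loop now terminates on a size fixed point (or the small external-loop bound) rather than a large iteration count, and per-run recursion inside TopQuadrant is capped via the `-maxiterations` flag passed in the patch above.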