diff --git a/kgforge/core/archetypes/dataset_store.py b/kgforge/core/archetypes/dataset_store.py
index 4896e0f9..b00d94f1 100644
--- a/kgforge/core/archetypes/dataset_store.py
+++ b/kgforge/core/archetypes/dataset_store.py
@@ -77,8 +77,7 @@ def types(self) -> Optional[List[str]]:
return list(self.model.mappings(self.model.source, False).keys())
def search(
- self, filters: List[Union[Dict, Filter]], resolvers: Optional[List[Resolver]] = None,
- **params
+ self, resolvers: Optional[List[Resolver]], filters: List[Union[Dict, Filter]], **params
) -> Optional[List[Resource]]:
"""Search within the database.
:param map: bool
diff --git a/kgforge/core/archetypes/model.py b/kgforge/core/archetypes/model.py
index f886a514..0ffe5264 100644
--- a/kgforge/core/archetypes/model.py
+++ b/kgforge/core/archetypes/model.py
@@ -15,11 +15,12 @@
import json
from abc import ABC, abstractmethod
from pathlib import Path
-from typing import Any, Dict, List, Optional, Union, Type
+from typing import Any, Dict, List, Optional, Union, Type, Tuple
import hjson
from pandas import DataFrame
+
from kgforge.core.resource import Resource
from kgforge.core.archetypes.mapping import Mapping
from kgforge.core.commons.attributes import repr_class, sort_attrs
@@ -27,6 +28,10 @@
from kgforge.core.commons.exceptions import ConfigurationError, ValidationError
from kgforge.core.commons.execution import run
from kgforge.core.commons.imports import import_class
+from kgforge.core.commons.sparql_query_builder import SPARQLQueryBuilder
+
+DEFAULT_LIMIT = 100
+DEFAULT_OFFSET = 0
class Model(ABC):
@@ -117,6 +122,48 @@ def _template(self, type: str, only_required: bool) -> Dict:
# Mappings.
+ def sparql(
+ self, query: str,
+ debug: bool,
+ limit: int = DEFAULT_LIMIT,
+ offset: int = DEFAULT_OFFSET,
+ **params
+ ) -> List[Resource]:
+ rewrite = params.get("rewrite", True)
+
+ context_as_dict, prefixes, vocab = self.get_context_prefix_vocab()
+
+ qr = (
+ SPARQLQueryBuilder.rewrite_sparql(
+ query,
+ context_as_dict=context_as_dict,
+ prefixes=prefixes,
+ vocab=vocab
+ )
+ if self.context() is not None and rewrite
+ else query
+ )
+
+ qr = SPARQLQueryBuilder.apply_limit_and_offset_to_query(
+ query=qr,
+ limit=limit,
+ offset=offset,
+ default_limit=DEFAULT_LIMIT,
+ default_offset=DEFAULT_OFFSET
+ )
+
+ if debug:
+ SPARQLQueryBuilder.debug_query(qr)
+
+ return self._sparql(qr)
+
+ @abstractmethod
+ def _sparql(self, query: str) -> List[Resource]:
+ # POLICY Should notify of failures with exception QueryingError including a message.
+ # POLICY Resource _store_metadata should not be set (default is None).
+ # POLICY Resource _synchronized should not be set (default is False).
+ ...
+
def sources(self, pretty: bool) -> Optional[List[str]]:
sources = sorted(self._sources())
if pretty:
@@ -189,8 +236,7 @@ def _initialize_service(self, source: str, **source_config) -> Any:
context_config = source_config.pop("context", {})
context_iri = context_config.get("iri", None)
if origin == "directory":
- dirpath = Path(source)
- return self._service_from_directory(dirpath, context_iri)
+ return self._service_from_directory(Path(source), context_iri)
if origin == "url":
return self._service_from_url(source, context_iri)
if origin == "store":
@@ -215,3 +261,7 @@ def _service_from_store(
store: 'Store', context_config: Optional[dict], **source_config
) -> Any:
...
+
+ @abstractmethod
+ def get_context_prefix_vocab(self) -> Tuple[Optional[Dict], Optional[Dict], Optional[str]]:
+ ...
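The new `Model.sparql` is a template method: it rewrites prefixes from the model context (unless `rewrite=False` or no context is set), applies `LIMIT`/`OFFSET`, then delegates to the subclass hook `_sparql`. A minimal usage sketch, assuming `model` is a configured `RdfModel`:

```python
# Hedged usage sketch; `model` is assumed to be an RdfModel whose context
# binds the owl: prefix, as in the tests at the bottom of this diff.
resources = model.sparql(
    "SELECT ?id WHERE { ?id a owl:Class }",
    debug=True,   # prints the final query via SPARQLQueryBuilder.debug_query
    limit=10,     # appended as LIMIT 10 (DEFAULT_LIMIT = 100 when omitted)
    offset=0,     # DEFAULT_OFFSET
)
```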
diff --git a/kgforge/core/archetypes/read_only_store.py b/kgforge/core/archetypes/read_only_store.py
index a16c70f6..89666f36 100644
--- a/kgforge/core/archetypes/read_only_store.py
+++ b/kgforge/core/archetypes/read_only_store.py
@@ -24,7 +24,6 @@
from kgforge.core.commons.exceptions import (
DownloadingError,
)
-from kgforge.core.commons.execution import not_supported
from kgforge.core.commons.sparql_query_builder import SPARQLQueryBuilder
from kgforge.core.reshaping import collect_values
from kgforge.core.wrappings import Filter
@@ -54,16 +53,9 @@ def __init__(
def __repr__(self) -> str:
return repr_class(self)
- @staticmethod
- def _context_to_dict(context: Context):
- return {
- k: v["@id"] if isinstance(v, Dict) and "@id" in v else v
- for k, v in context.document["@context"].items()
- }
-
def get_context_prefix_vocab(self) -> Tuple[Optional[Dict], Optional[Dict], Optional[str]]:
return (
- ReadOnlyStore._context_to_dict(self.model_context().document),
+ Context.context_to_dict(self.model_context()),
self.model_context().prefixes,
self.model_context().vocab
)
diff --git a/kgforge/core/archetypes/store.py b/kgforge/core/archetypes/store.py
index 21bf9ed7..5e3f0583 100644
--- a/kgforge/core/archetypes/store.py
+++ b/kgforge/core/archetypes/store.py
@@ -18,11 +18,11 @@
from kgforge.core.archetypes.read_only_store import ReadOnlyStore, DEFAULT_LIMIT, DEFAULT_OFFSET
from kgforge.core.archetypes.model import Model
-from kgforge.core.commons import Context
from kgforge.core.resource import Resource
from kgforge.core.archetypes.mapping import Mapping
from kgforge.core.archetypes.mapper import Mapper
from kgforge.core.commons.attributes import repr_class
+from kgforge.core.commons.context import Context
from kgforge.core.commons.es_query_builder import ESQueryBuilder
from kgforge.core.commons.exceptions import (
DeprecationError,
@@ -32,7 +32,7 @@
UpdatingError,
UploadingError
)
-from kgforge.core.commons.execution import not_supported, run
+from kgforge.core.commons.execution import run
class Store(ReadOnlyStore):
@@ -292,3 +292,25 @@ def _freeze_one(self, resource: Resource) -> None:
self._freeze_one(v)
if hasattr(resource, "id"):
resource.id = self.versioned_id_template.format(x=resource)
+
+ # Utils.
+
+ @abstractmethod
+ def _initialize_service(
+ self,
+ endpoint: Optional[str],
+ bucket: Optional[str],
+ token: Optional[str],
+ searchendpoints: Optional[Dict] = None,
+ **store_config,
+ ) -> Any:
+ # POLICY Should initialize the access to the store according to its configuration.
+ pass
+
+ def rewrite_uri(self, uri: str, context: Context, **kwargs) -> str:
+ """Rewrite a given uri using the store Context
+ :param uri: a URI to rewrite.
+ :param context: a Store Context object
+ :return: str
+ """
+ pass
diff --git a/kgforge/core/commons/context.py b/kgforge/core/commons/context.py
index b9631f6f..2febae52 100644
--- a/kgforge/core/commons/context.py
+++ b/kgforge/core/commons/context.py
@@ -60,3 +60,10 @@ def is_http_iri(self):
def has_vocab(self):
return self.vocab is not None
+
+ @staticmethod
+ def context_to_dict(context: 'Context'):
+ return {
+ k: v["@id"] if isinstance(v, Dict) and "@id" in v else v
+ for k, v in context.document["@context"].items()
+ }
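`Context.context_to_dict` (relocated here from `ReadOnlyStore`) flattens a JSON-LD context: expanded term definitions collapse to their `@id`, plain string mappings pass through. A small sketch, assuming `Context` accepts an inline context document:

```python
from kgforge.core.commons.context import Context

ctx = Context({"@context": {
    "schema": "http://schema.org/",
    "name": {"@id": "http://schema.org/name"},
}})
# expanded definitions collapse to their @id; plain strings are kept as-is
assert Context.context_to_dict(ctx) == {
    "schema": "http://schema.org/",
    "name": "http://schema.org/name",
}
```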
diff --git a/kgforge/core/commons/es_query_builder.py b/kgforge/core/commons/es_query_builder.py
index cb23fc19..09c34175 100644
--- a/kgforge/core/commons/es_query_builder.py
+++ b/kgforge/core/commons/es_query_builder.py
@@ -196,6 +196,16 @@ def apply_limit_and_offset_to_query(query, limit, default_limit, offset, default
return query
+ @staticmethod
+ def apply_limit_and_offset_to_query(query, limit, default_limit, offset, default_offset):
+ # TODO should there be an Elasticsearch default limit?
+ if limit:
+ query["size"] = limit
+ if offset:
+ query["from"] = offset
+
+ return query
+
def _look_up_known_parent_paths(f, last_path, property_path, m):
if (
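The Elasticsearch variant maps `limit` to `size` and `offset` to `from` on the query body, leaving either key untouched when the value is falsy; a quick sketch:

```python
from kgforge.core.commons.es_query_builder import ESQueryBuilder

query = {"query": {"match_all": {}}}
query = ESQueryBuilder.apply_limit_and_offset_to_query(
    query, limit=20, default_limit=None, offset=40, default_offset=None
)
# the default_* arguments are currently unused here (see the TODO above)
assert query == {"query": {"match_all": {}}, "size": 20, "from": 40}
```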
diff --git a/kgforge/core/forge.py b/kgforge/core/forge.py
index 747fa98a..2370c86a 100644
--- a/kgforge/core/forge.py
+++ b/kgforge/core/forge.py
@@ -256,6 +256,14 @@ def __init__(self, configuration: Union[str, Dict], **kwargs) -> None:
# Formatters.
self._formatters: Optional[Dict[str, str]] = config.pop("Formatters", None)
+ def get_model(self) -> Model:
+ """Exposes the model."""
+ return self._model
+
+ def get_store(self) -> Store:
+ """Exposes the store."""
+ return self._store
+
@catch
def prefixes(self, pretty: bool = True) -> Optional[Dict[str, str]]:
"""
diff --git a/kgforge/specializations/models/demo_model.py b/kgforge/specializations/models/demo_model.py
index 71046878..c558ca1c 100644
--- a/kgforge/specializations/models/demo_model.py
+++ b/kgforge/specializations/models/demo_model.py
@@ -29,6 +29,12 @@
class DemoModel(Model):
"""An example to show how to implement a Model and to demonstrate how it is used."""
+ def _sparql(self, query: str) -> List[Resource]:
+ raise not_supported()
+
+ def get_context_prefix_vocab(self) -> Tuple[Optional[Dict], Optional[Dict], Optional[str]]:
+ raise not_supported()
+
# Vocabulary.
def _prefixes(self) -> Dict[str, str]:
@@ -96,7 +102,7 @@ def _validate_one(self, resource: Resource, type_: str) -> None:
# Utils.
@staticmethod
- def _service_from_directory(dirpath: Path, context_iri: str, **dir_config):
+ def _service_from_directory(dirpath: Path, context_iri: Optional[str]):
return ModelLibrary(dirpath)
@staticmethod
diff --git a/kgforge/specializations/models/rdf/collectors.py b/kgforge/specializations/models/rdf/collectors.py
index 8e3bac1b..e76f0bec 100644
--- a/kgforge/specializations/models/rdf/collectors.py
+++ b/kgforge/specializations/models/rdf/collectors.py
@@ -47,7 +47,7 @@ def __init__(self, shape: Shape) -> None:
@abstractmethod
def constraint(cls) -> URIRef:
"""Returns the Shacl constraint URI of the collector"""
- raise NotImplementedError()
+ ...
@abstractmethod
def collect(
@@ -64,7 +64,7 @@ def collect(
properties, attributes: Tuple(list,dict), the collected properties and attributes
respectively
"""
- raise NotImplementedError()
+ ...
def get_shape_target_classes(self) -> List:
"""Returns a list of target and implicit classes if any of the shape
@@ -488,3 +488,19 @@ def get_node_path(node: NodeProperties, path: URIRef, field: str):
else:
result.append(values)
return result
+
+
+ALL_COLLECTORS = [
+ AndCollector,
+ OrCollector,
+ PropertyCollector,
+ NodeCollector,
+ PropertyCollector,
+ MinCountCollector,
+ DatatypeCollector,
+ InCollector,
+ ClassCollector,
+ NodeKindCollector,
+ XoneCollector,
+ HasValueCollector
+]
diff --git a/kgforge/specializations/models/rdf/pyshacl_shape_wrapper.py b/kgforge/specializations/models/rdf/pyshacl_shape_wrapper.py
new file mode 100644
index 00000000..9a2037fa
--- /dev/null
+++ b/kgforge/specializations/models/rdf/pyshacl_shape_wrapper.py
@@ -0,0 +1,88 @@
+from pyshacl import Shape, ShapesGraph
+from rdflib import Graph, URIRef
+from pyshacl.constraints import ALL_CONSTRAINT_PARAMETERS
+
+from typing import List, Optional, Set, Tuple, Dict
+
+from kgforge.specializations.models.rdf.collectors import ALL_COLLECTORS
+
+
+ALL_COLLECTORS_MAP = {c.constraint(): c for c in ALL_COLLECTORS}
+
+
+class ShapeWrapper(Shape):
+ __slots__ = ('__dict__',)
+
+ def __init__(self, shape: Shape) -> None:
+ super().__init__(shape.sg, shape.node, shape._p, shape._path, shape.logger)
+
+ def parameters(self):
+ return (
+ p for p, v in self.sg.predicate_objects(self.node)
+ if p in ALL_CONSTRAINT_PARAMETERS
+ )
+
+ def traverse(self, predecessors: Set[URIRef]) -> Tuple[List, Dict]:
+ """ traverses the Shape SHACL properties to collect constrained properties
+
+ This method (formerly injected into the pyshacl Shape object) traverses the SHACL graph.
+ It will call a specific collector depending on the SHACL property present in the NodeShape.
+
+ Args:
+ predecessors: list of nodes that have been traversed, used to break circular
+ recursion
+
+ Returns:
+ properties, attributes: Tuple(list,dict), the collected properties and attributes
+ respectively gathered from the collectors
+ """
+
+ parameters = self.parameters()
+ properties = []
+ attributes = {}
+ done_collectors = set()
+ for param in iter(parameters):
+ if param in ALL_COLLECTORS_MAP:
+ constraint_collector = ALL_COLLECTORS_MAP[param]
+ if constraint_collector not in done_collectors:
+ c = constraint_collector(self)
+ predecessors.add(self.node)
+ props, attrs = c.collect(predecessors)
+ if attrs:
+ attributes.update(attrs)
+ if props:
+ properties.extend(props)
+ done_collectors.add(constraint_collector)
+ if predecessors:
+ predecessors.remove(self.node)
+ else:
+ # FIXME: there are some SHACL constraints that are not implemented
+ # raise IndexError(f"{param} not implemented!")
+ pass
+
+ return properties, attributes
+
+
+class ShapesGraphWrapper(ShapesGraph):
+
+ def __init__(self, graph: Graph) -> None:
+ super().__init__(graph)
+ # the following line triggers the shape loading -> see pyshacl.ShapesGraph
+ self._shapes = self.shapes
+
+ def lookup_shape_from_node(self, node: URIRef) -> Optional[ShapeWrapper]:
+ """ Overwrite function to inject the transverse function for only to requested nodes.
+
+ Args:
+ node (URIRef): The node to look up.
+
+ Returns:
+ Shape: The Shacl shape of the requested node.
+ """
+ shape: Shape = self._node_shape_cache.get(node, None)
+ if shape:
+ return ShapeWrapper(shape)
+ # if not hasattr(shape_wrapper, "traverse"):
+ # shape_wrapper.traverse = types.MethodType(traverse, shape_wrapper)
+ # return shape_wrapper
+ return None
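`ShapeWrapper.traverse` dispatches each SHACL parameter to its collector through `ALL_COLLECTORS_MAP`, and `lookup_shape_from_node` now returns a wrapper (or `None`) instead of monkey-patching `traverse` onto pyshacl shapes at lookup time. A minimal sketch, with a hypothetical shapes file and shape IRI:

```python
from rdflib import Graph, URIRef

shacl_graph = Graph().parse("shapes.ttl", format="turtle")  # hypothetical file
sg = ShapesGraphWrapper(shacl_graph)
shape = sg.lookup_shape_from_node(URIRef("http://www.example.com/PersonShape"))
if shape is not None:
    # collect constrained properties, guarding against circular shape references
    properties, attributes = shape.traverse(predecessors=set())
```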
diff --git a/kgforge/specializations/models/rdf/service.py b/kgforge/specializations/models/rdf/rdf_model_service.py
similarity index 52%
rename from kgforge/specializations/models/rdf/service.py
rename to kgforge/specializations/models/rdf/rdf_model_service.py
index 95ccec49..e0a49f9f 100644
--- a/kgforge/specializations/models/rdf/service.py
+++ b/kgforge/specializations/models/rdf/rdf_model_service.py
@@ -11,137 +11,44 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with Blue Brain Nexus Forge. If not, see <https://www.gnu.org/licenses/>.
-import types
+import json
+from abc import abstractmethod, ABC
from typing import List, Dict, Tuple, Set, Optional
-from abc import abstractmethod
-from pyshacl.constraints import ALL_CONSTRAINT_PARAMETERS
-from pyshacl.shape import Shape
-from pyshacl.shapes_graph import ShapesGraph
-from rdflib import Graph, URIRef, RDF, XSD
+from rdflib import Graph, URIRef, RDF, XSD
+from kgforge.core.commons.sparql_query_builder import SPARQLQueryBuilder
from kgforge.core.resource import Resource
from kgforge.core.commons.context import Context
from kgforge.core.commons.exceptions import ConfigurationError
from kgforge.core.conversions.rdf import as_graph
-from kgforge.specializations.models.rdf.collectors import (AndCollector, NodeCollector,
- PropertyCollector, MinCountCollector,
- DatatypeCollector, InCollector,
- ClassCollector, NodeKindCollector,
- OrCollector, XoneCollector,
- HasValueCollector)
+
from kgforge.specializations.models.rdf.node_properties import NodeProperties
from kgforge.specializations.models.rdf.utils import as_term
+from kgforge.specializations.models.rdf.pyshacl_shape_wrapper import ShapesGraphWrapper
-ALL_COLLECTORS = [
- AndCollector,
- OrCollector,
- PropertyCollector,
- NodeCollector,
- PropertyCollector,
- MinCountCollector,
- DatatypeCollector,
- InCollector,
- ClassCollector,
- NodeKindCollector,
- XoneCollector,
- HasValueCollector
-]
-ALL_COLLECTORS_MAP = {c.constraint(): c for c in ALL_COLLECTORS}
-
-
-def traverse(self, predecessors: Set[URIRef]) -> Tuple[List, Dict]:
- """ traverses the Shape SACL properties to collect constrained properties
-
- This function is injected to pyshacl Shape object in order to traverse the Shacl graph.
- It will call a specific collector depending on the SHACL property present in the NodeShape
-
- Args:
- predecessors: list of nodes that have being traversed, used to break circular
- recursion
-
- Returns:
- properties, attributes: Tuple(list,dict), the collected properties and attributes
- respectively gathered from the collectors
- """
-
- parameters = self.parameters()
- properties = []
- attributes = {}
- done_collectors = set()
- for param in iter(parameters):
- if param in ALL_COLLECTORS_MAP:
- constraint_collector = ALL_COLLECTORS_MAP[param]
- if constraint_collector not in done_collectors:
- c = constraint_collector(self)
- predecessors.add(self.node)
- props, attrs = c.collect(predecessors)
- if attrs:
- attributes.update(attrs)
- if props:
- properties.extend(props)
- done_collectors.add(constraint_collector)
- if predecessors:
- predecessors.remove(self.node)
- else:
- # FIXME: there are some SHACL constrains that are not implemented
- # raise IndexError(f"{param} not implemented!")
- pass
-
- return properties, attributes
-
-
-class ShapeWrapper(Shape):
- __slots__ = ('__dict__',)
-
- def __init__(self, shape: Shape) -> None:
- super().__init__(shape.sg, shape.node, shape._p, shape._path, shape.logger)
-
- def parameters(self):
- return (p for p, v in self.sg.predicate_objects(self.node)
- if p in ALL_CONSTRAINT_PARAMETERS)
-
-
-class ShapesGraphWrapper(ShapesGraph):
-
- def __init__(self, graph: Graph) -> None:
- super().__init__(graph)
- # the following line triggers the shape loading
- self._shapes = self.shapes
-
- def lookup_shape_from_node(self, node: URIRef) -> Shape:
- """ Overwrite function to inject the transverse function for only to requested nodes.
- Args:
- node (URIRef): The node to look up.
+class RdfModelService(ABC):
- Returns:
- Shape: The Shacl shape of the requested node.
- """
- shape = self._node_shape_cache[node]
- if shape:
- shape_wrapper = ShapeWrapper(self._node_shape_cache[node])
- if not hasattr(shape_wrapper, "traverse"):
- shape_wrapper.traverse = types.MethodType(traverse, shape_wrapper)
- return shape_wrapper
- return shape
-
-
-class RdfService:
-
- def __init__(self, graph: Graph, context_iri: Optional[str] = None) -> None:
+ def __init__(self, context_iri: Optional[str] = None):
if context_iri is None:
raise ConfigurationError("RdfModel requires a context")
- self._graph = graph
+
+ self._graph, self.shape_to_source, self.class_to_shape = self._build_shapes_map()
+ self._shapes_graph = ShapesGraphWrapper(self._graph)
+
self._context_cache = {}
- self.classes_to_shapes = self._build_shapes_map()
- resolved_context = self.resolve_context(context_iri)
- self.context = Context(resolved_context, context_iri)
- self.types_to_shapes: Dict = self._build_types_to_shapes()
- def schema_source_id(self, schema_iri: str) -> str:
- # POLICY Should return the id of the resource containing the schema
- raise NotImplementedError()
+ self.context = Context(self.resolve_context(context_iri), context_iri)
+ self.types_to_shapes: Dict[str, URIRef] = self._build_types_to_shapes()
+
+ def get_shape_source(self, schema_iri: URIRef) -> str:
+ return self.shape_to_source[schema_iri]
+
+ def sparql(self, query: str) -> List[Resource]:
+ e = self._graph.query(query)
+ results = json.loads(e.serialize(format="json"))
+ return SPARQLQueryBuilder.build_resource_from_select_query(results["results"]["bindings"])
@abstractmethod
def materialize(self, iri: URIRef) -> NodeProperties:
@@ -153,56 +60,60 @@ def materialize(self, iri: URIRef) -> NodeProperties:
Returns:
A NodeProperty object with the collected properties
"""
- raise NotImplementedError()
+ ...
def validate(self, resource: Resource, type_: str):
- try:
- if isinstance(resource.type, list) and type_ is None:
- raise ValueError(
- "Resource has list of types as attribute and type_ parameter is not specified. "
- "Please provide a type_ parameter to validate against it."
- )
- if type_ is None:
- shape_iri = self.types_to_shapes[resource.type]
- else:
- shape_iri = self.types_to_shapes[type_]
- except AttributeError as exc:
- raise TypeError("Resource requires a type attribute") from exc
+
+ if "type" not in resource.__dict__:
+ raise TypeError("Resource requires a type attribute")
+
+ if isinstance(resource.type, list) and type_ is None:
+ raise ValueError(
+ "Resource has list of types as attribute and type_ parameter is not specified. "
+ "Please provide a type_ parameter to validate against it."
+ )
+
+ shape_iri = self.types_to_shapes.get(resource.type if type_ is None else type_, None)
+
+ if shape_iri is None:
+ raise ValueError(f"Unknown type {type_}")
data_graph = as_graph(resource, False, self.context, None, None)
+
return self._validate(shape_iri, data_graph)
@abstractmethod
def _validate(self, iri: str, data_graph: Graph) -> Tuple[bool, Graph, str]:
- raise NotImplementedError()
+ ...
@abstractmethod
def resolve_context(self, iri: str) -> Dict:
"""For a given IRI return its resolved context recursively"""
- raise NotImplementedError()
+ ...
@abstractmethod
def generate_context(self) -> Dict:
"""Generates a JSON-LD context with the classes and terms present in the SHACL graph."""
- raise NotImplementedError()
+ ...
@abstractmethod
- def _build_shapes_map(self) -> Dict:
- """Queries the source and returns a map of owl:Class to sh:NodeShape"""
- raise NotImplementedError()
+ def _build_shapes_map(self) -> Tuple[Graph, Dict[URIRef, str], Dict[str, URIRef]]:
+ ...
- def _build_types_to_shapes(self):
+ def _build_types_to_shapes(self) -> Dict[str, URIRef]:
"""Iterates the classes_to_shapes dictionary to create a term to shape dictionary filtering
the terms available in the context """
types_to_shapes: Dict = {}
- for k, v in self.classes_to_shapes.items():
+ for k, v in self.class_to_shape.items():
term = self.context.find_term(str(k))
if term:
- key = term.name
if term.name not in types_to_shapes:
types_to_shapes[term.name] = v
else:
- print("WARN: duplicated term", key, k, [key], v)
+ print("WARN: duplicated term", term.name, k, [term.name], v)
+ else:
+ print(f"WARN: missing term: {str(k)} in context")
+
return types_to_shapes
def _generate_context(self) -> Dict:
@@ -249,7 +160,7 @@ def traverse_properties(properties) -> Tuple[Dict, Dict]:
return l_prefixes, l_terms
target_classes = []
- for k in self.classes_to_shapes.keys():
+ for k in self.class_to_shape.keys():
key = as_term(k)
if key not in target_classes:
target_classes.append(key)
@@ -257,7 +168,7 @@ def traverse_properties(properties) -> Tuple[Dict, Dict]:
# TODO: should this raise an error?
print("duplicated term", key, k)
- for type_, shape in self.classes_to_shapes.items():
+ for type_, shape in self.class_to_shape.items():
t_prefix, t_namespace, t_name = self._graph.compute_qname(type_)
prefixes.update({t_prefix: str(t_namespace)})
types_.update({t_name: {"@id": ":".join((t_prefix, t_name))}})
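`RdfModelService.sparql` evaluates the query on the in-memory graph, serializes the result as standard SPARQL JSON, and rebuilds `Resource` objects from the bindings. The rdflib round-trip it relies on, sketched in isolation:

```python
import json
from rdflib import Graph

g = Graph().parse("tests/data/shacl-model/commons/ontology-1.ttl", format="turtle")
result = g.query("SELECT ?id WHERE { ?id a <http://www.w3.org/2002/07/owl#Class> }")
bindings = json.loads(result.serialize(format="json"))["results"]["bindings"]
# each binding looks like {"id": {"type": "uri", "value": "https://..."}};
# SPARQLQueryBuilder.build_resource_from_select_query turns them into Resources
```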
diff --git a/kgforge/specializations/models/rdf/directory_service.py b/kgforge/specializations/models/rdf/rdf_model_service_from_directory.py
similarity index 51%
rename from kgforge/specializations/models/rdf/directory_service.py
rename to kgforge/specializations/models/rdf/rdf_model_service_from_directory.py
index 4452f24a..28fe5e1a 100644
--- a/kgforge/specializations/models/rdf/directory_service.py
+++ b/kgforge/specializations/models/rdf/rdf_model_service_from_directory.py
@@ -21,22 +21,17 @@
from kgforge.core.commons.context import Context
from kgforge.specializations.models.rdf.node_properties import NodeProperties
-from kgforge.specializations.models.rdf.service import RdfService, ShapesGraphWrapper
+from kgforge.specializations.models.rdf.rdf_model_service import RdfModelService
-class DirectoryService(RdfService):
+class RdfModelServiceFromDirectory(RdfModelService):
- def __init__(self, dirpath: Path, context_iri: str) -> None:
- self._graph = load_rdf_files(dirpath)
- self._sg = ShapesGraphWrapper(self._graph)
- super().__init__(self._graph, context_iri)
-
- def schema_source_id(self, schema_iri: str) -> str:
- # FIXME should return the file path where the schema is in
- return schema_iri
+ def __init__(self, dir_path: Path, context_iri: str) -> None:
+ self.dir_path = dir_path
+ super().__init__(context_iri=context_iri)
def materialize(self, iri: URIRef) -> NodeProperties:
- sh = self._sg.lookup_shape_from_node(iri)
+ sh = self._shapes_graph.lookup_shape_from_node(iri)
predecessors = set()
props, attrs = sh.traverse(predecessors)
if props:
@@ -61,25 +56,54 @@ def resolve_context(self, iri: str) -> Dict:
def generate_context(self) -> Dict:
return self._generate_context()
- def _build_shapes_map(self) -> Dict:
+ def _build_shapes_map(self) -> Tuple[Graph, Dict[URIRef, str], Dict[str, URIRef]]:
+
query = """
- PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
- PREFIX sh: <http://www.w3.org/ns/shacl#>
- SELECT ?type ?shape WHERE {
- { ?shape sh:targetClass ?type .}
- UNION {
- SELECT (?shape as ?type) ?shape WHERE {
- ?shape a sh:NodeShape .
- ?shape a rdfs:Class
- }
- }
- } ORDER BY ?type"""
- res = self._graph.query(query)
- return {row["type"]: row["shape"] for row in res}
-
-
-def load_rdf_files(path: Path) -> Graph:
- memory_graph = Graph()
+ PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+ PREFIX sh: <http://www.w3.org/ns/shacl#>
+ SELECT ?type ?shape WHERE {
+ { ?shape sh:targetClass ?type .}
+ UNION {
+ SELECT (?shape as ?type) ?shape WHERE {
+ ?shape a sh:NodeShape .
+ ?shape a rdfs:Class
+ }
+ }
+ } ORDER BY ?type
+ """
+
+ class_to_shape: Dict[str, URIRef] = {}
+ shape_to_file: Dict[URIRef, str] = {}
+ graph = Graph()
+
+ extensions = [".ttl", ".n3", ".json", ".rdf"]
+ for f in self.dir_path.rglob(os.path.join("*.*")):
+ graph_i = Graph()
+ if f.suffix in extensions:
+ file_format = guess_format(f.name)
+ if file_format is None:
+ file_format = "json-ld"
+ graph_i.parse(f.as_posix(), format=file_format)
+
+ res = graph_i.query(query)
+
+ class_to_shape_i = dict(
+ (row["type"], URIRef(row["shape"]))
+ for row in res
+ )
+ class_to_shape.update(class_to_shape_i)
+
+ shape_to_file.update(dict(
+ (e, f.as_posix())
+ for e in class_to_shape_i.values()
+ ))
+
+ graph += graph_i
+
+ return graph, shape_to_file, class_to_shape
+
+
+def load_rdf_files_into_graph(path: Path, memory_graph: Graph) -> Graph:
extensions = [".ttl", ".n3", ".json", ".rdf"]
for f in path.rglob(os.path.join("*.*")):
if f.suffix in extensions:
@@ -87,4 +111,5 @@ def load_rdf_files(path: Path) -> Graph:
if file_format is None:
file_format = "json-ld"
memory_graph.parse(f.as_posix(), format=file_format)
+
return memory_graph
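Parsing each file into its own graph before merging is what lets the directory service record, per shape, the file it came from; the three values returned by `_build_shapes_map` feed the shared `RdfModelService` constructor. A hedged sketch of the resulting lookups, assuming `service` is a constructed `RdfModelServiceFromDirectory` over the test data:

```python
from rdflib import URIRef

# class -> shape IRI (keys are the URIRefs bound to ?type in the query above)
shape = service.class_to_shape[URIRef("http://www.example.com/Person")]
# shape IRI -> source file, via the base-class accessor
assert service.get_shape_source(shape).endswith("shapes-1.json")
```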
diff --git a/kgforge/specializations/models/rdf/store_service.py b/kgforge/specializations/models/rdf/rdf_model_service_from_store.py
similarity index 72%
rename from kgforge/specializations/models/rdf/store_service.py
rename to kgforge/specializations/models/rdf/rdf_model_service_from_store.py
index 86a6446c..e90801f6 100644
--- a/kgforge/specializations/models/rdf/store_service.py
+++ b/kgforge/specializations/models/rdf/rdf_model_service_from_store.py
@@ -22,11 +22,13 @@
from kgforge.core.conversions.rdf import as_jsonld
from kgforge.core.archetypes.store import Store
from kgforge.specializations.models.rdf.node_properties import NodeProperties
-from kgforge.specializations.models.rdf.service import RdfService, ShapesGraphWrapper
+from kgforge.specializations.models.rdf.pyshacl_shape_wrapper import ShapesGraphWrapper, \
+ ShapeWrapper
+from kgforge.specializations.models.rdf.rdf_model_service import RdfModelService
from kgforge.specializations.stores.nexus import Service
-class StoreService(RdfService):
+class RdfModelServiceFromStore(RdfModelService):
def __init__(self, default_store: Store, context_iri: Optional[str] = None,
context_store: Optional[Store] = None) -> None:
@@ -34,21 +36,20 @@ def __init__(self, default_store: Store, context_iri: Optional[str] = None,
self.default_store = default_store
self.context_store = context_store or default_store
# FIXME: define a store independent strategy
- self.NXV = Namespace(self.default_store.service.namespace) if hasattr(self.default_store.service, "namespace") \
+ self.NXV = Namespace(self.default_store.service.namespace) \
+ if hasattr(self.default_store.service, "namespace") \
else Namespace(Service.NEXUS_NAMESPACE_FALLBACK)
- self.store_metadata_iri = self.default_store.service.store_context if hasattr(self.default_store.service, "store_context") \
+
+ self.store_metadata_iri = self.default_store.service.store_context \
+ if hasattr(self.default_store.service, "store_context") \
else Namespace(Service.NEXUS_CONTEXT_FALLBACK)
- self._shapes_to_resources: Dict
+
self._imported = []
- self._graph = Graph()
- self._sg = ShapesGraphWrapper(self._graph)
- super().__init__(self._graph, context_iri)
- def schema_source_id(self, schema_iri: str) -> str:
- return self._shapes_to_resources[schema_iri]
+ super().__init__(context_iri=context_iri)
def materialize(self, iri: URIRef) -> NodeProperties:
- shape = self._type_shape(iri)
+ shape: ShapeWrapper = self._load_and_get_type_shape(iri)
predecessors = set()
props, attrs = shape.traverse(predecessors)
if props:
@@ -57,24 +58,22 @@ def materialize(self, iri: URIRef) -> NodeProperties:
def _validate(self, iri: str, data_graph: Graph) -> Tuple[bool, Graph, str]:
# _type_shape will make sure all the shapes for this type are in the graph
- self._type_shape(iri)
+ self._load_and_get_type_shape(URIRef(iri))
return validate(data_graph, shacl_graph=self._graph)
def resolve_context(self, iri: str) -> Dict:
- if iri in self._context_cache:
- return self._context_cache[iri]
- document = self.recursive_resolve(iri)
- self._context_cache.update({iri: document})
- return document
+ if iri not in self._context_cache:
+ self._context_cache[iri] = self.recursive_resolve(iri)
+
+ return self._context_cache[iri]
def generate_context(self) -> Dict:
- for v in self._shapes_to_resources.values():
- self._load_shape(v)
- # reloads the shapes graph
- self._sg = ShapesGraphWrapper(self._graph)
+ for v in self.shape_to_source.values():
+ self._load_shape_and_reload_shapes_graph(v)
+
return self._generate_context()
- def _build_shapes_map(self) -> Dict:
+ def _build_shapes_map(self) -> Tuple[Graph, Dict[URIRef, str], Dict[str, URIRef]]:
query = f"""
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX sh: <http://www.w3.org/ns/shacl#>
@@ -89,22 +88,24 @@ def _build_shapes_map(self) -> Dict:
}}
}}
}} ORDER BY ?type"""
+
# make sure to get all types
limit = 100
offset = 0
count = limit
- class_to_shapes = {}
- shape_resource = {}
+ class_to_shape: Dict[str, URIRef] = {}
+ shape_to_resource: Dict[URIRef, str] = {}
+
while count == limit:
resources = self.context_store.sparql(query, debug=False, limit=limit, offset=offset)
for r in resources:
shape_uri = URIRef(r.shape)
- class_to_shapes[r.type] = shape_uri
- shape_resource[shape_uri] = URIRef(r.resource_id)
+ class_to_shape[r.type] = shape_uri
+ shape_to_resource[shape_uri] = URIRef(r.resource_id)
count = len(resources)
- offset += limit
- self._shapes_to_resources = shape_resource
- return class_to_shapes
+ offset += count
+
+ return Graph(), shape_to_resource, class_to_shape
def recursive_resolve(self, context: Union[Dict, List, str]) -> Dict:
document = {}
@@ -113,10 +114,12 @@ def recursive_resolve(self, context: Union[Dict, List, str]) -> Dict:
context.remove(self.store_metadata_iri)
if hasattr(self.default_store.service, "store_local_context") and\
self.default_store.service.store_local_context in context:
+
context.remove(self.default_store.service.store_local_context)
for x in context:
document.update(self.recursive_resolve(x))
elif isinstance(context, str):
+
try:
local_only = not self.default_store == self.context_store
doc = self.default_store.service.resolve_context(context, local_only=local_only)
@@ -125,12 +128,13 @@ def recursive_resolve(self, context: Union[Dict, List, str]) -> Dict:
doc = self.context_store.service.resolve_context(context, local_only=False)
except ValueError as e:
raise e
+
document.update(self.recursive_resolve(doc))
elif isinstance(context, dict):
document.update(context)
return document
- def _load_shape(self, resource_id):
+ def _load_shape(self, resource_id: str):
if resource_id not in self._imported:
try:
shape = self.context_store.retrieve(resource_id, version=None, cross_bucket=False)
@@ -139,8 +143,11 @@ def _load_shape(self, resource_id):
# failed, don't try to load again
self._imported.append(resource_id)
else:
- json_dict = as_jsonld(shape, form="compacted", store_metadata=False, model_context=None,
- metadata_context=None, context_resolver=self.context_store.service.resolve_context)
+ json_dict = as_jsonld(
+ shape, form="compacted", store_metadata=False, model_context=None,
+ metadata_context=None,
+ context_resolver=self.context_store.service.resolve_context
+ )
# this double conversion was due blank nodes were not "regenerated" with json-ld
temp_graph = Graph().parse(data=json.dumps(json_dict), format="json-ld")
self._graph.parse(data=temp_graph.serialize(format="n3"), format="n3")
@@ -149,12 +156,15 @@ def _load_shape(self, resource_id):
for dependency in shape.imports:
self._load_shape(self.context.expand(dependency))
- def _type_shape(self, iri: URIRef):
+ def _load_and_get_type_shape(self, iri: URIRef) -> ShapeWrapper:
try:
- shape = self._sg.lookup_shape_from_node(iri)
+ return self._shapes_graph.lookup_shape_from_node(iri)
except KeyError:
- self._load_shape(self._shapes_to_resources[iri])
- # reloads the shapes graph
- self._sg = ShapesGraphWrapper(self._graph)
- shape = self._sg.lookup_shape_from_node(iri)
- return shape
+ shape_resource_id: str = self.shape_to_source[iri]
+ self._load_shape_and_reload_shapes_graph(shape_resource_id)
+ return self._shapes_graph.lookup_shape_from_node(iri)
+
+ def _load_shape_and_reload_shapes_graph(self, resource_id: str):
+ self._load_shape(resource_id)
+ # reloads the shapes graph
+ self._shapes_graph = ShapesGraphWrapper(self._graph)
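`_build_shapes_map` now pages through the store until a page comes back short, advancing the offset by the number of rows actually returned rather than by the limit. The loop pattern in isolation (hedged sketch; `store` and `query` are assumed):

```python
limit, offset = 100, 0
count = limit
rows = []
while count == limit:  # a short (or empty) page means the end was reached
    page = store.sparql(query, debug=False, limit=limit, offset=offset)
    rows.extend(page)
    count = len(page)
    offset += count    # advance by what was actually returned
```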
diff --git a/kgforge/specializations/models/rdf_model.py b/kgforge/specializations/models/rdf_model.py
index edf40dd5..d5606bf5 100644
--- a/kgforge/specializations/models/rdf_model.py
+++ b/kgforge/specializations/models/rdf_model.py
@@ -14,7 +14,7 @@
import datetime
import re
from pathlib import Path
-from typing import Dict, List, Callable, Optional, Any, Union
+from typing import Dict, List, Callable, Optional, Any, Union, Tuple
from pyshacl.consts import SH
from rdflib import URIRef, Literal
@@ -29,9 +29,9 @@
from kgforge.core.commons.exceptions import ValidationError
from kgforge.core.commons.execution import run, not_supported
from kgforge.specializations.models.rdf.collectors import NodeProperties
-from kgforge.specializations.models.rdf.directory_service import DirectoryService
-from kgforge.specializations.models.rdf.service import RdfService
-from kgforge.specializations.models.rdf.store_service import StoreService
+from kgforge.specializations.models.rdf.rdf_model_service_from_directory import RdfModelServiceFromDirectory
+from kgforge.specializations.models.rdf.rdf_model_service import RdfModelService
+from kgforge.specializations.models.rdf.rdf_model_service_from_store import RdfModelServiceFromStore
from kgforge.specializations.models.rdf.utils import as_term
DEFAULT_VALUE = {
@@ -67,6 +67,13 @@
class RdfModel(Model):
"""Specialization of Model that follows SHACL shapes"""
+ def get_context_prefix_vocab(self) -> Tuple[Optional[Dict], Optional[Dict], Optional[str]]:
+ return (
+ Context.context_to_dict(self.context()),
+ self.context().prefixes,
+ self.context().vocab
+ )
+
# Vocabulary.
def _prefixes(self) -> Dict[str, str]:
@@ -86,17 +93,26 @@ def _generate_context(self) -> Context:
if document:
return Context(document)
+ def _sparql(self, query) -> List[Resource]:
+ return self.service.sparql(query)
+
# Templates.
def _template(self, type: str, only_required: bool) -> Dict:
- try:
- uri = self.service.types_to_shapes[type]
- except KeyError as exc:
- raise ValueError("type '" + type + "' not found in " + self.source) from exc
+ uri = self.get_shape_from_type(type)
node_properties = self.service.materialize(uri)
dictionary = parse_attributes(node_properties, only_required, None)
return dictionary
+ def get_shape_from_type(self, type: str) -> URIRef:
+ if type not in self.service.types_to_shapes:
+ raise ValueError(f"Type {type} not found")
+ return self.service.types_to_shapes[type]
+
+ def schema_source(self, type: str) -> str:
+ shape_iri: URIRef = self.get_shape_from_type(type)
+ return self.service.get_shape_source(shape_iri)
+
# Validation.
def schema_id(self, type: str) -> str:
@@ -133,8 +149,8 @@ def _validate_one(self, resource: Resource, type_: str) -> None:
# Utils.
@staticmethod
- def _service_from_directory(dirpath: Path, context_iri: str, **dir_config) -> RdfService:
- return DirectoryService(dirpath, context_iri)
+ def _service_from_directory(dirpath: Path, context_iri: str) -> RdfModelService:
+ return RdfModelServiceFromDirectory(dirpath, context_iri)
@staticmethod
def _service_from_store(store: Callable, context_config: Optional[Dict], **source_config) -> Any:
@@ -152,16 +168,17 @@ def _service_from_store(store: Callable, context_config: Optional[Dict], **sourc
source_config.pop("endpoint", None)
source_config.pop("token", None)
source_config.pop("bucket", None)
+
context_store: Store = store(
endpoint=context_endpoint, bucket=context_bucket, token=context_token,
**source_config
)
# FIXME: define a store independent StoreService
- service = StoreService(default_store, context_iri, context_store)
+ service = RdfModelServiceFromStore(default_store, context_iri, context_store)
else:
- service = StoreService(default_store, context_iri, None)
+ service = RdfModelServiceFromStore(default_store, context_iri, None)
else:
- service = StoreService(default_store)
+ service = RdfModelServiceFromStore(default_store)
return service
@@ -198,7 +215,8 @@ def parse_attributes(node: NodeProperties, only_required: bool,
return attributes
-def parse_properties(items: List[NodeProperties], only_required: bool, inherited_constraint: str) -> Dict:
+def parse_properties(items: List[NodeProperties], only_required: bool,
+ inherited_constraint: str) -> Dict:
props = {}
for item in items:
props.update(parse_attributes(item, only_required, inherited_constraint))
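`schema_source` composes the two new lookups: type name to shape IRI (`get_shape_from_type`, which raises `ValueError` on unknown types) and shape IRI to its origin via `service.get_shape_source` (a file path for the directory origin, a resource id for the store origin). Usage sketch mirroring the updated tests:

```python
shape_iri = rdf_model.get_shape_from_type("Person")  # URIRef of PersonShape
source = rdf_model.schema_source("Person")           # e.g. .../shapes-1.json
```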
diff --git a/kgforge/specializations/stores/bluebrain_nexus.py b/kgforge/specializations/stores/bluebrain_nexus.py
index f68396d3..e87c1993 100644
--- a/kgforge/specializations/stores/bluebrain_nexus.py
+++ b/kgforge/specializations/stores/bluebrain_nexus.py
@@ -32,15 +32,16 @@
from aiohttp import ClientSession, MultipartWriter
from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE
-from kgforge.core.commons.dictionaries import update_dict
-from kgforge.core.commons.es_query_builder import ESQueryBuilder
-from kgforge.core.commons.sparql_query_builder import SPARQLQueryBuilder
from kgforge.core.resource import Resource
from kgforge.core.archetypes.model import Model
from kgforge.core.archetypes.store import Store
from kgforge.core.archetypes.mapping import Mapping
from kgforge.core.archetypes.mapper import Mapper
from kgforge.core.archetypes.resolver import Resolver
+from kgforge.core.archetypes.store import DEFAULT_LIMIT
+from kgforge.core.commons.dictionaries import update_dict
+from kgforge.core.commons.es_query_builder import ESQueryBuilder
+from kgforge.core.commons.sparql_query_builder import SPARQLQueryBuilder
from kgforge.core.commons.actions import LazyAction
from kgforge.core.commons.context import Context
from kgforge.core.commons.exceptions import (
@@ -565,7 +566,7 @@ def _update_many(self, resources: List[Resource], schema_id: str) -> None:
)
def _update_one(self, resource: Resource, schema_id: str) -> None:
- context = self.model_context() or self.context
+ context = self.model_context or self.context
data = as_jsonld(
resource,
"compacted",
@@ -686,15 +687,14 @@ def _deprecate_one(self, resource: Resource) -> None:
# Querying.
def search(
- self, filters: List[Union[Dict, Filter]], resolvers: Optional[List[Resolver]],
- **params
+ self, resolvers: Optional[List[Resolver]], filters: List[Union[Dict, Filter]], **params
) -> List[Resource]:
if self.model_context() is None:
raise ValueError("context model missing")
debug = params.get("debug", False)
- limit = params.get("limit", 100)
+ limit = params.get("limit", DEFAULT_LIMIT)
offset = params.get("offset", None)
deprecated = params.get("deprecated", False)
cross_bucket = params.get("cross_bucket", False)
@@ -706,13 +706,14 @@ def search(
search_endpoint = params.get(
"search_endpoint", self.service.sparql_endpoint["type"]
)
- if search_endpoint not in [
- self.service.sparql_endpoint["type"],
- self.service.elastic_endpoint["type"],
- ]:
+
+ supported_search_endpoints = [
+ self.service.sparql_endpoint["type"], self.service.elastic_endpoint["type"],
+ ]
+ if search_endpoint not in supported_search_endpoints:
raise ValueError(
- f"The provided search_endpoint value '{search_endpoint}' is not supported. Supported "
- f"search_endpoint values are: '{self.service.sparql_endpoint['type'], self.service.elastic_endpoint['type']}'"
+ f"The provided search_endpoint value '{search_endpoint}' is not supported. "
+ f"Supported search_endpoint values are: {supported_search_endpoints}"
)
if "filters" in params:
raise ValueError(
@@ -876,9 +877,9 @@ def reformat_contexts(model_context: Context, metadata_context: Optional[Context
ctx = {}
if metadata_context and metadata_context.document:
- ctx.update(BlueBrainNexus._context_to_dict(metadata_context))
+ ctx.update(Context.context_to_dict(metadata_context))
- ctx.update(BlueBrainNexus._context_to_dict(model_context))
+ ctx.update(Context.context_to_dict(model_context))
prefixes = model_context.prefixes
@@ -1046,7 +1047,7 @@ def _freeze_many(self, resources: List[Resource]) -> None:
raise not_supported()
-def _create_select_query(vars_, statements, distinct, search_in_graph):
+def _create_select_query(vars_, statements, distinct: bool, search_in_graph: bool):
where_clauses = (
f"{{ Graph ?g {{{statements}}}}}" if search_in_graph else f"{{{statements}}}"
)
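Note the reordered signature: `search` now takes `resolvers` before `filters` across implementations (see `DatasetStore` above and `DemoStore` below). Keyword arguments remain the safe way to call it; a hedged sketch:

```python
resources = store.search(
    resolvers=None,
    filters=[{"type": "Person"}],  # dicts or Filter objects are both accepted
    limit=DEFAULT_LIMIT,           # the shared default instead of a literal 100
    search_endpoint="sparql",      # must be one of the supported endpoint types
)
```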
diff --git a/kgforge/specializations/stores/demo_store.py b/kgforge/specializations/stores/demo_store.py
index d2a9805d..34dcb591 100644
--- a/kgforge/specializations/stores/demo_store.py
+++ b/kgforge/specializations/stores/demo_store.py
@@ -137,7 +137,7 @@ def _deprecate_one(self, resource: Resource) -> None:
# Querying.
def search(
- self, filters: List[Union[Dict, Filter]], resolvers: Optional[List[Resolver]], **params
+ self, resolvers: Optional[List[Resolver]], filters: List[Union[Dict, Filter]], **params
) -> List[Resource]:
cross_bucket = params.get("cross_bucket", None)
diff --git a/kgforge/specializations/stores/nexus/service.py b/kgforge/specializations/stores/nexus/service.py
index 76be51a0..52b45f1e 100644
--- a/kgforge/specializations/stores/nexus/service.py
+++ b/kgforge/specializations/stores/nexus/service.py
@@ -106,8 +106,8 @@ def __init__(
self.namespace = namespace
self.project_property = project_property
self.store_metadata_keys = [
- "_constrainedBy", "_createdAt", "_createdBy", "_deprecated", "_incoming", "_outgoing",
- "_project", "_rev", "_schemaProject", "_self", "_updatedAt", "_updatedBy"
+ "_constrainedBy", "_createdAt", "_createdBy", "_deprecated", "_incoming",
+ "_outgoing", "_project", "_rev", "_schemaProject", "_self", "_updatedAt", "_updatedBy"
]
self.deprecated_property = deprecated_property
@@ -246,38 +246,45 @@ def get_project_context(self) -> Dict:
return context
def resolve_context(self, iri: str, local_only: Optional[bool] = False) -> Dict:
- if iri in self.context_cache:
- return self.context_cache[iri]
- try:
- context_to_resolve = (
- self.store_local_context if iri == self.store_context else iri
- )
+ context_to_resolve = (
+ self.store_local_context if iri == self.store_context else iri
+ )
+
+ if context_to_resolve not in self.context_cache:
+
url = "/".join((self.url_resolver, "_", quote_plus(context_to_resolve)))
- response = requests.get(url, headers=self.headers)
- response.raise_for_status()
- resource = response.json()
- except Exception as exc:
- if not local_only:
- try:
- context = Context(context_to_resolve)
- except URLError as exc2:
- raise ValueError(f"{context_to_resolve} is not resolvable") from exc2
-
- document = context.document["@context"]
+
+ try:
+ response = requests.get(url, headers=self.headers)
+ response.raise_for_status()
+ resource = response.json()
+ except Exception as exc:
+ if not local_only:
+ try:
+ context = Context(context_to_resolve)
+ except URLError as exc2:
+ raise ValueError(f"{context_to_resolve} is not resolvable") from exc2
+
+ document = context.document["@context"]
+ else:
+ raise ValueError(f"{context_to_resolve} is not resolvable") from exc
else:
- raise ValueError(f"{context_to_resolve} is not resolvable") from exc
- else:
- # Make sure context is not deprecated
- if '_deprecated' in resource and resource['_deprecated']:
- raise ConfigurationError(f"Context {context_to_resolve} exists but was deprecated")
- document = json.loads(json.dumps(resource["@context"]))
- if isinstance(document, list):
- if self.store_context in document:
- document.remove(self.store_context)
- if self.store_local_context in document:
- document.remove(self.store_local_context)
- self.context_cache.update({context_to_resolve: document})
- return document
+ # Make sure context is not deprecated
+ if '_deprecated' in resource and resource['_deprecated']:
+ raise ConfigurationError(
+ f"Context {context_to_resolve} exists but was deprecated"
+ )
+ document = json.loads(json.dumps(resource["@context"]))
+
+ if isinstance(document, list):
+ if self.store_context in document:
+ document.remove(self.store_context)
+ if self.store_local_context in document:
+ document.remove(self.store_local_context)
+
+ self.context_cache[context_to_resolve] = document
+
+ return self.context_cache[context_to_resolve]
def batch_request(
self,
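The reworked `resolve_context` keys its cache on the normalized IRI (the store context is mapped to its local counterpart before lookup) and always returns the cached entry, so repeated resolutions hand back the same document object. A hedged sketch, with `service` assumed to be an initialized nexus `Service`:

```python
doc = service.resolve_context(service.store_context)    # fetched, then cached
again = service.resolve_context(service.store_context)  # served from the cache
assert doc is again  # both calls return the same cached document
```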
diff --git a/tests/core/archetypes/test_store.py b/tests/core/archetypes/test_store.py
index b9359ea2..a2c8a442 100644
--- a/tests/core/archetypes/test_store.py
+++ b/tests/core/archetypes/test_store.py
@@ -71,4 +71,4 @@ def test_freeze(config, store_metadata_value):
assert dataset.derivation.entity.id == "http://derivation1?_version=1"
assert dataset.generation.activity.id == "http://generation1?_version=1"
assert dataset.contribution.agent.id == "http://contribution1?_version=1"
- assert dataset.invalidation.activity.id == "http://invalidation1?_version=1"
+ assert dataset.invalidation.activity.id == "http://invalidation1?_version=1"
\ No newline at end of file
diff --git a/tests/data/shacl-model/commons/ontology-1.ttl b/tests/data/shacl-model/commons/ontology-1.ttl
new file mode 100644
index 00000000..56849df8
--- /dev/null
+++ b/tests/data/shacl-model/commons/ontology-1.ttl
@@ -0,0 +1,84 @@
+@prefix bmo: <https://bbp.epfl.ch/ontologies/core/bmo/> .
+@prefix vann: <http://purl.org/vocab/vann/> .
+@prefix nsg: <https://neuroshapes.org/> .
+@prefix owl: <http://www.w3.org/2002/07/owl#> .
+@prefix parms: <https://bbp.epfl.ch/ontologies/core/parameters/> .
+@prefix prov: <http://www.w3.org/ns/prov#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix schema: <http://schema.org/> .
+@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+
+schema:name a owl:AnnotationProperty ;
+ rdfs:label "name"@en ;
+ skos:altLabel "name"@en .
+
+schema:unitCode a owl:AnnotationProperty ;
+ rdfs:label "unitCode"@en ;
+ skos:altLabel "units"@en .
+
+owl:equivalentClass a owl:AnnotationProperty ;
+ rdfs:label "equivalentClass"@en .
+
+skos:altLabel a owl:AnnotationProperty ;
+ skos:altLabel "altLabel"@en .
+
+skos:definition a owl:AnnotationProperty ;
+ rdfs:label "definition"@en .
+
+skos:example a owl:AnnotationProperty .
+
+skos:notation a owl:AnnotationProperty .
+
+skos:note a owl:AnnotationProperty .
+
+skos:prefLabel a owl:AnnotationProperty ;
+ rdfs:label "prefLabel"@en .
+
+bmo:AtlasDistanceToLayer a owl:Class ;
+ rdfs:label "AtlasDistanceToLayer"@en ;
+ rdfs:subClassOf bmo:ModelBrainParameter .
+
+bmo:AtlasETypeRatio a owl:Class ;
+ rdfs:label "AtlasETypeRatio"@en ;
+ rdfs:subClassOf bmo:ModelBrainParameter ;
+ skos:altLabel "Atlas e-type ratio"@en ;
+ skos:prefLabel "Atlas EType Ratio"@en .
+
+bmo:BrainVolumeParameter a owl:Class ;
+ rdfs:label "Model Brain Volume Parameter"@en ;
+ rdfs:subClassOf bmo:ModelBrainParameter .
+
+bmo:ConductanceDensity a owl:Class ;
+ rdfs:label "Conductance Density"@en ;
+ rdfs:subClassOf [ a owl:Restriction ;
+ owl:onProperty bmo:compartment ;
+ owl:someValuesFrom bmo:NeuronPart ],
+ [ a owl:Restriction ;
+ owl:onProperty nsg:ion ;
+ owl:someValuesFrom bmo:Ion ],
+ bmo:EModelParameter,
+ bmo:NeuronPartFeature .
+
+bmo:ConnectivityModelCoefficient a owl:Class ;
+ rdfs:label "Connectivity Model Coefficient"@en ;
+ rdfs:subClassOf bmo:ModelConnectivityParameter .
+
+bmo:DendriteSynapseDensity a owl:Class ;
+ rdfs:label "DendriteSynapseDensity"@en ;
+ rdfs:subClassOf bmo:ModelBrainParameter .
+
+bmo:EModelParameterConstraint a owl:Class ;
+ rdfs:label "EModel Parameter Constraint"@en ;
+ rdfs:subClassOf bmo:ModelBrainParameterConstraint .
+
+bmo:METypeRatio a owl:Class ;
+ rdfs:label "ME-Type Ratio"@en ;
+ rdfs:subClassOf bmo:ModelBrainParameter ;
+ skos:prefLabel "ME-Type Ratio"^^xsd:string .
+
+<https://bbp.epfl.ch/ontologies/core/parameters> a owl:Ontology ;
+ rdfs:label "Brain Modeling Parameter Ontology"^^xsd:string ;
+ vann:preferredNamespacePrefix "parms"^^xsd:string ;
+ schema:title "Brain Modeling Parameter Ontology"^^xsd:string ;
+ owl:versionInfo "R4"^^xsd:string .
diff --git a/tests/specializations/models/data.py b/tests/specializations/models/data.py
index e3f2b1e6..6742e643 100644
--- a/tests/specializations/models/data.py
+++ b/tests/specializations/models/data.py
@@ -13,6 +13,7 @@
# along with Blue Brain Nexus Forge. If not, see <https://www.gnu.org/licenses/>.
from copy import deepcopy
+from utils import full_path_relative_to_root
ORGANIZATION = {
"id": "",
@@ -134,4 +135,14 @@
"Organization": "http://www.example.com/OrganizationShape",
"Person": "http://www.example.com/PersonShape",
"PostalAddress": "http://schema.org/PostalAddress",
-}
\ No newline at end of file
+}
+
+SCHEMA_SOURCE_MAP = {
+ "Activity": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-2.json'),
+ "Association": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-1.json'),
+ "Building": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-3.json'),
+ "Employee": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-1.json'),
+ "Organization": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-1.json'),
+ "Person": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-1.json'),
+ "PostalAddress": full_path_relative_to_root('tests/data/shacl-model/commons/shapes-1.json'),
+}
diff --git a/tests/specializations/models/test_rdf_model.py b/tests/specializations/models/test_rdf_model.py
index 099169f7..7497cd82 100644
--- a/tests/specializations/models/test_rdf_model.py
+++ b/tests/specializations/models/test_rdf_model.py
@@ -23,9 +23,11 @@
@pytest.fixture
def rdf_model(context_iri_file):
- return RdfModel(full_path_relative_to_root("tests/data/shacl-model"),
- context={"iri": context_iri_file},
- origin="directory")
+ return RdfModel(
+ full_path_relative_to_root("tests/data/shacl-model"),
+ context={"iri": context_iri_file},
+ origin="directory"
+ )
class TestVocabulary:
@@ -96,8 +98,34 @@ def valid_activity_resource(self, activity_json):
@pytest.mark.parametrize("type_,", TYPES_SCHEMAS_MAP.keys())
def test_type_to_schema(self, rdf_model: RdfModel, type_):
- # FIXME TYPES_SCHEMAS_MAP should be a type to file dictionary
- assert rdf_model.schema_id(type_) == TYPES_SCHEMAS_MAP[type_]
+
+ # The problem:
+ # For DirectoryService,
+ # the best way to track the file from which a schema originates is
+ # - before building the shapes map
+ # - on service initialisation, when the graph is loaded (gets all schemas),
+
+ # For StoreService,
+ # the best way to track the file from which a schema originates is
+ # - when building the shapes map, querying the store to get resource_id
+ # - not on service initialisation, no graph loading
+ # (empty graph provided, lazily loaded on request)
+
+ # Solution: graph loading should happen when building the shapes map, which returns:
+ # the graph with the shapes (empty for Store, full for Directory)
+ # shape_to_source
+ # class_to_shape
+
+ # Would mean that in
+ # - RdfModelServiceFromStore g = Graph() would happen in the implementation of
+ # _build_shapes_map, and not in constructor
+ # - RdfModelFromDirectory load_rdf_files_into_graph() would happen in the implementation of
+ # _build_shapes_map, and not in constructor
+
+ # - RdfModelService: self.shape_to_source, self.class_to_shape can be parameters of the
+ # constructor of this abstract class, and they'd be passed to super by the implementations
+
+ assert rdf_model.schema_source(type_) == SCHEMA_SOURCE_MAP[type_]
def test_validate_one(self, rdf_model: RdfModel, valid_activity_resource):
rdf_model.validate(valid_activity_resource, False, type_="Activity")
@@ -119,3 +147,14 @@ def test_validate_many(self, rdf_model: RdfModel, valid_activity_resource,
assert (valid_activity_resource._last_action.operation ==
invalid_activity_resource._last_action.operation ==
rdf_model._validate_many.__name__)
+
+ def test_query_model(self, rdf_model: RdfModel):
+
+ q_template = "SELECT ?id WHERE { ?id a %s }"
+ res1 = rdf_model.sparql(q_template % "owl:Ontology", debug=True)
+ res2 = rdf_model.sparql(q_template % "owl:AnnotationProperty", debug=True)
+ res3 = rdf_model.sparql(q_template % "owl:Class", debug=True)
+
+ assert len(res1) == 1
+ assert len(res2) == 9
+ assert len(res3) == 8