Diffstat (limited to 'bsfs')
-rw-r--r--   bsfs/graph/__init__.py          15
-rw-r--r--   bsfs/graph/ac/__init__.py       20
-rw-r--r--   bsfs/graph/ac/base.py           67
-rw-r--r--   bsfs/graph/ac/null.py           53
-rw-r--r--   bsfs/graph/graph.py             65
-rw-r--r--   bsfs/graph/nodes.py            243
-rw-r--r--   bsfs/triple_store/__init__.py   20
-rw-r--r--   bsfs/triple_store/base.py      128
-rw-r--r--   bsfs/triple_store/sparql.py    253
9 files changed, 864 insertions, 0 deletions
diff --git a/bsfs/graph/__init__.py b/bsfs/graph/__init__.py
new file mode 100644
index 0000000..3a131e9
--- /dev/null
+++ b/bsfs/graph/__init__.py
@@ -0,0 +1,15 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+
+# exports
+__all__: typing.Sequence[str] = []
+
+## EOF ##
diff --git a/bsfs/graph/ac/__init__.py b/bsfs/graph/ac/__init__.py
new file mode 100644
index 0000000..420de01
--- /dev/null
+++ b/bsfs/graph/ac/__init__.py
@@ -0,0 +1,20 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .base import AccessControlBase
+from .null import NullAC
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'AccessControlBase',
+ 'NullAC',
+ )
+
+## EOF ##
diff --git a/bsfs/graph/ac/base.py b/bsfs/graph/ac/base.py
new file mode 100644
index 0000000..70475d2
--- /dev/null
+++ b/bsfs/graph/ac/base.py
@@ -0,0 +1,67 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import abc
+import typing
+
+# bsfs imports
+from bsfs import schema as _schema
+from bsfs.triple_store import TripleStoreBase
+from bsfs.utils import URI
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'AccessControlBase',
+ )
+
+
+## code ##
+
+class AccessControlBase(abc.ABC):
+ """
+ """
+
+ #
+ __backend: TripleStoreBase
+
+ #
+ __user: URI
+
+ def __init__(
+ self,
+ backend: TripleStoreBase,
+ user: URI,
+ ):
+ self.__backend = backend
+ self.__user = URI(user)
+
+ @abc.abstractmethod
+ def is_protected_predicate(self, pred: _schema.Predicate) -> bool:
+ """Return True if a predicate cannot be modified manually."""
+
+ @abc.abstractmethod
+ def create(self, node_type: _schema.Node, guids: typing.Iterable[URI]):
+ """Perform post-creation operations on nodes, e.g. ownership information."""
+
+ @abc.abstractmethod
+ def link_from_node(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which outbound links can be written."""
+
+ @abc.abstractmethod
+ def link_to_node(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which inbound links can be written."""
+
+ @abc.abstractmethod
+ def write_literal(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes to which literals can be attached."""
+
+ @abc.abstractmethod
+ def createable(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes that are allowed to be created."""
+
+
+## EOF ##
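For orientation, a concrete policy plugs in by subclassing AccessControlBase and implementing the six hooks above. The snippet below is a minimal sketch, not part of this change; the ReadOnlyAC name and its blanket-deny behaviour are made up for illustration.

import typing

from bsfs import schema as _schema
from bsfs.graph.ac import base
from bsfs.utils import URI


class ReadOnlyAC(base.AccessControlBase):
    """Hypothetical policy that denies every write-related permission."""

    def is_protected_predicate(self, pred: _schema.Predicate) -> bool:
        return True  # treat every predicate as protected

    def create(self, node_type: _schema.Node, guids: typing.Iterable[URI]):
        pass  # never reached: createable() admits no nodes

    def link_from_node(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
        return set()  # no outbound links may be written

    def link_to_node(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
        return set()  # no inbound links may be written

    def write_literal(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
        return set()  # no literals may be attached

    def createable(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
        return set()  # no new nodes may be created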
diff --git a/bsfs/graph/ac/null.py b/bsfs/graph/ac/null.py
new file mode 100644
index 0000000..a39b7b9
--- /dev/null
+++ b/bsfs/graph/ac/null.py
@@ -0,0 +1,53 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs import schema as _schema
+from bsfs.namespace import ns
+from bsfs.utils import URI
+
+# inner-module imports
+from . import base
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'NullAC',
+ )
+
+
+## code ##
+
+class NullAC(base.AccessControlBase):
+ """
+ """
+
+ def is_protected_predicate(self, pred: _schema.Predicate) -> bool:
+ """Return True if a predicate cannot be modified manually."""
+ return pred.uri == ns.bsm.t_created
+
+ def create(self, node_type: _schema.Node, guids: typing.Iterable[URI]):
+ """Perform post-creation operations on nodes, e.g. ownership information."""
+
+ def link_from_node(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which outbound links can be written."""
+ return guids
+
+ def link_to_node(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which inbound links can be written."""
+ return guids
+
+ def write_literal(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes to which literals can be attached."""
+ return guids
+
+ def createable(self, node_type: _schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes that are allowed to be created."""
+ return guids
+
+## EOF ##
diff --git a/bsfs/graph/graph.py b/bsfs/graph/graph.py
new file mode 100644
index 0000000..06271f6
--- /dev/null
+++ b/bsfs/graph/graph.py
@@ -0,0 +1,65 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs.schema import Schema
+from bsfs.triple_store import TripleStoreBase
+from bsfs.utils import URI, typename
+
+# inner-module imports
+from . import nodes
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Graph',
+ )
+
+
+## code ##
+
+class Graph():
+ """
+ """
+ # link to the triple storage backend.
+ __backend: TripleStoreBase
+
+ # user uri.
+ __user: URI
+
+ def __init__(self, backend: TripleStoreBase, user: URI):
+ self.__backend = backend
+ self.__user = user
+
+ def __hash__(self) -> int:
+ return hash((type(self), self.__backend, self.__user))
+
+ def __eq__(self, other) -> bool:
+ return isinstance(other, type(self)) \
+ and self.__backend == other.__backend \
+ and self.__user == other.__user
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}(backend={repr(self.__backend)}, user={self.__user})'
+
+ def __str__(self) -> str:
+ return f'{typename(self)}({str(self.__backend)}, {self.__user})'
+
+ @property
+ def schema(self) -> Schema:
+ """Return the store's local schema."""
+ return self.__backend.schema
+
+ def nodes(self, node_type: URI, guids: typing.Iterable[URI]) -> nodes.Nodes:
+ """
+ """
+ node_type = self.schema.node(node_type)
+ # NOTE: Nodes constructor materializes guids.
+ return nodes.Nodes(self.__backend, self.__user, node_type, guids)
+
+## EOF ##
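A usage sketch (not part of the change): assuming the backend schema already defines the node type, and with placeholder URIs, a Graph is wired up roughly as follows.

from bsfs.graph.graph import Graph
from bsfs.triple_store import SparqlStore
from bsfs.utils import URI

# in-memory backend; SparqlStore.Open currently ignores the uri argument
backend = SparqlStore.Open('')
graph = Graph(backend, URI('http://example.com/user/me'))

# the graph mirrors the backend's schema
print(graph.schema)

# placeholder type and guid; the type must be defined in the backend schema,
# otherwise schema.node() fails. The guid need not exist yet (see Nodes).
files = graph.nodes(
    URI('http://example.com/File'),
    [URI('http://example.com/file#1234')],
)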
diff --git a/bsfs/graph/nodes.py b/bsfs/graph/nodes.py
new file mode 100644
index 0000000..7d2e9b3
--- /dev/null
+++ b/bsfs/graph/nodes.py
@@ -0,0 +1,243 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import itertools
+import time
+import typing
+
+# bsfs imports
+from bsfs import schema as _schema
+from bsfs.namespace import ns
+from bsfs.triple_store import TripleStoreBase
+from bsfs.utils import errors, URI, typename
+
+# inner-module imports
+from . import ac
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Nodes',
+ )
+
+
+## code ##
+
+class Nodes():
+ """
+ NOTE: guids may or may not exist. This is not verified as nodes are created on demand.
+ """
+
+ # triple store backend.
+ __backend: TripleStoreBase
+
+ # user uri.
+ __user: URI
+
+ # node type.
+ __node_type: _schema.Node
+
+ # guids of nodes. Can be empty.
+ __guids: typing.Set[URI]
+
+ def __init__(
+ self,
+ backend: TripleStoreBase,
+ user: URI,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ self.__backend = backend
+ self.__user = user
+ self.__node_type = node_type
+ self.__guids = set(guids)
+ self.__ac = ac.NullAC(self.__backend, self.__user)
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return isinstance(other, Nodes) \
+ and self.__backend == other.__backend \
+ and self.__user == other.__user \
+ and self.__node_type == other.__node_type \
+ and self.__guids == other.__guids
+
+ def __hash__(self) -> int:
+ return hash((type(self), self.__backend, self.__user, self.__node_type, tuple(sorted(self.__guids))))
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}({self.__backend}, {self.__user}, {self.__node_type}, {self.__guids})'
+
+ def __str__(self) -> str:
+ return f'{typename(self)}({self.__node_type}, {self.__guids})'
+
+ @property
+ def node_type(self) -> _schema.Node:
+ """Return the node's type."""
+ return self.__node_type
+
+ @property
+ def guids(self) -> typing.Iterator[URI]:
+ """Return all node guids."""
+ return iter(self.__guids)
+
+ def set(
+ self,
+ pred: URI, # FIXME: URI or _schema.Predicate?
+ value: typing.Any,
+ ) -> 'Nodes':
+ """
+ """
+ try:
+ # insert triples
+ self.__set(pred, value)
+ # save changes
+ self.__backend.commit()
+
+ except (
+ errors.PermissionDeniedError, # tried to set a protected predicate (ns.bsm.t_created)
+ errors.ConsistencyError, # node types are not in the schema or don't match the predicate
+ errors.InstanceError, # guids/values don't have the correct type
+ TypeError, # value is supposed to be a Nodes instance
+ ValueError, # multiple values passed to unique predicate
+ ):
+ # revert changes
+ self.__backend.rollback()
+ # notify the client
+ raise
+
+ return self
+
+ def set_from_iterable(
+ self,
+ predicate_values: typing.Iterable[typing.Tuple[URI, typing.Any]], # FIXME: URI or _schema.Predicate?
+ ) -> 'Nodes':
+ """
+ """
+ # TODO: Could group predicate_values by predicate to gain some efficiency
+        # TODO: Optionally ignore errors on individual predicates; for now a failure
+        # can leave residual data (e.g. some nodes were created, some not).
+ try:
+ # insert triples
+ for pred, value in predicate_values:
+ self.__set(pred, value)
+ # save changes
+ self.__backend.commit()
+
+ except (
+ errors.PermissionDeniedError, # tried to set a protected predicate (ns.bsm.t_created)
+ errors.ConsistencyError, # node types are not in the schema or don't match the predicate
+ errors.InstanceError, # guids/values don't have the correct type
+ TypeError, # value is supposed to be a Nodes instance
+ ValueError, # multiple values passed to unique predicate
+ ):
+ # revert changes
+ self.__backend.rollback()
+ # notify the client
+ raise
+
+ return self
+
+ def __set(
+ self,
+ predicate: URI,
+ value: typing.Any,
+ #on_error: str = 'ignore', # ignore, rollback
+ ):
+ """
+ """
+ # get normalized predicate. Raises KeyError if *pred* not in the schema.
+ pred = self.__backend.schema.predicate(predicate)
+
+ # node_type must be a subclass of the predicate's domain
+ node_type = self.node_type
+ if not node_type <= pred.domain:
+ raise errors.ConsistencyError(f'{node_type} must be a subclass of {pred.domain}')
+
+ # check reserved predicates (access controls, metadata, internal structures)
+ # FIXME: Needed? Could be integrated into other AC methods (by passing the predicate!)
+ # This could allow more fine-grained predicate control (e.g. based on ownership)
+ # rather than a global approach like this.
+ if self.__ac.is_protected_predicate(pred):
+ raise errors.PermissionDeniedError(pred)
+
+ # set operation affects all nodes (if possible)
+ guids = set(self.guids)
+
+ # ensure subject node existence; create nodes if need be
+ guids = set(self._ensure_nodes(node_type, guids))
+
+ # check value
+ if isinstance(pred.range, _schema.Literal):
+ # check write permissions on existing nodes
+ # As long as the user has write permissions, we don't restrict
+ # the creation or modification of literal values.
+ guids = set(self.__ac.write_literal(node_type, guids))
+
+ # insert literals
+ # TODO: Support passing iterators as values for non-unique predicates
+ self.__backend.set(
+ node_type,
+ guids,
+ pred,
+ [value],
+ )
+
+ elif isinstance(pred.range, _schema.Node):
+ # check value type
+ if not isinstance(value, Nodes):
+ raise TypeError(value)
+ # value's node_type must be a subclass of the predicate's range
+ if not value.node_type <= pred.range:
+ raise errors.ConsistencyError(f'{value.node_type} must be a subclass of {pred.range}')
+
+ # check link permissions on source nodes
+ # Link permissions cover adding and removing links on the source node.
+ # Specifically, link permissions also allow to remove links to other
+ # nodes if needed (e.g. for unique predicates).
+ guids = set(self.__ac.link_from_node(node_type, guids))
+
+ # get link targets
+ targets = set(value.guids)
+ # ensure existence of value nodes; create nodes if need be
+ targets = set(self._ensure_nodes(value.node_type, targets))
+ # check link permissions on target nodes
+ targets = set(self.__ac.link_to_node(value.node_type, targets))
+
+ # insert node links
+ self.__backend.set(
+ node_type,
+ guids,
+ pred,
+ targets,
+ )
+
+ else:
+ raise errors.UnreachableError()
+
+ def _ensure_nodes(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ # check node existence
+ guids = set(guids)
+ existing = set(self.__backend.exists(node_type, guids))
+ # get nodes to be created
+ missing = guids - existing
+ # create nodes if need be
+ if len(missing) > 0:
+ # check which missing nodes can be created
+ missing = set(self.__ac.createable(node_type, missing))
+ # create nodes
+ self.__backend.create(node_type, missing)
+ # add bookkeeping triples
+ self.__backend.set(node_type, missing,
+ self.__backend.schema.predicate(ns.bsm.t_created), [time.time()])
+ # add permission triples
+ self.__ac.create(node_type, missing)
+ # return available nodes
+ return existing | missing
+
+## EOF ##
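Continuing the sketch above, Nodes.set() covers both branches of __set(): literals are written directly, while node-valued predicates expect another Nodes instance; each call commits on success and rolls back on failure. The predicate and node URIs below are placeholders that would have to be defined in the schema.

from bsfs.utils import URI

tags = graph.nodes(
    URI('http://example.com/Tag'),
    [URI('http://example.com/tag#holiday')],
)

# literal-valued predicate: the value is passed as-is
files.set(URI('http://example.com/filesize'), 1024)

# node-valued predicate: the value must be a Nodes instance of a matching type
files.set(URI('http://example.com/hasTag'), tags)

# on error, changes since the last commit are rolled back before re-raising
try:
    files.set(URI('http://example.com/hasTag'), 'not-a-Nodes-instance')
except TypeError:
    pass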
diff --git a/bsfs/triple_store/__init__.py b/bsfs/triple_store/__init__.py
new file mode 100644
index 0000000..fb5a8a9
--- /dev/null
+++ b/bsfs/triple_store/__init__.py
@@ -0,0 +1,20 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .base import TripleStoreBase
+from .sparql import SparqlStore
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'SparqlStore',
+ 'TripleStoreBase',
+ )
+
+## EOF ##
diff --git a/bsfs/triple_store/base.py b/bsfs/triple_store/base.py
new file mode 100644
index 0000000..a2668c3
--- /dev/null
+++ b/bsfs/triple_store/base.py
@@ -0,0 +1,128 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import abc
+import typing
+
+# bsfs imports
+from bsfs.utils import URI, typename
+import bsfs.schema as _schema
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'TripleStoreBase',
+ )
+
+
+## code ##
+
+class TripleStoreBase(abc.ABC):
+ """
+ """
+
+ # storage's URI. None implies a temporary location.
+ uri: typing.Optional[URI] = None
+
+ def __init__(self, uri: typing.Optional[URI] = None):
+ self.uri = uri
+
+ def __hash__(self) -> int:
+ uri = self.uri if self.uri is not None else id(self)
+ return hash((type(self), uri))
+
+ def __eq__(self, other) -> bool:
+ return isinstance(other, type(self)) \
+ and (( self.uri is not None \
+ and other.uri is not None \
+ and self.uri == other.uri ) \
+ or id(self) == id(other))
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}(uri={self.uri})'
+
+ def __str__(self) -> str:
+ return f'{typename(self)}(uri={self.uri})'
+
+ def is_persistent(self) -> bool:
+ """Return True if data is stored persistently."""
+ return self.uri is not None
+
+
+ @classmethod
+ @abc.abstractmethod
+ def Open(
+ cls,
+ uri: str,
+ **kwargs: typing.Any,
+ ) -> 'TripleStoreBase':
+ """Return a TripleStoreBase instance connected to *uri*."""
+
+ @abc.abstractmethod
+ def commit(self):
+ """Commit the current transaction."""
+
+ @abc.abstractmethod
+ def rollback(self):
+ """Undo changes since the last commit."""
+
+ @property
+ @abc.abstractmethod
+ def schema(self) -> _schema.Schema:
+ """Return the store's local schema."""
+
+ @schema.setter
+ def schema(self, schema: _schema.Schema):
+ """Migrate to new schema by adding or removing class definitions.
+
+ Commits before and after the migration.
+
+        Instances of removed classes will be deleted irreversibly.
+ Note that modifying an existing class is not directly supported.
+ Also, it is generally discouraged, since changing definitions may
+ lead to inconsistencies across multiple clients in a distributed
+ setting. Instead, consider introducing a new class under its own
+ uri. Such a migration would look as follows:
+
+ 1. Add new class definitions.
+ 2. Create instances of the new classes and copy relevant data.
+ 3. Remove the old definitions.
+
+ To modify a class, i.e., re-use a previous uri with a new
+ class definition, you would have to migrate via temporary
+ class definitions, and thus repeat the above procedure two times.
+
+ """
+
+ @abc.abstractmethod
+ def exists(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ """
+ """
+
+ @abc.abstractmethod
+ def create(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ """Create *guid* nodes with type *subject*."""
+
+ @abc.abstractmethod
+ def set(
+ self,
+ node_type: _schema.Node, # FIXME: is the node_type even needed? Couldn't I infer from the predicate?
+ guids: typing.Iterable[URI],
+ predicate: _schema.Predicate,
+ values: typing.Iterable[typing.Any],
+ ):
+ """
+ """
+
+## EOF ##
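The schema setter above prescribes a copy-based, three-step migration instead of editing a class in place. The sketch below illustrates that procedure; load_schema() is a hypothetical stand-in for however Schema objects are constructed (not part of this change), and store, known_guids, and all URIs are placeholders.

from bsfs.utils import URI

# 1. Add the new class definitions alongside the old ones.
store.schema = load_schema('old_and_new_classes')      # hypothetical loader

# 2. Create instances of the new classes and copy the relevant data.
old_type = store.schema.node(URI('http://example.com/FileV1'))
new_type = store.schema.node(URI('http://example.com/FileV2'))
for guid in store.exists(old_type, known_guids):
    store.create(new_type, [URI(f'{guid}/v2')])
    # ... copy predicate values over with store.set(...) ...
store.commit()

# 3. Remove the old definitions; the setter deletes their remaining instances.
store.schema = load_schema('new_classes_only')         # hypothetical loader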
diff --git a/bsfs/triple_store/sparql.py b/bsfs/triple_store/sparql.py
new file mode 100644
index 0000000..3eab869
--- /dev/null
+++ b/bsfs/triple_store/sparql.py
@@ -0,0 +1,253 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import itertools
+import typing
+import rdflib
+
+# bsfs imports
+from bsfs.utils import URI
+from bsfs.utils import errors
+import bsfs.schema as _schema
+
+# inner-module imports
+from . import base
+
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'SparqlStore',
+ )
+
+
+## code ##
+
+class Transaction():
+ """Lightweight rdflib transactions for in-memory databases."""
+
+ def __init__(self, graph):
+ self._graph = graph
+ self.commit() # initialize
+
+ def commit(self):
+ self._added = []
+ self._removed = []
+
+ def rollback(self):
+ for triple in self._added:
+ self._graph.remove(triple)
+ for triple in self._removed:
+ self._graph.add(triple)
+
+ def add(self, triple):
+ if triple not in self._graph:
+ self._added.append(triple)
+ self._graph.add(triple)
+
+ def remove(self, triple):
+ if triple in self._graph:
+ self._removed.append(triple)
+ self._graph.remove(triple)
+
+
+class SparqlStore(base.TripleStoreBase):
+ """
+ """
+
+ def __init__(self, uri: typing.Optional[URI] = None):
+ super().__init__(uri)
+ self.graph = rdflib.Graph()
+ self.transaction = Transaction(self.graph)
+ self.__schema = _schema.Schema.Empty()
+
+ @classmethod
+ def Open(
+ cls,
+ uri: str,
+ **kwargs: typing.Any,
+ ) -> 'SparqlStore':
+        return cls(None)  # the in-memory store ignores *uri*
+
+ def commit(self):
+ self.transaction.commit()
+
+ def rollback(self):
+ self.transaction.rollback()
+
+ @property
+ def schema(self) -> _schema.Schema:
+ """Return the current schema."""
+ return self.__schema
+
+ @schema.setter
+ def schema(self, schema: _schema.Schema):
+ """Migrate to new schema by adding or removing class definitions.
+
+ Commits before and after the migration.
+
+        Instances of removed classes will be deleted irreversibly.
+ Note that modifying an existing class is not directly supported.
+ Also, it is generally discouraged, since changing definitions may
+ lead to inconsistencies across multiple clients in a distributed
+ setting. Instead, consider introducing a new class under its own
+ uri. Such a migration would look as follows:
+
+ 1. Add new class definitions.
+ 2. Create instances of the new classes and copy relevant data.
+ 3. Remove the old definitions.
+
+ To modify a class, i.e., re-use a previous uri with a new
+ class definition, you would have to migrate via temporary
+ class definitions, and thus repeat the above procedure two times.
+
+ """
+        # check args: Schema instance
+ if not isinstance(schema, _schema.Schema):
+ raise TypeError(schema)
+ # check compatibility: No contradicting definitions
+ if not self.schema.consistent_with(schema):
+ raise errors.ConsistencyError(f'{schema} is inconsistent with {self.schema}')
+
+ # commit the current transaction
+ self.commit()
+
+ # adjust instances:
+ # nothing to do for added classes
+ # delete instances of removed classes
+
+ # get deleted classes
+ sub = self.schema - schema
+
+ # remove predicate instances
+ for pred in sub.predicates:
+ for src, trg in self.graph.subject_objects(rdflib.URIRef(pred.uri)):
+ self.transaction.remove((src, rdflib.URIRef(pred.uri), trg))
+
+ # remove node instances
+ for node in sub.nodes:
+ # iterate through node instances
+ for inst in self.graph.subjects(rdflib.RDF.type, rdflib.URIRef(node.uri)):
+ # remove triples where the instance is in the object position
+ for src, pred in self.graph.subject_predicates(inst):
+ self.transaction.remove((src, pred, inst))
+ # remove triples where the instance is in the subject position
+ for pred, trg in self.graph.predicate_objects(inst):
+ self.transaction.remove((inst, pred, trg))
+ # remove instance
+ self.transaction.remove((inst, rdflib.RDF.type, rdflib.URIRef(node.uri)))
+
+ # NOTE: Nothing to do for literals
+
+ # commit instance changes
+ self.commit()
+
+ # migrate schema
+ self.__schema = schema
+
+
+ def _has_type(self, subject: URI, node_type: _schema.Node) -> bool:
+ """Return True if *subject* is a node of class *node_type* or a subclass thereof."""
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+
+ subject_types = list(self.graph.objects(rdflib.URIRef(subject), rdflib.RDF.type))
+ if len(subject_types) == 0:
+ return False
+ elif len(subject_types) == 1:
+ node = self.schema.node(URI(subject_types[0]))
+ if node == node_type:
+ return True
+ elif node_type in node.parents():
+ return True
+ else:
+ return False
+ else:
+ raise errors.UnreachableError()
+
+ def exists(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ """
+ """
+ return {subj for subj in guids if self._has_type(subj, node_type)}
+
+ def create(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ """
+ """
+ # check node_type
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+ # check and create guids
+ for guid in guids:
+ guid = rdflib.URIRef(guid)
+ # check node existence
+ if (guid, rdflib.RDF.type, None) in self.graph:
+ # FIXME: node exists and may have a different type! ignore? raise? report?
+ continue
+ # add node
+ self.transaction.add((guid, rdflib.RDF.type, rdflib.URIRef(node_type.uri)))
+
+ def set(
+ self,
+ node_type: _schema.Node, # FIXME: is the node_type even needed? Couldn't I infer from the predicate?
+ guids: typing.Iterable[URI],
+ predicate: _schema.Predicate,
+ values: typing.Iterable[typing.Any],
+ ):
+ # check node_type
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+ # check predicate
+ if predicate not in self.schema.predicates():
+ raise errors.ConsistencyError(f'{predicate} is not defined in the schema')
+ if not node_type <= predicate.domain:
+ raise errors.ConsistencyError(f'{node_type} must be a subclass of {predicate.domain}')
+ # NOTE: predicate.range is in the schema since predicate is in the schema.
+        # check values; materialize first to safeguard against iterators passed as argument
+        values = list(values)
+        if len(values) == 0:
+            return
+        if predicate.unique and len(values) != 1:
+            raise ValueError(values)
+        if isinstance(predicate.range, _schema.Node):
+            # catches nodes that don't exist and nodes that have an inconsistent type
+            inconsistent = {val for val in values if not self._has_type(val, predicate.range)}
+            if len(inconsistent) > 0:
+                raise errors.InstanceError(inconsistent)
+ # check guids
+ # FIXME: Fail or skip inexistent nodes?
+ guids = set(guids)
+ inconsistent = {guid for guid in guids if not self._has_type(guid, node_type)}
+ if len(inconsistent) > 0:
+ raise errors.InstanceError(inconsistent)
+
+ # add triples
+ pred = rdflib.URIRef(predicate.uri)
+ for guid, value in itertools.product(guids, values):
+ guid = rdflib.URIRef(guid)
+ # convert value
+ if isinstance(predicate.range, _schema.Literal):
+ value = rdflib.Literal(value, datatype=rdflib.URIRef(predicate.range.uri))
+ elif isinstance(predicate.range, _schema.Node):
+ value = rdflib.URIRef(value)
+ else:
+ raise errors.UnreachableError()
+ # clear triples for unique predicates
+ if predicate.unique:
+ for obj in self.graph.objects(guid, pred):
+ if obj != value:
+ self.transaction.remove((guid, pred, obj))
+ # add triple
+ self.transaction.add((guid, pred, value))
+
+## EOF ##
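The Transaction helper above is self-contained and can be exercised on a bare rdflib graph. A small sketch of its add/rollback bookkeeping (the class is importable from the module even though it is not re-exported; URIs are placeholders):

import rdflib

from bsfs.triple_store.sparql import Transaction

graph = rdflib.Graph()
tx = Transaction(graph)

triple = (
    rdflib.URIRef('http://example.com/file#1'),
    rdflib.RDF.type,
    rdflib.URIRef('http://example.com/File'),
)

tx.add(triple)
assert triple in graph   # added immediately, but still undoable

tx.rollback()            # undo everything since the last commit
assert triple not in graph

tx.add(triple)
tx.commit()              # clears the undo log
tx.rollback()            # no-op now; the triple persists
assert triple in graph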