aboutsummaryrefslogtreecommitdiffstats
path: root/bsfs
diff options
context:
space:
mode:
authorMatthias Baumgartner <dev@igsor.net>2022-12-18 14:21:11 +0100
committerMatthias Baumgartner <dev@igsor.net>2022-12-18 14:21:11 +0100
commit91437ba89d35bf482f3d9671bb99ef2fc69f5985 (patch)
treee9bfe27e5a641c040cfa8fe747a7cbb28091079c /bsfs
parent87e4cd5a4581094f490f79d4f1cf91f51897660f (diff)
parente94368c75468e3e94382b12705e55d396249eaca (diff)
downloadbsfs-91437ba89d35bf482f3d9671bb99ef2fc69f5985.tar.gz
bsfs-91437ba89d35bf482f3d9671bb99ef2fc69f5985.tar.bz2
bsfs-91437ba89d35bf482f3d9671bb99ef2fc69f5985.zip
Merge branch 'develop' into main
Diffstat (limited to 'bsfs')
-rw-r--r--bsfs/__init__.py23
-rw-r--r--bsfs/apps/__init__.py20
-rw-r--r--bsfs/apps/init.py73
-rw-r--r--bsfs/apps/migrate.py67
-rw-r--r--bsfs/front/__init__.py20
-rw-r--r--bsfs/front/bsfs.py29
-rw-r--r--bsfs/front/builder.py75
-rw-r--r--bsfs/graph/__init__.py18
-rw-r--r--bsfs/graph/ac/__init__.py20
-rw-r--r--bsfs/graph/ac/base.py71
-rw-r--r--bsfs/graph/ac/null.py52
-rw-r--r--bsfs/graph/graph.py113
-rw-r--r--bsfs/graph/nodes.py217
-rw-r--r--bsfs/graph/schema.nt18
-rw-r--r--bsfs/namespace/__init__.py21
-rw-r--r--bsfs/namespace/namespace.py104
-rw-r--r--bsfs/namespace/predefined.py39
-rw-r--r--bsfs/schema/__init__.py22
-rw-r--r--bsfs/schema/schema.py386
-rw-r--r--bsfs/schema/types.py276
-rw-r--r--bsfs/triple_store/__init__.py20
-rw-r--r--bsfs/triple_store/base.py148
-rw-r--r--bsfs/triple_store/sparql.py253
-rw-r--r--bsfs/utils/__init__.py25
-rw-r--r--bsfs/utils/commons.py23
-rw-r--r--bsfs/utils/errors.py41
-rw-r--r--bsfs/utils/uri.py246
-rw-r--r--bsfs/utils/uuid.py108
28 files changed, 2528 insertions, 0 deletions
diff --git a/bsfs/__init__.py b/bsfs/__init__.py
new file mode 100644
index 0000000..079ffaf
--- /dev/null
+++ b/bsfs/__init__.py
@@ -0,0 +1,23 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import collections
+import typing
+
+# bsfs imports
+from .front import Open
+
+# constants
+T_VERSION_INFO = collections.namedtuple('T_VERSION_INFO', ('major', 'minor', 'micro')) # pylint: disable=invalid-name
+version_info = T_VERSION_INFO(0, 0, 1)
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Open',
+ )
+
+## EOF ##
diff --git a/bsfs/apps/__init__.py b/bsfs/apps/__init__.py
new file mode 100644
index 0000000..7efaa87
--- /dev/null
+++ b/bsfs/apps/__init__.py
@@ -0,0 +1,20 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .init import main as init
+from .migrate import main as migrate
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'init',
+ 'migrate',
+ )
+
+## EOF ##
diff --git a/bsfs/apps/init.py b/bsfs/apps/init.py
new file mode 100644
index 0000000..3e2ef37
--- /dev/null
+++ b/bsfs/apps/init.py
@@ -0,0 +1,73 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import argparse
+import json
+import sys
+import typing
+
+# bsfs imports
+from bsfs.utils import errors
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'main',
+ )
+
+## code ##
+
+def init_sparql_store(user) -> typing.Any:
+ """Initialize a SparqlStore backend. Returns a configuration to load it."""
+ # nothing to do for non-persistent store
+ # return config to storage
+ return {
+ 'Graph': {
+ 'user': user,
+ 'backend': {
+ 'SparqlStore': {},
+ },
+ }
+ }
+
+
+def main(argv):
+ """Create a new bsfs storage structure."""
+ parser = argparse.ArgumentParser(description=main.__doc__, prog='init')
+ # global arguments
+ parser.add_argument('--user', type=str, default='http://example.com/me',
+ help='Default user.')
+ parser.add_argument('--output', type=str, default=None,
+ help='Write the config to a file instead of standard output.')
+ #parser.add_argument('--schema', type=str, default=None,
+ # help='Initial schema.')
+ # storage selection
+ parser.add_argument('store', choices=('sparql', ),
+ help='Which storage to initialize.')
+ # storage args
+ # parse args
+ args = parser.parse_args(argv)
+
+ # initialize selected storage
+ if args.store == 'sparql':
+ config = init_sparql_store(args.user)
+ else:
+ raise errors.UnreachableError()
+
+ # print config
+ if args.output is not None:
+ with open(args.output, mode='wt', encoding='UTF-8') as ofile:
+ json.dump(config, ofile)
+ else:
+ json.dump(config, sys.stdout)
+
+
+## main ##
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
+
+## EOF ##
diff --git a/bsfs/apps/migrate.py b/bsfs/apps/migrate.py
new file mode 100644
index 0000000..91c1661
--- /dev/null
+++ b/bsfs/apps/migrate.py
@@ -0,0 +1,67 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import argparse
+import json
+import logging
+import sys
+import typing
+
+# bsfs imports
+import bsfs
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'main',
+ )
+
+
+## code ##
+
+logger = logging.getLogger(__name__)
+
+def main(argv):
+ """Migrate a storage structure to a modified schema."""
+ parser = argparse.ArgumentParser(description=main.__doc__, prog='migrate')
+ parser.add_argument('--remove', action='store_true', default=False,
+ help='Remove classes that are not specified in the provided schema.')
+ parser.add_argument('config', type=str, default=None,
+ help='Path to the storage config file.')
+ parser.add_argument('schema', nargs=argparse.REMAINDER,
+ help='Paths to schema files. Reads from standard input if no file is supplied.')
+ args = parser.parse_args(argv)
+
+ # load storage config
+ with open(args.config, mode='rt', encoding='UTF-8') as ifile:
+ config = json.load(ifile)
+ # open bsfs storage
+ graph = bsfs.Open(config)
+
+ # initialize schema
+ schema = bsfs.schema.Schema.Empty()
+ if len(args.schema) == 0:
+ # assemble schema from standard input
+ schema = schema + bsfs.schema.Schema.from_string(sys.stdin.read())
+ else:
+ # assemble schema from input files
+ for pth in args.schema:
+ with open(pth, mode='rt', encoding='UTF-8') as ifile:
+ schema = schema + bsfs.schema.Schema.from_string(ifile.read())
+
+ # migrate schema
+ graph.migrate(schema, not args.remove)
+
+ # return the migrated storage
+ return graph
+
+
+## main ##
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
+
+## EOF ##
diff --git a/bsfs/front/__init__.py b/bsfs/front/__init__.py
new file mode 100644
index 0000000..92886ab
--- /dev/null
+++ b/bsfs/front/__init__.py
@@ -0,0 +1,20 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .bsfs import Open
+from .builder import build_graph
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Open',
+ 'build_graph',
+ )
+
+## EOF ##
diff --git a/bsfs/front/bsfs.py b/bsfs/front/bsfs.py
new file mode 100644
index 0000000..968b3f5
--- /dev/null
+++ b/bsfs/front/bsfs.py
@@ -0,0 +1,29 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs.graph import Graph
+
+# inner-module imports
+from . import builder
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Open',
+ )
+
+
+## code ##
+
+# NOTE: Capitalized to mark entry point and to separate from builtin open.
+def Open(cfg: typing.Any) -> Graph: # pylint: disable=invalid-name
+ """Open a BSFS storage and return a `bsfs.graph.Graph` instance."""
+ return builder.build_graph(cfg)
+
+## EOF ##
diff --git a/bsfs/front/builder.py b/bsfs/front/builder.py
new file mode 100644
index 0000000..73f1703
--- /dev/null
+++ b/bsfs/front/builder.py
@@ -0,0 +1,75 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs.graph import Graph
+from bsfs.triple_store import TripleStoreBase, SparqlStore
+from bsfs.utils import URI, errors
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'build_graph',
+ )
+
+# constants
+_graph_classes = {
+ 'Graph': Graph,
+ }
+
+_backend_classes = {
+ 'SparqlStore': SparqlStore,
+ }
+
+
+## code ##
+
+def build_backend(cfg: typing.Any) -> TripleStoreBase:
+ """Build and return a backend from user-provided config."""
+ # essential checks
+ if not isinstance(cfg, dict):
+ raise TypeError(cfg)
+ if len(cfg) != 1:
+ raise errors.ConfigError(f'expected a single key that identifies the backend class, found {list(cfg)}')
+ # unpack from config
+ name = next(iter(cfg))
+ args = cfg[name]
+ # check name
+ if name not in _backend_classes:
+ raise errors.ConfigError(f'{name} is not a valid triple store class name')
+ # build and return backend
+ cls = _backend_classes[name]
+ return cls.Open(**args)
+
+
+def build_graph(cfg: typing.Any) -> Graph:
+ """Build and return a Graph from user-provided config."""
+ # essential checks
+ if not isinstance(cfg, dict):
+ raise TypeError(cfg)
+ if len(cfg) != 1:
+ raise errors.ConfigError(f'expected a single key that identifies the graph class, found {list(cfg)}')
+ # unpack from config
+ name = next(iter(cfg))
+ args = cfg[name]
+ # check name
+ if name not in _graph_classes:
+ raise errors.ConfigError(f'{name} is not a valid graph class name')
+ # check user argument
+ if 'user' not in args:
+ raise errors.ConfigError('required argument "user" is not provided')
+ user = URI(args['user'])
+ # check backend argument
+ if 'backend' not in args:
+ raise errors.ConfigError('required argument "backend" is not provided')
+ backend = build_backend(args['backend'])
+ # build and return graph
+ cls = _graph_classes[name]
+ return cls(backend, user)
+
+## EOF ##
diff --git a/bsfs/graph/__init__.py b/bsfs/graph/__init__.py
new file mode 100644
index 0000000..82d2235
--- /dev/null
+++ b/bsfs/graph/__init__.py
@@ -0,0 +1,18 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .graph import Graph
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Graph',
+ )
+
+## EOF ##
diff --git a/bsfs/graph/ac/__init__.py b/bsfs/graph/ac/__init__.py
new file mode 100644
index 0000000..420de01
--- /dev/null
+++ b/bsfs/graph/ac/__init__.py
@@ -0,0 +1,20 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .base import AccessControlBase
+from .null import NullAC
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'AccessControlBase',
+ 'NullAC',
+ )
+
+## EOF ##
diff --git a/bsfs/graph/ac/base.py b/bsfs/graph/ac/base.py
new file mode 100644
index 0000000..bc9aeb3
--- /dev/null
+++ b/bsfs/graph/ac/base.py
@@ -0,0 +1,71 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import abc
+import typing
+
+# bsfs imports
+from bsfs import schema
+from bsfs.triple_store import TripleStoreBase
+from bsfs.utils import URI
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'AccessControlBase',
+ )
+
+
+## code ##
+
+class AccessControlBase(abc.ABC):
+ """Defines the interface for access control policies.
+
+ An access control policy governs which actions a user may take to query
+ or to manipulate a graph.
+
+ """
+
+ # The triple store backend.
+ _backend: TripleStoreBase
+
+ # The current user.
+ _user: URI
+
+ def __init__(
+ self,
+ backend: TripleStoreBase,
+ user: URI,
+ ):
+ self._backend = backend
+ self._user = URI(user)
+
+ @abc.abstractmethod
+ def is_protected_predicate(self, pred: schema.Predicate) -> bool:
+ """Return True if a predicate cannot be modified manually."""
+
+ @abc.abstractmethod
+ def create(self, node_type: schema.Node, guids: typing.Iterable[URI]):
+ """Perform post-creation operations on nodes, e.g. ownership information."""
+
+ @abc.abstractmethod
+ def link_from_node(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which outbound links can be written."""
+
+ @abc.abstractmethod
+ def link_to_node(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which inbound links can be written."""
+
+ @abc.abstractmethod
+ def write_literal(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes to which literals can be attached."""
+
+ @abc.abstractmethod
+ def createable(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes that are allowed to be created."""
+
+
+## EOF ##
diff --git a/bsfs/graph/ac/null.py b/bsfs/graph/ac/null.py
new file mode 100644
index 0000000..36838bd
--- /dev/null
+++ b/bsfs/graph/ac/null.py
@@ -0,0 +1,52 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs import schema
+from bsfs.namespace import ns
+from bsfs.utils import URI
+
+# inner-module imports
+from . import base
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'NullAC',
+ )
+
+
+## code ##
+
+class NullAC(base.AccessControlBase):
+ """The NULL access control implements a dummy policy that allows any action to any user."""
+
+ def is_protected_predicate(self, pred: schema.Predicate) -> bool:
+ """Return True if a predicate cannot be modified manually."""
+ return pred.uri == ns.bsm.t_created
+
+ def create(self, node_type: schema.Node, guids: typing.Iterable[URI]):
+ """Perform post-creation operations on nodes, e.g. ownership information."""
+
+ def link_from_node(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which outbound links can be written."""
+ return guids
+
+ def link_to_node(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes for which inbound links can be written."""
+ return guids
+
+ def write_literal(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes to which literals can be attached."""
+ return guids
+
+ def createable(self, node_type: schema.Node, guids: typing.Iterable[URI]) -> typing.Iterable[URI]:
+ """Return nodes that are allowed to be created."""
+ return guids
+
+## EOF ##
diff --git a/bsfs/graph/graph.py b/bsfs/graph/graph.py
new file mode 100644
index 0000000..b7b9f1c
--- /dev/null
+++ b/bsfs/graph/graph.py
@@ -0,0 +1,113 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import os
+import typing
+
+# bsfs imports
+from bsfs.schema import Schema
+from bsfs.triple_store import TripleStoreBase
+from bsfs.utils import URI, typename
+
+# inner-module imports
+from . import nodes as _nodes
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Graph',
+ )
+
+
+## code ##
+
+class Graph():
+    """The Graph class is the main interface to a bsfs storage.
+
+ The Graph class provides a convenient interface to query and access a graph.
+ Since it logically builds on the concept of graphs it is easier to
+ navigate than raw triple stores. Naturally, it uses a triple store
+ as *backend*. It also controls actions via access permissions to a *user*.
+
+ """
+
+ # link to the triple storage backend.
+ _backend: TripleStoreBase
+
+ # user uri.
+ _user: URI
+
+ def __init__(self, backend: TripleStoreBase, user: URI):
+ self._backend = backend
+ self._user = user
+ # ensure Graph schema requirements
+ self.migrate(self._backend.schema)
+
+ def __hash__(self) -> int:
+ return hash((type(self), self._backend, self._user))
+
+ def __eq__(self, other) -> bool:
+ return isinstance(other, type(self)) \
+ and self._backend == other._backend \
+ and self._user == other._user
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}(backend={repr(self._backend)}, user={self._user})'
+
+ def __str__(self) -> str:
+ return f'{typename(self)}({str(self._backend)}, {self._user})'
+
+ @property
+ def schema(self) -> Schema:
+ """Return the store's local schema."""
+ return self._backend.schema
+
+ def migrate(self, schema: Schema, append: bool = True) -> 'Graph':
+ """Migrate the current schema to a new *schema*.
+
+ Appends to the current schema by default; control this via *append*.
+        The `Graph` may add additional classes to the schema that are required for its internals.
+
+ """
+ # check args
+ if not isinstance(schema, Schema):
+ raise TypeError(schema)
+ # append to current schema
+ if append:
+ schema = schema + self._backend.schema
+ # add Graph schema requirements
+ with open(os.path.join(os.path.dirname(__file__), 'schema.nt'), mode='rt', encoding='UTF-8') as ifile:
+ schema = schema + Schema.from_string(ifile.read())
+ # migrate schema in backend
+ # FIXME: consult access controls!
+ self._backend.schema = schema
+ # return self
+ return self
+
+ def nodes(self, node_type: URI, guids: typing.Iterable[URI]) -> _nodes.Nodes:
+ """Return nodes *guids* of type *node_type* as a `bsfs.graph.Nodes` instance.
+
+        Note that the *guids* need not exist (however, the *node_type* has
+        to be part of the schema). Nonexistent guids will be created (using
+        *node_type*) once some data is assigned to them.
+
+ """
+ type_ = self.schema.node(node_type)
+ # NOTE: Nodes constructor materializes guids.
+ return _nodes.Nodes(self._backend, self._user, type_, guids)
+
+ def node(self, node_type: URI, guid: URI) -> _nodes.Nodes:
+ """Return node *guid* of type *node_type* as a `bsfs.graph.Nodes` instance.
+
+        Note that the *guid* need not exist (however, the *node_type* has
+        to be part of the schema). A nonexistent guid will be created (using
+        *node_type*) once some data is assigned to it.
+
+ """
+ type_ = self.schema.node(node_type)
+ return _nodes.Nodes(self._backend, self._user, type_, {guid})
+
+## EOF ##
diff --git a/bsfs/graph/nodes.py b/bsfs/graph/nodes.py
new file mode 100644
index 0000000..c417a0e
--- /dev/null
+++ b/bsfs/graph/nodes.py
@@ -0,0 +1,217 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import time
+import typing
+
+# bsfs imports
+from bsfs import schema as _schema
+from bsfs.namespace import ns
+from bsfs.triple_store import TripleStoreBase
+from bsfs.utils import errors, URI, typename
+
+# inner-module imports
+from . import ac
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Nodes',
+ )
+
+
+## code ##
+
+class Nodes():
+ """
+ NOTE: guids may or may not exist. This is not verified as nodes are created on demand.
+ """
+
+ # triple store backend.
+ _backend: TripleStoreBase
+
+ # user uri.
+ _user: URI
+
+ # node type.
+ _node_type: _schema.Node
+
+ # guids of nodes. Can be empty.
+ _guids: typing.Set[URI]
+
+ def __init__(
+ self,
+ backend: TripleStoreBase,
+ user: URI,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+ self._backend = backend
+ self._user = user
+ self._node_type = node_type
+ self._guids = set(guids)
+ self.__ac = ac.NullAC(self._backend, self._user)
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return isinstance(other, Nodes) \
+ and self._backend == other._backend \
+ and self._user == other._user \
+ and self._node_type == other._node_type \
+ and self._guids == other._guids
+
+ def __hash__(self) -> int:
+ return hash((type(self), self._backend, self._user, self._node_type, tuple(sorted(self._guids))))
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}({self._backend}, {self._user}, {self._node_type}, {self._guids})'
+
+ def __str__(self) -> str:
+ return f'{typename(self)}({self._node_type}, {self._guids})'
+
+ @property
+ def node_type(self) -> _schema.Node:
+ """Return the node's type."""
+ return self._node_type
+
+ @property
+ def guids(self) -> typing.Iterator[URI]:
+ """Return all node guids."""
+ return iter(self._guids)
+
+ def set(
+ self,
+ pred: URI, # FIXME: URI or _schema.Predicate?
+ value: typing.Any,
+ ) -> 'Nodes':
+ """Set predicate *pred* to *value*."""
+ return self.set_from_iterable([(pred, value)])
+
+ def set_from_iterable(
+ self,
+ predicate_values: typing.Iterable[typing.Tuple[URI, typing.Any]], # FIXME: URI or _schema.Predicate?
+ ) -> 'Nodes':
+        """Set multiple predicate-value pairs at once."""
+ # TODO: Could group predicate_values by predicate to gain some efficiency
+ # TODO: ignore errors on some predicates; For now this could leave residual
+ # data (e.g. some nodes were created, some not).
+ try:
+ # insert triples
+ for pred, value in predicate_values:
+ self.__set(pred, value)
+ # save changes
+ self._backend.commit()
+
+ except (
+ errors.PermissionDeniedError, # tried to set a protected predicate (ns.bsm.t_created)
+ errors.ConsistencyError, # node types are not in the schema or don't match the predicate
+ errors.InstanceError, # guids/values don't have the correct type
+ TypeError, # value is supposed to be a Nodes instance
+ ValueError, # multiple values passed to unique predicate
+ ):
+ # revert changes
+ self._backend.rollback()
+ # notify the client
+ raise
+
+ # FIXME: How about other errors? Shouldn't I then rollback as well?!
+
+ return self
+
+ def __set(self, predicate: URI, value: typing.Any):
+        """Set a single *predicate* to *value* on all nodes, enforcing
+        schema consistency and access control; creates missing nodes."""
+ # get normalized predicate. Raises KeyError if *pred* not in the schema.
+ pred = self._backend.schema.predicate(predicate)
+
+ # node_type must be a subclass of the predicate's domain
+ node_type = self.node_type
+ if not node_type <= pred.domain:
+ raise errors.ConsistencyError(f'{node_type} must be a subclass of {pred.domain}')
+
+ # check reserved predicates (access controls, metadata, internal structures)
+ # FIXME: Needed? Could be integrated into other AC methods (by passing the predicate!)
+ # This could allow more fine-grained predicate control (e.g. based on ownership)
+ # rather than a global approach like this.
+ if self.__ac.is_protected_predicate(pred):
+ raise errors.PermissionDeniedError(pred)
+
+ # set operation affects all nodes (if possible)
+ guids = set(self.guids)
+
+ # ensure subject node existence; create nodes if need be
+ guids = set(self._ensure_nodes(node_type, guids))
+
+ # check value
+ if isinstance(pred.range, _schema.Literal):
+ # check write permissions on existing nodes
+ # As long as the user has write permissions, we don't restrict
+ # the creation or modification of literal values.
+ guids = set(self.__ac.write_literal(node_type, guids))
+
+ # insert literals
+ # TODO: Support passing iterators as values for non-unique predicates
+ self._backend.set(
+ node_type,
+ guids,
+ pred,
+ [value],
+ )
+
+ elif isinstance(pred.range, _schema.Node):
+ # check value type
+ if not isinstance(value, Nodes):
+ raise TypeError(value)
+ # value's node_type must be a subclass of the predicate's range
+ if not value.node_type <= pred.range:
+ raise errors.ConsistencyError(f'{value.node_type} must be a subclass of {pred.range}')
+
+ # check link permissions on source nodes
+ # Link permissions cover adding and removing links on the source node.
+ # Specifically, link permissions also allow to remove links to other
+ # nodes if needed (e.g. for unique predicates).
+ guids = set(self.__ac.link_from_node(node_type, guids))
+
+ # get link targets
+ targets = set(value.guids)
+ # ensure existence of value nodes; create nodes if need be
+ targets = set(self._ensure_nodes(value.node_type, targets))
+ # check link permissions on target nodes
+ targets = set(self.__ac.link_to_node(value.node_type, targets))
+
+ # insert node links
+ self._backend.set(
+ node_type,
+ guids,
+ pred,
+ targets,
+ )
+
+ else:
+ raise errors.UnreachableError()
+
+ def _ensure_nodes(self, node_type: _schema.Node, guids: typing.Iterable[URI]):
+        """Ensure that *guids* of type *node_type* exist in the backend,
+        creating permissible missing ones; return the available guids."""
+ # check node existence
+ guids = set(guids)
+ existing = set(self._backend.exists(node_type, guids))
+ # get nodes to be created
+ missing = guids - existing
+ # create nodes if need be
+ if len(missing) > 0:
+ # check which missing nodes can be created
+ missing = set(self.__ac.createable(node_type, missing))
+ # create nodes
+ self._backend.create(node_type, missing)
+ # add bookkeeping triples
+ self._backend.set(node_type, missing,
+ self._backend.schema.predicate(ns.bsm.t_created), [time.time()])
+ # add permission triples
+ self.__ac.create(node_type, missing)
+ # return available nodes
+ return existing | missing
+
+## EOF ##
diff --git a/bsfs/graph/schema.nt b/bsfs/graph/schema.nt
new file mode 100644
index 0000000..8612681
--- /dev/null
+++ b/bsfs/graph/schema.nt
@@ -0,0 +1,18 @@
+
+# generic prefixes
+prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+prefix xsd: <http://www.w3.org/2001/XMLSchema#>
+
+# bsfs prefixes
+prefix bsfs: <http://bsfs.ai/schema/>
+prefix bsm: <http://bsfs.ai/schema/Meta#>
+
+# literals
+xsd:integer rdfs:subClassOf bsfs:Literal .
+
+# predicates
+bsm:t_created rdfs:subClassOf bsfs:Predicate ;
+ rdfs:domain bsfs:Node ;
+ rdfs:range xsd:integer ;
+ bsfs:unique "true"^^xsd:boolean .
+
diff --git a/bsfs/namespace/__init__.py b/bsfs/namespace/__init__.py
new file mode 100644
index 0000000..98d472f
--- /dev/null
+++ b/bsfs/namespace/__init__.py
@@ -0,0 +1,21 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from . import predefined as ns
+from .namespace import ClosedNamespace, Namespace
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'ClosedNamespace',
+ 'Namespace',
+ 'ns',
+ )
+
+## EOF ##
diff --git a/bsfs/namespace/namespace.py b/bsfs/namespace/namespace.py
new file mode 100644
index 0000000..f652dcd
--- /dev/null
+++ b/bsfs/namespace/namespace.py
@@ -0,0 +1,104 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs.utils import URI, typename
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'ClosedNamespace',
+ 'Namespace',
+ )
+
+
+## code ##
+
+class Namespace():
+ """A namespace consists of a common prefix that is used in a set of URIs.
+
+ Note that the prefix must include the separator between
+ path and fragment (typically a '#' or a '/').
+ """
+
+ # namespace prefix.
+ prefix: URI
+
+ # fragment separator.
+ fsep: str
+
+ # path separator.
+ psep: str
+
+ def __init__(self, prefix: URI, fsep: str = '#', psep: str = '/'):
+ # ensure prefix type
+ prefix = URI(prefix)
+ # truncate fragment separator
+ while prefix.endswith(fsep):
+ prefix = URI(prefix[:-1])
+ # truncate path separator
+ while prefix.endswith(psep):
+ prefix = URI(prefix[:-1])
+ # store members
+ self.prefix = prefix
+ self.fsep = fsep
+ self.psep = psep
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return isinstance(other, type(self)) \
+ and self.prefix == other.prefix \
+ and self.fsep == other.fsep \
+ and self.psep == other.psep
+
+ def __hash__(self) -> int:
+ return hash((type(self), self.prefix, self.fsep, self.psep))
+
+ def __str__(self) -> str:
+ return f'{typename(self)}({self.prefix})'
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}({self.prefix}, {self.fsep}, {self.psep})'
+
+ def __getattr__(self, fragment: str) -> URI:
+ """Return prefix + fragment."""
+ return URI(self.prefix + self.fsep + fragment)
+
+ def __getitem__(self, fragment: str) -> URI:
+ """Alias for getattr(self, fragment)."""
+ return self.__getattr__(fragment)
+
+ def __add__(self, value: typing.Any) -> 'Namespace':
+ """Concatenate another namespace to this one."""
+ if not isinstance(value, str):
+ return NotImplemented
+ return Namespace(self.prefix + self.psep + value, self.fsep, self.psep)
+
+
+class ClosedNamespace(Namespace):
+ """Namespace that covers a restricted set of URIs."""
+
+ # set of permissible fragments.
+ fragments: typing.Set[str]
+
+ def __init__(self, prefix: URI, *args: str, fsep: str = '#', psep: str = '/'):
+ super().__init__(prefix, fsep, psep)
+ self.fragments = set(args)
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return super().__eq__(other) and self.fragments == other.fragments
+
+ def __hash__(self) -> int:
+ return hash((type(self), self.prefix, tuple(sorted(self.fragments))))
+
+ def __getattr__(self, fragment: str) -> URI:
+ """Return prefix + fragment or raise a KeyError if the fragment is not part of this namespace."""
+ if fragment not in self.fragments:
+ raise KeyError(f'{fragment} is not a valid fragment of namespace {self.prefix}')
+ return super().__getattr__(fragment)
+
+## EOF ##
diff --git a/bsfs/namespace/predefined.py b/bsfs/namespace/predefined.py
new file mode 100644
index 0000000..cd48a46
--- /dev/null
+++ b/bsfs/namespace/predefined.py
@@ -0,0 +1,39 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs.utils import URI
+
+# inner-module imports
+from . import namespace
+
+# essential bsfs namespaces
+bsfs: namespace.Namespace = namespace.Namespace(URI('http://bsfs.ai/schema'), fsep='/')
+
+# additional bsfs namespaces
+bse: namespace.Namespace = namespace.Namespace(URI('http://bsfs.ai/schema/Entity'))
+bsm: namespace.Namespace = namespace.Namespace(URI('http://bsfs.ai/schema/Meta'))
+
+# generic namespaces
+rdf: namespace.Namespace = namespace.Namespace(URI('http://www.w3.org/1999/02/22-rdf-syntax-ns'))
+rdfs: namespace.Namespace = namespace.Namespace(URI('http://www.w3.org/2000/01/rdf-schema'))
+schema: namespace.Namespace = namespace.Namespace(URI('http://schema.org'), fsep='/')
+xsd: namespace.Namespace = namespace.Namespace(URI('http://www.w3.org/2001/XMLSchema'))
+
+__all__: typing.Sequence[str] = (
+ 'bse',
+ 'bsfs',
+ 'bsm',
+ 'rdf',
+ 'rdfs',
+ 'schema',
+ 'xsd',
+ )
+
+## EOF ##
diff --git a/bsfs/schema/__init__.py b/bsfs/schema/__init__.py
new file mode 100644
index 0000000..ad4d456
--- /dev/null
+++ b/bsfs/schema/__init__.py
@@ -0,0 +1,22 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .schema import Schema
+from .types import Literal, Node, Predicate
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Literal',
+ 'Node',
+ 'Predicate',
+ 'Schema',
+ )
+
+## EOF ##
diff --git a/bsfs/schema/schema.py b/bsfs/schema/schema.py
new file mode 100644
index 0000000..c5d4571
--- /dev/null
+++ b/bsfs/schema/schema.py
@@ -0,0 +1,386 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+from collections import abc, namedtuple
+import typing
+import rdflib
+
+# bsfs imports
+from bsfs.namespace import ns
+from bsfs.utils import errors, URI, typename
+
+# inner-module imports
+from . import types
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Schema',
+ )
+
+
+## code ##
+
+class Schema():
+ """Graph schema.
+
+ Use `Schema.Empty()` to create a new, empty Schema rather than construct
+ it directly.
+
+ The schema is defined by three sets: Predicates, Nodes, and Literals.
+
+ The Schema class guarantees two properties: completeness and consistency.
+    Completeness means that the schema covers all classes that are referred to
+ by any other class in the schema. Consistency means that each class is
+ identified by a unique URI and all classes that use that URI consequently
+ use the same definition.
+
+ """
+
+ # node classes.
+ _nodes: typing.Dict[URI, types.Node]
+
+ # literal classes.
+ _literals: typing.Dict[URI, types.Literal]
+
+ # predicate classes.
+ _predicates: typing.Dict[URI, types.Predicate]
+
+ def __init__(
+ self,
+ predicates: typing.Iterable[types.Predicate],
+ nodes: typing.Optional[typing.Iterable[types.Node]] = None,
+ literals: typing.Optional[typing.Iterable[types.Literal]] = None,
+ ):
+ # materialize arguments
+ if nodes is None:
+ nodes = set()
+ if literals is None:
+ literals = set()
+ nodes = set(nodes)
+ literals = set(literals)
+ predicates = set(predicates)
+ # include parents in predicates set
+ # TODO: review type annotations and ignores for python >= 3.11 (parents is _Type but should be typing.Self)
+ predicates |= {par for pred in predicates for par in pred.parents()} # type: ignore [misc]
+ # include predicate domain in nodes set
+ nodes |= {pred.domain for pred in predicates}
+ # include predicate range in nodes and literals sets
+ prange = {pred.range for pred in predicates if pred.range is not None}
+ nodes |= {vert for vert in prange if isinstance(vert, types.Node)}
+ literals |= {vert for vert in prange if isinstance(vert, types.Literal)}
+ # include parents in nodes and literals sets
+ # NOTE: Must be done after predicate domain/range was handled
+ # so that their parents are included as well.
+ nodes |= {par for node in nodes for par in node.parents()} # type: ignore [misc]
+ literals |= {par for lit in literals for par in lit.parents()} # type: ignore [misc]
+ # assign members
+ self._nodes = {node.uri: node for node in nodes}
+ self._literals = {lit.uri: lit for lit in literals}
+ self._predicates = {pred.uri: pred for pred in predicates}
+ # verify unique uris
+ if len(nodes) != len(self._nodes):
+ raise errors.ConsistencyError('inconsistent nodes')
+ if len(literals) != len(self._literals):
+ raise errors.ConsistencyError('inconsistent literals')
+ if len(predicates) != len(self._predicates):
+ raise errors.ConsistencyError('inconsistent predicates')
+ # verify globally unique uris
+ n_uris = len(set(self._nodes) | set(self._literals) | set(self._predicates))
+ if n_uris != len(self._nodes) + len(self._literals) + len(self._predicates):
+ raise errors.ConsistencyError('URI dual use')
+
+
+ ## essentials ##
+
+ def __str__(self) -> str:
+ return f'{typename(self)}()'
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}({sorted(self._nodes)}, {sorted(self._literals)}, {sorted(self._predicates)})'
+
+ def __hash__(self) -> int:
+ return hash((
+ type(self),
+ tuple(sorted(self._nodes.values())),
+ tuple(sorted(self._literals.values())),
+ tuple(sorted(self._predicates.values())),
+ ))
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return isinstance(other, type(self)) \
+ and self._nodes == other._nodes \
+ and self._literals == other._literals \
+ and self._predicates == other._predicates
+
+
+ ## operators ##
+
+ SchemaDiff = namedtuple('SchemaDiff', ['nodes', 'literals', 'predicates'])
+
+ def _issubset(self, other: 'Schema') -> bool:
+ # inconsistent schema can't be ordered.
+ if not self.consistent_with(other):
+ return False
+ # since schemas are consistent, it's sufficient to compare their URIs.
+ # self's sets are fully contained in other's sets
+ # pylint: disable=protected-access
+ return set(self._predicates) <= set(other._predicates) \
+ and set(self._nodes) <= set(other._nodes) \
+ and set(self._literals) <= set(other._literals)
+
+ def __lt__(self, other: typing.Any) -> bool:
+        """Return True if *self* is a true subset of *other*."""
+ if not isinstance(other, Schema): # other is not a Schema
+ return NotImplemented
+ return self != other and self._issubset(other)
+
+ def __le__(self, other: typing.Any) -> bool:
+        """Return True if *self* is a subset of *other*."""
+ if not isinstance(other, Schema): # other is not a Schema
+ return NotImplemented
+ return self == other or self._issubset(other)
+
+ def __gt__(self, other: typing.Any) -> bool:
+        """Return True if *self* is a true superset of *other*."""
+ if not isinstance(other, Schema): # other is not a Schema
+ return NotImplemented
+ return self != other and other._issubset(self)
+
+ def __ge__(self, other: typing.Any) -> bool:
+        """Return True if *self* is a superset of *other*."""
+ if not isinstance(other, Schema): # other is not a Schema
+ return NotImplemented
+ return self == other or other._issubset(self)
+
+ def diff(self, other: 'Schema') -> SchemaDiff:
+ """Return node, literals, and predicates that are in *self* but not in *other*."""
+ return self.SchemaDiff(
+ nodes=set(self.nodes()) - set(other.nodes()),
+ literals=set(self.literals()) - set(other.literals()),
+ predicates=set(self.predicates()) - set(other.predicates()),
+ )
+
+ def __sub__(self, other: typing.Any) -> SchemaDiff:
+ """Alias for `Schema.diff`."""
+ if not isinstance(other, Schema):
+ return NotImplemented
+ return self.diff(other)
+
+ def consistent_with(self, other: 'Schema') -> bool:
+        """Return True iff *self* and *other* have no contradicting predicate, node, or literal definitions for the same uri."""
+ # check arg
+ if not isinstance(other, Schema):
+ raise TypeError(other)
+ # node consistency
+ nodes = set(self.nodes()) | set(other.nodes())
+ nuris = {node.uri for node in nodes}
+ if len(nodes) != len(nuris):
+ return False
+ # literal consistency
+ literals = set(self.literals()) | set(other.literals())
+ luris = {lit.uri for lit in literals}
+ if len(literals) != len(luris):
+ return False
+ # predicate consistency
+ predicates = set(self.predicates()) | set(other.predicates())
+ puris = {pred.uri for pred in predicates}
+ if len(predicates) != len(puris):
+ return False
+ # global consistency
+ if len(puris | luris | nuris) != len(nodes) + len(literals) + len(predicates):
+ return False
+ # all checks passed
+ return True
+
+ @classmethod
+ def Union( # pylint: disable=invalid-name # capitalized classmethod
+ cls,
+ *args: typing.Union['Schema', typing.Iterable['Schema']]
+ ) -> 'Schema':
+ """Combine multiple Schema instances into a single one.
+ As argument, you can either pass multiple Schema instances, or a single
+ iterable over Schema instances. Any abc.Iterable will be accepted.
+
+ Example:
+
+ >>> a, b, c = Schema.Empty(), Schema.Empty(), Schema.Empty()
+ >>> # multiple Schema instances
+ >>> Schema.Union(a, b, c)
+ >>> # A single iterable over Schema instances
+ >>> Schema.Union([a, b, c])
+
+ """
+ if len(args) == 0:
+ raise TypeError('Schema.Union requires at least one argument (Schema or Iterable)')
+ if isinstance(args[0], cls): # args is sequence of Schema instances
+ pass
+ elif len(args) == 1 and isinstance(args[0], abc.Iterable): # args is a single iterable
+ args = args[0] # type: ignore [assignment] # we checked and thus know that args[0] is an iterable
+ else:
+ raise TypeError(f'expected multiple Schema instances or a single Iterable, found {args}')
+
+ nodes, literals, predicates = set(), set(), set()
+ for schema in args:
+ # check argument
+ if not isinstance(schema, cls):
+ raise TypeError(schema)
+ # merge with previous schemas
+ nodes |= set(schema.nodes())
+ literals |= set(schema.literals())
+ predicates |= set(schema.predicates())
+ # return new Schema instance
+ return cls(predicates, nodes, literals)
+
+ def union(self, other: 'Schema') -> 'Schema':
+ """Merge *other* and *self* into a new Schema. *self* takes precedence."""
+ # check type
+ if not isinstance(other, type(self)):
+ raise TypeError(other)
+ # return combined schemas
+ return self.Union(self, other)
+
+ def __add__(self, other: typing.Any) -> 'Schema':
+ """Alias for Schema.union."""
+ try: # return merged schemas
+ return self.union(other)
+ except TypeError:
+ return NotImplemented
+
+ def __or__(self, other: typing.Any) -> 'Schema':
+ """Alias for Schema.union."""
+ return self.__add__(other)
+
+
+ ## getters ##
+ # FIXME: nodes, predicates, literals could be properties
+ # FIXME: interchangeability of URI and _Type?!
+
+ def has_node(self, node: URI) -> bool:
+ """Return True if a Node with URI *node* is part of the schema."""
+ return node in self._nodes
+
+ def has_literal(self, lit: URI) -> bool:
+ """Return True if a Literal with URI *lit* is part of the schema."""
+ return lit in self._literals
+
+ def has_predicate(self, pred: URI) -> bool:
+ """Return True if a Predicate with URI *pred* is part of the schema."""
+ return pred in self._predicates
+
+ def nodes(self) -> typing.Iterable[types.Node]:
+ """Return an iterator over Node classes."""
+ return self._nodes.values()
+
+ def literals(self) -> typing.Iterable[types.Literal]:
+ """Return an iterator over Literal classes."""
+ return self._literals.values()
+
+ def predicates(self) -> typing.Iterable[types.Predicate]:
+ """Return an iterator over Predicate classes."""
+ return self._predicates.values()
+
+ def node(self, uri: URI) -> types.Node:
+ """Return the Node matching the *uri*."""
+ return self._nodes[uri]
+
+ def predicate(self, uri: URI) -> types.Predicate:
+ """Return the Predicate matching the *uri*."""
+ return self._predicates[uri]
+
+ def literal(self, uri: URI) -> types.Literal:
+ """Return the Literal matching the *uri*."""
+ return self._literals[uri]
+
+
+ ## constructors ##
+
+
+ @classmethod
+ def Empty(cls) -> 'Schema': # pylint: disable=invalid-name # capitalized classmethod
+ """Return a minimal Schema."""
+ node = types.Node(ns.bsfs.Node, None)
+ literal = types.Literal(ns.bsfs.Literal, None)
+ predicate = types.Predicate(
+ uri=ns.bsfs.Predicate,
+ parent=None,
+ domain=node,
+ range=None,
+ unique=False,
+ )
+ return cls((predicate, ), (node, ), (literal, ))
+
+
+ @classmethod
+ def from_string(cls, schema: str) -> 'Schema': # pylint: disable=invalid-name # capitalized classmethod
+ """Load and return a Schema from a string."""
+ # parse string into rdf graph
+ graph = rdflib.Graph()
+ graph.parse(data=schema, format='turtle')
+
+ def _fetch_hierarchically(factory, curr):
+ # emit current node
+ yield curr
+        # walk through children
+ for child in graph.subjects(rdflib.URIRef(ns.rdfs.subClassOf), rdflib.URIRef(curr.uri)):
+ # convert to URI
+ child = URI(child)
+ # check circular dependency
+ if child == curr.uri or child in {node.uri for node in curr.parents()}:
+ raise errors.ConsistencyError('circular dependency')
+            # recurse and emit (sub*)children
+ yield from _fetch_hierarchically(factory, factory(child, curr))
+
+ # fetch nodes
+ nodes = set(_fetch_hierarchically(types.Node, types.Node(ns.bsfs.Node, None)))
+ nodes_lut = {node.uri: node for node in nodes}
+ if len(nodes_lut) != len(nodes):
+ raise errors.ConsistencyError('inconsistent nodes')
+
+ # fetch literals
+ literals = set(_fetch_hierarchically(types.Literal, types.Literal(ns.bsfs.Literal, None)))
+ literals_lut = {lit.uri: lit for lit in literals}
+ if len(literals_lut) != len(literals):
+ raise errors.ConsistencyError('inconsistent literals')
+
+ # fetch predicates
+ def build_predicate(uri, parent):
+ uri = rdflib.URIRef(uri)
+ # get domain
+ domains = set(graph.objects(uri, rdflib.RDFS.domain))
+ if len(domains) != 1:
+ raise errors.ConsistencyError(f'inconsistent domain: {domains}')
+ dom = nodes_lut.get(next(iter(domains)))
+ if dom is None:
+ raise errors.ConsistencyError('missing domain')
+ # get range
+ ranges = set(graph.objects(uri, rdflib.RDFS.range))
+ if len(ranges) != 1:
+ raise errors.ConsistencyError(f'inconsistent range: {ranges}')
+ rng = next(iter(ranges))
+ rng = nodes_lut.get(rng, literals_lut.get(rng))
+ if rng is None:
+ raise errors.ConsistencyError('missing range')
+ # get unique flag
+ uniques = set(graph.objects(uri, rdflib.URIRef(ns.bsfs.unique)))
+ if len(uniques) != 1:
+ raise errors.ConsistencyError(f'inconsistent unique flags: {uniques}')
+ unique = bool(next(iter(uniques)))
+ # build Predicate
+ return types.Predicate(URI(uri), parent, dom, rng, unique)
+
+ root_predicate = types.Predicate(
+ uri=ns.bsfs.Predicate,
+ parent=None,
+ domain=nodes_lut[ns.bsfs.Node],
+ range=None, # FIXME: Unclear how to handle this! Can be either a Literal or a Node
+ unique=False,
+ )
+ predicates = _fetch_hierarchically(build_predicate, root_predicate)
+ # return Schema
+ return cls(predicates, nodes, literals)
+
+## EOF ##
diff --git a/bsfs/schema/types.py b/bsfs/schema/types.py
new file mode 100644
index 0000000..54a7e99
--- /dev/null
+++ b/bsfs/schema/types.py
@@ -0,0 +1,276 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# bsfs imports
+from bsfs.utils import errors, URI, typename
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'Literal',
+ 'Node',
+ 'Predicate',
+ )
+
+
+## code ##
+
+class _Type():
+ """A class is defined via its uri.
+
+ Classes define a partial order.
+ The order operators indicate whether some class is a
+ superclass (greater-than) or a subclass (less-than) of another.
+ Comparisons are only supported within the same type.
+
+ For example, consider the class hierarchy below:
+
+ Vehicle
+ Two-wheel
+ Bike
+ Bicycle
+
+ >>> vehicle = _Type('Vehicle')
+ >>> twowheel = _Type('Two-wheel', vehicle)
+ >>> bike = _Type('Bike', twowheel)
+ >>> bicycle = _Type('Bicycle', twowheel)
+
+ Two-wheel is equivalent to itself
+ >>> twowheel == vehicle
+ False
+ >>> twowheel == twowheel
+ True
+ >>> twowheel == bicycle
+ False
+
+ Two-wheel is a true subclass of Vehicle
+ >>> twowheel < vehicle
+ True
+ >>> twowheel < twowheel
+ False
+ >>> twowheel < bicycle
+ False
+
+ Two-wheel is a subclass of itself and Vehicle
+ >>> twowheel <= vehicle
+ True
+ >>> twowheel <= twowheel
+ True
+ >>> twowheel <= bicycle
+ False
+
+ Two-wheel is a true superclass of Bicycle
+ >>> twowheel > vehicle
+ False
+ >>> twowheel > twowheel
+ False
+ >>> twowheel > bicycle
+ True
+
+ Two-wheel is a superclass of itself and Bicycle
+ >>> twowheel >= vehicle
+ False
+ >>> twowheel >= twowheel
+ True
+ >>> twowheel >= bicycle
+ True
+
+    Analogous to sets, this is not a total order:
+ >>> bike < bicycle
+ False
+ >>> bike > bicycle
+ False
+ >>> bike == bicycle
+ False
+ """
+
+ # class uri.
+ uri: URI
+
+ # parent's class uris.
+ parent: typing.Optional['_Type'] # TODO: for python >=3.11: use typing.Self
+
+ def __init__(
+ self,
+ uri: URI,
+ parent: typing.Optional['_Type'] = None,
+ ):
+ self.uri = uri
+ self.parent = parent
+
+ def parents(self) -> typing.Generator['_Type', None, None]:
+        """Yield the ancestors of this class, nearest parent first."""
+ curr = self.parent
+ while curr is not None:
+ yield curr
+ curr = curr.parent
+
+ def get_child(self, uri: URI, **kwargs):
+ """Return a child of the current class."""
+ return type(self)(uri, self, **kwargs)
+
+ def __str__(self) -> str:
+ return f'{typename(self)}({self.uri})'
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}({self.uri}, {repr(self.parent)})'
+
+ def __hash__(self) -> int:
+ return hash((type(self), self.uri, self.parent))
+
+ # NOTE: For equality and order functions (lt, gt, le, ge) we explicitly want type equality!
+ # Consider the statements below, with class Vehicle(_Type) and class TwoWheel(Vehicle):
+ # * Vehicle('foo', None) == TwoWheel('foo', None): Instances of different types cannot be equivalent.
+ # * Vehicle('foo', None) <= TwoWheel('foo', None): Cannot compare the different types Vehicles and TwoWheel.
+
+ def __eq__(self, other: typing.Any) -> bool:
+ """Return True iff *self* is equivalent to *other*."""
+ # pylint: disable=unidiomatic-typecheck
+ return type(other) is type(self) \
+ and self.uri == other.uri \
+ and self.parent == other.parent
+
+
+ def __lt__(self, other: typing.Any) -> bool:
+ """Return True iff *self* is a true subclass of *other*."""
+ if not type(self) is type(other): # type mismatch # pylint: disable=unidiomatic-typecheck
+ return NotImplemented
+ if self.uri == other.uri: # equivalence
+ return False
+ if self in other.parents(): # superclass
+ return False
+ if other in self.parents(): # subclass
+ return True
+ # not related
+ return False
+
+ def __le__(self, other: typing.Any) -> bool:
+ """Return True iff *self* is equivalent or a subclass of *other*."""
+ if not type(self) is type(other): # type mismatch # pylint: disable=unidiomatic-typecheck
+ return NotImplemented
+ if self.uri == other.uri: # equivalence
+ return True
+ if self in other.parents(): # superclass
+ return False
+ if other in self.parents(): # subclass
+ return True
+ # not related
+ return False
+
+ def __gt__(self, other: typing.Any) -> bool:
+ """Return True iff *self* is a true superclass of *other*."""
+ if not type(self) is type(other): # type mismatch # pylint: disable=unidiomatic-typecheck
+ return NotImplemented
+ if self.uri == other.uri: # equivalence
+ return False
+ if self in other.parents(): # superclass
+ return True
+ if other in self.parents(): # subclass
+ return False
+ # not related
+ return False
+
+ def __ge__(self, other: typing.Any) -> bool:
+        """Return True iff *self* is equivalent or a superclass of *other*."""
+ if not type(self) is type(other): # type mismatch # pylint: disable=unidiomatic-typecheck
+ return NotImplemented
+ if self.uri == other.uri: # equivalence
+ return True
+ if self in other.parents(): # superclass
+ return True
+ if other in self.parents(): # subclass
+ return False
+ # not related
+ return False
+
+
+class _Vertex(_Type):
+ """Graph vertex types. Can be a Node or a Literal."""
+ def __init__(self, uri: URI, parent: typing.Optional['_Vertex']):
+ super().__init__(uri, parent)
+
+
+class Node(_Vertex):
+ """Node type."""
+ def __init__(self, uri: URI, parent: typing.Optional['Node']):
+ super().__init__(uri, parent)
+
+
+class Literal(_Vertex):
+ """Literal type."""
+ def __init__(self, uri: URI, parent: typing.Optional['Literal']):
+ super().__init__(uri, parent)
+
+
+class Predicate(_Type):
+ """Predicate type."""
+
+ # source type.
+ domain: Node
+
+ # destination type.
+ range: typing.Optional[typing.Union[Node, Literal]]
+
+ # maximum cardinality of type.
+ unique: bool
+
+ def __init__(
+ self,
+ # Type members
+ uri: URI,
+ parent: typing.Optional['Predicate'],
+ # Predicate members
+ domain: Node,
+ range: typing.Optional[typing.Union[Node, Literal]], # pylint: disable=redefined-builtin
+ unique: bool,
+ ):
+ # check arguments
+ if not isinstance(domain, Node):
+ raise TypeError(domain)
+ if range is not None and not isinstance(range, Node) and not isinstance(range, Literal):
+ raise TypeError(range)
+ # initialize
+ super().__init__(uri, parent)
+ self.domain = domain
+ self.range = range
+ self.unique = unique
+
+ def __hash__(self) -> int:
+ return hash((super().__hash__(), self.domain, self.range, self.unique))
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return super().__eq__(other) \
+ and self.domain == other.domain \
+ and self.range == other.range \
+ and self.unique == other.unique
+
+ def get_child(
+ self,
+ uri: URI,
+ domain: typing.Optional[Node] = None,
+ range: typing.Optional[_Vertex] = None, # pylint: disable=redefined-builtin
+ unique: typing.Optional[bool] = None,
+ **kwargs,
+ ):
+ """Return a child of the current class."""
+ if domain is None:
+ domain = self.domain
+ if not domain <= self.domain:
+ raise errors.ConsistencyError(f'{domain} must be a subclass of {self.domain}')
+ if range is None:
+ range = self.range
+ if range is None: # inherited range from ns.bsfs.Predicate
+ raise ValueError('range must be defined by the parent or argument')
+ if self.range is not None and not range <= self.range:
+ raise errors.ConsistencyError(f'{range} must be a subclass of {self.range}')
+ if unique is None:
+ unique = self.unique
+ return super().get_child(uri, domain=domain, range=range, unique=unique, **kwargs)
+
+
+## EOF ##
diff --git a/bsfs/triple_store/__init__.py b/bsfs/triple_store/__init__.py
new file mode 100644
index 0000000..fb5a8a9
--- /dev/null
+++ b/bsfs/triple_store/__init__.py
@@ -0,0 +1,20 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from .base import TripleStoreBase
+from .sparql import SparqlStore
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'SparqlStore',
+ 'TripleStoreBase',
+ )
+
+## EOF ##
diff --git a/bsfs/triple_store/base.py b/bsfs/triple_store/base.py
new file mode 100644
index 0000000..6561262
--- /dev/null
+++ b/bsfs/triple_store/base.py
@@ -0,0 +1,148 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import abc
+import typing
+
+# inner-module imports
+from bsfs.utils import URI, typename
+import bsfs.schema as _schema
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'TripleStoreBase',
+ )
+
+
+## code ##
+
+class TripleStoreBase(abc.ABC):
+ """TripleStore base class.
+
+ Use the `Open` method to create a new instance and to initialize
+ the required structures.
+
+ Triple stores express a graph via its (subject, predicate, object) triples.
+    They provide methods to add and remove triples, and to query the storage
+ for given graph structures. The subject is always a node in the graph,
+    and every node is identified by a unique URI. Note that blank nodes
+ (without an explicit URI) are not supported. The object can be another
+ Node or a Literal value. The relation between a subject and an object
+ is expressed via a Predicate. The graph structures are governed by a
+ schema that defines which Node, Literal, and Predicate classes exist
+ and how they can interact (see `bsfs.schema.Schema`).
+
+ """
+
+ # storage's URI. None implies a temporary location.
+ uri: typing.Optional[URI] = None
+
+ def __init__(self, uri: typing.Optional[URI] = None):
+ self.uri = uri
+
+ def __hash__(self) -> int:
+ uri = self.uri if self.uri is not None else id(self)
+ return hash((type(self), uri))
+
+ def __eq__(self, other) -> bool:
+ return isinstance(other, type(self)) \
+ and (( self.uri is not None \
+ and other.uri is not None \
+ and self.uri == other.uri ) \
+ or id(self) == id(other))
+
+ def __repr__(self) -> str:
+ return f'{typename(self)}(uri={self.uri})'
+
+ def __str__(self) -> str:
+ return f'{typename(self)}(uri={self.uri})'
+
+ def is_persistent(self) -> bool:
+ """Return True if data is stored persistently."""
+ return self.uri is not None
+
+
+ @classmethod
+ @abc.abstractmethod
+ def Open(cls, **kwargs: typing.Any) -> 'TripleStoreBase': # pylint: disable=invalid-name # capitalized classmethod
+ """Return a TripleStoreBase instance connected to *uri*."""
+
+ @abc.abstractmethod
+ def commit(self):
+ """Commit the current transaction."""
+
+ @abc.abstractmethod
+ def rollback(self):
+ """Undo changes since the last commit."""
+
+ @property
+ @abc.abstractmethod
+ def schema(self) -> _schema.Schema:
+ """Return the store's local schema."""
+
+ @schema.setter
+ @abc.abstractmethod
+ def schema(self, schema: _schema.Schema):
+ """Migrate to new schema by adding or removing class definitions.
+
+ Commits before and after the migration.
+
+        Instances of removed classes will be deleted irreversibly.
+ Note that modifying an existing class is not directly supported.
+ Also, it is generally discouraged, since changing definitions may
+ lead to inconsistencies across multiple clients in a distributed
+ setting. Instead, consider introducing a new class under its own
+ uri. Such a migration would look as follows:
+
+ 1. Add new class definitions.
+ 2. Create instances of the new classes and copy relevant data.
+ 3. Remove the old definitions.
+
+ To modify a class, i.e., re-use a previous uri with a new
+ class definition, you would have to migrate via temporary
+ class definitions, and thus repeat the above procedure two times.
+
+ """
+
+ @abc.abstractmethod
+ def exists(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ) -> typing.Iterable[URI]:
+ """Return those *guids* that exist and have type *node_type* or a subclass thereof."""
+
+ @abc.abstractmethod
+ def create(
+ self,
+ node_type: _schema.Node,
+ guids: typing.Iterable[URI],
+ ):
+        """Create nodes with the given *guids* and type *node_type*."""
+
+ @abc.abstractmethod
+ def set(
+ self,
+ node_type: _schema.Node, # FIXME: is the node_type even needed? Couldn't I infer from the predicate?
+ guids: typing.Iterable[URI],
+ predicate: _schema.Predicate,
+ values: typing.Iterable[typing.Any],
+ ):
+ """Add triples to the graph.
+
+ It is assumed that all of *guids* exist and have *node_type*.
+ This method adds a triple (guid, predicate, value) for every guid in
+ *guids* and each value in *values* (cartesian product). Note that
+ *values* must have length one for unique predicates, and that
+ currently existing values will be overwritten in this case.
+ It also verifies that all symbols are part of the schema and that
+ the *predicate* matches the *node_type*.
+ Raises `bsfs.errors.ConsistencyError` if these assumptions are violated.
+
+ """
+
+## EOF ##
diff --git a/bsfs/triple_store/sparql.py b/bsfs/triple_store/sparql.py
new file mode 100644
index 0000000..7516dff
--- /dev/null
+++ b/bsfs/triple_store/sparql.py
@@ -0,0 +1,253 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import itertools
+import typing
+import rdflib
+
+# bsfs imports
+from bsfs import schema as bsc
+from bsfs.utils import errors, URI
+
+# inner-module imports
+from . import base
+
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'SparqlStore',
+ )
+
+
+## code ##
+
+class _Transaction():
+ """Lightweight rdflib transactions for in-memory databases."""
+
+ # graph instance.
+ _graph: rdflib.Graph
+
+ # current log of added triples.
+ _added: typing.List[typing.Any]
+
+ # current log of removed triples.
+ _removed: typing.List[typing.Any]
+
+ def __init__(self, graph: rdflib.Graph):
+ self._graph = graph
+ # initialize internal structures
+ self.commit()
+
+ def commit(self):
+ """Commit temporary changes."""
+ self._added = []
+ self._removed = []
+
+ def rollback(self):
+ """Undo changes since the last commit."""
+ for triple in self._added:
+ self._graph.remove(triple)
+ for triple in self._removed:
+ self._graph.add(triple)
+
+ def add(self, triple: typing.Any):
+ """Add a triple to the graph."""
+ if triple not in self._graph:
+ self._added.append(triple)
+ self._graph.add(triple)
+
+ def remove(self, triple: typing.Any):
+ """Remove a triple from the graph."""
+ if triple in self._graph:
+ self._removed.append(triple)
+ self._graph.remove(triple)
+
+
+class SparqlStore(base.TripleStoreBase):
+ """Sparql-based triple store.
+
+ The sparql triple store uses a third-party backend
+ (currently rdflib) to store triples and manages them via
+ the Sparql query language.
+
+ """
+
+ # The rdflib graph.
+ _graph: rdflib.Graph
+
+ # Current transaction.
+ _transaction: _Transaction
+
+ # The local schema.
+ _schema: bsc.Schema
+
+ def __init__(self):
+ super().__init__(None)
+ self._graph = rdflib.Graph()
+ self._transaction = _Transaction(self._graph)
+ self._schema = bsc.Schema.Empty()
+
+ # NOTE: mypy and pylint complain about the **kwargs not being listed (contrasting super)
+ # However, not having it here is clearer since it's explicit that there are no arguments.
+ @classmethod
+ def Open(cls) -> 'SparqlStore': # type: ignore [override] # pylint: disable=arguments-differ
+ return cls()
+
+ def commit(self):
+ self._transaction.commit()
+
+ def rollback(self):
+ self._transaction.rollback()
+
+ @property
+ def schema(self) -> bsc.Schema:
+ return self._schema
+
+ @schema.setter
+ def schema(self, schema: bsc.Schema):
+        # check args: Schema instance
+ if not isinstance(schema, bsc.Schema):
+ raise TypeError(schema)
+ # check compatibility: No contradicting definitions
+ if not self.schema.consistent_with(schema):
+ raise errors.ConsistencyError(f'{schema} is inconsistent with {self.schema}')
+
+ # commit the current transaction
+ self.commit()
+
+ # adjust instances:
+ # nothing to do for added classes
+ # delete instances of removed classes
+
+ # get deleted classes
+ sub = self.schema - schema
+
+ # remove predicate instances
+ for pred in sub.predicates:
+ for src, trg in self._graph.subject_objects(rdflib.URIRef(pred.uri)):
+ self._transaction.remove((src, rdflib.URIRef(pred.uri), trg))
+
+ # remove node instances
+ for node in sub.nodes:
+ # iterate through node instances
+ for inst in self._graph.subjects(rdflib.RDF.type, rdflib.URIRef(node.uri)):
+ # remove triples where the instance is in the object position
+ for src, pred in self._graph.subject_predicates(inst):
+ self._transaction.remove((src, pred, inst))
+ # remove triples where the instance is in the subject position
+ for pred, trg in self._graph.predicate_objects(inst):
+ self._transaction.remove((inst, pred, trg))
+ # remove instance
+ self._transaction.remove((inst, rdflib.RDF.type, rdflib.URIRef(node.uri)))
+
+ # NOTE: Nothing to do for literals
+
+ # commit instance changes
+ self.commit()
+
+ # migrate schema
+ self._schema = schema
+
+
+ def _has_type(self, subject: URI, node_type: bsc.Node) -> bool:
+ """Return True if *subject* is a node of class *node_type* or a subclass thereof."""
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+
+ subject_types = list(self._graph.objects(rdflib.URIRef(subject), rdflib.RDF.type))
+ if len(subject_types) == 0:
+ return False
+ if len(subject_types) == 1:
+ node = self.schema.node(URI(subject_types[0])) # type: ignore [arg-type] # URI is a subtype of str
+ if node == node_type:
+ return True
+ if node_type in node.parents():
+ return True
+ return False
+ raise errors.UnreachableError()
+
+ def exists(
+ self,
+ node_type: bsc.Node,
+ guids: typing.Iterable[URI],
+ ) -> typing.Iterable[URI]:
+ return (subj for subj in guids if self._has_type(subj, node_type))
+
+ def create(
+ self,
+ node_type: bsc.Node,
+ guids: typing.Iterable[URI],
+ ):
+ # check node_type
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+ # check and create guids
+ for guid in guids:
+ subject = rdflib.URIRef(guid)
+ # check node existence
+ if (subject, rdflib.RDF.type, None) in self._graph:
+ # FIXME: node exists and may have a different type! ignore? raise? report?
+ continue
+ # add node
+ self._transaction.add((subject, rdflib.RDF.type, rdflib.URIRef(node_type.uri)))
+
+ def set(
+ self,
+ node_type: bsc.Node,
+ guids: typing.Iterable[URI],
+ predicate: bsc.Predicate,
+ values: typing.Iterable[typing.Any],
+ ):
+ # check node_type
+ if node_type not in self.schema.nodes():
+ raise errors.ConsistencyError(f'{node_type} is not defined in the schema')
+ # check predicate
+ if predicate not in self.schema.predicates():
+ raise errors.ConsistencyError(f'{predicate} is not defined in the schema')
+ if not node_type <= predicate.domain:
+ raise errors.ConsistencyError(f'{node_type} must be a subclass of {predicate.domain}')
+ # NOTE: predicate.range is in the schema since predicate is in the schema.
+ # materialize values
+ values = set(values)
+ # check values
+ if len(values) == 0:
+ return
+ if predicate.unique and len(values) != 1:
+ raise ValueError(values)
+ if isinstance(predicate.range, bsc.Node):
+ values = set(values) # materialize to safeguard against iterators passed as argument
+ inconsistent = {val for val in values if not self._has_type(val, predicate.range)}
+ # catches nodes that don't exist and nodes that have an inconsistent type
+ if len(inconsistent) > 0:
+ raise errors.InstanceError(inconsistent)
+ # check guids
+ # FIXME: Fail or skip inexistent nodes?
+ guids = set(guids)
+ inconsistent = {guid for guid in guids if not self._has_type(guid, node_type)}
+ if len(inconsistent) > 0:
+ raise errors.InstanceError(inconsistent)
+
+ # add triples
+ pred = rdflib.URIRef(predicate.uri)
+ for guid, value in itertools.product(guids, values):
+ guid = rdflib.URIRef(guid)
+ # convert value
+ if isinstance(predicate.range, bsc.Literal):
+ value = rdflib.Literal(value, datatype=rdflib.URIRef(predicate.range.uri))
+ elif isinstance(predicate.range, bsc.Node):
+ value = rdflib.URIRef(value)
+ else:
+ raise errors.UnreachableError()
+ # clear triples for unique predicates
+ if predicate.unique:
+ for obj in self._graph.objects(guid, pred):
+ if obj != value:
+ self._transaction.remove((guid, pred, obj))
+ # add triple
+ self._transaction.add((guid, pred, value))
+
+## EOF ##
diff --git a/bsfs/utils/__init__.py b/bsfs/utils/__init__.py
new file mode 100644
index 0000000..94680ee
--- /dev/null
+++ b/bsfs/utils/__init__.py
@@ -0,0 +1,25 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# inner-module imports
+from . import errors
+from .commons import typename
+from .uri import URI
+from .uuid import UUID, UCID
+
+# exports
+__all__ : typing.Sequence[str] = (
+ 'UCID',
+ 'URI',
+ 'UUID',
+ 'errors',
+ 'typename',
+ )
+
+## EOF ##
diff --git a/bsfs/utils/commons.py b/bsfs/utils/commons.py
new file mode 100644
index 0000000..bad2fe0
--- /dev/null
+++ b/bsfs/utils/commons.py
@@ -0,0 +1,23 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'typename',
+ )
+
+
+## code ##
+
def typename(obj) -> str:
    """Name of *obj*'s type (e.g. ``'int'`` for an integer)."""
    cls = type(obj)
    return cls.__name__
+
+
+## EOF ##
diff --git a/bsfs/utils/errors.py b/bsfs/utils/errors.py
new file mode 100644
index 0000000..c5e8e16
--- /dev/null
+++ b/bsfs/utils/errors.py
@@ -0,0 +1,41 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import typing
+
+# exports
# Export the public exception classes (consistent with the sibling modules,
# which all enumerate their exports). The private base class is omitted.
__all__: typing.Sequence[str] = (
    'ConfigError',
    'ConsistencyError',
    'InstanceError',
    'PermissionDeniedError',
    'ProgrammingError',
    'SchemaError',
    'UnreachableError',
    )
+
+
+## code ##
+
class _BSFSError(Exception):
    """Base class of all bsfs errors. Not raised directly."""

class SchemaError(_BSFSError):
    """Generic schema errors."""

class ConsistencyError(SchemaError):
    """A requested operation is inconsistent with the schema."""

class InstanceError(SchemaError):
    """An instance affected by some operation is inconsistent with the schema."""

class PermissionDeniedError(_BSFSError):
    """An operation was aborted due to access control restrictions."""

class ProgrammingError(_BSFSError):
    """An assertion-like error that indicates a code-base issue."""

class UnreachableError(ProgrammingError):
    """Bravo, you've reached a point in code that should logically not be reachable."""

class ConfigError(_BSFSError):
    """User config issue."""
+
+## EOF ##
diff --git a/bsfs/utils/uri.py b/bsfs/utils/uri.py
new file mode 100644
index 0000000..84854a4
--- /dev/null
+++ b/bsfs/utils/uri.py
@@ -0,0 +1,246 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+import re
+import typing
+
+# constants
+RX_URI = re.compile(r'''
+ ^
+ (?:(?P<scheme>[^:/?#]+):)? # scheme, ://-delimited
+ (?://(?P<authority>[^/?#]*))? # authority (user@host:port), [/#?]-delimited
+ (?P<path>[^?#]*) # path, [#?]-delimited
+ (?:\?(?P<query>[^#]*))? # query, [#]-delimited
+ (?:\#(?P<fragment>.*))? # fragment, remaining characters
+ $
+ ''', re.VERBOSE + re.IGNORECASE)
+
+RX_HOST = re.compile(r'''
+ ^
+ (?:(?P<userinfo>[^@]*)@)? # userinfo
+ (?P<host>
+ (?:\[[^\]]+\]) | # IPv6 address
+ (?:[^:]+) # IPv4 address or regname
+ )
+ (?::(?P<port>\d*))? # port
+ $
+ ''', re.VERBOSE + re.IGNORECASE)
+
+# exports
+__all__: typing.Sequence[str] = (
+ 'URI',
+ )
+
+
+## code ##
+
def _get_optional(
        regexp: re.Pattern,
        query: str,
        grp: str
    ) -> str:
    """Match *query* against *regexp* and return the named group *grp*.

    Raises a `ValueError` if *query* does not match the expression or
    the group did not participate in the match.
    """
    match = regexp.search(query)
    # guard: no match at all
    if match is None:
        raise ValueError(query)
    value = match.group(grp)
    # guard: group present in the pattern but absent from this match
    if value is None:
        raise ValueError(query)
    return value
+
+
class URI(str):
    """URI additions to built-in strings.

    Provides properties to access the different components of an URI,
    according to RFC 3986 (https://datatracker.ietf.org/doc/html/rfc3986).

    Note that this class does not actually validate an URI but only offers
    access to components of a *well-formed* URI. Use `urllib.parse` for
    more advanced purposes.

    """

    def __new__(cls, value: str):
        """Create a new URI instance.
        Raises a `ValueError` if the (supposed) URI is malformed.
        """
        if not cls.is_parseable(value):
            raise ValueError(value)
        return str.__new__(cls, value)

    @staticmethod
    def is_parseable(query: str) -> bool:
        """Return True if the *query* can be decomposed into the URI components.

        Note that a valid URI is always parseable, however, an invalid URI
        might be parseable as well. The return value of this method makes
        no claim about the validity of an URI!

        """
        # check overall uri structure
        parts = RX_URI.match(query)
        if parts is not None:
            # check authority sub-structure (userinfo, host, port) separately,
            # since RX_URI treats the authority as an opaque chunk
            authority = parts.group('authority')
            if authority is None or RX_HOST.match(authority) is not None:
                return True
        # some check not passed
        return False

    @staticmethod
    def compose(
        path: str,
        scheme: typing.Optional[str] = None,
        authority: typing.Optional[str] = None,
        user: typing.Optional[str] = None,
        host: typing.Optional[str] = None,
        port: typing.Optional[int] = None,
        query: typing.Optional[str] = None,
        fragment: typing.Optional[str] = None,
        ) -> 'URI':
        """URI composition from components.

        If the *host* argument is supplied, the authority is composed of *user*,
        *host*, and *port* arguments, and the *authority* argument is ignored.
        Note that if the *host* is an IPv6 address, it must be enclosed in brackets.
        """
        # strip whitespaces
        path = path.strip()

        # compose authority
        if host is not None:
            authority = ''
            if user is not None:
                authority += user + '@'
            authority += host
            if port is not None:
                authority += ':' + str(port)

        # ensure root on path
        # NOTE: startswith also handles the empty path, which would raise
        # an IndexError when indexed as path[0].
        if not path.startswith('/'):
            path = '/' + path

        # compose uri
        uri = ''
        if scheme is not None:
            uri += scheme + ':'
        if authority is not None:
            uri += '//' + authority
        uri += path
        if query is not None:
            uri += '?' + query
        if fragment is not None:
            uri += '#' + fragment

        # return as URI (re-checks parseability via __new__)
        return URI(uri)

    @property
    def scheme(self) -> str:
        """Return the protocol/scheme part of the URI."""
        return _get_optional(RX_URI, self, 'scheme')

    @property
    def authority(self) -> str:
        """Return the authority part of the URI, including userinfo and port."""
        return _get_optional(RX_URI, self, 'authority')

    @property
    def userinfo(self) -> str:
        """Return the userinfo part of the URI."""
        return _get_optional(RX_HOST, self.authority, 'userinfo')

    @property
    def host(self) -> str:
        """Return the host part of the URI."""
        return _get_optional(RX_HOST, self.authority, 'host')

    @property
    def port(self) -> int:
        """Return the port part of the URI."""
        return int(_get_optional(RX_HOST, self.authority, 'port'))

    @property
    def path(self) -> str:
        """Return the path part of the URI."""
        return _get_optional(RX_URI, self, 'path')

    @property
    def query(self) -> str:
        """Return the query part of the URI."""
        return _get_optional(RX_URI, self, 'query')

    @property
    def fragment(self) -> str:
        """Return the fragment part of the URI."""
        return _get_optional(RX_URI, self, 'fragment')

    def get(self, component: str, default: typing.Optional[typing.Any] = None) -> typing.Optional[typing.Any]:
        """Return the component or a default value."""
        # check args
        if component not in ('scheme', 'authority', 'userinfo', 'host',
                             'port', 'path', 'query', 'fragment'):
            raise ValueError(component)
        try:
            # return component's value
            return getattr(self, component)
        except ValueError:
            # return the default value
            return default


    # overload composition methods so results remain URI instances

    def __add__(self, *args) -> 'URI':
        return URI(super().__add__(*args))

    def join(self, *args) -> 'URI':
        return URI(super().join(*args))

    def __mul__(self, *args) -> 'URI':
        return URI(super().__mul__(*args))

    def __rmul__(self, *args) -> 'URI':
        return URI(super().__rmul__(*args))


    # overload casefold methods

    def lower(self, *args) -> 'URI':
        return URI(super().lower(*args))

    def upper(self, *args) -> 'URI':
        return URI(super().upper(*args))


    # overload stripping methods

    def strip(self, *args) -> 'URI':
        return URI(super().strip(*args))

    def lstrip(self, *args) -> 'URI':
        return URI(super().lstrip(*args))

    def rstrip(self, *args) -> 'URI':
        return URI(super().rstrip(*args))


    # overload formatting methods

    def format(self, *args, **kwargs) -> 'URI':
        return URI(super().format(*args, **kwargs))

    def __mod__(self, *args) -> 'URI':
        return URI(super().__mod__(*args))

    def replace(self, *args) -> 'URI':
        return URI(super().replace(*args))
+
+
+
+## EOF ##
diff --git a/bsfs/utils/uuid.py b/bsfs/utils/uuid.py
new file mode 100644
index 0000000..6366b18
--- /dev/null
+++ b/bsfs/utils/uuid.py
@@ -0,0 +1,108 @@
+"""
+
+Part of the BlackStar filesystem (bsfs) module.
+A copy of the license is provided with the project.
+Author: Matthias Baumgartner, 2022
+"""
+# imports
+from collections import abc
+import hashlib
+import os
+import platform
+import random
+import threading
+import time
+import typing
+import uuid
+
+# constants
+HASH = hashlib.sha256
+
+# exports
+__all__: typing.Sequence[str] = [
+ 'UCID',
+ 'UUID',
+ ]
+
+
+## code ##
+
class UUID(abc.Iterator, abc.Callable): # type: ignore [misc] # abc.Callable "is an invalid base class"
    """Generate 256-bit universally unique IDs.

    This is a 'best-effort' kind of implementation that tries to ensure global
    uniqueness, even though actual uniqueness cannot be guaranteed.
    The approach is different from python's uuid module (which implements
    RFC 4122) in that it generates longer UUIDs and in that it cannot be
    reconstructed whether two UUIDs were generated on the same system.

    The ID is a cryptographic hash over several components:
    * host
    * system
    * process
    * thread
    * random
    * time
    * cpu cycles
    * content (if available)

    """

    # host identifier
    host: str

    # system identifier
    system: str

    # process identifier
    process: str

    # thread identifier
    thread: str

    # dedicated random number generator
    _rng: random.Random

    def __init__(self, seed: typing.Optional[int] = None):
        """Initialize the generator; *seed* (optional) makes the random
        component reproducible."""
        # initialize static components
        self.host = str(uuid.getnode())
        self.system = '-'.join(platform.uname())
        self.process = str(os.getpid())
        self.thread = str(threading.get_ident())
        # initialize random component
        # NOTE: use a private Random instance instead of random.seed() so that
        # constructing a UUID generator does not clobber the module-global
        # random state shared with the rest of the process.
        self._rng = random.Random(seed)

    def __call__(self, content: typing.Optional[str] = None) -> str: # pylint: disable=arguments-differ
        """Return a globally unique ID."""
        # content component
        content = str(content) if content is not None else ''
        # time component
        now = str(time.time())
        # clock component
        clk = str(time.perf_counter())
        # random component
        rnd = str(self._rng.random())
        # build the token from all available components
        token = self.host + self.system + self.process + self.thread + rnd + now + clk + content
        # return the token's hash
        return HASH(token.encode('ascii', 'ignore')).hexdigest()

    def __iter__(self) -> typing.Iterator[str]:
        """Iterate indefinitely over universally unique IDs."""
        return self

    def __next__(self) -> str:
        """Generate universally unique IDs."""
        return self()
+
+
class UCID():
    """Generate 256-bit content IDs.

    Effectively computes a cryptographic hash over the content.

    """
    @staticmethod
    def from_path(path: str) -> str:
        """Return the content hash of the file at *path*.

        The file is read incrementally in fixed-size chunks so that
        arbitrarily large files can be hashed with bounded memory.
        """
        hsh = HASH()
        with open(path, 'rb') as ifile:
            # 1 MiB chunks; iter() stops at the b'' sentinel on EOF
            for chunk in iter(lambda: ifile.read(1 << 20), b''):
                hsh.update(chunk)
        return hsh.hexdigest()
+
+## EOF ##