From d2b4a528465dc01e8db92b61293c458c7911a333 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 31 Oct 2022 12:21:22 +0100 Subject: essential interfaces (reader, extractor, errors) --- bsie/__init__.py | 13 +++++++++++++ bsie/base/__init__.py | 24 ++++++++++++++++++++++++ bsie/base/errors.py | 22 ++++++++++++++++++++++ bsie/base/extractor.py | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++ bsie/base/reader.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ bsie/utils/__init__.py | 20 ++++++++++++++++++++ bsie/utils/bsfs.py | 20 ++++++++++++++++++++ bsie/utils/node.py | 39 +++++++++++++++++++++++++++++++++++++++ 8 files changed, 236 insertions(+) create mode 100644 bsie/__init__.py create mode 100644 bsie/base/__init__.py create mode 100644 bsie/base/errors.py create mode 100644 bsie/base/extractor.py create mode 100644 bsie/base/reader.py create mode 100644 bsie/utils/__init__.py create mode 100644 bsie/utils/bsfs.py create mode 100644 bsie/utils/node.py (limited to 'bsie') diff --git a/bsie/__init__.py b/bsie/__init__.py new file mode 100644 index 0000000..2f2477a --- /dev/null +++ b/bsie/__init__.py @@ -0,0 +1,13 @@ +"""The BSIE module extracts triples from files for insertion into a BSFS storage. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# exports +__all__: typing.Sequence[str] = [] + +## EOF ## diff --git a/bsie/base/__init__.py b/bsie/base/__init__.py new file mode 100644 index 0000000..0154862 --- /dev/null +++ b/bsie/base/__init__.py @@ -0,0 +1,24 @@ +"""The base module defines the BSIE interfaces. + +You'll mostly find abstract classes here. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from . import errors +from . import extractor +from . import reader + +# exports +__all__: typing.Sequence[str] = ( + 'errors', + 'extractor', + 'reader', + ) + +## EOF ## diff --git a/bsie/base/errors.py b/bsie/base/errors.py new file mode 100644 index 0000000..f86ffb2 --- /dev/null +++ b/bsie/base/errors.py @@ -0,0 +1,22 @@ +"""Common BSIE exceptions. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# exports +__all__: typing.Sequence[str] = [] + + +## code ## + +class _BSIE_Error(Exception): + """Generic BSIE error.""" + +class ReaderError(_BSIE_Error): + """The Reader failed to read the given file.""" + +## EOF ## diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py new file mode 100644 index 0000000..d5b0922 --- /dev/null +++ b/bsie/base/extractor.py @@ -0,0 +1,50 @@ +"""The Extractor classes transform content into triples. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import abc +import collections +import typing + +# inner-module imports +from . import reader +from bsie.utils import node +from bsie.utils.bsfs import URI, typename + +# exports +__all__: typing.Sequence[str] = ( + 'Extractor', + ) + + +## code ## + +class Extractor(abc.ABC, collections.abc.Iterable, collections.abc.Callable): + """Produce (node, predicate, value)-triples from some content.""" + + # what type of content is expected (i.e. reader subclass). 
+ CONTENT_READER: typing.Optional[typing.Type[reader.Reader]] = None + + def __str__(self) -> str: + return typename(self) + + def __repr__(self) -> str: + return f'{typename(self)}()' + + @abc.abstractmethod + def schema(self) -> str: + """Return the schema (predicates and nodes) produced by this Extractor.""" + + @abc.abstractmethod + def extract( + self, + subject: node.Node, + content: typing.Any, + predicates: typing.Iterable[URI], + ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + """Return (node, predicate, value) triples.""" + +## EOF ## diff --git a/bsie/base/reader.py b/bsie/base/reader.py new file mode 100644 index 0000000..f29e451 --- /dev/null +++ b/bsie/base/reader.py @@ -0,0 +1,48 @@ +"""The Reader classes return high-level content structures from files. + +The Reader fulfills two purposes: + First, it brokers between multiple libraries and file formats. + Second, it separates multiple aspects of a file into distinct content types. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import abc +import typing + +# inner-module imports +from bsie.utils.bsfs import URI, typename + +# exports +__all__: typing.Sequence[str] = ( + 'Aggregator', + 'Reader', + ) + + +## code ## + +class Reader(abc.ABC): + """Read and return some content from a file.""" + + # In what data structure content is returned + CONTENT_TYPE = typing.Union[typing.Any] + # NOTE: Child classes must also assign a typing.Union even if there's + # only one options + + def __str__(self) -> str: + return typename(self) + + def __repr__(self) -> str: + return f'{typename(self)}()' + + # FIXME: How about using contexts instead of calls? + @abc.abstractmethod + def __call__(self, path: URI) -> CONTENT_TYPE: + """Return some content of the file at *path*. + Raises a `ReaderError` if the reader cannot make sense of the file format. + """ + +## EOF ## diff --git a/bsie/utils/__init__.py b/bsie/utils/__init__.py new file mode 100644 index 0000000..1137187 --- /dev/null +++ b/bsie/utils/__init__.py @@ -0,0 +1,20 @@ +"""Common tools and definitions. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from . import bsfs +from . import node + +# exports +__all__: typing.Sequence[str] = ( + 'bsfs', + 'node', + ) + +## EOF ## diff --git a/bsie/utils/bsfs.py b/bsie/utils/bsfs.py new file mode 100644 index 0000000..33eb178 --- /dev/null +++ b/bsie/utils/bsfs.py @@ -0,0 +1,20 @@ +"""BSFS bridge, provides BSFS bindings for BSIE. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# bsfs imports +from bsfs.utils import URI +from bsfs.utils import typename + +# exports +__all__: typing.Sequence[str] = ( + 'URI', + 'typename', + ) + +## EOF ## diff --git a/bsie/utils/node.py b/bsie/utils/node.py new file mode 100644 index 0000000..60863a4 --- /dev/null +++ b/bsie/utils/node.py @@ -0,0 +1,39 @@ +"""Lighweight Node to bridge to BSFS. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from bsie.utils.bsfs import URI + +# exports +__all__: typing.Sequence[str] = ( + 'Node' + ) + + +## code ## + +class Node(): + """Lightweight Node, disconnected from any bsfs structures.""" + + # node type. 
+ node_type: URI + + # node URI. + uri: URI + + def __init__( + self, + node_type: URI, + uri: URI, + ): + # assign members + self.node_type = URI(node_type) + self.uri = URI(uri) + +## EOF ## -- cgit v1.2.3 From 068b3651c16916877eb8d5fdfec52485a507e204 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 31 Oct 2022 13:05:31 +0100 Subject: path and stat readers --- bsie/reader/__init__.py | 19 +++++++++++++++++++ bsie/reader/path.py | 31 +++++++++++++++++++++++++++++++ bsie/reader/stat.py | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 bsie/reader/__init__.py create mode 100644 bsie/reader/path.py create mode 100644 bsie/reader/stat.py (limited to 'bsie') diff --git a/bsie/reader/__init__.py b/bsie/reader/__init__.py new file mode 100644 index 0000000..a45f22b --- /dev/null +++ b/bsie/reader/__init__.py @@ -0,0 +1,19 @@ +"""The Reader classes return high-level content structures from files. + +The Reader fulfills two purposes: + First, it brokers between multiple libraries and file formats. + Second, it separates multiple aspects of a file into distinct content types. + +Often, different libraries focus on reading different types of content from a +file. E.g. one would use different modules to read file system infos than to +read exif or pixel data of an image. Hence, this module is organized by content +type. Each distinct type can be implemented in a file or submodule that +provides a Reader implementation. Through utilization of submodules, different +file formats can be supported. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" + +## EOF ## diff --git a/bsie/reader/path.py b/bsie/reader/path.py new file mode 100644 index 0000000..d27c664 --- /dev/null +++ b/bsie/reader/path.py @@ -0,0 +1,31 @@ +"""The Path reader produces a file path. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import os +import typing + +# inner-module imports +from bsie.base import reader + +# exports +__all__: typing.Sequence[str] = ( + 'Path', + ) + + +## code ## + +class Path(reader.Reader): + """Return the path.""" + + CONTENT_TYPE = typing.Union[str] + + def __call__(self, path: str) -> CONTENT_TYPE: + return path + + +## EOF ## diff --git a/bsie/reader/stat.py b/bsie/reader/stat.py new file mode 100644 index 0000000..f0b83fb --- /dev/null +++ b/bsie/reader/stat.py @@ -0,0 +1,34 @@ +"""The Stat reader produces filesystem stat information. + +Part of the bsie module. +A copy of the license is provided with the project. 
+Author: Matthias Baumgartner, 2022 +""" +# imports +import os +import typing + +# inner-module imports +from bsie.base import reader, errors + +# exports +__all__: typing.Sequence[str] = ( + 'Stat', + ) + + +## code ## + +class Stat(reader.Reader): + """Read and return the filesystem's stat infos.""" + + CONTENT_TYPE = typing.Union[os.stat_result] + + def __call__(self, path: str) -> CONTENT_TYPE: + try: + return os.stat(path) + except Exception: + raise errors.ReaderError(path) + + +## EOF ## -- cgit v1.2.3 From 2da348c638ac5058d5acf09ab5df323ee04503d5 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Mon, 31 Oct 2022 14:14:42 +0100 Subject: constant, filesize, and filename extractors --- bsie/base/extractor.py | 3 +- bsie/extractor/__init__.py | 15 ++++++++ bsie/extractor/generic/__init__.py | 16 +++++++++ bsie/extractor/generic/constant.py | 52 ++++++++++++++++++++++++++++ bsie/extractor/generic/path.py | 70 +++++++++++++++++++++++++++++++++++++ bsie/extractor/generic/stat.py | 71 ++++++++++++++++++++++++++++++++++++++ bsie/utils/__init__.py | 2 ++ bsie/utils/bsfs.py | 5 +-- bsie/utils/namespaces.py | 25 ++++++++++++++ 9 files changed, 255 insertions(+), 4 deletions(-) create mode 100644 bsie/extractor/__init__.py create mode 100644 bsie/extractor/generic/__init__.py create mode 100644 bsie/extractor/generic/constant.py create mode 100644 bsie/extractor/generic/path.py create mode 100644 bsie/extractor/generic/stat.py create mode 100644 bsie/utils/namespaces.py (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index d5b0922..ea43925 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -6,7 +6,6 @@ Author: Matthias Baumgartner, 2022 """ # imports import abc -import collections import typing # inner-module imports @@ -22,7 +21,7 @@ __all__: typing.Sequence[str] = ( ## code ## -class Extractor(abc.ABC, collections.abc.Iterable, collections.abc.Callable): +class Extractor(abc.ABC): """Produce (node, predicate, value)-triples from some content.""" # what type of content is expected (i.e. reader subclass). diff --git a/bsie/extractor/__init__.py b/bsie/extractor/__init__.py new file mode 100644 index 0000000..ef31343 --- /dev/null +++ b/bsie/extractor/__init__.py @@ -0,0 +1,15 @@ +"""Extractors produce triples from some content. + +Each Extractor class is linked to the Reader class whose content it requires. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# exports +__all__: typing.Sequence[str] = [] + +## EOF ## diff --git a/bsie/extractor/generic/__init__.py b/bsie/extractor/generic/__init__.py new file mode 100644 index 0000000..0cb7e7f --- /dev/null +++ b/bsie/extractor/generic/__init__.py @@ -0,0 +1,16 @@ +"""Generic extractors focus on information that is typically available on all +files. Examples include file system information (file name and size, mime type, +etc.) and information that is independent of the actual file (constant triples, +host platform infos, current time, etc.). + +Part of the bsie module. +A copy of the license is provided with the project. 
+Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# exports +__all__: typing.Sequence[str] = [] + +## EOF ## diff --git a/bsie/extractor/generic/constant.py b/bsie/extractor/generic/constant.py new file mode 100644 index 0000000..e243131 --- /dev/null +++ b/bsie/extractor/generic/constant.py @@ -0,0 +1,52 @@ +"""The Constant extractor produces pre-specified triples. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from bsie.base import extractor +from bsie.utils.bsfs import URI +from bsie.utils.node import Node + +# exports +__all__: typing.Sequence[str] = ( + 'Constant', + ) + + +## code ## + +class Constant(extractor.Extractor): + """Extract information from file's path.""" + + CONTENT_READER = None + + def __init__( + self, + schema: str, + tuples: typing.Iterable[typing.Tuple[URI, typing.Any]], + ): + self._schema = schema + self._tuples = tuples + # FIXME: use schema instance for predicate checking + #self._tuples = [(pred, value) for pred, value in tuples if pred in schema] + # FIXME: use schema instance for value checking + + def schema(self) -> str: + return self._schema + + def extract( + self, + subject: Node, + content: None, + predicates: typing.Iterable[URI], + ) -> typing.Iterator[typing.Tuple[Node, URI, typing.Any]]: + for pred, value in self._tuples: + if pred in predicates: + yield subject, pred, value + +## EOF ## diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py new file mode 100644 index 0000000..c39bbd2 --- /dev/null +++ b/bsie/extractor/generic/path.py @@ -0,0 +1,70 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import os +import typing + +# inner-module imports +from bsie.base import extractor +from bsie.utils import node, ns +from bsie.utils.bsfs import URI +import bsie.reader.path + +# exports +__all__: typing.Sequence[str] = ( + 'Path', + ) + + +## code ## + +class Path(extractor.Extractor): + """Extract information from file's path.""" + + CONTENT_READER = bsie.reader.path.Path + + def __init__(self): + self.__callmap = { + ns.bse.filename: self.__filename, + } + + def schema(self) -> str: + return ''' + bse:filename a bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:string ; + rdf:label "File name"^^xsd:string ; + schema:description "Filename of entity in some filesystem."^^xsd:string ; + owl:maxCardinality "INF"^^xsd:number . + ''' + + def extract( + self, + subject: node.Node, + content: CONTENT_READER.CONTENT_TYPE, + predicates: typing.Iterable[URI], + ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + for pred in predicates: + # find callback + clbk = self.__callmap.get(pred) + if clbk is None: + continue + # get value + value = clbk(content) + if value is None: + continue + # produce triple + yield subject, pred, value + + def __filename(self, path: str) -> str: + try: + return os.path.basename(path) + except Exception: + # FIXME: some kind of error reporting (e.g. logging) + return None + +## EOF ## diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py new file mode 100644 index 0000000..d74369c --- /dev/null +++ b/bsie/extractor/generic/stat.py @@ -0,0 +1,71 @@ +"""Extract information from the file system, such as filesize. + +Part of the bsie module. +A copy of the license is provided with the project. 
+Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from bsie.base import extractor +from bsie.utils import node, ns +from bsie.utils.bsfs import URI +import bsie.reader.stat + + +# exports +__all__: typing.Sequence[str] = ( + 'Stat', + ) + + +## code ## + +class Stat(extractor.Extractor): + """Extract information from the file system.""" + + CONTENT_READER = bsie.reader.stat.Stat + + def __init__(self): + self.__callmap = { + ns.bse.filesize: self.__filesize, + } + + def schema(self) -> str: + return ''' + bse:filesize a bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:integer ; + rdf:label "File size"^^xsd:string ; + schema:description "File size of entity in some filesystem."^^xsd:string ; + owl:maxCardinality "INF"^^xsd:number . + ''' + + def extract( + self, + subject: node.Node, + content: CONTENT_READER.CONTENT_TYPE, + predicates: typing.Iterable[URI], + ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + for pred in predicates: + # find callback + clbk = self.__callmap.get(pred) + if clbk is None: + continue + # get value + value = clbk(content) + if value is None: + continue + # produce triple + yield subject, pred, value + + def __filesize(self, content: CONTENT_READER.CONTENT_TYPE) -> int: + """Return the file size.""" + try: + return content.st_size + except Exception: + # FIXME: some kind of error reporting (e.g. logging) + return None + +## EOF ## diff --git a/bsie/utils/__init__.py b/bsie/utils/__init__.py index 1137187..bd22236 100644 --- a/bsie/utils/__init__.py +++ b/bsie/utils/__init__.py @@ -9,12 +9,14 @@ import typing # inner-module imports from . import bsfs +from . import namespaces as ns from . import node # exports __all__: typing.Sequence[str] = ( 'bsfs', 'node', + 'ns', ) ## EOF ## diff --git a/bsie/utils/bsfs.py b/bsie/utils/bsfs.py index 33eb178..1ae657c 100644 --- a/bsie/utils/bsfs.py +++ b/bsie/utils/bsfs.py @@ -8,11 +8,12 @@ Author: Matthias Baumgartner, 2022 import typing # bsfs imports -from bsfs.utils import URI -from bsfs.utils import typename +from bsfs.namespace import Namespace +from bsfs.utils import URI, typename # exports __all__: typing.Sequence[str] = ( + 'Namespace', 'URI', 'typename', ) diff --git a/bsie/utils/namespaces.py b/bsie/utils/namespaces.py new file mode 100644 index 0000000..67ccc71 --- /dev/null +++ b/bsie/utils/namespaces.py @@ -0,0 +1,25 @@ +"""Default namespaces used throughout BSIE. + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# bsie imports +from . 
import bsfs as _bsfs + +# constants +bse = _bsfs.Namespace('http://bsfs.ai/schema/Entity#') +bsfs = _bsfs.Namespace('http://bsfs.ai/schema/') +bsm = _bsfs.Namespace('http://bsfs.ai/schema/meta#') + +# export +__all__: typing.Sequence[str] = ( + 'bse', + 'bsfs', + 'bsm', + ) + +## EOF ## -- cgit v1.2.3 From e174a25585e64eb1b0759440cad48d642dd31829 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Fri, 25 Nov 2022 14:31:29 +0100 Subject: use schema and predicate types in extractors --- bsie/base/errors.py | 13 +++++++--- bsie/base/extractor.py | 51 +++++++++++++++++++++++++++++++++----- bsie/extractor/generic/constant.py | 20 +++++++-------- bsie/extractor/generic/path.py | 40 ++++++++++++++++-------------- bsie/extractor/generic/stat.py | 34 ++++++++++++------------- bsie/utils/bsfs.py | 2 ++ bsie/utils/namespaces.py | 3 ++- bsie/utils/node.py | 2 +- 8 files changed, 109 insertions(+), 56 deletions(-) (limited to 'bsie') diff --git a/bsie/base/errors.py b/bsie/base/errors.py index f86ffb2..eedce3b 100644 --- a/bsie/base/errors.py +++ b/bsie/base/errors.py @@ -8,15 +8,22 @@ Author: Matthias Baumgartner, 2022 import typing # exports -__all__: typing.Sequence[str] = [] +__all__: typing.Sequence[str] = ( + 'ExtractorError', + ) + + ## code ## -class _BSIE_Error(Exception): +class _BSIEError(Exception): """Generic BSIE error.""" -class ReaderError(_BSIE_Error): +class ExtractorError(_BSIEError): + """The Extractor failed to process the given content.""" + +class ReaderError(_BSIEError): """The Reader failed to read the given file.""" ## EOF ## diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index ea43925..a6a69c6 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -11,13 +11,38 @@ import typing # inner-module imports from . import reader from bsie.utils import node -from bsie.utils.bsfs import URI, typename +from bsie.utils.bsfs import schema as _schema, typename # exports __all__: typing.Sequence[str] = ( 'Extractor', ) +# constants + +# essential definitions typically used in extractor schemas. +# NOTE: The definition here is only for convenience; Each Extractor must implement its use, if so desired. +SCHEMA_PREAMBLE = ''' + # common external prefixes + prefix owl: + prefix rdf: + prefix rdfs: + prefix xsd: + prefix schema: + + # common bsfs prefixes + prefix bsfs: + prefix bse: + + # essential nodes + bsfs:Entity rdfs:subClassOf bsfs:Node . + + # common definitions + xsd:string rdfs:subClassOf bsfs:Literal . + xsd:integer rdfs:subClassOf bsfs:Literal . + + ''' + ## code ## @@ -27,23 +52,37 @@ class Extractor(abc.ABC): # what type of content is expected (i.e. reader subclass). CONTENT_READER: typing.Optional[typing.Type[reader.Reader]] = None + # extractor schema. + schema: _schema.Schema + + def __init__(self, schema: _schema.Schema): + self.schema = schema + def __str__(self) -> str: return typename(self) def __repr__(self) -> str: return f'{typename(self)}()' - @abc.abstractmethod - def schema(self) -> str: - """Return the schema (predicates and nodes) produced by this Extractor.""" + + def predicates(self) -> typing.Iterator[_schema.Predicate]: + """Return the predicates that may be part of extracted triples.""" + # NOTE: Some predicates in the schema might not occur in actual triples, + # but are defined due to predicate class hierarchy. E.g., bsfs:Predicate + # is part of every schema but should not be used in triples. + # Announcing all predicates might not be the most efficient way, however, + # it is the most safe one. 
Concrete extractors that produce additional + # predicates (e.g. auxiliary nodes with their own predicates) should + # overwrite this method to only include the principal predicates. + return self.schema.predicates() @abc.abstractmethod def extract( self, subject: node.Node, content: typing.Any, - predicates: typing.Iterable[URI], - ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + predicates: typing.Iterable[_schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, _schema.Predicate, typing.Any]]: """Return (node, predicate, value) triples.""" ## EOF ## diff --git a/bsie/extractor/generic/constant.py b/bsie/extractor/generic/constant.py index e243131..795bac6 100644 --- a/bsie/extractor/generic/constant.py +++ b/bsie/extractor/generic/constant.py @@ -7,9 +7,9 @@ Author: Matthias Baumgartner, 2022 # imports import typing -# inner-module imports +# bsie imports from bsie.base import extractor -from bsie.utils.bsfs import URI +from bsie.utils.bsfs import URI, schema as _schema from bsie.utils.node import Node # exports @@ -25,26 +25,26 @@ class Constant(extractor.Extractor): CONTENT_READER = None + # predicate/value pairs to be produced. + _tuples: typing.Tuple[typing.Tuple[_schema.Predicate, typing.Any], ...] + def __init__( self, schema: str, tuples: typing.Iterable[typing.Tuple[URI, typing.Any]], ): - self._schema = schema - self._tuples = tuples - # FIXME: use schema instance for predicate checking - #self._tuples = [(pred, value) for pred, value in tuples if pred in schema] + super().__init__(_schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + schema)) + # NOTE: Raises a KeyError if the predicate is not part of the schema + self._tuples = tuple((self.schema.predicate(p_uri), value) for p_uri, value in tuples) # FIXME: use schema instance for value checking - def schema(self) -> str: - return self._schema def extract( self, subject: Node, content: None, - predicates: typing.Iterable[URI], - ) -> typing.Iterator[typing.Tuple[Node, URI, typing.Any]]: + predicates: typing.Iterable[_schema.Predicate], + ) -> typing.Iterator[typing.Tuple[Node, _schema.Predicate, typing.Any]]: for pred, value in self._tuples: if pred in predicates: yield subject, pred, value diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index c39bbd2..f358a79 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -8,11 +8,10 @@ Author: Matthias Baumgartner, 2022 import os import typing -# inner-module imports +# bsie imports from bsie.base import extractor from bsie.utils import node, ns -from bsie.utils.bsfs import URI -import bsie.reader.path +from bsie.utils.bsfs import schema # exports __all__: typing.Sequence[str] = ( @@ -27,30 +26,31 @@ class Path(extractor.Extractor): CONTENT_READER = bsie.reader.path.Path - def __init__(self): - self.__callmap = { - ns.bse.filename: self.__filename, - } + # mapping from predicate to handler function. + _callmap: typing.Dict[schema.Predicate, typing.Callable[[str], typing.Any]] - def schema(self) -> str: - return ''' - bse:filename a bsfs:Predicate ; + def __init__(self): + super().__init__(schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' + bse:filename rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; rdfs:range xsd:string ; - rdf:label "File name"^^xsd:string ; + rdfs:label "File name"^^xsd:string ; schema:description "Filename of entity in some filesystem."^^xsd:string ; owl:maxCardinality "INF"^^xsd:number . 
- ''' + ''')) + self._callmap = { + self.schema.predicate(ns.bse.filename): self.__filename, + } def extract( self, subject: node.Node, content: CONTENT_READER.CONTENT_TYPE, - predicates: typing.Iterable[URI], - ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + predicates: typing.Iterable[schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, schema.Predicate, typing.Any]]: for pred in predicates: # find callback - clbk = self.__callmap.get(pred) + clbk = self._callmap.get(pred) if clbk is None: continue # get value @@ -60,11 +60,15 @@ class Path(extractor.Extractor): # produce triple yield subject, pred, value - def __filename(self, path: str) -> str: + def __filename(self, path: str) -> typing.Optional[str]: try: return os.path.basename(path) - except Exception: - # FIXME: some kind of error reporting (e.g. logging) + except Exception: # some error, skip. + # FIXME: some kind of error reporting (e.g. logging)? + # Options: (a) Fail silently (current); (b) Skip and report to log; + # (c) Raise ExtractorError (aborts extraction); (d) separate content type + # checks from basename errors (report content type errors, skip basename + # errors) return None ## EOF ## diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index d74369c..e5387af 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -5,14 +5,13 @@ A copy of the license is provided with the project. Author: Matthias Baumgartner, 2022 """ # imports +import os import typing -# inner-module imports +# bsie imports from bsie.base import extractor from bsie.utils import node, ns -from bsie.utils.bsfs import URI -import bsie.reader.stat - +from bsie.utils.bsfs import schema as _schema # exports __all__: typing.Sequence[str] = ( @@ -27,30 +26,31 @@ class Stat(extractor.Extractor): CONTENT_READER = bsie.reader.stat.Stat - def __init__(self): - self.__callmap = { - ns.bse.filesize: self.__filesize, - } + # mapping from predicate to handler function. + _callmap: typing.Dict[_schema.Predicate, typing.Callable[[os.stat_result], typing.Any]] - def schema(self) -> str: - return ''' - bse:filesize a bsfs:Predicate ; + def __init__(self): + super().__init__(_schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' + bse:filesize rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; rdfs:range xsd:integer ; - rdf:label "File size"^^xsd:string ; + rdfs:label "File size"^^xsd:string ; schema:description "File size of entity in some filesystem."^^xsd:string ; owl:maxCardinality "INF"^^xsd:number . 
- ''' + ''')) + self._callmap = { + self.schema.predicate(ns.bse.filesize): self.__filesize, + } def extract( self, subject: node.Node, content: CONTENT_READER.CONTENT_TYPE, - predicates: typing.Iterable[URI], - ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + predicates: typing.Iterable[_schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, _schema.Predicate, typing.Any]]: for pred in predicates: # find callback - clbk = self.__callmap.get(pred) + clbk = self._callmap.get(pred) if clbk is None: continue # get value @@ -60,7 +60,7 @@ class Stat(extractor.Extractor): # produce triple yield subject, pred, value - def __filesize(self, content: CONTENT_READER.CONTENT_TYPE) -> int: + def __filesize(self, content: os.stat_result) -> typing.Optional[int]: """Return the file size.""" try: return content.st_size diff --git a/bsie/utils/bsfs.py b/bsie/utils/bsfs.py index 1ae657c..01ec5d1 100644 --- a/bsie/utils/bsfs.py +++ b/bsie/utils/bsfs.py @@ -8,6 +8,7 @@ Author: Matthias Baumgartner, 2022 import typing # bsfs imports +from bsfs import schema from bsfs.namespace import Namespace from bsfs.utils import URI, typename @@ -15,6 +16,7 @@ from bsfs.utils import URI, typename __all__: typing.Sequence[str] = ( 'Namespace', 'URI', + 'schema', 'typename', ) diff --git a/bsie/utils/namespaces.py b/bsie/utils/namespaces.py index 67ccc71..13be96b 100644 --- a/bsie/utils/namespaces.py +++ b/bsie/utils/namespaces.py @@ -7,13 +7,14 @@ Author: Matthias Baumgartner, 2022 # imports import typing -# bsie imports +# inner-module imports from . import bsfs as _bsfs # constants bse = _bsfs.Namespace('http://bsfs.ai/schema/Entity#') bsfs = _bsfs.Namespace('http://bsfs.ai/schema/') bsm = _bsfs.Namespace('http://bsfs.ai/schema/meta#') +xsd = _bsfs.Namespace('http://www.w3.org/2001/XMLSchema#') # export __all__: typing.Sequence[str] = ( diff --git a/bsie/utils/node.py b/bsie/utils/node.py index 60863a4..3a0f06b 100644 --- a/bsie/utils/node.py +++ b/bsie/utils/node.py @@ -12,7 +12,7 @@ from bsie.utils.bsfs import URI # exports __all__: typing.Sequence[str] = ( - 'Node' + 'Node', ) -- cgit v1.2.3 From b96c6e2096c387b70e2a4c1f0bc53b6044a0dc6f Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Fri, 25 Nov 2022 14:36:27 +0100 Subject: decouple readers and extractors; use strings for reference and repeated type annotations --- bsie/base/extractor.py | 5 ++--- bsie/base/reader.py | 11 ++--------- bsie/extractor/generic/path.py | 4 ++-- bsie/extractor/generic/stat.py | 4 ++-- bsie/reader/path.py | 7 ++----- bsie/reader/stat.py | 6 ++---- 6 files changed, 12 insertions(+), 25 deletions(-) (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index a6a69c6..7acf2bd 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -8,8 +8,7 @@ Author: Matthias Baumgartner, 2022 import abc import typing -# inner-module imports -from . import reader +# bsie imports from bsie.utils import node from bsie.utils.bsfs import schema as _schema, typename @@ -50,7 +49,7 @@ class Extractor(abc.ABC): """Produce (node, predicate, value)-triples from some content.""" # what type of content is expected (i.e. reader subclass). - CONTENT_READER: typing.Optional[typing.Type[reader.Reader]] = None + CONTENT_READER: typing.Optional[str] = None # extractor schema. 
schema: _schema.Schema diff --git a/bsie/base/reader.py b/bsie/base/reader.py index f29e451..e59abef 100644 --- a/bsie/base/reader.py +++ b/bsie/base/reader.py @@ -12,12 +12,11 @@ Author: Matthias Baumgartner, 2022 import abc import typing -# inner-module imports +# bsie imports from bsie.utils.bsfs import URI, typename # exports __all__: typing.Sequence[str] = ( - 'Aggregator', 'Reader', ) @@ -27,20 +26,14 @@ __all__: typing.Sequence[str] = ( class Reader(abc.ABC): """Read and return some content from a file.""" - # In what data structure content is returned - CONTENT_TYPE = typing.Union[typing.Any] - # NOTE: Child classes must also assign a typing.Union even if there's - # only one options - def __str__(self) -> str: return typename(self) def __repr__(self) -> str: return f'{typename(self)}()' - # FIXME: How about using contexts instead of calls? @abc.abstractmethod - def __call__(self, path: URI) -> CONTENT_TYPE: + def __call__(self, path: URI) -> typing.Any: """Return some content of the file at *path*. Raises a `ReaderError` if the reader cannot make sense of the file format. """ diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index f358a79..f346f97 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -24,7 +24,7 @@ __all__: typing.Sequence[str] = ( class Path(extractor.Extractor): """Extract information from file's path.""" - CONTENT_READER = bsie.reader.path.Path + CONTENT_READER = 'bsie.reader.path.Path' # mapping from predicate to handler function. _callmap: typing.Dict[schema.Predicate, typing.Callable[[str], typing.Any]] @@ -45,7 +45,7 @@ class Path(extractor.Extractor): def extract( self, subject: node.Node, - content: CONTENT_READER.CONTENT_TYPE, + content: str, predicates: typing.Iterable[schema.Predicate], ) -> typing.Iterator[typing.Tuple[node.Node, schema.Predicate, typing.Any]]: for pred in predicates: diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index e5387af..7088c0a 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -24,7 +24,7 @@ __all__: typing.Sequence[str] = ( class Stat(extractor.Extractor): """Extract information from the file system.""" - CONTENT_READER = bsie.reader.stat.Stat + CONTENT_READER = 'bsie.reader.stat.Stat' # mapping from predicate to handler function. _callmap: typing.Dict[_schema.Predicate, typing.Callable[[os.stat_result], typing.Any]] @@ -45,7 +45,7 @@ class Stat(extractor.Extractor): def extract( self, subject: node.Node, - content: CONTENT_READER.CONTENT_TYPE, + content: os.stat_result, predicates: typing.Iterable[_schema.Predicate], ) -> typing.Iterator[typing.Tuple[node.Node, _schema.Predicate, typing.Any]]: for pred in predicates: diff --git a/bsie/reader/path.py b/bsie/reader/path.py index d27c664..d60f187 100644 --- a/bsie/reader/path.py +++ b/bsie/reader/path.py @@ -5,10 +5,9 @@ A copy of the license is provided with the project. 
Author: Matthias Baumgartner, 2022 """ # imports -import os import typing -# inner-module imports +# bsie imports from bsie.base import reader # exports @@ -22,9 +21,7 @@ __all__: typing.Sequence[str] = ( class Path(reader.Reader): """Return the path.""" - CONTENT_TYPE = typing.Union[str] - - def __call__(self, path: str) -> CONTENT_TYPE: + def __call__(self, path: str) -> str: return path diff --git a/bsie/reader/stat.py b/bsie/reader/stat.py index f0b83fb..6d40ab8 100644 --- a/bsie/reader/stat.py +++ b/bsie/reader/stat.py @@ -8,7 +8,7 @@ Author: Matthias Baumgartner, 2022 import os import typing -# inner-module imports +# bsie imports from bsie.base import reader, errors # exports @@ -22,9 +22,7 @@ __all__: typing.Sequence[str] = ( class Stat(reader.Reader): """Read and return the filesystem's stat infos.""" - CONTENT_TYPE = typing.Union[os.stat_result] - - def __call__(self, path: str) -> CONTENT_TYPE: + def __call__(self, path: str) -> os.stat_result: try: return os.stat(path) except Exception: -- cgit v1.2.3 From 9ce32829b2bb85907a34a543bfcaa9183d1e362c Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Fri, 25 Nov 2022 14:39:18 +0100 Subject: string conversion and equality checks --- bsie/base/extractor.py | 7 +++++++ bsie/base/reader.py | 6 ++++++ bsie/extractor/generic/constant.py | 6 ++++++ bsie/utils/node.py | 18 ++++++++++++++++-- 4 files changed, 35 insertions(+), 2 deletions(-) (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index 7acf2bd..2fc4f18 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -63,6 +63,13 @@ class Extractor(abc.ABC): def __repr__(self) -> str: return f'{typename(self)}()' + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, type(self)) \ + and self.CONTENT_READER == other.CONTENT_READER \ + and self.schema == other.schema + + def __hash__(self) -> int: + return hash((type(self), self.CONTENT_READER, self.schema)) def predicates(self) -> typing.Iterator[_schema.Predicate]: """Return the predicates that may be part of extracted triples.""" diff --git a/bsie/base/reader.py b/bsie/base/reader.py index e59abef..b7eabf7 100644 --- a/bsie/base/reader.py +++ b/bsie/base/reader.py @@ -32,6 +32,12 @@ class Reader(abc.ABC): def __repr__(self) -> str: return f'{typename(self)}()' + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, type(self)) + + def __hash__(self) -> int: + return hash(type(self)) + @abc.abstractmethod def __call__(self, path: URI) -> typing.Any: """Return some content of the file at *path*. 
diff --git a/bsie/extractor/generic/constant.py b/bsie/extractor/generic/constant.py index 795bac6..7da792a 100644 --- a/bsie/extractor/generic/constant.py +++ b/bsie/extractor/generic/constant.py @@ -38,6 +38,12 @@ class Constant(extractor.Extractor): self._tuples = tuple((self.schema.predicate(p_uri), value) for p_uri, value in tuples) # FIXME: use schema instance for value checking + def __eq__(self, other: typing.Any) -> bool: + return super().__eq__(other) \ + and self._tuples == other._tuples + + def __hash__(self) -> int: + return hash((super().__hash__(), self._tuples)) def extract( self, diff --git a/bsie/utils/node.py b/bsie/utils/node.py index 3a0f06b..c9c494f 100644 --- a/bsie/utils/node.py +++ b/bsie/utils/node.py @@ -7,8 +7,8 @@ Author: Matthias Baumgartner, 2022 # imports import typing -# inner-module imports -from bsie.utils.bsfs import URI +# bsie imports +from bsie.utils.bsfs import URI, typename # exports __all__: typing.Sequence[str] = ( @@ -36,4 +36,18 @@ class Node(): self.node_type = URI(node_type) self.uri = URI(uri) + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, Node) \ + and other.node_type == self.node_type \ + and other.uri == self.uri + + def __hash__(self) -> int: + return hash((type(self), self.node_type, self.uri)) + + def __str__(self) -> str: + return f'{typename(self)}({self.node_type}, {self.uri})' + + def __repr__(self) -> str: + return f'{typename(self)}({self.node_type}, {self.uri})' + ## EOF ## -- cgit v1.2.3 From c9a1dea230054f5d6f40b7fd5e3930609c5f6416 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Fri, 25 Nov 2022 14:41:38 +0100 Subject: code analysis tool configs and minor fixes --- bsie/base/errors.py | 3 +-- bsie/reader/stat.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) (limited to 'bsie') diff --git a/bsie/base/errors.py b/bsie/base/errors.py index eedce3b..a86b7e8 100644 --- a/bsie/base/errors.py +++ b/bsie/base/errors.py @@ -10,11 +10,10 @@ import typing # exports __all__: typing.Sequence[str] = ( 'ExtractorError', + 'ReaderError', ) - - ## code ## class _BSIEError(Exception): diff --git a/bsie/reader/stat.py b/bsie/reader/stat.py index 6d40ab8..592d912 100644 --- a/bsie/reader/stat.py +++ b/bsie/reader/stat.py @@ -25,8 +25,8 @@ class Stat(reader.Reader): def __call__(self, path: str) -> os.stat_result: try: return os.stat(path) - except Exception: - raise errors.ReaderError(path) + except Exception as err: + raise errors.ReaderError(path) from err ## EOF ## -- cgit v1.2.3 From 3e6a69ce7f109f0fd4352507ad60d58d4cbd24a7 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Fri, 25 Nov 2022 14:43:12 +0100 Subject: builders and pipeline --- bsie/base/errors.py | 8 ++ bsie/tools/__init__.py | 20 +++++ bsie/tools/builder.py | 217 +++++++++++++++++++++++++++++++++++++++++++++++++ bsie/tools/pipeline.py | 121 +++++++++++++++++++++++++++ bsie/utils/bsfs.py | 3 +- 5 files changed, 368 insertions(+), 1 deletion(-) create mode 100644 bsie/tools/__init__.py create mode 100644 bsie/tools/builder.py create mode 100644 bsie/tools/pipeline.py (limited to 'bsie') diff --git a/bsie/base/errors.py b/bsie/base/errors.py index a86b7e8..760351f 100644 --- a/bsie/base/errors.py +++ b/bsie/base/errors.py @@ -9,7 +9,9 @@ import typing # exports __all__: typing.Sequence[str] = ( + 'BuilderError', 'ExtractorError', + 'LoaderError', 'ReaderError', ) @@ -19,6 +21,12 @@ __all__: typing.Sequence[str] = ( class _BSIEError(Exception): """Generic BSIE error.""" +class BuilderError(_BSIEError): + """The Builder failed to 
create an instance.""" + +class LoaderError(BuilderError): + """Failed to load a module or class.""" + class ExtractorError(_BSIEError): """The Extractor failed to process the given content.""" diff --git a/bsie/tools/__init__.py b/bsie/tools/__init__.py new file mode 100644 index 0000000..8ca9620 --- /dev/null +++ b/bsie/tools/__init__.py @@ -0,0 +1,20 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from . import builder +from . import pipeline + +# exports +__all__: typing.Sequence[str] = ( + 'builder', + 'pipeline', + ) + +## EOF ## diff --git a/bsie/tools/builder.py b/bsie/tools/builder.py new file mode 100644 index 0000000..8f7a410 --- /dev/null +++ b/bsie/tools/builder.py @@ -0,0 +1,217 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import importlib +import logging +import typing + +# bsie imports +from bsie import base +from bsie.base import errors +from bsie.utils.bsfs import URI, typename + +# inner-module imports +from . import pipeline + +# exports +__all__: typing.Sequence[str] = ( + 'ExtractorBuilder', + 'PipelineBuilder', + 'ReaderBuilder', + ) + + +## code ## + +logger = logging.getLogger(__name__) + +def _safe_load(module_name: str, class_name: str): + """Get a class from a module. Raise BuilderError if anything goes wrong.""" + try: + # load the module + module = importlib.import_module(module_name) + except Exception as err: + # cannot import module + raise errors.LoaderError(f'cannot load module {module_name}') from err + + try: + # get the class from the module + cls = getattr(module, class_name) + except Exception as err: + # cannot find the class + raise errors.LoaderError(f'cannot load class {class_name} from module {module_name}') from err + + return cls + + +def _unpack_name(name): + """Split a name into its module and class component (dot-separated).""" + if not isinstance(name, str): + raise TypeError(name) + if '.' not in name: + raise ValueError('name must be a qualified class name.') + module_name, class_name = name[:name.rfind('.')], name[name.rfind('.')+1:] + if module_name == '': + raise ValueError('name must be a qualified class name.') + return module_name, class_name + + +class ReaderBuilder(): + """Build `bsie.base.reader.Reader` instances. + + Readers are defined via their qualified class name + (e.g., bsie.reader.path.Path) and optional keyword + arguments that are passed to the constructor via + the *kwargs* argument (name as key, kwargs as value). + The ReaderBuilder keeps a cache of previously built + reader instances, as they are anyway built with + identical keyword arguments. 
+ + """ + + # keyword arguments + kwargs: typing.Dict[str, typing.Dict[str, typing.Any]] + + # cached readers + cache: typing.Dict[str, base.reader.Reader] + + def __init__(self, kwargs: typing.Dict[str, typing.Dict[str, typing.Any]]): + self.kwargs = kwargs + self.cache = {} + + def build(self, name: str) -> base.reader.Reader: + """Return an instance for the qualified class name.""" + # return cached instance + if name in self.cache: + return self.cache[name] + + # check name and get module/class components + module_name, class_name = _unpack_name(name) + + # import reader class + cls = _safe_load(module_name, class_name) + + # get kwargs + kwargs = self.kwargs.get(name, {}) + if not isinstance(kwargs, dict): + raise TypeError(f'expected a kwargs dict, found {typename(kwargs)}') + + try: # build, cache, and return instance + obj = cls(**kwargs) + # cache instance + self.cache[name] = obj + # return instance + return obj + + except Exception as err: + raise errors.BuilderError(f'failed to build reader {name} due to {typename(err)}: {err}') from err + + +class ExtractorBuilder(): + """Build `bsie.base.extractor.Extractor instances. + + It is permissible to build multiple instances of the same extractor + (typically with different arguments), hence the ExtractorBuilder + receives a list of build specifications. Each specification is + a dict with a single key (extractor's qualified name) and a dict + to be used as keyword arguments. + Example: [{'bsie.extractor.generic.path.Path': {}}, ] + + """ + + # build specifications + specs: typing.List[typing.Dict[str, typing.Dict[str, typing.Any]]] + + def __init__(self, specs: typing.List[typing.Dict[str, typing.Dict[str, typing.Any]]]): + self.specs = specs + + def __iter__(self) -> typing.Iterator[int]: + """Iterate over extractor specifications.""" + return iter(range(len(self.specs))) + + def build(self, index: int) -> base.extractor.Extractor: + """Return an instance of the n'th extractor (n=*index*).""" + # get build instructions + specs = self.specs[index] + + # check specs structure. 
expecting[{name: {kwargs}}] + if not isinstance(specs, dict): + raise TypeError(f'expected a dict, found {typename(specs)}') + if len(specs) != 1: + raise TypeError(f'expected a dict of length one, found {len(specs)}') + + # get name and args from specs + name = next(iter(specs.keys())) + kwargs = specs[name] + + # check kwargs structure + if not isinstance(kwargs, dict): + raise TypeError(f'expected a dict, found {typename(kwargs)}') + + # check name and get module/class components + module_name, class_name = _unpack_name(name) + + # import extractor class + cls = _safe_load(module_name, class_name) + + try: # build and return instance + return cls(**kwargs) + + except Exception as err: + raise errors.BuilderError(f'failed to build extractor {name} due to {typename(err)}: {err}') from err + + +class PipelineBuilder(): + """Build `bsie.tools.pipeline.Pipeline` instances.""" + + def __init__( + self, + prefix: URI, + reader_builder: ReaderBuilder, + extractor_builder: ExtractorBuilder, + ): + self.prefix = prefix + self.rbuild = reader_builder + self.ebuild = extractor_builder + + def build(self) -> pipeline.Pipeline: + """Return a Pipeline instance.""" + ext2rdr = {} + + for eidx in self.ebuild: + # build extractor + try: + ext = self.ebuild.build(eidx) + + except errors.LoaderError as err: # failed to load extractor; skip + logger.error('failed to load extractor: %s', err) + continue + + except errors.BuilderError as err: # failed to build instance; skip + logger.error(str(err)) + continue + + try: + # get reader required by extractor + if ext.CONTENT_READER is not None: + rdr = self.rbuild.build(ext.CONTENT_READER) + else: + rdr = None + # store extractor + ext2rdr[ext] = rdr + + except errors.LoaderError as err: # failed to load reader + logger.error('failed to load reader: %s', err) + + except errors.BuilderError as err: # failed to build reader + logger.error(str(err)) + + return pipeline.Pipeline(self.prefix, ext2rdr) + + + +## EOF ## diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py new file mode 100644 index 0000000..8e1c992 --- /dev/null +++ b/bsie/tools/pipeline.py @@ -0,0 +1,121 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +from collections import defaultdict +import logging +import typing + +# bsie imports +from bsie import base +from bsie.utils import ns +from bsie.utils.node import Node +from bsie.utils.bsfs import schema as _schema, URI, uuid as _uuid, typename + +# exports +__all__: typing.Sequence[str] = ( + 'Pipeline', + ) + +## code ## + +logger = logging.getLogger(__name__) + +class Pipeline(): + """Extraction pipeline to generate triples from files. + + The Pipeline binds readers and extractors, and performs + the necessary operations to produce triples from a file. + It takes a best-effort approach to extract as many triples + as possible. Errors during the extraction are passed over + and reported to the log. + + """ + + # combined extractor schemas. + schema: _schema.Schema + + # node prefix. 
+ _prefix: URI + + # extractor -> reader mapping + _ext2rdr: typing.Dict[base.extractor.Extractor, typing.Optional[base.reader.Reader]] + + def __init__( + self, + prefix: URI, + ext2rdr: typing.Dict[base.extractor.Extractor, typing.Optional[base.reader.Reader]] + ): + # store core members + self._prefix = prefix + self._ext2rdr = ext2rdr + # compile schema from all extractors + self.schema = _schema.Schema.Union(ext.schema for ext in ext2rdr) + + def __str__(self) -> str: + return typename(self) + + def __repr__(self) -> str: + return f'{typename(self)}(...)' + + def __hash__(self) -> int: + return hash((type(self), self._prefix, self.schema, tuple(self._ext2rdr), tuple(self._ext2rdr.values()))) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, type(self)) \ + and self.schema == other.schema \ + and self._prefix == other._prefix \ + and self._ext2rdr == other._ext2rdr + + def __call__( + self, + path: URI, + predicates: typing.Optional[typing.Iterable[_schema.Predicate]] = None, + ) -> typing.Iterator[typing.Tuple[Node, _schema.Predicate, typing.Any]]: + """Extract triples from the file at *path*. Optionally, limit triples to *predicates*.""" + # get predicates + predicates = set(predicates) if predicates is not None else set(self.schema.predicates()) + + # get extractors + extractors = {ext for ext in self._ext2rdr if not set(ext.predicates()).isdisjoint(predicates)} + + # corner-case short-cut + if len(extractors) == 0: + return + + # get readers -> extractors mapping + rdr2ext = defaultdict(set) + for ext in extractors: + rdr = self._ext2rdr[ext] + rdr2ext[rdr].add(ext) + + # create subject for file + uuid = _uuid.UCID.from_path(path) + subject = Node(ns.bsfs.Entity, self._prefix + uuid) + + # extract information + for rdr, extrs in rdr2ext.items(): + try: + # get content + content = rdr(path) if rdr is not None else None + + # apply extractors on this content + for ext in extrs: + try: + # get predicate/value tuples + for node, pred, value in ext.extract(subject, content, predicates): + yield node, pred, value + + except base.errors.ExtractorError as err: + # critical extractor failure. + logger.error('%s failed to extract triples from content: %s', ext, err) + + except base.errors.ReaderError as err: + # failed to read any content. skip. 
+ logger.error('%s failed to read content: %s', rdr, err) + + +## EOF ## diff --git a/bsie/utils/bsfs.py b/bsie/utils/bsfs.py index 01ec5d1..a4b7626 100644 --- a/bsie/utils/bsfs.py +++ b/bsie/utils/bsfs.py @@ -10,7 +10,7 @@ import typing # bsfs imports from bsfs import schema from bsfs.namespace import Namespace -from bsfs.utils import URI, typename +from bsfs.utils import URI, typename, uuid # exports __all__: typing.Sequence[str] = ( @@ -18,6 +18,7 @@ __all__: typing.Sequence[str] = ( 'URI', 'schema', 'typename', + 'uuid', ) ## EOF ## -- cgit v1.2.3 From edc747252a04675c46059215751719b6666a77f9 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sat, 3 Dec 2022 18:57:58 +0100 Subject: adapt to schema interface update: owl:maxCardinality changed to bsfs:unique --- bsie/base/extractor.py | 1 - bsie/extractor/generic/path.py | 2 +- bsie/extractor/generic/stat.py | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index 2fc4f18..75b7173 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -23,7 +23,6 @@ __all__: typing.Sequence[str] = ( # NOTE: The definition here is only for convenience; Each Extractor must implement its use, if so desired. SCHEMA_PREAMBLE = ''' # common external prefixes - prefix owl: prefix rdf: prefix rdfs: prefix xsd: diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index f346f97..e6b901e 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -36,7 +36,7 @@ class Path(extractor.Extractor): rdfs:range xsd:string ; rdfs:label "File name"^^xsd:string ; schema:description "Filename of entity in some filesystem."^^xsd:string ; - owl:maxCardinality "INF"^^xsd:number . + bsfs:unique "false"^^xsd:boolean . ''')) self._callmap = { self.schema.predicate(ns.bse.filename): self.__filename, diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index 7088c0a..6493d37 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -36,7 +36,7 @@ class Stat(extractor.Extractor): rdfs:range xsd:integer ; rdfs:label "File size"^^xsd:string ; schema:description "File size of entity in some filesystem."^^xsd:string ; - owl:maxCardinality "INF"^^xsd:number . + bsfs:unique "false"^^xsd:boolean . ''')) self._callmap = { self.schema.predicate(ns.bse.filesize): self.__filesize, -- cgit v1.2.3 From 559e643bb1fa39feefd2eb73847ad9420daf1deb Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Wed, 14 Dec 2022 06:10:25 +0100 Subject: bsie extraction and info apps --- bsie/__init__.py | 6 +++ bsie/apps/__init__.py | 20 ++++++++ bsie/apps/index.py | 131 +++++++++++++++++++++++++++++++++++++++++++++++ bsie/apps/info.py | 74 ++++++++++++++++++++++++++ bsie/base/errors.py | 6 +++ bsie/lib/__init__.py | 13 +++++ bsie/lib/bsie.py | 80 +++++++++++++++++++++++++++++ bsie/tools/pipeline.py | 4 ++ bsie/utils/namespaces.py | 2 +- 9 files changed, 335 insertions(+), 1 deletion(-) create mode 100644 bsie/apps/__init__.py create mode 100644 bsie/apps/index.py create mode 100644 bsie/apps/info.py create mode 100644 bsie/lib/__init__.py create mode 100644 bsie/lib/bsie.py (limited to 'bsie') diff --git a/bsie/__init__.py b/bsie/__init__.py index 2f2477a..2b874bd 100644 --- a/bsie/__init__.py +++ b/bsie/__init__.py @@ -5,8 +5,14 @@ A copy of the license is provided with the project. 
Author: Matthias Baumgartner, 2022 """ # imports +import collections import typing +# constants +version_info = collections.namedtuple('version_info', + ('major', 'minor', 'micro')) \ + (0, 0, 1) + # exports __all__: typing.Sequence[str] = [] diff --git a/bsie/apps/__init__.py b/bsie/apps/__init__.py new file mode 100644 index 0000000..a548c3c --- /dev/null +++ b/bsie/apps/__init__.py @@ -0,0 +1,20 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# inner-module imports +from .index import main as index +from .info import main as info + +# exports +__all__: typing.Sequence[str] = ( + 'index', + 'info', + ) + +## EOF ## diff --git a/bsie/apps/index.py b/bsie/apps/index.py new file mode 100644 index 0000000..821aa4c --- /dev/null +++ b/bsie/apps/index.py @@ -0,0 +1,131 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import argparse +import os +import typing + +# bsfs imports +import bsfs + +# bsie imports +from bsie.base import errors +from bsie.lib.bsie import BSIE +from bsie.tools import builder +from bsie.utils.bsfs import URI + +# exports +__all__: typing.Sequence[str] = ( + 'main', + ) + + +## code ## + +def main(argv): + """Index files or directories into BSFS.""" + parser = argparse.ArgumentParser(description=main.__doc__, prog='index') + parser.add_argument('--user', type=URI, default=URI('http://example.com/me'), + help='') + parser.add_argument('--collect', action='append', default=[], + help='') + parser.add_argument('--discard', action='append', default=[], + help='') + parser.add_argument('-r', '--recursive', action='store_true', default=False, + help='') + parser.add_argument('--follow', action='store_true', default=False, + help='') + parser.add_argument('--print', action='store_true', default=False, + help='') + parser.add_argument('input_file', nargs=argparse.REMAINDER, + help='') + args = parser.parse_args(argv) + + # FIXME: Read reader/extractor configs from a config file + # reader builder + rbuild = builder.ReaderBuilder({}) + # extractor builder + ebuild = builder.ExtractorBuilder([ + {'bsie.extractor.generic.path.Path': {}}, + {'bsie.extractor.generic.stat.Stat': {}}, + {'bsie.extractor.generic.constant.Constant': dict( + tuples=[('http://bsfs.ai/schema/Entity#author', 'Me, myself, and I')], + schema=''' + bse:author rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:string ; + bsfs:unique "true"^^xsd:boolean . + ''', + )}, + ]) + # pipeline builder + prefix = URI(args.user + ('file#' if args.user.endswith('/') else '/file#')) + pbuild = builder.PipelineBuilder( + prefix, + rbuild, + ebuild, + ) + + # build pipeline + pipeline = pbuild.build() + # build BSIE frontend + bsie = BSIE(pipeline, args.collect, args.discard) + + + def walk(handle): + """Walk through given input files.""" + # FIXME: collect all triples by node, set all predicates at once + # FIXME: simplify code (below but maybe also above) + # FIXME: How to handle dependencies between data? + # E.g. do I still want to link to a tag despite not being permitted to set its label? + # FIXME: node renaming? 
+ + # index input paths + for path in args.input_file: + if os.path.isdir(path) and args.recursive: + for dirpath, _, filenames in os.walk(path, topdown=True, followlinks=args.follow): + for filename in filenames: + for node, pred, value in bsie.from_file(os.path.join(dirpath, filename)): + handle(node, pred, value) + elif os.path.isfile(path): + for node, pred, value in bsie.from_file(path): + handle(node, pred, value) + else: + raise errors.UnreachableError() + + + if args.print: + walk(print) + return None + + else: + # initialize bsfs + # NOTE: With persistent storages, the schema migration will be a separate operation. + # Here, we'd simply examine the schema and potentially discard more predicates. + store = bsfs.Open({ + 'Graph': { + 'user': args.user, + 'backend': { + 'SparqlStore': {}}, + }}) + store.migrate(bsie.schema) + # process files + def handle(node, pred, value): + store.node(node.node_type, node.uri).set(pred.uri, value) + walk(handle) + # return store + return store + + + +## main ## + +if __name__ == '__main__': + import sys + main(sys.argv[1:]) + +## EOF ## diff --git a/bsie/apps/info.py b/bsie/apps/info.py new file mode 100644 index 0000000..8cc6dca --- /dev/null +++ b/bsie/apps/info.py @@ -0,0 +1,74 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import argparse +import sys +import typing + +# bsie imports +from bsie.base import errors +from bsie.tools import builder +from bsie.utils.bsfs import URI + +# exports +__all__: typing.Sequence[str] = ( + 'main', + ) + + +## code ## + +def main(argv): + """Show information from BSIE.""" + parser = argparse.ArgumentParser(description=main.__doc__, prog='info') + parser.add_argument('what', choices=('predicates', ), + help='Select what information to show.') + args = parser.parse_args(argv) + + # FIXME: Read reader/extractor configs from a config file + # reader builder + rbuild = builder.ReaderBuilder({}) + # extractor builder + ebuild = builder.ExtractorBuilder([ + {'bsie.extractor.generic.path.Path': {}}, + {'bsie.extractor.generic.stat.Stat': {}}, + {'bsie.extractor.generic.constant.Constant': dict( + tuples=[('http://bsfs.ai/schema/Entity#author', 'Me, myself, and I')], + schema=''' + bse:author rdfs:subClassOf bsfs:Predicate ; + rdfs:domain bsfs:Entity ; + rdfs:range xsd:string ; + bsfs:unique "true"^^xsd:boolean .
+ ''', + )}, + ]) + # pipeline builder + pbuild = builder.PipelineBuilder( + URI('http://example.com/me/file#'), # not actually used + rbuild, + ebuild, + ) + + # build pipeline + pipeline = pbuild.build() + + # show info + if args.what == 'predicates': + # show predicates + for pred in pipeline.schema.predicates(): + print(pred.uri) + else: + # args.what is already checked by argparse + raise errors.UnreachableError() + + +## main ## + +if __name__ == '__main__': + main(sys.argv[1:]) + +## EOF ## diff --git a/bsie/base/errors.py b/bsie/base/errors.py index 760351f..dc3c30e 100644 --- a/bsie/base/errors.py +++ b/bsie/base/errors.py @@ -33,4 +33,10 @@ class ExtractorError(_BSIEError): class ReaderError(_BSIEError): """The Reader failed to read the given file.""" +class ProgrammingError(_BSIEError): + """An assertion-like error that indicates a code-base issue.""" + +class UnreachableError(ProgrammingError): + """Bravo, you've reached a point in code that should logically not be reachable.""" + ## EOF ## diff --git a/bsie/lib/__init__.py b/bsie/lib/__init__.py new file mode 100644 index 0000000..f6c9018 --- /dev/null +++ b/bsie/lib/__init__.py @@ -0,0 +1,13 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# exports +__all__: typing.Sequence[str] = [] + +## EOF ## diff --git a/bsie/lib/bsie.py b/bsie/lib/bsie.py new file mode 100644 index 0000000..aeccc8c --- /dev/null +++ b/bsie/lib/bsie.py @@ -0,0 +1,80 @@ +""" + +Part of the bsie module. +A copy of the license is provided with the project. +Author: Matthias Baumgartner, 2022 +""" +# imports +import typing + +# bsie imports +from bsie.tools.pipeline import Pipeline +from bsie.utils import node, ns +from bsie.utils.bsfs import URI, schema as schema_ + +# exports +__all__: typing.Sequence[str] = ( + 'BSIE', + ) + + +## code ## + +class BSIE(): + """Extract triples from files. + + Controls which predicates to extract (*collect*) and + which to not extract (*discard*). Note that this only affects + principal predicates, not auxiliary predicates like, e.g., tag labels. + + """ + + # predicates to extract. + predicates: typing.Set[URI] + + # local schema. + schema: schema_.Schema + + def __init__( + self, + # pipeline builder. + pipeline: Pipeline, + # predicates to extract at most. None implies all available w.r.t. extractors. + collect: typing.Optional[typing.Iterable[URI]] = None, + # predicates to discard. + discard: typing.Optional[typing.Iterable[URI]] = None, + ): + # store pipeline + self.pipeline = pipeline + # start off with available predicates + self.predicates = {pred.uri for pred in self.pipeline.predicates()} + # limit predicates to specified ones by argument. + if collect is not None: + collect = set(collect) + if len(collect) > 0: + self.predicates &= collect + # discard predicates. + if discard is not None: + self.predicates -= set(discard) + # discard ns.bsfs.Predicate + self.predicates.discard(ns.bsfs.Predicate) + # compile a schema that only contains the requested predicates (and implied types) + self.schema = schema_.Schema({ + self.pipeline.schema.predicate(pred) for pred in self.predicates}) + + def from_file( + self, + path: URI, + predicates: typing.Optional[typing.Iterable[URI]] = None, + ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: + """Produce triples for a given *path*. Limit to *predicates* if given.""" + # get requested predicates.
+ predicates = set(predicates) if predicates is not None else self.predicates + # filter through requested predicates. + predicates &= self.predicates + # predicate lookup + predicates = {self.schema.predicate(pred) for pred in predicates} + # invoke pipeline + yield from self.pipeline(path, predicates) + +## EOF ## diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py index 8e1c992..da422c0 100644 --- a/bsie/tools/pipeline.py +++ b/bsie/tools/pipeline.py @@ -70,6 +70,10 @@ class Pipeline(): and self._prefix == other._prefix \ and self._ext2rdr == other._ext2rdr + def predicates(self) -> typing.Iterator[_schema.Predicate]: + """Return the predicates that are extracted from a file.""" + return iter({pred for ext in self._ext2rdr for pred in ext.predicates()}) + def __call__( self, path: URI, diff --git a/bsie/utils/namespaces.py b/bsie/utils/namespaces.py index 13be96b..2fcb2dc 100644 --- a/bsie/utils/namespaces.py +++ b/bsie/utils/namespaces.py @@ -13,7 +13,7 @@ from . import bsfs as _bsfs # constants bse = _bsfs.Namespace('http://bsfs.ai/schema/Entity#') bsfs = _bsfs.Namespace('http://bsfs.ai/schema/') -bsm = _bsfs.Namespace('http://bsfs.ai/schema/meta#') +bsm = _bsfs.Namespace('http://bsfs.ai/schema/Meta#') xsd = _bsfs.Namespace('http://www.w3.org/2001/XMLSchema#') # export -- cgit v1.2.3 From 3dc3e9a9b0fc8c9727f91359814866d3deae6e79 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 16:42:07 +0100 Subject: minor fixes and comments --- bsie/__init__.py | 5 ++--- bsie/base/extractor.py | 9 +++++++-- bsie/utils/namespaces.py | 1 + 3 files changed, 10 insertions(+), 5 deletions(-) (limited to 'bsie') diff --git a/bsie/__init__.py b/bsie/__init__.py index 2b874bd..96e6953 100644 --- a/bsie/__init__.py +++ b/bsie/__init__.py @@ -9,9 +9,8 @@ import collections import typing # constants -version_info = collections.namedtuple('version_info', - ('major', 'minor', 'micro')) \ - (0, 0, 1) +T_VERSION_INFO = collections.namedtuple('T_VERSION_INFO', ('major', 'minor', 'micro')) +version_info = T_VERSION_INFO(0, 0, 1) # exports __all__: typing.Sequence[str] = [] diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index 75b7173..bfa403c 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -20,7 +20,7 @@ __all__: typing.Sequence[str] = ( # constants # essential definitions typically used in extractor schemas. -# NOTE: The definition here is only for convenience; Each Extractor must implement its use, if so desired. +# NOTE: This preamble is only for convenience; Each Extractor must implement its use, if so desired. SCHEMA_PREAMBLE = ''' # common external prefixes prefix rdf: @@ -45,7 +45,12 @@ SCHEMA_PREAMBLE = ''' ## code ## class Extractor(abc.ABC): - """Produce (node, predicate, value)-triples from some content.""" + """Produce (subject, predicate, value)-triples from some content. + The Extractor produces principal predicates that provide information + about the content itself (i.e., triples that include the subject), + and may also generate triples with auxiliary predicates if the + extracted value is a node itself. + """ # what type of content is expected (i.e. reader subclass).
CONTENT_READER: typing.Optional[str] = None diff --git a/bsie/utils/namespaces.py b/bsie/utils/namespaces.py index 2fcb2dc..d6e1c72 100644 --- a/bsie/utils/namespaces.py +++ b/bsie/utils/namespaces.py @@ -21,6 +21,7 @@ __all__: typing.Sequence[str] = ( 'bse', 'bsfs', 'bsm', + 'xsd', ) ## EOF ## -- cgit v1.2.3 From 49cf03fc212c813862453de5352436dc90d1e458 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 16:50:53 +0100 Subject: imports and init files --- bsie/apps/index.py | 7 ++----- bsie/base/__init__.py | 8 ++++---- bsie/base/reader.py | 8 ++++---- bsie/lib/__init__.py | 7 ++++++- bsie/reader/stat.py | 2 +- bsie/tools/__init__.py | 4 ++-- bsie/utils/bsfs.py | 3 ++- bsie/utils/node.py | 18 +++++++++--------- 8 files changed, 30 insertions(+), 27 deletions(-) (limited to 'bsie') diff --git a/bsie/apps/index.py b/bsie/apps/index.py index 821aa4c..aa26d0f 100644 --- a/bsie/apps/index.py +++ b/bsie/apps/index.py @@ -9,14 +9,11 @@ import argparse import os import typing -# bsfs imports -import bsfs - # bsie imports from bsie.base import errors -from bsie.lib.bsie import BSIE +from bsie.lib import BSIE from bsie.tools import builder -from bsie.utils.bsfs import URI +from bsie.utils import bsfs # exports __all__: typing.Sequence[str] = ( diff --git a/bsie/base/__init__.py b/bsie/base/__init__.py index 0154862..0d362cd 100644 --- a/bsie/base/__init__.py +++ b/bsie/base/__init__.py @@ -11,14 +11,14 @@ import typing # inner-module imports from . import errors -from . import extractor -from . import reader +from .extractor import Extractor +from .reader import Reader # exports __all__: typing.Sequence[str] = ( + 'Extractor', + 'Reader', 'errors', - 'extractor', - 'reader', ) ## EOF ## diff --git a/bsie/base/reader.py b/bsie/base/reader.py index b7eabf7..cbabd36 100644 --- a/bsie/base/reader.py +++ b/bsie/base/reader.py @@ -13,7 +13,7 @@ import abc import typing # bsie imports -from bsie.utils.bsfs import URI, typename +from bsie.utils import bsfs # exports __all__: typing.Sequence[str] = ( @@ -27,10 +27,10 @@ class Reader(abc.ABC): """Read and return some content from a file.""" def __str__(self) -> str: - return typename(self) + return bsfs.typename(self) def __repr__(self) -> str: - return f'{typename(self)}()' + return f'{bsfs.typename(self)}()' def __eq__(self, other: typing.Any) -> bool: return isinstance(other, type(self)) @@ -39,7 +39,7 @@ class Reader(abc.ABC): return hash(type(self)) @abc.abstractmethod - def __call__(self, path: URI) -> typing.Any: + def __call__(self, path: bsfs.URI) -> typing.Any: """Return some content of the file at *path*. Raises a `ReaderError` if the reader cannot make sense of the file format. 
""" diff --git a/bsie/lib/__init__.py b/bsie/lib/__init__.py index f6c9018..578c2c4 100644 --- a/bsie/lib/__init__.py +++ b/bsie/lib/__init__.py @@ -7,7 +7,12 @@ Author: Matthias Baumgartner, 2022 # imports import typing +# inner-module imports +from .bsie import BSIE + # exports -__all__: typing.Sequence[str] = [] +__all__: typing.Sequence[str] = ( + 'BSIE', + ) ## EOF ## diff --git a/bsie/reader/stat.py b/bsie/reader/stat.py index 592d912..fc5fb24 100644 --- a/bsie/reader/stat.py +++ b/bsie/reader/stat.py @@ -9,7 +9,7 @@ import os import typing # bsie imports -from bsie.base import reader, errors +from bsie.base import errors, reader # exports __all__: typing.Sequence[str] = ( diff --git a/bsie/tools/__init__.py b/bsie/tools/__init__.py index 8ca9620..803c321 100644 --- a/bsie/tools/__init__.py +++ b/bsie/tools/__init__.py @@ -9,12 +9,12 @@ import typing # inner-module imports from . import builder -from . import pipeline +from .pipeline import Pipeline # exports __all__: typing.Sequence[str] = ( 'builder', - 'pipeline', + 'Pipeline', ) ## EOF ## diff --git a/bsie/utils/bsfs.py b/bsie/utils/bsfs.py index a4b7626..c48049d 100644 --- a/bsie/utils/bsfs.py +++ b/bsie/utils/bsfs.py @@ -8,13 +8,14 @@ Author: Matthias Baumgartner, 2022 import typing # bsfs imports -from bsfs import schema +from bsfs import Open, schema from bsfs.namespace import Namespace from bsfs.utils import URI, typename, uuid # exports __all__: typing.Sequence[str] = ( 'Namespace', + 'Open', 'URI', 'schema', 'typename', diff --git a/bsie/utils/node.py b/bsie/utils/node.py index c9c494f..ecf39cd 100644 --- a/bsie/utils/node.py +++ b/bsie/utils/node.py @@ -8,7 +8,7 @@ Author: Matthias Baumgartner, 2022 import typing # bsie imports -from bsie.utils.bsfs import URI, typename +from bsie.utils import bsfs # exports __all__: typing.Sequence[str] = ( @@ -22,19 +22,19 @@ class Node(): """Lightweight Node, disconnected from any bsfs structures.""" # node type. - node_type: URI + node_type: bsfs.URI # node URI. 
- uri: URI + uri: bsfs.URI def __init__( self, - node_type: URI, - uri: URI, + node_type: bsfs.URI, + uri: bsfs.URI, ): # assign members - self.node_type = URI(node_type) - self.uri = URI(uri) + self.node_type = bsfs.URI(node_type) + self.uri = bsfs.URI(uri) def __eq__(self, other: typing.Any) -> bool: return isinstance(other, Node) \ @@ -45,9 +45,9 @@ class Node(): return hash((type(self), self.node_type, self.uri)) def __str__(self) -> str: - return f'{typename(self)}({self.node_type}, {self.uri})' + return f'{bsfs.typename(self)}({self.node_type}, {self.uri})' def __repr__(self) -> str: - return f'{typename(self)}({self.node_type}, {self.uri})' + return f'{bsfs.typename(self)}({self.node_type}, {self.uri})' ## EOF ## -- cgit v1.2.3 From 3b7fee369924eb7704709edeb8c17fff9c020dfb Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 17:06:09 +0100 Subject: import fixes --- bsie/base/extractor.py | 5 +++-- bsie/extractor/generic/constant.py | 9 +++++---- bsie/extractor/generic/path.py | 6 +++--- bsie/extractor/generic/stat.py | 6 +++--- bsie/lib/bsie.py | 11 ++++++----- bsie/tools/builder.py | 17 +++++++++-------- bsie/tools/pipeline.py | 6 +++--- 7 files changed, 32 insertions(+), 28 deletions(-) (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index bfa403c..a5c7846 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -11,6 +11,7 @@ import typing # bsie imports from bsie.utils import node from bsie.utils.bsfs import schema as _schema, typename +from bsie.utils import bsfs, node, ns # exports __all__: typing.Sequence[str] = ( @@ -62,10 +63,10 @@ class Extractor(abc.ABC): self.schema = schema def __str__(self) -> str: - return typename(self) + return bsfs.typename(self) def __repr__(self) -> str: - return f'{typename(self)}()' + return f'{bsfs.typename(self)}()' def __eq__(self, other: typing.Any) -> bool: return isinstance(other, type(self)) \ diff --git a/bsie/extractor/generic/constant.py b/bsie/extractor/generic/constant.py index 7da792a..f9e3415 100644 --- a/bsie/extractor/generic/constant.py +++ b/bsie/extractor/generic/constant.py @@ -11,6 +11,7 @@ import typing from bsie.base import extractor from bsie.utils.bsfs import URI, schema as _schema from bsie.utils.node import Node +from bsie.utils import bsfs, node # exports __all__: typing.Sequence[str] = ( @@ -26,14 +27,14 @@ class Constant(extractor.Extractor): CONTENT_READER = None # predicate/value pairs to be produced. - _tuples: typing.Tuple[typing.Tuple[_schema.Predicate, typing.Any], ...] + _tuples: typing.Tuple[typing.Tuple[bsfs.schema.Predicate, typing.Any], ...] 
def __init__( self, schema: str, - tuples: typing.Iterable[typing.Tuple[URI, typing.Any]], + tuples: typing.Iterable[typing.Tuple[bsfs.URI, typing.Any]], ): - super().__init__(_schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + schema)) + super().__init__(bsfs.schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + schema)) # NOTE: Raises a KeyError if the predicate is not part of the schema self._tuples = tuple((self.schema.predicate(p_uri), value) for p_uri, value in tuples) # FIXME: use schema instance for value checking @@ -47,7 +48,7 @@ class Constant(extractor.Extractor): def extract( self, - subject: Node, + subject: node.Node, content: None, predicates: typing.Iterable[_schema.Predicate], ) -> typing.Iterator[typing.Tuple[Node, _schema.Predicate, typing.Any]]: diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index e6b901e..2cc592a 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -10,8 +10,8 @@ import typing # bsie imports from bsie.base import extractor -from bsie.utils import node, ns from bsie.utils.bsfs import schema +from bsie.utils import bsfs, node, ns # exports __all__: typing.Sequence[str] = ( @@ -27,10 +27,10 @@ class Path(extractor.Extractor): CONTENT_READER = 'bsie.reader.path.Path' # mapping from predicate to handler function. - _callmap: typing.Dict[schema.Predicate, typing.Callable[[str], typing.Any]] + _callmap: typing.Dict[bsfs.schema.Predicate, typing.Callable[[str], typing.Any]] def __init__(self): - super().__init__(schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' + super().__init__(bsfs.schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' bse:filename rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; rdfs:range xsd:string ; diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index 6493d37..dfde7d2 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -10,8 +10,8 @@ import typing # bsie imports from bsie.base import extractor -from bsie.utils import node, ns from bsie.utils.bsfs import schema as _schema +from bsie.utils import bsfs, node, ns # exports __all__: typing.Sequence[str] = ( @@ -27,10 +27,10 @@ class Stat(extractor.Extractor): CONTENT_READER = 'bsie.reader.stat.Stat' # mapping from predicate to handler function. - _callmap: typing.Dict[_schema.Predicate, typing.Callable[[os.stat_result], typing.Any]] + _callmap: typing.Dict[bsfs.schema.Predicate, typing.Callable[[os.stat_result], typing.Any]] def __init__(self): - super().__init__(_schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' + super().__init__(bsfs.schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' bse:filesize rdfs:subClassOf bsfs:Predicate ; rdfs:domain bsfs:Entity ; rdfs:range xsd:integer ; diff --git a/bsie/lib/bsie.py b/bsie/lib/bsie.py index aeccc8c..3aeee2b 100644 --- a/bsie/lib/bsie.py +++ b/bsie/lib/bsie.py @@ -9,8 +9,9 @@ import typing # bsie imports from bsie.tools.pipeline import Pipeline -from bsie.utils import node, ns from bsie.utils.bsfs import URI, schema as schema_ +from bsie.tools import Pipeline +from bsie.utils import bsfs, node, ns # exports __all__: typing.Sequence[str] = ( @@ -39,10 +40,10 @@ class BSIE(): self, # pipeline builder. pipeline: Pipeline, - # predicates to extract at most. None implies all available w.r.t. extractors. - collect: typing.Optional[typing.Iterable[URI]] = None, - # predicates to discard. - discard: typing.Optional[typing.Iterable[URI]] = None, + # principals to extract at most. 
None implies all available w.r.t. extractors. + collect: typing.Optional[typing.Iterable[bsfs.URI]] = None, + # principals to discard. + discard: typing.Optional[typing.Iterable[bsfs.URI]] = None, ): # store pipeline self.pipeline = pipeline diff --git a/bsie/tools/builder.py b/bsie/tools/builder.py index 8f7a410..8c6b931 100644 --- a/bsie/tools/builder.py +++ b/bsie/tools/builder.py @@ -13,6 +13,7 @@ import typing from bsie import base from bsie.base import errors from bsie.utils.bsfs import URI, typename +from bsie.utils import bsfs # inner-module imports from . import pipeline @@ -61,7 +62,7 @@ def _unpack_name(name): class ReaderBuilder(): - """Build `bsie.base.reader.Reader` instances. + """Build `bsie.base.Reader` instances. Readers are defined via their qualified class name (e.g., bsie.reader.path.Path) and optional keyword @@ -83,7 +84,7 @@ class ReaderBuilder(): self.kwargs = kwargs self.cache = {} - def build(self, name: str) -> base.reader.Reader: + def build(self, name: str) -> base.Reader: """Return an instance for the qualified class name.""" # return cached instance if name in self.cache: @@ -98,7 +99,7 @@ class ReaderBuilder(): # get kwargs kwargs = self.kwargs.get(name, {}) if not isinstance(kwargs, dict): - raise TypeError(f'expected a kwargs dict, found {typename(kwargs)}') + raise TypeError(f'expected a kwargs dict, found {bsfs.typename(kwargs)}') try: # build, cache, and return instance obj = cls(**kwargs) @@ -108,11 +109,11 @@ class ReaderBuilder(): return obj except Exception as err: - raise errors.BuilderError(f'failed to build reader {name} due to {typename(err)}: {err}') from err + raise errors.BuilderError(f'failed to build reader {name} due to {bsfs.typename(err)}: {err}') from err class ExtractorBuilder(): - """Build `bsie.base.extractor.Extractor instances. + """Build `bsie.base.Extractor instances. It is permissible to build multiple instances of the same extractor (typically with different arguments), hence the ExtractorBuilder @@ -133,14 +134,14 @@ class ExtractorBuilder(): """Iterate over extractor specifications.""" return iter(range(len(self.specs))) - def build(self, index: int) -> base.extractor.Extractor: + def build(self, index: int) -> base.Extractor: """Return an instance of the n'th extractor (n=*index*).""" # get build instructions specs = self.specs[index] # check specs structure. 
expecting[{name: {kwargs}}] if not isinstance(specs, dict): - raise TypeError(f'expected a dict, found {typename(specs)}') + raise TypeError(f'expected a dict, found {bsfs.typename(specs)}') if len(specs) != 1: raise TypeError(f'expected a dict of length one, found {len(specs)}') @@ -150,7 +151,7 @@ class ExtractorBuilder(): # check kwargs structure if not isinstance(kwargs, dict): - raise TypeError(f'expected a dict, found {typename(kwargs)}') + raise TypeError(f'expected a dict, found {bsfs.typename(kwargs)}') # check name and get module/class components module_name, class_name = _unpack_name(name) diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py index da422c0..7fdd935 100644 --- a/bsie/tools/pipeline.py +++ b/bsie/tools/pipeline.py @@ -11,9 +11,9 @@ import typing # bsie imports from bsie import base -from bsie.utils import ns from bsie.utils.node import Node from bsie.utils.bsfs import schema as _schema, URI, uuid as _uuid, typename +from bsie.utils import bsfs, node, ns # exports __all__: typing.Sequence[str] = ( @@ -56,10 +56,10 @@ class Pipeline(): self.schema = _schema.Schema.Union(ext.schema for ext in ext2rdr) def __str__(self) -> str: - return typename(self) + return bsfs.typename(self) def __repr__(self) -> str: - return f'{typename(self)}(...)' + return f'{bsfs.typename(self)}(...)' def __hash__(self) -> int: return hash((type(self), self._prefix, self.schema, tuple(self._ext2rdr), tuple(self._ext2rdr.values()))) -- cgit v1.2.3 From 8e6d27ea75d2c8d68f6dd8b3d529aaa278f291cc Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 17:12:56 +0100 Subject: file node class in default schema --- bsie/base/extractor.py | 17 ++++++++--------- bsie/extractor/generic/path.py | 2 +- bsie/extractor/generic/stat.py | 2 +- bsie/tools/pipeline.py | 4 ++-- 4 files changed, 12 insertions(+), 13 deletions(-) (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index a5c7846..678dcec 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -35,6 +35,7 @@ SCHEMA_PREAMBLE = ''' # essential nodes bsfs:Entity rdfs:subClassOf bsfs:Node . + bsfs:File rdfs:subClassOf bsfs:Entity . # common definitions xsd:string rdfs:subClassOf bsfs:Literal . @@ -77,15 +78,13 @@ class Extractor(abc.ABC): return hash((type(self), self.CONTENT_READER, self.schema)) def predicates(self) -> typing.Iterator[_schema.Predicate]: - """Return the predicates that may be part of extracted triples.""" - # NOTE: Some predicates in the schema might not occur in actual triples, - # but are defined due to predicate class hierarchy. E.g., bsfs:Predicate - # is part of every schema but should not be used in triples. - # Announcing all predicates might not be the most efficient way, however, - # it is the most safe one. Concrete extractors that produce additional - # predicates (e.g. auxiliary nodes with their own predicates) should - # overwrite this method to only include the principal predicates. 
- return self.schema.predicates() + ent = self.schema.node(ns.bsfs.Entity) + return ( + pred + for pred + in self.schema.predicates() + if pred.domain <= ent or (pred.range is not None and pred.range <= ent) + ) @abc.abstractmethod def extract( diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index 2cc592a..00165e3 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -32,7 +32,7 @@ class Path(extractor.Extractor): def __init__(self): super().__init__(bsfs.schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' bse:filename rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; + rdfs:domain bsfs:File ; rdfs:range xsd:string ; rdfs:label "File name"^^xsd:string ; schema:description "Filename of entity in some filesystem."^^xsd:string ; diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index dfde7d2..0f4267f 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -32,7 +32,7 @@ class Stat(extractor.Extractor): def __init__(self): super().__init__(bsfs.schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + ''' bse:filesize rdfs:subClassOf bsfs:Predicate ; - rdfs:domain bsfs:Entity ; + rdfs:domain bsfs:File ; rdfs:range xsd:integer ; rdfs:label "File size"^^xsd:string ; schema:description "File size of entity in some filesystem."^^xsd:string ; diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py index 7fdd935..3d08993 100644 --- a/bsie/tools/pipeline.py +++ b/bsie/tools/pipeline.py @@ -97,8 +97,8 @@ class Pipeline(): rdr2ext[rdr].add(ext) # create subject for file - uuid = _uuid.UCID.from_path(path) - subject = Node(ns.bsfs.Entity, self._prefix + uuid) + uuid = bsfs.uuid.UCID.from_path(path) + subject = node.Node(ns.bsfs.File, self._prefix + 'file#' + uuid) # extract information for rdr, extrs in rdr2ext.items(): -- cgit v1.2.3 From 5d9526783ad8432c7d6dfe18c0e9f2b37950b470 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 17:16:25 +0100 Subject: Pipeline.prefix as Namespace instead of URI --- bsie/apps/index.py | 5 ++--- bsie/apps/info.py | 4 ++-- bsie/tools/builder.py | 13 +++++++++++-- bsie/tools/pipeline.py | 4 ++-- 4 files changed, 17 insertions(+), 9 deletions(-) (limited to 'bsie') diff --git a/bsie/apps/index.py b/bsie/apps/index.py index aa26d0f..e37684b 100644 --- a/bsie/apps/index.py +++ b/bsie/apps/index.py @@ -26,7 +26,7 @@ __all__: typing.Sequence[str] = ( def main(argv): """Index files or directories into BSFS.""" parser = argparse.ArgumentParser(description=main.__doc__, prog='index') - parser.add_argument('--user', type=URI, default=URI('http://example.com/me'), + parser.add_argument('--user', type=bsfs.URI, default=bsfs.URI('http://example.com/me'), help='') parser.add_argument('--collect', action='append', default=[], help='') @@ -60,9 +60,8 @@ def main(argv): )}, ]) # pipeline builder - prefix = URI(args.user + ('file#' if args.user.endswith('/') else '/file#')) pbuild = builder.PipelineBuilder( - prefix, + bsfs.Namespace(args.user + ('/' if not args.user.endswith('/') else '')), rbuild, ebuild, ) diff --git a/bsie/apps/info.py b/bsie/apps/info.py index 8cc6dca..eaf1f71 100644 --- a/bsie/apps/info.py +++ b/bsie/apps/info.py @@ -12,7 +12,7 @@ import typing # bsie imports from bsie.base import errors from bsie.tools import builder -from bsie.utils.bsfs import URI +from bsie.utils import bsfs # exports __all__: typing.Sequence[str] = ( @@ -48,7 +48,7 @@ def main(argv): ]) # pipeline builder pbuild = builder.PipelineBuilder( - 
URI('http://example.com/me/file#'), # not actually used + bsfs.Namespace('http://example.com/me/'), # not actually used rbuild, ebuild, ) diff --git a/bsie/tools/builder.py b/bsie/tools/builder.py index 8c6b931..24aea84 100644 --- a/bsie/tools/builder.py +++ b/bsie/tools/builder.py @@ -163,15 +163,24 @@ class ExtractorBuilder(): return cls(**kwargs) except Exception as err: - raise errors.BuilderError(f'failed to build extractor {name} due to {typename(err)}: {err}') from err + raise errors.BuilderError(f'failed to build extractor {name} due to {bsfs.typename(err)}: {err}') from err class PipelineBuilder(): """Build `bsie.tools.pipeline.Pipeline` instances.""" + # Prefix to be used in the Pipeline. + prefix: bsfs.Namespace + + # builder for Readers. + rbuild: ReaderBuilder + + # builder for Extractors. + ebuild: ExtractorBuilder + def __init__( self, - prefix: URI, + prefix: bsfs.Namespace, reader_builder: ReaderBuilder, extractor_builder: ExtractorBuilder, ): diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py index 3d08993..834bd99 100644 --- a/bsie/tools/pipeline.py +++ b/bsie/tools/pipeline.py @@ -39,14 +39,14 @@ class Pipeline(): schema: _schema.Schema # node prefix. - _prefix: URI + _prefix: bsfs.Namespace # extractor -> reader mapping _ext2rdr: typing.Dict[base.extractor.Extractor, typing.Optional[base.reader.Reader]] def __init__( self, - prefix: URI, + prefix: bsfs.Namespace, ext2rdr: typing.Dict[base.extractor.Extractor, typing.Optional[base.reader.Reader]] ): # store core members -- cgit v1.2.3 From 3426b4e201cf03b78d2a3f144876955fcda2f66b Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 17:17:53 +0100 Subject: extractor interface revision * schema as property * predicates -> principals --- bsie/base/extractor.py | 21 +++++++++++++-------- bsie/extractor/generic/constant.py | 8 +++----- bsie/extractor/generic/path.py | 7 +++---- bsie/extractor/generic/stat.py | 7 +++---- 4 files changed, 22 insertions(+), 21 deletions(-) (limited to 'bsie') diff --git a/bsie/base/extractor.py b/bsie/base/extractor.py index 678dcec..c44021b 100644 --- a/bsie/base/extractor.py +++ b/bsie/base/extractor.py @@ -9,8 +9,6 @@ import abc import typing # bsie imports -from bsie.utils import node -from bsie.utils.bsfs import schema as _schema, typename from bsie.utils import bsfs, node, ns # exports @@ -58,10 +56,10 @@ class Extractor(abc.ABC): CONTENT_READER: typing.Optional[str] = None # extractor schema. 
- schema: _schema.Schema + _schema: bsfs.schema.Schema - def __init__(self, schema: _schema.Schema): - self.schema = schema + def __init__(self, schema: bsfs.schema.Schema): + self._schema = schema def __str__(self) -> str: return bsfs.typename(self) @@ -77,7 +75,14 @@ class Extractor(abc.ABC): def __hash__(self) -> int: return hash((type(self), self.CONTENT_READER, self.schema)) - def predicates(self) -> typing.Iterator[_schema.Predicate]: + @property + def schema(self) -> bsfs.schema.Schema: + """Return the extractor's schema.""" + return self._schema + + @property + def principals(self) -> typing.Iterator[bsfs.schema.Predicate]: + """Return the principal predicates, i.e., relations from/to the extraction subject.""" ent = self.schema.node(ns.bsfs.Entity) return ( pred @@ -91,8 +96,8 @@ class Extractor(abc.ABC): self, subject: node.Node, content: typing.Any, - predicates: typing.Iterable[_schema.Predicate], - ) -> typing.Iterator[typing.Tuple[node.Node, _schema.Predicate, typing.Any]]: + principals: typing.Iterable[bsfs.schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, bsfs.schema.Predicate, typing.Any]]: """Return (node, predicate, value) triples.""" ## EOF ## diff --git a/bsie/extractor/generic/constant.py b/bsie/extractor/generic/constant.py index f9e3415..cdb2ef6 100644 --- a/bsie/extractor/generic/constant.py +++ b/bsie/extractor/generic/constant.py @@ -9,8 +9,6 @@ import typing # bsie imports from bsie.base import extractor -from bsie.utils.bsfs import URI, schema as _schema -from bsie.utils.node import Node from bsie.utils import bsfs, node # exports @@ -50,10 +48,10 @@ class Constant(extractor.Extractor): self, subject: node.Node, content: None, - predicates: typing.Iterable[_schema.Predicate], - ) -> typing.Iterator[typing.Tuple[Node, _schema.Predicate, typing.Any]]: + principals: typing.Iterable[bsfs.schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, bsfs.schema.Predicate, typing.Any]]: for pred, value in self._tuples: - if pred in predicates: + if pred in principals: yield subject, pred, value ## EOF ## diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index 00165e3..23ae80b 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -10,7 +10,6 @@ import typing # bsie imports from bsie.base import extractor -from bsie.utils.bsfs import schema from bsie.utils import bsfs, node, ns # exports @@ -46,9 +45,9 @@ class Path(extractor.Extractor): self, subject: node.Node, content: str, - predicates: typing.Iterable[schema.Predicate], - ) -> typing.Iterator[typing.Tuple[node.Node, schema.Predicate, typing.Any]]: - for pred in predicates: + principals: typing.Iterable[bsfs.schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, bsfs.schema.Predicate, typing.Any]]: + for pred in principals: # find callback clbk = self._callmap.get(pred) if clbk is None: diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index 0f4267f..1dcfedf 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -10,7 +10,6 @@ import typing # bsie imports from bsie.base import extractor -from bsie.utils.bsfs import schema as _schema from bsie.utils import bsfs, node, ns # exports @@ -46,9 +45,9 @@ class Stat(extractor.Extractor): self, subject: node.Node, content: os.stat_result, - predicates: typing.Iterable[_schema.Predicate], - ) -> typing.Iterator[typing.Tuple[node.Node, _schema.Predicate, typing.Any]]: - for pred in predicates: + principals: 
typing.Iterable[bsfs.schema.Predicate], + ) -> typing.Iterator[typing.Tuple[node.Node, bsfs.schema.Predicate, typing.Any]]: + for pred in principals: # find callback clbk = self._callmap.get(pred) if clbk is None: -- cgit v1.2.3 From 37510d134458bf954ca2da6d40be0d6c76661e8c Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 17:19:21 +0100 Subject: bsie/pipeline interface revision: * predicates -> principals * schema as property * principals as property * information hiding * full subschema instead of only predicates --- bsie/lib/bsie.py | 61 +++++++++++++++++++++++++++++--------------------- bsie/tools/pipeline.py | 52 +++++++++++++++++++++++++++--------------- 2 files changed, 70 insertions(+), 43 deletions(-) (limited to 'bsie') diff --git a/bsie/lib/bsie.py b/bsie/lib/bsie.py index 3aeee2b..e087fa9 100644 --- a/bsie/lib/bsie.py +++ b/bsie/lib/bsie.py @@ -8,8 +8,6 @@ Author: Matthias Baumgartner, 2022 import typing # bsie imports -from bsie.tools.pipeline import Pipeline -from bsie.utils.bsfs import URI, schema as schema_ from bsie.tools import Pipeline from bsie.utils import bsfs, node, ns @@ -30,11 +28,14 @@ class BSIE(): """ + # pipeline + _pipeline: Pipeline + # predicates to extract. - predicates: typing.Set[URI] + _principals: typing.Set[bsfs.URI] # local schema. - schema: schema_.Schema + _schema: bsfs.schema.Schema def __init__( self, @@ -46,36 +47,46 @@ class BSIE(): discard: typing.Optional[typing.Iterable[bsfs.URI]] = None, ): # store pipeline - self.pipeline = pipeline - # start off with available predicates - self.predicates = {pred.uri for pred in self.pipeline.predicates()} - # limit predicates to specified ones by argument. + self._pipeline = pipeline + # start off with available principals + self._principals = {pred.uri for pred in self._pipeline.principals} + # limit principals to specified ones by argument. if collect is not None: collect = set(collect) if len(collect) > 0: - self.predicates &= collect - # discard predicates. + self._principals &= collect + # discard principals. if discard is not None: - self.predicates -= set(discard) + self._principals -= set(discard) # discard ns.bsfs.Predicate - self.predicates.discard(ns.bsfs.Predicate) - # compile a schema that only contains the requested predicates (and implied types) - self.schema = schema_.Schema({ - self.pipeline.schema.predicate(pred) for pred in self.predicates}) + self._principals.discard(ns.bsfs.Predicate) + # compile a schema that only contains the requested principals (and auxiliary predicates) + self._schema = self._pipeline.subschema( + self._pipeline.schema.predicate(pred) for pred in self._principals) + + @property + def schema(self) -> bsfs.schema.Schema: + """Return the BSIE schema.""" + return self._schema + + @property + def principals(self) -> typing.Iterator[bsfs.URI]: + """Return an iterator to the principal predicates.""" + return iter(self._principals) def from_file( self, - path: URI, - predicates: typing.Optional[typing.Iterable[URI]] = None, - ) -> typing.Iterator[typing.Tuple[node.Node, URI, typing.Any]]: - """Produce triples for a given *path*. Limit to *predicates* if given.""" - # get requested predicates. - predicates = set(predicates) if predicates is not None else self.predicates - # filter through requested predicates. - predicates &= self.predicates + path: bsfs.URI, + principals: typing.Optional[typing.Iterable[bsfs.URI]] = None, + ) -> typing.Iterator[typing.Tuple[node.Node, bsfs.URI, typing.Any]]: + """Produce triples for a given *path*. 
Limit to *principals* if given.""" + # get requested principals. + principals = set(principals) if principals is not None else self._principals + # filter through requested principals. + principals &= self._principals # predicate lookup - predicates = {self.schema.predicate(pred) for pred in predicates} + principals = {self.schema.predicate(pred) for pred in principals} # invoke pipeline - yield from self.pipeline(path, predicates) + yield from self._pipeline(path, principals) ## EOF ## diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py index 834bd99..52ce526 100644 --- a/bsie/tools/pipeline.py +++ b/bsie/tools/pipeline.py @@ -11,8 +11,6 @@ import typing # bsie imports from bsie import base -from bsie.utils.node import Node -from bsie.utils.bsfs import schema as _schema, URI, uuid as _uuid, typename from bsie.utils import bsfs, node, ns # exports @@ -36,7 +34,7 @@ class Pipeline(): """ # combined extractor schemas. - schema: _schema.Schema + _schema: bsfs.schema.Schema # node prefix. _prefix: bsfs.Namespace @@ -53,7 +51,7 @@ class Pipeline(): self._prefix = prefix self._ext2rdr = ext2rdr # compile schema from all extractors - self.schema = _schema.Schema.Union(ext.schema for ext in ext2rdr) + self._schema = bsfs.schema.Schema.Union(ext.schema for ext in ext2rdr) def __str__(self) -> str: return bsfs.typename(self) @@ -62,29 +60,47 @@ class Pipeline(): return f'{bsfs.typename(self)}(...)' def __hash__(self) -> int: - return hash((type(self), self._prefix, self.schema, tuple(self._ext2rdr), tuple(self._ext2rdr.values()))) + return hash((type(self), self._prefix, self._schema, tuple(self._ext2rdr), tuple(self._ext2rdr.values()))) def __eq__(self, other: typing.Any) -> bool: return isinstance(other, type(self)) \ - and self.schema == other.schema \ + and self._schema == other._schema \ and self._prefix == other._prefix \ and self._ext2rdr == other._ext2rdr - def predicates(self) -> typing.Iterator[_schema.Predicate]: - """Return the predicates that are extracted from a file.""" - return iter({pred for ext in self._ext2rdr for pred in ext.predicates()}) + @property + def schema(self) -> bsfs.schema.Schema: + """Return the pipeline's schema (combined from all extractors).""" + return self._schema + + @property + def principals(self) -> typing.Iterator[bsfs.schema.Predicate]: + """Return the principal predicates that can be extracted.""" + return iter({pred for ext in self._ext2rdr for pred in ext.principals}) + + def subschema(self, principals: typing.Iterable[bsfs.schema.Predicate]) -> bsfs.schema.Schema: + """Return the subset of the schema that supports the given *principals*.""" + # materialize principals + principals = set(principals) + # collect and combine schemas from extractors + return bsfs.schema.Schema.Union({ + ext.schema + for ext + in self._ext2rdr + if not set(ext.principals).isdisjoint(principals) + }) def __call__( self, - path: URI, - predicates: typing.Optional[typing.Iterable[_schema.Predicate]] = None, - ) -> typing.Iterator[typing.Tuple[Node, _schema.Predicate, typing.Any]]: - """Extract triples from the file at *path*. Optionally, limit triples to *predicates*.""" - # get predicates - predicates = set(predicates) if predicates is not None else set(self.schema.predicates()) + path: bsfs.URI, + principals: typing.Optional[typing.Iterable[bsfs.schema.Predicate]] = None, + ) -> typing.Iterator[typing.Tuple[node.Node, bsfs.schema.Predicate, typing.Any]]: + """Extract triples from the file at *path*. 
Optionally, limit triples to *principals*.""" + # get principals + principals = set(principals) if principals is not None else set(self.schema.predicates()) # get extractors - extractors = {ext for ext in self._ext2rdr if not set(ext.predicates()).isdisjoint(predicates)} + extractors = {ext for ext in self._ext2rdr if not set(ext.principals).isdisjoint(principals)} # corner-case short-cut if len(extractors) == 0: @@ -110,8 +126,8 @@ class Pipeline(): for ext in extrs: try: # get predicate/value tuples - for node, pred, value in ext.extract(subject, content, predicates): - yield node, pred, value + for subject, pred, value in ext.extract(subject, content, principals): + yield subject, pred, value except base.errors.ExtractorError as err: # critical extractor failure. -- cgit v1.2.3 From 3b41b2a4b7532c911b63b41066a75b3e1546d214 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Thu, 15 Dec 2022 17:21:20 +0100 Subject: minor test improvements and information hiding in builder --- bsie/tools/builder.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) (limited to 'bsie') diff --git a/bsie/tools/builder.py b/bsie/tools/builder.py index 24aea84..190d9bf 100644 --- a/bsie/tools/builder.py +++ b/bsie/tools/builder.py @@ -12,7 +12,6 @@ import typing # bsie imports from bsie import base from bsie.base import errors -from bsie.utils.bsfs import URI, typename from bsie.utils import bsfs # inner-module imports @@ -75,20 +74,20 @@ class ReaderBuilder(): """ # keyword arguments - kwargs: typing.Dict[str, typing.Dict[str, typing.Any]] + _kwargs: typing.Dict[str, typing.Dict[str, typing.Any]] # cached readers - cache: typing.Dict[str, base.reader.Reader] + _cache: typing.Dict[str, base.Reader] def __init__(self, kwargs: typing.Dict[str, typing.Dict[str, typing.Any]]): - self.kwargs = kwargs - self.cache = {} + self._kwargs = kwargs + self._cache = {} def build(self, name: str) -> base.Reader: """Return an instance for the qualified class name.""" # return cached instance - if name in self.cache: - return self.cache[name] + if name in self._cache: + return self._cache[name] # check name and get module/class components module_name, class_name = _unpack_name(name) @@ -97,14 +96,14 @@ class ReaderBuilder(): cls = _safe_load(module_name, class_name) # get kwargs - kwargs = self.kwargs.get(name, {}) + kwargs = self._kwargs.get(name, {}) if not isinstance(kwargs, dict): raise TypeError(f'expected a kwargs dict, found {bsfs.typename(kwargs)}') try: # build, cache, and return instance obj = cls(**kwargs) # cache instance - self.cache[name] = obj + self._cache[name] = obj # return instance return obj @@ -125,19 +124,19 @@ class ExtractorBuilder(): """ # build specifications - specs: typing.List[typing.Dict[str, typing.Dict[str, typing.Any]]] + _specs: typing.List[typing.Dict[str, typing.Dict[str, typing.Any]]] def __init__(self, specs: typing.List[typing.Dict[str, typing.Dict[str, typing.Any]]]): - self.specs = specs + self._specs = specs def __iter__(self) -> typing.Iterator[int]: """Iterate over extractor specifications.""" - return iter(range(len(self.specs))) + return iter(range(len(self._specs))) def build(self, index: int) -> base.Extractor: """Return an instance of the n'th extractor (n=*index*).""" # get build instructions - specs = self.specs[index] + specs = self._specs[index] # check specs structure. 
expecting[{name: {kwargs}}] if not isinstance(specs, dict): -- cgit v1.2.3 From 5850ff2bcb1052883cf301590126609b0657fbc9 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sun, 18 Dec 2022 13:37:02 +0100 Subject: cosmetic changes --- bsie/__init__.py | 2 +- bsie/apps/index.py | 28 +++++++++++----------------- bsie/extractor/generic/constant.py | 2 +- bsie/extractor/generic/path.py | 3 ++- bsie/extractor/generic/stat.py | 2 +- 5 files changed, 16 insertions(+), 21 deletions(-) (limited to 'bsie') diff --git a/bsie/__init__.py b/bsie/__init__.py index 96e6953..8d2308c 100644 --- a/bsie/__init__.py +++ b/bsie/__init__.py @@ -9,7 +9,7 @@ import collections import typing # constants -T_VERSION_INFO = collections.namedtuple('T_VERSION_INFO', ('major', 'minor', 'micro')) +T_VERSION_INFO = collections.namedtuple('T_VERSION_INFO', ('major', 'minor', 'micro')) # pylint: disable=invalid-name version_info = T_VERSION_INFO(0, 0, 1) # exports diff --git a/bsie/apps/index.py b/bsie/apps/index.py index e37684b..1dbfdd8 100644 --- a/bsie/apps/index.py +++ b/bsie/apps/index.py @@ -98,23 +98,17 @@ def main(argv): walk(print) return None - else: - # initialize bsfs - # NOTE: With presistent storages, the schema migration will be a seaparte operation. - # Here, we'd simply examine the schema and potentially discard more predicates. - store = bsfs.Open({ - 'Graph': { - 'user': args.user, - 'backend': { - 'SparqlStore': {}}, - }}) - store.migrate(bsie.schema) - # process files - def handle(node, pred, value): - store.node(node.node_type, node.uri).set(pred.uri, value) - walk(handle) - # return store - return store + # initialize bsfs + # NOTE: With presistent storages, the schema migration will be a seaparte operation. + # Here, we'd simply examine the schema and potentially discard more predicates. + store = bsfs.Open(bsfs.init_sparql_store(args.user)) + store.migrate(bsie.schema) + # process files + def handle(node, pred, value): + store.node(node.node_type, node.uri).set(pred.uri, value) + walk(handle) + # return store + return store diff --git a/bsie/extractor/generic/constant.py b/bsie/extractor/generic/constant.py index cdb2ef6..11384e6 100644 --- a/bsie/extractor/generic/constant.py +++ b/bsie/extractor/generic/constant.py @@ -35,7 +35,7 @@ class Constant(extractor.Extractor): super().__init__(bsfs.schema.Schema.from_string(extractor.SCHEMA_PREAMBLE + schema)) # NOTE: Raises a KeyError if the predicate is not part of the schema self._tuples = tuple((self.schema.predicate(p_uri), value) for p_uri, value in tuples) - # FIXME: use schema instance for value checking + # TODO: use schema instance for value checking def __eq__(self, other: typing.Any) -> bool: return super().__eq__(other) \ diff --git a/bsie/extractor/generic/path.py b/bsie/extractor/generic/path.py index 23ae80b..7018e12 100644 --- a/bsie/extractor/generic/path.py +++ b/bsie/extractor/generic/path.py @@ -62,7 +62,8 @@ class Path(extractor.Extractor): def __filename(self, path: str) -> typing.Optional[str]: try: return os.path.basename(path) - except Exception: # some error, skip. + except Exception: # pylint: disable=broad-except # we explicitly want to catch everything + # some error, skip # FIXME: some kind of error reporting (e.g. logging)? 
# Options: (a) Fail silently (current); (b) Skip and report to log; # (c) Raise ExtractorError (aborts extraction); (d) separate content type diff --git a/bsie/extractor/generic/stat.py b/bsie/extractor/generic/stat.py index 1dcfedf..0b9ce29 100644 --- a/bsie/extractor/generic/stat.py +++ b/bsie/extractor/generic/stat.py @@ -63,7 +63,7 @@ class Stat(extractor.Extractor): """Return the file size.""" try: return content.st_size - except Exception: + except Exception: # pylint: disable=broad-except # we explicitly want to catch everything # FIXME: some kind of error reporting (e.g. logging) return None -- cgit v1.2.3 From 057e09d6537bf5c39815661a75819081e3e5fda7 Mon Sep 17 00:00:00 2001 From: Matthias Baumgartner Date: Sun, 18 Dec 2022 13:37:59 +0100 Subject: adaptions to updates in bsfs --- bsie/tools/pipeline.py | 7 +++++-- bsie/utils/bsfs.py | 2 ++ bsie/utils/namespaces.py | 8 ++++---- 3 files changed, 11 insertions(+), 6 deletions(-) (limited to 'bsie') diff --git a/bsie/tools/pipeline.py b/bsie/tools/pipeline.py index 52ce526..20e8ddf 100644 --- a/bsie/tools/pipeline.py +++ b/bsie/tools/pipeline.py @@ -18,6 +18,9 @@ __all__: typing.Sequence[str] = ( 'Pipeline', ) +# constants +FILE_PREFIX = 'file#' + ## code ## logger = logging.getLogger(__name__) @@ -48,7 +51,7 @@ class Pipeline(): ext2rdr: typing.Dict[base.extractor.Extractor, typing.Optional[base.reader.Reader]] ): # store core members - self._prefix = prefix + self._prefix = prefix + FILE_PREFIX self._ext2rdr = ext2rdr # compile schema from all extractors self._schema = bsfs.schema.Schema.Union(ext.schema for ext in ext2rdr) @@ -114,7 +117,7 @@ class Pipeline(): # create subject for file uuid = bsfs.uuid.UCID.from_path(path) - subject = node.Node(ns.bsfs.File, self._prefix + 'file#' + uuid) + subject = node.Node(ns.bsfs.File, self._prefix[uuid]) # extract information for rdr, extrs in rdr2ext.items(): diff --git a/bsie/utils/bsfs.py b/bsie/utils/bsfs.py index c48049d..0b88479 100644 --- a/bsie/utils/bsfs.py +++ b/bsie/utils/bsfs.py @@ -9,6 +9,7 @@ import typing # bsfs imports from bsfs import Open, schema +from bsfs.apps.init import init_sparql_store from bsfs.namespace import Namespace from bsfs.utils import URI, typename, uuid @@ -17,6 +18,7 @@ __all__: typing.Sequence[str] = ( 'Namespace', 'Open', 'URI', + 'init_sparql_store', 'schema', 'typename', 'uuid', diff --git a/bsie/utils/namespaces.py b/bsie/utils/namespaces.py index d6e1c72..a29fc1b 100644 --- a/bsie/utils/namespaces.py +++ b/bsie/utils/namespaces.py @@ -11,10 +11,10 @@ import typing from . import bsfs as _bsfs # constants -bse = _bsfs.Namespace('http://bsfs.ai/schema/Entity#') -bsfs = _bsfs.Namespace('http://bsfs.ai/schema/') -bsm = _bsfs.Namespace('http://bsfs.ai/schema/Meta#') -xsd = _bsfs.Namespace('http://www.w3.org/2001/XMLSchema#') +bse = _bsfs.Namespace('http://bsfs.ai/schema/Entity') +bsfs = _bsfs.Namespace('http://bsfs.ai/schema', fsep='/') +bsm = _bsfs.Namespace('http://bsfs.ai/schema/Meta') +xsd = _bsfs.Namespace('http://www.w3.org/2001/XMLSchema') # export __all__: typing.Sequence[str] = ( -- cgit v1.2.3
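For orientation, the pieces introduced by the patches above fit together roughly as sketched below. This is an illustrative sketch only, mirroring bsie/apps/index.py; the user namespace, the collected predicate URI, and the example path are placeholder values, not part of the patch series.

# illustrative usage sketch (not part of the patches), mirroring bsie/apps/index.py
from bsie.lib import BSIE
from bsie.tools import builder
from bsie.utils import bsfs

# build readers and extractors from their qualified class names
rbuild = builder.ReaderBuilder({})
ebuild = builder.ExtractorBuilder([
    {'bsie.extractor.generic.path.Path': {}},
    {'bsie.extractor.generic.stat.Stat': {}},
])

# assemble the extraction pipeline under a user namespace (placeholder URI)
pbuild = builder.PipelineBuilder(bsfs.Namespace('http://example.com/me/'), rbuild, ebuild)
pipeline = pbuild.build()

# restrict extraction to the filename predicate; discard nothing
bsie = BSIE(pipeline, collect=['http://bsfs.ai/schema/Entity#filename'], discard=None)

# extract triples from a (placeholder) file and print them
for subject, predicate, value in bsie.from_file('/tmp/example.txt'):
    print(subject.uri, predicate.uri, value)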