From 7e2716bde40bee31f7aab6d04bedb756705d4574 Mon Sep 17 00:00:00 2001 From: hsolbrig Date: Thu, 22 Apr 2021 13:54:21 -0500 Subject: [PATCH] Checkpoint --- .github/workflows/main.yaml | 69 +++++++++ .github/workflows/pr-test.yaml | 27 ++++ .github/workflows/pypi-publish.yaml | 34 +++++ .gitignore | 3 + AUTHORS | 2 + ChangeLog | 7 + Pipfile | 19 +++ checkout_outputs.sh | 3 + db.sh | 1 + dr.sh | 1 + ds.sh | 1 + hide_test_changes.sh | 3 + linkml_model/__init__.py | 10 -- linkml_runtime/dumpers/__init__.py | 7 + linkml_runtime/dumpers/dumper_root.py | 27 ++++ linkml_runtime/dumpers/json_dumper.py | 78 +++++----- linkml_runtime/dumpers/rdf_dumper.py | 138 +++++++++--------- linkml_runtime/dumpers/yaml_dumper.py | 14 +- linkml_runtime/linkml_model/README.md | 6 + linkml_runtime/linkml_model/__init__.py | 10 ++ .../linkml_model}/annotations.py | 6 +- .../linkml_model}/extensions.py | 4 +- .../linkml_model}/linkml_files.py | 109 +++++++++++++- .../linkml_model}/mappings.py | 4 +- .../linkml_model}/meta.py | 8 +- .../linkml_model}/types.py | 2 +- linkml_runtime/loaders/__init__.py | 7 + linkml_runtime/loaders/document_loader.py | 70 --------- linkml_runtime/loaders/json_loader.py | 67 +++------ linkml_runtime/loaders/loader_root.py | 106 ++++++++++---- linkml_runtime/loaders/rdf_loader.py | 119 ++++++++------- linkml_runtime/loaders/yaml_loader.py | 27 ++-- linkml_runtime/utils/compile_python.py | 2 + linkml_runtime/utils/formatutils.py | 14 -- linkml_runtime/utils/permissiblevalueimpl.py | 5 +- linkml_runtime/utils/shexutils.py | 0 linkml_runtime/utils/yamlutils.py | 5 +- requirements-dev.txt | 13 ++ requirements.txt | 23 +++ setup.cfg | 37 +++++ setup.py | 12 ++ show_test_changes.sh | 3 + tests/__init__.py | 10 ++ tests/support/clicktestcase.py | 22 +-- tests/support/compare_rdf.py | 90 ++++++++++++ tests/support/dirutils.py | 4 +- tests/support/filters.py | 4 +- tests/support/mismatchlog.py | 2 +- tests/support/test_environment.py | 6 - tests/test_issues/__init__.py | 0 tests/test_issues/environment.py | 3 + tests/test_issues/input/__init__.py | 0 .../input/issue_368.context.jsonld | 23 +++ tests/test_issues/input/issue_368.py | 68 +++++++++ tests/test_issues/input/issue_368_imports.py | 65 +++++++++ tests/test_issues/test_issue_368_enums.py | 40 +++++ tests/test_loaders_dumpers/README.md | 15 ++ tests/test_loaders_dumpers/__init__.py | 1 - tests/test_loaders_dumpers/environment.py | 1 - tests/test_loaders_dumpers/input/README.md | 0 .../input/obo_sample.expanded.jsonld | 90 ------------ .../input/obo_sample.jsonld | 58 +++----- .../input/obo_sample_nested.ttl | 23 --- .../jsonld_context/Dockerfile | 23 --- .../test_loaders_dumpers/jsonld_context/ds.sh | 0 .../jsonld_10/termci_schema.context.jsonld | 14 +- .../jsonld_11/Package.context.jsonld | 28 ---- .../jsonld_context/nginx/context_server.crt | 18 --- .../jsonld_context/nginx/context_server.key | 28 ---- .../jsonld_context/nginx/localhost.cnf | 8 - .../jsonld_context/nginx/mime.types | 98 ------------- .../jsonld_context/nginx/nginx.conf | 26 ---- ...{ldtestcase.py => loaderdumpertestcase.py} | 26 ++-- .../models/termci_schema.py | 56 +------ tests/test_loaders_dumpers/output/generated | 1 - .../output/obo_sample_context_d.json | 35 ----- .../output/obo_sample_context_ds.json | 35 ----- .../output/obo_sample_d.json | 34 ----- .../output/obo_sample_d.yaml | 22 --- .../output/obo_sample_ds.json | 34 ----- .../output/obo_sample_ds.yaml | 22 --- .../output/obo_sample_json.yaml | 22 --- .../output/obo_sample_yaml.yaml | 22 --- 
tests/test_loaders_dumpers/test_dumpers.py | 15 +- .../test_loaders_dumpers/test_ld_11_issue.py | 2 +- tests/test_loaders_dumpers/test_loaders.py | 46 +++--- tests/test_utils/__init__.py | 5 +- tests/test_utils/input/yaml1.yaml | 2 +- tests/test_utils/input/yaml2.yaml | 2 +- tests/test_utils/test_formatutils.py | 1 - tests/test_utils/test_namespaces.py | 3 +- tests/test_utils/test_yaml_utils.py | 9 +- tox.ini | 13 ++ 93 files changed, 1168 insertions(+), 1110 deletions(-) create mode 100644 .github/workflows/main.yaml create mode 100644 .github/workflows/pr-test.yaml create mode 100644 .github/workflows/pypi-publish.yaml create mode 100644 AUTHORS create mode 100644 ChangeLog create mode 100644 Pipfile create mode 100644 checkout_outputs.sh create mode 100755 db.sh create mode 100755 dr.sh create mode 100755 ds.sh create mode 100644 hide_test_changes.sh delete mode 100644 linkml_model/__init__.py create mode 100644 linkml_runtime/dumpers/dumper_root.py create mode 100644 linkml_runtime/linkml_model/README.md create mode 100644 linkml_runtime/linkml_model/__init__.py rename {linkml_model => linkml_runtime/linkml_model}/annotations.py (95%) rename {linkml_model => linkml_runtime/linkml_model}/extensions.py (97%) rename {linkml_model => linkml_runtime/linkml_model}/linkml_files.py (54%) rename {linkml_model => linkml_runtime/linkml_model}/mappings.py (94%) rename {linkml_model => linkml_runtime/linkml_model}/meta.py (99%) rename {linkml_model => linkml_runtime/linkml_model}/types.py (99%) delete mode 100644 linkml_runtime/loaders/document_loader.py create mode 100644 linkml_runtime/utils/shexutils.py create mode 100644 requirements-dev.txt create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 show_test_changes.sh create mode 100644 tests/support/compare_rdf.py create mode 100644 tests/test_issues/__init__.py create mode 100644 tests/test_issues/environment.py create mode 100644 tests/test_issues/input/__init__.py create mode 100644 tests/test_issues/input/issue_368.context.jsonld create mode 100644 tests/test_issues/input/issue_368.py create mode 100644 tests/test_issues/input/issue_368_imports.py create mode 100644 tests/test_issues/test_issue_368_enums.py create mode 100644 tests/test_loaders_dumpers/README.md create mode 100644 tests/test_loaders_dumpers/input/README.md delete mode 100644 tests/test_loaders_dumpers/input/obo_sample.expanded.jsonld delete mode 100644 tests/test_loaders_dumpers/input/obo_sample_nested.ttl mode change 100755 => 100644 tests/test_loaders_dumpers/jsonld_context/ds.sh rename tests/test_loaders_dumpers/{ldtestcase.py => loaderdumpertestcase.py} (81%) delete mode 100644 tests/test_loaders_dumpers/output/generated delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_context_d.json delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_context_ds.json delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_d.json delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_d.yaml delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_ds.json delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_ds.yaml delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_json.yaml delete mode 100644 tests/test_loaders_dumpers/output/obo_sample_yaml.yaml create mode 100644 tox.ini diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml new file mode 100644 index 00000000..bd8b7760 --- /dev/null +++ b/.github/workflows/main.yaml @@ -0,0 +1,69 @@ +name: 
Build + +on: + push: + branches: [ main ] + +jobs: + update-requirements: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Update requirements file + run: | + python -m pip install --upgrade pip + pip install pipenv-to-requirements + pipenv_to_requirements + + - name: Check in requirements.txt and requirements-dev.txt + run: | + git add requirements*.txt + if [[ ! -z $(git status -s requirements*.txt) ]] + then + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git commit -m 'Automatically generated requirements.txt and requirements-dev.txt' requirements*.txt + git push + fi + + unittests-n-commits: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7.1, 3.8, 3.9] + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pipenv + pipenv install --dev + + - name: Test with unittest + run: | + pipenv run python -m unittest + + - name: Check in test outputs + if: ${{ matrix.python-version == '3.9' }} + run: | + find tests -name output -exec git add --force {} \; + if [[ ! -z $(git status -s tests) ]] + then + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git commit -m 'Automated adding outputs from tests' tests + git push + fi diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml new file mode 100644 index 00000000..c9e92a68 --- /dev/null +++ b/.github/workflows/pr-test.yaml @@ -0,0 +1,27 @@ +name: Pull request unit tests + +on: + pull_request: + branches: [ main ] + +jobs: + + build-pipenv: + + runs-on: ubuntu-latest + strategy: + python-version: [3.7.1, 3.8, 3.9] + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install pipenv + uses: dschep/install-pipenv-action@v1 + - name: Install dependencies and test + run: | + pipenv install --dev + pipenv run python -m unittest diff --git a/.github/workflows/pypi-publish.yaml b/.github/workflows/pypi-publish.yaml new file mode 100644 index 00000000..eb47fa67 --- /dev/null +++ b/.github/workflows/pypi-publish.yaml @@ -0,0 +1,34 @@ +name: Publish Python Package + +on: + release: + types: [created] + +jobs: + build-n-publish: + name: Build and publish Python 🐍 distributions 📦 to PyPI + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install wheel + + - name: build a binary wheel dist + run: | + rm -fr dist + python setup.py bdist_wheel sdist + + - name: Publish distribution 📦 to PyPI + uses: pypa/gh-action-pypi-publish@v1.2.2 + with: + user: __token__ + password: ${{ secrets.pypi_password }} diff --git a/.gitignore b/.gitignore index 1430a99d..2c2b1946 100644 --- a/.gitignore +++ b/.gitignore @@ -137,3 +137,6 @@ tests/test_config.ini # Don't lock Pipfile.lock + +# No Pycharm +.idea/ diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..7c5c1694 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,2 @@ +Harold Solbrig +hsolbrig diff --git a/ChangeLog 
b/ChangeLog new file mode 100644 index 00000000..ec4e9733 --- /dev/null +++ b/ChangeLog @@ -0,0 +1,7 @@ +CHANGES +======= + +* Remove YAMLRoot dependency on JsonObj +* Checkpoint +* checkpoint +* Initial commit diff --git a/Pipfile b/Pipfile new file mode 100644 index 00000000..63007428 --- /dev/null +++ b/Pipfile @@ -0,0 +1,19 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +hbreader = "*" +pyld = { git = "https://github.com/hsolbrig/pyld"} +jsonasobj = { git = "https://github.com/hsolbrig/jsonasobj"} +pyyaml = ">=5.1" +rdflib = "*" +rdflib-pyld-compat = "*" +rdflib-jsonld = "*" +click = "*" +prefixcommons = "*" +shexjsg = "*" + +[dev-packages] +requests = "*" diff --git a/checkout_outputs.sh b/checkout_outputs.sh new file mode 100644 index 00000000..801fa3dc --- /dev/null +++ b/checkout_outputs.sh @@ -0,0 +1,3 @@ +#!/bin/bash +# checkout (update) all of the outputs to revert to what is on github +git checkout `find tests -name output | xargs` diff --git a/db.sh b/db.sh new file mode 100755 index 00000000..e7298637 --- /dev/null +++ b/db.sh @@ -0,0 +1 @@ +docker image build . -t context_server diff --git a/dr.sh b/dr.sh new file mode 100755 index 00000000..2a19d1f8 --- /dev/null +++ b/dr.sh @@ -0,0 +1 @@ +docker run -it --rm -d -p 8000:80 -p 8443:443 --name context_server -v `pwd`/:/usr/share/nginx/html context_server diff --git a/ds.sh b/ds.sh new file mode 100755 index 00000000..311a02a6 --- /dev/null +++ b/ds.sh @@ -0,0 +1 @@ +docker stop context_server diff --git a/hide_test_changes.sh b/hide_test_changes.sh new file mode 100644 index 00000000..1f0d07b8 --- /dev/null +++ b/hide_test_changes.sh @@ -0,0 +1,3 @@ +#!/bin/bash +# Make all of the test output files invisible to git +git update-index --assume-unchanged `git status -s | grep tests | grep \/output\/ | sed 's/.* tests\//tests\//' | xargs` diff --git a/linkml_model/__init__.py b/linkml_model/__init__.py deleted file mode 100644 index abbd5540..00000000 --- a/linkml_model/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from linkml_model.types import String, Integer, Boolean, Float, Double, Decimal, Time, Date, Datetime, Uriorcurie, Uri, \ - Ncname, Objectidentifier, Nodeidentifier -from linkml_model.extensions import Extension, Extensible -from linkml_model.annotations import Annotation, Annotatable -from linkml_model.meta import ElementName, SchemaDefinitionName, TypeDefinitionName, SubsetDefinitionName, DefinitionName, \ - EnumDefinitionName, SlotDefinitionName, ClassDefinitionName, PrefixPrefixPrefix, LocalNameLocalNameSource, \ - AltDescriptionSource, PermissibleValueText, Element, SchemaDefinition, TypeDefinition, SubsetDefinition, \ - Definition, EnumDefinition, SlotDefinition, ClassDefinition, Prefix, LocalName, Example, AltDescription, \ - PermissibleValue, PvFormulaOptions - diff --git a/linkml_runtime/dumpers/__init__.py b/linkml_runtime/dumpers/__init__.py index e69de29b..0611c5be 100644 --- a/linkml_runtime/dumpers/__init__.py +++ b/linkml_runtime/dumpers/__init__.py @@ -0,0 +1,7 @@ +from linkml_runtime.dumpers.json_dumper import JSONDumper +from linkml_runtime.dumpers.rdf_dumper import RDFDumper +from linkml_runtime.dumpers.yaml_dumper import YAMLDumper + +json_dumper = JSONDumper() +rdf_dumper = RDFDumper() +yaml_dumper = YAMLDumper() diff --git a/linkml_runtime/dumpers/dumper_root.py b/linkml_runtime/dumpers/dumper_root.py new file mode 100644 index 00000000..e81ab0ef --- /dev/null +++ b/linkml_runtime/dumpers/dumper_root.py @@ -0,0 +1,27 @@ +from abc import ABC, 
abstractmethod + +from linkml_runtime.utils.yamlutils import YAMLRoot + + +class Dumper(ABC): + """ Abstract base class for all dumpers """ + + def dump(self, element: YAMLRoot, to_file: str, **_) -> None: + """ + Write element to to_file + :param element: LinkML object to be dumped + :param to_file: file to dump to + :@param _: method specific arguments + """ + with open(to_file, 'w') as output_file: + output_file.write(self.dumps(element, **_)) + + @abstractmethod + def dumps(self, element: YAMLRoot, **_) -> str: + """ + Convert element to a string + @param element: YAMLRoot object to be rendered + @param _: method specific arguments + @return: stringified representation of element + """ + raise NotImplementedError() diff --git a/linkml_runtime/dumpers/json_dumper.py b/linkml_runtime/dumpers/json_dumper.py index d4168332..6aaa4658 100644 --- a/linkml_runtime/dumpers/json_dumper.py +++ b/linkml_runtime/dumpers/json_dumper.py @@ -1,47 +1,51 @@ +import json from typing import Dict +from linkml_runtime.dumpers.dumper_root import Dumper from linkml_runtime.utils.context_utils import CONTEXTS_PARAM_TYPE from linkml_runtime.utils.yamlutils import YAMLRoot, as_json_object -from jsonasobj import as_json -def remove_empty_items(obj: Dict) -> Dict: - """ - Remove empty items from obj - :param obj: - :return: copy of dictionary with empty lists/dicts and Nones removed - """ - return {k: v for k, v in obj.items() if not (v is None or v == [] or v == {})} +class JSONDumper(Dumper): + def dump(self, element: YAMLRoot, to_file: str, contexts: CONTEXTS_PARAM_TYPE = None) -> None: + """ + Write element as json to to_file + :param element: LinkML object to be serialized as YAML + :param to_file: file to write to + :param contexts: JSON-LD context(s) in the form of: + * file name + * URL + * JSON String + * dict + * JSON Object + * A list containing elements of any type named above + """ + super().dump(element, to_file, contexts=contexts) -def dump(element: YAMLRoot, to_file: str, contexts: CONTEXTS_PARAM_TYPE = None) -> None: - """ - Write element as json to to_file - :param element: LinkML object to be serialized as YAML - :param to_file: file to write to - :param contexts: JSON-LD context(s) in the form of: - * file name - * URL - * JSON String - * dict - * JSON Object - * A list containing elements of any type named above - """ - with open(to_file, 'w') as outf: - outf.write(dumps(element, contexts)) + def dumps(self, element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE = None) -> str: + """ + Return element as a JSON or a JSON-LD string + :param element: LinkML object to be emitted + :param contexts: JSON-LD context(s) in the form of: + * file name + * URL + * JSON String + * dict + * JSON Object + * A list containing elements of any type named above + :return: JSON Object representing the element + """ + return json.dumps(as_json_object(element, contexts), + default=lambda o: self.remove_empty_items(o) if isinstance(o, YAMLRoot) else json.JSONDecoder().decode(o), + indent=' ') -def dumps(element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE = None) -> str: - """ - Return element as a JSON or a JSON-LD string - :param element: LinkML object to be emitted - :param contexts: JSON-LD context(s) in the form of: - * file name - * URL - * JSON String - * dict - * JSON Object - * A list containing elements of any type named above - :return: JSON Object representing the element - """ - return as_json(as_json_object(element, contexts), filtr=remove_empty_items, indent=' ') + @staticmethod + def remove_empty_items(obj: 
Dict) -> Dict: + """ + Remove empty items from obj + :param obj: + :return: copy of dictionary with empty lists/dicts and Nones removed + """ + return {k: v for k, v in obj.__dict__.items() if not (v is None or v == [] or v == {})} diff --git a/linkml_runtime/dumpers/rdf_dumper.py b/linkml_runtime/dumpers/rdf_dumper.py index 41180c0a..fcc81a83 100644 --- a/linkml_runtime/dumpers/rdf_dumper.py +++ b/linkml_runtime/dumpers/rdf_dumper.py @@ -6,82 +6,82 @@ from rdflib import Graph from rdflib_pyld_compat import rdflib_graph_from_pyld_jsonld -from linkml_runtime.dumpers import json_dumper + +from linkml_runtime.dumpers.dumper_root import Dumper from linkml_runtime.utils.context_utils import CONTEXTS_PARAM_TYPE, CONTEXT_TYPE from linkml_runtime.utils.yamlutils import YAMLRoot -def as_rdf_graph(element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE, namespaces: CONTEXT_TYPE = None) -> Graph: - """ - Convert element into an RDF graph guided by the context(s) in contexts - :param element: element to represent in RDF - :param contexts: JSON-LD context(s) in the form of: - * file name - * URL - * JSON String - * dict - * JSON Object - * A list containing elements of any type named above - :param namespaces: A file name, URL, JSON String, dict or JSON object that includes the set of namespaces to - be bound to the return graph. If absent, contexts get used - :return: rdflib Graph containing element - """ - if isinstance(contexts, list): - inp_contexts = [json.loads(hbread(c)) for c in contexts] - else: - inp_contexts = json.loads(hbread(contexts)) - - rdf_jsonld = expand(json_dumper.dumps(element), options=dict(expandContext=inp_contexts)) - g = rdflib_graph_from_pyld_jsonld(rdf_jsonld) - - if namespaces is not None: - ns_source = json.loads(hbread(namespaces)) - else: - ns_source = inp_contexts +class RDFDumper(Dumper): + def as_rdf_graph(self, element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE, namespaces: CONTEXT_TYPE = None) -> Graph: + """ + Convert element into an RDF graph guided by the context(s) in contexts + :param element: element to represent in RDF + :param contexts: JSON-LD context(s) in the form of: + * file name + * URL + * JSON String + * dict + * JSON Object + * A list containing elements of any type named above + :param namespaces: A file name, URL, JSON String, dict or JSON object that includes the set of namespaces to + be bound to the return graph. 
If absent, contexts get used + :return: rdflib Graph containing element + """ + if isinstance(contexts, list): + inp_contexts = [json.loads(hbread(c)) for c in contexts] + else: + inp_contexts = json.loads(hbread(contexts)) - # TODO: make a utility out of this or add it to prefixcommons - if ns_source and '@context' in ns_source: - ns_contexts = ns_source['@context'] - if isinstance(ns_contexts, dict): - ns_contexts = [ns_contexts] - for ns_context in ns_contexts: - if isinstance(ns_context, dict): - for pfx, ns in ns_context.items(): - if isinstance(ns, dict): - if '@id' in ns and ns.get('@prefix', False): - ns = ns['@id'] - else: - continue - if not pfx.startswith('@'): - g.bind(pfx, ns) + from linkml_runtime.dumpers import json_dumper + rdf_jsonld = expand(json_dumper.dumps(element), options=dict(expandContext=inp_contexts)) + g = rdflib_graph_from_pyld_jsonld(rdf_jsonld) - return g + if namespaces is not None: + ns_source = json.loads(hbread(namespaces)) + else: + ns_source = inp_contexts + # TODO: make a utility out of this or add it to prefixcommons + if ns_source and '@context' in ns_source: + ns_contexts = ns_source['@context'] + if isinstance(ns_contexts, dict): + ns_contexts = [ns_contexts] + for ns_context in ns_contexts: + if isinstance(ns_context, dict): + for pfx, ns in ns_context.items(): + if isinstance(ns, dict): + if '@id' in ns and ns.get('@prefix', False): + ns = ns['@id'] + else: + continue + if not pfx.startswith('@'): + g.bind(pfx, ns) -def dump(element: YAMLRoot, to_file: str, contexts: CONTEXTS_PARAM_TYPE, fmt: str = 'turtle') -> None: - """ - Write element as rdf to to_file - :param element: LinkML object to be emitted - :param to_file: file to write to - :param contexts: JSON-LD context(s) in the form of: - * file name - * URL - * JSON String - * dict - * JSON Object - * A list containing elements of any type named above - :param fmt: RDF format - """ - with open(to_file, 'w') as outf: - outf.write(dumps(element, contexts, fmt)) + return g + def dump(self, element: YAMLRoot, to_file: str, contexts: CONTEXTS_PARAM_TYPE = None, fmt: str = 'turtle') -> None: + """ + Write element as rdf to to_file + :param element: LinkML object to be emitted + :param to_file: file to write to + :param contexts: JSON-LD context(s) in the form of: + * file name + * URL + * JSON String + * dict + * JSON Object + * A list containing elements of any type named above + :param fmt: RDF format + """ + super().dump(element, to_file, contexts=contexts, fmt=fmt) -def dumps(element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE, fmt: Optional[str] = 'turtle') -> str: - """ - Convert element into an RDF graph guided by the context(s) in contexts - :param element: element to represent in RDF - :param contexts: JSON-LD context(s) in the form of a file or URL, a json string or a json obj - :param fmt: rdf format - :return: rdflib Graph containing element - """ - return as_rdf_graph(element, contexts).serialize(format=fmt).decode() + def dumps(self, element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE = None, fmt: Optional[str] = 'turtle') -> str: + """ + Convert element into an RDF graph guided by the context(s) in contexts + :param element: element to represent in RDF + :param contexts: JSON-LD context(s) in the form of a file or URL, a json string or a json obj + :param fmt: rdf format + :return: rdflib Graph containing element + """ + return self.as_rdf_graph(element, contexts).serialize(format=fmt).decode() diff --git a/linkml_runtime/dumpers/yaml_dumper.py b/linkml_runtime/dumpers/yaml_dumper.py index 
cd6f5a1d..692e7d72 100644 --- a/linkml_runtime/dumpers/yaml_dumper.py +++ b/linkml_runtime/dumpers/yaml_dumper.py @@ -1,13 +1,11 @@ import yaml -from linkml_runtime.utils.yamlutils import YAMLRoot +from linkml_runtime.dumpers.dumper_root import Dumper +from linkml_runtime.utils.yamlutils import YAMLRoot -def dump(element: YAMLRoot, to_file: str) -> None: - """ Emit element to to_file """ - with open(to_file, 'w') as outf: - outf.write(dumps(element)) +class YAMLDumper(Dumper): -def dumps(element: YAMLRoot) -> str: - """ Return element formatted as a YAML string """ - return yaml.dump(element, Dumper=yaml.SafeDumper, sort_keys=False) + def dumps(self, element: YAMLRoot, **kwargs) -> str: + """ Return element formatted as a YAML string """ + return yaml.dump(element, Dumper=yaml.SafeDumper, sort_keys=False, **kwargs) diff --git a/linkml_runtime/linkml_model/README.md b/linkml_runtime/linkml_model/README.md new file mode 100644 index 00000000..126ad654 --- /dev/null +++ b/linkml_runtime/linkml_model/README.md @@ -0,0 +1,6 @@ +# linkml_model +This is a _copy_ of the root python files in the [linkml-model](https://github.com/linkml/linkml-model) package. + +We maintain this as a copy rather than importing it because of a circular dependency -- linkml-model +needs the runtime to operate and the runtime needs linkml-model. + diff --git a/linkml_runtime/linkml_model/__init__.py b/linkml_runtime/linkml_model/__init__.py new file mode 100644 index 00000000..a486e778 --- /dev/null +++ b/linkml_runtime/linkml_model/__init__.py @@ -0,0 +1,10 @@ +from linkml_runtime.linkml_model.types import String, Integer, Boolean, Float, Double, Decimal, Time, Date, Datetime, Uriorcurie, Uri, \ + Ncname, Objectidentifier, Nodeidentifier +from linkml_runtime.linkml_model.extensions import Extension, Extensible +from linkml_runtime.linkml_model.annotations import Annotation, Annotatable +from linkml_runtime.linkml_model.meta import ElementName, SchemaDefinitionName, TypeDefinitionName, SubsetDefinitionName, DefinitionName, \ + EnumDefinitionName, SlotDefinitionName, ClassDefinitionName, PrefixPrefixPrefix, LocalNameLocalNameSource, \ + AltDescriptionSource, PermissibleValueText, Element, SchemaDefinition, TypeDefinition, SubsetDefinition, \ + Definition, EnumDefinition, SlotDefinition, ClassDefinition, Prefix, LocalName, Example, AltDescription, \ + PermissibleValue, PvFormulaOptions + diff --git a/linkml_model/annotations.py b/linkml_runtime/linkml_model/annotations.py similarity index 95% rename from linkml_model/annotations.py rename to linkml_runtime/linkml_model/annotations.py index f1221790..93fa9be6 100644 --- a/linkml_model/annotations.py +++ b/linkml_runtime/linkml_model/annotations.py @@ -1,5 +1,5 @@ # Auto generated from annotations.yaml by pythongen.py version: 0.9.0 -# Generation date: 2021-03-25 11:34 +# Generation date: 2021-04-05 18:10 # Schema: annotations # # id: https://w3id.org/linkml/annotations @@ -21,8 +21,8 @@ from rdflib import Namespace, URIRef from linkml_runtime.utils.curienamespace import CurieNamespace from linkml_runtime.utils.metamodelcore import URIorCURIE -from linkml_model.extensions import Extension -from linkml_model.types import String, Uriorcurie +from linkml_runtime.linkml_model.extensions import Extension +from linkml_runtime.linkml_model.types import String, Uriorcurie metamodel_version = "1.7.0" diff --git a/linkml_model/extensions.py b/linkml_runtime/linkml_model/extensions.py similarity index 97% rename from linkml_model/extensions.py rename to linkml_runtime/linkml_model/extensions.py 
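The class-based dumper API above replaces the old module-level dump/dumps functions with a Dumper root class and the singleton instances exported from linkml_runtime.dumpers. A minimal usage sketch follows; `element`, the generated class it would come from, and the context file name are illustrative placeholders, not part of this patch.

```python
from linkml_runtime.dumpers import json_dumper, yaml_dumper, rdf_dumper

# 'element' stands in for any instance of a pythongen-generated YAMLRoot class;
# the JSON-LD context file name is likewise a placeholder.
yaml_text = yaml_dumper.dumps(element)                                      # YAML string
json_dumper.dump(element, "element.json")                                   # plain JSON written to a file
jsonld_text = json_dumper.dumps(element, contexts="model.context.jsonld")   # JSON-LD string
ttl_text = rdf_dumper.dumps(element, contexts="model.context.jsonld", fmt="turtle")
```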
index ca70677c..656199c9 100644 --- a/linkml_model/extensions.py +++ b/linkml_runtime/linkml_model/extensions.py @@ -1,5 +1,5 @@ # Auto generated from extensions.yaml by pythongen.py version: 0.9.0 -# Generation date: 2021-03-25 11:34 +# Generation date: 2021-04-05 18:10 # Schema: extensions # # id: https://w3id.org/linkml/extensions @@ -21,7 +21,7 @@ from rdflib import Namespace, URIRef from linkml_runtime.utils.curienamespace import CurieNamespace from linkml_runtime.utils.metamodelcore import URIorCURIE -from linkml_model.types import String, Uriorcurie +from linkml_runtime.linkml_model.types import String, Uriorcurie metamodel_version = "1.7.0" diff --git a/linkml_model/linkml_files.py b/linkml_runtime/linkml_model/linkml_files.py similarity index 54% rename from linkml_model/linkml_files.py rename to linkml_runtime/linkml_model/linkml_files.py index 3a282d10..ddfb7864 100644 --- a/linkml_model/linkml_files.py +++ b/linkml_runtime/linkml_model/linkml_files.py @@ -84,7 +84,7 @@ def _build_path(source: Source, fmt: Format) -> str: def _build_loc(base: str, source: Source, fmt: Format) -> str: - return f"{base}{_build_path(source, fmt)}" + return f"{base}{_build_path(source, fmt)}".replace('blob/', '') def URL_FOR(source: Source, fmt: Format) -> str: @@ -134,3 +134,110 @@ def tag_to_commit(tag: str) -> str: return tag_to_commit(release) +class ModelFile: + class ModelLoc: + def __init__(self, model: Source, fmt: Format) -> str: + self._model = model + self._format = fmt + + def __str__(self): + return f"{self._model.value}.{self._format.value}" + + def __repr__(self): + return str(self) + + @property + def url(self) -> str: + return URL_FOR(self._model, self._format) + + @property + def file(self) -> str: + return LOCAL_PATH_FOR(self._model, self._format) + + def github_loc(self, tag: Optional[str] = None, branch: Optional[str] = None, release: ReleaseTag = None) -> str: + if not tag and not branch and not release: + return GITHUB_IO_PATH_FOR(self._model, self._format) + if tag: + return GITHUB_PATH_FOR(self._model, self._format, tag, branch or "main") + else: + return GITHUB_PATH_FOR(self._model, self._format, release or ReleaseTag.CURRENT, branch or "main") + + def __init__(self, model: Source) -> None: + self._model = model + + def __str__(self): + return self._model.value + + def __repr__(self): + return str(self) + + @property + def yaml(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.YAML) + + @property + def graphql(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.GRAPHQL) + + @property + def html(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.HTML) + + @property + def json(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.JSON) + + @property + def jsonld(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.JSONLD) + + @property + def jsonschema(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.JSON_SCHEMA) + + @property + def model_jsonld(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.NATIVE_JSONLD) + + @property + def model_rdf(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.NATIVE_RDF) + + @property + def model_shexc(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.NATIVE_SHEXC) + + @property + def model_shexj(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.NATIVE_SHEXJ) + + @property + def owl(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.OWL) + + @property + def python(self) -> 
ModelLoc: + return ModelFile.ModelLoc(self._model, Format.PYTHON) + + @property + def rdf(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.RDF) + + @property + def shexc(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.SHEXC) + + @property + def shexj(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.SHEXJ) + + @property + def yaml(self) -> ModelLoc: + return ModelFile.ModelLoc(self._model, Format.YAML) + + +meta = ModelFile(Source.META) +types = ModelFile(Source.TYPES) +annotations = ModelFile(Source.ANNOTATIONS) +extensions = ModelFile(Source.EXTENSIONS) +mappings = ModelFile(Source.MAPPINGS) diff --git a/linkml_model/mappings.py b/linkml_runtime/linkml_model/mappings.py similarity index 94% rename from linkml_model/mappings.py rename to linkml_runtime/linkml_model/mappings.py index f81d9df5..3a3add32 100644 --- a/linkml_model/mappings.py +++ b/linkml_runtime/linkml_model/mappings.py @@ -1,5 +1,5 @@ # Auto generated from mappings.yaml by pythongen.py version: 0.9.0 -# Generation date: 2021-03-25 11:34 +# Generation date: 2021-04-05 18:09 # Schema: mappings # # id: https://w3id.org/linkml/mappings @@ -21,7 +21,7 @@ from rdflib import Namespace, URIRef from linkml_runtime.utils.curienamespace import CurieNamespace from linkml_runtime.utils.metamodelcore import URIorCURIE -from linkml_model.types import Uriorcurie +from linkml_runtime.linkml_model.types import Uriorcurie metamodel_version = "1.7.0" diff --git a/linkml_model/meta.py b/linkml_runtime/linkml_model/meta.py similarity index 99% rename from linkml_model/meta.py rename to linkml_runtime/linkml_model/meta.py index 2f24985e..d7f7f328 100644 --- a/linkml_model/meta.py +++ b/linkml_runtime/linkml_model/meta.py @@ -1,5 +1,5 @@ # Auto generated from meta.yaml by pythongen.py version: 0.9.0 -# Generation date: 2021-03-25 11:34 +# Generation date: 2021-04-05 18:10 # Schema: meta # # id: https://w3id.org/linkml/meta @@ -21,9 +21,9 @@ from rdflib import Namespace, URIRef from linkml_runtime.utils.curienamespace import CurieNamespace from linkml_runtime.utils.metamodelcore import Bool, NCName, URI, URIorCURIE, XSDDateTime -from linkml_model.annotations import Annotation -from linkml_model.extensions import Extension -from linkml_model.types import Boolean, Datetime, Integer, Ncname, String, Uri, Uriorcurie +from linkml_runtime.linkml_model.annotations import Annotation +from linkml_runtime.linkml_model.extensions import Extension +from linkml_runtime.linkml_model.types import Boolean, Datetime, Integer, Ncname, String, Uri, Uriorcurie metamodel_version = "1.7.0" diff --git a/linkml_model/types.py b/linkml_runtime/linkml_model/types.py similarity index 99% rename from linkml_model/types.py rename to linkml_runtime/linkml_model/types.py index 8ed73a49..f74d7ff3 100644 --- a/linkml_model/types.py +++ b/linkml_runtime/linkml_model/types.py @@ -1,5 +1,5 @@ # Auto generated from types.yaml by pythongen.py version: 0.9.0 -# Generation date: 2021-03-25 11:34 +# Generation date: 2021-04-05 18:10 # Schema: types # # id: https://w3id.org/linkml/types diff --git a/linkml_runtime/loaders/__init__.py b/linkml_runtime/loaders/__init__.py index e69de29b..35ec317b 100644 --- a/linkml_runtime/loaders/__init__.py +++ b/linkml_runtime/loaders/__init__.py @@ -0,0 +1,7 @@ +from linkml_runtime.loaders.json_loader import JSONLoader +from linkml_runtime.loaders.rdf_loader import RDFLoader +from linkml_runtime.loaders.yaml_loader import YAMLLoader + +json_loader = JSONLoader() +rdf_loader = RDFLoader() +yaml_loader 
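The new ModelFile/ModelLoc accessors added to linkml_files.py above are easiest to see by example. A sketch, assuming the pre-existing Source and Format enums and the URL_FOR/LOCAL_PATH_FOR/GITHUB_* helpers behave as their names suggest; the printed values shown in comments are indicative only.

```python
from linkml_runtime.linkml_model.linkml_files import meta, types

print(str(meta.yaml))                    # rendering name, e.g. "meta.yaml"
print(meta.yaml.url)                     # canonical URL for the YAML rendering of the metamodel
print(types.jsonld.file)                 # local path of the types JSON-LD artifact
print(meta.owl.github_loc())             # github.io location when no tag/branch/release is given
print(meta.owl.github_loc(tag="1.7.0"))  # pinned to a specific tag
```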
= YAMLLoader() diff --git a/linkml_runtime/loaders/document_loader.py b/linkml_runtime/loaders/document_loader.py deleted file mode 100644 index cf100f20..00000000 --- a/linkml_runtime/loaders/document_loader.py +++ /dev/null @@ -1,70 +0,0 @@ -import json -from urllib.parse import urlparse, urljoin - -from pyld.jsonld import requests_document_loader, JsonLdError - - -def pyld_document_loader(**kwargs): - """ - Create a Requests or a file based document loader. - - Can be used to setup extra Requests args such as verify, cert, timeout, - or others. - - :param kwargs: extra keyword args for Requests get() call. - - :return: the RemoteDocument loader function. - """ - - def loader(url, prev_url, options=None): - """ - Retrieves JSON-LD from a URL, a file location or as text - - :param url: the URL to retrieve. - :param prev_url: Dictionary to carry the previous URL referenced - :param options: Additional options - - :return: the RemoteDocument. - """ - if options is None: - options = {} - - # Process text being passed in as the document - if url.strip()[0] in '[{' or '\n' in url: - return { - 'contentType': 'text/plain', - 'contextUrl': None, - 'documentUrl': None, - 'document': json.loads(url) - } - - # process relative URL - pieces = urlparse(url) - if not any([pieces.scheme, pieces.netloc]): - if prev_url['url']: - url = urljoin(prev_url['url'], url) - pieces = urlparse(url) - else: - prev_url['url'] = url - - # check for file access - if pieces.scheme == 'file': - try: - with open(pieces.path) as f: - doc = f.read() - return { - 'contentType': 'text/plain', - 'contextUrl': None, - 'documentUrl': url, - 'document': json.loads(doc) - } - except Exception as cause: - raise JsonLdError( - f'Could not retrieve a JSON-LD document from {url}.', - 'jsonld.LoadDocumentError', code='loading document failed', - cause=cause) - else: - return requests_document_loader(**kwargs)(url, options) - - prev_url_holder = {} - return lambda url, options: loader(url, prev_url_holder, options) diff --git a/linkml_runtime/loaders/json_loader.py b/linkml_runtime/loaders/json_loader.py index 073aebb4..a57a9bd9 100644 --- a/linkml_runtime/loaders/json_loader.py +++ b/linkml_runtime/loaders/json_loader.py @@ -1,55 +1,30 @@ import json -from typing import Union, TextIO, Optional, Dict, Type, Any +from typing import Union, TextIO, Optional, Dict, Type from hbreader import FileInfo -from linkml_runtime.loaders.loader_root import load_source +from linkml_runtime.loaders.loader_root import Loader from linkml_runtime.utils.yamlutils import YAMLRoot -def json_clean(inp: Any) -> Any: - """ - Remove empty values and JSON-LD relics from an input file - @param inp: JSON-LD representation - @return: JSON representation - """ - def _is_empty(o) -> bool: - return o is None or o == [] or o == {} +class JSONLoader(Loader): - if isinstance(inp, list): - for e in [inpe for inpe in inp if _is_empty(inpe)]: - del(inp[e]) - for e in inp: - json_clean(e) - elif isinstance(inp, dict): - for k, v in list(inp.items()): - if k.startswith('@') or _is_empty(v): - del(inp[k]) - else: - json_clean(v) - return inp - - -def load(source: Union[str, dict, TextIO], base_dir: Optional[str], target_class: Type[YAMLRoot], - metadata: Optional[FileInfo]) -> YAMLRoot: - def loader(data: Union[str, dict], _: FileInfo) -> Optional[Dict]: - data_as_dict = json.loads(data) if isinstance(data, str) else data - typ = data_as_dict.pop('@type', None) - # TODO: Remove this when https://github.com/linkml/issues/364 gets fixed - if not typ: + def load(self, source: 
Union[str, dict, TextIO], target_class: Type[YAMLRoot], *, base_dir: Optional[str] = None, + metadata: Optional[FileInfo] = None, **_) -> YAMLRoot: + def loader(data: Union[str, dict], _: FileInfo) -> Optional[Dict]: + data_as_dict = json.loads(data) if isinstance(data, str) else data typ = data_as_dict.pop('@type', None) - if typ and typ != target_class.__name__: - # TODO: connect this up with the logging facility or warning? - print(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}") - return json_clean(data_as_dict) - - if not metadata: - metadata = FileInfo() - if base_dir and not metadata.base_path: - metadata.base_path = base_dir - return load_source(source, loader, target_class, accept_header="application/ld+json, application/json, text/json", - metadata=metadata) - - -def loads(source: str, target_class: Type[YAMLRoot], metadata: Optional[FileInfo] = None) -> YAMLRoot: - return load(source, None, target_class, metadata) + # TODO: Remove this when https://github.com/linkml/issues/364 gets fixed + if not typ: + typ = data_as_dict.pop('@type', None) + if typ and typ != target_class.__name__: + # TODO: connect this up with the logging facility or warning? + print(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}") + return self.json_clean(data_as_dict) + + if not metadata: + metadata = FileInfo() + if base_dir and not metadata.base_path: + metadata.base_path = base_dir + return self.load_source(source, loader, target_class, + accept_header="application/ld+json, application/json, text/json", metadata=metadata) diff --git a/linkml_runtime/loaders/loader_root.py b/linkml_runtime/loaders/loader_root.py index 9ae415ba..6609cf30 100644 --- a/linkml_runtime/loaders/loader_root.py +++ b/linkml_runtime/loaders/loader_root.py @@ -1,33 +1,85 @@ -from typing import TextIO, Union, Optional, Callable, Dict, Type +from abc import ABC, abstractmethod +from typing import TextIO, Union, Optional, Callable, Dict, Type, Any from hbreader import FileInfo, hbread from linkml_runtime.utils.yamlutils import YAMLRoot -def load_source(source: Union[str, dict, TextIO], - loader: Callable[[Union[str, Dict], FileInfo], Optional[Dict]], - target_class: Type[YAMLRoot], - accept_header: Optional[str] = "text/plain, application/yaml;q=0.9", - metadata: Optional[FileInfo] = None) -> Optional[YAMLRoot]: - """ Base loader - convert a file, url, string, open file handle or dictionary into an instance - of target_class - - :param source: URL, file name, block of text, Existing Object or open file handle - :param loader: Take a stringified image or a dictionary and return a loadable dictionary - :param target_class: Destination class - :param accept_header: Accept header to use if doing a request - :param metadata: Metadata about the source. 
Filled in as we go along - - :return: Instance of the target class if loader worked - """ - - # Makes coding easier down the line if we've got this, even if it is strictly internal - if metadata is None: - metadata = FileInfo() - if not isinstance(source, dict): - data = hbread(source, metadata, metadata.base_path, accept_header) - else: - data = source - data_as_dict = loader(data, metadata) - return target_class(**data_as_dict) if data_as_dict is not None else None +class Loader(ABC): + + @staticmethod + def json_clean(inp: Any) -> Any: + """ + Remove empty values and JSON-LD relics from an input file + @param inp: JSON-LD representation + @return: JSON representation + """ + def _is_empty(o) -> bool: + return o is None or o == [] or o == {} + + if isinstance(inp, list): + for e in [inp_e for inp_e in inp if _is_empty(inp_e)]: + del(inp[e]) + for e in inp: + Loader.json_clean(e) + elif isinstance(inp, dict): + for k, v in list(inp.items()): + if k.startswith('@') or _is_empty(v): + del(inp[k]) + else: + Loader.json_clean(v) + return inp + + def load_source(self, + source: Union[str, dict, TextIO], + loader: Callable[[Union[str, Dict], FileInfo], Optional[Dict]], + target_class: Type[YAMLRoot], + accept_header: Optional[str] = "text/plain, application/yaml;q=0.9", + metadata: Optional[FileInfo] = None) -> Optional[YAMLRoot]: + """ Base loader - convert a file, url, string, open file handle or dictionary into an instance + of target_class + + :param source: URL, file name, block of text, Existing Object or open file handle + :param loader: Take a stringified image or a dictionary and return a loadable dictionary + :param target_class: Destination class + :param accept_header: Accept header to use if doing a request + :param metadata: Metadata about the source. 
Filled in as we go along + + :return: Instance of the target class if loader worked + """ + + # Makes coding easier down the line if we've got this, even if it is strictly internal + if metadata is None: + metadata = FileInfo() + if not isinstance(source, dict): + data = hbread(source, metadata, metadata.base_path, accept_header) + else: + data = source + data_as_dict = loader(data, metadata) + return target_class(**data_as_dict) if data_as_dict is not None else None + + @abstractmethod + def load(self, source: Union[str, dict, TextIO], target_class: Type[YAMLRoot], *, base_dir: Optional[str] = None, + metadata: Optional[FileInfo] = None, **_) -> YAMLRoot: + """ + Load source as an instance of target_class + @param source: source file/text/url to load + @param target_class: destination class + @param base_dir: scoping directory for source if it is a file or url + @param metadata: metadata about the source + @param _: extensions + @return: instance of target_class + """ + raise NotImplementedError() + + def loads(self, source: str, target_class: Type[YAMLRoot], *, metadata: Optional[FileInfo] = None, **_) -> YAMLRoot: + """ + Load source as a string + @param source: source + @param target_class: destination class + @param metadata: metadata about the source + @param _: extensions + @return: instance of target_class + """ + return self.load(source, target_class, metadata=metadata) diff --git a/linkml_runtime/loaders/rdf_loader.py b/linkml_runtime/loaders/rdf_loader.py index eb6d1eba..58f21e01 100644 --- a/linkml_runtime/loaders/rdf_loader.py +++ b/linkml_runtime/loaders/rdf_loader.py @@ -2,83 +2,82 @@ from hbreader import FileInfo +from linkml_runtime.loaders.loader_root import Loader from linkml_runtime.utils.context_utils import CONTEXTS_PARAM_TYPE from linkml_runtime.utils.yamlutils import YAMLRoot from pyld import jsonld from rdflib import Graph from rdflib_pyld_compat import pyld_jsonld_from_rdflib_graph -from linkml_runtime.loaders.json_loader import json_clean -from linkml_runtime.loaders.loader_root import load_source from linkml_runtime.loaders.requests_ssl_patch import no_ssl_verification # TODO: figure out what mime types go here. I think we can find the complete set in rdflib RDF_MIME_TYPES = "application/x-turtle;q=0.9, application/rdf+n3;q=0.8, application/rdf+xml;q=0.5, text/plain;q=0.1" -def load(source: Union[str, TextIO, Graph], base_dir: Optional[str], target_class: Type[YAMLRoot], - contexts: CONTEXTS_PARAM_TYPE, fmt: Optional[str] = 'turtle', metadata: Optional[FileInfo] = None) -> YAMLRoot: - """ - Load the RDF in source into the python target_class structure - :param source: RDF data source. Can be a URL, a file name, an RDF string, an open handle or an existing graph - :param base_dir: Base directory that can be used if file name or URL. This is copied into metadata if present - :param target_class: LinkML class to load the RDF into - :param contexts: JSON-LD context(s) to use to generate the JSON that will be loaded into target_class. This is - optional because, if source is in JSON-LD format, it is possible that the contexts are already there - :param fmt: format of source if it isn't an existing Graph - :param metadata: source information. 
Used by some loaders to record where information came from - :return: Instance of target_class - """ - - def loader(data: Union[str, dict], _: FileInfo) -> Optional[dict]: +class RDFLoader(Loader): + def load(self, source: Union[str, TextIO, Graph], target_class: Type[YAMLRoot], *, base_dir: Optional[str] = None, + contexts: CONTEXTS_PARAM_TYPE = None, fmt: Optional[str] = 'turtle', + metadata: Optional[FileInfo] = None) -> YAMLRoot: """ - Process an RDF graph or a JSON-LD string. We do this by using pyld_jsonld_from_rdflib_graph to emit a JSON-LD - string and then process it with jsonld.frame. - - :param data: Graph or JSON-LD string - :param _: Unused - part of signature for other implementations - :return: Dictionary to load into the target class + Load the RDF in source into the python target_class structure + :param source: RDF data source. Can be a URL, a file name, an RDF string, an open handle or an existing graph + :param base_dir: Base directory that can be used if file name or URL. This is copied into metadata if present + :param target_class: LinkML class to load the RDF into + :param contexts: JSON-LD context(s) to use to generate the JSON that will be loaded into target_class. This is + optional because, if source is in JSON-LD format, it is possible that the contexts are already there + :param fmt: format of source if it isn't an existing Graph + :param metadata: source information. Used by some loaders to record where information came from + :return: Instance of target_class """ - if isinstance(data, str): - if fmt != 'json-ld': - g = Graph() - g.parse(data=data, format=fmt) - data = pyld_jsonld_from_rdflib_graph(g) - if not isinstance(data, dict): - # TODO: Add a context processor to the source w/ CONTEXTS_PARAM_TYPE - # TODO: figure out what to do base options below - # TODO: determine whether jsonld.frame can handle something other than string input - data_as_dict = jsonld.frame(data, contexts) - else: - data_as_dict = data - typ = data_as_dict.pop('@type', None) - # TODO: remove this when we get the Biolinkml issue fixed - if not typ: - typ = data_as_dict.pop('type', None) - if typ and typ != target_class.class_name: - # TODO: connect this up with the logging facility or warning? - print(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}") - return json_clean(data_as_dict) + def loader(data: Union[str, dict], _: FileInfo) -> Optional[dict]: + """ + Process an RDF graph or a JSON-LD string. We do this by using pyld_jsonld_from_rdflib_graph to + emit a JSON-LD string and then process it with jsonld.frame. - if not metadata: - metadata = FileInfo() - if base_dir and not metadata.base_path: - metadata.base_path = base_dir + :param data: Graph or JSON-LD string + :param _: Unused - part of signature for other implementations + :return: Dictionary to load into the target class + """ + # Where we sit -- we don't have the start string (and other info?) 
in the context, so we don't get a package + # on the way out + # Also - if we don't pop type below, we need to act accordingly + if isinstance(data, str): + if fmt != 'json-ld': + g = Graph() + g.parse(data=data, format=fmt) + data = pyld_jsonld_from_rdflib_graph(g) - # If the inpute is a graph, convert it to JSON-LD - if isinstance(source, Graph): - source = pyld_jsonld_from_rdflib_graph(source) - fmt = 'json-ld' + if not isinstance(data, dict): + # TODO: Add a context processor to the source w/ CONTEXTS_PARAM_TYPE + # TODO: figure out what to do base options below + # TODO: determine whether jsonld.frame can handle something other than string input + # frame = {'@context': contexts, '@type': f'{target_class.__name__}'} + data_as_dict = jsonld.frame(data, contexts) + else: + data_as_dict = data + typ = data_as_dict.pop('@type', None) + # TODO: remove this when we get the Biolinkml issue fixed + if not typ: + typ = data_as_dict.pop('type', None) + if typ and typ != target_class.class_name: + # TODO: connect this up with the logging facility or warning? + print(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}") + return self.json_clean(data_as_dict) - # While we may want to allow full SSL verification at some point, the general philosophy is that content forgery - # is not going to be a serious problem. - # TODO: Make the SSL option a settable parameter in the package itself - with no_ssl_verification(): - return load_source(source, loader, target_class, accept_header=RDF_MIME_TYPES, metadata=metadata) + if not metadata: + metadata = FileInfo() + if base_dir and not metadata.base_path: + metadata.base_path = base_dir + # If the input is a graph, convert it to JSON-LD + if isinstance(source, Graph): + source = pyld_jsonld_from_rdflib_graph(source) + fmt = 'json-ld' -def loads(source: Union[str, TextIO, Graph], target_class: Type[YAMLRoot], - contexts: CONTEXTS_PARAM_TYPE, fmt: Optional[str] = 'turtle', - metadata: Optional[FileInfo] = None) -> YAMLRoot: - return load(source, None, target_class, contexts, fmt, metadata) + # While we may want to allow full SSL verification at some point, the general philosophy is that content forgery + # is not going to be a serious problem. 
+ # TODO: Make the SSL option a settable parameter in the package itself + with no_ssl_verification(): + return self.load_source(source, loader, target_class, accept_header=RDF_MIME_TYPES, metadata=metadata) diff --git a/linkml_runtime/loaders/yaml_loader.py b/linkml_runtime/loaders/yaml_loader.py index 349ca778..b9c79cac 100644 --- a/linkml_runtime/loaders/yaml_loader.py +++ b/linkml_runtime/loaders/yaml_loader.py @@ -4,23 +4,20 @@ import yaml from hbreader import FileInfo +from linkml_runtime.loaders.loader_root import Loader from linkml_runtime.utils.yamlutils import YAMLRoot, DupCheckYamlLoader -from linkml_runtime.loaders.loader_root import load_source +class YAMLLoader(Loader): -def load(source: Union[str, dict, TextIO], target_class: Type[YAMLRoot], base_dir: Optional[str] = None, - metadata: Optional[FileInfo] = None) -> YAMLRoot: - def loader(data: Union[str, dict], _: FileInfo) -> Optional[Dict]: - return yaml.load(StringIO(data), DupCheckYamlLoader) if isinstance(data, str) else data + def load(self, source: Union[str, dict, TextIO], target_class: Type[YAMLRoot], *, base_dir: Optional[str] = None, + metadata: Optional[FileInfo] = None, **_) -> YAMLRoot: + def loader(data: Union[str, dict], _: FileInfo) -> Optional[Dict]: + return yaml.load(StringIO(data), DupCheckYamlLoader) if isinstance(data, str) else data - if not metadata: - metadata = FileInfo() - if base_dir and not metadata.base_path: - metadata.base_path = base_dir - return load_source(source, loader, target_class, accept_header="text/yaml, application/yaml;q=0.9", - metadata=metadata) - - -def loads(source: str, target_class: Type[YAMLRoot], metadata: Optional[FileInfo] = None) -> YAMLRoot: - return load(source, target_class, metadata=metadata) + if not metadata: + metadata = FileInfo() + if base_dir and not metadata.base_path: + metadata.base_path = base_dir + return self.load_source(source, loader, target_class, accept_header="text/yaml, application/yaml;q=0.9", + metadata=metadata) diff --git a/linkml_runtime/utils/compile_python.py b/linkml_runtime/utils/compile_python.py index f988d5b9..dc820069 100644 --- a/linkml_runtime/utils/compile_python.py +++ b/linkml_runtime/utils/compile_python.py @@ -23,6 +23,8 @@ def compile_python(text_or_fn: str, package_path: str = None) -> ModuleType: @return: Compiled module """ python_txt = file_text(text_or_fn) + if package_path is None and python_txt != text_or_fn: + package_path = text_or_fn spec = compile(python_txt, 'test', 'exec') module = ModuleType('test') if package_path: diff --git a/linkml_runtime/utils/formatutils.py b/linkml_runtime/utils/formatutils.py index 8b64e1e2..3db3296c 100644 --- a/linkml_runtime/utils/formatutils.py +++ b/linkml_runtime/utils/formatutils.py @@ -1,8 +1,6 @@ import re from typing import List -from pyshex.shex_evaluator import EvaluationResult - ws_pattern = re.compile(r'\s+') us_pattern = re.compile(r'_+') @@ -83,15 +81,3 @@ def wrapped_annotation(txt: str) -> str: else: rval.append(line) return '\n\t'.join(rval) - - -def shex_results_as_string(rslts: EvaluationResult) -> str: - """ Pretty print ShEx Evaluation result """ - # TODO: Add this method to ShEx itself - rval = [f"Evalutating: {str(rslts.focus)} against {str(rslts.start)}"] - if rslts.result: - rval.append("Result: CONFORMS") - else: - rval.append("Result: NonConforming") - rval += rslts.reason.split('\n') - return '\n'.join(rval) diff --git a/linkml_runtime/utils/permissiblevalueimpl.py b/linkml_runtime/utils/permissiblevalueimpl.py index 11282e01..95f23207 100644 --- 
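As with the dumpers, the loaders refactored above are now classes with singleton instances exported from linkml_runtime.loaders, and base_dir/contexts/fmt/metadata become keyword-only arguments. A usage sketch; `Person` and the file/context names are placeholders for a pythongen-generated YAMLRoot class and its artifacts, not part of this patch.

```python
from linkml_runtime.loaders import json_loader, yaml_loader, rdf_loader

# 'Person' is a placeholder for any pythongen-generated YAMLRoot class.
p1 = yaml_loader.load("person.yaml", Person)
p2 = json_loader.load("person.json", Person, base_dir="tests/input")       # base_dir is keyword-only
p3 = rdf_loader.load("person.ttl", Person, contexts="model.context.jsonld", fmt="turtle")
p4 = json_loader.loads('{"id": "P:1", "name": "Alice"}', Person)           # parse directly from a string
```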
a/linkml_runtime/utils/permissiblevalueimpl.py +++ b/linkml_runtime/utils/permissiblevalueimpl.py @@ -5,10 +5,11 @@ from linkml_model.meta import PermissibleValue, EnumDefinition, PvFormulaOptions from linkml_runtime.utils.curienamespace import CurieNamespace -from linkml_runtime.utils.metamodelcore import URIorCURIE, empty_list -from linkml_runtime.utils.uritypes_from_tccm import RenderingURI +from linkml_runtime.utils.metamodelcore import URIorCURIE, empty_list, URI +# from linkml_runtime.utils.uritypes_from_tccm import RenderingURI from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str +RenderingURI = URI class PermissibleValueImpl(PermissibleValue): """ diff --git a/linkml_runtime/utils/shexutils.py b/linkml_runtime/utils/shexutils.py new file mode 100644 index 00000000..e69de29b diff --git a/linkml_runtime/utils/yamlutils.py b/linkml_runtime/utils/yamlutils.py index ee6a45e0..d239a868 100644 --- a/linkml_runtime/utils/yamlutils.py +++ b/linkml_runtime/utils/yamlutils.py @@ -2,7 +2,7 @@ from typing import Union, Any, List, Optional, Type, Callable import yaml -from jsonasobj import JsonObj, as_json +from jsonasobj import JsonObj, as_json, ExtendedNamespace from rdflib import Graph from yaml.constructor import ConstructorError @@ -19,11 +19,12 @@ def __str__(self): return where -class YAMLRoot(JsonObj): +class YAMLRoot(ExtendedNamespace): """ The root object for all python YAML representations """ + def __post_init__(self, **kwargs): if kwargs: messages: List[str] = [] diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..cebc3924 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,13 @@ +################################################################################ +# This requirements file has been automatically generated from `Pipfile` with +# `pipenv-to-requirements` +# +# +# This has been done to maintain backward compatibility with tools and services +# that do not support `Pipfile` yet. +# +# Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and +# `Pipfile.lock` and then regenerate `requirements*.txt`. +################################################################################ + +requests diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..65b7b7c2 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,23 @@ +################################################################################ +# This requirements file has been automatically generated from `Pipfile` with +# `pipenv-to-requirements` +# +# +# This has been done to maintain backward compatibility with tools and services +# that do not support `Pipfile` yet. +# +# Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and +# `Pipfile.lock` and then regenerate `requirements*.txt`. 
+################################################################################ + +click +hbreader +jsonasobj +linkml-model +prefixcommons +pyld +pyyaml>=5.1 +rdflib +rdflib-jsonld +rdflib-pyld-compat +shexjsg diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..e0064f98 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,37 @@ +[metadata] +name = linkml_runtime +url = https://github.com/biolink/linkml-runtime +author = Harold Solbrig +author-email = solbrig@jhu.edu +summary = LinkML Runtime Environment +description = Runtime Environment for the Linked Open Data Modeling Language +home-page = http://linkml.github.io/linkml-runtime +license = CC0 1.0 Universal +python-requires = >=3.7 +classifiers = + Development Status :: 4 - Beta + Environment :: Console + Intended Audience :: Developers + Intended Audience :: Science/Research + Intended Audience :: Healthcare Industry + License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 +keywords = + linkml + lod + rdf + owl + yaml + model + metamodel + +[files] +packages = + linkml_runtime + +[entry_points] +console_scripts = + comparefiles = linkml.utils.comparefiles:cli diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..aa6b10fe --- /dev/null +++ b/setup.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +import sys +from setuptools import setup +from warnings import warn + +if sys.version_info < (3, 7, 0): + warn(f"Some URL processing will fail with python 3.7.5 or earlier. Current version: {sys.version_info}") + +setup( + setup_requires=['pbr'], + pbr=True, +) diff --git a/show_test_changes.sh b/show_test_changes.sh new file mode 100644 index 00000000..e8f98625 --- /dev/null +++ b/show_test_changes.sh @@ -0,0 +1,3 @@ +#!/bin/bash +# Make all of the test output changes visible to git +git update-index --no-assume-unchanged `find tests -type f | grep \/output\/ | xargs` diff --git a/tests/__init__.py b/tests/__init__.py index 93dc347e..396a1754 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -31,6 +31,16 @@ # the final output USE_LOCAL_IMPORT_MAP = test_settings.getboolean('USE_LOCAL_IMPORT_MAP', False) + +# There are lots of warnings emitted by the generators. Default logging level +DEFAULT_LOG_LEVEL = eval(test_settings.get('DEFAULT_LOG_LEVEL', 'logging.ERROR')) +DEFAULT_LOG_LEVEL_TEXT = test_settings.get('DEFAULT_LOG_LEVEL_TEXT', 'ERROR') + + +# Skip RDF comparison, as it takes a lot of time +SKIP_RDF_COMPARE = test_settings.getboolean('SKIP_RDF_COMPARE', False) +SKIP_RDF_COMPARE_REASON = test_settings.get('SKIP_RDF_COMPARE_REASON', 'tests/__init__.py RDF output not checked SKIP_RDF_COMPARE is True') + # Exception for use in script testing. 
Global to prevent redefinition class CLIExitException(Exception): def __init__(self, code: int) -> None: diff --git a/tests/support/clicktestcase.py b/tests/support/clicktestcase.py index b4f0077a..fe14b62d 100644 --- a/tests/support/clicktestcase.py +++ b/tests/support/clicktestcase.py @@ -4,10 +4,9 @@ from typing import Union, List, Optional, Callable from warnings import warn -from tests import DEFAULT_LOG_LEVEL_TEXT -from tests.utils.dirutils import make_and_clear_directory -from tests.utils.compare_rdf import compare_rdf -from tests.utils.test_environment import TestEnvironmentTestCase, TestEnvironment +from tests.support.dirutils import make_and_clear_directory +from tests.support.compare_rdf import compare_rdf +from tests.support.test_environment import TestEnvironmentTestCase, TestEnvironment class ClickTestCase(TestEnvironmentTestCase): @@ -47,21 +46,21 @@ def n3_comparator(expected_data: str, actual_data: str) -> str: return compare_rdf(expected_data, actual_data, "n3") @staticmethod - def rdf_comparator(expected_data: str, actual_data: str, fmt: Optional[str]='turtle') -> str: + def rdf_comparator(expected_data: str, actual_data: str, fmt: Optional[str] = 'turtle') -> str: """ compare expected_data to actual_data using basic RDF comparator method """ return compare_rdf(expected_data, actual_data, fmt=fmt) @staticmethod - def always_pass_comparator(self, expected_data: str, new_data: str) -> Optional[str]: + def always_pass_comparator(expected_data: str, new_data: str) -> Optional[str]: """ No-op comparator -- everyone passes! + :param expected_data: :param new_data: :return: """ return None - @staticmethod def closein_comparison(expected_txt: str, actual_txt: str) -> None: """ Assist with testing comparison -- zero in on the first difference in a big string @@ -102,7 +101,8 @@ def do_test(self, @param expected_error: If present, we expect this error @param filtr: Filter to remove date and app specific information from text. Only used for single file generation @param is_directory: True means output is to a directory - @param add_yaml: True means add the default meta.yaml file. False means both yaml and importmap are pre-supplied + @param add_yaml: True means add the default meta.yaml file. 
False means both yaml and importmap + are pre-supplied @param comparator: If present, use this method for comparison """ assert testFileOrDirectory @@ -112,8 +112,9 @@ def do_test(self, warn("filtr and comparator parameters aren't implemented for directory generation") if add_yaml and (not arg_list or arg_list[0] != '--help'): - arg_list.insert(0, self.env.meta_yaml) - arg_list += ["--importmap", self.env.import_map, "--log_level", DEFAULT_LOG_LEVEL_TEXT] + raise NotImplementedError("This is an artifact from elsewhere") + # arg_list.insert(0, self.env.meta_yaml) + # arg_list += ["--importmap", self.env.import_map, "--log_level", DEFAULT_LOG_LEVEL_TEXT] target = os.path.join(self.testdir, testFileOrDirectory) self.temp_file_path(self.testdir, is_dir=True) @@ -137,7 +138,6 @@ def do_gen(): else: do_gen() - @classmethod def temp_directory(cls, base: str) -> str: """ diff --git a/tests/support/compare_rdf.py b/tests/support/compare_rdf.py new file mode 100644 index 00000000..704951f9 --- /dev/null +++ b/tests/support/compare_rdf.py @@ -0,0 +1,90 @@ +import re +from contextlib import redirect_stdout +from io import StringIO +from typing import Union, Optional + +from rdflib import Graph, RDF +from rdflib.compare import to_isomorphic, IsomorphicGraph, graph_diff + +from linkml_model.meta import LINKML +# TODO: Find out why test_issue_namespace is emitting generation_date in the TYPE namespace +from tests import SKIP_RDF_COMPARE, SKIP_RDF_COMPARE_REASON + +TYPE = LINKML + + +def to_graph(inp: Union[Graph, str], fmt: Optional[str] = "turtle") -> Graph: + """ + Convert inp into a graph + :param inp: Graph, file name, url or text + :param fmt: expected format of inp + :return: Graph representing inp + """ + if isinstance(inp, Graph): + return inp + g = Graph() + # If there is no input then return an empty graph + if not inp.strip(): + return g + if not inp.strip().startswith('{') and '\n' not in inp and '\r' not in inp: + with open(inp) as f: + inp = f.read() + g.parse(data=inp, format=fmt) + return g + + +def print_triples(g: Graph) -> None: + """ + Print the contents of g into stdout + :param g: graph to print + """ + g_text = re.sub(r'@prefix.*\n', '', g.serialize(format="turtle").decode()) + print(g_text) + + +def compare_rdf(expected: Union[Graph, str], actual: Union[Graph, str], fmt: Optional[str] = "turtle") -> Optional[str]: + """ + Compare expected to actual, returning a string if there is a difference + :param expected: expected RDF. Can be Graph, file name, uri or text + :param actual: actual RDF. 
Can be Graph, file name, uri or text + :param fmt: RDF format + :return: None if they match else summary of difference + """ + def rem_metadata(g: Graph) -> IsomorphicGraph: + # Remove list declarations from target + for s in g.subjects(RDF.type, RDF.List): + g.remove((s, RDF.type, RDF.List)) + for t in g: + if t[1] in (LINKML.generation_date, LINKML.source_file_date, LINKML.source_file_size, + TYPE.generation_date, TYPE.source_file_date, TYPE.source_file_size): + g.remove(t) + g_iso = to_isomorphic(g) + return g_iso + + # Bypass compare if settings have turned it off + if SKIP_RDF_COMPARE: + print(f"tests/utils/compare_rdf.py: {SKIP_RDF_COMPARE_REASON}") + return None + + expected_graph = to_graph(expected, fmt) + expected_isomorphic = rem_metadata(expected_graph) + actual_graph = to_graph(actual, fmt) + actual_isomorphic = rem_metadata(actual_graph) + + # Graph compare takes a Looong time + in_both, in_old, in_new = graph_diff(expected_isomorphic, actual_isomorphic) + # if old_iso != new_iso: + # in_both, in_old, in_new = graph_diff(old_iso, new_iso) + old_len = len(list(in_old)) + new_len = len(list(in_new)) + if old_len or new_len: + txt = StringIO() + with redirect_stdout(txt): + print("----- Missing Triples -----") + if old_len: + print_triples(in_old) + print("----- Added Triples -----") + if new_len: + print_triples(in_new) + return txt.getvalue() + return None diff --git a/tests/support/dirutils.py b/tests/support/dirutils.py index e84563ae..14a83061 100644 --- a/tests/support/dirutils.py +++ b/tests/support/dirutils.py @@ -24,7 +24,7 @@ def make_and_clear_directory(dirbase: str) -> None: def file_text(txt_or_fname: str) -> str: """ Determine whether text_or_fname is a file name or a string and, if a file name, read it - :param text_or_fname: + :param txt_or_fname: :return: """ if len(txt_or_fname) > 4 and '\n' not in txt_or_fname: @@ -81,7 +81,7 @@ def are_dir_trees_equal(dir1: str, dir2: str) -> Optional[str]: @return: None if directories match, else summary of differences """ def has_local_diffs(dc: dircmp) -> bool: - return dc.diff_files or dc.funny_files or dc.left_only or dc.right_only + return bool(dc.diff_files or dc.funny_files or dc.left_only or dc.right_only) def has_diffs(dc: dircmp) -> bool: return has_local_diffs(dc) or any(has_diffs(sd) for sd in dc.subdirs.values()) diff --git a/tests/support/filters.py b/tests/support/filters.py index f12b9642..ced4017d 100644 --- a/tests/support/filters.py +++ b/tests/support/filters.py @@ -1,9 +1,9 @@ """ Metadata filters for test cases -- various tools to remove metadata from output """ import re -from json import dumps +from json import loads -from jsonasobj import loads, as_json +from jsonasobj import as_json def ldcontext_metadata_filter(s: str) -> str: diff --git a/tests/support/mismatchlog.py b/tests/support/mismatchlog.py index 7a8c5e87..57a6b8ba 100644 --- a/tests/support/mismatchlog.py +++ b/tests/support/mismatchlog.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Optional, List -base_dir = os.path.abspath(os.path.join( os.path.dirname(__file__), '..', '..')) +base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) class MismatchLog: diff --git a/tests/support/test_environment.py b/tests/support/test_environment.py index 02fe581d..22414b2c 100644 --- a/tests/support/test_environment.py +++ b/tests/support/test_environment.py @@ -11,9 +11,6 @@ from pathlib import Path from typing import Optional, Callable, Union, List -from linkml_model import linkml_files -from 
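compare_rdf() above strips generation metadata and RDF list bookkeeping before comparing the isomorphic graphs. A small sketch with two made-up Turtle snippets that differ in a single skos:notation value; multi-line strings are used so the helper treats them as data rather than file names:

from tests.support.compare_rdf import compare_rdf

expected = """@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
<http://example.org/C147557> skos:notation "C147557" .
"""
actual = """@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
<http://example.org/C147557> skos:notation "C999999" .
"""

# Returns None on a match, otherwise a "Missing/Added Triples" summary
print(compare_rdf(expected, actual) or "graphs are isomorphic")
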
linkml_model.linkml_files import Source, Format - from tests.support.dirutils import are_dir_trees_equal from tests.support.mismatchlog import MismatchLog @@ -47,9 +44,6 @@ def __init__(self, filedir: str) -> None: # Get the parent's directory name. If it is a test directory, borrow from its environment parent = Path(self.cwd).parts[-2] - self.meta_yaml = linkml_files.LOCAL_PATH_FOR(Source.META, Format.YAML) - self.types_yaml = linkml_files.LOCAL_PATH_FOR(Source.TYPES, Format.YAML) - self.mapping_yaml = linkml_files.LOCAL_PATH_FOR(Source.TYPES, Format.YAML) if parent.startswith('test'): parent_env = import_module('..environment', __package__) self.import_map = parent_env.env.import_map diff --git a/tests/test_issues/__init__.py b/tests/test_issues/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_issues/environment.py b/tests/test_issues/environment.py new file mode 100644 index 00000000..bdc14a95 --- /dev/null +++ b/tests/test_issues/environment.py @@ -0,0 +1,3 @@ +from tests.support.test_environment import TestEnvironment + +env = TestEnvironment(__file__) diff --git a/tests/test_issues/input/__init__.py b/tests/test_issues/input/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_issues/input/issue_368.context.jsonld b/tests/test_issues/input/issue_368.context.jsonld new file mode 100644 index 00000000..61c1fded --- /dev/null +++ b/tests/test_issues/input/issue_368.context.jsonld @@ -0,0 +1,23 @@ +{ + "@context": { + "bms": "https://microbiomedata/schema/", + "linkml": "https://w3id.org/linkml/", + "skos": "http://www.w3.org/2004/02/skos/core#", + "@vocab": "https://microbiomedata/schema/", + "slot_1": { + "@context": { + "@vocab": null, + "text": "skos:notation", + "description": "skos:prefLabel", + "meaning": "@id" + }, + "@id": "https://microbiomedata/schema/slot_1" + }, + "ParentClass": { + "@id": "https://microbiomedata/schema/mixs/ParentClass" + }, + "SampleClass": { + "@id": "https://microbiomedata/schema/SampleClass" + } + } +} diff --git a/tests/test_issues/input/issue_368.py b/tests/test_issues/input/issue_368.py new file mode 100644 index 00000000..dc89a590 --- /dev/null +++ b/tests/test_issues/input/issue_368.py @@ -0,0 +1,68 @@ +# Auto generated from issue_368.yaml by pythongen.py version: 0.9.0 +# Generation date: 2021-03-26 14:21 +# Schema: bms +# +# id: https://microbiomedata/schema +# description: +# license: https://creativecommons.org/publicdomain/zero/1.0/ + +import dataclasses +import sys +import re +from typing import Optional, List, Union, Dict, ClassVar, Any +from dataclasses import dataclass +from linkml_runtime.linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions + +from linkml_runtime.utils.slot import Slot +from linkml_runtime.utils.metamodelcore import empty_list, empty_dict, bnode +from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int +from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs +from linkml_runtime.utils.formatutils import camelcase, underscore, sfx +from linkml_runtime.utils.enumerations import EnumDefinitionImpl +from rdflib import Namespace, URIRef +from linkml_runtime.utils.curienamespace import CurieNamespace +from . 
issue_368_imports import ParentClass, SampleEnum + +metamodel_version = "1.7.0" + +# Overwrite dataclasses _init_fn to add **kwargs in __init__ +dataclasses._init_fn = dataclasses_init_fn_with_kwargs + +# Namespaces +LINKML = CurieNamespace('linkml', 'https://w3id.org/linkml/') +DEFAULT_ = CurieNamespace('', 'https://microbiomedata/schema/') + + +# Types + +# Class references + + + +@dataclass +class SampleClass(ParentClass): + _inherited_slots: ClassVar[List[str]] = [] + + class_class_uri: ClassVar[URIRef] = URIRef("https://microbiomedata/schema/SampleClass") + class_class_curie: ClassVar[str] = None + class_name: ClassVar[str] = "SampleClass" + class_model_uri: ClassVar[URIRef] = URIRef("https://microbiomedata/schema/SampleClass") + + slot_1: Optional[Union[str, "SampleEnum"]] = None + + def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]): + if self.slot_1 is not None and not isinstance(self.slot_1, SampleEnum): + self.slot_1 = SampleEnum(self.slot_1) + + super().__post_init__(**kwargs) + + +# Enumerations + + +# Slots +class slots: + pass + +slots.slot_1 = Slot(uri=DEFAULT_.slot_1, name="slot_1", curie=DEFAULT_.curie('slot_1'), + model_uri=DEFAULT_.slot_1, domain=None, range=Optional[Union[str, "SampleEnum"]]) diff --git a/tests/test_issues/input/issue_368_imports.py b/tests/test_issues/input/issue_368_imports.py new file mode 100644 index 00000000..c451e594 --- /dev/null +++ b/tests/test_issues/input/issue_368_imports.py @@ -0,0 +1,65 @@ +# Auto generated from issue_368_imports.yaml by pythongen.py version: 0.9.0 +# Generation date: 2021-03-26 14:21 +# Schema: mixs +# +# id: https://microbiomedata/schema/mixs +# description: +# license: + +import dataclasses +import sys +import re +from typing import Optional, List, Union, Dict, ClassVar, Any +from dataclasses import dataclass +from linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions + +from linkml_runtime.utils.slot import Slot +from linkml_runtime.utils.metamodelcore import empty_list, empty_dict, bnode +from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int +from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs +from linkml_runtime.utils.formatutils import camelcase, underscore, sfx +from linkml_runtime.utils.enumerations import EnumDefinitionImpl +from rdflib import Namespace, URIRef +from linkml_runtime.utils.curienamespace import CurieNamespace + + +metamodel_version = "1.7.0" + +# Overwrite dataclasses _init_fn to add **kwargs in __init__ +dataclasses._init_fn = dataclasses_init_fn_with_kwargs + +# Namespaces +DEFAULT_ = CurieNamespace('', 'https://microbiomedata/schema/mixs/') + + +# Types + +# Class references + + + +class ParentClass(YAMLRoot): + _inherited_slots: ClassVar[List[str]] = [] + + class_class_uri: ClassVar[URIRef] = URIRef("https://microbiomedata/schema/mixs/ParentClass") + class_class_curie: ClassVar[str] = None + class_name: ClassVar[str] = "parent_class" + class_model_uri: ClassVar[URIRef] = URIRef("https://microbiomedata/schema/mixs/ParentClass") + + +# Enumerations +class SampleEnum(EnumDefinitionImpl): + + pva = PermissibleValue(text="pva", + description="PVA description") + pvb = PermissibleValue(text="pvb", + description="PVB description") + + _defn = EnumDefinition( + name="SampleEnum", + ) + +# Slots +class slots: + pass + diff --git a/tests/test_issues/test_issue_368_enums.py b/tests/test_issues/test_issue_368_enums.py new file mode 100644 index 00000000..4fbe8437 --- /dev/null +++ 
b/tests/test_issues/test_issue_368_enums.py @@ -0,0 +1,40 @@ +import unittest +from typing import Callable + +from linkml_runtime.dumpers import json_dumper, yaml_dumper, rdf_dumper +from tests.test_issues.environment import env +from tests.test_loaders_dumpers.loaderdumpertestcase import LoaderDumperTestCase +from linkml_runtime.utils.compile_python import compile_python + + +class Issue368TestCase(LoaderDumperTestCase): + env = env + + def header(self, txt: str) -> str: + return '\n' + ("=" * 20) + f" {txt} " + ("=" * 20) + + def test_issue_368_enums(self): + """ Test Enum generation """ + + module = compile_python(env.input_path('issue_368.py')) + + enum_inst = module.SampleEnum("pva") # EnumInstanceImpl + example = module.SampleClass(slot_1="pva") + assert hasattr(example, "slot_1") + assert example.slot_1.code.text == enum_inst.code.text + assert str(example.slot_1) == "pva: PVA description" + + def dump_and_load(dumper: Callable, sfx: str) -> None: + fname = env.actual_path(f'issue_368_1.{sfx}') + dumper(example, fname) + with open(fname) as f: + print(f'\n----- {sfx} -----') + print(f.read()) + + dump_and_load(json_dumper.dump, 'json') + dump_and_load(yaml_dumper.dump, 'yaml') + dump_and_load(lambda obj, fname: rdf_dumper.dump(obj, fname, env.input_path("issue_368.context.jsonld")), 'ttl') + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_loaders_dumpers/README.md b/tests/test_loaders_dumpers/README.md new file mode 100644 index 00000000..11edac11 --- /dev/null +++ b/tests/test_loaders_dumpers/README.md @@ -0,0 +1,15 @@ +# Description of this directory +This directory does a basic functional test of the different flavors of loaders and dumpers. The +layout is as follows: +* input - contains sample(s) of input in the different possible formats. + * .json - plain old json representation of the input + * .jsonld - RDF representation in JSONLD expanded format + * .ttl - RDF representation in Turtle format + * .yaml - YAML format. **Note:** YAML format is also used to test the dumpers -- see below +* jsonld_context - this contains a docker file to set up a JSON-LD context server. +* models - +* output - contains sample(s) of dumper output in various formats. 
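The dump_and_load helper in test_issue_368_enums.py above drives the file-oriented dump() entry points; the dumpers also expose string-level dumps() methods. A companion sketch on the same compiled module, assuming rdf_dumper.dumps() accepts the JSON-LD context file added with this issue and that paths are relative to the repository root:

from linkml_runtime.dumpers import json_dumper, yaml_dumper, rdf_dumper
from linkml_runtime.utils.compile_python import compile_python

module = compile_python('tests/test_issues/input/issue_368.py')
example = module.SampleClass(slot_1="pva")

# String-level counterparts of the dump() calls exercised by the test
print(json_dumper.dumps(example))
print(yaml_dumper.dumps(example))
print(rdf_dumper.dumps(example, 'tests/test_issues/input/issue_368.context.jsonld'))
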
+ + +## Process +The `loader` tests diff --git a/tests/test_loaders_dumpers/__init__.py b/tests/test_loaders_dumpers/__init__.py index bc367a76..0068396e 100644 --- a/tests/test_loaders_dumpers/__init__.py +++ b/tests/test_loaders_dumpers/__init__.py @@ -23,7 +23,6 @@ NCIT = Namespace("http://purl.obolibrary.org/obo/ncit#") TERMCI = Namespace("https://hotecosystem.org/termci/") SHACL = Namespace("http://www.w3.org/ns/shacl#") -NCIT = Namespace("http://purl.obolibrary.org/obo/NCI_") CONTEXT_SVR = f"http://localhost:{HTTP_TEST_PORT}/" CONTEXT_SSL_SVR = f'https://localhost:{HTTPS_TEST_PORT}/' diff --git a/tests/test_loaders_dumpers/environment.py b/tests/test_loaders_dumpers/environment.py index dce84503..bdc14a95 100644 --- a/tests/test_loaders_dumpers/environment.py +++ b/tests/test_loaders_dumpers/environment.py @@ -1,4 +1,3 @@ from tests.support.test_environment import TestEnvironment -# This set of tests actually works with the test root directories env = TestEnvironment(__file__) diff --git a/tests/test_loaders_dumpers/input/README.md b/tests/test_loaders_dumpers/input/README.md new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_loaders_dumpers/input/obo_sample.expanded.jsonld b/tests/test_loaders_dumpers/input/obo_sample.expanded.jsonld deleted file mode 100644 index 2537c231..00000000 --- a/tests/test_loaders_dumpers/input/obo_sample.expanded.jsonld +++ /dev/null @@ -1,90 +0,0 @@ -[ - { - "@id": "_:_:b0", - "@type": [ - "https://hotecosystem.org/termci/Package" - ], - "https://hotecosystem.org/termci/system": [ - { - "@id": "http://purl.obolibrary.org/obo/" - } - ] - }, - { - "@id": "http://purl.obolibrary.org/obo/NCI_C147557", - "http://www.w3.org/2004/02/skos/core#broader": [ - { - "@id": "http://purl.obolibrary.org/obo/NCI_C91102" - } - ], - "http://www.w3.org/2004/02/skos/core#definition": [ - { - "@value": "A question associated with the TSCYC questionnaire." - } - ], - "http://www.w3.org/2004/02/skos/core#inScheme": [ - { - "@id": "http://purl.obolibrary.org/obo/" - } - ], - "http://www.w3.org/2004/02/skos/core#notation": [ - { - "@value": "C147557" - } - ], - "http://www.w3.org/2004/02/skos/core#prefLabel": [ - { - "@value": "TSCYC Questionnaire Question" - } - ] - }, - { - "@id": "http://purl.obolibrary.org/obo/", - "http://www.w3.org/2004/02/skos/core#hasConcept": [ - { - "@id": "http://purl.obolibrary.org/obo/NCI_C147557" - }, - { - "@id": "http://purl.obolibrary.org/obo/NCI_C147796" - } - ], - "http://www.w3.org/ns/shacl#prefix": [ - { - "@value": "OBO" - } - ] - }, - { - "@id": "http://purl.obolibrary.org/obo/NCI_C147796", - "http://www.w3.org/2004/02/skos/core#broader": [ - { - "@id": "http://purl.obolibrary.org/obo/NCIT_C147557" - } - ], - "http://www.w3.org/2004/02/skos/core#definition": [ - { - "@value": "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men." 
- } - ], - "http://www.w3.org/2004/02/skos/core#inScheme": [ - { - "@id": "http://purl.obolibrary.org/obo/" - } - ], - "http://www.w3.org/2004/02/skos/core#notation": [ - { - "@value": "C147796" - } - ], - "http://www.w3.org/2004/02/skos/core#prefLabel": [ - { - "@value": "TSCYC - Being Frightened of Men" - } - ], - "http://www.w3.org/2004/02/skos/core#seeAlso": [ - { - "@value": "http://purl.obolibrary.org/obo/NCI_C147796" - } - ] - } -] \ No newline at end of file diff --git a/tests/test_loaders_dumpers/input/obo_sample.jsonld b/tests/test_loaders_dumpers/input/obo_sample.jsonld index 2cbdd74d..27b2d0e4 100644 --- a/tests/test_loaders_dumpers/input/obo_sample.jsonld +++ b/tests/test_loaders_dumpers/input/obo_sample.jsonld @@ -1,40 +1,40 @@ [ { - "@id": "http://purl.obolibrary.org/obo/NCI_C147796", + "@id": "_:_:b0", + "@type": [ + "https://hotecosystem.org/termci/Package" + ], + "https://hotecosystem.org/termci/system": [ + { + "@id": "http://purl.obolibrary.org/obo/" + } + ] + }, + { + "@id": "http://purl.obolibrary.org/obo/NCI_C147557", "http://www.w3.org/2004/02/skos/core#broader": [ { - "@id": "http://purl.obolibrary.org/obo/NCI_C147557" + "@id": "http://purl.obolibrary.org/obo/NCI_C91102" } ], "http://www.w3.org/2004/02/skos/core#definition": [ { - "@value": "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men." + "@value": "A question associated with the TSCYC questionnaire." } - ] - }, - { - "@id": "_:_:b0", - "@type": [ - "https://hotecosystem.org/termci/Package" ], - "https://hotecosystem.org/termci/system": [ + "http://www.w3.org/2004/02/skos/core#inScheme": [ { "@id": "http://purl.obolibrary.org/obo/" } ], "http://www.w3.org/2004/02/skos/core#notation": [ { - "@value": "C147796" + "@value": "C147557" } ], "http://www.w3.org/2004/02/skos/core#prefLabel": [ { - "@value": "TSCYC - Being Frightened of Men" - } - ], - "http://www.w3.org/2004/02/skos/core#seeAlso": [ - { - "@value": "http://purl.obolibrary.org/obo/NCI_C147796" + "@value": "TSCYC Questionnaire Question" } ] }, @@ -72,30 +72,18 @@ ] }, { - "@id": "http://purl.obolibrary.org/obo/NCI_C147557", - "http://www.w3.org/2004/02/skos/core#broader": [ - { - "@id": "http://purl.obolibrary.org/obo/NCI_C91102" - } - ], - "http://www.w3.org/2004/02/skos/core#definition": [ + "@id": "http://purl.obolibrary.org/obo/", + "http://www.w3.org/2004/02/skos/core#hasConcept": [ { - "@value": "A question associated with the TSCYC questionnaire." - } - ], - "http://www.w3.org/2004/02/skos/core#inScheme": [ + "@id": "http://purl.obolibrary.org/obo/NCI_C147796" + }, { - "@id": "http://purl.obolibrary.org/obo/" - } - ], - "http://www.w3.org/2004/02/skos/core#notation": [ - { - "@value": "C147557" + "@id": "http://purl.obolibrary.org/obo/NCI_C147557" } ], "http://www.w3.org/ns/shacl#prefix": [ { - "@value": "TSCYC Questionnaire Question" + "@value": "OBO" } ] } diff --git a/tests/test_loaders_dumpers/input/obo_sample_nested.ttl b/tests/test_loaders_dumpers/input/obo_sample_nested.ttl deleted file mode 100644 index ece56d36..00000000 --- a/tests/test_loaders_dumpers/input/obo_sample_nested.ttl +++ /dev/null @@ -1,23 +0,0 @@ -@prefix ns1: . -@prefix ns2: . -@prefix ns3: . - - ns2:broader ; - ns2:definition "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men." 
; - ns2:inScheme ; - ns2:notation "C147796" ; - ns2:prefLabel "TSCYC - Being Frightened of Men" ; - ns2:seeAlso "http://purl.obolibrary.org/obo/NCI_C147796" . - - ns2:broader ; - ns2:definition "A question associated with the TSCYC questionnaire." ; - ns2:inScheme ; - ns2:notation "C147557" ; - ns2:prefLabel "TSCYC Questionnaire Question" . - - ns2:hasConcept , - ; - ns1:prefix "OBO" . - -[] ns3:system . - diff --git a/tests/test_loaders_dumpers/jsonld_context/Dockerfile b/tests/test_loaders_dumpers/jsonld_context/Dockerfile index d577c612..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/Dockerfile +++ b/tests/test_loaders_dumpers/jsonld_context/Dockerfile @@ -1,23 +0,0 @@ -FROM nginx - -LABEL maintainer="Harold Solbrig " -LABEL description="Local docker image for loader/dumper testing context" - -# Add the application/ld+json to the mime types setting -COPY nginx/mime.types /etc/nginx/mime.types - -# Include the various CORS settings and the like into the config file -COPY nginx/nginx.conf /etc/nginx/conf.d/default.conf - -# A set of non-signed certificates -COPY nginx/context_server.crt /etc/nginx/certs/context_server.crt -COPY nginx/context_server.key /etc/nginx/certs/context_server.key - -# Add vim to the server so we can edit via exec if so desired -RUN apt-get update -y && \ - apt-get install apt-file -y && \ - apt-file update && \ - apt-get install vim -y && \ - rm -rf /var/cache/apk/* - -EXPOSE 80 443 diff --git a/tests/test_loaders_dumpers/jsonld_context/ds.sh b/tests/test_loaders_dumpers/jsonld_context/ds.sh old mode 100755 new mode 100644 diff --git a/tests/test_loaders_dumpers/jsonld_context/jsonld_10/termci_schema.context.jsonld b/tests/test_loaders_dumpers/jsonld_context/jsonld_10/termci_schema.context.jsonld index 35b6655a..0108b5f5 100644 --- a/tests/test_loaders_dumpers/jsonld_context/jsonld_10/termci_schema.context.jsonld +++ b/tests/test_loaders_dumpers/jsonld_context/jsonld_10/termci_schema.context.jsonld @@ -1,7 +1,5 @@ { - "_comments": "Auto generated from termci_schema.yaml by jsonldcontextgen.py version: 0.1.1\nGeneration date: 2021-02-12 11:24\nSchema: termci_schema\n\nid: https://w3id.org/termci_schema\ndescription: Terminology Code Index model\nlicense: https://creativecommons.org/publicdomain/zero/1.0/\n", - "@context": { - "type": "@type", + "@context": { "linkml": "https://w3id.org/linkml/", "dc": "http://purl.org/dc/elements/1.1/", "sct": "http://snomed.info/id/", @@ -14,7 +12,8 @@ }, "uri": "@id", "contents": { - "@type": "@id" + "@type": "@id", + "@id": "skos:hasConcept" }, "defined_in": { "@type": "@id", @@ -35,7 +34,9 @@ "@id": "skos:broader" }, "system": { - "@type": "@id" + "@type": "@id", + "@id": "termci:system", + "@container": "@set" }, "prefix": { "@id": "sh:prefix" @@ -54,6 +55,7 @@ "ConceptSystem": { "@id": "skos:ConceptScheme" } - } + }, + "@type": "Package" } diff --git a/tests/test_loaders_dumpers/jsonld_context/jsonld_11/Package.context.jsonld b/tests/test_loaders_dumpers/jsonld_context/jsonld_11/Package.context.jsonld index 0032fe9c..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/jsonld_11/Package.context.jsonld +++ b/tests/test_loaders_dumpers/jsonld_context/jsonld_11/Package.context.jsonld @@ -1,28 +0,0 @@ -{ - "@context": [ - "termci_namespaces.context.jsonld", - { - "@vocab": "https://hotecosystem.org/termci/", - "type": "@type", - "system": { - "@type": "@id", - "@id": "termci:system", - "@container": "@set", - "@context": "ConceptSystem.context.jsonld" - } - } - ], - "@omitGraph": true, - "@type": 
"termci:Package", - "system": { - "@embed": "@always", - "contents": { - "defined_in": { - "@embed": "@never" - }, - "narrower_than": { - "@embed": "@never" - } - } - } -} diff --git a/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.crt b/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.crt index 29a7ac66..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.crt +++ b/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.crt @@ -1,18 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIC5jCCAc4CCQD+EPKogWb9XjANBgkqhkiG9w0BAQsFADA1MQswCQYDVQQGEwJV -UzESMBAGA1UECAwJTWlubmVzb3RhMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMjEw -MjEwMjMzMjIzWhcNMjIwMjEwMjMzMjIzWjA1MQswCQYDVQQGEwJVUzESMBAGA1UE -CAwJTWlubmVzb3RhMRIwEAYDVQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDQ0u6ntViMr0p2gZDQqWkIcTu5bgCxeCFaX0RxfQ2O -v5MBgnrYDjyHL2frcahTa6XN9F25qfGlRnTkBdS7TPQKAP7m4IlFkHDa7ylfdLyi -X+J4nqeSZNqLANwl9wsXtTqeutrwrFFQPyldOlIol6PS6r51KHU0R4rmqnEeZ3LT -+OVclJTCOs7TLorcV24RKFdmXjcQDvNNljvahh5Lag/j3TFOiY/ky0qgnQ0BKj2a -UgppQo60Qanolt9uJKWKGuHXrRorsK8KxxsPFZPz6BFhZYVrffEny0rPjWdwMeO/ -SrlZooX0dLag9abKVyWy70JU+YkxFipL00DBzsBDOVVjAgMBAAEwDQYJKoZIhvcN -AQELBQADggEBAMb5ENzB3hNbpJIiRl5zvLprqq7vE1jJdu/xHTyJZoTC3QjEmOZd -qNy1yI23tx0Mb7/MrFgMVDZpOc8gXGjJzlg5x4Q3DeFYNxd2n6B5+H6xzNyk4ocw -/8ifmI3dtsv98LisvYvLAI237t/Pp6n0IjNoEkUbhh8qQe9Ua5gCM9S+9AFcNU9z -itlzm3GTF94H5ImRmPgeNfODikKD/ujkJh6djadkYVbttbXeLNpTPbmPNtWDQZsz -SLwMTEGm0HEvlskN5Zhqgaxd3+Y2/+VSyQEb75H+17Lr4UBGz2i7pjrZRT2uSH+r -ZqOf2K8DSxlNcbmfiGyeGviByawq8tuFtGQ= ------END CERTIFICATE----- diff --git a/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.key b/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.key index edcf4dde..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.key +++ b/tests/test_loaders_dumpers/jsonld_context/nginx/context_server.key @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDQ0u6ntViMr0p2 -gZDQqWkIcTu5bgCxeCFaX0RxfQ2Ov5MBgnrYDjyHL2frcahTa6XN9F25qfGlRnTk -BdS7TPQKAP7m4IlFkHDa7ylfdLyiX+J4nqeSZNqLANwl9wsXtTqeutrwrFFQPyld -OlIol6PS6r51KHU0R4rmqnEeZ3LT+OVclJTCOs7TLorcV24RKFdmXjcQDvNNljva -hh5Lag/j3TFOiY/ky0qgnQ0BKj2aUgppQo60Qanolt9uJKWKGuHXrRorsK8KxxsP -FZPz6BFhZYVrffEny0rPjWdwMeO/SrlZooX0dLag9abKVyWy70JU+YkxFipL00DB -zsBDOVVjAgMBAAECggEBAMRaQiuRuhR20qaBh+CcGvXkOwtNovs8gOgQAjRQKMpb -2mel/8peAI4Ahk99jI/7/Wid6O8mG+CTUmACF5iScDlj3ErNQGYXvfYuwZFPG/v1 -+uX+EIT3rgLcKSTNmTGOp/MOscVx0feJnGRb7Vrb0yAEuFdDVol05+eixkCXdwJW -KGOPUkfDRxfY1Sufo6TwHEpcnrvjYOkgZAwVpCsME2+7bQUHyxMvdnmWBlA3p5kZ -yXSDCWGCD6R1u+yua1VYWmj9C/AK2xFOxxAQ1J8Aw3EZPVShw7ZufMDG3cqylJqg -tpUopvOcj1v/F7Mq/YdBTdUgiqmPtMujuCSzcsD7B1kCgYEA8Xav79tEqB0huXGR -B99dFILTcwPQmT95QcxGI1xrGbx9R0qbsH4gba9GOjLzYFUNO2Z86RfDfXE2fb9+ -lDTtN4ZTnhHVlApLM/IqUtaPxW0sQ/1MbOidrZzvxzOSrhQSX/IRi9pIflWL2KaN -3dq1RZpEhJLnHH7Sb0FRB8u1S88CgYEA3WU4IjZBoIO1anFW7hqdMpGdmMl83c1i -RylZg/5JcR0F0i0WKAn4cAVoDk50DK4uAN+iDJouZe9EkBV9rwsKy5E7fKc04RlC -xotA1pdHtf4uzTXhr67qVaKAL62UamCsleIfAwQpQgz7D7S21jtk81fN5iMulT71 -HMrmfF1rXi0CgYBBEef1rlRBLaCR9d3Mjk/qIBtValjDg4xsw9y+2L/v7pCgLqXo -GQUqKcq2Elbtxd3L/SF6OzIuUt0JZBp7XAv1I5jTByFURLXhaMgpDLMv0+zS4aa/ -zqv3RkyDlQ1ZJgWRn41E7+0LRohqOPAQMkkZWapPQ7tOPYr9+k8Y17eilwKBgQC7 -di1lqpM/GsbN7lIxIo/gdbP/3vu/w1xn7PdC8Fm3UV/QayTLmkWmdK17D6rr4cBK -NRG9of+PiF2BsvDM+oiVfc/k6J+5ye3kt5ybF1U9IJD4aZtDatQ41nO76zo0KM95 -yhGAOFHG/77FubrnAzujYMGTEE8tSDW7v4IR4/g5sQKBgQDpSq60WK42/jRF+IyC 
-fxzZjXgH5v7/izUh2EIf4CnvplaQ7f8v3ztq+iatBQbNH2mryDBsUP/7sJM9oYi1 -AwRBZbNFM0IE4rzwaxq0YcjyGqV5aQC5hwHK2DZj9UhDQPCVzHIxlKJ8KEqi81Rd -m9AicmxZ/tpB6F7W/a9es07eGg== ------END PRIVATE KEY----- diff --git a/tests/test_loaders_dumpers/jsonld_context/nginx/localhost.cnf b/tests/test_loaders_dumpers/jsonld_context/nginx/localhost.cnf index 57d711fd..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/nginx/localhost.cnf +++ b/tests/test_loaders_dumpers/jsonld_context/nginx/localhost.cnf @@ -1,8 +0,0 @@ -[req] -distinguished_name = context_server -prompt = no - -[context_server] -C = US -ST = Minnesota -CN = localhost \ No newline at end of file diff --git a/tests/test_loaders_dumpers/jsonld_context/nginx/mime.types b/tests/test_loaders_dumpers/jsonld_context/nginx/mime.types index 8d512519..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/nginx/mime.types +++ b/tests/test_loaders_dumpers/jsonld_context/nginx/mime.types @@ -1,98 +0,0 @@ - -types { - text/html html htm shtml; - text/css css; - text/xml xml; - image/gif gif; - image/jpeg jpeg jpg; - application/javascript js; - application/atom+xml atom; - application/rss+xml rss; - - text/mathml mml; - text/plain txt; - text/vnd.sun.j2me.app-descriptor jad; - text/vnd.wap.wml wml; - text/x-component htc; - - image/png png; - image/svg+xml svg svgz; - image/tiff tif tiff; - image/vnd.wap.wbmp wbmp; - image/webp webp; - image/x-icon ico; - image/x-jng jng; - image/x-ms-bmp bmp; - - font/woff woff; - font/woff2 woff2; - - application/java-archive jar war ear; - application/json json; - application/ld+json jsonld; - application/mac-binhex40 hqx; - application/msword doc; - application/pdf pdf; - application/postscript ps eps ai; - application/rtf rtf; - application/vnd.apple.mpegurl m3u8; - application/vnd.google-earth.kml+xml kml; - application/vnd.google-earth.kmz kmz; - application/vnd.ms-excel xls; - application/vnd.ms-fontobject eot; - application/vnd.ms-powerpoint ppt; - application/vnd.oasis.opendocument.graphics odg; - application/vnd.oasis.opendocument.presentation odp; - application/vnd.oasis.opendocument.spreadsheet ods; - application/vnd.oasis.opendocument.text odt; - application/vnd.openxmlformats-officedocument.presentationml.presentation - pptx; - application/vnd.openxmlformats-officedocument.spreadsheetml.sheet - xlsx; - application/vnd.openxmlformats-officedocument.wordprocessingml.document - docx; - application/vnd.wap.wmlc wmlc; - application/x-7z-compressed 7z; - application/x-cocoa cco; - application/x-java-archive-diff jardiff; - application/x-java-jnlp-file jnlp; - application/x-makeself run; - application/x-perl pl pm; - application/x-pilot prc pdb; - application/x-rar-compressed rar; - application/x-redhat-package-manager rpm; - application/x-sea sea; - application/x-shockwave-flash swf; - application/x-stuffit sit; - application/x-tcl tcl tk; - application/x-x509-ca-cert der pem crt; - application/x-xpinstall xpi; - application/xhtml+xml xhtml; - application/xspf+xml xspf; - application/zip zip; - - application/octet-stream bin exe dll; - application/octet-stream deb; - application/octet-stream dmg; - application/octet-stream iso img; - application/octet-stream msi msp msm; - - audio/midi mid midi kar; - audio/mpeg mp3; - audio/ogg ogg; - audio/x-m4a m4a; - audio/x-realaudio ra; - - video/3gpp 3gpp 3gp; - video/mp2t ts; - video/mp4 mp4; - video/mpeg mpeg mpg; - video/quicktime mov; - video/webm webm; - video/x-flv flv; - video/x-m4v m4v; - video/x-mng mng; - video/x-ms-asf asx asf; - video/x-ms-wmv 
wmv; - video/x-msvideo avi; -} diff --git a/tests/test_loaders_dumpers/jsonld_context/nginx/nginx.conf b/tests/test_loaders_dumpers/jsonld_context/nginx/nginx.conf index cd0a57bd..e69de29b 100644 --- a/tests/test_loaders_dumpers/jsonld_context/nginx/nginx.conf +++ b/tests/test_loaders_dumpers/jsonld_context/nginx/nginx.conf @@ -1,26 +0,0 @@ -server { - listen 80; - listen 443 ssl; - ssl_certificate /etc/nginx/certs/context_server.crt; - ssl_certificate_key /etc/nginx/certs/context_server.key; - listen [::]:443 ssl; - server_name localhost; - - #access_log /var/log/nginx/host.access.log main; - - location / { - root /usr/share/nginx/html; - try_files $uri.jsonld $uri $uri/ = 404; - autoindex on; - - - if ($request_method = 'GET') { - add_header 'Access-Control-Expose-Headers' 'Content-Length, Content-Range'; - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Credentials' 'true'; - add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; - add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type'; - } - } -} - diff --git a/tests/test_loaders_dumpers/ldtestcase.py b/tests/test_loaders_dumpers/loaderdumpertestcase.py similarity index 81% rename from tests/test_loaders_dumpers/ldtestcase.py rename to tests/test_loaders_dumpers/loaderdumpertestcase.py index 3b9cf0d5..f64170ef 100644 --- a/tests/test_loaders_dumpers/ldtestcase.py +++ b/tests/test_loaders_dumpers/loaderdumpertestcase.py @@ -1,16 +1,17 @@ import os -import urllib -from typing import Callable, Type, Union, TextIO, Optional, List +from typing import Callable, Type, Optional, List +from urllib.parse import urlparse from hbreader import FileInfo, hbread +import tests.environment as test_base from linkml_runtime.dumpers import yaml_dumper +from linkml_runtime.loaders.loader_root import Loader from linkml_runtime.utils.yamlutils import YAMLRoot from tests.support.test_environment import TestEnvironment, TestEnvironmentTestCase -import tests.environment as test_base -class LDTestCase(TestEnvironmentTestCase): +class LoaderDumperTestCase(TestEnvironmentTestCase): env = TestEnvironment(__file__) def dump_test(self, filename: str, dumper: Callable[[str], None], comparator: Callable[[str], str] = None)\ @@ -23,7 +24,7 @@ def dump_test(self, filename: str, dumper: Callable[[str], None], comparator: C :returns: Success indicator """ actual_file = self.env.actual_path(filename) - expected_file = self.env.expected_path(filename.replace('.', '_d.')) + expected_file = self.env.expected_path('dump', filename) dumper(actual_file) @@ -39,11 +40,11 @@ def dumps_test(self, filename: str, dumper: Callable[[], str], comparator: Calla :param comparator: content comparator """ actual = dumper() - expected_file = self.env.expected_path(filename.replace('.', '_ds.')) + expected_file = self.env.expected_path('dumps', filename) return self.env.eval_single_file(expected_file, actual, comparator=comparator) - def loader_test(self, filename: str, model: Type[YAMLRoot], loader) -> None: + def loader_test(self, filename: str, model: Type[YAMLRoot], loader: Loader) -> None: """ Test the various permutations of the supplied loader using the input file 'filename' -- both load and loads @@ -53,14 +54,15 @@ def loader_test(self, filename: str, model: Type[YAMLRoot], loader) -> None: """ metadata = FileInfo() name, typ = filename.rsplit('.', 1) - expected_yaml = self.env.expected_path(name + '_' + typ + ".yaml") - python_obj: YAMLRoot = 
loader.load(filename, self.env.indir, model, metadata) + expected_yaml = self.env.expected_path('load', name + '_' + typ + ".yaml") + python_obj: YAMLRoot = loader.load(filename, model, metadata=metadata, base_dir=self.env.indir) self.env.eval_single_file(expected_yaml, yaml_dumper.dumps(python_obj)) # Make sure metadata gets filled out properly rel_path = os.path.abspath(os.path.join(test_base.env.cwd, '..')) self.assertEqual('tests/test_loaders_dumpers/input', os.path.relpath(metadata.base_path, rel_path)) - self.assertEqual(f'tests/test_loaders_dumpers/input/{filename}', os.path.relpath(metadata.source_file, rel_path)) + self.assertEqual(f'tests/test_loaders_dumpers/input/{filename}', os.path.relpath(metadata.source_file, + rel_path)) fileinfo = FileInfo() hbread(filename, fileinfo, self.env.indir) @@ -68,7 +70,7 @@ def loader_test(self, filename: str, model: Type[YAMLRoot], loader) -> None: # Load from a string expected = hbread(filename, base_path=self.env.indir) - python_obj: YAMLRoot = loader.loads(expected, model, metadata.clear()) + python_obj: YAMLRoot = loader.loads(expected, model, metadata=metadata.clear()) self.env.eval_single_file(expected_yaml, yaml_dumper.dumps(python_obj)) @staticmethod @@ -80,7 +82,7 @@ def check_context_servers(possible_server: List[str]) -> Optional[str]: :return: Particular server to use """ def is_listening(svr: str) -> bool: - components = urllib.parse.urlparse(svr) + components = urlparse(svr) import socket with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: return s.connect_ex((components.hostname, components.port)) == 0 diff --git a/tests/test_loaders_dumpers/models/termci_schema.py b/tests/test_loaders_dumpers/models/termci_schema.py index 9a00a2c2..389a753b 100644 --- a/tests/test_loaders_dumpers/models/termci_schema.py +++ b/tests/test_loaders_dumpers/models/termci_schema.py @@ -1,5 +1,5 @@ # Auto generated from termci_schema.yaml by pythongen.py version: 0.9.0 -# Generation date: 2021-02-12 11:22 +# Generation date: 2021-04-22 12:19 # Schema: termci_schema # # id: https://w3id.org/termci_schema @@ -11,21 +11,17 @@ import re from typing import Optional, List, Union, Dict, ClassVar, Any from dataclasses import dataclass -from linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions from linkml_runtime.utils.slot import Slot from linkml_runtime.utils.metamodelcore import empty_list, empty_dict, bnode from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int -if sys.version_info < (3, 7, 6): - from linkml_runtime.utils.dataclass_extensions_375 import dataclasses_init_fn_with_kwargs -else: - from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs +from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs from linkml_runtime.utils.formatutils import camelcase, underscore, sfx from linkml_runtime.utils.enumerations import EnumDefinitionImpl from rdflib import Namespace, URIRef from linkml_runtime.utils.curienamespace import CurieNamespace from linkml_runtime.utils.metamodelcore import URI, URIorCURIE -from linkml_model.types import String, Uri, Uriorcurie +from linkml_runtime.linkml_model.types import String, Uri, Uriorcurie metamodel_version = "1.7.0" @@ -33,8 +29,8 @@ dataclasses._init_fn = dataclasses_init_fn_with_kwargs # Namespaces -BIOLINKML = CurieNamespace('linkml', 'https://w3id.org/linkml/') DC = CurieNamespace('dc', 'http://purl.org/dc/elements/1.1/') +LINKML = CurieNamespace('linkml', 
'https://w3id.org/linkml/') SCT = CurieNamespace('sct', 'http://snomed.info/id/') SH = CurieNamespace('sh', 'http://www.w3.org/ns/shacl#') SKOS = CurieNamespace('skos', 'http://www.w3.org/2004/02/skos/core#') @@ -157,7 +153,7 @@ def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]): if self.contents is None: self.contents = [] - if not isinstance(self.contents, (list)): + if not isinstance(self.contents, (list, dict)): self.contents = [self.contents] self._normalize_inlined_slot(slot_name="contents", slot_type=ConceptReference, key_name="uri", inlined_as_list=True, keyed=True) @@ -181,7 +177,7 @@ class Package(YAMLRoot): def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]): if self.system is None: self.system = [] - if not isinstance(self.system, (list)): + if not isinstance(self.system, (list, dict)): self.system = [self.system] self._normalize_inlined_slot(slot_name="system", slot_type=ConceptSystem, key_name="namespace", inlined_as_list=True, keyed=True) @@ -192,44 +188,4 @@ def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]): # Slots -class slots: - pass - -slots.code = Slot(uri=SKOS.notation, name="code", curie=SKOS.curie('notation'), - model_uri=TERMCI.code, domain=None, range=str) - -slots.designation = Slot(uri=SKOS.prefLabel, name="designation", curie=SKOS.curie('prefLabel'), - model_uri=TERMCI.designation, domain=None, range=Optional[str]) - -slots.definition = Slot(uri=SKOS.definition, name="definition", curie=SKOS.curie('definition'), - model_uri=TERMCI.definition, domain=None, range=Optional[str]) - -slots.reference = Slot(uri=SKOS.seeAlso, name="reference", curie=SKOS.curie('seeAlso'), - model_uri=TERMCI.reference, domain=None, range=Optional[Union[Union[str, URI], List[Union[str, URI]]]]) - -slots.defined_in = Slot(uri=SKOS.inScheme, name="defined_in", curie=SKOS.curie('inScheme'), - model_uri=TERMCI.defined_in, domain=None, range=Union[str, ConceptSystemNamespace]) - -slots.narrower_than = Slot(uri=SKOS.broader, name="narrower_than", curie=SKOS.curie('broader'), - model_uri=TERMCI.narrower_than, domain=None, range=Optional[Union[Union[str, ConceptReferenceUri], List[Union[str, ConceptReferenceUri]]]]) - -slots.prefix = Slot(uri=SH.prefix, name="prefix", curie=SH.curie('prefix'), - model_uri=TERMCI.prefix, domain=None, range=str) - -slots.namespace = Slot(uri=SH.namespace, name="namespace", curie=SH.curie('namespace'), - model_uri=TERMCI.namespace, domain=None, range=URIRef) - -slots.root_concept = Slot(uri=SKOS.hasTopConcept, name="root_concept", curie=SKOS.curie('hasTopConcept'), - model_uri=TERMCI.root_concept, domain=None, range=Optional[Union[Union[str, ConceptReferenceUri], List[Union[str, ConceptReferenceUri]]]]) - -slots.description = Slot(uri=DC.description, name="description", curie=DC.curie('description'), - model_uri=TERMCI.description, domain=None, range=Optional[str]) - -slots.concept_uri = Slot(uri=TERMCI.uri, name="concept_uri", curie=TERMCI.curie('uri'), - model_uri=TERMCI.concept_uri, domain=None, range=URIRef) - -slots.contents = Slot(uri=TERMCI.contents, name="contents", curie=TERMCI.curie('contents'), - model_uri=TERMCI.contents, domain=None, range=Optional[Union[Dict[Union[str, ConceptReferenceUri], Union[dict, ConceptReference]], List[Union[dict, ConceptReference]]]]) -slots.package__system = Slot(uri=TERMCI.system, name="package__system", curie=TERMCI.curie('system'), - model_uri=TERMCI.package__system, domain=None, range=Optional[Union[Dict[Union[str, ConceptSystemNamespace], Union[dict, ConceptSystem]], 
List[Union[dict, ConceptSystem]]]]) diff --git a/tests/test_loaders_dumpers/output/generated b/tests/test_loaders_dumpers/output/generated deleted file mode 100644 index f79ee716..00000000 --- a/tests/test_loaders_dumpers/output/generated +++ /dev/null @@ -1 +0,0 @@ -Generated for safety. Directory will not be cleared if this file is not present \ No newline at end of file diff --git a/tests/test_loaders_dumpers/output/obo_sample_context_d.json b/tests/test_loaders_dumpers/output/obo_sample_context_d.json deleted file mode 100644 index 95d6cf2d..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_context_d.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "system": [ - { - "namespace": "http://purl.obolibrary.org/obo/", - "prefix": "OBO", - "contents": [ - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147796", - "code": "C147796", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC - Being Frightened of Men", - "definition": "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men.", - "reference": [ - "http://purl.obolibrary.org/obo/NCI_C147796" - ], - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C147557" - ] - }, - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147557", - "code": "C147557", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC Questionnaire Question", - "definition": "A question associated with the TSCYC questionnaire.", - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C91102" - ] - } - ] - } - ], - "@type": "Package", - "@context": {} -} \ No newline at end of file diff --git a/tests/test_loaders_dumpers/output/obo_sample_context_ds.json b/tests/test_loaders_dumpers/output/obo_sample_context_ds.json deleted file mode 100644 index 95d6cf2d..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_context_ds.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "system": [ - { - "namespace": "http://purl.obolibrary.org/obo/", - "prefix": "OBO", - "contents": [ - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147796", - "code": "C147796", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC - Being Frightened of Men", - "definition": "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men.", - "reference": [ - "http://purl.obolibrary.org/obo/NCI_C147796" - ], - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C147557" - ] - }, - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147557", - "code": "C147557", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC Questionnaire Question", - "definition": "A question associated with the TSCYC questionnaire.", - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C91102" - ] - } - ] - } - ], - "@type": "Package", - "@context": {} -} \ No newline at end of file diff --git a/tests/test_loaders_dumpers/output/obo_sample_d.json b/tests/test_loaders_dumpers/output/obo_sample_d.json deleted file mode 100644 index 8d59db9a..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_d.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "system": [ - { - "namespace": "http://purl.obolibrary.org/obo/", - "prefix": "OBO", - "contents": [ - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147796", - "code": "C147796", - "defined_in": "http://purl.obolibrary.org/obo/", - 
"designation": "TSCYC - Being Frightened of Men", - "definition": "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men.", - "reference": [ - "http://purl.obolibrary.org/obo/NCI_C147796" - ], - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C147557" - ] - }, - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147557", - "code": "C147557", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC Questionnaire Question", - "definition": "A question associated with the TSCYC questionnaire.", - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C91102" - ] - } - ] - } - ], - "@type": "Package" -} \ No newline at end of file diff --git a/tests/test_loaders_dumpers/output/obo_sample_d.yaml b/tests/test_loaders_dumpers/output/obo_sample_d.yaml deleted file mode 100644 index c4f8a162..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_d.yaml +++ /dev/null @@ -1,22 +0,0 @@ -system: -- namespace: http://purl.obolibrary.org/obo/ - prefix: OBO - contents: - - uri: http://purl.obolibrary.org/obo/NCI_C147796 - code: C147796 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC - Being Frightened of Men - definition: 'Trauma Symptom Checklist for Young Children (TSCYC) Please indicate - how often the child has done, felt, or experienced each of the following things - in the last month: Being frightened of men.' - reference: - - http://purl.obolibrary.org/obo/NCI_C147796 - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C147557 - - uri: http://purl.obolibrary.org/obo/NCI_C147557 - code: C147557 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC Questionnaire Question - definition: A question associated with the TSCYC questionnaire. 
- narrower_than: - - http://purl.obolibrary.org/obo/NCI_C91102 diff --git a/tests/test_loaders_dumpers/output/obo_sample_ds.json b/tests/test_loaders_dumpers/output/obo_sample_ds.json deleted file mode 100644 index 8d59db9a..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_ds.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "system": [ - { - "namespace": "http://purl.obolibrary.org/obo/", - "prefix": "OBO", - "contents": [ - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147796", - "code": "C147796", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC - Being Frightened of Men", - "definition": "Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often the child has done, felt, or experienced each of the following things in the last month: Being frightened of men.", - "reference": [ - "http://purl.obolibrary.org/obo/NCI_C147796" - ], - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C147557" - ] - }, - { - "uri": "http://purl.obolibrary.org/obo/NCI_C147557", - "code": "C147557", - "defined_in": "http://purl.obolibrary.org/obo/", - "designation": "TSCYC Questionnaire Question", - "definition": "A question associated with the TSCYC questionnaire.", - "narrower_than": [ - "http://purl.obolibrary.org/obo/NCI_C91102" - ] - } - ] - } - ], - "@type": "Package" -} \ No newline at end of file diff --git a/tests/test_loaders_dumpers/output/obo_sample_ds.yaml b/tests/test_loaders_dumpers/output/obo_sample_ds.yaml deleted file mode 100644 index c4f8a162..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_ds.yaml +++ /dev/null @@ -1,22 +0,0 @@ -system: -- namespace: http://purl.obolibrary.org/obo/ - prefix: OBO - contents: - - uri: http://purl.obolibrary.org/obo/NCI_C147796 - code: C147796 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC - Being Frightened of Men - definition: 'Trauma Symptom Checklist for Young Children (TSCYC) Please indicate - how often the child has done, felt, or experienced each of the following things - in the last month: Being frightened of men.' - reference: - - http://purl.obolibrary.org/obo/NCI_C147796 - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C147557 - - uri: http://purl.obolibrary.org/obo/NCI_C147557 - code: C147557 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC Questionnaire Question - definition: A question associated with the TSCYC questionnaire. - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C91102 diff --git a/tests/test_loaders_dumpers/output/obo_sample_json.yaml b/tests/test_loaders_dumpers/output/obo_sample_json.yaml deleted file mode 100644 index c4f8a162..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_json.yaml +++ /dev/null @@ -1,22 +0,0 @@ -system: -- namespace: http://purl.obolibrary.org/obo/ - prefix: OBO - contents: - - uri: http://purl.obolibrary.org/obo/NCI_C147796 - code: C147796 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC - Being Frightened of Men - definition: 'Trauma Symptom Checklist for Young Children (TSCYC) Please indicate - how often the child has done, felt, or experienced each of the following things - in the last month: Being frightened of men.' 
- reference: - - http://purl.obolibrary.org/obo/NCI_C147796 - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C147557 - - uri: http://purl.obolibrary.org/obo/NCI_C147557 - code: C147557 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC Questionnaire Question - definition: A question associated with the TSCYC questionnaire. - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C91102 diff --git a/tests/test_loaders_dumpers/output/obo_sample_yaml.yaml b/tests/test_loaders_dumpers/output/obo_sample_yaml.yaml deleted file mode 100644 index c4f8a162..00000000 --- a/tests/test_loaders_dumpers/output/obo_sample_yaml.yaml +++ /dev/null @@ -1,22 +0,0 @@ -system: -- namespace: http://purl.obolibrary.org/obo/ - prefix: OBO - contents: - - uri: http://purl.obolibrary.org/obo/NCI_C147796 - code: C147796 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC - Being Frightened of Men - definition: 'Trauma Symptom Checklist for Young Children (TSCYC) Please indicate - how often the child has done, felt, or experienced each of the following things - in the last month: Being frightened of men.' - reference: - - http://purl.obolibrary.org/obo/NCI_C147796 - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C147557 - - uri: http://purl.obolibrary.org/obo/NCI_C147557 - code: C147557 - defined_in: http://purl.obolibrary.org/obo/ - designation: TSCYC Questionnaire Question - definition: A question associated with the TSCYC questionnaire. - narrower_than: - - http://purl.obolibrary.org/obo/NCI_C91102 diff --git a/tests/test_loaders_dumpers/test_dumpers.py b/tests/test_loaders_dumpers/test_dumpers.py index 7ac11de0..01bc3a91 100644 --- a/tests/test_loaders_dumpers/test_dumpers.py +++ b/tests/test_loaders_dumpers/test_dumpers.py @@ -5,9 +5,10 @@ from rdflib import Namespace, SKOS, Literal from linkml_runtime.dumpers import yaml_dumper, json_dumper, rdf_dumper +from linkml_runtime.utils.yamlutils import as_json_object from tests.test_loaders_dumpers import LD_11_DIR, LD_11_SSL_SVR, LD_11_SVR, HTTP_TEST_PORT, HTTPS_TEST_PORT, \ GITHUB_LD10_CONTEXT, GITHUB_LD11_CONTEXT -from tests.test_loaders_dumpers.ldtestcase import LDTestCase +from tests.test_loaders_dumpers.loaderdumpertestcase import LoaderDumperTestCase from tests.test_loaders_dumpers.models.termci_schema import ConceptReference, ConceptSystem, Package from tests.support.clicktestcase import ClickTestCase @@ -15,12 +16,13 @@ NCIT = Namespace("http://purl.obolibrary.org/obo/NCI_") -class DumpersTestCase(LDTestCase): +class DumpersTestCase(LoaderDumperTestCase): + pass @classmethod def setUpClass(cls) -> None: """ Generate a small sample TermCI instance for testing purposes """ - LDTestCase.setUpClass() + LoaderDumperTestCase.setUpClass() e1 = ConceptReference(OBO.NCI_C147796, code="C147796", defined_in=OBO, designation="TSCYC - Being Frightened of Men", definition="Trauma Symptom Checklist for Young Children (TSCYC) Please indicate how often" @@ -45,7 +47,7 @@ def test_json_dumper(self): # TODO: Same as test_yaml_dumper self.dump_test('obo_sample.json', lambda out_fname: json_dumper.dump(self.test_package, out_fname)) - obo_json_obj = cast(Package, json_dumper.as_json_object(self.test_package)) + obo_json_obj = cast(Package, as_json_object(self.test_package)) self.assertEqual(OBO, obo_json_obj.system[0].namespace) self.assertEqual('C147796', obo_json_obj.system[0].contents[0].code) @@ -57,7 +59,7 @@ def test_json_dumper(self): lambda: json_dumper.dumps(self.test_package, GITHUB_LD11_CONTEXT + 
'termci_schema_inlined.context.jsonld')) - @unittest.skip("This needs an enhanced (https://github.com/hsolbrig/pyld) version of pyld") + @unittest.skipIf(False, "This needs an enhanced (https://github.com/hsolbrig/pyld) version of pyld") def test_rdf_dumper(self): """ Test the rdf dumper """ contexts = os.path.join(LD_11_DIR, 'termci_schema_inlined.context.jsonld') @@ -74,10 +76,9 @@ def test_rdf_dumper(self): # Build a vanilla jsonld image for subsequent testing fname = 'obo_sample.jsonld' - dumped_fname = 'obo_sample_d.jsonld' # Dump_test appends the "_d" self.dump_test(fname, lambda out_file: rdf_dumper.dump(self.test_package, out_file, contexts, fmt='json-ld'), comparator=lambda e, a: ClickTestCase.rdf_comparator(e, a, fmt='json-ld')) - with open(self.env.expected_path(dumped_fname)) as f: + with open(self.env.expected_path('dump', fname)) as f: txt = f.read() with open(self.env.input_path('obo_sample.jsonld'), 'w') as f: f.write(txt) diff --git a/tests/test_loaders_dumpers/test_ld_11_issue.py b/tests/test_loaders_dumpers/test_ld_11_issue.py index 16008253..4bffd304 100644 --- a/tests/test_loaders_dumpers/test_ld_11_issue.py +++ b/tests/test_loaders_dumpers/test_ld_11_issue.py @@ -85,7 +85,7 @@ def strip_cruft(entry: Dict) -> Dict: return json.dumps(json_obj, indent=' ') - @unittest.skip("uri will load as namespace until JSONLD 1.1 is working") + @unittest.skipIf(True, "uri will load as namespace until JSONLD 1.1 is working") def test_rdf_frame(self): options = dict(expandContext=context, base=str(TERMCI)) diff --git a/tests/test_loaders_dumpers/test_loaders.py b/tests/test_loaders_dumpers/test_loaders.py index 65783d82..b9a1c1e0 100644 --- a/tests/test_loaders_dumpers/test_loaders.py +++ b/tests/test_loaders_dumpers/test_loaders.py @@ -1,48 +1,58 @@ import os import unittest +from typing import Union, TextIO, Type, Optional -from linkml_runtime.loaders import yaml_loader, json_loader, rdf_loader -from tests.test_loaders_dumpers import LD_10_SVR, LD_10_SSL_SVR, LD_10_DIR +from hbreader import FileInfo + +from linkml_runtime.loaders import yaml_loader, json_loader, rdf_loader, RDFLoader +from linkml_runtime.utils.yamlutils import YAMLRoot +from tests.test_loaders_dumpers import LD_11_SVR, LD_11_SSL_SVR, LD_11_DIR from tests.test_loaders_dumpers.environment import env -from tests.test_loaders_dumpers.ldtestcase import LDTestCase +from tests.test_loaders_dumpers.loaderdumpertestcase import LoaderDumperTestCase from tests.test_loaders_dumpers.models.termci_schema import Package -class LoadersUnitTest(LDTestCase): +class LoadersUnitTest(LoaderDumperTestCase): env = env @classmethod def setUpClass(cls) -> None: - cls.context_server = cls.check_context_servers([LD_10_SVR, LD_10_SSL_SVR]) + cls.context_server = cls.check_context_servers([LD_11_SVR, LD_11_SSL_SVR]) if not cls.context_server: - cls.context_server = LD_10_DIR + cls.context_server = LD_11_DIR def test_yaml_loader(self): + """ Load obo_sample.yaml, emit obo_sample_yaml.yaml and compare to obo_sample_output.yaml """ self.loader_test('obo_sample.yaml', Package, yaml_loader) def test_json_loader(self): + """ Load obo_sample.json, emit obo_sample_json.yaml and check the results """ self.loader_test('obo_sample.json', Package, json_loader) - @unittest.skip("This needs an enhanced (https://github.com/hsolbrig/pyld) version of pyld") + @unittest.skipIf(True, "This test will not work until https://github.com/digitalbazaar/pyld/issues/149 is fixed") def test_rdf_loader(self): - if self.context_server == LD_10_DIR: + """ Load obo_sample.ttl, 
emit obo_sample_ttl.yaml and check the results + Load obo_sample.jsonld, emit obo_sample_jsonld.yaml and check the results + """ + if self.context_server == LD_11_DIR: raise unittest.SkipTest("*****> Loading skipped until JSON-LD processor can handle non-http files") - contexts = os.path.join(self.context_server, 'termci_schema.context.jsonld') + contexts = os.path.join(self.context_server, 'termci_schema_inlined.context.jsonld') fmt = 'turtle' - class loader_wrapper: - @staticmethod - def load(source, base, target, metadata): - return rdf_loader.load(source, base, target, contexts, fmt, metadata) + class RDFLoaderWrapper(RDFLoader): + def load(self, source: Union[str, dict, TextIO], target_class: Type[YAMLRoot], *, + base_dir: Optional[str] = None, metadata: Optional[FileInfo] = None, **_) -> YAMLRoot: + return rdf_loader.load(source, target_class, base_dir=LoadersUnitTest.env.indir, fmt=fmt, + metadata=metadata, contexts=contexts) - @staticmethod - def loads(source, target, metadata): - return rdf_loader.loads(source, target, contexts, fmt, metadata) + def loads(self, source: str, target_class: Type[YAMLRoot], *, metadata: Optional[FileInfo] = None, **_) \ + -> YAMLRoot: + return rdf_loader.loads(source, target_class, contexts=contexts, fmt=fmt, metadata=metadata) - self.loader_test('obo_sample.ttl', Package, loader_wrapper) + self.loader_test('obo_sample.ttl', Package, RDFLoaderWrapper()) fmt = 'json-ld' - self.loader_test('obo_sample.jsonld', Package, loader_wrapper) + self.loader_test('obo_sample.jsonld', Package, RDFLoaderWrapper()) if __name__ == '__main__': diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index 9034c98d..1e205f39 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -1,4 +1,3 @@ -from linkml_model.linkml_files import Source, Format, URL_FOR, LINKML_URL_BASE +METAMODEL_CONTEXT_URI = "https://w3id.org/linkml/meta.context.jsonld" +META_BASE_URI = "https://w3id.org/linkml/" -METAMODEL_CONTEXT_URI = URL_FOR(Source.META, Format.JSONLD) -META_BASE_URI = LINKML_URL_BASE diff --git a/tests/test_utils/input/yaml1.yaml b/tests/test_utils/input/yaml1.yaml index b2110390..b5244a31 100644 --- a/tests/test_utils/input/yaml1.yaml +++ b/tests/test_utils/input/yaml1.yaml @@ -1,3 +1,3 @@ name: YAML_with_duplicate_key f1: yaml1.yaml -f1: 17 \ No newline at end of file +f1: 17 diff --git a/tests/test_utils/input/yaml2.yaml b/tests/test_utils/input/yaml2.yaml index 73663f53..8c1b9091 100644 --- a/tests/test_utils/input/yaml2.yaml +++ b/tests/test_utils/input/yaml2.yaml @@ -3,4 +3,4 @@ f1: yaml1.yaml f2: f3: abc f4: def - f3: 17 \ No newline at end of file + f3: 17 diff --git a/tests/test_utils/test_formatutils.py b/tests/test_utils/test_formatutils.py index fb3c57c9..5952e550 100644 --- a/tests/test_utils/test_formatutils.py +++ b/tests/test_utils/test_formatutils.py @@ -35,6 +35,5 @@ def test_linestuff(self): hope. 
""", wrapped_annotation(text)) - if __name__ == '__main__': unittest.main() diff --git a/tests/test_utils/test_namespaces.py b/tests/test_utils/test_namespaces.py index 8c030ef3..ca513cb7 100644 --- a/tests/test_utils/test_namespaces.py +++ b/tests/test_utils/test_namespaces.py @@ -28,8 +28,9 @@ def test_namespaces(self): with self.assertRaises(ValueError): ns["123"] = "http://example.org/foo/" - with self.assertRaises(KeyError): + with self.assertRaises(KeyError) as e: ns.FOO + self.assertEqual("'foo'", str(e.exception), "Unknown namespace should raise a KeyError with a lower case entry") ns._default = ns['meta'] ns._default = ns['meta'] diff --git a/tests/test_utils/test_yaml_utils.py b/tests/test_utils/test_yaml_utils.py index 951dc701..07f3e333 100644 --- a/tests/test_utils/test_yaml_utils.py +++ b/tests/test_utils/test_yaml_utils.py @@ -1,11 +1,8 @@ import unittest import yaml -from jsonasobj import as_json -from linkml_model import SchemaDefinition -from linkml_runtime.loaders import yaml_loader -from linkml_runtime.utils.yamlutils import DupCheckYamlLoader, as_yaml +from linkml_runtime.utils.yamlutils import DupCheckYamlLoader from tests.support.test_environment import TestEnvironmentTestCase from tests.test_utils.environment import env @@ -28,10 +25,6 @@ def test_dupcheck_loader(self): s1 = yaml.load(f, DupCheckYamlLoader) self.assertEqual('schema1', s1['name']) - def test_as_json(self): - schema = yaml_loader.load(env.input_path('schema6.yaml'), SchemaDefinition) - env.eval_single_file(env.expected_path('schema6.json'), as_json(schema), filtr=lambda s: s) - if __name__ == '__main__': unittest.main() diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..3a64d70b --- /dev/null +++ b/tox.ini @@ -0,0 +1,13 @@ +[tox] +envlist = py37, py38, py39 +setenv = PIPENV_SKIP_LOCK=1 + PIPENV_DEV=1 + PIPENV_IGNORE_VIRTUALENVS=1 + +[testenv] +whitelist_externals = python +deps=unittest2 + tox-pipenv +commands= pipenv install --dev + pipenv run python -m unittest + comparefiles --help