Skip to content

Commit

Permalink
rough draft
Browse files Browse the repository at this point in the history
  • Loading branch information
mavaylon1 committed Jan 10, 2024
1 parent 748d305 commit bb40b5a
Show file tree
Hide file tree
Showing 8 changed files with 95 additions and 4 deletions.
16 changes: 16 additions & 0 deletions docs/gallery/example_config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Example termset configuration file.
# Maps a data type (within a namespace/version) to termset schema files that
# are used to validate specific fields when an object of that type is created.
- data_type: VectorData
  namespace:
  namespace_version:
  fields:
    # NOTE(review): this should be a repository-relative path, not a
    # developer-machine absolute path — confirm the intended location.
    data: tests/unit/example_test_term_set.yaml
    field2: ...
# Template for additional entries:
# - data_type: #Container2
#   namespace:
#   namespace_version:
#   fields:
#     - name:
#       doc:
#       termset_path:
#     - name:
#       doc:
#       termset_path:
6 changes: 5 additions & 1 deletion src/hdmf/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,13 @@
from .container import Container, Data, DataRegion, HERDManager
from .region import ListSlicer
from .utils import docval, getargs
from .term_set import TermSet, TermSetWrapper
from .term_set import TermSet, TermSetWrapper, TermSetConfigurator


# A global TermSetConfigurator instance shared across the package.
# NOTE: a ``global`` statement at module level is a no-op — the plain
# assignment below is all that is needed to create the module attribute.
TS_CONFIG = TermSetConfigurator()

@docval(
{"name": "dataset", "type": None, "doc": "the HDF5 dataset to slice"},
{"name": "region", "type": None, "doc": "the region reference to use to slice"},
Expand Down
4 changes: 1 addition & 3 deletions src/hdmf/build/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -410,6 +410,7 @@ def __init__(self, **kwargs):
self.__data_types = dict()
self.__default_mapper_cls = mapper_cls
self.__class_generator = ClassGenerator()
self.__load_termset_config = True
self.register_generator(CustomClassGenerator)
self.register_generator(MCIClassGenerator)

Expand Down Expand Up @@ -480,9 +481,6 @@ def load_namespaces(self, **kwargs):
self.register_container_type(new_ns, dt, container_cls)
return deps

def load_config(self, **kwargs):
pass

@docval({"name": "namespace", "type": str, "doc": "the namespace containing the data_type"},
{"name": "data_type", "type": str, "doc": "the data type to create a AbstractContainer class for"},
{"name": "autogen", "type": bool, "doc": "autogenerate class if one does not exist", "default": True},
Expand Down
17 changes: 17 additions & 0 deletions src/hdmf/common/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,23 @@ def available_namespaces():
return __TYPE_MAP.namespace_catalog.namespaces


@docval({'name': 'config_path', 'type': str, 'doc': 'Path to the configuration file.',
         'default': None})
def load_termset_config(**kwargs):
    """Load the configuration file used to validate fields on typed objects.

    By default the configurator's current (curated) configuration file is
    kept; pass ``config_path`` to load a custom configuration file instead.
    """
    # docval passes validated arguments as keyword arguments, so the wrapped
    # function must accept **kwargs and unpack them with getargs.
    config_path = getargs('config_path', kwargs)
    # Imported here to avoid a circular import at module load time; the
    # package root defines the global TS_CONFIG configurator.
    from .. import TS_CONFIG
    if config_path is None:
        # Fall back to the configurator's current path rather than
        # hard-coding a developer-machine absolute path as the default.
        config_path = TS_CONFIG.path
    return TS_CONFIG.load_termset_config(config_path)


def unload_termset_config():
    """Disable validation against the termset configuration file.

    Clears the global configurator's path so that no configuration-driven
    termset validation is applied to subsequently created objects.
    """
    # Imported here to avoid a circular import at module load time; the
    # package root defines the global TS_CONFIG configurator (the original
    # ``__TS_CONFIG`` name is not defined in this module).
    from .. import TS_CONFIG
    return TS_CONFIG.unload_termset_config()


# a function to get the container class for a give type
@docval({'name': 'data_type', 'type': str,
'doc': 'the data_type to get the Container class for'},
Expand Down
23 changes: 23 additions & 0 deletions src/hdmf/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from typing import Type
from uuid import uuid4
from warnings import warn
import yaml

import h5py
import numpy as np
Expand All @@ -13,6 +14,8 @@
from .data_utils import DataIO, append_data, extend_data
from .utils import docval, get_docval, getargs, ExtenderMeta, get_data_shape, popargs, LabelledDict

from .term_set import TermSet, TermSetWrapper


def _set_exp(cls):
"""Set a class as being experimental"""
Expand Down Expand Up @@ -232,6 +235,24 @@ def __init__(self, **kwargs):
self.__read_io = None
self.__obj = None

@docval({'name': 'fields', 'type': dict, 'doc': 'The fields/parameters/attibutes for the object.'})

Check failure on line 238 in src/hdmf/container.py

View workflow job for this annotation

GitHub Actions / Check for spelling errors

attibutes ==> attributes
def init_validation(self, fields):
# load termset configuartion file from global Config

Check failure on line 240 in src/hdmf/container.py

View workflow job for this annotation

GitHub Actions / Check for spelling errors

configuartion ==> configuration
from . import TS_CONFIG #update path
# Before calling super().__init__() and before setting fields, check for config file for
# validation via TermSetWrapper.
with open(TS_CONFIG.path, 'r') as config:
termset_config = yaml.safe_load(config)
object_name = self.__class__.__name__

for obj_config in termset_config:
if obj_config['data_type'] == object_name:
for attr in obj_config['fields']:
if attr in fields: # make sure any custom fields are not handled (i.e., make an extension)
termset_path = obj_config['fields'][attr]
termset = TermSet(term_schema_path=termset_path)
fields[attr] = TermSetWrapper(value=fields[attr], termset=termset)

@property
def read_io(self):
"""
Expand Down Expand Up @@ -785,6 +806,8 @@ class Data(AbstractContainer):
@docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'data', 'type': ('scalar_data', 'array_data', 'data'), 'doc': 'the source of the data'})
def __init__(self, **kwargs):
self.init_validation(fields=kwargs)
breakpoint()
data = popargs('data', kwargs)
super().__init__(**kwargs)

Expand Down
25 changes: 25 additions & 0 deletions src/hdmf/term_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,3 +304,28 @@ def extend(self, arg):
else:
msg = ('"%s" is not in the term set.' % ', '.join([str(item) for item in bad_data]))
raise ValueError(msg)

class TermSetConfigurator:
    """Manage the global termset configuration used for field validation.

    Holds the path to a YAML configuration file that maps data types to
    termset schema files. The path can be replaced with
    ``load_termset_config`` or cleared with ``unload_termset_config``.
    """

    def __init__(self):
        # Default to the curated example configuration.
        # NOTE(review): this should be a package-relative path, not a
        # developer-machine absolute path — confirm the intended location.
        self.path = '/Users/mavaylon/Research/NWB/hdmf2/hdmf/docs/gallery/example_config.yaml'

    @docval({'name': 'config_path', 'type': str, 'doc': 'Path to the configuration file.'})
    def load_termset_config(self, **kwargs):
        """Load the configuration file for validation on the fields defined for the objects within the file.

        By default, the curated configuration file is used, but can take in a custom file.
        """
        # Original was missing ``self`` and so raised NameError on use;
        # docval delivers validated arguments as keyword arguments.
        self.path = kwargs['config_path']

    def unload_termset_config(self):
        """Remove validation according to the termset configuration file."""
        # A None path signals "no validation" to consumers of this config.
        self.path = None
4 changes: 4 additions & 0 deletions tests/unit/common/test_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,10 @@
except ImportError:
LINKML_INSTALLED = False

class TestVDConfig(TestCase):
    def test_init_config(self):
        """Smoke test: constructing VectorData succeeds while a termset config is active."""
        vd = VectorData(name='data', description='', data=['Homo sapiens'])
        # A test without assertions verifies nothing; at minimum pin the
        # constructed attributes until wrapper-specific behavior is finalized.
        self.assertEqual(vd.name, 'data')


class TestDynamicTable(TestCase):

Expand Down
4 changes: 4 additions & 0 deletions tests/unit/test_container.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,10 @@ def test_link_and_get_resources(self):
er_get = em.get_linked_resources()
self.assertEqual(er, er_get)

class TestContainerConfig(TestCase):
    def test_init_config(self):
        """Smoke test: constructing a Container succeeds while a termset config is active."""
        obj = Container('obj1')
        # A test without assertions verifies nothing; at minimum pin the
        # constructed attributes until config-specific behavior is finalized.
        self.assertEqual(obj.name, 'obj1')


class TestContainer(TestCase):

Expand Down

0 comments on commit bb40b5a

Please sign in to comment.