Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
- Fixed broken and redirecting links in documentation. @bendichter [#2165](https://github.com/NeurodataWithoutBorders/pynwb/pull/2165)

### Added
- Added HERD to `general` within the `NWBFile`. @mavaylon1 [#2111](https://github.com/NeurodataWithoutBorders/pynwb/pull/2111)
- Added `get_starting_time()` and `get_duration()` methods to `TimeSeries` to get the starting time and duration of the time series. @h-mayorquin [#2146](https://github.com/NeurodataWithoutBorders/pynwb/pull/2146)
- Added `get_starting_time()` and `get_duration()` methods to `TimeIntervals` to get the earliest start time and total duration (span from earliest start to latest stop) of all intervals. @h-mayorquin [#2146](https://github.com/NeurodataWithoutBorders/pynwb/pull/2146)
- Added `get_starting_time()` and `get_duration()` methods to `Units` to get the earliest spike time and total duration (span from earliest to latest spike) across all units. @h-mayorquin [#2164](https://github.com/NeurodataWithoutBorders/pynwb/pull/2164)
Expand Down Expand Up @@ -58,7 +59,6 @@
- Updated tests to skip streaming tests gracefully if offline. @rly [#2113](https://github.com/NeurodataWithoutBorders/pynwb/pull/2113)
- Added check in `PlaneSegmentation` constructor for required columns. @rly [#2102](https://github.com/NeurodataWithoutBorders/pynwb/pull/2102)


## PyNWB 3.1.0 (July 8, 2025)

### Breaking changes
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ classifiers = [
]
dependencies = [
"h5py>=3.6.0",
"hdmf>=4.1.2,<5",
"hdmf>=5.0.0,<6",
"numpy>=1.24.0",
"pandas>=1.3.5",
"python-dateutil>=2.8.2",
Expand Down
2 changes: 1 addition & 1 deletion requirements-min.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# minimum versions of package dependencies for installing PyNWB
h5py==3.6.0
hdmf==4.1.2
hdmf==5.0.0
numpy==1.24.0
pandas==1.3.5
python-dateutil==2.8.2
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# pinned dependencies to reproduce an entire development environment to use PyNWB
h5py==3.12.1
hdmf==4.1.2
hdmf==5.0.0
numpy==2.1.1
pandas==2.2.3
python-dateutil==2.9.0.post0
Expand Down
40 changes: 39 additions & 1 deletion src/pynwb/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

from hdmf.common import DynamicTableRegion, DynamicTable
from hdmf.container import HERDManager
from hdmf.common import HERD
from hdmf.utils import docval, getargs, get_docval, popargs, popargs_to_dict, AllowPositional

from . import register_class, CORE_NAMESPACE
Expand Down Expand Up @@ -287,6 +288,7 @@ class NWBFile(MultiContainerInterface, HERDManager):
{'name': 'trials', 'child': True, 'required_name': 'trials'},
{'name': 'units', 'child': True, 'required_name': 'units'},
{'name': 'subject', 'child': True, 'required_name': 'subject'},
{'name': 'external_resources', 'child': True, 'required_name': 'external_resources'},
{'name': 'sweep_table', 'child': True, 'required_name': 'sweep_table'},
{'name': 'invalid_times', 'child': True, 'required_name': 'invalid_times'},
# icephys_filtering is temporary. /intracellular_ephys/filtering dataset will be deprecated
Expand Down Expand Up @@ -339,6 +341,8 @@ class NWBFile(MultiContainerInterface, HERDManager):
{'name': 'keywords', 'type': 'array_data', 'doc': 'Terms to search over', 'default': None},
{'name': 'notes', 'type': str,
'doc': 'Notes about the experiment.', 'default': None},
{'name': 'external_resources', 'type': HERD,
'doc': 'the HERD external resources object for this NWBFile', 'default': None},
{'name': 'pharmacology', 'type': str,
'doc': 'Description of drugs used, including how and when they were administered. '
'Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.', 'default': None},
Expand Down Expand Up @@ -483,9 +487,16 @@ def __init__(self, **kwargs):
'icephys_experimental_conditions'
]
args_to_set = popargs_to_dict(keys_to_set, kwargs)
external_resources = popargs('external_resources', kwargs)
kwargs['name'] = 'root'
super().__init__(**kwargs)

self._external_herd = None
self._internal_herd = None

if external_resources is not None:
self.external_resources = external_resources

# add timezone to session_start_time if missing
session_start_time = args_to_set['session_start_time']
if session_start_time.tzinfo is None:
Expand Down Expand Up @@ -570,6 +581,33 @@ def all_children(self):
stack.append(c)
return ret

def link_resources(self, herd):
    """Attach *herd* as an externally linked HERD for this file.

    A HERD linked this way takes precedence in the ``external_resources``
    property, yet it is never serialized on export — whatever HERD is
    stored internally in the file (if any) is what ends up in the
    exported file.
    """
    self._external_herd = herd

@property
def external_resources(self):
    """The HERD external resources object associated with this NWBFile.

    A HERD attached through ``link_resources`` wins; failing that, the
    internally stored HERD (set in ``__init__`` or through this
    property's setter) is returned, which may be ``None``.
    """
    linked = self._external_herd
    return linked if linked is not None else self._internal_herd

@external_resources.setter
def external_resources(self, herd):
    """Store *herd* as this file's internal HERD and parent it to the file."""
    self._internal_herd = herd
    herd.parent = self

@property
def objects(self):
if self.__obj is None:
Expand Down Expand Up @@ -1152,4 +1190,4 @@ def ElectrodeTable(name='electrodes',
description='metadata about extracellular electrodes'):
warn("The ElectrodeTable convenience function is deprecated. Please create a new instance of "
"the ElectrodesTable class instead.", DeprecationWarning)
return ElectrodesTable()
return ElectrodesTable()
2 changes: 2 additions & 0 deletions src/pynwb/io/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,8 @@ def __init__(self, spec):

self.map_spec('subject', general_spec.get_group('subject'))

self.map_spec('external_resources', general_spec.get_group('external_resources'))

device_spec = general_spec.get_group('devices')
self.unmap(device_spec)
self.map_spec('devices', device_spec.get_neurodata_type('Device'))
Expand Down
192 changes: 192 additions & 0 deletions tests/unit/test_resources.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,27 @@
import warnings
from datetime import datetime
from uuid import uuid4
import os
import numpy as np

from dateutil import tz

from pynwb.resources import HERD
from pynwb.file import Subject
from pynwb import NWBHDF5IO, NWBFile
from pynwb.testing import TestCase


class TestNWBContainer(TestCase):
def setUp(self):
    """Choose deterministic scratch file names for the write and export tests."""
    self.path, self.export_path = "resources_file.nwb", "export_file.nwb"

def tearDown(self):
    """Delete any scratch NWB files a test left behind."""
    for leftover in (self.path, self.export_path):
        if os.path.isfile(leftover):
            os.remove(leftover)

def test_constructor(self):
"""
Test constructor
Expand All @@ -17,3 +34,178 @@ def test_constructor(self):
)
er = HERD()
self.assertIsInstance(er, HERD)

def test_nwbfile_init_herd(self):
    """Passing a HERD to the NWBFile constructor exposes it via ``external_resources``."""
    session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific"))
    herd = HERD()
    nwbfile = NWBFile(
        session_description="A Person undergoing brain pokes.",
        identifier=str(uuid4()),
        session_start_time=session_start_time,
        external_resources=herd
    )
    # assertIsInstance gives a clearer failure message than
    # assertTrue(isinstance(...)) and matches test_constructor above.
    self.assertIsInstance(nwbfile.external_resources, HERD)

def test_nwbfile_set_herd(self):
    """Assigning to ``external_resources`` stores the HERD and parents it to the file."""
    session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific"))
    herd = HERD()
    nwbfile = NWBFile(
        session_description="A Person undergoing brain pokes.",
        identifier=str(uuid4()),
        session_start_time=session_start_time,
    )
    nwbfile.external_resources = herd
    # assertIsInstance/assertIs give clearer failure messages than
    # assertTrue(isinstance(...)) / assertEqual; parenting is an identity
    # relationship (the setter assigns `herd.parent = nwbfile` directly),
    # not mere equality.
    self.assertIsInstance(nwbfile.external_resources, HERD)
    self.assertIs(nwbfile.external_resources.parent, nwbfile)

def test_resources_roundtrip(self):
    """Round-trip an internal HERD through a write/read cycle.

    Writes an NWBFile whose internal HERD holds a single
    key/entity/object reference and checks that each HERD table reads
    back intact.
    """
    session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific"))

    nwbfile = NWBFile(
        session_description="A Person undergoing brain pokes.",
        identifier=str(uuid4()),
        session_start_time=session_start_time,
    )
    subject = Subject(
        subject_id="001",
        age="26",
        description="human 5",
        species='Homo sapiens',
        sex="M",
    )

    nwbfile.subject = subject
    herd = HERD()
    # Setting the property stores the HERD internally and parents it to the file.
    nwbfile.external_resources = herd

    # One external reference: the subject's species mapped to its NCBI Taxonomy entry.
    nwbfile.external_resources.add_ref(container=nwbfile.subject,
                                       key=nwbfile.subject.species,
                                       entity_id="NCBI_TAXON:9606",
                                       entity_uri='https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id=9606')

    with NWBHDF5IO(self.path, "w") as io:
        io.write(nwbfile)

    with NWBHDF5IO(self.path, "r") as io:
        read_nwbfile = io.read()
        # Slicing the HERD tables yields numpy structured arrays; compare
        # each table against the single row added above.
        self.assertEqual(
            read_nwbfile.external_resources.keys[:],
            np.array(
                [('Homo sapiens',)],
                dtype=[('key', 'O')]
            )
        )

        self.assertEqual(
            read_nwbfile.external_resources.entities[:],
            np.array(
                [
                    ('NCBI_TAXON:9606',
                     'https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id=9606')
                ],
                dtype=[('entity_id', 'O'), ('entity_uri', 'O')]
            )
        )

        # The objects row ties the Subject container to the entity above;
        # relative_path and field are empty for this direct container reference.
        self.assertEqual(
            read_nwbfile.external_resources.objects[:],
            np.array(
                [
                    (0,
                     subject.object_id,
                     'Subject',
                     '',
                     '')
                ],
                dtype=[
                    ('files_idx', '<u4'),
                    ('object_id', 'O'),
                    ('object_type', 'O'),
                    ('relative_path', 'O'),
                    ('field', 'O')
                ]
            )
        )

def test_link_resources(self):
    """
    Note: Make sure that the internal HERD is not overwritten on export.

    Writes a file with a populated internal HERD, then on read links an
    empty HERD via ``link_resources`` (which takes precedence in the
    ``external_resources`` property) and exports. The exported file must
    still contain the original internal HERD, proving the linked HERD is
    not written out.
    """
    session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific"))

    nwbfile = NWBFile(
        session_description="A Person undergoing brain pokes.",
        identifier=str(uuid4()),
        session_start_time=session_start_time,
    )
    subject = Subject(
        subject_id="001",
        age="26",
        description="human 5",
        species='Homo sapiens',
        sex="M",
    )

    nwbfile.subject = subject
    herd = HERD()
    nwbfile.external_resources = herd

    # Populate the internal HERD with a single reference before writing.
    nwbfile.external_resources.add_ref(container=nwbfile.subject,
                                       key=nwbfile.subject.species,
                                       entity_id="NCBI_TAXON:9606",
                                       entity_uri='https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id=9606')

    with NWBHDF5IO(self.path, "w") as io:
        io.write(nwbfile)

    with NWBHDF5IO(self.path, mode='r') as read_io:
        read_nwbfile = read_io.read()
        # Link an empty HERD; the property should now return it instead of
        # the internal one read from disk.
        read_nwbfile.link_resources(HERD())

        self.assertEqual(read_nwbfile.external_resources.keys.data, [])
        self.assertEqual(read_nwbfile.external_resources.entities.data, [])
        self.assertEqual(read_nwbfile.external_resources.objects.data, [])

        # Export while read_io is still open (export needs the source IO).
        with NWBHDF5IO(self.export_path, mode='w') as export_io:
            export_io.export(src_io=read_io, nwbfile=read_nwbfile)

    with NWBHDF5IO(self.export_path, mode='r') as read_export_io:
        read_export_nwbfile = read_export_io.read()
        # The exported file must retain the ORIGINAL internal HERD contents,
        # not the empty linked HERD.
        self.assertEqual(
            read_export_nwbfile.external_resources.keys[:],
            np.array(
                [('Homo sapiens',)],
                dtype=[('key', 'O')]
            )
        )

        self.assertEqual(
            read_export_nwbfile.external_resources.entities[:],
            np.array(
                [
                    ('NCBI_TAXON:9606',
                     'https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id=9606')
                ],
                dtype=[('entity_id', 'O'), ('entity_uri', 'O')]
            )
        )

        self.assertEqual(
            read_export_nwbfile.external_resources.objects[:],
            np.array(
                [
                    (0,
                     subject.object_id,
                     'Subject',
                     '',
                     '')
                ],
                dtype=[
                    ('files_idx', '<u4'),
                    ('object_id', 'O'),
                    ('object_type', 'O'),
                    ('relative_path', 'O'),
                    ('field', 'O')
                ]
            )
        )
Loading