2 changes: 1 addition & 1 deletion brukerapi/config/properties_fid_custom.json
@@ -71,7 +71,7 @@
"dwell_s": [
{
"cmd": "1./ @sw_hz / 2.",
"conditions": []
"conditions": ["@sw_hz!=0"]
}
],
"TR": [
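The only change to properties_fid_custom.json guards the dwell-time expression: `1./ @sw_hz / 2.` divides by the sweep width, so the new condition "@sw_hz!=0" lets the property be skipped instead of failing when sw_hz is zero. A minimal sketch of the idea (hypothetical helper, not the library's actual property evaluator):

def dwell_s(sw_hz):
    """Mirror of the JSON entry: evaluate 1./sw_hz/2. only if the condition holds."""
    conditions = [sw_hz != 0]        # corresponds to the new "@sw_hz!=0" entry
    if not all(conditions):
        return None                  # property stays undefined rather than raising ZeroDivisionError
    return 1.0 / sw_hz / 2.0         # corresponds to the "1./ @sw_hz / 2." command

print(dwell_s(100000.0))             # 5e-06
print(dwell_s(0.0))                  # None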
41 changes: 30 additions & 11 deletions brukerapi/dataset.py
@@ -40,10 +40,16 @@
"load": LOAD_STAGES["all"],
"mmap": False,
},
"2dseq": {
"parameter_files": ["visu_pars"],
"property_files": [Path(__file__).parents[0] / "config/properties_2dseq_core.json", Path(__file__).parents[0] / "config/properties_2dseq_custom.json"],
"load": LOAD_STAGES["all"],
'fid_proc': {
"parameter_files" : ['acqp', 'method'],
"property_files": [Path(__file__).parents[0] / 'config/properties_fid_core.json', Path(__file__).parents[0] / 'config/properties_fid_custom.json'],
"load": LOAD_STAGES['all'],
"mmap": False
},
'2dseq': {
"parameter_files": ['visu_pars'],
"property_files": [Path(__file__).parents[0] / 'config/properties_2dseq_core.json', Path(__file__).parents[0] / 'config/properties_2dseq_custom.json'],
"load": LOAD_STAGES['all'],
"scale": True,
"mmap": False,
},
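The new 'fid_proc' entry in DEFAULT_STATES registers processed FID files as a dataset type of their own: they read the same JCAMP-DX files (acqp, method) and the same fid property definitions as raw fid data, and are not memory-mapped by default. A minimal sketch of how such an entry could be consumed, assuming the dataset type is taken from the file name (the helper below is illustrative, not the package's constructor):

from pathlib import Path

def default_state_for(path_str, default_states):
    dataset_type = Path(path_str).name            # "fid", "fid_proc", "2dseq", ...
    if dataset_type not in default_states:
        raise ValueError(f"unsupported dataset type: {dataset_type}")
    return dict(default_states[dataset_type])     # copy, so per-instance state can be overridden

# default_state_for("study/6/pdata/1/fid_proc", DEFAULT_STATES)["parameter_files"]
# -> ['acqp', 'method']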
@@ -77,6 +83,15 @@
"AdjStatePerScan": "./AdjStatePerScan",
"AdjStatePerStudy": "../AdjStatePerStudy",
},
"fid_proc": {
"method": "../../method",
"acqp": "../../acqp",
"subject": "../../../subject",
"reco": "./reco",
"visu_pars": "./visu_pars",
"AdjStatePerScan": "../../AdjStatePerScan",
"AdjStatePerStudy": "../../../AdjStatePerStudy",
},
"2dseq": {
"method": "../../method",
"acqp": "../../acqp",
@@ -156,7 +171,7 @@ def __init__(self, path, **state):
containing it. It is possible to create an empty object using the load switch.

:param path: **str** path to dataset
:raise: :UnsuportedDatasetType: In case `Dataset.type` is not in SUPPORTED
:raise: :UnsupportedDatasetType: In case `Dataset.type` is not in SUPPORTED
:raise: :IncompleteDataset: If any of the JCAMP-DX files necessary to create a Dataset instance is missing

"""
@@ -248,8 +263,12 @@ def _validate(self):
raise UnsuportedDatasetType(self.type)

# Check whether all necessary JCAMP-DX files are present
if self._state.get("load") >= LOAD_STAGES["parameters"] and not (set(DEFAULT_STATES[self.type]["parameter_files"]) <= set(os.listdir(str(self.path.parent)))):
raise IncompleteDataset
if self._state.get('load') >= LOAD_STAGES['parameters']:
for i in DEFAULT_STATES[self.type]['parameter_files']:
param_path = self.path.parent / RELATIVE_PATHS[self.type][i]
if i not in set(os.listdir(str(param_path.parent))):
raise IncompleteDataset

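The rewritten completeness check matters for the new type: the old one-liner required every parameter file to be a sibling of the data file, which holds for a raw fid (acqp and method sit next to it) but not for fid_proc, whose parameter files live two or three directories up. The loop therefore resolves each file through RELATIVE_PATHS before testing for its presence. A worked example, assuming the usual layout in which fid_proc sits under <experiment>/pdata/<procno>/:

from pathlib import Path

path = Path("/data/study/6/pdata/1/fid_proc")                 # assumed example location
rel = {"acqp": "../../acqp", "method": "../../method"}        # subset of RELATIVE_PATHS["fid_proc"]

for name, rel_path in rel.items():
    param_path = path.parent / rel_path                       # e.g. /data/study/6/pdata/1/../../acqp
    print(name, "looked up in", param_path.parent.resolve())  # -> /data/study/6

# The previous check, set(parameter_files) <= set(os.listdir(path.parent)), would have
# searched /data/study/6/pdata/1 directly and raised IncompleteDataset for every fid_proc.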

def load(self):
"""
@@ -381,7 +400,8 @@ def load_properties(self):

def unload_properties(self):
for property in self._properties:
delattr(self, property)
if hasattr(self, property):
delattr(self,property)
self._properties = []
self._state["load_properties"] = False

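unload_properties() now tolerates properties that were never set: delattr() raises AttributeError when the attribute is missing, so the hasattr() guard keeps unloading robust if a listed property was never actually assigned, for instance when its conditions (like the new "@sw_hz!=0" guard on dwell_s) were not met. A small illustration with a stand-in object:

class Demo:                               # stand-in object, for illustration only
    pass

d = Demo()
d.TR = 1.5                                # this property was set
for prop in ["TR", "dwell_s"]:            # "dwell_s" was never set (e.g. its condition failed)
    if hasattr(d, prop):
        delattr(d, prop)                  # without the hasattr() guard: AttributeError on "dwell_s"
print(vars(d))                            # {}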
@@ -486,7 +506,7 @@ def load_schema(self):
"""
Load the schema for given data set.
"""
if self.type == "fid":
if self.type in ["fid", "fid_proc"]:
self._schema = SchemaFid(self)
elif self.type == "2dseq":
self._schema = Schema2dseq(self)
@@ -547,10 +567,9 @@ def _read_binary_file(self, path, dtype, shape):
"""
# TODO debug with this
try:
assert os.stat(str(path)).st_size == np.prod(shape) * dtype.itemsize
assert os.stat(str(path)).st_size >= np.prod(shape) * dtype.itemsize
except AssertionError:
raise ValueError("Dimension mismatch") from AssertionError

return np.array(np.memmap(path, dtype=dtype, shape=shape, order="F")[:])

def _write_data(self, path):
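The size assertion in _read_binary_file is relaxed from an exact match to a lower bound: a file carrying more bytes than the declared shape needs (padding or extra blocks in processed data, presumably) no longer triggers "Dimension mismatch", while a truncated file is still rejected before the memmap is built. Since np.memmap is given an explicit shape, it maps only the first prod(shape) elements anyway, as this small sketch shows:

import os
import tempfile

import numpy as np

dtype = np.dtype("int32")
shape = (2, 3)

fd, path = tempfile.mkstemp()
os.close(fd)
np.arange(10, dtype=dtype).tofile(path)                            # 10 values on disk, only 6 needed

assert os.stat(path).st_size >= np.prod(shape) * dtype.itemsize    # the relaxed check
data = np.array(np.memmap(path, dtype=dtype, shape=shape, order="F")[:])
print(data)                                 # [[0 2 4] [1 3 5]] - first 6 values, Fortran order
os.remove(path)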
1 change: 1 addition & 0 deletions brukerapi/schemas.py
@@ -10,6 +10,7 @@
# properties required for loading of the data array for each dataset type
REQUIRED_PROPERTIES = {
"fid": ["numpy_dtype", "channels", "block_size", "acq_lenght", "scheme_id", "block_count", "encoding_space", "permute", "k_space", "encoded_dim", "shape_storage", "dim_type"],
"fid_proc": ["numpy_dtype", "channels", "block_size", "acq_lenght", "scheme_id", "block_count", "encoding_space", "permute", "k_space", "encoded_dim", "shape_storage", "dim_type"],
"2dseq": [
"pv_version",
"numpy_dtype",
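In schemas.py the new "fid_proc" key simply duplicates the "fid" requirement list (including the existing "acq_lenght" spelling of that property name), which is consistent with load_schema() routing both types to SchemaFid. A minimal sketch of how such a per-type list might be consulted (assumed usage, not the library's exact code):

def missing_properties(dataset, required_properties):
    """Return the required property names the dataset does not expose yet."""
    required = required_properties.get(dataset.type, [])
    return [name for name in required if not hasattr(dataset, name)]

# e.g. missing_properties(fid_proc_dataset, REQUIRED_PROPERTIES) should be [] once
# properties_fid_core.json / properties_fid_custom.json have been evaluated.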