diff --git a/brukerapi/config/properties_fid_custom.json b/brukerapi/config/properties_fid_custom.json
index 5b40143..839276a 100644
--- a/brukerapi/config/properties_fid_custom.json
+++ b/brukerapi/config/properties_fid_custom.json
@@ -71,7 +71,7 @@
   "dwell_s": [
     {
       "cmd": "1./ @sw_hz / 2.",
-      "conditions": []
+      "conditions": ["@sw_hz!=0"]
     }
   ],
   "TR": [
diff --git a/brukerapi/dataset.py b/brukerapi/dataset.py
index f52fc2f..8b3d9f2 100644
--- a/brukerapi/dataset.py
+++ b/brukerapi/dataset.py
@@ -40,10 +40,16 @@
         "load": LOAD_STAGES["all"],
         "mmap": False,
     },
-    "2dseq": {
-        "parameter_files": ["visu_pars"],
-        "property_files": [Path(__file__).parents[0] / "config/properties_2dseq_core.json", Path(__file__).parents[0] / "config/properties_2dseq_custom.json"],
-        "load": LOAD_STAGES["all"],
+    'fid_proc': {
+        "parameter_files" : ['acqp', 'method'],
+        "property_files": [Path(__file__).parents[0] / 'config/properties_fid_core.json', Path(__file__).parents[0] / 'config/properties_fid_custom.json'],
+        "load": LOAD_STAGES['all'],
+        "mmap": False
+    },
+    '2dseq': {
+        "parameter_files": ['visu_pars'],
+        "property_files": [Path(__file__).parents[0] / 'config/properties_2dseq_core.json', Path(__file__).parents[0] / 'config/properties_2dseq_custom.json'],
+        "load": LOAD_STAGES['all'],
         "scale": True,
         "mmap": False,
     },
@@ -77,6 +83,15 @@
         "AdjStatePerScan": "./AdjStatePerScan",
         "AdjStatePerStudy": "../AdjStatePerStudy",
     },
+    "fid_proc": {
+        "method": "../../method",
+        "acqp": "../../acqp",
+        "subject": "../../../subject",
+        "reco": "./reco",
+        "visu_pars": "./visu_pars",
+        "AdjStatePerScan": "../../AdjStatePerScan",
+        "AdjStatePerStudy": "../../../AdjStatePerStudy",
+    },
     "2dseq": {
         "method": "../../method",
         "acqp": "../../acqp",
@@ -156,7 +171,7 @@ def __init__(self, path, **state):
         containing it. It is possible, to create an empty object using the load switch.
 
         :param path: **str** path to dataset
-        :raise: :UnsuportedDatasetType: In case `Dataset.type` is not in SUPPORTED
+        :raise: :UnsupportedDatasetType: In case `Dataset.type` is not in SUPPORTED
         :raise: :IncompleteDataset: If any of the JCAMP-DX files, necessary to create a Dataset instance is missing
         """
 
@@ -248,8 +263,12 @@ def _validate(self):
             raise UnsuportedDatasetType(self.type)
 
         # Check whether all necessary JCAMP-DX files are present
-        if self._state.get("load") >= LOAD_STAGES["parameters"] and not (set(DEFAULT_STATES[self.type]["parameter_files"]) <= set(os.listdir(str(self.path.parent)))):
-            raise IncompleteDataset
+        if self._state.get('load') >= LOAD_STAGES['parameters']:
+            for i in DEFAULT_STATES[self.type]['parameter_files']:
+                param_path = self.path.parent / RELATIVE_PATHS[self.type][i]
+                if i not in set(os.listdir(str(param_path.parent))):
+                    raise IncompleteDataset
+
 
     def load(self):
         """
@@ -381,7 +400,8 @@ def load_properties(self):
 
     def unload_properties(self):
         for property in self._properties:
-            delattr(self, property)
+            if hasattr(self, property):
+                delattr(self,property)
         self._properties = []
         self._state["load_properties"] = False
 
@@ -486,7 +506,7 @@ def load_schema(self):
         """
         Load the schema for given data set.
         """
-        if self.type == "fid":
+        if self.type in ["fid", "fid_proc"]:
             self._schema = SchemaFid(self)
         elif self.type == "2dseq":
             self._schema = Schema2dseq(self)
@@ -547,10 +567,9 @@ def _read_binary_file(self, path, dtype, shape):
         """
         # TODO debug with this
         try:
-            assert os.stat(str(path)).st_size == np.prod(shape) * dtype.itemsize
+            assert os.stat(str(path)).st_size >= np.prod(shape) * dtype.itemsize
         except AssertionError:
             raise ValueError("Dimension mismatch") from AssertionError
-
         return np.array(np.memmap(path, dtype=dtype, shape=shape, order="F")[:])
 
     def _write_data(self, path):
diff --git a/brukerapi/schemas.py b/brukerapi/schemas.py
index b3b3db4..e6a2292 100644
--- a/brukerapi/schemas.py
+++ b/brukerapi/schemas.py
@@ -10,6 +10,7 @@
 # properties required for loading of the data array for each dataset type
 REQUIRED_PROPERTIES = {
     "fid": ["numpy_dtype", "channels", "block_size", "acq_lenght", "scheme_id", "block_count", "encoding_space", "permute", "k_space", "encoded_dim", "shape_storage", "dim_type"],
+    "fid_proc": ["numpy_dtype", "channels", "block_size", "acq_lenght", "scheme_id", "block_count", "encoding_space", "permute", "k_space", "encoded_dim", "shape_storage", "dim_type"],
     "2dseq": [
         "pv_version",
         "numpy_dtype",