diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 00000000..d1471385 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,95 @@ +name: Benchmark +on: + push: + branches: + - test_benchmarking + +jobs: + master-benchmark: + name: Benchmark Master + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - name: compile + run: | + git submodule update --init + mkdir build + ./compile.sh --docker-build + docker run -i --rm -v $(pwd):/host_pwd -w /host_pwd artss_docker sh -c "cd build && cmake .. && make artss_bench && ./benchmarks/bin/artss_bench --benchmark_format=json | tee ../benchmark_result.json" + - name: Download previous benchmark data + uses: actions/cache@v1 + with: + path: ./cache + key: ${{ runner.os }}-benchmark + + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + # What benchmark tool the output.txt came from + tool: 'googlecpp' + # Where the output from the benchmark tool is stored + output-file-path: benchmark_result.json + # Where the previous data file is stored + github-token: ${{ secrets.GITHUB_TOKEN }} + auto-push: true + + benchmark: + name: Benchmark + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - name: compile + run: | + git submodule update --init + mkdir build + ./compile.sh --docker-build + docker run -i --rm -v $(pwd):/host_pwd -w /host_pwd artss_docker sh -c "cd build && cmake .. 
&& make artss_bench && ./benchmarks/bin/artss_bench --benchmark_format=json | tee ../benchmark_result.json" + - name: Download previous benchmark data + uses: actions/cache@v1 + with: + path: ./cache + key: ${{ runner.os }}-benchmark + + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + # What benchmark tool the output.txt came from + tool: 'googlecpp' + # Where the output from the benchmark tool is stored + output-file-path: benchmark_result.json + # Where the previous data file is stored + external-data-json-path: ./cache/benchmark-data.json + github-token: ${{ secrets.GITHUB_TOKEN }} + comment-always: true + + benchmark-gpu: + name: Benchmark-GPU + runs-on: gpu + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - name: compile + run: | + git submodule update --init + mkdir build + ./compile.sh --docker-build + docker run --gpus all -i --rm -v $(pwd):/host_pwd -w /host_pwd --user 1000:1000 artss_docker sh -c "./compile.sh --jobs 32 --gpu_benchmark && cd build && make artss_bench && ./benchmarks/bin/artss_bench --benchmark_format=json | tee ../benchmark_result_gb.json" + - name: Download previous benchmark data + uses: actions/cache@v1 + with: + path: ./cache + key: ${{ runner.os }}-benchmark + + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + # What benchmark tool the output.txt came from + tool: 'googlecpp' + # Where the output from the benchmark tool is stored + output-file-path: benchmark_result_gb.json + # Where the previous data file is stored + external-data-json-path: ./cache/benchmark-data.json + github-token: ${{ secrets.GITHUB_TOKEN }} + comment-always: true diff --git a/.github/workflows/ccpp.yml b/.github/workflows/ccpp.yml index b013dfc9..9dd901d0 100644 --- a/.github/workflows/ccpp.yml +++ b/.github/workflows/ccpp.yml @@ -16,6 +16,7 @@ jobs: - name: compile run: | git submodule update --init + sudo apt-get install -y mpich libhdf5-dev 
./compile.sh --jobs 4 --gcc -s --checkout build-gcc-serial-benchmarking: @@ -27,6 +28,7 @@ jobs: - name: compile run: | git submodule update --init + sudo apt-get install -y mpich libhdf5-dev ./compile.sh --jobs 4 --gcc --sb --checkout build-pgi-multicore: @@ -97,6 +99,16 @@ jobs: ./compile.sh --docker-build docker run -i --rm -v $(pwd):/host_pwd -w /host_pwd artss_docker sh -c "./compile.sh --jobs 4 -s -m -d && cd build && ctest --output-on-failure -E 'gpu|gtest' --checkout" + cmake-tests-gpu: + + runs-on: gpu + steps: + - uses: actions/checkout@v2 + - name: compile + run: | + git submodule update --init + ./compile.sh --docker-build + docker run -i --rm -v $(pwd):/host_pwd -w /host_pwd --user 1000:1000 artss_docker sh -c "./compile.sh --jobs 32 -g -m -d && cd build && ctest --output-on-failure --checkout" unit-tests_serial: runs-on: ubuntu-latest diff --git a/.gitmodules b/.gitmodules index d320d5f0..f4532dd2 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,3 +7,9 @@ [submodule "external/googletest"] path = external/googletest url = https://github.com/google/googletest/ +[submodule "external/HighFive"] + path = external/HighFive + url = https://github.com/BlueBrain/HighFive.git +[submodule "external/benchmark"] + path = external/benchmark + url = https://github.com/google/benchmark diff --git a/CMakeLists.txt b/CMakeLists.txt index 8402a62b..f217ee46 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -146,6 +146,15 @@ if(NOT TARGET gtest::gtest) add_subdirectory(external/googletest) endif() +if(NOT TARGET benchmark::benchmark) + # Stand-alone build + find_package(benchmark::benchmark QUIET) +endif() +# use local fallback +if(NOT TARGET benchmark::benchmark) + add_subdirectory(external/benchmark) +endif() + if(NOT TARGET MPI) find_package(MPI) endif() @@ -197,6 +206,10 @@ add_subdirectory(gtests) add_subdirectory(src/dataAssimilation) add_subdirectory(src/TCP) + +set(HIGHFIVE_USE_BOOST OFF) +add_subdirectory(external/HighFive) + 
get_target_property(ARTSS_SOURCE_FILES artss SOURCES) add_executable(artss_data_assimilation_serial ${ARTSS_SOURCE_FILES} src/main.cpp) target_compile_options(artss_data_assimilation_serial PUBLIC ${ARTSS_COMPILE_OPTIONS}) @@ -204,3 +217,8 @@ target_compile_options(artss_data_assimilation_serial PUBLIC -DASSIMILATION) target_link_libraries(artss_data_assimilation_serial PRIVATE spdlog::spdlog) target_link_libraries(artss_data_assimilation_serial PRIVATE fmt::fmt) target_link_libraries(artss_data_assimilation_serial PRIVATE MPI::MPI_CXX) +target_link_libraries(artss_data_assimilation_serial PRIVATE HighFive) + + +set(BENCHMARK_ENABLE_TESTING OFF) +add_subdirectory(benchmarks) diff --git a/benchmarks/CMakeLists.txt b/benchmarks/CMakeLists.txt new file mode 100644 index 00000000..759b9345 --- /dev/null +++ b/benchmarks/CMakeLists.txt @@ -0,0 +1,13 @@ +add_executable(artss_bench ${ARTSS_SOURCE_FILES}) + +add_subdirectory(field) + +target_include_directories(artss_bench PRIVATE ${CMAKE_SOURCE_DIR}) +target_link_libraries(artss_bench benchmark::benchmark) +target_link_libraries(artss_bench gtest) +target_link_libraries(artss_bench spdlog::spdlog) +target_link_libraries(artss_bench fmt::fmt) +target_link_libraries(artss_bench MPI::MPI_CXX) +target_link_libraries(artss_bench HighFive) + +add_test(NAME artss_bench COMMAND artss_bench) diff --git a/benchmarks/Environment.h b/benchmarks/Environment.h new file mode 100644 index 00000000..05b2410d --- /dev/null +++ b/benchmarks/Environment.h @@ -0,0 +1,25 @@ +/// \file Environment.cpp +/// \brief +/// \date Dec 10, 2021 +/// \author My Linh Wuerzburger +/// \copyright <2015-2021> Forschungszentrum Juelich All rights reserved. + + +#ifndef BENCH_ENVIRONMENT_H_ +#define BENCH_ENVIRONMENT_H_ + +#include +#include "src/utility/Utility.h" + +static int setup_done = 0; + +// Override this to define how to set up the environment. 
+static void DoSetup(const benchmark::State& state) { + if (setup_done) + return; + + Utility::create_logger("info", "gtest.log"); + setup_done = 1; +} + +#endif /* BENCH_FIELD_FIELD_H_ */ diff --git a/benchmarks/field/CMakeLists.txt b/benchmarks/field/CMakeLists.txt new file mode 100644 index 00000000..a6357d3e --- /dev/null +++ b/benchmarks/field/CMakeLists.txt @@ -0,0 +1,4 @@ +target_sources(artss_bench + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/Field.cpp +) diff --git a/benchmarks/field/Field.cpp b/benchmarks/field/Field.cpp new file mode 100644 index 00000000..94d14404 --- /dev/null +++ b/benchmarks/field/Field.cpp @@ -0,0 +1,56 @@ +#include + +#include "../Environment.h" +#include "src/field/Field.h" + +#include + + +static void BM_AddScalar(benchmark::State &state) { + // Utility::create_logger("info", "bench.log"); + size_t size = state.range(0); + + for (auto _ : state) { + Field a(UNKNOWN_FIELD, 0.0, 0, size); + a += M_E; + } +} + +static void BM_AddFields(benchmark::State &state) { + // Utility::create_logger("info", "bench.log"); + size_t size = state.range(0); + + for (auto _ : state) { + Field a(UNKNOWN_FIELD, 0.0, 0, size); + Field b(UNKNOWN_FIELD, 0.0, 0, size); + a += b; + } +} + +static void BM_MulScalar(benchmark::State &state) { + // Utility::create_logger("info", "bench.log"); + size_t size = state.range(0); + + for (auto _ : state) { + Field a(UNKNOWN_FIELD, 0.0, 0, size); + a *= M_E; + } +} + +static void BM_MulFields(benchmark::State &state) { + // Utility::create_logger("info", "bench.log"); + size_t size = state.range(0); + + for (auto _ : state) { + Field a(UNKNOWN_FIELD, 0.0, 0, size); + Field b(UNKNOWN_FIELD, 0.0, 0, size); + a *= b; + } +} + +BENCHMARK(BM_AddScalar)->ThreadRange(1, 8)->Range(8, 8<<20)->Setup(DoSetup); +BENCHMARK(BM_AddFields)->ThreadRange(1, 8)->Range(8, 8<<20)->Setup(DoSetup); +BENCHMARK(BM_MulScalar)->ThreadRange(1, 8)->Range(8, 8<<20)->Setup(DoSetup); +BENCHMARK(BM_MulFields)->ThreadRange(1, 8)->Range(8, 
8<<20)->Setup(DoSetup); + +BENCHMARK_MAIN(); diff --git a/data_assimilation/data_assimilation.py b/data_assimilation/data_assimilation.py index 0d8c6f41..6edab3c9 100644 --- a/data_assimilation/data_assimilation.py +++ b/data_assimilation/data_assimilation.py @@ -1,23 +1,33 @@ -import wsgiref.validate +#!/usr/bin/env python3 + +import struct from datetime import datetime + +import h5py +import wsgiref.validate import numpy as np +def is_float(x: str) -> bool: + try: + float(x) + return True + except ValueError: + return False + + def get_date_now() -> str: return datetime.now().strftime('%a %b %d %H:%M:%S %Y') def write_field_data(file_name: str, data: dict, field_keys: list): - file = open(file_name, 'w') - for key in field_keys: - if key in data.keys(): + with h5py.File(file_name, 'w') as out: + for key in field_keys: + if key not in data.keys(): + continue + field = data[key] - line = '' - for number in field: - line += f'{number};' - line += '\n' - file.write(line) - file.close() + out.create_dataset(key, (len(field), ), dtype='d', data=field) class FieldReader: @@ -32,23 +42,13 @@ def __init__(self): self.read_header() def read_header(self): - fname = open(self.file_name, 'r') - # first line - line = fname.readline() - self.dt = float(line.split(';')[3]) - # second line - line = fname.readline().split(';')[1:] - self.grid_resolution = {'Nx': int(line[0]), 'Ny': int(line[1]), 'Nz': int(line[2])} - # third line - line = fname.readline().strip() - self.fields = line.split(';')[1:] - # fourth line - line = fname.readline().strip() - self.date = datetime.strptime(line.split(';')[1], '%a %b %d %H:%M:%S %Y') - # fifth line - line = fname.readline().strip() - self.xml_file_name = line.split(';')[1] - fname.close() + with h5py.File(self.file_name, 'r') as inp: + metadata = inp['metadata'] + self.dt = metadata['dt'][()] + self.grid_resolution = list(metadata['domain'][:]) + self.fields = list(metadata['fields'].asstr()[:]) + self.date =
datetime.strptime(metadata['date'].asstr()[()][0].strip(), '%a %b %d %H:%M:%S %Y') + self.xml_file_name = metadata['xml'][()][0] def print_header(self): print(f'dt: {self.dt}') @@ -57,37 +57,13 @@ def print_header(self): print(f'date: {self.date}') print(f'xml file name: {self.xml_file_name}') - def get_line_from_file(self, line_number: int) -> str: - return self.get_lines_from_file([line_number])[0] - - def get_pos_from_all_time_steps(self, line_numbers: list) -> list: - lines = [] - max_val = max(line_numbers) - file = open(self.file_name, 'r') - for i, line in enumerate(file): - if i in line_numbers: - lines.append(line) - if i > max_val: - break - file.close() - return lines - - def get_lines_from_file(self, line_numbers: list) -> list: - lines = [] - max_val = max(line_numbers) - file = open(self.file_name, 'r') - for i, line in enumerate(file): - if i in line_numbers: - lines.append(line) - if i > max_val: - break - file.close() - return lines + + def get_ts(self) -> [float]: + with h5py.File(self.file_name, 'r') as inp: + return sorted([float(x) for x in inp if is_float(x)]) def get_t_current(self) -> float: - first_line = self.get_line_from_file(0) - t_cur = first_line.split(';')[1] - return float(t_cur) + return self.get_ts()[-1] def get_xml_file_name(self) -> str: return self.xml_file_name @@ -104,24 +80,15 @@ def read_field_data(self, time_step: float) -> dict: print(f'cannot read time step {time_step} as the current time step is {t_cur}') return {} else: - number_of_fields = len(self.fields) - steps = int(time_step / self.dt) - 1 - - starting_line = 5 + (number_of_fields + 1) * steps + 1 - lines = self.get_lines_from_file(list(range(starting_line, starting_line + number_of_fields + 1))) fields = {} - for i in range(number_of_fields): - fields[self.fields[i]] = np.fromstring(lines[i], dtype=np.float, sep=';') + print(f'read time step {time_step}') + with h5py.File(self.file_name, 'r') as inp: + inp = inp[f'/{time_step:.5e}'] + for i in inp: + fields[i] = 
np.array(inp[i][0]) + print(f'read field: {i} {fields[i].shape}') + return fields def write_field_data(self, file_name: str, data: dict): - file = open(file_name, 'w') - for key in self.fields: - if key in data.keys(): - field = data[key] - line = '' - for number in field: - line += f'{number};' - line += '\n' - file.write(line) - file.close() + write_field_data(file_name=file_name, data=data, field_keys=self.fields) diff --git a/data_assimilation/main.py b/data_assimilation/main.py index a6fec78c..f5ece15f 100644 --- a/data_assimilation/main.py +++ b/data_assimilation/main.py @@ -60,6 +60,49 @@ def create_gradient_field(Nx: int, Ny: int, Nz: int) -> ndarray: return field +def gradient_tmp(): + reader = FieldReader() + reader.print_header() + + xml = XML(reader.get_xml_file_name()) + xml.read_xml() + domain = Domain(xml.domain, xml.obstacles) + domain.print_info() + domain.print_debug() + + dt = reader.dt + + t_cur = reader.get_t_current() + + n = int(t_cur/dt) + i = 300 + j = 15 + k = 16 + sensor_data= [] + print('iter') + f = open('visualisation.dat', 'r') + for i in range(6): + f.readline() + for i in range(1,34): + print(i, i * dt) + fields = [] + for i in range(6): + fields.append(np.fromstring(f.readline(), dtype=np.float, sep=';')) + sensor_data.append(fields[4][domain.calculate_index(i, j, k)]) + f.readline() + f.close() + print("plot") + f = open('tmp.tmp', 'w') + for i in sensor_data: + f.write(str(i) + "\n") + f.close() + #f = open('tmp.tmp', 'r') + #for i in f: + # sensor_data.append(float(i)) + plt.plot(sensor_data) + plt.show() + + def main(dry_run=False): cwd = os.getcwd() print(cwd) @@ -99,6 +142,8 @@ def main(dry_run=False): fields['T'] = field field_file_name = f'T_{t_cur}.dat' if dry_run: + for k in fields: + print((k, len(fields[k]))) data_assimilation.write_field_data(file_name=field_file_name, data=fields, field_keys=['u', 'v', 'w', 'p', 'T', 'C']) else: @@ -122,51 +167,5 @@ def main(dry_run=False): client.send_message(create_message(t, 
config_file_name)) -def gradient_tmp(): - - reader = FieldReader() - reader.print_header() - - xml = XML(reader.get_xml_file_name()) - xml.read_xml() - domain = Domain(xml.domain, xml.obstacles) - domain.print_info() - domain.print_debug() - - dt = reader.dt - - t_cur = reader.get_t_current() - - n = int(t_cur/dt) - i = 300 - j = 15 - k = 16 - sensor_data= [] - print('iter') - f = open('visualisation.dat', 'r') - for i in range(6): - f.readline() - for i in range(1,34): - print(i, i * dt) - fields = [] - for i in range(6): - fields.append(np.fromstring(f.readline(), dtype=np.float, sep=';')) - sensor_data.append(fields[4][domain.calculate_index(i, j, k)]) - f.readline() - f.close() - print("plot") - f = open('tmp.tmp', 'w') - for i in sensor_data: - f.write(str(i) + "\n") - f.close() - #f = open('tmp.tmp', 'r') - #for i in f: - # sensor_data.append(float(i)) - plt.plot(sensor_data) - plt.show() - - - if __name__ == '__main__': - #gradient_tmp() main(dry_run=False) diff --git a/external/HighFive b/external/HighFive new file mode 160000 index 00000000..8200f8f6 --- /dev/null +++ b/external/HighFive @@ -0,0 +1 @@ +Subproject commit 8200f8f6faa24bb46a38f6150671ba865403ec14 diff --git a/external/benchmark b/external/benchmark new file mode 160000 index 00000000..8d86026c --- /dev/null +++ b/external/benchmark @@ -0,0 +1 @@ +Subproject commit 8d86026c67e41b1f74e67c1b20cc8f73871bc76e diff --git a/external/googletest b/external/googletest index e2239ee6..2fe3bd99 160000 --- a/external/googletest +++ b/external/googletest @@ -1 +1 @@ -Subproject commit e2239ee6043f73722e7aa812a459f54a28552929 +Subproject commit 2fe3bd994b3189899d93f1d5a881e725e046fdc2 diff --git a/src/dataAssimilation/FieldIO.cpp b/src/dataAssimilation/FieldIO.cpp index 5d2ba69d..e5d910f7 100644 --- a/src/dataAssimilation/FieldIO.cpp +++ b/src/dataAssimilation/FieldIO.cpp @@ -12,7 +12,9 @@ #include #include + #include "../domain/DomainData.h" +#include "../utility/Mapping.h" FieldIO::FieldIO(const 
std::string &xml_file_name, const std::string &output_file_name) : @@ -25,12 +27,8 @@ FieldIO::FieldIO(const std::string &xml_file_name, const std::string &output_fil size_t n = static_cast(std::round(t_end / domain_data->get_physical_parameters().dt)) + 1; m_positions = new long[n]; - std::string header = create_header(xml_file_name); - m_positions[0] = static_cast(header.length()) + 1; - - std::ofstream output_file(m_file_name, std::ios_base::out); - output_file.write(header.c_str(), m_positions[0]); - output_file.close(); + HighFive::File out_file(m_file_name, HighFive::File::ReadWrite | HighFive::File::Create); + create_header(out_file, xml_file_name); } @@ -46,30 +44,24 @@ FieldIO::FieldIO(const std::string &xml_file_name, const std::string &output_fil /// \param C data of field C to be written out // ************************************************************************************************* void FieldIO::write_fields(real t_cur, Field &u, Field &v, Field &w, Field &p, Field &T, Field &C) { - std::string output = fmt::format("{:.5e}\n", t_cur); - Field fields[] = {u, v, w, p, T, C}; + HighFive::File out_file(m_file_name, HighFive::File::ReadWrite); + auto group_name = fmt::format("{:.5e}", t_cur); + m_logger->debug("attempt to write @t:{}", t_cur); + + if (out_file.exist(group_name)) { + out_file.unlink(group_name); + } + + HighFive::Group t_group = out_file.createGroup(group_name); + // Field fields[] = {u, v, w, p, T, C}; + Field fields[] = {u, v, w, T, C}; size_t size = u.get_size(); + std::vector dims{1, size}; for (Field &f: fields) { - for (size_t i = 0; i < size - 1; i++) { - output.append(fmt::format(("{};"), f[i])); - } - output.append(fmt::format(("{}\n"), f[size - 1])); + auto field_name = Mapping::get_field_type_name(f.get_type()); + m_logger->debug("attempt to write @t:{}:{}", t_cur, field_name); + t_group.createDataSet(field_name, HighFive::DataSpace(dims)); } - size_t n = static_cast(std::round(t_cur / 
DomainData::getInstance()->get_physical_parameters().dt)) - 1; - long length = static_cast(output.length()); - m_positions[n + 1] = m_positions[n] + length; - - std::fstream output_file(m_file_name); - // write field at position dependent on time step - m_logger->debug("times: {:>10d} write to: {:>20d}", n, m_positions[n]); - output_file.seekp(m_positions[n], std::ios_base::beg); - output_file.write(output.c_str(), length); - - // overwrite current time step - output_file.seekp(m_pos_time_step, std::ios_base::beg); - output_file.write(fmt::format("{:.5e}", t_cur).c_str(), 11); - - output_file.close(); } // ========================================== read ================================================= @@ -85,22 +77,14 @@ void FieldIO::write_fields(real t_cur, Field &u, Field &v, Field &w, Field &p, F // ************************************************************************************************* void FieldIO::read_fields(real t_cur, Field &u, Field &v, Field &w, Field &p, Field &T, Field &C) { m_logger->debug("read original data"); - std::ifstream input_file(m_file_name, std::ifstream::binary); - size_t n = static_cast(std::round(t_cur / DomainData::getInstance()->get_physical_parameters().dt)) - 1; - long pos = m_positions[n]; - m_logger->debug("times: {:>10d} read from: {:>20d}", n, m_positions[n]); - std::string line; - input_file.seekg(pos); - - std::getline(input_file, line); - m_logger->debug("read time step {}", line); - - read_field(input_file, u); - read_field(input_file, v); - read_field(input_file, w); - read_field(input_file, p); - read_field(input_file, T); - read_field(input_file, C); + HighFive::File input_file(m_file_name, HighFive::File::ReadOnly); + + read_vis_field(input_file, u, t_cur); + read_vis_field(input_file, v, t_cur); + read_vis_field(input_file, w, t_cur); + // read_vis_field(input_file, p, t_cur); + read_vis_field(input_file, T, t_cur); + read_vis_field(input_file, C, t_cur); } // ================================= write header 
================================================== @@ -111,49 +95,73 @@ void FieldIO::read_fields(real t_cur, Field &u, Field &v, Field &w, Field &p, Fi /// ###FIELDS;u;v;w;p;T;concentration /// ###DATE:;XML: // ************************************************************************************************* -std::string FieldIO::create_header(const std::string &xml_file_name) { +void FieldIO::create_header(HighFive::File &file, const std::string &xml_file_name) { auto end = std::chrono::system_clock::now(); std::time_t end_time = std::chrono::system_clock::to_time_t(end); + if (file.exist("metadata")) { + file.unlink("metadata"); + } + HighFive::Group meta_group = file.createGroup("metadata"); + auto domain_data = DomainData::getInstance(); + + auto dt = domain_data->get_physical_parameters().dt; + HighFive::DataSet dt_set = meta_group.createDataSet("dt", HighFive::DataSpace::From(dt)); + dt_set.write(dt); + size_t Nx = domain_data->get_number_of_cells(CoordinateAxis::X); size_t Ny = domain_data->get_number_of_cells(CoordinateAxis::Y); size_t Nz = domain_data->get_number_of_cells(CoordinateAxis::Z); - std::string string_t_cur_text = "Current time step;"; - m_pos_time_step = static_cast(string_t_cur_text.length()); - std::string header = fmt::format("{}{:.5e};dt;{}\n", string_t_cur_text, 0.0, - DomainData::getInstance()->get_physical_parameters().dt); - header.append(fmt::format("###DOMAIN;{};{};{}\n", Nx, Ny, Nz)); - header.append(fmt::format("###FIELDS;u;v;w;p;T;concentration\n")); - header.append(fmt::format("###DATE;{}", std::ctime(&end_time))); - header.append(fmt::format("###XML;{}\n", xml_file_name)); - return header; + size_t domain[3] = {Nx, Ny, Nz}; + HighFive::DataSet domain_set = meta_group.createDataSet("domain", HighFive::DataSpace::From(domain)); + domain_set.write(domain); + + std::vector fields{"u" , "v", "w", "p", "T", "concentration"}; + HighFive::DataSet fields_set = meta_group.createDataSet("fields", HighFive::DataSpace::From(fields)); + 
fields_set.write(fields); + + std::string date[1] = {std::ctime(&end_time)}; + HighFive::DataSet date_set = meta_group.createDataSet("date", HighFive::DataSpace::From(date)); + date_set.write(date); + + std::string xml[1] = {xml_file_name}; + HighFive::DataSet xml_set = meta_group.createDataSet("xml", HighFive::DataSpace::From(date)); + xml_set.write(xml); } -void FieldIO::read_field(std::ifstream &file_stream, Field &field) { - std::string line; - auto to_double = [](const auto &v) { return std::stod(v); }; +void FieldIO::read_field(HighFive::File &file, Field &field) { + auto field_name = Mapping::get_field_type_name(field.get_type()); + auto ds = file.getDataSet(field_name); + ds.read(field.data); +} - std::getline(file_stream, line); - std::vector divided_string = Utility::split(line, ';'); - std::transform(divided_string.begin(), divided_string.end(), field.data, to_double); +void FieldIO::read_vis_field(HighFive::File &file, Field &field, const real t) { + auto field_name = Mapping::get_field_type_name(field.get_type()); + auto full_name = fmt::format("/{:.5e}/{}", t, field_name); + m_logger->info("opening vis file at {}", full_name); + auto ds = file.getDataSet(full_name); + ds.read(field.data); } void FieldIO::read_fields(const Settings::data_assimilation::field_changes &field_changes, Field &u, Field &v, Field &w, Field &p, Field &T, Field &C) { + int n; std::string line; if (!field_changes.changed) { return; } + // no changes -> read original file - std::ifstream file_changes(field_changes.file_name, std::ifstream::binary); - if (file_changes.is_open()) { // could not open file -> read original file + warning + m_logger->info("opening dat file {}", field_changes.file_name); + try { + HighFive::File file_changes(field_changes.file_name, HighFive::File::ReadOnly); + if (field_changes.u_changed) { read_field(file_changes, u); m_logger->debug("read changed u Field"); - std::getline(file_changes, line); } if (field_changes.v_changed) { read_field(file_changes, 
v); @@ -175,8 +183,9 @@ read_field(file_changes, C); m_logger->debug("read changed C Field"); } - } else { + } catch (const std::exception &ex) { m_logger->warn(fmt::format("File '{}' could not be opened. No changes will be applied.", field_changes.file_name)); + m_logger->warn(fmt::format("Exception during reading {}", ex.what())); } } @@ -184,71 +193,56 @@ void FieldIO::read_fields(const real t_cur, const Settings::data_assimilation::field_changes &field_changes, Field &u, Field &v, Field &w, Field &p, Field &T, Field &C) { - std::ifstream file_original(m_file_name, std::ifstream::binary); - size_t n = static_cast(std::round(t_cur / DomainData::getInstance()->get_physical_parameters().dt)) - 1; - file_original.seekg(m_positions[n]); + m_logger->debug("read time step {}", t_cur); + if (!field_changes.changed) { // no changes -> read original file + m_logger->debug("no field changes"); + read_fields(t_cur, u, v, w, p, T, C); + return; + } - std::string line; - std::getline(file_original, line); - m_logger->debug("read time step {}", line); + try { + HighFive::File org_file(m_file_name, HighFive::File::ReadOnly); + HighFive::File new_file(field_changes.file_name, HighFive::File::ReadOnly); - if (field_changes.changed) { // no changes -> read original file - std::ifstream file_changes(field_changes.file_name, std::ifstream::binary); - if (file_changes.is_open()) { // could not open file -> read original file + warning - if (field_changes.u_changed) { - read_field(file_changes, u); - std::getline(file_original, line); - m_logger->debug("read changed u Field"); - } else { - read_field(file_original, u); - std::getline(file_changes, line); - } - if (field_changes.v_changed) { - read_field(file_changes, v); - std::getline(file_original, line); - m_logger->debug("read changed v Field"); - } else { - read_field(file_original, v); - std::getline(file_changes, line); - } - if
(field_changes.w_changed) { - read_field(file_changes, w); - std::getline(file_original, line); - m_logger->debug("read changed w Field"); - } else { - read_field(file_original, w); - std::getline(file_changes, line); - } - if (field_changes.p_changed) { - read_field(file_changes, p); - std::getline(file_original, line); - m_logger->debug("read changed p Field"); - } else { - read_field(file_original, p); - std::getline(file_changes, line); - } - if (field_changes.T_changed) { - read_field(file_changes, T); - std::getline(file_original, line); - m_logger->debug("read changed T Field"); - } else { - read_field(file_original, T); - std::getline(file_changes, line); - } - if (field_changes.C_changed) { - read_field(file_changes, C); - std::getline(file_original, line); - m_logger->debug("read changed C Field"); - } else { - read_field(file_original, C); - std::getline(file_changes, line); - } + if (field_changes.u_changed) { + read_field(new_file, u); + m_logger->debug("read changed u Field"); } else { - m_logger->warn(fmt::format("File '{}' could not be opened, original data will be loaded", field_changes.file_name)); - read_fields(t_cur, u, v, w, p, T, C); + read_vis_field(org_file, u, t_cur); } - } else { - m_logger->debug("no field changes"); + if (field_changes.v_changed) { + read_field(new_file, v); + m_logger->debug("read changed v Field"); + } else { + read_vis_field(org_file, v, t_cur); + } + if (field_changes.w_changed) { + read_field(new_file, w); + m_logger->debug("read changed w Field"); + } else { + read_vis_field(org_file, w, t_cur); + } + if (field_changes.p_changed) { + read_field(new_file, p); + m_logger->debug("read changed p Field"); + } else { + read_vis_field(org_file, p, t_cur); + } + if (field_changes.T_changed) { + read_field(new_file, T); + m_logger->debug("read changed T Field"); + } else { + read_vis_field(org_file, T, t_cur); + } + if (field_changes.C_changed) { + read_field(new_file, C); + m_logger->debug("read changed C Field"); + } else 
{ + read_vis_field(org_file, C, t_cur); + } + } catch (const std::exception &ex) { + m_logger->warn(fmt::format("File '{}' could not be opened, original data will be loaded", field_changes.file_name)); + m_logger->warn(fmt::format("Exception during reading {}", ex.what())); read_fields(t_cur, u, v, w, p, T, C); } } diff --git a/src/dataAssimilation/FieldIO.h b/src/dataAssimilation/FieldIO.h index 1eb9ae7a..055a1604 100644 --- a/src/dataAssimilation/FieldIO.h +++ b/src/dataAssimilation/FieldIO.h @@ -9,6 +9,11 @@ #include #include +#include + +#include +#include + #include "../field/FieldController.h" #include "../utility/Utility.h" #include "../solver/SolverController.h" @@ -27,7 +32,7 @@ class FieldIO { Field &p, Field &T, Field &C); private: - std::string create_header(const std::string &xml_file_name); + void create_header(HighFive::File &file, const std::string &xml_file_name); void read_fields(real t_cur, Field &u, Field &v, Field &w, Field &p, Field &T, Field &C); long *m_positions; @@ -36,7 +41,8 @@ class FieldIO { long m_pos_time_step; std::shared_ptr m_logger; - void read_field(std::ifstream &file_stream, Field &field); + void read_field(HighFive::File &file, Field &field); + void read_vis_field(HighFive::File &file, Field &field, const real t); }; #endif /* ARTSS_VISUALISATION_FIELDIO_H */ diff --git a/src/domain/Coordinate.h b/src/domain/Coordinate.h index c655e538..95c54342 100644 --- a/src/domain/Coordinate.h +++ b/src/domain/Coordinate.h @@ -47,7 +47,7 @@ class Coordinate { if (i == CoordinateAxis::Z) { return z; } - throw std::runtime_error("unknown axis"); + throw std::runtime_error("unknown axis"); } const inline numeral &operator[](size_t i) const { @@ -60,7 +60,7 @@ if (i == CoordinateAxis::Z) { return z; } - throw std::runtime_error("unknown axis"); + throw std::runtime_error("unknown axis"); } Coordinate &operator+=(const Coordinate &rhs) {