From b916b9ea1c030b1e096545f44db57be85ebc2d23 Mon Sep 17 00:00:00 2001
From: Qiqi Wang
Date: Fri, 13 Oct 2017 15:18:10 -0400
Subject: [PATCH] Added back openfoam4 scripts

---
 tools/openfoam4/scripts/__init__.py   |   0
 tools/openfoam4/scripts/foam_data.py  | 118 ++++++++++++++++++++++++++
 tools/openfoam4/scripts/foam_to_h5.py |  36 ++++++++
 tools/openfoam4/scripts/h5_to_foam.py |  59 +++++++++++++
 4 files changed, 213 insertions(+)
 create mode 100644 tools/openfoam4/scripts/__init__.py
 create mode 100644 tools/openfoam4/scripts/foam_data.py
 create mode 100644 tools/openfoam4/scripts/foam_to_h5.py
 create mode 100644 tools/openfoam4/scripts/h5_to_foam.py

diff --git a/tools/openfoam4/scripts/__init__.py b/tools/openfoam4/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tools/openfoam4/scripts/foam_data.py b/tools/openfoam4/scripts/foam_data.py
new file mode 100644
index 0000000..48abb6b
--- /dev/null
+++ b/tools/openfoam4/scripts/foam_data.py
@@ -0,0 +1,118 @@
+import os
+import gzip
+
+import numpy as np
+
+def find_data_path(comm, path, time, mkdir=False):
+    if mkdir:
+        if comm.rank == 0 and not os.path.exists(path):
+            os.mkdir(path)
+        comm.Barrier()
+    if comm.size > 1:
+        proc_path = os.path.join(path,
+                'processor{0}'.format(comm.rank))
+        if mkdir:
+            if not os.path.exists(proc_path):
+                os.mkdir(proc_path)
+        else:
+            assert os.path.exists(proc_path)
+            if comm.rank == 0:
+                not_exist_path = os.path.join(path,
+                        'processor{0}'.format(comm.size))
+                assert not os.path.exists(not_exist_path)
+    else:
+        proc_path = path
+    data_path = os.path.join(proc_path, time)
+    if mkdir:
+        if not os.path.exists(data_path):
+            os.mkdir(data_path)
+    else:
+        assert os.path.exists(data_path)
+    return data_path
+
+def split_line_parenthesis(line):
+    sub_lines = line.split(b'(')
+    split_line = []
+    split_depth = []
+    depth = 0
+    for sub_line in sub_lines:
+        split_sub_line = sub_line.split(b')')
+        split_line.extend(split_sub_line)
+        split_depth.append(depth - np.arange(len(split_sub_line)))
+        depth += 2 - len(split_sub_line)
+    return split_line, np.hstack(split_depth)
+
+class FileParser:
+    def __init__(self, data_path):
+        self.data_path = data_path
+
+    def parse(self, filename):
+        filename = os.path.join(self.data_path, filename)
+        if not filename.endswith('.gz'):
+            return
+        f = gzip.open(filename)
+        self.is_scalar = False
+        self.is_vector = False
+        line_beginning_depth = 0
+        for line in f:
+            if line.strip().startswith(b'class '):
+                assert not self.is_scalar and not self.is_vector
+                if b'Vector' in line:
+                    self.is_vector = True
+                elif b'Scalar' in line:
+                    self.is_scalar = True
+            split_line, split_depth = split_line_parenthesis(line)
+            line_beginning_depth += split_depth[-1]
+            yield split_line, split_depth + line_beginning_depth
+        assert line_beginning_depth == 0
+
+class DataLoader:
+    def __init__(self, data_path):
+        self.parser = FileParser(data_path)
+
+    def __call__(self, filename):
+        data = []
+        for split_line, split_depth in self.parser.parse(filename):
+            for sub_line, sub_depth in zip(split_line, split_depth):
+                if (sub_depth > 0 and self.parser.is_scalar or
+                        sub_depth > 1 and self.parser.is_vector):
+                    data.extend(sub_line.strip().split())
+        data = np.array(data, float)
+        if filename.startswith('nu'):
+            data[data<=0] = 0
+        return data
+
+
+def join_line_parenthesis(split_line, split_depth):
+    line = split_line[0]
+    for i in range(1, len(split_line)):
+        if split_depth[i] > split_depth[i-1]:
+            line += b'('
+        else:
+            line += b')'
+        line += split_line[i]
+    return line
+
+class DataWriter:
+    def __init__(self, ref_path, target_path):
+        self.parser = FileParser(ref_path)
+        self.target_path = target_path
+
+    def __call__(self, data, filename):
+        data_ptr = 0
+        with gzip.open(os.path.join(self.target_path, filename), 'wb') as f:
+            for split_line, split_depth in self.parser.parse(filename):
+                assert len(split_line) == len(split_depth)
+                for i in range(len(split_line)):
+                    if (split_depth[i] > 0 and self.parser.is_scalar or
+                            split_depth[i] > 1 and self.parser.is_vector):
+                        ni = len(split_line[i].strip().split())
+                        data_i = data[data_ptr:data_ptr+ni]
+                        data_i = ['{0:.18g}'.format(d) for d in data_i]
+                        data_ptr += ni
+                        if b'\n' in split_line[i]:
+                            split_line[i] = (' '.join(data_i) + '\n').encode()
+                        else:
+                            split_line[i] = (' '.join(data_i)).encode()
+                f.write(join_line_parenthesis(split_line, split_depth))
+        return data[:data_ptr]
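A minimal sketch (not part of the patch) of how the parenthesis splitter round-trips an OpenFOAM-style vector list; the sample byte string is hypothetical, and foam_data.py is assumed importable:

    from foam_data import split_line_parenthesis, join_line_parenthesis

    # '3(...)' is a list of three vectors; each '(' adds one level of depth.
    line = b'3((0 0 0) (1 0 0) (2 0 0))\n'
    split_line, split_depth = split_line_parenthesis(line)
    # The vector components sit at depth 2, which is why DataLoader keeps
    # tokens with sub_depth > 1 for vector fields and > 0 for scalar fields.
    assert join_line_parenthesis(split_line, split_depth) == line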
diff --git a/tools/openfoam4/scripts/foam_to_h5.py b/tools/openfoam4/scripts/foam_to_h5.py
new file mode 100644
index 0000000..dc67b43
--- /dev/null
+++ b/tools/openfoam4/scripts/foam_to_h5.py
@@ -0,0 +1,36 @@
+import os
+import sys
+import argparse
+
+import h5py
+import numpy as np
+from mpi4py import MPI
+
+my_path = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(my_path)
+
+from foam_data import *
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Foam time to h5py')
+    parser.add_argument('foam_path', type=str, help='Path to OpenFOAM case')
+    parser.add_argument('time', type=str, help='Time to convert')
+    parser.add_argument('output', type=str, help='hdf5 output file')
+    args = parser.parse_args()
+
+    comm = MPI.COMM_WORLD
+
+    data_path = find_data_path(comm, args.foam_path, args.time)
+    data = list(map(DataLoader(data_path), sorted(os.listdir(data_path))))
+    data = np.hstack(data)
+
+    data_size = np.zeros(comm.size, int)
+    data_size[comm.rank] = data.size
+    comm.Allreduce(MPI.IN_PLACE, data_size, MPI.SUM)
+    i_start = data_size[:comm.rank].sum()
+    i_end = data_size[:comm.rank+1].sum()
+
+    handle = h5py.File(args.output, 'w', driver='mpio', comm=comm)
+    d = handle.create_dataset('field', shape=(data_size.sum(),), dtype='d')
+    d[i_start:i_end] = data
+    handle.close()
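foam_to_h5.py concatenates every field file in the chosen time directory (one processorN directory per MPI rank for a decomposed case) into a single 1-D double dataset named 'field'. A minimal sketch, not part of the patch, of inspecting the output serially; the file name fields.h5 is hypothetical:

    import h5py

    # A plain serial read suffices for inspection; the mpio driver is only
    # needed for the collective parallel write done by foam_to_h5.py.
    with h5py.File('fields.h5', 'r') as handle:
        field = handle['field'][:]          # all ranks' data, in rank order
        print(field.shape, field.dtype)     # (total size,), float64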
diff --git a/tools/openfoam4/scripts/h5_to_foam.py b/tools/openfoam4/scripts/h5_to_foam.py
new file mode 100644
index 0000000..115812a
--- /dev/null
+++ b/tools/openfoam4/scripts/h5_to_foam.py
@@ -0,0 +1,59 @@
+import os
+import sys
+import gzip
+import shutil
+import argparse
+
+import h5py
+import numpy as np
+import mpi4py
+from mpi4py import MPI
+
+my_path = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(my_path)
+
+from foam_data import *
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='h5py to Foam time')
+    parser.add_argument('ref_path', type=str, help='Reference OpenFOAM case')
+    parser.add_argument('hdf5_input', type=str, help='Input hdf5 file name')
+    parser.add_argument('out_path', type=str, help='Output OpenFOAM case path')
+    parser.add_argument('time', type=str, help='Time')
+    args = parser.parse_args()
+
+    comm = MPI.COMM_WORLD
+
+    ref_path = find_data_path(comm, args.ref_path, args.time)
+    data = list(map(DataLoader(ref_path), sorted(os.listdir(ref_path))))
+    data = np.hstack(data)
+
+    data_size = np.zeros(comm.size, int)
+    data_size[comm.rank] = data.size
+    comm.Allreduce(MPI.IN_PLACE, data_size, MPI.SUM)
+    i_start = data_size[:comm.rank].sum()
+    i_end = data_size[:comm.rank+1].sum()
+
+    handle = h5py.File(args.hdf5_input, 'r', driver='mpio', comm=comm)
+    data = handle['/field'][i_start:i_end]
+    handle.close()
+
+    out_path = find_data_path(comm, args.out_path, args.time, True)
+    writer = DataWriter(ref_path, out_path)
+    for fname in sorted(os.listdir(ref_path)):
+        if not fname.endswith('.gz'):
+            continue
+        written_data = writer(data, fname)
+        assert written_data.size <= data.size
+        data = data[written_data.size:]
+    assert data.size == 0
+
+    if comm.rank == 0:
+        shutil.copytree(os.path.join(args.ref_path, 'system'),
+                        os.path.join(args.out_path, 'system'))
+        shutil.copytree(os.path.join(args.ref_path, 'constant'),
+                        os.path.join(args.out_path, 'constant'))
+    if comm.size > 1:
+        proc_path = 'processor{0}'.format(comm.rank)
+        shutil.copytree(os.path.join(args.ref_path, proc_path, 'constant'),
+                        os.path.join(args.out_path, proc_path, 'constant'))
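A hypothetical round trip through both scripts, assuming the case was decomposed for the same number of ranks the scripts are launched with (the paths and time are illustrative): foam_to_h5.py packs time 1000 of case_dir into fields.h5, and h5_to_foam.py rebuilds that time under case_copy, copying system/ and constant/ from the reference case:

    mpirun -np 4 python foam_to_h5.py case_dir 1000 fields.h5
    mpirun -np 4 python h5_to_foam.py case_dir fields.h5 case_copy 1000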