diff --git a/dfode_kit/__init__.py b/dfode_kit/__init__.py index 761fafe..5435bc1 100644 --- a/dfode_kit/__init__.py +++ b/dfode_kit/__init__.py @@ -34,11 +34,11 @@ "df_to_h5": ("dfode_kit.cases.sampling", "df_to_h5"), "touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"), "get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"), - "advance_reactor": ("dfode_kit.data_operations.h5_kit", "advance_reactor"), - "load_model": ("dfode_kit.data_operations.h5_kit", "load_model"), - "predict_Y": ("dfode_kit.data_operations.h5_kit", "predict_Y"), - "nn_integrate": ("dfode_kit.data_operations.h5_kit", "nn_integrate"), - "integrate_h5": ("dfode_kit.data_operations.h5_kit", "integrate_h5"), + "advance_reactor": ("dfode_kit.data.integration", "advance_reactor"), + "load_model": ("dfode_kit.data.integration", "load_model"), + "predict_Y": ("dfode_kit.data.integration", "predict_Y"), + "nn_integrate": ("dfode_kit.data.integration", "nn_integrate"), + "integrate_h5": ("dfode_kit.data.integration", "integrate_h5"), } diff --git a/dfode_kit/cli/commands/init.py b/dfode_kit/cli/commands/init.py index afc15de..86d4415 100644 --- a/dfode_kit/cli/commands/init.py +++ b/dfode_kit/cli/commands/init.py @@ -109,7 +109,7 @@ def _handle_one_d_flame(args): json_result = {'case_type': 'oneD-flame'} if args.json else None if args.write_config: - from dfode_kit.df_interface.case_init import dump_plan_json + from dfode_kit.cases.init import dump_plan_json config_path = dump_plan_json(plan, args.write_config) if args.json: diff --git a/dfode_kit/cli/commands/init_helpers.py b/dfode_kit/cli/commands/init_helpers.py index 9867224..48232a6 100644 --- a/dfode_kit/cli/commands/init_helpers.py +++ b/dfode_kit/cli/commands/init_helpers.py @@ -6,7 +6,7 @@ from pathlib import Path from typing import Any -from dfode_kit.df_interface.case_init import ( +from dfode_kit.cases.init import ( DEFAULT_ONE_D_FLAME_TEMPLATE, OneDFlameInitInputs, dump_plan_json, @@ -117,7 +117,7 @@ def apply_one_d_flame_plan( 
overrides = one_d_flame_overrides_from_plan(plan) cfg = _build_one_d_flame_config(inputs, overrides, quiet=quiet) - from dfode_kit.df_interface.oneDflame_setup import setup_one_d_flame_case + from dfode_kit.cases.deepflame import setup_one_d_flame_case if quiet: with redirect_stdout(io.StringIO()): @@ -139,7 +139,7 @@ def _build_one_d_flame_config( overrides: dict[str, Any], quiet: bool = False, ): - from dfode_kit.df_interface.flame_configurations import OneDFreelyPropagatingFlameConfig + from dfode_kit.cases.presets import OneDFreelyPropagatingFlameConfig cfg = OneDFreelyPropagatingFlameConfig( mechanism=inputs.mechanism, diff --git a/dfode_kit/cli/commands/sample.py b/dfode_kit/cli/commands/sample.py index 6e0cfe0..84924b3 100644 --- a/dfode_kit/cli/commands/sample.py +++ b/dfode_kit/cli/commands/sample.py @@ -31,7 +31,7 @@ def add_command_parser(subparsers): def handle_command(args): from dfode_kit.data.io_hdf5 import touch_h5 - from dfode_kit.df_interface.sample_case import df_to_h5 + from dfode_kit.cases.sampling import df_to_h5 print('Handling sample command') df_to_h5(args.case, args.mech, args.save, include_mesh=args.include_mesh) diff --git a/dfode_kit/data/__init__.py b/dfode_kit/data/__init__.py index 32587c5..1b5e151 100644 --- a/dfode_kit/data/__init__.py +++ b/dfode_kit/data/__init__.py @@ -10,6 +10,12 @@ "require_h5_group", "touch_h5", "get_TPY_from_h5", + "advance_reactor", + "load_model", + "predict_Y", + "nn_integrate", + "integrate_h5", + "calculate_error", ] _ATTRIBUTE_MODULES = { @@ -21,6 +27,12 @@ "require_h5_group": ("dfode_kit.data.contracts", "require_h5_group"), "touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"), "get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"), + "advance_reactor": ("dfode_kit.data.integration", "advance_reactor"), + "load_model": ("dfode_kit.data.integration", "load_model"), + "predict_Y": ("dfode_kit.data.integration", "predict_Y"), + "nn_integrate": ("dfode_kit.data.integration", "nn_integrate"), + 
"integrate_h5": ("dfode_kit.data.integration", "integrate_h5"), + "calculate_error": ("dfode_kit.data.integration", "calculate_error"), } diff --git a/dfode_kit/data/integration.py b/dfode_kit/data/integration.py new file mode 100644 index 0000000..5225bf6 --- /dev/null +++ b/dfode_kit/data/integration.py @@ -0,0 +1,211 @@ +import h5py +import torch +import numpy as np +import cantera as ct + +from dfode_kit.data.contracts import MECHANISM_ATTR, require_h5_attr, read_scalar_field_datasets +from dfode_kit.data.io_hdf5 import get_TPY_from_h5, touch_h5 +from dfode_kit.utils import BCT, inverse_BCT + + +def advance_reactor(gas, state, reactor, reactor_net, time_step): + """Advance the reactor simulation for a given state.""" + state = state.flatten() + + expected_shape = (2 + gas.n_species,) + if state.shape != expected_shape: + raise ValueError( + f"Expected state shape {expected_shape}, got {state.shape}" + ) + + gas.TPY = state[0], state[1], state[2:] + + reactor.syncState() + reactor_net.reinitialize() + reactor_net.advance(time_step) + reactor_net.set_initial_time(0.0) + + return gas + + +@torch.no_grad() +def load_model(model_path, device, model_class, model_layers): + state_dict = torch.load(model_path, map_location='cpu') + + model = model_class(model_layers) + model.load_state_dict(state_dict['net']) + + model.eval() + model.to(device=device) + + return model + + +@torch.no_grad() +def predict_Y(model, model_path, d_arr, mech, device): + gas = ct.Solution(mech) + n_species = gas.n_species + expected_dims = 2 + n_species + if d_arr.shape[1] != expected_dims: + raise ValueError( + f"Expected input with {expected_dims} columns, got {d_arr.shape[1]}" + ) + + state_dict = torch.load(model_path, map_location='cpu') + + Xmu0 = state_dict['data_in_mean'] + Xstd0 = state_dict['data_in_std'] + Ymu0 = state_dict['data_target_mean'] + Ystd0 = state_dict['data_target_std'] + + d_arr = np.clip(d_arr, 0, None) + d_arr[:, 1] *= 0 + d_arr[:, 1] += 101325 + + orig_Y = d_arr[:, 
2:].copy() + in_bct = d_arr.copy() + in_bct[:, 2:] = BCT(in_bct[:, 2:]) + in_bct_norm = (in_bct - Xmu0) / Xstd0 + + input = torch.from_numpy(in_bct_norm).float().to(device=device) + + output = model(input) + + out_bct = output.cpu().numpy() * Ystd0 + Ymu0 + in_bct[:, 2:-1] + next_Y = orig_Y.copy() + next_Y[:, :-1] = inverse_BCT(out_bct) + next_Y[:, :-1] = next_Y[:, :-1] / np.sum(next_Y[:, :-1], axis=1, keepdims=True) * (1 - next_Y[:, -1:]) + + return next_Y + + +@torch.no_grad() +def nn_integrate(orig_arr, model_path, device, model_class, model_layers, time_step, mech, frozen_temperature=510): + model = load_model(model_path, device, model_class, model_layers) + + mask = orig_arr[:, 0] > frozen_temperature + infer_arr = orig_arr[mask, :] + + next_Y = predict_Y(model, model_path, infer_arr, mech, device) + + new_states = np.hstack((np.zeros((orig_arr.shape[0], 1)), orig_arr)) + new_states[:, 0] += time_step + new_states[:, 2] = orig_arr[:, 1] + new_states[mask, 3:] = next_Y + + setter_gas = ct.Solution(mech) + getter_gas = ct.Solution(mech) + new_T = np.zeros_like(next_Y[:, 0]) + + for idx, (state, next_y) in enumerate(zip(infer_arr, next_Y)): + try: + setter_gas.TPY = state[0], state[1], state[2:] + h = setter_gas.enthalpy_mass + + getter_gas.Y = next_y + getter_gas.HP = h, state[1] + + new_T[idx] = getter_gas.T + + except ct.CanteraError: + continue + new_states[mask, 1] = new_T + + return new_states + + +def integrate_h5( + file_path, + save_path1, + save_path2, + time_step, + cvode_integration=True, + nn_integration=False, + model_settings=None, +): + """Process scalar-field datasets and save CVODE / NN integration outputs.""" + with h5py.File(file_path, 'r') as f: + mech = require_h5_attr(f, MECHANISM_ATTR) + + data_dict = read_scalar_field_datasets(file_path) + + if cvode_integration: + gas = ct.Solution(mech) + reactor = ct.Reactor(gas, name='Reactor1', energy='off') + reactor_net = ct.ReactorNet([reactor]) + reactor_net.rtol, reactor_net.atol = 1e-6, 1e-10 + 
+ processed_data_dict = {} + + for name, data in data_dict.items(): + processed_data = np.empty((data.shape[0], data.shape[1] + 1)) + for i, state in enumerate(data): + gas = advance_reactor(gas, state, reactor, reactor_net, time_step) + + new_state = np.array([time_step, gas.T, gas.P] + list(gas.Y)) + + processed_data[i, :] = new_state + + processed_data_dict[name] = processed_data + + with h5py.File(save_path1, 'a') as f: + cvode_group = f.create_group('cvode_integration') + + for dataset_name, processed_data in processed_data_dict.items(): + cvode_group.create_dataset(dataset_name, data=processed_data) + print(f'Saved processed dataset: {dataset_name} in cvode_integration group') + + if nn_integration: + processed_data_dict = {} + if model_settings is None: + raise ValueError("model_settings must be provided for neural network integration.") + + for name, data in data_dict.items(): + try: + processed_data = nn_integrate(data, **model_settings) + processed_data_dict[name] = processed_data + except Exception as e: + print(f"Error processing dataset '{name}': {e}") + + with h5py.File(save_path2, 'a') as f: + if 'nn_integration' in f: + del f['nn_integration'] + nn_group = f.create_group('nn_integration') + + for dataset_name, processed_data in processed_data_dict.items(): + nn_group.create_dataset(dataset_name, data=processed_data) + print(f'Saved processed dataset: {dataset_name} in nn_integration group') + + +def calculate_error( + mech_path, + save_path1, + save_path2, + error='RMSE' +): + gas = ct.Solution(mech_path) + + with h5py.File(save_path1, 'r') as f1, h5py.File(save_path2, 'r') as f2: + cvode_group = f1['cvode_integration'] + nn_group = f2['nn_integration'] + + common_datasets = set(cvode_group.keys()) & set(nn_group.keys()) + + sorted_datasets = sorted(common_datasets, key=lambda x: float(x)) + results = {} + + for ds_name in sorted_datasets: + cvode_data = cvode_group[ds_name][:, 3:] + nn_data = nn_group[ds_name][:, 3:] + + if error == "RMSE": + 
rmse_per_dim = np.sqrt(np.mean((cvode_data - nn_data) ** 2, axis=0)) + results[ds_name] = rmse_per_dim + + print(f"RMSE of dataset: {ds_name}") + for dim_idx, rmse_val in enumerate(rmse_per_dim, start=1): + id = gas.species_names[dim_idx - 1] + print(f" Species {id}: {rmse_val:.6e}") + print() + + return results diff --git a/dfode_kit/data_operations/__init__.py b/dfode_kit/data_operations/__init__.py index 336d74d..1234364 100644 --- a/dfode_kit/data_operations/__init__.py +++ b/dfode_kit/data_operations/__init__.py @@ -22,11 +22,11 @@ _ATTRIBUTE_MODULES = { "touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"), "get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"), - "integrate_h5": ("dfode_kit.data_operations.h5_kit", "integrate_h5"), - "load_model": ("dfode_kit.data_operations.h5_kit", "load_model"), - "nn_integrate": ("dfode_kit.data_operations.h5_kit", "nn_integrate"), - "predict_Y": ("dfode_kit.data_operations.h5_kit", "predict_Y"), - "calculate_error": ("dfode_kit.data_operations.h5_kit", "calculate_error"), + "integrate_h5": ("dfode_kit.data.integration", "integrate_h5"), + "load_model": ("dfode_kit.data.integration", "load_model"), + "nn_integrate": ("dfode_kit.data.integration", "nn_integrate"), + "predict_Y": ("dfode_kit.data.integration", "predict_Y"), + "calculate_error": ("dfode_kit.data.integration", "calculate_error"), "random_perturb": ("dfode_kit.data_operations.augment_data", "random_perturb"), "label_npy": ("dfode_kit.data_operations.label_data", "label_npy"), "SCALAR_FIELDS_GROUP": ("dfode_kit.data.contracts", "SCALAR_FIELDS_GROUP"), diff --git a/dfode_kit/data_operations/augment_data.py b/dfode_kit/data_operations/augment_data.py index d9892be..7429640 100644 --- a/dfode_kit/data_operations/augment_data.py +++ b/dfode_kit/data_operations/augment_data.py @@ -1,7 +1,7 @@ import numpy as np import cantera as ct import time -from dfode_kit.data_operations.h5_kit import advance_reactor +from dfode_kit.data.integration import advance_reactor
from dfode_kit.training.formation import formation_calculate def single_step(npstate, chem, time_step=1e-6): diff --git a/dfode_kit/data_operations/h5_kit.py b/dfode_kit/data_operations/h5_kit.py index 64a62b2..dca8954 100644 --- a/dfode_kit/data_operations/h5_kit.py +++ b/dfode_kit/data_operations/h5_kit.py @@ -1,236 +1,37 @@ -import h5py -import torch -import numpy as np -import cantera as ct - -from dfode_kit.data.contracts import MECHANISM_ATTR, require_h5_attr -from dfode_kit.data.io_hdf5 import get_TPY_from_h5, touch_h5 -from dfode_kit.utils import BCT, inverse_BCT - -def advance_reactor(gas, state, reactor, reactor_net, time_step): - """Advance the reactor simulation for a given state.""" - state = state.flatten() - - expected_shape = (2 + gas.n_species,) - if state.shape != expected_shape: - raise ValueError( - f"Expected state shape {expected_shape}, got {state.shape}" - ) - - gas.TPY = state[0], state[1], state[2:] - - reactor.syncState() - reactor_net.reinitialize() - reactor_net.advance(time_step) - reactor_net.set_initial_time(0.0) - - return gas - -@torch.no_grad() -def load_model(model_path, device, model_class, model_layers): - state_dict = torch.load(model_path, map_location='cpu') - - model = model_class(model_layers) - model.load_state_dict(state_dict['net']) - - model.eval() - model.to(device=device) - - return model - -@torch.no_grad() -def predict_Y(model, model_path, d_arr, mech, device): - gas = ct.Solution(mech) - n_species = gas.n_species - expected_dims = 2 + n_species - if d_arr.shape[1] != expected_dims: - raise ValueError( - f"Expected input with {expected_dims} columns, got {d_arr.shape[1]}" - ) - - - state_dict = torch.load(model_path, map_location='cpu') - - Xmu0 = state_dict['data_in_mean'] - Xstd0 = state_dict['data_in_std'] - Ymu0 = state_dict['data_target_mean'] - Ystd0 = state_dict['data_target_std'] - - d_arr = np.clip(d_arr, 0, None) - d_arr[:, 1] *= 0 - d_arr[:, 1] += 101325 - - orig_Y = d_arr[:, 2:].copy() - in_bct = 
d_arr.copy() - in_bct[:, 2:] = BCT(in_bct[:, 2:]) - in_bct_norm = (in_bct - Xmu0) / Xstd0 - - input = torch.from_numpy(in_bct_norm).float().to(device=device) - - output = model(input) - - out_bct = output.cpu().numpy() * Ystd0 + Ymu0 + in_bct[:, 2:-1] - next_Y = orig_Y.copy() - next_Y[:, :-1] = inverse_BCT(out_bct) - next_Y[:, :-1] = next_Y[:, :-1] / np.sum(next_Y[:, :-1], axis=1, keepdims=True) * (1 - next_Y[:, -1:]) - - return next_Y - -@torch.no_grad() -def nn_integrate(orig_arr, model_path, device, model_class, model_layers, time_step, mech, frozen_temperature=510): - model = load_model(model_path, device, model_class, model_layers) - - mask = orig_arr[:, 0] > frozen_temperature - infer_arr = orig_arr[mask, :] - - next_Y = predict_Y(model, model_path, infer_arr, mech, device) - - new_states = np.hstack((np.zeros((orig_arr.shape[0], 1)), orig_arr)) - new_states[:, 0] += time_step - new_states[:, 2] = orig_arr[:, 1] - new_states[mask, 3:] = next_Y - - setter_gas = ct.Solution(mech) - getter_gas = ct.Solution(mech) - new_T = np.zeros_like(next_Y[:, 0]) - - for idx, (state, next_y) in enumerate(zip(infer_arr, next_Y)): - try: - setter_gas.TPY = state[0], state[1], state[2:] - h = setter_gas.enthalpy_mass - - getter_gas.Y = next_y - getter_gas.HP = h, state[1] - - new_T[idx] = getter_gas.T - - except ct.CanteraError as e: - continue # Skip this iteration or set a default value - new_states[mask, 1] = new_T - - return new_states - -def integrate_h5( - file_path, - save_path1, - save_path2, - time_step, - cvode_integration=True, - nn_integration=False, - model_settings=None, -): - """ - Process datasets from an HDF5 file, applying CVODE or neural network integration, - and save the results in corresponding groups within the file. - - Parameters - ---------- - file_path : str - Path to the HDF5 file containing scalar fields. - time_step : float - Time step for the reactor simulation or neural network inference. 
- cvode_integration : bool, optional - If True, processes data using CVODE integration and saves it in the - `cvode_integration` group. Default is True. - nn_integration : bool, optional - If True, processes data using a neural network integration and saves it in - the `nn_integration` group. Default is False. - model_settings : dict, optional - A dictionary containing model settings for the neural network integration. - Must include keys: 'model_path', 'device', 'model_class', 'model_layers', - 'time_step', and 'mech'. - - Returns - ------- - None - """ - data_dict = {} - - with h5py.File(file_path, 'r') as f: - mech = require_h5_attr(f, MECHANISM_ATTR) - - data_dict = read_scalar_field_datasets(file_path) - - if cvode_integration: - gas = ct.Solution(mech) - reactor = ct.Reactor(gas, name='Reactor1', energy='off') - reactor_net = ct.ReactorNet([reactor]) - reactor_net.rtol, reactor_net.atol = 1e-6, 1e-10 - - processed_data_dict = {} - - for name, data in data_dict.items(): - processed_data = np.empty((data.shape[0], data.shape[1]+1)) - for i, state in enumerate(data): - gas = advance_reactor(gas, state, reactor, reactor_net, time_step) - - new_state = np.array([time_step, gas.T, gas.P] + list(gas.Y)) - - processed_data[i, :] = new_state - - processed_data_dict[name] = processed_data - - with h5py.File(save_path1, 'a') as f: # Use 'a' to append - cvode_group = f.create_group('cvode_integration') - - for dataset_name, processed_data in processed_data_dict.items(): - cvode_group.create_dataset(dataset_name, data=processed_data) - print(f'Saved processed dataset: {dataset_name} in cvode_integration group') - - if nn_integration: - processed_data_dict = {} - if model_settings is None: - raise ValueError("model_settings must be provided for neural network integration.") - - for name, data in data_dict.items(): - try: - processed_data = nn_integrate(data, **model_settings) - processed_data_dict[name] = processed_data - except Exception as e: - print(f"Error processing 
dataset '{name}': {e}") - - with h5py.File(save_path2, 'a') as f: # Use 'a' to append - if 'nn_integration' in f: - del f['nn_integration'] # Delete the existing group - nn_group = f.create_group('nn_integration') - - for dataset_name, processed_data in processed_data_dict.items(): - nn_group.create_dataset(dataset_name, data=processed_data) - print(f'Saved processed dataset: {dataset_name} in nn_integration group') - - -def calculate_error( - mech_path, - save_path1, - save_path2, - error = 'RMSE' -): - gas = ct.Solution(mech_path) - - with h5py.File(save_path1, 'r') as f1, h5py.File(save_path2, 'r') as f2: - cvode_group = f1['cvode_integration'] - nn_group = f2['nn_integration'] - - common_datasets = set(cvode_group.keys()) & set(nn_group.keys()) - - sorted_datasets = sorted(common_datasets, key=lambda x: float(x)) - results = {} - - for ds_name in sorted_datasets: - cvode_data = cvode_group[ds_name][:, 3:] # 跳过前3列,取后9列 - nn_data = nn_group[ds_name][:, 3:] # 跳过前3列,取后9列 - - if error == "RMSE": - rmse_per_dim = np.sqrt(np.mean((cvode_data - nn_data)**2, axis=0)) - results[ds_name] = rmse_per_dim - - print(f"RMSE of ataset: {ds_name}") - for dim_idx, rmse_val in enumerate(rmse_per_dim, start=1): - id = gas.species_names[dim_idx - 3] - print(f" Species {id}: {rmse_val:.6e}") - print() - - # elif error == "MAE": - # pass - - return results \ No newline at end of file +"""Compatibility shim for the canonical data I/O and integration modules.""" + +from importlib import import_module + + +__all__ = [ + "touch_h5", + "get_TPY_from_h5", + "advance_reactor", + "load_model", + "predict_Y", + "nn_integrate", + "integrate_h5", + "calculate_error", +] + +_ATTRIBUTE_MODULES = { + "touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"), + "get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"), + "advance_reactor": ("dfode_kit.data.integration", "advance_reactor"), + "load_model": ("dfode_kit.data.integration", "load_model"), + "predict_Y": ("dfode_kit.data.integration", 
"predict_Y"), + "nn_integrate": ("dfode_kit.data.integration", "nn_integrate"), + "integrate_h5": ("dfode_kit.data.integration", "integrate_h5"), + "calculate_error": ("dfode_kit.data.integration", "calculate_error"), +} + + +def __getattr__(name): + if name not in _ATTRIBUTE_MODULES: + raise AttributeError(f"module 'dfode_kit.data_operations.h5_kit' has no attribute '{name}'") + + module_name, attribute_name = _ATTRIBUTE_MODULES[name] + module = import_module(module_name) + value = getattr(module, attribute_name) + globals()[name] = value + return value diff --git a/tests/test_data_integration_shims.py b/tests/test_data_integration_shims.py new file mode 100644 index 0000000..52d5c17 --- /dev/null +++ b/tests/test_data_integration_shims.py @@ -0,0 +1,17 @@ +import importlib + + +def test_h5_kit_shim_reexports_io_helpers(): + legacy_h5_kit = importlib.import_module("dfode_kit.data_operations.h5_kit") + canonical_io = importlib.import_module("dfode_kit.data.io_hdf5") + + assert legacy_h5_kit.touch_h5 is canonical_io.touch_h5 + assert legacy_h5_kit.get_TPY_from_h5 is canonical_io.get_TPY_from_h5 + + +def test_data_operations_package_reexports_canonical_io_helpers(): + legacy_data_ops = importlib.import_module("dfode_kit.data_operations") + canonical_io = importlib.import_module("dfode_kit.data.io_hdf5") + + assert legacy_data_ops.touch_h5 is canonical_io.touch_h5 + assert legacy_data_ops.get_TPY_from_h5 is canonical_io.get_TPY_from_h5 diff --git a/tutorials/oneD_freely_propagating_flame/2_model_test/priori/test.py b/tutorials/oneD_freely_propagating_flame/2_model_test/priori/test.py index af78795..1ede131 100644 --- a/tutorials/oneD_freely_propagating_flame/2_model_test/priori/test.py +++ b/tutorials/oneD_freely_propagating_flame/2_model_test/priori/test.py @@ -4,7 +4,7 @@ import cantera as ct from dfode_kit import DFODE_ROOT -from dfode_kit.data_operations import integrate_h5, touch_h5, calculate_error +from dfode_kit.data import integrate_h5, touch_h5, 
calculate_error from dfode_kit.models.mlp import MLP mech_path = f'{DFODE_ROOT}/mechanisms/Burke2012_s9r23.yaml' diff --git a/tutorials/twoD_HIT_flame/2_model_test/priori/test.py b/tutorials/twoD_HIT_flame/2_model_test/priori/test.py index c0514f0..f153ac0 100644 --- a/tutorials/twoD_HIT_flame/2_model_test/priori/test.py +++ b/tutorials/twoD_HIT_flame/2_model_test/priori/test.py @@ -4,7 +4,7 @@ import cantera as ct from dfode_kit import DFODE_ROOT -from dfode_kit.data_operations import integrate_h5, touch_h5, calculate_error +from dfode_kit.data import integrate_h5, touch_h5, calculate_error from dfode_kit.models.mlp import MLP mech_path = f'{DFODE_ROOT}/mechanisms/Burke2012_s9r23.yaml'