Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions dfode_kit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@
"df_to_h5": ("dfode_kit.cases.sampling", "df_to_h5"),
"touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"),
"get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"),
"advance_reactor": ("dfode_kit.data_operations.h5_kit", "advance_reactor"),
"load_model": ("dfode_kit.data_operations.h5_kit", "load_model"),
"predict_Y": ("dfode_kit.data_operations.h5_kit", "predict_Y"),
"nn_integrate": ("dfode_kit.data_operations.h5_kit", "nn_integrate"),
"integrate_h5": ("dfode_kit.data_operations.h5_kit", "integrate_h5"),
"advance_reactor": ("dfode_kit.data.integration", "advance_reactor"),
"load_model": ("dfode_kit.data.integration", "load_model"),
"predict_Y": ("dfode_kit.data.integration", "predict_Y"),
"nn_integrate": ("dfode_kit.data.integration", "nn_integrate"),
"integrate_h5": ("dfode_kit.data.integration", "integrate_h5"),
}


Expand Down
2 changes: 1 addition & 1 deletion dfode_kit/cli/commands/init.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ def _handle_one_d_flame(args):
json_result = {'case_type': 'oneD-flame'} if args.json else None

if args.write_config:
from dfode_kit.df_interface.case_init import dump_plan_json
from dfode_kit.cases.init import dump_plan_json

config_path = dump_plan_json(plan, args.write_config)
if args.json:
Expand Down
6 changes: 3 additions & 3 deletions dfode_kit/cli/commands/init_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from pathlib import Path
from typing import Any

from dfode_kit.df_interface.case_init import (
from dfode_kit.cases.init import (
DEFAULT_ONE_D_FLAME_TEMPLATE,
OneDFlameInitInputs,
dump_plan_json,
Expand Down Expand Up @@ -117,7 +117,7 @@ def apply_one_d_flame_plan(
overrides = one_d_flame_overrides_from_plan(plan)
cfg = _build_one_d_flame_config(inputs, overrides, quiet=quiet)

from dfode_kit.df_interface.oneDflame_setup import setup_one_d_flame_case
from dfode_kit.cases.deepflame import setup_one_d_flame_case

if quiet:
with redirect_stdout(io.StringIO()):
Expand All @@ -139,7 +139,7 @@ def _build_one_d_flame_config(
overrides: dict[str, Any],
quiet: bool = False,
):
from dfode_kit.df_interface.flame_configurations import OneDFreelyPropagatingFlameConfig
from dfode_kit.cases.presets import OneDFreelyPropagatingFlameConfig

cfg = OneDFreelyPropagatingFlameConfig(
mechanism=inputs.mechanism,
Expand Down
2 changes: 1 addition & 1 deletion dfode_kit/cli/commands/sample.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def add_command_parser(subparsers):

def handle_command(args):
from dfode_kit.data.io_hdf5 import touch_h5
from dfode_kit.df_interface.sample_case import df_to_h5
from dfode_kit.cases.sampling import df_to_h5

print('Handling sample command')
df_to_h5(args.case, args.mech, args.save, include_mesh=args.include_mesh)
Expand Down
12 changes: 12 additions & 0 deletions dfode_kit/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,12 @@
"require_h5_group",
"touch_h5",
"get_TPY_from_h5",
"advance_reactor",
"load_model",
"predict_Y",
"nn_integrate",
"integrate_h5",
"calculate_error",
]

_ATTRIBUTE_MODULES = {
Expand All @@ -21,6 +27,12 @@
"require_h5_group": ("dfode_kit.data.contracts", "require_h5_group"),
"touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"),
"get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"),
"advance_reactor": ("dfode_kit.data.integration", "advance_reactor"),
"load_model": ("dfode_kit.data.integration", "load_model"),
"predict_Y": ("dfode_kit.data.integration", "predict_Y"),
"nn_integrate": ("dfode_kit.data.integration", "nn_integrate"),
"integrate_h5": ("dfode_kit.data.integration", "integrate_h5"),
"calculate_error": ("dfode_kit.data.integration", "calculate_error"),
}


Expand Down
211 changes: 211 additions & 0 deletions dfode_kit/data/integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,211 @@
import h5py
import torch
import numpy as np
import cantera as ct

from dfode_kit.data.contracts import MECHANISM_ATTR, require_h5_attr, read_scalar_field_datasets
from dfode_kit.data.io_hdf5 import get_TPY_from_h5, touch_h5
from dfode_kit.utils import BCT, inverse_BCT


def advance_reactor(gas, state, reactor, reactor_net, time_step):
    """Push one thermochemical state through the reactor network for one step.

    The flat state vector is laid out as ``[T, P, Y_0 ... Y_{n-1}]``.  The
    gas object is updated in place and returned after the network has been
    advanced by *time_step* and its clock rewound to zero.
    """
    flat = state.flatten()

    required = (2 + gas.n_species,)
    if flat.shape != required:
        raise ValueError(
            f"Expected state shape {required}, got {flat.shape}"
        )

    gas.TPY = flat[0], flat[1], flat[2:]

    # Re-sync the reactor with the freshly assigned gas state, integrate,
    # then reset the network time so the next call starts from t = 0.
    reactor.syncState()
    reactor_net.reinitialize()
    reactor_net.advance(time_step)
    reactor_net.set_initial_time(0.0)

    return gas


@torch.no_grad()
def load_model(model_path, device, model_class, model_layers):
    """Instantiate *model_class* and restore its weights from a checkpoint.

    The checkpoint at *model_path* is expected to hold the network weights
    under the ``'net'`` key.  The model is returned in eval mode, moved to
    *device*.
    """
    checkpoint = torch.load(model_path, map_location='cpu')

    net = model_class(model_layers)
    net.load_state_dict(checkpoint['net'])

    net.to(device=device)
    net.eval()

    return net


@torch.no_grad()
def predict_Y(model, model_path, d_arr, mech, device):
    """Predict next-step species mass fractions with a trained network.

    Parameters
    ----------
    model : torch.nn.Module
        Trained network already on *device* (see ``load_model``).
    model_path : str
        Checkpoint path; re-read here for the input/target normalization
        statistics stored alongside the weights.
    d_arr : np.ndarray
        States with columns ``[T, P, Y_0 ... Y_{n-1}]``.  The caller's
        array is not modified (``np.clip`` below returns a copy).
    mech : str
        Cantera mechanism file, used to validate the column count.
    device : str or torch.device
        Device the input tensor is moved to before inference.

    Returns
    -------
    np.ndarray
        Predicted mass fractions with the same species layout as the
        input: the last species column is carried over unchanged and the
        remaining columns are renormalized to sum to ``1 - Y_last``.

    Raises
    ------
    ValueError
        If *d_arr* does not have ``2 + n_species`` columns.
    """
    gas = ct.Solution(mech)
    n_species = gas.n_species
    expected_dims = 2 + n_species
    if d_arr.shape[1] != expected_dims:
        raise ValueError(
            f"Expected input with {expected_dims} columns, got {d_arr.shape[1]}"
        )

    # Normalization statistics live in the same checkpoint as the weights.
    state_dict = torch.load(model_path, map_location='cpu')

    Xmu0 = state_dict['data_in_mean']
    Xstd0 = state_dict['data_in_std']
    Ymu0 = state_dict['data_target_mean']
    Ystd0 = state_dict['data_target_std']

    # np.clip returns a new array, so in-place edits below never touch the
    # caller's data.
    d_arr = np.clip(d_arr, 0, None)
    # The model expects atmospheric pressure; pin the pressure column.
    # (Equivalent to the original `*= 0` then `+= 101325`.)
    d_arr[:, 1] = 101325.0

    orig_Y = d_arr[:, 2:].copy()
    in_bct = d_arr.copy()
    in_bct[:, 2:] = BCT(in_bct[:, 2:])
    in_bct_norm = (in_bct - Xmu0) / Xstd0

    # Renamed from `input` to stop shadowing the builtin.
    net_input = torch.from_numpy(in_bct_norm).float().to(device=device)

    output = model(net_input)

    # The network predicts a normalized BCT-space increment for all species
    # except the last; de-normalize and add the input BCT values back.
    out_bct = output.cpu().numpy() * Ystd0 + Ymu0 + in_bct[:, 2:-1]
    next_Y = orig_Y.copy()
    next_Y[:, :-1] = inverse_BCT(out_bct)
    # Renormalize so the predicted species plus the carried-over last
    # species sum to exactly one per row.
    next_Y[:, :-1] = next_Y[:, :-1] / np.sum(next_Y[:, :-1], axis=1, keepdims=True) * (1 - next_Y[:, -1:])

    return next_Y


@torch.no_grad()
def nn_integrate(orig_arr, model_path, device, model_class, model_layers, time_step, mech, frozen_temperature=510):
    """Advance an array of thermochemical states one *time_step* with the NN.

    Parameters
    ----------
    orig_arr : np.ndarray
        States with columns ``[T, P, Y_0 ... Y_{n-1}]``, one row per cell.
    model_path : str
        Checkpoint consumed by ``load_model`` and ``predict_Y``.
    device, model_class, model_layers
        Forwarded to ``load_model``.
    time_step : float
        Step size; written into the output's leading time column.
    mech : str
        Cantera mechanism file used for the enthalpy-based temperature update.
    frozen_temperature : float, optional
        Cells at or below this temperature are treated as chemically frozen
        and copied through unchanged (default 510).

    Returns
    -------
    np.ndarray
        Array with one extra leading column: ``[time_step, T_new, P, Y_new...]``.
    """
    model = load_model(model_path, device, model_class, model_layers)

    # Only cells hotter than the freeze-out threshold go through the network.
    mask = orig_arr[:, 0] > frozen_temperature
    infer_arr = orig_arr[mask, :]

    next_Y = predict_Y(model, model_path, infer_arr, mech, device)

    # Prepend a time column; frozen (unmasked) rows keep their original T/P/Y.
    new_states = np.hstack((np.zeros((orig_arr.shape[0], 1)), orig_arr))
    new_states[:, 0] += time_step
    # Pressure column: already placed by hstack; kept as an explicit copy.
    new_states[:, 2] = orig_arr[:, 1]
    new_states[mask, 3:] = next_Y

    # Recover the post-step temperature from enthalpy conservation at constant
    # pressure: compute h from the old state, then set (H, P) with the
    # predicted composition and read back T.
    setter_gas = ct.Solution(mech)
    getter_gas = ct.Solution(mech)
    new_T = np.zeros_like(next_Y[:, 0])

    for idx, (state, next_y) in enumerate(zip(infer_arr, next_Y)):
        try:
            setter_gas.TPY = state[0], state[1], state[2:]
            h = setter_gas.enthalpy_mass

            getter_gas.Y = next_y
            getter_gas.HP = h, state[1]

            new_T[idx] = getter_gas.T

        except ct.CanteraError:
            # NOTE(review): a failed HP solve leaves new_T[idx] at 0.0, which
            # then overwrites the cell's temperature below — confirm this is
            # intended rather than keeping the previous temperature.
            continue
    new_states[mask, 1] = new_T

    return new_states


def integrate_h5(
    file_path,
    save_path1,
    save_path2,
    time_step,
    cvode_integration=True,
    nn_integration=False,
    model_settings=None,
):
    """Process scalar-field datasets and save CVODE / NN integration outputs.

    Parameters
    ----------
    file_path : str
        Source HDF5 file; must carry the mechanism attribute and the
        scalar-field datasets read by ``read_scalar_field_datasets``.
    save_path1 : str
        Output HDF5 file for CVODE results ('cvode_integration' group).
    save_path2 : str
        Output HDF5 file for NN results ('nn_integration' group).
    time_step : float
        Integration step, also stored as each output row's leading column.
    cvode_integration : bool, optional
        Run the reference Cantera reactor integration (energy equation off).
    nn_integration : bool, optional
        Run the neural-network integration; requires *model_settings*.
    model_settings : dict, optional
        Keyword arguments forwarded to ``nn_integrate``.

    Raises
    ------
    ValueError
        If *nn_integration* is requested without *model_settings*.
    """
    with h5py.File(file_path, 'r') as f:
        mech = require_h5_attr(f, MECHANISM_ATTR)

    data_dict = read_scalar_field_datasets(file_path)

    if cvode_integration:
        gas = ct.Solution(mech)
        reactor = ct.Reactor(gas, name='Reactor1', energy='off')
        reactor_net = ct.ReactorNet([reactor])
        reactor_net.rtol, reactor_net.atol = 1e-6, 1e-10

        processed_data_dict = {}

        for name, data in data_dict.items():
            # Output rows gain a leading time column: [t, T, P, Y...].
            processed_data = np.empty((data.shape[0], data.shape[1] + 1))
            for i, state in enumerate(data):
                gas = advance_reactor(gas, state, reactor, reactor_net, time_step)

                new_state = np.array([time_step, gas.T, gas.P] + list(gas.Y))

                processed_data[i, :] = new_state

            processed_data_dict[name] = processed_data

        with h5py.File(save_path1, 'a') as f:
            # Delete any stale group first so reruns don't fail on
            # create_group (mirrors the nn_integration branch below).
            if 'cvode_integration' in f:
                del f['cvode_integration']
            cvode_group = f.create_group('cvode_integration')

            for dataset_name, processed_data in processed_data_dict.items():
                cvode_group.create_dataset(dataset_name, data=processed_data)
                print(f'Saved processed dataset: {dataset_name} in cvode_integration group')

    if nn_integration:
        processed_data_dict = {}
        if model_settings is None:
            raise ValueError("model_settings must be provided for neural network integration.")

        for name, data in data_dict.items():
            try:
                processed_data = nn_integrate(data, **model_settings)
                processed_data_dict[name] = processed_data
            except Exception as e:
                # Best-effort: a failed dataset is reported but does not
                # abort the remaining datasets.
                print(f"Error processing dataset '{name}': {e}")

        with h5py.File(save_path2, 'a') as f:
            if 'nn_integration' in f:
                del f['nn_integration']
            nn_group = f.create_group('nn_integration')

            for dataset_name, processed_data in processed_data_dict.items():
                nn_group.create_dataset(dataset_name, data=processed_data)
                print(f'Saved processed dataset: {dataset_name} in nn_integration group')


def calculate_error(
    mech_path,
    save_path1,
    save_path2,
    error='RMSE'
):
    """Compare CVODE and NN integration outputs stored in two HDF5 files.

    Parameters
    ----------
    mech_path : str
        Cantera mechanism file; used only to label species in the report.
    save_path1 : str
        HDF5 file holding the 'cvode_integration' group.
    save_path2 : str
        HDF5 file holding the 'nn_integration' group.
    error : str, optional
        Error metric; only 'RMSE' is implemented (default 'RMSE').

    Returns
    -------
    dict
        Mapping of dataset name -> per-species RMSE array.  Empty if
        *error* is not 'RMSE' or no datasets are shared.
    """
    gas = ct.Solution(mech_path)

    with h5py.File(save_path1, 'r') as f1, h5py.File(save_path2, 'r') as f2:
        cvode_group = f1['cvode_integration']
        nn_group = f2['nn_integration']

        # Only datasets present in both files can be compared.
        common_datasets = set(cvode_group.keys()) & set(nn_group.keys())

        # Dataset names are numeric strings; sort by their numeric value.
        sorted_datasets = sorted(common_datasets, key=lambda x: float(x))
        results = {}

        for ds_name in sorted_datasets:
            # Columns 0-2 are [time, T, P]; species mass fractions follow.
            cvode_data = cvode_group[ds_name][:, 3:]
            nn_data = nn_group[ds_name][:, 3:]

            if error == "RMSE":
                rmse_per_dim = np.sqrt(np.mean((cvode_data - nn_data) ** 2, axis=0))
                results[ds_name] = rmse_per_dim

                # Fixed typo in the report header ("ataset" -> "dataset").
                print(f"RMSE of dataset: {ds_name}")
                # Column j of the sliced species block is species j of the
                # mechanism.  The previous `start=1` / `dim_idx - 3` mapping
                # labeled the first species with species_names[-2].
                for dim_idx, rmse_val in enumerate(rmse_per_dim):
                    species_name = gas.species_names[dim_idx]
                    print(f"  Species {species_name}: {rmse_val:.6e}")
                print()

    return results
10 changes: 5 additions & 5 deletions dfode_kit/data_operations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@
_ATTRIBUTE_MODULES = {
"touch_h5": ("dfode_kit.data.io_hdf5", "touch_h5"),
"get_TPY_from_h5": ("dfode_kit.data.io_hdf5", "get_TPY_from_h5"),
"integrate_h5": ("dfode_kit.data_operations.h5_kit", "integrate_h5"),
"load_model": ("dfode_kit.data_operations.h5_kit", "load_model"),
"nn_integrate": ("dfode_kit.data_operations.h5_kit", "nn_integrate"),
"predict_Y": ("dfode_kit.data_operations.h5_kit", "predict_Y"),
"calculate_error": ("dfode_kit.data_operations.h5_kit", "calculate_error"),
"integrate_h5": ("dfode_kit.data.integration", "integrate_h5"),
"load_model": ("dfode_kit.data.integration", "load_model"),
"nn_integrate": ("dfode_kit.data.integration", "nn_integrate"),
"predict_Y": ("dfode_kit.data.integration", "predict_Y"),
"calculate_error": ("dfode_kit.data.integration", "calculate_error"),
"random_perturb": ("dfode_kit.data_operations.augment_data", "random_perturb"),
"label_npy": ("dfode_kit.data_operations.label_data", "label_npy"),
"SCALAR_FIELDS_GROUP": ("dfode_kit.data.contracts", "SCALAR_FIELDS_GROUP"),
Expand Down
2 changes: 1 addition & 1 deletion dfode_kit/data_operations/augment_data.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import numpy as np
import cantera as ct
import time
from dfode_kit.data_operations.h5_kit import advance_reactor
from dfode_kit.data.integration import advance_reactor
from dfode_kit.training.formation import formation_calculate

def single_step(npstate, chem, time_step=1e-6):
Expand Down
Loading
Loading