Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 0 additions & 69 deletions examples/tutorials/metatomic_tutorial.py

This file was deleted.

6 changes: 3 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ dependencies = [
"h5py>=3.15.1",
"numpy>=1.26,<3; python_version < '3.13'",
"numpy>=2.3.2,<3; python_version >= '3.13'",
"nvalchemi-toolkit-ops>=0.3.0",
"nvalchemi-toolkit-ops[torch]>=0.3.0",
"tables>=3.11.1",
"torch>=2",
"tqdm>=4.67",
Expand All @@ -52,8 +52,8 @@ io = ["ase>=3.26", "phonopy>=2.37.0", "pymatgen>=2025.6.14"]
symmetry = ["moyopy>=0.7.8"]
mace = ["mace-torch>=0.3.15"]
mattersim = ["mattersim>=0.1.2"]
metatomic = ["metatomic-torch>=0.1.3", "metatrain[pet]>=2025.12"]
orb = ["orb-models>=0.6.0"]
metatomic = ["metatomic-torchsim>=0.1.1", "metatomic-ase>=0.1.0", "upet>=0.2.0"]
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The idea of metatomic/upet being separate packages is that some people will use other metatomic models that are not PET models, or not from the upet repository; so strictly speaking the upet dependency is not required here, except for testing purposes, but this is up to you!

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I added the metatomic-ase and upet dependencies to the same group so that they are installed for testing. Otherwise we would need to double the number of extras groups per model, which seemed like a big step.

Maybe there's a way to do it with https://peps.python.org/pep-0723/ like we do for examples but I think we can come back to that at a later point?

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

With the way this repository is set up, PEP 723 sounds like the best option, yes! Otherwise, we have had good success adding test-only dependencies to a tox environment specification. But this works for now!

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The solution I would like doesn't yet exist — there are no PEP 723-aware test frameworks 🫠

orb = ["orb-models>=0.6.2"]
sevenn = ["sevenn[torchsim]>=0.12.1"]
graphpes = ["graph-pes>=0.1", "mace-torch>=0.3.12"]
nequip = ["nequip>=0.17.0"]
Expand Down
29 changes: 13 additions & 16 deletions tests/models/test_metatomic.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@


try:
from metatomic.torch import ase_calculator
from metatrain.utils.io import load_model
from metatomic.torch import AtomisticModel
from metatomic_ase import MetatomicCalculator
from upet import get_upet

from torch_sim.models.metatomic import MetatomicModel
except ImportError:
Expand All @@ -24,26 +25,22 @@


@pytest.fixture
def metatomic_calculator():
"""Load a pretrained metatomic model for testing."""
model_url = "https://huggingface.co/lab-cosmo/pet-mad/resolve/v1.1.0/models/pet-mad-v1.1.0.ckpt"
return ase_calculator.MetatomicCalculator(
model=load_model(model_url).export(), device=DEVICE
)
def metatomic_module() -> AtomisticModel:
    """Build the pretrained PET-MAD model via upet for use in tests."""
    return get_upet(model="pet-mad")


@pytest.fixture
def metatomic_model() -> MetatomicModel:
"""Create an MetatomicModel wrapper for the pretrained model."""
return MetatomicModel(model="pet-mad", device=DEVICE)
def metatomic_calculator(metatomic_module: AtomisticModel) -> MetatomicCalculator:
    """Wrap the shared PET-MAD model in an ASE calculator on the test device."""
    return MetatomicCalculator(model=metatomic_module, device=DEVICE)


@pytest.fixture
def metatomic_model(metatomic_module: AtomisticModel) -> MetatomicModel:
    """Wrap the shared PET-MAD model in a torch-sim MetatomicModel on the test device."""
    return MetatomicModel(model=metatomic_module, device=DEVICE)


def test_metatomic_initialization() -> None:
"""Test that the metatomic model initializes correctly."""
model = MetatomicModel(
model="pet-mad",
device=DEVICE,
)
model = MetatomicModel(model=get_upet(model="pet-mad"), device=DEVICE)
assert model.device == DEVICE
assert model.dtype == torch.float32

Expand Down
Loading
Loading