4 changes: 4 additions & 0 deletions test/infinicore/framework/entities.py
@@ -133,6 +133,10 @@ def __str__(self):
        for key, value in self.kwargs.items():
            if key == "out" and isinstance(value, int):
                kwargs_strs.append(f"{key}={self.inputs[value].name}")
            elif isinstance(value, (list, tuple)):
                # Handle tuple/list of TensorSpecs or other values
                item_strs = [str(item) for item in value]
                kwargs_strs.append(f"{key}=({', '.join(item_strs)})")
            else:
                kwargs_strs.append(f"{key}={value}")

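The new elif branch expands a list or tuple kwarg element by element instead of printing the raw repr. A minimal sketch of the resulting formatting, using plain strings as stand-ins for TensorSpec objects (illustrative only, not framework code):

    # Stand-ins for TensorSpec items; only the str() join behaviour is shown.
    key = "tensors"
    value = ("tensor(2, 3)", "tensor(3, 4)")
    item_strs = [str(item) for item in value]
    print(f"{key}=({', '.join(item_strs)})")
    # -> tensors=(tensor(2, 3), tensor(3, 4))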
10 changes: 6 additions & 4 deletions test/infinicore/framework/results.py
@@ -155,9 +155,6 @@ def print_live_result(self, result):
        if result.error_message:
            print(f"πŸ’₯ Error: {result.error_message}")

        if result.stdout or result.stderr or self.verbose:
            print("-" * 40)

    def print_summary(self, results, cumulative_timing, ops_dir, total_expected=0):
        print(f"\n{'='*80}\nCUMULATIVE TEST SUMMARY\n{'='*80}")

@@ -367,12 +364,17 @@ def _resolve_name(self, obj, default_name):
        return getattr(obj, "name", None) or default_name

    def _spec_to_dict(self, s, name=None):
        return {
        spec_dict = {
            "name": name if name else getattr(s, "name", "unknown"),
            "shape": list(s.shape) if s.shape else None,
            "dtype": str(s.dtype).split(".")[-1],
            "strides": list(s.strides) if s.strides else None,
        }
        # Add file_path if it exists (stored in kwargs for TensorSpec)
        file_path = getattr(s, "kwargs", {}).get("file_path")
        if file_path:
            spec_dict["file_path"] = file_path
        return spec_dict

    def _fmt_result(self, res):
        if not (is_dataclass(res) or hasattr(res, "success")):
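With the _spec_to_dict change, a spec that was loaded from disk carries its source path into the exported record. A rough sketch of the resulting dict, assuming a TensorSpec-like object whose kwargs holds file_path (the attribute layout here is an assumption, not the framework's definition):

    # Hypothetical spec object; mirrors the fields _spec_to_dict reads above.
    from types import SimpleNamespace

    s = SimpleNamespace(
        name="input0",
        shape=(2, 3),
        dtype="infinicore.float32",
        strides=None,
        kwargs={"file_path": "data/input0.bin"},
    )
    spec_dict = {
        "name": s.name,
        "shape": list(s.shape),
        "dtype": str(s.dtype).split(".")[-1],  # "float32"
        "strides": None,
    }
    file_path = getattr(s, "kwargs", {}).get("file_path")
    if file_path:
        spec_dict["file_path"] = file_path
    # {'name': 'input0', 'shape': [2, 3], 'dtype': 'float32',
    #  'strides': None, 'file_path': 'data/input0.bin'}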
6 changes: 5 additions & 1 deletion test/infinicore/framework/tensor.py
@@ -358,4 +358,8 @@ def __str__(self):
        dtype_str = (
            f", {str(self.dtype).replace('infinicore.', '')}" if self.dtype else ""
        )
        return f"{name_str}tensor{self.shape}{strides_str}{dtype_str}"
        # Add [FROM_FILE] marker when loading from file
        from_file_marker = (
            " [FROM_FILE]" if self.init_mode == TensorInitializer.FROM_FILE else ""
        )
        return f"{name_str}tensor{self.shape}{strides_str}{dtype_str}{from_file_marker}"
5 changes: 5 additions & 0 deletions test/infinicore/framework/utils/json_utils.py
@@ -120,6 +120,7 @@ def _write_field(f, key, value, indent, sub_indent, close_comma=""):
        items = value

    current_len = len(indent) + len(f'"{key}": {open_char}')
    wrapped = False  # Track if we wrapped to new line

    for i, item in enumerate(items):
        if is_dict:
@@ -135,8 +136,12 @@
        if current_len + len(item_str) + len(item_comma) > 180:
            f.write(f"\n{sub_indent}")
            current_len = len(sub_indent)
            wrapped = True

        f.write(f"{item_str}{item_comma}")
        current_len += len(item_str) + len(item_comma)

    # Add newline before closing bracket if we wrapped
    if wrapped:
        f.write(f"\n{indent}")
    f.write(f"{close_char}{close_comma}\n")
27 changes: 22 additions & 5 deletions test/infinicore/framework/utils/load_utils.py
@@ -20,6 +20,7 @@
import infinicore
from framework import (
    BaseOperatorTest,
    TensorInitializer,
    TensorSpec,
    TestCase,
    GenericTestRunner,
@@ -42,11 +43,25 @@ def _parse_dtype(dtype_str):
def _dict_to_spec(spec_dict):
    """Convert JSON dict to TensorSpec object."""
    if not isinstance(spec_dict, dict): return spec_dict

    # Collect optional fields
    kwargs = {k: spec_dict[k] for k in ('name', 'file_path') if k in spec_dict}

    # Determine init_mode: file_path always uses FROM_FILE, otherwise use specified mode or default RANDOM
    if 'file_path' in spec_dict:
        init_mode = TensorInitializer.FROM_FILE
    else:
        init_mode = spec_dict.get('init_mode', TensorInitializer.RANDOM)
        if isinstance(init_mode, str):
            # Map string to enum, default to RANDOM if unknown
            init_mode = getattr(TensorInitializer, init_mode.upper(), TensorInitializer.RANDOM)

    return TensorSpec(
        shape=tuple(spec_dict['shape']),
        dtype=_parse_dtype(spec_dict['dtype']),
        name=spec_dict.get('name'),
        strides=tuple(spec_dict['strides']) if spec_dict.get('strides') else None
        strides=tuple(spec_dict['strides']) if spec_dict.get('strides') else None,
        init_mode=init_mode,
        **kwargs
    )

def parse_test_cases():
@@ -131,6 +146,7 @@ def main():
main()
'''


class TestGenerator:

    def __init__(self, project_root):
@@ -151,8 +167,9 @@ def generate(self, json_list, output_dir):
        # If the op name is provided, generate the return statement.
        # If it's None/null, use 'pass' to avoid syntax errors.
        make_body = lambda name, tag: (
            f"return {name}(*args, **self._resolve_kwargs(args, kwargs))"
            if name else f"pass # {tag} is null, skipping implementation"
            f"return {name}(*args, **self._resolve_kwargs(args, kwargs))"
            if name
            else f"pass # {tag} is null, skipping implementation"
        )

        torch_body = make_body(torch_op_name, "torch_op")
@@ -162,7 +179,7 @@ def generate(self, json_list, output_dir):
        config_str = pprint.pformat(op_config, indent=4, width=120)
        file_content = _TEST_FILE_TEMPLATE.replace("{op_config_json}", config_str)
        file_content = file_content.replace("{project_root}", self.project_root)

        # Injected Method Bodies
        file_content = file_content.replace("{torch_method_body}", torch_body)
        file_content = file_content.replace("{infini_method_body}", infini_body)
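Together with the results.py export above, this makes file-backed specs round-trip: a JSON record that contains file_path comes back as a spec initialized FROM_FILE. A hedged sketch of just the dispatch logic, with a placeholder enum standing in for the framework's own TensorInitializer:

    from enum import Enum, auto

    class TensorInitializer(Enum):  # placeholder with only the members used here
        RANDOM = auto()
        ZEROS = auto()
        FROM_FILE = auto()

    def pick_init_mode(spec_dict):
        # file_path always forces FROM_FILE, mirroring _dict_to_spec above
        if "file_path" in spec_dict:
            return TensorInitializer.FROM_FILE
        init_mode = spec_dict.get("init_mode", TensorInitializer.RANDOM)
        if isinstance(init_mode, str):
            # unknown names fall back to RANDOM
            init_mode = getattr(TensorInitializer, init_mode.upper(), TensorInitializer.RANDOM)
        return init_mode

    print(pick_init_mode({"file_path": "weights.bin"}))  # TensorInitializer.FROM_FILE
    print(pick_init_mode({"init_mode": "zeros"}))        # TensorInitializer.ZEROS
    print(pick_init_mode({}))                            # TensorInitializer.RANDOM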
2 changes: 1 addition & 1 deletion test/infinicore/run.py
@@ -280,7 +280,7 @@ def main():
    )

    if bench:
        print(f"Benchmark mode: {args.bench.upper()} timing")
        print(f"Benchmark mode: {bench.upper()} timing")

    # 3. Initialize and Execute
    test_manager = TestManager(