Skip to content

Commit b8d7fe6

Browse files
committed
code_review
1 parent 5dd42ae commit b8d7fe6

File tree

1 file changed

+12
-10
lines changed

1 file changed

+12
-10
lines changed

codeflash/verification/test_runner.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -36,20 +36,21 @@ def run_behavioral_tests(
3636
*,
3737
pytest_timeout: int | None = None,
3838
pytest_cmd: str = "pytest",
39-
pytest_target_runtime_seconds: int = TOTAL_LOOPING_TIME_EFFECTIVE,
39+
pytest_target_runtime_seconds: float = TOTAL_LOOPING_TIME_EFFECTIVE,
4040
enable_coverage: bool = False,
4141
) -> tuple[Path, subprocess.CompletedProcess, Path | None, Path | None]:
4242
if test_framework in {"pytest", "unittest"}:
4343
test_files: list[str] = []
4444
for file in test_paths.test_files:
4545
if file.test_type == TestType.REPLAY_TEST:
4646
# Replay tests need specific test targeting because one file contains tests for multiple functions
47-
test_files.extend(
48-
[
49-
str(file.instrumented_behavior_file_path) + "::" + test.test_function
50-
for test in file.tests_in_file
51-
]
52-
)
47+
if file.tests_in_file:
48+
test_files.extend(
49+
[
50+
str(file.instrumented_behavior_file_path) + "::" + test.test_function
51+
for test in file.tests_in_file
52+
]
53+
)
5354
else:
5455
test_files.append(str(file.instrumented_behavior_file_path))
5556
pytest_cmd_list = (
@@ -67,7 +68,7 @@ def run_behavioral_tests(
6768
f"--codeflash_seconds={pytest_target_runtime_seconds}",
6869
]
6970
if pytest_timeout is not None:
70-
common_pytest_args.insert(1, f"--timeout={pytest_timeout}")
71+
common_pytest_args.append(f"--timeout={pytest_timeout}")
7172

7273
result_file_path = get_run_tmp_file(Path("pytest_results.xml"))
7374
result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"]
@@ -163,7 +164,7 @@ def run_line_profile_tests(
163164
f"--codeflash_seconds={pytest_target_runtime_seconds}",
164165
]
165166
if pytest_timeout is not None:
166-
pytest_args.insert(1, f"--timeout={pytest_timeout}")
167+
pytest_args.append(f"--timeout={pytest_timeout}")
167168
result_file_path = get_run_tmp_file(Path("pytest_results.xml"))
168169
result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"]
169170
pytest_test_env = test_env.copy()
@@ -213,7 +214,8 @@ def run_benchmarking_tests(
213214
f"--codeflash_seconds={pytest_target_runtime_seconds}",
214215
]
215216
if pytest_timeout is not None:
216-
pytest_args.insert(1, f"--timeout={pytest_timeout}")
217+
pytest_args.append(f"--timeout={pytest_timeout}")
218+
217219
result_file_path = get_run_tmp_file(Path("pytest_results.xml"))
218220
result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"]
219221
pytest_test_env = test_env.copy()

0 commit comments

Comments (0)