93 changes: 79 additions & 14 deletions benchmark-scripts/consolidate_multiple_run_of_metrics.py
@@ -8,7 +8,7 @@
 import datetime
 import argparse
 from abc import ABC, abstractmethod
-from statistics import mean
+from statistics import mean, stdev, median
 import os
 import re
 import fnmatch
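
Note on the widened statistics import: statistics.stdev raises StatisticsError when given fewer than two samples, which is why the extractor below guards the call and falls back to 0.0 for single-frame channels. A minimal sketch:

    from statistics import mean, median, stdev

    values = [18.7]  # a channel that produced only one frame
    print(mean(values), median(values))               # 18.7 18.7 -- both accept a single sample
    print(stdev(values) if len(values) > 1 else 0.0)  # 0.0 -- a bare stdev() call would raise StatisticsError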
@@ -564,23 +564,68 @@ class PipelineLatencyExtractor(KPIExtractor):
     def extract_data(self, log_file_path):
 
         print("parsing latency")
-        average_latency_value = ""
-        latency = {}
+        # Data structure to store latency data organized by source_name and sink_name
+        latency_data_structure = {}  # {(source_name, sink_name): {frame_number: frame_latency_value}}
         lat = re.findall(r'\d+', os.path.basename(log_file_path))
         lat_filename = lat[0] if len(lat) > 0 else "UNKNOWN"
         latency_key = "Pipeline_{} {}".format(lat_filename, PIPELINE_LATENCY_CONSTANT)
 
         with open(log_file_path) as f:
             for line in f:
-                if "latency_tracer_pipeline" in line:
-                    match = re.search(r'frame_latency=\(double\)([0-9]*\.?[0-9]+)', line)
-                    if match:
-                        average_latency_value=match.group(1)
-        if len(average_latency_value) > 0:
-            latency[latency_key] = average_latency_value
-        else:
-            latency[latency_key] = "NA"
+                if "latency_tracer_pipeline" in line:
+                    match = re.search(r'frame_latency=\(double\)([0-9]*\.?[0-9]+)', line)
+                    source_name_match = re.search(r'source_name=\(string\)([^,\s]+)', line)
+                    sink_name_match = re.search(r'sink_name=\(string\)([^,\s]+)', line)
+                    frame_number_match = re.search(r'frame_num=\(uint\)(\d+)', line)
+
+                    if match and source_name_match and sink_name_match and frame_number_match:
+                        source_name = source_name_match.group(1)
+                        sink_name = sink_name_match.group(1)
+                        frame_number = int(frame_number_match.group(1))
+                        frame_latency_value = float(match.group(1))
+
+                        # Create key from source_name and sink_name
+                        source_sink_key = (source_name, sink_name)
+
+                        # Initialize dictionary for this source-sink pair if not exists
+                        if source_sink_key not in latency_data_structure:
+                            latency_data_structure[source_sink_key] = {}
+
+                        # Store frame latency value with frame number as key
+                        latency_data_structure[source_sink_key][frame_number] = frame_latency_value
+
+        # Process collected data and calculate statistics
+        latency_results = {}
+        channel_medians = {}
+        channel_averages = {}
+        channel_stddevs = {}
+
+        if latency_data_structure:
+            for (source_name, sink_name), frame_data in latency_data_structure.items():
+                if frame_data:  # Check if we have frame data
+                    latency_values = list(frame_data.values())
+                    median_latency = median(latency_values)
+                    average_latency = mean(latency_values)
+                    stddev_latency = stdev(latency_values) if len(latency_values) > 1 else 0.0
+
+                    channel_key = f"{source_name}_{sink_name}"
+                    channel_medians[channel_key] = round(median_latency, 3)
+                    channel_averages[channel_key] = round(average_latency, 3)
+                    channel_stddevs[channel_key] = round(stddev_latency, 3)
+
+                    print(f"Pipeline_{lat_filename} {source_name}_{sink_name}: median = {median_latency:.3f} ms, average = {average_latency:.3f} ms, stddev = {stddev_latency:.3f} ms, frames = {len(frame_data)}")
+
+        # Store results
+        if channel_medians:
+            latency_results[f"For Pipeline_{lat_filename} run per frame Latency (ms) for Channel(source_sink)"] = channel_medians
+
+        if channel_averages:
+            latency_results[f"For Pipeline_{lat_filename} run per frame Average Latency (ms) for Channel(source_sink)"] = channel_averages
+
+        if channel_stddevs:
+            latency_results[f"For Pipeline_{lat_filename} run per frame Stddev Latency (ms) for Channel(source_sink)"] = channel_stddevs
+
-        return latency
+        return latency_results
 
     def return_blank(self):
         return {"LATENCY": "NA"}
@@ -676,6 +721,8 @@ def add_parser():
 n = 0
 df = pd.DataFrame()
 full_kpi_dict = {}
+all_channel_medians = []  # Collect all channel medians from all files
+
 for kpiExtractor in KPIExtractor_OPTION:
     fileFound = False
     for dirpath, dirname, filename in os.walk(root_directory):
@@ -686,10 +733,28 @@
                 kpi_dict = extractor.extract_data(
                     os.path.join(root_directory, file))
                 if kpi_dict:
+                    # Collect max median from each file for aggregate statistics
+                    for key, value in kpi_dict.items():
+                        if "run per frame Latency (ms) for Channel(source_sink)" in key and isinstance(value, dict):
+                            # Add only the max median from this file to all_channel_medians
+                            if value:  # Check if dictionary is not empty
+                                max_median_for_file = max(value.values())
+                                all_channel_medians.append(max_median_for_file)
                     full_kpi_dict.update(kpi_dict)
 
+
+# Calculate aggregate statistics of all channel medians
+if all_channel_medians:
+    overall_median = median(all_channel_medians)
+
+    full_kpi_dict["Overall Latency (ms)"] = round(overall_median, 3)
+
 # Write out summary csv file from dictionary
 with open(output, 'w') as csv_file:
     writer = csv.writer(csv_file)
     for key, value in full_kpi_dict.items():
-        writer.writerow([key, value])
+        if isinstance(value, dict):
+            # Format dictionary values without curly braces
+            formatted_value = ', '.join([f'{k}: {v}' for k, v in value.items()])
+            writer.writerow([key, formatted_value])
+        else:
+            writer.writerow([key, value])
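
The summary CSV now mixes scalar rows with per-channel rows; a small sketch of the resulting shape, using made-up values:

    import csv, sys

    full_kpi_dict = {
        "For Pipeline_1 run per frame Latency (ms) for Channel(source_sink)":
            {"source0_sink_0": 18.732, "source1_sink_1": 21.004},  # hypothetical channels
        "Overall Latency (ms)": 19.868,
    }
    writer = csv.writer(sys.stdout)
    for key, value in full_kpi_dict.items():
        if isinstance(value, dict):
            writer.writerow([key, ', '.join(f'{k}: {v}' for k, v in value.items())])
        else:
            writer.writerow([key, value])
    # For Pipeline_1 run per frame Latency (ms) for Channel(source_sink),"source0_sink_0: 18.732, source1_sink_1: 21.004"
    # Overall Latency (ms),19.868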
2 changes: 1 addition & 1 deletion benchmark-scripts/format_avc_mp4.sh
@@ -100,7 +100,7 @@ docker run --network host --privileged --user root -e VIDEO_FILE="$1" \
     -v /tmp/.X11-unix:/tmp/.X11-unix \
     -v "$SAMPLE_MEDIA_DIR"/:/vids \
     -w /vids -i --rm intel/dlstreamer:2025.2.0-ubuntu24 \
-    bash -c "if [ -f /vids/$result ]; then exit 1; else gst-launch-1.0 filesrc location=/vids/$1 ! decodebin ! videoconvert ! videoscale ! videorate ! 'video/x-raw,width=$WIDTH,height=$HEIGHT,framerate=$FPS/1' ! x264enc ! h264parse ! mp4mux ! filesink location=/vids/$result; fi"
+    bash -c "if [ -f /vids/$result ]; then exit 1; else gst-launch-1.0 filesrc location=/vids/$1 ! decodebin ! videoconvert ! videoscale ! videorate ! 'video/x-raw,width=$WIDTH,height=$HEIGHT,framerate=$FPS/1' ! x264enc tune=zerolatency ! h264parse ! mp4mux ! filesink location=/vids/$result; fi"
 
 rm ../sample-media/"$1"
 echo "Result will be created in ../sample-media/$result"
10 changes: 5 additions & 5 deletions benchmark-scripts/stream_density.py
@@ -725,7 +725,7 @@ def calculate_multi_stream_fps(num_pipelines, results_dir, container_name):
 def get_pipeline_stream_count(base_dir=None):
     """
     Detects the number of video streams defined in the pipeline.sh script
-    by counting occurrences of 'filesrc' elements.
+    by counting occurrences of 'filesrc' or 'rtspsrc' elements.
 
     Args:
         base_dir (str, optional): Base directory to locate the pipeline script.
@@ -747,17 +747,17 @@ def get_pipeline_stream_count(base_dir=None):
print(f"WARN: Pipeline script not found at {pipeline_script_path}")
return 0

# Read and search for 'filesrc' occurrences
# Read and search for 'filesrc' or 'rtspsrc' occurrences
with open(pipeline_script_path, 'r') as f:
content = f.read()

matches = re.findall(r'\bfilesrc\b', content)
matches = re.findall(r'\b(filesrc|rtspsrc)\b', content)
if matches:
detected_streams = len(matches)
print(f"DEBUG: Detected {detected_streams} stream(s) from {pipeline_script_path}")
return detected_streams
else:
print(f"DEBUG: No 'filesrc' tokens found in {pipeline_script_path}")
print(f"DEBUG: No 'filesrc' or 'rtspsrc' tokens found in {pipeline_script_path}")
return 0

except Exception as e:
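
One subtlety in the updated pattern: because the alternation is a capture group, re.findall returns the captured text for each hit, so len(matches) still counts every filesrc and rtspsrc occurrence. A quick check against made-up pipeline content:

    import re

    content = "filesrc location=a.mp4 ! decodebin ... rtspsrc location=rtsp://cam1 ... filesrc location=b.mp4"
    matches = re.findall(r'\b(filesrc|rtspsrc)\b', content)
    print(matches)       # ['filesrc', 'rtspsrc', 'filesrc']
    print(len(matches))  # 3 -- one per detected stream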
@@ -785,4 +785,4 @@ def get_latest_pipeline_stream_logs(num_pipelines, pipeline_log_files):
         timestamp_files, key=lambda x: x[1], reverse=False)
     latest_files = [
         file for file, mtime in sorted_timestamp[:num_pipelines]]
-    return latest_files
\ No newline at end of file
+    return latest_files