diff --git a/benchmark-scripts/Makefile b/benchmark-scripts/Makefile
index d1e72c2..96a315a 100644
--- a/benchmark-scripts/Makefile
+++ b/benchmark-scripts/Makefile
@@ -1,3 +1,5 @@
+# Copyright © 2024 Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
 build-all: build-benchmark build-xpu build-igt
@@ -17,4 +19,10 @@ run:
 	docker run -it --rm -v /var/run/docker.sock:/var/run/docker.sock -v `pwd`/results:/tmp/results --net=host --privileged benchmark:dev /bin/bash
 
 consolidate:
-	docker run -itd -v `pwd`/$(ROOT_DIRECTORY):/$(ROOT_DIRECTORY) -e ROOT_DIRECTORY=$(ROOT_DIRECTORY)--net=host --privileged benchmark:dev /bin/bash -c "python3 consolidate_multiple_run_of_metrics.py --root_directory $(ROOT_DIRECTORY)/ --output $(ROOT_DIRECTORY)/summary.csv"
\ No newline at end of file
+	docker run -itd -v `pwd`/$(ROOT_DIRECTORY):/$(ROOT_DIRECTORY) -e ROOT_DIRECTORY=$(ROOT_DIRECTORY) --net=host --privileged benchmark:dev /bin/bash -c "python3 consolidate_multiple_run_of_metrics.py --root_directory $(ROOT_DIRECTORY)/ --output $(ROOT_DIRECTORY)/summary.csv"
+
+python-test:
+	python -m coverage run -m unittest benchmark_test.py
+
+python-coverage:
+	coverage report -m
\ No newline at end of file
diff --git a/benchmark-scripts/benchmark.py b/benchmark-scripts/benchmark.py
new file mode 100644
index 0000000..e42d264
--- /dev/null
+++ b/benchmark-scripts/benchmark.py
@@ -0,0 +1,119 @@
+'''
+* Copyright (C) 2024 Intel Corporation.
+*
+* SPDX-License-Identifier: Apache-2.0
+'''
+
+import argparse
+import os
+import shlex
+import subprocess  # nosec B404
+import time
+import traceback
+
+
+def parse_args(print=False):
+    parser = argparse.ArgumentParser(
+        prog='benchmark',
+        description='runs benchmarking using docker compose')
+    parser.add_argument('--pipelines', type=int, default=1,
+                        help='number of pipelines')
+    parser.add_argument('--target_fps', type=int, default=None,
+                        help='stream density target FPS')
+    # TODO: add variable for stream density increment when implementing
+    parser.add_argument('--results_dir',
+                        default=os.path.join(os.curdir, 'results'),
+                        help='full path to the desired directory for logs ' +
+                             'and results')
+    parser.add_argument('--duration', type=int, default=30,
+                        help='time in seconds, not needed when ' +
+                             '--stream_density is specified')
+    parser.add_argument('--init_duration', type=int, default=5,
+                        help='time in seconds')
+    # TODO: change target_device to an env variable in docker compose
+    parser.add_argument('--target_device', default='CPU',
+                        help='desired running platform [cpu|core|xeon|dgpu.x]')
+    parser.add_argument('--compose_file', default=None, action='append',
+                        help='path to docker compose files. ' +
+                             'can be used multiple times')
+    parser.add_argument('--retail_use_case_root',
+                        default=os.path.join(
+                            os.curdir, '..', '..', 'retail-use-cases'),
+                        help='full path to the retail-use-cases repo root')
+    if print:
+        parser.print_help()
+        return
+    return parser.parse_args()
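+
+
+# Example invocation (illustrative; the compose file name below is an
+# assumption, not a file shipped in this change):
+#   python3 benchmark.py --pipelines 2 --duration 60 \
+#       --compose_file path/to/pipelines-compose.yaml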
+
+
+def docker_compose_containers(command, compose_files=[], compose_pre_args="",
+                              compose_post_args="",
+                              env_vars=os.environ.copy()):
+    try:
+        files = " -f ".join(compose_files)
+        compose_string = ("docker compose %s -f %s %s %s" %
+                          (compose_pre_args, files, command,
+                           compose_post_args))
+        compose_args = shlex.split(compose_string)
+
+        p = subprocess.Popen(compose_args,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE,
+                             env=env_vars)  # nosec B404, B603
+        stdout, stderr = p.communicate()
+
+        if p.returncode and stderr:
+            print("Error bringing %s the compose files: %s" %
+                  (command, stderr))
+        return stdout.strip(), stderr, p.returncode
+    except subprocess.CalledProcessError:
+        print("Exception bringing %s the compose files: %s" %
+              (command, traceback.format_exc()))
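+
+
+# For example (file names illustrative), the call
+#   docker_compose_containers("up", compose_files=["cams.yaml", "bench.yaml"],
+#                             compose_post_args="-d")
+# spawns: docker compose  -f cams.yaml -f bench.yaml up -d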
+
+
+def main():
+    my_args = parse_args()
+
+    results_dir = os.path.abspath(my_args.results_dir)
+    if not os.path.exists(results_dir):
+        os.mkdir(results_dir)
+
+    print("Starting workload(s)")
+
+    # start the docker containers
+    # pass in necessary variables using env vars
+    compose_files = []
+    for file in (my_args.compose_file or []):
+        compose_files.append(os.path.abspath(file))
+
+    # add the benchmark docker compose file
+    compose_files.append(os.path.abspath(os.path.join(
+        os.curdir, '..', 'docker', 'docker-compose.yaml')))
+    env_vars = os.environ.copy()
+    env_vars["log_dir"] = results_dir
+    env_vars["RESULTS_DIR"] = results_dir
+    env_vars["DEVICE"] = my_args.target_device
+    retail_use_case_root = os.path.abspath(my_args.retail_use_case_root)
+    env_vars["RETAIL_USE_CASE_ROOT"] = retail_use_case_root
+    if my_args.pipelines > 0:
+        env_vars["PIPELINE_COUNT"] = str(my_args.pipelines)
+
+    docker_compose_containers("up", compose_files=compose_files,
+                              compose_post_args="-d", env_vars=env_vars)
+    print("Waiting for init duration to complete...")
+    time.sleep(my_args.init_duration)
+
+    # use duration to sleep
+    print("Waiting for %d seconds for workload to finish" % my_args.duration)
+    time.sleep(my_args.duration)
+    # stop all containers and camera-simulator
+    docker_compose_containers("down", compose_files=compose_files,
+                              env_vars=env_vars)
+
+    # collect metrics using copy-platform-metrics
+    print("workloads finished...")
+    # TODO: implement results handling based on what pipeline is run
+
+
+if __name__ == '__main__':
+    main()
diff --git a/benchmark-scripts/benchmark_test.py b/benchmark-scripts/benchmark_test.py
new file mode 100644
index 0000000..85bc743
--- /dev/null
+++ b/benchmark-scripts/benchmark_test.py
@@ -0,0 +1,59 @@
+'''
+* Copyright (C) 2024 Intel Corporation.
+*
+* SPDX-License-Identifier: Apache-2.0
+'''
+
+import mock
+import subprocess  # nosec B404
+import unittest
+import benchmark
+
+
+class Testing(unittest.TestCase):
+
+    class MockPopen(object):
+        def __init__(self):
+            pass
+
+        def communicate(self, input=None):
+            pass
+
+        @property
+        def returncode(self):
+            pass
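+
+    # MockPopen stands in for subprocess.Popen in the tests below;
+    # communicate() and returncode are stubbed per test with mock.Mock
+    # and mock.PropertyMock, so no real docker compose process is run.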
+
+    def test_docker_compose_containers_success(self):
+        mock_popen = Testing.MockPopen()
+        mock_popen.communicate = mock.Mock(
+            return_value=('', '1Starting camera: rtsp://127.0.0.1:8554/' +
+                          'camera_0 from *.mp4'))
+        mock_returncode = mock.PropertyMock(return_value=0)
+        type(mock_popen).returncode = mock_returncode
+
+        setattr(subprocess, 'Popen', lambda *args, **kargs: mock_popen)
+        res = benchmark.docker_compose_containers('up')
+
+        self.assertEqual(res, ('',
+                               '1Starting camera: rtsp://127.0.0.1:8554/' +
+                               'camera_0 from *.mp4', 0))
+        mock_popen.communicate.assert_called_once_with()
+        mock_returncode.assert_called()
+
+    def test_docker_compose_containers_fail(self):
+        mock_popen = Testing.MockPopen()
+        mock_popen.communicate = mock.Mock(return_value=('',
+                                           b'an error occurred'))
+        mock_returncode = mock.PropertyMock(return_value=1)
+        type(mock_popen).returncode = mock_returncode
+
+        setattr(subprocess, 'Popen', lambda *args, **kargs: mock_popen)
+        res = benchmark.docker_compose_containers('up')
+
+        self.assertEqual(res, ('', b'an error occurred', 1))
+        mock_popen.communicate.assert_called_once_with()
+        mock_returncode.assert_called()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/benchmark-scripts/camera-simulator.sh b/benchmark-scripts/camera-simulator.sh
index 9f5d084..a67e8a2 100755
--- a/benchmark-scripts/camera-simulator.sh
+++ b/benchmark-scripts/camera-simulator.sh
@@ -1,6 +1,6 @@
 #!/bin/bash -e
 #
-# Copyright (C) 2023 Intel Corporation.
+# Copyright (C) 2024 Intel Corporation.
 #
 # SPDX-License-Identifier: Apache-2.0
 #
@@ -21,8 +21,6 @@ if [ "${COMMAND,,}" = "start" ]; then
 	if [ -z "$CAMERAS" ]; then
 		CAMERAS=${#FILES[@]}
 	fi
-
-	cd $SOURCE_DIR/camera-simulator
 
 	docker run --rm -t --network=host --name camera-simulator aler9/rtsp-simple-server >rtsp_simple_server.log.txt 2>&1 &
 	index=0
diff --git a/benchmark-scripts/results_parser.py b/benchmark-scripts/results_parser.py
index bab8a78..b5814a7 100644
--- a/benchmark-scripts/results_parser.py
+++ b/benchmark-scripts/results_parser.py
@@ -1,10 +1,9 @@
 '''
-* Copyright (C) 2023 Intel Corporation.
+* Copyright (C) 2024 Intel Corporation.
 *
 * SPDX-License-Identifier: Apache-2.0
 '''
 
-import time
 import sys
 import argparse
 import os
@@ -21,6 +20,9 @@ class InferenceCounts:
     text_recognition: int = 0
     barcode: int = 0
 
+    def __json__(self):
+        return self.__dict__
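+
+    # Note: the standard json module never calls __json__ on its own;
+    # the summary writer in main() passes default=lambda o: o.__json__()
+    # to json.dump so an InferenceCounts instance serializes cleanly.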
+
 
 tracked_objects = {}
 frame_count = 0
@@ -62,13 +64,17 @@ def print_object(obj):
 
 
 def process(results, reclassify_interval):
-    product_key = "classification_layer_name:efficientnet-b0/model/head/dense/BiasAdd/Add"
-    text_keys = ["inference_layer_name:logits", "inference_layer_name:shadow/LSTMLayers/transpose_time_major",
+    product_key = ("classification_layer_name:efficientnet-b0/model/head/" +
+                   "dense/BiasAdd/Add")
+    text_keys = ["inference_layer_name:logits",
+                 "inference_layer_name:shadow/LSTMLayers/" +
+                 "transpose_time_major",
                  "inference_layer_name:shadow/LSTMLayers/Reshape_1"]
     detections = {}
     objects = {}
-    inferenceCounts.detection+=1
-    # Needed for additional entries like non-inference results like {"resolution":{"height":2160,"width":3840},"timestamp":201018476}
+    inferenceCounts.detection += 1
+    # Needed for additional entries like non-inference results like
+    # {"resolution":{"height":2160,"width":3840},"timestamp":201018476}
     if "objects" not in results:
         return
     for result in results["objects"]:
@@ -109,7 +115,8 @@ def process(results, reclassify_interval):
                 objects[parent_id]["text"].append(text)
 
     print("- Frame {}".format(frame_count))
-    for obj in sorted(objects.values(),key=lambda obj: obj["bounding_box"]["x_min"]):
+    for obj in sorted(objects.values(),
+                      key=lambda obj: obj["bounding_box"]["x_min"]):
         print_object(obj)
         update_tracked_object(obj,tracked_objects)
@@ -120,36 +127,35 @@ def update_tracked_object(obj, tracked_objects):
         tracked_object["id"] = obj["id"]
     for tracked_key in tracked_keys:
         updates = obj[tracked_key]
-        if not isinstance(updates,list):
+        if not isinstance(updates, list):
             updates= [updates]
         tracked_object.setdefault(tracked_key,Counter()).update(
            updates)
 
-def process_file(args):
-    if args.file:
-        filename=args.file
+
+def process_file(results_root, file, stream_index, reclassify_interval):
+    if file:
+        filename = file
     else:
-        filename = "results/r{}.jsonl".format(args.stream_index)
-    file = open(filename, "r")
-    line = file.readline()
-    global frame_count
-    while line:
-        try:
-            results = json.loads(line)
-            process(results, args.reclassify_interval)
-            frame_count += 1
-        except Exception as e:
-            print("Error: {}".format(e))
-            print(traceback.format_exc())
-        line = file.readline()
-    file.close()
+        filename = os.path.join(results_root,
+                                "r{}.jsonl".format(stream_index))
+    with open(filename, "r") as file:
+        global frame_count
+        for line in file:
+            try:
+                results = json.loads(line)
+                process(results, reclassify_interval)
+                frame_count += 1
+            except Exception as e:
+                print("Error: {}".format(e))
+                print(traceback.format_exc())
 
 
 def on_connect(client, user_data, _unused_flags, return_code):
     if return_code == 0:
         args = user_data
-        print("Connected to broker at {}:{}".format(args.broker_address, args.broker_port))
+        print("Connected to broker at {}:{}".format(args.broker_address,
+                                                    args.broker_port))
        topic = "gulfstream/results_{}".format(args.stream_index)
        print("Subscribing to topic {}".format(topic))
        client.subscribe(topic)
@@ -157,65 +163,71 @@
         print("Error {} connecting to broker".format(return_code))
         sys.exit(1)
 
+
 def on_message(_unused_client, user_data, msg):
     results = json.loads(msg.payload)
     process(results)
 
-def process_mqtt(args):
-    client = mqtt.Client("Gulfstream", userdata=args)
+
+def process_mqtt(broker_address, broker_port, stream_index):
+    client = mqtt.Client("Gulfstream", userdata=argparse.Namespace(
+        broker_address=broker_address, broker_port=broker_port,
+        stream_index=stream_index))
     client.on_connect = on_connect
     client.on_message = on_message
-    client.connect(args.broker_address, args.broker_port)
+    client.connect(broker_address, broker_port)
     client.loop_forever()
 
 
-def main():
+def main(mode, stream_index=0, file="", min_detections=15,
+         reclassify_interval=1, broker_address="localhost", broker_port=1883,
+         results_root=os.path.join(os.path.curdir, 'results')):
     try:
-        args = parse_args()
-        if args.mode == "file":
-            process_file(args)
+        if mode == "file":
+            process_file(results_root, file, stream_index,
+                         reclassify_interval)
         else:
             import paho.mqtt.client as mqtt
-            process_mqtt(args)
+            process_mqtt(broker_address, broker_port, stream_index)
 
         text_count = 0
         barcode_count = 0
-        print("-------")
-        print("Summary")
-        print("-------")
-        print("Frames {}".format(frame_count))
+        results = {"frame": frame_count}
         inferenceCounts.classification = inferenceCounts.detection
-        print(inferenceCounts)
+        results["inference_counts"] = inferenceCounts
         summary = []
         for obj in tracked_objects.values():
             summary_obj = {}
             id = obj["id"]
             for key in obj:
-                if isinstance(obj[key],Counter):
-                    print("key is : {}".format(key))
+                if isinstance(obj[key], Counter):
                     if key == "text":
-                        obj[key] = {k:v for k, v in obj[key].items() if v > args.min_detections}
+                        obj[key] = {k: v for k, v in obj[key].items()
+                                    if v > min_detections}
                         summary_obj[key] = list(obj[key].items())
                     obj[key] = list(obj[key].items())
                     if key == "barcode":
                         if None in obj[key][0]:
-                            print("barcode is None, skip")
+                            pass
                         else:
-                            print("barcode found: {}".format(obj[key][0]))
                             barcode_count += 1
                 else:
                     summary_obj[key] = obj[key]
-                    print("obj[key]: {}".format(obj[key]))
             detections = obj["label"][0][1]
-            if detections >= args.min_detections:
+            if detections >= min_detections:
                 print_object(obj)
                 text_count += len(obj["text"])
+            # should this be a deep copy here?
+            # or does the summary actually have all the right values?
             summary.append(summary_obj)
-        print(json.dumps(summary))
-        print("Total Objects: {} ".format(len(obj)))
-        print("Total Text count: {}".format(text_count))
-        print("Total Barcode count: {}".format(barcode_count))
+        results["summary"] = summary
+        # what should total_objects be?
+        results["total_objects"] = len(obj)
+        results["total_text_count"] = text_count
+        results["total_barcode_count"] = barcode_count
+        with open(os.path.join(results_root, 'meta_summary.json'),
+                  'w', encoding='utf-8') as f:
+            json.dump(results, f, ensure_ascii=False, indent=4,
+                      default=lambda o: o.__json__())
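+        # meta_summary.json then holds a structure like (values
+        # illustrative):
+        #   {"frame": 1500, "inference_counts": {"detection": 1500, ...},
+        #    "summary": [...], "total_objects": 5,
+        #    "total_text_count": 12, "total_barcode_count": 2}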
     except:
         print(traceback.format_exc())
 
 
 if __name__ == "__main__":
-    main()
+    args = parse_args()
+    main(args.mode, stream_index=args.stream_index, file=args.file,
+         min_detections=args.min_detections,
+         reclassify_interval=args.reclassify_interval,
+         broker_address=args.broker_address, broker_port=args.broker_port)
diff --git a/docker/platform/requirements.txt b/docker/platform/requirements.txt
index 8bc1f73..25fcd8d 100644
--- a/docker/platform/requirements.txt
+++ b/docker/platform/requirements.txt
@@ -1,3 +1,6 @@
+argparse==1.4.0
+coverage==7.4.3
+mock==5.1.0
 natsort==8.4.0
 numpy==1.26.4
 pandas==2.2.1