diff --git a/Makefile b/Makefile index f64d869..1b7e78b 100644 --- a/Makefile +++ b/Makefile @@ -9,10 +9,12 @@ MKDOCS_IMAGE ?= asc-mkdocs PIPELINE_COUNT ?= 1 TARGET_FPS ?= 14.95 +CONTAINER_NAMES ?= gst0 DOCKER_COMPOSE ?= docker-compose.yml RESULTS_DIR ?= $(PWD)/results RETAIL_USE_CASE_ROOT ?= $(PWD) BENCHMARK_DURATION ?= 45 +DENSITY_INCREMENT ?= 1 download-models: download-yolov8s ./download_models/downloadModels.sh @@ -90,7 +92,7 @@ build-benchmark: cd performance-tools && $(MAKE) build-benchmark-docker benchmark: build-benchmark download-models - cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/docker-compose.yml \ + cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/$(DOCKER_COMPOSE) \ --pipeline $(PIPELINE_COUNT) --duration $(BENCHMARK_DURATION) --results_dir $(RESULTS_DIR) # consolidate to show the summary csv @cd performance-tools/benchmark-scripts && ROOT_DIRECTORY=$(RESULTS_DIR) $(MAKE) --no-print-directory consolidate && \ @@ -98,8 +100,20 @@ benchmark: build-benchmark download-models echo "====== Loss prevention benchmark results summary: " && cat $(RESULTS_DIR)/summary.csv benchmark-stream-density: build-benchmark download-models - cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/docker-compose.yml \ - --target_fps $(TARGET_FPS) --density_increment 1 --results_dir $(RESULTS_DIR) +# example commands: +# 1. for single container pipeline stream density +# ```console +# make PIPELINE_SCRIPT=yolov8s_roi.sh benchmark-stream-density +# ``` +# 2. 
for multiple container pipelines stream density: +# ```console +# make DOCKER_COMPOSE=docker-compose-2-clients.yml BENCHMARK_DURATION=90 TARGET_FPS="10.95 2.95" CONTAINER_NAMES="gst1 gst2" \ +# benchmark-stream-density +# ``` +# + cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/$(DOCKER_COMPOSE) \ + --target_fps $(TARGET_FPS) --container_names $(CONTAINER_NAMES) \ + --density_increment $(DENSITY_INCREMENT) --results_dir $(RESULTS_DIR) clean-benchmark-results: cd performance-tools/benchmark-scripts && rm -rf $(RESULTS_DIR)/* || true diff --git a/benchmark.md b/benchmark.md index 26e94cd..74a2405 100644 --- a/benchmark.md +++ b/benchmark.md @@ -29,7 +29,7 @@ make benchmark !!! Note For more details on how this works, you can check the documentation of performance-tools in [Benchmark a CV Pipeline](https://github.com/intel-retail/documentation/blob/main/docs_src/performance-tools/benchmark.md#benchmark-a-cv-pipeline) section. -1. Benchmarking the stream density of the loss-prevention pipelines: +2. Benchmarking the stream density of the loss-prevention pipelines: ```bash make benchmark-stream-density @@ -38,6 +38,45 @@ make benchmark-stream-density !!! Note For more details on how this works, you can check the documentation of performance-tools in [Benchmark Stream Density for CV Pipelines](https://github.com/intel-retail/documentation/blob/main/docs_src/performance-tools/benchmark.md#benchmark-stream-density-for-cv-pipelines) section. +### Benchmark for multiple pipelines in parallel + +There is an example docker-compose file under src/ directory, named `docker-compose-2-clients.yml` that can be used to showcase both benchmarks of parallel running pipelines and stream density benchmarks of running pipelines. This docker-compose file contains two different running pipelines: one is running yolov5s pipeline and the other one is yolov8 region of interests pipeline. 
Use the following command examples to do the benchmarks: + +```bash +make update-submodules +``` + +and then re-build the whole benchmark tools: + +```bash +make build-benchmark +``` + +then clean up the previous results to have a fresh start: + +```bash +make clean-benchmark-results +``` + +and then you can benchmark multiple different running pipelines in that compose file via: + +```bash +make DOCKER_COMPOSE=docker-compose-2-clients.yml BENCHMARK_DURATION=90 benchmark +``` + +!!! Note + BENCHMARK_DURATION is provided to have longer time for pipelines as more pipelines running in parallel in the docker-compose tend to slow down the system and need more time for all pipelines to be stabilized. Adjust this input accordingly for your hardware system. + +and then you can also do the stream density of both running pipelines in this docker-compose file via the following command: + +```bash +make DOCKER_COMPOSE=docker-compose-2-clients.yml BENCHMARK_DURATION=90 TARGET_FPS="10.95 2.95" CONTAINER_NAMES="gst1 gst2" benchmark-stream-density +``` + +!!! Note + The stream density benchmarks can take a long time depending on your hardware system. Please allow it to run until the end to see the benchmark result. 
+ + ## Tuning Benchmark Parameters You can tune some benchmark parameters when you benchmark loss-prevention pipelines: @@ -46,7 +85,9 @@ You can tune some benchmark parameters when you benchmark loss-prevention pipeli | -----------------------|-----------------|----------------------------------------------------------------------| | PIPELINE_COUNT | 1 | number of loss-prevention pipelines to launch for benchmarking | | BENCHMARK_DURATION | 45 | the time period of benchmarking will be run in second | -| TARGET_FPS | 14.95 | used for stream density maintaining that target frames per second (fps) while having maximum number of pipelines running | +| TARGET_FPS | 14.95 | used for stream density maintaining that target frames per second (fps) while having maximum number of pipelines running and this can be multiple values with whitespace delimited for multiple running pipelines | +| CONTAINER_NAMES | gst0 | used for stream density to have target container name list for multiple running pipelines and paired with TARGET_FPS to have 1-to-1 mapping with the pipeline | +| DENSITY_INCREMENT | 1 | used for stream density to set the pipeline increment number for each iteration | | RESULTS_DIR | ./results | the directory of the outputs for running pipeline logs and fps info | | PIPELINE_SCRIPT | yolov5s.sh | the script to run the pipeline, for yolov8, you can use yolov8s_roi.sh for running region of interest pipeline | | RENDER_MODE | 0 | when it is set to 1, another popup winodw will display the input source video and some of inferencing results like bounding boxes and/or region of interests | diff --git a/src/docker-compose-2-clients.yml b/src/docker-compose-2-clients.yml new file mode 100644 index 0000000..ea5bb91 --- /dev/null +++ b/src/docker-compose-2-clients.yml @@ -0,0 +1,125 @@ +# +# Copyright (C) 2024 Intel Corporation. 
+# +# SPDX-License-Identifier: Apache-2.0 +# + + + ## Current Developer Toolbox doesn't support environment files, make sure to remove any files or environment variables starting with $ +version: '3.7' +services: + camera-simulator: + container_name: camera-simulator + image: aler9/rtsp-simple-server + ports: + - "127.0.0.1:8554:8554" + camera-simulator0: + image: jrottenberg/ffmpeg:4.1-alpine + container_name: camera-simulator0 + network_mode: "host" + entrypoint: ["/bin/sh","-c"] + command: + - | + if [ ! -f /home/pipeline-server/sample-media/coca-cola-4465029-1920-15-bench.mp4 ]; then + mkdir -p /home/pipeline-server/sample-media + wget -O /home/pipeline-server/sample-media/coca-cola-4465029-1920-15-bench.mp4 https://www.pexels.com/download/video/4465029 + fi + ffmpeg -nostdin -re -stream_loop -1 -i /home/pipeline-server/sample-media/coca-cola-4465029-1920-15-bench.mp4 -c copy -f rtsp -rtsp_transport tcp rtsp://localhost:8554/camera_0 + depends_on: + - camera-simulator + volumes: + - ${RETAIL_USE_CASE_ROOT:-..}/performance-tools/sample-media:/home/pipeline-server/sample-media + + GstClient1: + image: dlstreamer:dev + deploy: + mode: replicated + replicas: ${PIPELINE_COUNT:-1} + network_mode: "host" + entrypoint: /script/entrypoint.sh --pipeline_script_choice ${PIPELINE_SCRIPT:-yolov5s.sh} + privileged: true + ipc: "host" + env_file: + - ./res/gst.env + - ${DEVICE_ENV:-res/all-cpu.env} + environment: + - CONTAINER_NAME=gst1 + - INPUTSRC=${INPUTSRC:-rtsp://localhost:8554/camera_0} + - RENDER_MODE=${RENDER_MODE:-0} #RENDER_MODE=1 will work only after running xhost +local:docker + - DISPLAY=$DISPLAY + - HTTP_PROXY + - HTTPS_PROXY + - NO_PROXY + volumes: + - ${RESULTS_DIR:-../results}:/tmp/results + - ~/.Xauthority:/home/dlstreamer/.Xauthority + - /tmp/.X11-unix + - ~/.cl-cache:/home/pipeline-server/.cl-cache + - ./res/:/home/pipeline-server/envs + - ${RETAIL_USE_CASE_ROOT:-..}/models:/home/pipeline-server/models + - ./pipelines/:/home/pipeline-server/pipelines + - 
./extensions/:/home/pipeline-server/extensions + + GstClient2: + image: dlstreamer:dev + deploy: + mode: replicated + replicas: ${PIPELINE_COUNT:-1} + network_mode: "host" + entrypoint: /script/entrypoint.sh --pipeline_script_choice ${PIPELINE_SCRIPT:-yolov8s_roi.sh} + privileged: true + ipc: "host" + env_file: + - ./res/gst.env + - ${DEVICE_ENV:-res/all-cpu.env} + environment: + - CONTAINER_NAME=gst2 + - INPUTSRC=${INPUTSRC:-https://github.com/openvinotoolkit/openvino_notebooks/raw/refs/heads/recipes/recipes/automated_self_checkout/data/example.mp4} + - RENDER_MODE=${RENDER_MODE:-0} #RENDER_MODE=1 will work only after running xhost +local:docker + - DISPLAY=$DISPLAY + - HTTP_PROXY + - HTTPS_PROXY + - NO_PROXY + volumes: + - ${RESULTS_DIR:-../results}:/tmp/results + - ~/.Xauthority:/home/dlstreamer/.Xauthority + - /tmp/.X11-unix + - ~/.cl-cache:/home/pipeline-server/.cl-cache + - ./res/:/home/pipeline-server/envs + - ${RETAIL_USE_CASE_ROOT:-..}/models:/home/pipeline-server/models + - ./pipelines/:/home/pipeline-server/pipelines + - ./extensions/:/home/pipeline-server/extensions + + mosquitto: + image: eclipse-mosquitto:2.0 + container_name: mosquitto + network_mode: "host" + ports: + - "127.0.0.1:1883:1883" + depends_on: + - GstClient1 + - GstClient2 + + mqtt_tracker: + image: loss-prevention:dev + network_mode: "host" + environment: + - MQTT_URL=127.0.0.1 + - MQTT_PORT=1883 + - MQTT_TOPIC=event/detection + - ROI_NAME=BASKET + restart: always + depends_on: + - mosquitto + + loss-prevention: + image: loss-prevention:dev + network_mode: "host" + environment: + - MQTT_URL=127.0.0.1 + - MQTT_PORT=1883 + - MQTT_TOPIC=event/detection + - ROI_NAME=BASKET,BAGGING + restart: always + depends_on: + - mosquitto diff --git a/src/entrypoint.sh b/src/entrypoint.sh index 194323b..1f49a28 100755 --- a/src/entrypoint.sh +++ b/src/entrypoint.sh @@ -19,6 +19,7 @@ checkBatchSize() { } cid_count="${cid_count:=0}" +CONTAINER_NAME="${CONTAINER_NAME:=gst}" cameras="${cameras:=}" 
stream_density_mount="${stream_density_mount:=}" stream_density_params="${stream_density_params:=}" @@ -125,14 +126,17 @@ fi # generate unique container id based on the date with the precision upto nano-seconds cid=$(date +%Y%m%d%H%M%S%N) +CONTAINER_NAME="${CONTAINER_NAME//\"/}" # Ensure to remove all double quotes from CONTAINER_NAME +cid="${cid}"_${CONTAINER_NAME} +echo "CONTAINER_NAME: ${CONTAINER_NAME}" echo "cid: $cid" -touch /tmp/results/r"$cid"_gst.jsonl -chown 1000:1000 /tmp/results/r"$cid"_gst.jsonl -touch /tmp/results/gst-launch_"$cid"_gst.log -chown 1000:1000 /tmp/results/gst-launch_"$cid"_gst.log -touch /tmp/results/pipeline"$cid"_gst.log -chown 1000:1000 /tmp/results/pipeline"$cid"_gst.log +touch /tmp/results/r"$cid".jsonl +chown 1000:1000 /tmp/results/r"$cid".jsonl +touch /tmp/results/gst-launch_"$cid".log +chown 1000:1000 /tmp/results/gst-launch_"$cid".log +touch /tmp/results/pipeline"$cid".log +chown 1000:1000 /tmp/results/pipeline"$cid".log cl_cache_dir="/home/pipeline-server/.cl-cache" \ DISPLAY="$DISPLAY" \ diff --git a/src/pipelines/age_recognition.sh b/src/pipelines/age_recognition.sh index 5e2ea7f..49ad012 100755 --- a/src/pipelines/age_recognition.sh +++ b/src/pipelines/age_recognition.sh @@ -7,7 +7,7 @@ PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. 
"" | gvametaaggregate name=aggregate -PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt +PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt CLASSIFICATION_OPTIONS="${CLASSIFICATION_OPTIONS:="reclassify-interval=1 $DETECTION_OPTIONS"}" # Extra Classification model parameters ex. "" | reclassify-interval=1 batch-size=1 nireq=4 gpu-throughput-streams=4 @@ -29,7 +29,7 @@ gstLaunchCmd="gst-launch-1.0 $inputsrc ! $DECODE ! \ gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=$DETECT_MODEL_PATH threshold=.8 device=$DEVICE ! \ gvaclassify batch-size=$BATCH_SIZE model-instance-id=classifier name=recognition model-proc=$CLASS_MODEL_PROC_PATH model=$CLASS_MODEL_PATH device=$DEVICE $CLASSIFICATION_OPTIONS ! \ $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! 
\ -gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)" +gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)" echo "$gstLaunchCmd" diff --git a/src/pipelines/people_detection.sh b/src/pipelines/people_detection.sh index 7fe2742..c4d4d15 100755 --- a/src/pipelines/people_detection.sh +++ b/src/pipelines/people_detection.sh @@ -7,7 +7,7 @@ PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate -PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt +PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt ROI="${ROI:="0,0,400,400"}" @@ -20,7 +20,7 @@ fi echo "decode type $DECODE" echo "Run person-detection pipeline on $DEVICE with batch size = $BATCH_SIZE" -gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvaattachroi roi=$ROI ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/person-detection-0200/FP16-INT8/person-detection-0200.xml threshold=.5 device=$DEVICE $PRE_PROCESS inference-region=1 ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! 
gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)" +gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvaattachroi roi=$ROI ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/person-detection-0200/FP16-INT8/person-detection-0200.xml threshold=.5 device=$DEVICE $PRE_PROCESS inference-region=1 ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)" echo "$gstLaunchCmd" diff --git a/src/pipelines/yolov5s.sh b/src/pipelines/yolov5s.sh index 1668291..5b94c9e 100755 --- a/src/pipelines/yolov5s.sh +++ b/src/pipelines/yolov5s.sh @@ -7,7 +7,7 @@ PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate -PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt +PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt if [ "$RENDER_MODE" == "1" ]; then OUTPUT="${OUTPUT:="! videoconvert ! video/x-raw,format=I420 ! gvawatermark ! videoconvert ! 
fpsdisplaysink video-sink=ximagesink sync=true --verbose"}" @@ -18,7 +18,7 @@ fi echo "decode type $DECODE" echo "Run yolov5s pipeline on $DEVICE with batch size = $BATCH_SIZE" -gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)" +gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)" echo "$gstLaunchCmd" diff --git a/src/pipelines/yolov5s_effnetb0.sh b/src/pipelines/yolov5s_effnetb0.sh index b2e4ab4..162f959 100755 --- a/src/pipelines/yolov5s_effnetb0.sh +++ b/src/pipelines/yolov5s_effnetb0.sh @@ -7,7 +7,7 @@ if [ "$INPUTSRC_TYPE" == "REALSENSE" ]; then # TODO: update with vaapipostproc when MJPEG codec is supported. - echo "Not supported until D436 with MJPEG." 
> /tmp/results/pipeline$cid_count.log + echo "Not supported until D436 with MJPEG." > /tmp/results/pipeline$cid.log exit 2 fi @@ -17,7 +17,7 @@ DETECTION_OPTIONS="${DETECTION_OPTIONS:="gpu-throughput-streams=4 nireq=4 batch- CLASSIFICATION_OPTIONS="${CLASSIFICATION_OPTIONS:="reclassify-interval=1 $DETECTION_OPTIONS"}" # Extra Classification model parameters ex. "" | reclassify-interval=1 batch-size=1 nireq=4 gpu-throughput-streams=4 -PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt +PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt if [ "$RENDER_MODE" == "1" ]; then OUTPUT="${OUTPUT:="! videoconvert ! video/x-raw,format=I420 ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=ximagesink sync=true --verbose"}" @@ -27,7 +27,7 @@ fi echo "Run run yolov5s with efficientnet classification pipeline on $DEVICE with batch size = $BATCH_SIZE" -gstLaunchCmd="gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS $DETECTION_OPTIONS ! gvatrack name=tracking tracking-type=zero-term-imageless ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=0 ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json device=$CLASSIFICATION_DEVICE inference-region=roi-list name=classification $CLASSIFICATION_PRE_PROCESS $CLASSIFICATION_OPTIONS ! 
gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)" +gstLaunchCmd="gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS $DETECTION_OPTIONS ! gvatrack name=tracking tracking-type=zero-term-imageless ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=0 ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json device=$CLASSIFICATION_DEVICE inference-region=roi-list name=classification $CLASSIFICATION_PRE_PROCESS $CLASSIFICATION_OPTIONS ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)" echo "$gstLaunchCmd" diff --git a/src/pipelines/yolov5s_full.sh b/src/pipelines/yolov5s_full.sh index 5389394..215ea97 100755 --- a/src/pipelines/yolov5s_full.sh +++ b/src/pipelines/yolov5s_full.sh @@ -13,7 +13,7 @@ PARALLEL_PIPELINE="${PARALLEL_PIPELINE:=""}" # Run pipeline in parallel using th PARALLEL_AGGRAGATE="${PARALLEL_AGGRAGATE:=""}" # Aggregate parallel pipeline results together ex. "" | ! gvametaaggregate name=aggregate ! 
gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid_count.jsonl ! fpsdisplaysink video-sink=fakesink sync=true --verbose branch. ! queue ! OCR_RECLASSIFY_INTERVAL="${OCR_RECLASSIFY_INTERVAL:=5}" BARCODE_RECLASSIFY_INTERVAL="${BARCODE_RECLASSIFY_INTERVAL:=5}" -PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt +PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt if [ "$RENDER_MODE" == "1" ]; then OUTPUT="${OUTPUT:="! videoconvert ! video/x-raw,format=I420 ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=ximagesink sync=true --verbose"}" @@ -23,7 +23,7 @@ fi echo "Run full Loss Prevention pipeline on $DEVICE with batch size = $BATCH_SIZE" -gstLaunchCmd="gst-launch-1.0 $inputsrc ! decodebin ! gvadetect model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE ! gvatrack name=tracking tracking-type=zero-term-imageless ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json reclassify-interval=1 device=$DEVICE inference-region=roi-list name=classification ! gvapython class=ObjectFilter module=/home/pipeline-server/extensions/tracked_object_filter.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" name=tracked_object_filter ! 
gvadetect model-instance-id=ocr threshold=.40 model=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/FP32/horizontal-text-detection-0002.xml model-proc=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/horizontal-text-detection-0002.json name=text_detection device=CPU inference-region=roi-list ! gvainference model-instance-id=ocr2 device=CPU model=/home/pipeline-server/models/text_recognition/text-recognition-0012/FP32/text-recognition-0012.xml model-proc=/home/pipeline-server/models/text_recognition/text-recognition-0012/text-recognition-0012.json inference-region=roi-list name=text_recognition object-class=text ! gvapython class=OCR module=/home/pipeline-server/extensions/OCR_post_processing_0012.py name=ocr_postprocess ! gvapython name=barcode class=BarcodeDetection module=/home/pipeline-server/extensions/barcode_nv12_to_gray.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)" +gstLaunchCmd="gst-launch-1.0 $inputsrc ! decodebin ! gvadetect model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE ! gvatrack name=tracking tracking-type=zero-term-imageless ! 
gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json reclassify-interval=1 device=$DEVICE inference-region=roi-list name=classification ! gvapython class=ObjectFilter module=/home/pipeline-server/extensions/tracked_object_filter.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" name=tracked_object_filter ! gvadetect model-instance-id=ocr threshold=.40 model=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/FP32/horizontal-text-detection-0002.xml model-proc=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/horizontal-text-detection-0002.json name=text_detection device=CPU inference-region=roi-list ! gvainference model-instance-id=ocr2 device=CPU model=/home/pipeline-server/models/text_recognition/text-recognition-0012/FP32/text-recognition-0012.xml model-proc=/home/pipeline-server/models/text_recognition/text-recognition-0012/text-recognition-0012.json inference-region=roi-list name=text_recognition object-class=text ! gvapython class=OCR module=/home/pipeline-server/extensions/OCR_post_processing_0012.py name=ocr_postprocess ! gvapython name=barcode class=BarcodeDetection module=/home/pipeline-server/extensions/barcode_nv12_to_gray.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" ! gvametaconvert name=metaconvert add-empty-results=true ! 
gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)" echo "$gstLaunchCmd" diff --git a/src/pipelines/yolov8s_roi.sh b/src/pipelines/yolov8s_roi.sh index 8c8c64b..34891f0 100755 --- a/src/pipelines/yolov8s_roi.sh +++ b/src/pipelines/yolov8s_roi.sh @@ -7,7 +7,7 @@ PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate -PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt +PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt CLASS_IDS="46,39,47" # YOLOv8 classes to be detected example "0,1,30" MQTT_HOST="127.0.0.1:1883" @@ -28,7 +28,7 @@ $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! \ gvapython module=/home/pipeline-server/extensions/gva_roi_metadata.py class=RoiMetadata kwarg=\"{\\\"rois\\\": \\\"$ROI\\\"}\" ! \ gvametapublish method=mqtt file-format=2 address="$MQTT_HOST" mqtt-client-id=yolov8 topic=event/detection ! \ queue ! 
\ -gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)" +gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)" echo "$gstLaunchCmd"