diff --git a/Makefile b/Makefile
index ccef652..0896a09 100644
--- a/Makefile
+++ b/Makefile
@@ -90,7 +90,7 @@ build-benchmark:
 	cd performance-tools && $(MAKE) build-benchmark-docker

 benchmark: build-benchmark download-models
-	cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/docker-compose.yml \
+	cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/$(DOCKER_COMPOSE) \
 		--pipeline $(PIPELINE_COUNT) --duration $(BENCHMARK_DURATION) --results_dir $(RESULTS_DIR)
 	# consolidate to show the summary csv
 	@cd performance-tools/benchmark-scripts && ROOT_DIRECTORY=$(RESULTS_DIR) $(MAKE) --no-print-directory consolidate && \
@@ -98,7 +98,7 @@ benchmark: build-benchmark download-models
 	echo "====== Loss prevention benchmark results summary: " && cat $(RESULTS_DIR)/summary.csv

 benchmark-stream-density: build-benchmark download-models
-	cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/docker-compose.yml \
+	cd performance-tools/benchmark-scripts && python benchmark.py --compose_file ../../src/$(DOCKER_COMPOSE) \
 		--target_fps $(TARGET_FPS) --density_increment 1 --results_dir $(RESULTS_DIR)

 clean-benchmark-results:
diff --git a/src/docker-compose-2-clients.yml b/src/docker-compose-2-clients.yml
new file mode 100644
index 0000000..1008c0e
--- /dev/null
+++ b/src/docker-compose-2-clients.yml
@@ -0,0 +1,113 @@
+#
+# Copyright (C) 2024 Intel Corporation.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+
+ ## Current Developer Toolbox doesn't support environment files, make sure to remove any files or environment variables starting with $
+version: '3.7'
+services:
+  camera-simulator:
+    container_name: camera-simulator
+    image: aler9/rtsp-simple-server
+    ports:
+      - "127.0.0.1:8554:8554"
+  camera-simulator0:
+    image: jrottenberg/ffmpeg:4.1-alpine
+    container_name: camera-simulator0
+    network_mode: "host"
+    entrypoint: ["/bin/sh","-c"]
+    command:
+    - |
+      if [ ! -f /home/pipeline-server/sample-media/coca-cola-4465029-1920-15-bench.mp4 ]; then
+         mkdir -p /home/pipeline-server/sample-media
+         wget -O /home/pipeline-server/sample-media/coca-cola-4465029-1920-15-bench.mp4 https://www.pexels.com/download/video/4465029
+      fi
+      ffmpeg -nostdin -re -stream_loop -1 -i /home/pipeline-server/sample-media/coca-cola-4465029-1920-15-bench.mp4 -c copy -f rtsp -rtsp_transport tcp rtsp://localhost:8554/camera_0
+    depends_on:
+      - camera-simulator
+    volumes:
+      - ${RETAIL_USE_CASE_ROOT:-..}/performance-tools/sample-media:/home/pipeline-server/sample-media
+
+  OvmsClientGst1:
+    image: dlstreamer:dev
+    deploy:
+      mode: replicated
+      replicas: ${PIPELINE_COUNT:-1}
+    network_mode: "host"
+    entrypoint: /script/entrypoint.sh --pipeline_script_choice ${PIPELINE_SCRIPT:-yolov5s.sh}
+    privileged: true
+    ipc: "host"
+    env_file:
+      - ./res/gst.env
+      - ${DEVICE_ENV:-res/all-cpu.env}
+    environment:
+      - CONTAINER_NAME=gst1
+      - INPUTSRC=${INPUTSRC:-rtsp://localhost:8554/camera_0}
+      - RENDER_MODE=${RENDER_MODE:-0} #RENDER_MODE=1 will work only after running xhost +local:docker
+      - DISPLAY=$DISPLAY
+      - HTTP_PROXY
+      - HTTPS_PROXY
+      - NO_PROXY
+    volumes:
+      - ${RESULTS_DIR:-../results}:/tmp/results
+      - ~/.Xauthority:/home/dlstreamer/.Xauthority
+      - /tmp/.X11-unix
+      - ~/.cl-cache:/home/pipeline-server/.cl-cache
+      - ./res/:/home/pipeline-server/envs
+      - ${RETAIL_USE_CASE_ROOT:-..}/models:/home/pipeline-server/models
+      - ./pipelines/:/home/pipeline-server/pipelines
+      - ./extensions/:/home/pipeline-server/extensions
+
+  OvmsClientGst2:
+    image: dlstreamer:dev
+    deploy:
+      mode: replicated
+      replicas: ${PIPELINE_COUNT:-1}
+    network_mode: "host"
+    entrypoint: /script/entrypoint.sh --pipeline_script_choice ${PIPELINE_SCRIPT:-yolov8s_roi.sh}
+    privileged: true
+    ipc: "host"
+    env_file:
+      - ./res/gst.env
+      - ${DEVICE_ENV:-res/all-cpu.env}
+    environment:
+      - CONTAINER_NAME=gst2
+      - INPUTSRC=${INPUTSRC:-rtsp://localhost:8554/camera_0}
+      - RENDER_MODE=${RENDER_MODE:-0} #RENDER_MODE=1 will work only after running xhost +local:docker
+      - DISPLAY=$DISPLAY
+      - HTTP_PROXY
+      - HTTPS_PROXY
+      - NO_PROXY
+    volumes:
+      - ${RESULTS_DIR:-../results}:/tmp/results
+      - ~/.Xauthority:/home/dlstreamer/.Xauthority
+      - /tmp/.X11-unix
+      - ~/.cl-cache:/home/pipeline-server/.cl-cache
+      - ./res/:/home/pipeline-server/envs
+      - ${RETAIL_USE_CASE_ROOT:-..}/models:/home/pipeline-server/models
+      - ./pipelines/:/home/pipeline-server/pipelines
+      - ./extensions/:/home/pipeline-server/extensions
+
+  mosquitto:
+    image: eclipse-mosquitto:2.0
+    container_name: mosquitto
+    network_mode: "host"
+    ports:
+      - "127.0.0.1:1883:1883"
+    depends_on:
+      - OvmsClientGst1
+      - OvmsClientGst2
+
+  mqtt_tracker:
+    image: loss-prevention:dev
+    network_mode: "host"
+    environment:
+      - MQTT_URL=127.0.0.1
+      - MQTT_PORT=1883
+      - MQTT_TOPIC=event/detection
+      - ROI_NAME=BASKET
+    restart: always
+    depends_on:
+      - mosquitto
\ No newline at end of file
diff --git a/src/entrypoint.sh b/src/entrypoint.sh
index 194323b..31e754c 100755
--- a/src/entrypoint.sh
+++ b/src/entrypoint.sh
@@ -19,6 +19,7 @@ checkBatchSize() {
 }

 cid_count="${cid_count:=0}"
+CONTAINER_NAME="${CONTAINER_NAME:=gst}"
 cameras="${cameras:=}"
 stream_density_mount="${stream_density_mount:=}"
 stream_density_params="${stream_density_params:=}"
@@ -125,14 +126,15 @@ fi

 # generate unique container id based on the date with the precision upto nano-seconds
 cid=$(date +%Y%m%d%H%M%S%N)
+cid="${cid}"_${CONTAINER_NAME}
 echo "cid: $cid"

-touch /tmp/results/r"$cid"_gst.jsonl
-chown 1000:1000 /tmp/results/r"$cid"_gst.jsonl
-touch /tmp/results/gst-launch_"$cid"_gst.log
-chown 1000:1000 /tmp/results/gst-launch_"$cid"_gst.log
-touch /tmp/results/pipeline"$cid"_gst.log
-chown 1000:1000 /tmp/results/pipeline"$cid"_gst.log
+touch /tmp/results/r"$cid".jsonl
+chown 1000:1000 /tmp/results/r"$cid".jsonl
+touch /tmp/results/gst-launch_"$cid".log
+chown 1000:1000 /tmp/results/gst-launch_"$cid".log
+touch /tmp/results/pipeline"$cid".log
+chown 1000:1000 /tmp/results/pipeline"$cid".log

 cl_cache_dir="/home/pipeline-server/.cl-cache" \
 DISPLAY="$DISPLAY" \
diff --git a/src/pipelines/age_recognition.sh b/src/pipelines/age_recognition.sh
index 5e2ea7f..49ad012 100755
--- a/src/pipelines/age_recognition.sh
+++ b/src/pipelines/age_recognition.sh
@@ -7,7 +7,7 @@

 PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1
 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate
-PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt
+PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt

 CLASSIFICATION_OPTIONS="${CLASSIFICATION_OPTIONS:="reclassify-interval=1 $DETECTION_OPTIONS"}" # Extra Classification model parameters ex. "" | reclassify-interval=1 batch-size=1 nireq=4 gpu-throughput-streams=4

@@ -29,7 +29,7 @@ gstLaunchCmd="gst-launch-1.0 $inputsrc ! $DECODE ! \
 gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=$DETECT_MODEL_PATH threshold=.8 device=$DEVICE ! \
 gvaclassify batch-size=$BATCH_SIZE model-instance-id=classifier name=recognition model-proc=$CLASS_MODEL_PROC_PATH model=$CLASS_MODEL_PATH device=$DEVICE $CLASSIFICATION_OPTIONS ! \
 $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! \
-gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)"
+gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)"

 echo "$gstLaunchCmd"
diff --git a/src/pipelines/people_detection.sh b/src/pipelines/people_detection.sh
index 7fe2742..c4d4d15 100755
--- a/src/pipelines/people_detection.sh
+++ b/src/pipelines/people_detection.sh
@@ -7,7 +7,7 @@

 PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1
 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate
-PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt
+PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt

 ROI="${ROI:="0,0,400,400"}"

@@ -20,7 +20,7 @@ fi

 echo "decode type $DECODE"
 echo "Run person-detection pipeline on $DEVICE with batch size = $BATCH_SIZE"

-gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvaattachroi roi=$ROI ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/person-detection-0200/FP16-INT8/person-detection-0200.xml threshold=.5 device=$DEVICE $PRE_PROCESS inference-region=1 ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)"
+gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvaattachroi roi=$ROI ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/person-detection-0200/FP16-INT8/person-detection-0200.xml threshold=.5 device=$DEVICE $PRE_PROCESS inference-region=1 ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)"

 echo "$gstLaunchCmd"
diff --git a/src/pipelines/yolov5s.sh b/src/pipelines/yolov5s.sh
index 1668291..5b94c9e 100755
--- a/src/pipelines/yolov5s.sh
+++ b/src/pipelines/yolov5s.sh
@@ -7,7 +7,7 @@

 PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1
 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate
-PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt
+PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt

 if [ "$RENDER_MODE" == "1" ]; then
 	OUTPUT="${OUTPUT:="! videoconvert ! video/x-raw,format=I420 ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=ximagesink sync=true --verbose"}"
@@ -18,7 +18,7 @@ fi

 echo "decode type $DECODE"
 echo "Run yolov5s pipeline on $DEVICE with batch size = $BATCH_SIZE"

-gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)"
+gstLaunchCmd="GST_DEBUG=\"GST_TRACER:7\" GST_TRACERS=\"latency_tracer(flags=pipeline,interval=100)\" gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS ! $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)"

 echo "$gstLaunchCmd"
diff --git a/src/pipelines/yolov5s_effnetb0.sh b/src/pipelines/yolov5s_effnetb0.sh
index b2e4ab4..162f959 100755
--- a/src/pipelines/yolov5s_effnetb0.sh
+++ b/src/pipelines/yolov5s_effnetb0.sh
@@ -7,7 +7,7 @@

 if [ "$INPUTSRC_TYPE" == "REALSENSE" ]; then
 	# TODO: update with vaapipostproc when MJPEG codec is supported.
-	echo "Not supported until D436 with MJPEG." > /tmp/results/pipeline$cid_count.log
+	echo "Not supported until D436 with MJPEG." > /tmp/results/pipeline$cid.log
 	exit 2
 fi

@@ -17,7 +17,7 @@ DETECTION_OPTIONS="${DETECTION_OPTIONS:="gpu-throughput-streams=4 nireq=4 batch-

 CLASSIFICATION_OPTIONS="${CLASSIFICATION_OPTIONS:="reclassify-interval=1 $DETECTION_OPTIONS"}" # Extra Classification model parameters ex. "" | reclassify-interval=1 batch-size=1 nireq=4 gpu-throughput-streams=4

-PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt
+PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt

 if [ "$RENDER_MODE" == "1" ]; then
 	OUTPUT="${OUTPUT:="! videoconvert ! video/x-raw,format=I420 ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=ximagesink sync=true --verbose"}"
@@ -27,7 +27,7 @@ fi

 echo "Run run yolov5s with efficientnet classification pipeline on $DEVICE with batch size = $BATCH_SIZE"

-gstLaunchCmd="gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS $DETECTION_OPTIONS ! gvatrack name=tracking tracking-type=zero-term-imageless ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=0 ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json device=$CLASSIFICATION_DEVICE inference-region=roi-list name=classification $CLASSIFICATION_PRE_PROCESS $CLASSIFICATION_OPTIONS ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)"
+gstLaunchCmd="gst-launch-1.0 $inputsrc ! $DECODE ! gvadetect batch-size=$BATCH_SIZE model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE $PRE_PROCESS $DETECTION_OPTIONS ! gvatrack name=tracking tracking-type=zero-term-imageless ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=0 ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json device=$CLASSIFICATION_DEVICE inference-region=roi-list name=classification $CLASSIFICATION_PRE_PROCESS $CLASSIFICATION_OPTIONS ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)"

 echo "$gstLaunchCmd"
diff --git a/src/pipelines/yolov5s_full.sh b/src/pipelines/yolov5s_full.sh
index 5389394..215ea97 100755
--- a/src/pipelines/yolov5s_full.sh
+++ b/src/pipelines/yolov5s_full.sh
@@ -13,7 +13,7 @@ PARALLEL_PIPELINE="${PARALLEL_PIPELINE:=""}" # Run pipeline in parallel using th
 PARALLEL_AGGRAGATE="${PARALLEL_AGGRAGATE:=""}" # Aggregate parallel pipeline results together ex. "" | ! gvametaaggregate name=aggregate ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid_count.jsonl ! fpsdisplaysink video-sink=fakesink sync=true --verbose branch. ! queue !
 OCR_RECLASSIFY_INTERVAL="${OCR_RECLASSIFY_INTERVAL:=5}"
 BARCODE_RECLASSIFY_INTERVAL="${BARCODE_RECLASSIFY_INTERVAL:=5}"
-PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt
+PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt

 if [ "$RENDER_MODE" == "1" ]; then
 	OUTPUT="${OUTPUT:="! videoconvert ! video/x-raw,format=I420 ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=ximagesink sync=true --verbose"}"
@@ -23,7 +23,7 @@ fi

 echo "Run full Loss Prevention pipeline on $DEVICE with batch size = $BATCH_SIZE"

-gstLaunchCmd="gst-launch-1.0 $inputsrc ! decodebin ! gvadetect model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE ! gvatrack name=tracking tracking-type=zero-term-imageless ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json reclassify-interval=1 device=$DEVICE inference-region=roi-list name=classification ! gvapython class=ObjectFilter module=/home/pipeline-server/extensions/tracked_object_filter.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" name=tracked_object_filter ! gvadetect model-instance-id=ocr threshold=.40 model=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/FP32/horizontal-text-detection-0002.xml model-proc=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/horizontal-text-detection-0002.json name=text_detection device=CPU inference-region=roi-list ! gvainference model-instance-id=ocr2 device=CPU model=/home/pipeline-server/models/text_recognition/text-recognition-0012/FP32/text-recognition-0012.xml model-proc=/home/pipeline-server/models/text_recognition/text-recognition-0012/text-recognition-0012.json inference-region=roi-list name=text_recognition object-class=text ! gvapython class=OCR module=/home/pipeline-server/extensions/OCR_post_processing_0012.py name=ocr_postprocess ! gvapython name=barcode class=BarcodeDetection module=/home/pipeline-server/extensions/barcode_nv12_to_gray.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)"
+gstLaunchCmd="gst-launch-1.0 $inputsrc ! decodebin ! gvadetect model-instance-id=odmodel name=detection model=/home/pipeline-server/models/object_detection/yolov5s/FP16-INT8/yolov5s.xml model-proc=/home/pipeline-server/models/object_detection/yolov5s/yolov5s.json threshold=.5 device=$DEVICE ! gvatrack name=tracking tracking-type=zero-term-imageless ! gvaclassify model-instance-id=clasifier labels=/home/pipeline-server/models/object_classification/efficientnet-b0/imagenet_2012.txt model=/home/pipeline-server/models/object_classification/efficientnet-b0/FP32/efficientnet-b0.xml model-proc=/home/pipeline-server/models/object_classification/efficientnet-b0/efficientnet-b0.json reclassify-interval=1 device=$DEVICE inference-region=roi-list name=classification ! gvapython class=ObjectFilter module=/home/pipeline-server/extensions/tracked_object_filter.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" name=tracked_object_filter ! gvadetect model-instance-id=ocr threshold=.40 model=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/FP32/horizontal-text-detection-0002.xml model-proc=/home/pipeline-server/models/text_detection/horizontal-text-detection-0002/horizontal-text-detection-0002.json name=text_detection device=CPU inference-region=roi-list ! gvainference model-instance-id=ocr2 device=CPU model=/home/pipeline-server/models/text_recognition/text-recognition-0012/FP32/text-recognition-0012.xml model-proc=/home/pipeline-server/models/text_recognition/text-recognition-0012/text-recognition-0012.json inference-region=roi-list name=text_recognition object-class=text ! gvapython class=OCR module=/home/pipeline-server/extensions/OCR_post_processing_0012.py name=ocr_postprocess ! gvapython name=barcode class=BarcodeDetection module=/home/pipeline-server/extensions/barcode_nv12_to_gray.py kwarg=\"{\\\"reclassify_interval\\\": $BARCODE_RECLASSIFY_INTERVAL}\" ! gvametaconvert name=metaconvert add-empty-results=true ! gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)"

 echo "$gstLaunchCmd"
diff --git a/src/pipelines/yolov8s_roi.sh b/src/pipelines/yolov8s_roi.sh
index e399749..602931f 100755
--- a/src/pipelines/yolov8s_roi.sh
+++ b/src/pipelines/yolov8s_roi.sh
@@ -7,7 +7,7 @@

 PRE_PROCESS="${PRE_PROCESS:=""}" #""|pre-process-backend=vaapi-surface-sharing|pre-process-backend=vaapi-surface-sharing pre-process-config=VAAPI_FAST_SCALE_LOAD_FACTOR=1
 AGGREGATE="${AGGREGATE:="gvametaaggregate name=aggregate !"}" # Aggregate function at the end of the pipeline ex. "" | gvametaaggregate name=aggregate
-PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt
+PUBLISH="${PUBLISH:="name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl"}" # address=localhost:1883 topic=inferenceEvent method=mqtt

 CLASS_IDS="46,39,47" # YOLOv8 classes to be detected example "0,1,30"
 MQTT_HOST="127.0.0.1:1883"
@@ -28,7 +28,7 @@ $AGGREGATE gvametaconvert name=metaconvert add-empty-results=true ! \
 gvapython module=/home/pipeline-server/extensions/gva_roi_metadata.py class=RoiMetadata kwarg=\"{\\\"roi\\\": \\\"$ROI\\\"}\" ! \
 gvametapublish method=mqtt file-format=2 address="$MQTT_HOST" mqtt-client-id=yolov8 topic=event/detection ! \
 queue ! \
-gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid\"_gst\".jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid\"_gst\".log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid\"_gst\".log)"
+gvametapublish name=destination file-format=2 file-path=/tmp/results/r$cid.jsonl $OUTPUT 2>&1 | tee >/tmp/results/gst-launch_$cid.log >(stdbuf -oL sed -n -e 's/^.*current: //p' | stdbuf -oL cut -d , -f 1 > /tmp/results/pipeline$cid.log)"

 echo "$gstLaunchCmd"
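
Usage sketch (illustrative, not part of the patch): the benchmark targets now read the compose file name from a DOCKER_COMPOSE make variable, and each pipeline container appends CONTAINER_NAME (gst1/gst2 in the two-client compose file) to its cid, so the two clients write separate r<cid>.jsonl, gst-launch_<cid>.log and pipeline<cid>.log files instead of overwriting each other's results. Assuming the Makefile defines DOCKER_COMPOSE with a default of docker-compose.yml (that definition is outside this diff) and the variable is overridable on the command line, the new setup would be exercised like this:

# unchanged default behavior: single-client compose file
make benchmark PIPELINE_COUNT=1

# two-client setup (yolov5s + yolov8s_roi with MQTT tracking);
# DOCKER_COMPOSE is assumed to be resolved relative to src/
make benchmark DOCKER_COMPOSE=docker-compose-2-clients.yml PIPELINE_COUNT=1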