From de9a09c454cbcb0c301b9fe4699b7c08c6a1f188 Mon Sep 17 00:00:00 2001
From: Erol444
Date: Sun, 21 Mar 2021 17:17:23 +0100
Subject: [PATCH 1/5] added 28_camera_video_example demo, docs and test

---
 .../samples/28_camera_video_example.rst | 30 +++++++++++++++
 examples/28_camera_video_example.py     | 37 +++++++++++++++++++
 examples/CMakeLists.txt                 |  1 +
 3 files changed, 68 insertions(+)
 create mode 100644 docs/source/samples/28_camera_video_example.rst
 create mode 100644 examples/28_camera_video_example.py

diff --git a/docs/source/samples/28_camera_video_example.rst b/docs/source/samples/28_camera_video_example.rst
new file mode 100644
index 000000000..51d161676
--- /dev/null
+++ b/docs/source/samples/28_camera_video_example.rst
@@ -0,0 +1,30 @@
+28 - Camera video high resolution
+=========================
+
+This example shows how to use high resolution video at low latency. Compared to `01_rbg_preview`, this demo outputs NV12 frames whereas
+preview frames are BGR and are not suited for larger resoulution (eg. 2000x1000). Prevuew is more suitable for either NN or visualization purposes.
+
+Setup
+#####
+
+Please run the following command to install the required dependencies
+
+
+.. code-block:: bash
+   :substitutions:
+
+   python3 -m pip install --extra-index-url https://artifacts.luxonis.com/artifactory/luxonis-python-snapshot-local/ depthai==|release|
+
+
+For additional information, please follow :ref:`Python API installation guide `
+
+Source code
+###########
+
+Also `available on GitHub `__
+
+.. literalinclude:: ../../../examples/28_camera_video_example.py
+   :language: python
+   :linenos:
+
+.. include:: /includes/footer-short.rst
\ No newline at end of file
diff --git a/examples/28_camera_video_example.py b/examples/28_camera_video_example.py
new file mode 100644
index 000000000..d86d2ae8d
--- /dev/null
+++ b/examples/28_camera_video_example.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+
+import cv2
+import depthai as dai
+import numpy as np
+
+# Start defining a pipeline
+pipeline = dai.Pipeline()
+
+# Define a source - color camera
+colorCam = pipeline.createColorCamera()
+colorCam.setBoardSocket(dai.CameraBoardSocket.RGB)
+colorCam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
+colorCam.setVideoSize(2000, 1000)
+
+# Create output
+xoutVideo = pipeline.createXLinkOut()
+xoutVideo.setStreamName("video")
+
+colorCam.video.link(xoutVideo.input)
+
+# Pipeline defined, now the device is connected to
+with dai.Device(pipeline) as device:
+    # Start pipeline
+    device.startPipeline()
+    video = device.getOutputQueue(name="video", maxSize=1, blocking=False)
+
+    while True:
+        # Get preview and video frames
+        videoIn = video.get()
+
+        # Get BGR frame from NV12 encoded video frame to show with opencv
+        # Visualizing the frame on slower hosts might have overhead
+        cv2.imshow("video", videoIn.getCvFrame())
+
+        if cv2.waitKey(1) == ord('q'):
+            break
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index e7f4c41ee..93ff2e6b7 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -136,3 +136,4 @@ add_python_example(26_2_spatial_mobilenet_mono 26_2_spatial_mobilenet_mono.py "$
 add_python_example(26_3_spatial_tiny_yolo_v3 26_3_spatial_tiny_yolo.py "${tiny_yolo_v3_blob}")
 add_python_example(26_3_spatial_tiny_yolo_v4 26_3_spatial_tiny_yolo.py "${tiny_yolo_v4_blob}")
 add_python_example(27_spatial_location_calculator 27_spatial_location_calculator.py)
+add_python_example(28_camera_video_example 28_camera_video_example.py)

From 875e90fc3c7158e391c4cda227082ccac122e381 Mon Sep 17 00:00:00 2001
From: Erol444
Date: Sun, 21 Mar 2021 17:19:27 +0100
Subject: [PATCH 2/5] added example to docs menu

---
 docs/source/index.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index cc5c92d12..ccf74e555 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -100,6 +100,7 @@ Now, pick a tutorial or code sample and start utilizing Gen2 capabilities
    samples/26_2_spatial_mobilenet_mono.rst
    samples/26_3_spatial_tiny_yolo.rst
    samples/27_spatial_location_calculator.rst
+   samples/28_camera_video_example.rst
 
 .. toctree::
    :maxdepth: 1

From dbd62036fafc9bc02a2d73bd6797a436837220f6 Mon Sep 17 00:00:00 2001
From: Erol444
Date: Sun, 21 Mar 2021 22:18:43 +0100
Subject: [PATCH 3/5] referenced demo `01 - RGB Preview`, additional === (to
 not throw warning), replaced setup with install_from_pypi.rst

---
 docs/source/samples/28_camera_video_example.rst | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/docs/source/samples/28_camera_video_example.rst b/docs/source/samples/28_camera_video_example.rst
index 51d161676..7b466273e 100644
--- a/docs/source/samples/28_camera_video_example.rst
+++ b/docs/source/samples/28_camera_video_example.rst
@@ -1,22 +1,13 @@
 28 - Camera video high resolution
-=========================
+=================================
 
-This example shows how to use high resolution video at low latency. Compared to `01_rbg_preview`, this demo outputs NV12 frames whereas
+This example shows how to use high resolution video at low latency. Compared to :ref:`01 - RGB Preview`, this demo outputs NV12 frames whereas
 preview frames are BGR and are not suited for larger resoulution (eg. 2000x1000). Prevuew is more suitable for either NN or visualization purposes.
 
 Setup
 #####
 
-Please run the following command to install the required dependencies
-
-
-.. code-block:: bash
-   :substitutions:
-
-   python3 -m pip install --extra-index-url https://artifacts.luxonis.com/artifactory/luxonis-python-snapshot-local/ depthai==|release|
-
-
-For additional information, please follow :ref:`Python API installation guide `
+.. include:: /includes/install_from_pypi.rst
 
 Source code
 ###########

From b332dfa03f6bb7247ead1405ec99395de3e8e41a Mon Sep 17 00:00:00 2001
From: Erol444
Date: Tue, 23 Mar 2021 21:52:17 +0100
Subject: [PATCH 4/5] fixed camera video example to 1920*1080 and 1080p res.
 Fixed typo "preview" and tiny_yolo in index.rst

---
 docs/source/index.rst                           | 4 ++--
 docs/source/samples/28_camera_video_example.rst | 2 +-
 examples/28_camera_video_example.py             | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index ccf74e555..b073c9edd 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -91,8 +91,8 @@ Now, pick a tutorial or code sample and start utilizing Gen2 capabilities
    samples/17_video_mobilenet.rst
    samples/18_rgb_encoding_mobilenet.rst
    samples/21_mobilenet_decoding_on_device.rst
-   samples/22_1_tiny_tolo_v3_decoding_on_device.rst
-   samples/22_2_tiny_tolo_v4_decoding_on_device.rst
+   samples/22_1_tiny_yolo_v3_decoding_on_device.rst
+   samples/22_2_tiny_yolo_v4_decoding_on_device.rst
    samples/23_autoexposure_roi.rst
    samples/24_opencv_support.rst
    samples/25_system_information.rst
diff --git a/docs/source/samples/28_camera_video_example.rst b/docs/source/samples/28_camera_video_example.rst
index 7b466273e..2eb08a02d 100644
--- a/docs/source/samples/28_camera_video_example.rst
+++ b/docs/source/samples/28_camera_video_example.rst
@@ -2,7 +2,7 @@
 =================================
 
 This example shows how to use high resolution video at low latency. Compared to :ref:`01 - RGB Preview`, this demo outputs NV12 frames whereas
-preview frames are BGR and are not suited for larger resoulution (eg. 2000x1000). Prevuew is more suitable for either NN or visualization purposes.
+preview frames are BGR and are not suited for larger resoulution (eg. 2000x1000). Preview is more suitable for either NN or visualization purposes.
 
 Setup
 #####
diff --git a/examples/28_camera_video_example.py b/examples/28_camera_video_example.py
index d86d2ae8d..34c4c3513 100644
--- a/examples/28_camera_video_example.py
+++ b/examples/28_camera_video_example.py
@@ -10,8 +10,8 @@
 # Define a source - color camera
 colorCam = pipeline.createColorCamera()
 colorCam.setBoardSocket(dai.CameraBoardSocket.RGB)
-colorCam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
-colorCam.setVideoSize(2000, 1000)
+colorCam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
+colorCam.setVideoSize(1920, 1080)
 
 # Create output
 xoutVideo = pipeline.createXLinkOut()

From 48c09e1d4842db2c9edda6229e5968a5e88d5867 Mon Sep 17 00:00:00 2001
From: Erol444
Date: Tue, 23 Mar 2021 22:05:07 +0100
Subject: [PATCH 5/5] fix tiny yolo v3 numbering in docs

---
 docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst b/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst
index 5844603d2..b8609f35e 100644
--- a/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst
+++ b/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst
@@ -1,4 +1,4 @@
-21 - RGB & TinyYoloV3 decoding on device
+22.1 - RGB & TinyYoloV3 decoding on device
 ==========================================
 
 This example shows how to run TinyYoloV3 on the RGB input frame, and how to display both the RGB