diff --git a/docs/source/index.rst b/docs/source/index.rst index cc5c92d12..b073c9edd 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -91,8 +91,8 @@ Now, pick a tutorial or code sample and start utilizing Gen2 capabilities samples/17_video_mobilenet.rst samples/18_rgb_encoding_mobilenet.rst samples/21_mobilenet_decoding_on_device.rst - samples/22_1_tiny_tolo_v3_decoding_on_device.rst - samples/22_2_tiny_tolo_v4_decoding_on_device.rst + samples/22_1_tiny_yolo_v3_decoding_on_device.rst + samples/22_2_tiny_yolo_v4_decoding_on_device.rst samples/23_autoexposure_roi.rst samples/24_opencv_support.rst samples/25_system_information.rst @@ -100,6 +100,7 @@ Now, pick a tutorial or code sample and start utilizing Gen2 capabilities samples/26_2_spatial_mobilenet_mono.rst samples/26_3_spatial_tiny_yolo.rst samples/27_spatial_location_calculator.rst + samples/28_camera_video_example.rst .. toctree:: :maxdepth: 1 diff --git a/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst b/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst index 5844603d2..b8609f35e 100644 --- a/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst +++ b/docs/source/samples/22_1_tiny_yolo_v3_decoding_on_device.rst @@ -1,4 +1,4 @@ -21 - RGB & TinyYoloV3 decoding on device +22.1 - RGB & TinyYoloV3 decoding on device ========================================== This example shows how to run TinyYoloV3 on the RGB input frame, and how to display both the RGB diff --git a/docs/source/samples/28_camera_video_example.rst b/docs/source/samples/28_camera_video_example.rst new file mode 100644 index 000000000..2eb08a02d --- /dev/null +++ b/docs/source/samples/28_camera_video_example.rst @@ -0,0 +1,21 @@ +28 - Camera video high resolution +================================= + +This example shows how to use high resolution video at low latency. 
Compared to :ref:`01 - RGB Preview`, this demo outputs NV12 frames whereas +preview frames are BGR and are not suited for larger resolution (e.g. 2000x1000). Preview is more suitable for either NN or visualization purposes. + +Setup +##### + +.. include:: /includes/install_from_pypi.rst + +Source code +########### + +Also `available on GitHub `__ + +.. literalinclude:: ../../../examples/28_camera_video_example.py + :language: python + :linenos: + +.. include:: /includes/footer-short.rst \ No newline at end of file diff --git a/examples/28_camera_video_example.py b/examples/28_camera_video_example.py new file mode 100644 index 000000000..34c4c3513 --- /dev/null +++ b/examples/28_camera_video_example.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +import cv2 +import depthai as dai +import numpy as np + +# Start defining a pipeline +pipeline = dai.Pipeline() + +# Define a source - color camera +colorCam = pipeline.createColorCamera() +colorCam.setBoardSocket(dai.CameraBoardSocket.RGB) +colorCam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P) +colorCam.setVideoSize(1920, 1080) + +# Create output +xoutVideo = pipeline.createXLinkOut() +xoutVideo.setStreamName("video") + +colorCam.video.link(xoutVideo.input) + +# Pipeline defined, now the device is connected to +with dai.Device(pipeline) as device: + # Start pipeline + device.startPipeline() + video = device.getOutputQueue(name="video", maxSize=1, blocking=False) + + while True: + # Get the latest video frame from the queue + videoIn = video.get() + + # Get BGR frame from NV12 encoded video frame to show with opencv + # Visualizing the frame on slower hosts might have overhead + cv2.imshow("video", videoIn.getCvFrame()) + + if cv2.waitKey(1) == ord('q'): + break diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index e7f4c41ee..93ff2e6b7 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -136,3 +136,4 @@ add_python_example(26_2_spatial_mobilenet_mono 26_2_spatial_mobilenet_mono.py "
"$ add_python_example(26_3_spatial_tiny_yolo_v3 26_3_spatial_tiny_yolo.py "${tiny_yolo_v3_blob}") add_python_example(26_3_spatial_tiny_yolo_v4 26_3_spatial_tiny_yolo.py "${tiny_yolo_v4_blob}") add_python_example(27_spatial_location_calculator 27_spatial_location_calculator.py) +add_python_example(28_camera_video_example 28_camera_video_example.py)