Add support for Hikvision cameras (#229)

* hikvision playground

* hikvision: only receive frames

* Update BetaCameras/Hikvision/Hikvision.cs

Co-Authored-By: sisiplac <simon.placht@metrilus.de>

* Update BetaCameras/Hikvision/Hikvision.cs

Co-Authored-By: sisiplac <simon.placht@metrilus.de>

* Update BetaCameras/Hikvision/Hikvision.cs

Co-Authored-By: sisiplac <simon.placht@metrilus.de>

* Update BetaCameras/Hikvision/Hikvision.cs

Co-Authored-By: sisiplac <simon.placht@metrilus.de>

* remove any cpu build config

* debug

Co-authored-by: sisiplac <simon.placht@metrilus.de>
jangernert and sisiplac committed Jan 30, 2020
1 parent 3f23751 commit 577b04524248c59f9ebadad1c1a71ae5a1074b48
BetaCameras/Hikvision/H264Decoder.cs
@@ -0,0 +1,213 @@
using FFmpeg.AutoGen;
using Metrilus.Util;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

namespace MetriCam2.Cameras
{
    public class H264Decoder
    {
        //private static bool initialized = false;
        private static unsafe AVFormatContext* ic = null;
        private static unsafe AVStream* video_st = null;
        private static unsafe SwsContext* img_convert_ctx = null;
        private static unsafe AVFrame* yuv_image = null;
        private static unsafe AVFrame* rgb_image = null;
        private static byte[] startSequence = { 0x00, 0x00, 0x00, 0x01 }; // Annex B NAL unit start code

        //private avio_alloc_context_read_packet readCallback;

        static H264Decoder()
        {
        }

        public unsafe H264Decoder(List<byte[]> nalUnits)
        {
            //if (initialized)
            //{
            //    //Sequence of freeing all ffmpeg resources and calling the constructor again caused an error in avcodec_send_packet in rare cases.
            //    //Thus, the init sequence can only be called once.
            //    //Important: Avoiding the re-init only works if the first compressed packet contains a key frame.
            //    //This should be guaranteed, since the H264 encoding on the server side is re-initialized, too.
            //    return;
            //}

            //initialized = true;

            ffmpeg.av_register_all();

            // Total size of all NAL units, each prefixed with the 4-byte Annex B start code.
            int dataSize = 0;
            foreach (byte[] nalUnit in nalUnits)
            {
                dataSize += nalUnit.Length + startSequence.Length;
            }

            byte* dat = (byte*)ffmpeg.av_malloc((ulong)dataSize);

            // Concatenate all NAL units into one Annex B byte stream.
            fixed (byte* start = startSequence)
            {
                foreach (byte[] nalUnit in nalUnits)
                {
                    fixed (byte* dataPtr = nalUnit)
                    {
                        UnmanagedMemory.CopyMemory(dat, start, (uint)startSequence.Length);
                        dat += startSequence.Length;
                        UnmanagedMemory.CopyMemory(dat, dataPtr, (uint)nalUnit.Length);
                        dat += nalUnit.Length;
                    }
                }

                dat -= dataSize; // rewind to the beginning of the buffer
            }

            AVFormatContext* icLocal = ffmpeg.avformat_alloc_context();

            ic = icLocal;

            avio_alloc_context_write_packet_func writeCallback;
            writeCallback.Pointer = IntPtr.Zero;
            avio_alloc_context_seek_func seekCallback;
            seekCallback.Pointer = IntPtr.Zero;
            avio_alloc_context_read_packet_func readCallback;
            readCallback.Pointer = IntPtr.Zero;

            // The buffer size passed to avio_alloc_context must match the allocated buffer, i.e. dataSize.
            icLocal->pb = ffmpeg.avio_alloc_context(dat, dataSize, 0, null, readCallback, writeCallback, seekCallback);

            if (icLocal->pb == null)
            {
                throw new Exception("Failed to allocate ffmpeg I/O context.");
            }

            // Need to probe the buffer for the input format unless you already know it.
            AVProbeData probe_data;
            probe_data.buf_size = dataSize;
            probe_data.filename = (byte*)Marshal.StringToHGlobalAnsi("stream");
            probe_data.buf = (byte*)UnmanagedMemory.Alloc(probe_data.buf_size);
            UnmanagedMemory.CopyMemory(probe_data.buf, dat, (uint)probe_data.buf_size);

            AVInputFormat* pAVInputFormat = ffmpeg.av_probe_input_format(&probe_data, 1);

            if (pAVInputFormat == null)
            {
                pAVInputFormat = ffmpeg.av_probe_input_format(&probe_data, 0);
            }

            // cleanup
            UnmanagedMemory.DeAlloc((IntPtr)probe_data.buf, probe_data.buf_size);
            probe_data.buf = null;

            pAVInputFormat->flags |= ffmpeg.AVFMT_NOFILE;

            ffmpeg.avformat_open_input(&icLocal, "stream", pAVInputFormat, null);

            // Find the video stream and open a decoder for it.
            for (int i = 0; i < icLocal->nb_streams; i++)
            {
                AVCodecContext* enc = icLocal->streams[i]->codec;

                if (AVMediaType.AVMEDIA_TYPE_VIDEO == enc->codec_type)
                {
                    AVCodec* codec = ffmpeg.avcodec_find_decoder(enc->codec_id);

                    if (codec == null || ffmpeg.avcodec_open2(enc, codec, null) < 0)
                    {
                        // Fail loudly instead of silently continuing with an unopened codec.
                        throw new Exception("Cannot find or open a decoder for codec id " + enc->codec_id.ToString());
                    }

                    video_st = icLocal->streams[i];
                }
            }

            // Init picture
            yuv_image = ffmpeg.av_frame_alloc();
            yuv_image->format = -1; // We do not know the format of the raw decoded image yet.
        }

        ~H264Decoder()
        {
            // Intentionally left empty; see the note about re-initialization in the constructor.
        }

        /// <summary>
        /// Provide the compressed NAL units for the next frame and get the decoded image of the current frame.
        /// </summary>
        /// <param name="nalUnits">NAL units of one compressed frame, without Annex B start codes.</param>
        /// <returns>The decoded frame as a 24bpp BGR bitmap.</returns>
        public unsafe Bitmap Update(List<byte[]> nalUnits)
        {
            AVPacket packet;

            int dataSize = 0;
            foreach (byte[] nalUnit in nalUnits)
            {
                dataSize += nalUnit.Length + startSequence.Length;
            }

            // av_packet_from_data expects an av_malloc'ed buffer with AV_INPUT_BUFFER_PADDING_SIZE padding bytes at the end.
            byte* dat = (byte*)ffmpeg.av_malloc((ulong)(dataSize + ffmpeg.AV_INPUT_BUFFER_PADDING_SIZE));

            // Concatenate all NAL units into one Annex B byte stream (same layout as in the constructor).
            fixed (byte* start = startSequence)
            {
                foreach (byte[] nalUnit in nalUnits)
                {
                    fixed (byte* dataPtr = nalUnit)
                    {
                        UnmanagedMemory.CopyMemory(dat, start, (uint)startSequence.Length);
                        dat += startSequence.Length;
                        UnmanagedMemory.CopyMemory(dat, dataPtr, (uint)nalUnit.Length);
                        dat += nalUnit.Length;
                    }
                }

                dat -= dataSize; // rewind to the beginning of the buffer
            }

            ffmpeg.av_packet_from_data(&packet, dat, dataSize);

            if (rgb_image != null)
            {
                GC.KeepAlive(nalUnits);
            }

            int ret = ffmpeg.avcodec_send_packet(video_st->codec, &packet);

            if (ret != 0)
            {
                throw new Exception("Error in avcodec_send_packet. Error code: " + ret.ToString());
            }

            ret = ffmpeg.avcodec_receive_frame(video_st->codec, yuv_image);

            if (ret < 0)
            {
                throw new Exception("Error in avcodec_receive_frame. Error code: " + ret.ToString());
            }

            ffmpeg.av_packet_unref(&packet);

            if (rgb_image == null)
            {
                rgb_image = ffmpeg.av_frame_alloc();
                rgb_image->format = (int)AVPixelFormat.AV_PIX_FMT_BGR24; // We want to transform the raw decoded image to BGR24.
                rgb_image->width = yuv_image->width;
                rgb_image->height = yuv_image->height;
                ffmpeg.av_frame_get_buffer(rgb_image, 32);
            }

            // Convert from one of the YUV color formats provided by H264 decompression to BGR24.
            img_convert_ctx = ffmpeg.sws_getCachedContext(img_convert_ctx, yuv_image->width, yuv_image->height, video_st->codec->pix_fmt, rgb_image->width, rgb_image->height, AVPixelFormat.AV_PIX_FMT_BGR24, 0, null, null, null);
            ffmpeg.sws_scale(img_convert_ctx, yuv_image->data, yuv_image->linesize, 0, yuv_image->height, rgb_image->data, rgb_image->linesize);

            Bitmap bmp = new Bitmap(yuv_image->width, yuv_image->height, PixelFormat.Format24bppRgb);
            BitmapData bmpData = bmp.LockBits(new Rectangle(new Point(0, 0), new Size(bmp.Width, bmp.Height)), ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);

            // Copy row by row: both the Bitmap stride and the ffmpeg linesize may contain alignment padding,
            // so a single width*height*3 copy would shear the image for widths that are not stride-aligned.
            byte* srcRow = rgb_image->extended_data[0];
            byte* dstRow = (byte*)bmpData.Scan0;
            for (int y = 0; y < bmp.Height; y++)
            {
                UnmanagedMemory.CopyMemory(dstRow, srcRow, (uint)(bmp.Width * 3));
                srcRow += rgb_image->linesize[0];
                dstRow += bmpData.Stride;
            }

            bmp.UnlockBits(bmpData);
            return bmp;
        }
    }
}
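
For context, a minimal sketch of how this decoder is meant to be driven. ReceiveNalUnits and streaming are hypothetical placeholders for the RTSP frame source, not part of the commit; the first access unit should contain the parameter sets and a key frame, since the constructor probes the stream format from it:

// Hypothetical usage -- ReceiveNalUnits() stands in for whatever delivers
// the raw H.264 NAL units (e.g. the RTSP client in the next file).
List<byte[]> firstAccessUnit = ReceiveNalUnits();
H264Decoder decoder = new H264Decoder(firstAccessUnit);

while (streaming)
{
    List<byte[]> nalUnits = ReceiveNalUnits();
    using (Bitmap frame = decoder.Update(nalUnits))
    {
        // consume the decoded BGR24 frame
    }
}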

BetaCameras/Hikvision/Hikvision.cs
@@ -0,0 +1,175 @@
// Copyright (c) Metrilus GmbH
// MetriCam 2 is licensed under the MIT license. See License.txt for full license text.

using MetriCam2.Exceptions;
using Metrilus.Util;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Net;
using System.Threading;
#if !NETSTANDARD2_0
using System.Drawing.Imaging;
#endif

namespace MetriCam2.Cameras
{
    public class Hikvision : Camera, IDisposable
    {
        private bool _disposed = false;
        private RTSPClient _client = null;
        private long _currentBitmapTimestamp = 0;
        private Bitmap _currentBitmap = null;

        // MetriCam2 convention: a *Desc property describes the camera parameter of the same name.
        ParamDesc<string> IPAddressDesc
        {
            get
            {
                ParamDesc<string> res = new ParamDesc<string>
                {
                    Description = "IP address of the camera",
                    ReadableWhen = ParamDesc.ConnectionStates.Connected | ParamDesc.ConnectionStates.Disconnected,
                    WritableWhen = ParamDesc.ConnectionStates.Disconnected
                };
                return res;
            }
        }
        public string IPAddress { get; set; } = "";

        ParamDesc<uint> PortDesc
        {
            get
            {
                ParamDesc<uint> res = new ParamDesc<uint>
                {
                    Description = "RTSP port of the camera",
                    ReadableWhen = ParamDesc.ConnectionStates.Connected | ParamDesc.ConnectionStates.Disconnected,
                    WritableWhen = ParamDesc.ConnectionStates.Disconnected
                };
                return res;
            }
        }
        public uint Port { get; set; } = 554;

        ParamDesc<string> UsernameDesc
        {
            get
            {
                ParamDesc<string> res = new ParamDesc<string>
                {
                    Description = "Username to access the camera",
                    ReadableWhen = ParamDesc.ConnectionStates.Connected | ParamDesc.ConnectionStates.Disconnected,
                    WritableWhen = ParamDesc.ConnectionStates.Disconnected
                };
                return res;
            }
        }
        public string Username { get; set; } = "";

        ParamDesc<string> PasswordDesc
        {
            get
            {
                ParamDesc<string> res = new ParamDesc<string>
                {
                    Description = "Password to access the camera",
                    ReadableWhen = ParamDesc.ConnectionStates.Connected | ParamDesc.ConnectionStates.Disconnected,
                    WritableWhen = ParamDesc.ConnectionStates.Disconnected
                };
                return res;
            }
        }
        public string Password { get; set; } = "";

        public Hikvision()
        {
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        protected virtual void Dispose(bool disposing)
        {
            if (_disposed)
                return;

            if (IsConnected)
                DisconnectImpl();

            if (disposing)
            {
                // dispose managed resources
            }

            _disposed = true;
        }

        protected override void LoadAllAvailableChannels()
        {
            ChannelRegistry cr = ChannelRegistry.Instance;
            Channels.Clear();

            // The camera currently only provides a color channel via RTSP/H.264.
            Channels.Add(cr.RegisterChannel(ChannelNames.Color));
        }

        protected unsafe override void ConnectImpl()
        {
            if (ActiveChannels.Count == 0)
            {
                AddToActiveChannels(ChannelNames.Color);
            }

            _client = new RTSPClient(IPAddress, Port, Username, Password);
            _client.Connect();
        }

        private void OnErrorCallback(Exception error)
        {
            // Not implemented yet.
        }

        protected override void DisconnectImpl()
        {
            _client.Disconnect();
            _client = null;
        }

        protected override void UpdateImpl()
        {
            long timestamp = 0;
            Bitmap bitmap = null;
            long startTime = DateTime.Now.Ticks;
            long maxFrameTime = TimeSpan.FromSeconds(3).Ticks;

            // Poll the RTSP client until a frame arrives that is newer than the last one.
            while (true)
            {
                (bitmap, timestamp) = _client.GetCurrentBitmap();
                if (timestamp > _currentBitmapTimestamp)
                {
                    break;
                }
                if (DateTime.Now.Ticks > startTime + maxFrameTime)
                {
                    throw new ImageAcquisitionFailedException($"{Name}: Timeout while waiting for a new frame.");
                }
                Thread.Sleep(1); // avoid a hot busy-wait loop while polling
            }

            _currentBitmapTimestamp = timestamp;
            _currentBitmap = bitmap;
        }

        protected override ImageBase CalcChannelImpl(string channelName)
        {
            switch (channelName)
            {
                case ChannelNames.Color:
                    return new ColorImage(_currentBitmap);
            }

            throw new ImageAcquisitionFailedException($"{Name}: Unsupported channel name '{channelName}'.");
        }
    }
}
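
For reference, a minimal usage sketch, assuming the usual public MetriCam2 Camera surface (Connect/Update/CalcChannel wrapping the *Impl methods above); the address and credentials are placeholders:

// Hypothetical usage -- IP address and credentials are placeholders.
Hikvision camera = new Hikvision
{
    IPAddress = "192.168.1.64",
    Port = 554,
    Username = "admin",
    Password = "secret"
};
camera.Connect();    // opens the RTSP stream
camera.Update();     // blocks until a new frame arrives (3 s timeout)
ColorImage color = (ColorImage)camera.CalcChannel(ChannelNames.Color);
camera.Disconnect();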
