diff --git a/README.md b/README.md index 077335ca7..034bfb5f7 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ ![ffmeplay](https://github.com/unosquare/ffmediaelement/raw/master/Support/ffmeplay.png) -## Announcements +## Current Release Status - If you would like to support this project, you can show your appreciation via [PayPal.Me](https://www.paypal.me/mariodivece/50usd) - Current Status: (2018-04-25) - Release 3.4.240 is now available, (see the Releases) - NuGet Package available here: https://www.nuget.org/packages/FFME.Windows/ @@ -21,7 +21,7 @@ Here is a quick guide on how to get started. 1. Open Visual Studio (v2017 recommended), and create a new WPF Application. Target Framework must be 4.6.2 or above. (This will change to 4.6.1 in the final release) 2. Install the NuGet Package from your Package Manager Console: `PM> Install-Package FFME.Windows` -3. You need FFmpeg binaries now. Build your own or download a compatible build from [Zeranoe FFmpeg Builds site](https://ffmpeg.zeranoe.com/builds/win32/shared/ffmpeg-3.4.2-win32-shared.zip). +3. You need FFmpeg binaries now. Build your own or download a compatible build from [Zeranoe FFmpeg Builds site](https://ffmpeg.zeranoe.com/builds/win32/shared/ffmpeg-4.0-win32-shared.zip). 4. Your FFmpeg build should have a `bin` folder with 3 exe files and 8 dll files. Copy all 11 files to a folder such as `c:\ffmpeg` 5. Within you application's startup code (`Main` method), set `Unosquare.FFME.MediaElement.FFmpegDirectory = @"c:\ffmpeg";`. 6. Use the FFME `MediaElement` control as any other WPF control. @@ -82,7 +82,7 @@ device://gdigrab?desktop *Please note that I am unable to distribute FFmpeg's binaries because I don't know if I am allowed to do so. Follow the instructions below to compile, run and test FFME.* 1. Clone this repository. -2. Download the FFmpeg win32-shared binaries from Zeranoe FFmpeg Builds. +2. Download the FFmpeg win32-shared binaries from Zeranoe FFmpeg Builds. 3. 
Extract the contents of the zip file you just downloaded and go to the bin folder that got extracted. You should see 3 exe files and 8 dll files. Select and copy all of them. 4. Now paste all 11 files from the prior step onto a well-known folder. Take note of the full path. (I used c:\ffmpeg\) 5. Open the solution and set the Unosquare.FFME.Windows.Sample project as the startup project. You can do this by right clicking on the project and selecting Set as startup project diff --git a/Support/build-nuget-package.bat b/Support/build-nuget-package.bat index b37e32b9b..0ad9291e7 100644 --- a/Support/build-nuget-package.bat +++ b/Support/build-nuget-package.bat @@ -1,6 +1,6 @@ @echo off SET enableextensions -SET PackagePath="%UserProfile%\Desktop\ffme.windows-3.4.250\" +SET PackagePath="%UserProfile%\Desktop\ffme.windows-4.0.250\" SET ProjectPath="C:\projects\ffmediaelement\" SET ReleasePath="%ProjectPath%Unosquare.FFME.Windows.Sample\bin\Release\" diff --git a/Support/ffme.win.nuspec b/Support/ffme.win.nuspec index 2210ed440..dc8ebdf13 100644 --- a/Support/ffme.win.nuspec +++ b/Support/ffme.win.nuspec @@ -2,7 +2,7 @@ FFME.Windows - 3.4.250 + 4.0.250 FFME: WPF MediaElement Control Alternative Mario Di Vece and the FFME contributors MarioDiVece,Unosquare @@ -14,7 +14,7 @@ FFME is a close (and I'd like to think better) drop-in replacement for Microsoft's WPF MediaElement Control. While the standard MediaElement uses DirectX (DirectShow) for media playback, FFME uses FFmpeg to read and decode audio and video. This means that for those of you who want to support stuff like HLS playback, or just don't want to go through the hassle of installing codecs on client machines, using FFME might just be the answer. 
FFmpeg MediaElement Control (FFME) - This is a release package of the Michelob build referencing bindings to FFmpeg version 3.4.2 + This is a release package of the Michelob build referencing bindings to FFmpeg version 4.0 This package does not contain the required FFmpeg binaries. Please refer to the following URL for instructions on how to obtain the binaries: https://github.com/unosquare/ffmediaelement Release details: https://github.com/unosquare/ffmediaelement/milestone/7?closed=1 @@ -23,7 +23,7 @@ hls wpf ffmpeg mediaelement ffme h264 h265 hevc audio video processing decoding playback frame - + diff --git a/Support/readme.txt b/Support/readme.txt index 5d766d5fa..fd25bfd29 100644 --- a/Support/readme.txt +++ b/Support/readme.txt @@ -1,7 +1,7 @@ How to use FFME In order to use the FFME MediaElement control, you will need to setup a folder with FFmpeg binaries. Here are the steps: -1. You can build your own FFmpeg or download a compatible build from the wonderful Zeranoe FFmpeg Builds site: (https://ffmpeg.zeranoe.com/builds/win32/shared/ffmpeg-3.4.2-win32-shared.zip). +1. You can build your own FFmpeg or download a compatible build from the wonderful Zeranoe FFmpeg Builds site: (https://ffmpeg.zeranoe.com/builds/win32/shared/ffmpeg-4.0-win32-shared.zip). 2. Your FFmpeg build (see the bin folder) should have 3 exe files and 8 dll files. Copy all 11 files to a folder such as (c:\ffmpeg) 3. Within you application's startup code (Main method), set Unosquare.FFME.MediaElement.FFmpegDirectory = "path to ffmpeg binaries from the previous step";. 4. Use the FFME MediaElement control as any other WPF control! 
diff --git a/Unosquare.FFME.Common/Commands/MediaCommand.cs b/Unosquare.FFME.Common/Commands/MediaCommand.cs index 23caed950..54c11029f 100644 --- a/Unosquare.FFME.Common/Commands/MediaCommand.cs +++ b/Unosquare.FFME.Common/Commands/MediaCommand.cs @@ -49,7 +49,7 @@ protected MediaCommand(MediaCommandManager manager, MediaCommandType commandType /// /// Gets a value indicating whether this command is marked as completed. /// - public bool HasCompleted => IsDisposed || TaskContext.IsCompleted; + public bool HasCompleted => IsDisposed || TaskContext == null || TaskContext.IsCompleted; /// /// Gets the task that this command will run. @@ -93,7 +93,7 @@ public async Task StartAsync() var m = Manager.MediaCore; // Avoid processing the command if the element is disposed. - if (IsDisposed || m.IsDisposed || TaskContext.IsCanceled || IsRunning) + if (IsDisposed || m.IsDisposed || TaskContext.IsCanceled || IsRunning || TaskContext.IsCompleted) return; // Start and await the task diff --git a/Unosquare.FFME.Common/Commands/OpenCommand.cs b/Unosquare.FFME.Common/Commands/OpenCommand.cs index 0a3e7cf89..169d95219 100644 --- a/Unosquare.FFME.Common/Commands/OpenCommand.cs +++ b/Unosquare.FFME.Common/Commands/OpenCommand.cs @@ -66,7 +66,7 @@ internal override async Task ExecuteInternal() // Create the stream container // the async protocol prefix allows for increased performance for local files. 
- var streamOptions = new StreamOptions(); + var containerConfig = new ContainerConfiguration(); // Convert the URI object to something the Media Container understands var mediaUrl = Source.ToString(); @@ -76,7 +76,7 @@ internal override async Task ExecuteInternal() { // Set the default protocol Prefix mediaUrl = Source.LocalPath; - streamOptions.ProtocolPrefix = "async"; + containerConfig.ProtocolPrefix = "async"; } } catch { } @@ -85,22 +85,22 @@ internal override async Task ExecuteInternal() if (string.IsNullOrWhiteSpace(Source.Scheme) == false && (Source.Scheme.Equals("format") || Source.Scheme.Equals("device")) && string.IsNullOrWhiteSpace(Source.Host) == false - && string.IsNullOrWhiteSpace(streamOptions.Input.ForcedInputFormat) + && string.IsNullOrWhiteSpace(containerConfig.ForcedInputFormat) && string.IsNullOrWhiteSpace(Source.Query) == false) { // Update the Input format and container input URL // It is also possible to set some input options as follows: - // streamOptions.Input.Add(StreamInputOptions.Names.FrameRate, "20"); - streamOptions.Input.ForcedInputFormat = Source.Host; + // streamOptions.PrivateOptions["framerate"] = "20"; + containerConfig.ForcedInputFormat = Source.Host; mediaUrl = Uri.UnescapeDataString(Source.Query).TrimStart('?'); m.Log(MediaLogMessageType.Info, $"Media URI will be updated. Input Format: {Source.Host}, Input Argument: {mediaUrl}"); } // Allow the stream input options to be changed - await m.SendOnMediaInitializing(streamOptions, mediaUrl); + await m.SendOnMediaInitializing(containerConfig, mediaUrl); // Instantiate the internal container - m.Container = new MediaContainer(mediaUrl, streamOptions, m); + m.Container = new MediaContainer(mediaUrl, containerConfig, m); // Notify the user media is opening and allow for media options to be modified // Stuff like audio and video filters and stream selection can be performed here. 
diff --git a/Unosquare.FFME.Common/Core/FFAudioParams.cs b/Unosquare.FFME.Common/Core/FFAudioParams.cs index 8031bdd23..cb2456f60 100644 --- a/Unosquare.FFME.Common/Core/FFAudioParams.cs +++ b/Unosquare.FFME.Common/Core/FFAudioParams.cs @@ -53,8 +53,8 @@ private FFAudioParams() /// The frame. private FFAudioParams(AVFrame* frame) { - ChannelCount = ffmpeg.av_frame_get_channels(frame); - ChannelLayout = ffmpeg.av_frame_get_channel_layout(frame); + ChannelCount = frame->channels; + ChannelLayout = unchecked((long)frame->channel_layout); Format = (AVSampleFormat)frame->format; SamplesPerChannel = frame->nb_samples; BufferLength = ffmpeg.av_samples_get_buffer_size(null, ChannelCount, SamplesPerChannel, Format, 1); diff --git a/Unosquare.FFME.Common/Core/FFInterop.cs b/Unosquare.FFME.Common/Core/FFInterop.cs index 515da313d..f8510a2f4 100644 --- a/Unosquare.FFME.Common/Core/FFInterop.cs +++ b/Unosquare.FFME.Common/Core/FFInterop.cs @@ -3,6 +3,7 @@ using FFmpeg.AutoGen; using Shared; using System; + using System.Collections.Generic; using System.IO; using System.Linq; using System.Runtime.InteropServices; @@ -104,16 +105,15 @@ public static unsafe bool Initialize(string overridePath, int libIdentifiers) // Additional library initialization if (FFLibrary.LibAVDevice.IsLoaded) ffmpeg.avdevice_register_all(); - if (FFLibrary.LibAVFilter.IsLoaded) ffmpeg.avfilter_register_all(); - // Standard set initialization - ffmpeg.av_register_all(); - ffmpeg.avcodec_register_all(); - ffmpeg.avformat_network_init(); + // Standard set initialization -- not needed anymore starting FFmpeg 4 + // if (FFLibrary.LibAVFilter.IsLoaded) ffmpeg.avfilter_register_all(); + // ffmpeg.av_register_all(); + // ffmpeg.avcodec_register_all(); + // ffmpeg.avformat_network_init(); // Logging and locking LoggingWorker.ConnectToFFmpeg(); - FFLockManager.Register(); // set the static environment properties m_LibrariesPath = ffmpegPath; @@ -187,6 +187,122 @@ public static unsafe string PtrToStringUTF8(byte* 
stringAddress) } } + /// + /// Retrieves the options information associated with the given AVClass. + /// + /// The av class. + /// A list of option metadata + public static unsafe List RetrieveOptions(AVClass* avClass) + { + // see: https://github.com/FFmpeg/FFmpeg/blob/e0f32286861ddf7666ba92297686fa216d65968e/tools/enum_options.c + var result = new List(128); + if (avClass == null) return result; + + AVOption* option = avClass->option; + + while (option != null) + { + if (option->type != AVOptionType.AV_OPT_TYPE_CONST) + result.Add(new OptionMeta(option)); + + option = ffmpeg.av_opt_next(avClass, option); + } + + return result; + } + + /// + /// Retrives the codecs. + /// + /// The codecs + public static unsafe AVCodec*[] RetriveCodecs() + { + var result = new List(1024); + void* iterator; + AVCodec* item; + while ((item = ffmpeg.av_codec_iterate(&iterator)) != null) + { + result.Add(new IntPtr(item)); + } + + var collection = new AVCodec*[result.Count]; + for (var i = 0; i < result.Count; i++) + { + collection[i] = (AVCodec*)result[i].ToPointer(); + } + + return collection; + } + + /// + /// Retrieves the input format names. + /// + /// The collection of names + public static unsafe List RetrieveInputFormatNames() + { + var result = new List(128); + void* iterator; + AVInputFormat* item; + while ((item = ffmpeg.av_demuxer_iterate(&iterator)) != null) + { + result.Add(PtrToStringUTF8(item->name)); + } + + return result; + } + + /// + /// Retrieves the decoder names. + /// + /// All codecs. + /// The collection of names + public static unsafe List RetrieveDecoderNames(AVCodec*[] allCodecs) + { + var codecNames = new List(allCodecs.Length); + foreach (var c in allCodecs) + { + if (ffmpeg.av_codec_is_decoder(c) != 0) + codecNames.Add(PtrToStringUTF8(c->name)); + } + + return codecNames; + } + + /// + /// Retrieves the global format options. 
+ /// + /// The collection of option infos + public static unsafe List RetrieveGlobalFormatOptions() => + RetrieveOptions(ffmpeg.avformat_get_class()); + + /// + /// Retrieves the global codec options. + /// + /// The collection of option infos + public static unsafe List RetrieveGlobalCodecOptions() => + RetrieveOptions(ffmpeg.avcodec_get_class()); + + /// + /// Retrieves the input format options. + /// + /// Name of the format. + /// The collection of option infos + public static unsafe List RetrieveInputFormatOptions(string formatName) + { + var item = ffmpeg.av_find_input_format(formatName); + if (item == null) return new List(0); + + return RetrieveOptions(item->priv_class); + } + + /// + /// Retrieves the codec options. + /// + /// The codec. + /// The collection of option infos + public static unsafe List RetrieveCodecOptions(AVCodec* codec) => + RetrieveOptions(codec->priv_class); + #endregion } } diff --git a/Unosquare.FFME.Common/Core/FFLibrary.cs b/Unosquare.FFME.Common/Core/FFLibrary.cs index 9031a6d7d..fd2c47b7d 100644 --- a/Unosquare.FFME.Common/Core/FFLibrary.cs +++ b/Unosquare.FFME.Common/Core/FFLibrary.cs @@ -64,37 +64,37 @@ private FFLibrary(string name, int version, int flagId) /// /// Gets the AVCodec library. /// - public static FFLibrary LibAVCodec { get; } = new FFLibrary(Names.AVCodec, 57, 1); + public static FFLibrary LibAVCodec { get; } = new FFLibrary(Names.AVCodec, 58, 1); /// /// Gets the AVFormat library. /// - public static FFLibrary LibAVFormat { get; } = new FFLibrary(Names.AVFormat, 57, 2); + public static FFLibrary LibAVFormat { get; } = new FFLibrary(Names.AVFormat, 58, 2); /// /// Gets the AVUtil library. /// - public static FFLibrary LibAVUtil { get; } = new FFLibrary(Names.AVUtil, 55, 4); + public static FFLibrary LibAVUtil { get; } = new FFLibrary(Names.AVUtil, 56, 4); /// /// Gets the SWResample library. 
/// - public static FFLibrary LibSWResample { get; } = new FFLibrary(Names.SWResample, 2, 8); + public static FFLibrary LibSWResample { get; } = new FFLibrary(Names.SWResample, 3, 8); /// /// Gets the SWScale library. /// - public static FFLibrary LibSWScale { get; } = new FFLibrary(Names.SWScale, 4, 16); + public static FFLibrary LibSWScale { get; } = new FFLibrary(Names.SWScale, 5, 16); /// /// Gets the AVDevice library. /// - public static FFLibrary LibAVDevice { get; } = new FFLibrary(Names.AVDevice, 57, 32); + public static FFLibrary LibAVDevice { get; } = new FFLibrary(Names.AVDevice, 58, 32); /// /// Gets the AVFilter library. /// - public static FFLibrary LibAVFilter { get; } = new FFLibrary(Names.AVFilter, 6, 64); + public static FFLibrary LibAVFilter { get; } = new FFLibrary(Names.AVFilter, 7, 64); #endregion @@ -153,7 +153,7 @@ public bool Load(string basePath) if (Reference != IntPtr.Zero) throw new InvalidOperationException($"Library {Name} was already loaded."); - var result = LibraryLoader.LoadNativeLibraryUsingPlatformNamingConvention(basePath, Name, Version); + var result = LibraryLoader.LoadNativeLibrary(basePath, Name, Version); if (result != IntPtr.Zero) { diff --git a/Unosquare.FFME.Common/Core/FFLockManager.cs b/Unosquare.FFME.Common/Core/FFLockManager.cs deleted file mode 100644 index 443ee48f3..000000000 --- a/Unosquare.FFME.Common/Core/FFLockManager.cs +++ /dev/null @@ -1,120 +0,0 @@ -namespace Unosquare.FFME.Core -{ - using FFmpeg.AutoGen; - using System; - using System.Collections.Generic; - using System.Threading; - - /// - /// A lock manager for FFmpeg libraries - /// - internal static class FFLockManager - { - /* because Zeranoe FFmpeg Builds don't have --enable-pthreads, - * https://ffmpeg.zeranoe.com/builds/readme/win64/static/ffmpeg-20170620-ae6f6d4-win64-static-readme.txt - * and because by default FFmpeg is not thread-safe, - * https://stackoverflow.com/questions/13888915/thread-safety-of-libav-ffmpeg - * we need to register a 
lock manager with av_lockmgr_register - * Just like in https://raw.githubusercontent.com/FFmpeg/FFmpeg/release/3.4/ffplay.c - */ - - /// - /// The register lock - /// - private static readonly object RegisterLock = new object(); - - /// - /// Keeps track of the unmanaged and managed locking structures for the FFmpeg libraries to use. - /// - private static readonly Dictionary FFmpegOpDone = new Dictionary(); - - /// - /// The registration state - /// - private static bool m_HasRegistered = false; - - /// - /// Gets a value indicating whether the lock manager has registered. - /// - public static bool HasRegistered - { - get - { - lock (RegisterLock) - { - return m_HasRegistered; - } - } - } - - /// - /// Gets the FFmpeg lock manager callback. - /// Example: ffmpeg.av_lockmgr_register(FFLockManager.LockOpCallback); - /// - private static unsafe av_lockmgr_register_cb LockOpCallback { get; } = OnFFmpegLockOp; - - /// - /// Registers the lock manager. If it has been registered it does not do it again. - /// Thi method is thread-safe. - /// - public static void Register() - { - lock (RegisterLock) - { - if (m_HasRegistered) return; - ffmpeg.av_lockmgr_register(LockOpCallback); - m_HasRegistered = true; - } - } - - /// - /// Manages FFmpeg Multithreaded locking - /// - /// The mutex. - /// The op. 
- /// - /// 0 for success, 1 for error - /// - private static unsafe int OnFFmpegLockOp(void** mutex, AVLockOp lockingOperation) - { - switch (lockingOperation) - { - case AVLockOp.AV_LOCK_CREATE: - { - var m = new ManualResetEvent(true); - var mutexPointer = m.SafeWaitHandle.DangerousGetHandle(); - *mutex = (void*)mutexPointer; - FFmpegOpDone[mutexPointer] = m; - return 0; - } - - case AVLockOp.AV_LOCK_OBTAIN: - { - var mutexPointer = new IntPtr(*mutex); - FFmpegOpDone[mutexPointer].WaitOne(); - FFmpegOpDone[mutexPointer].Reset(); - return 0; - } - - case AVLockOp.AV_LOCK_RELEASE: - { - var mutexPointer = new IntPtr(*mutex); - FFmpegOpDone[mutexPointer].Set(); - return 0; - } - - case AVLockOp.AV_LOCK_DESTROY: - { - var mutexPointer = new IntPtr(*mutex); - var m = FFmpegOpDone[mutexPointer]; - FFmpegOpDone.Remove(mutexPointer); - m.Set(); - m.Dispose(); - return 0; - } - } - - return 1; - } - } -} diff --git a/Unosquare.FFME.Common/Decoding/CodecOption.cs b/Unosquare.FFME.Common/Decoding/CodecOption.cs deleted file mode 100644 index 4b71a767a..000000000 --- a/Unosquare.FFME.Common/Decoding/CodecOption.cs +++ /dev/null @@ -1,36 +0,0 @@ -namespace Unosquare.FFME.Decoding -{ - /// - /// A single codec option along with a stream specifier. - /// - internal sealed class CodecOption - { - /// - /// Initializes a new instance of the class. - /// - /// The spec. - /// The key. - /// The value. - public CodecOption(StreamSpecifier spec, string key, string value) - { - StreamSpecifier = spec; - Key = key; - Value = value; - } - - /// - /// Gets or sets the stream specifier. - /// - public StreamSpecifier StreamSpecifier { get; set; } - - /// - /// Gets or sets the option name - /// - public string Key { get; set; } - - /// - /// Gets or sets the option value. 
- /// - public string Value { get; set; } - } -} diff --git a/Unosquare.FFME.Common/Decoding/HardwareAccelerator.cs b/Unosquare.FFME.Common/Decoding/HardwareAccelerator.cs index 9c28b3ab2..fd6ca8738 100644 --- a/Unosquare.FFME.Common/Decoding/HardwareAccelerator.cs +++ b/Unosquare.FFME.Common/Decoding/HardwareAccelerator.cs @@ -2,9 +2,9 @@ { using Core; using FFmpeg.AutoGen; + using Shared; using System; using System.Collections.Generic; - using System.Collections.ObjectModel; internal unsafe class HardwareAccelerator { @@ -13,34 +13,7 @@ internal unsafe class HardwareAccelerator /// private readonly AVCodecContext_get_format GetFormatCallback; - /// - /// Initializes static members of the class. - /// - static HardwareAccelerator() - { - Dxva2 = new HardwareAccelerator - { - Name = "DXVA2", - DeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2, - PixelFormat = AVPixelFormat.AV_PIX_FMT_DXVA2_VLD, - RequiresTransfer = true, - }; - - Cuda = new HardwareAccelerator - { - Name = "CUVID", - DeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA, - PixelFormat = AVPixelFormat.AV_PIX_FMT_CUDA, - RequiresTransfer = false, - }; - - All = new ReadOnlyDictionary( - new Dictionary() - { - { Dxva2.PixelFormat, Dxva2 }, - { Cuda.PixelFormat, Cuda } - }); - } + private VideoComponent Component; /// /// Prevents a default instance of the class from being created. @@ -51,32 +24,11 @@ private HardwareAccelerator() GetFormatCallback = new AVCodecContext_get_format(GetPixelFormat); } - /// - /// A dicitionary containing all Accelerators by pixel format - /// - public static ReadOnlyDictionary All { get; } - - /// - /// Gets the dxva2 accelerator. - /// - public static HardwareAccelerator Dxva2 { get; } - - /// - /// Gets the CUDA video accelerator. - /// - public static HardwareAccelerator Cuda { get; } - /// /// Gets the name of the HW accelerator. 
/// public string Name { get; private set; } - /// - /// Gets a value indicating whether the frame requires the transfer from - /// the hardware to RAM - /// - public bool RequiresTransfer { get; private set; } - /// /// Gets the hardware output pixel format. /// @@ -88,47 +40,82 @@ private HardwareAccelerator() public AVHWDeviceType DeviceType { get; private set; } /// - /// Attaches a hardware device context to the specified video component. + /// Attaches a hardware accelerator to the specified component. /// /// The component. - /// Throws when unable to initialize the hardware device - public void AttachDevice(VideoComponent component) + /// The selected configuration. + /// + /// Whether or not the hardware accelerator was attached + /// + public static bool Attach(VideoComponent component, HardwareDeviceInfo selectedConfig) { - var result = 0; - - fixed (AVBufferRef** devContextRef = &component.HardwareDeviceContext) + try { - result = ffmpeg.av_hwdevice_ctx_create(devContextRef, DeviceType, null, null, 0); - if (result < 0) - throw new Exception($"Unable to initialize hardware context for device {Name}"); + var result = new HardwareAccelerator + { + Component = component, + Name = selectedConfig.DeviceTypeName, + DeviceType = selectedConfig.DeviceType, + PixelFormat = selectedConfig.PixelFormat, + }; + + result.InitializeHardwareContext(); + return true; + } + catch (Exception ex) + { + component.Container.Parent?.Log(MediaLogMessageType.Error, $"Could not attach hardware decoder. {ex.Message}"); + return false; } - - component.HardwareAccelerator = this; - component.CodecContext->hw_device_ctx = ffmpeg.av_buffer_ref(component.HardwareDeviceContext); - component.CodecContext->get_format = GetFormatCallback; } /// - /// Detaches and disposes the hardware device context from the specified video component + /// Gets the supported hardware decoder device types for the given codec. /// - /// The component. 
- public void DetachDevice(VideoComponent component) + /// The codec identifier. + /// + /// A list of hardware device decoders compatible with the codec + /// + public static List GetCompatibleDevices(AVCodecID codecId) { - // TODO: (Floyd) Check the below code in the future because I am not sure - // how to uninitialize the hardware device context - if (component.CodecContext != null) + const int AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX = 0x01; + var codec = ffmpeg.avcodec_find_decoder(codecId); + var result = new List(64); + var configIndex = 0; + + // skip unsupported configs + if (codec == null || codecId == AVCodecID.AV_CODEC_ID_NONE) + return result; + + while (true) { - ffmpeg.av_buffer_unref(&component.CodecContext->hw_device_ctx); - component.CodecContext->hw_device_ctx = null; + var config = ffmpeg.avcodec_get_hw_config(codec, configIndex); + if (config == null) break; + + if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) != 0 + && config->device_type != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) + { + result.Add(new HardwareDeviceInfo(config)); + } + + configIndex++; } - if (component.HardwareDeviceContext != null) + return result; + } + + /// + /// Detaches and disposes the hardware device context from the specified video component + /// + public void Release() + { + if (Component.HardwareDeviceContext != null) { - fixed (AVBufferRef** hwdc = &component.HardwareDeviceContext) + fixed (AVBufferRef** hwdc = &Component.HardwareDeviceContext) { ffmpeg.av_buffer_unref(hwdc); - component.HardwareDeviceContext = null; - component.HardwareAccelerator = null; + Component.HardwareDeviceContext = null; + Component.HardwareAccelerator = null; } } } @@ -138,7 +125,7 @@ public void DetachDevice(VideoComponent component) /// The input hardware frame gets freed and the return value will point to the new software frame /// /// The codec context. - /// The input. + /// The input frame coming from the decoder (may or may not be hardware). 
/// if set to true [comes from hardware] otherwise, hardware decoding was not perfomred. /// /// The frame downloaded from the device into RAM @@ -156,9 +143,6 @@ public void DetachDevice(VideoComponent component) if (input->format != (int)PixelFormat) return input; - if (RequiresTransfer == false) - return input; - var output = ffmpeg.av_frame_alloc(); var result = ffmpeg.av_hwframe_transfer_data(output, input, 0); @@ -176,6 +160,25 @@ public void DetachDevice(VideoComponent component) return output; } + /// + /// Attaches a hardware device context to the specified video component. + /// + /// Throws when unable to initialize the hardware device + private void InitializeHardwareContext() + { + fixed (AVBufferRef** devContextRef = &Component.HardwareDeviceContext) + { + var initResultCode = 0; + initResultCode = ffmpeg.av_hwdevice_ctx_create(devContextRef, DeviceType, null, null, 0); + if (initResultCode < 0) + throw new Exception($"Unable to initialize hardware context for device {Name}"); + } + + Component.HardwareAccelerator = this; + Component.CodecContext->hw_device_ctx = ffmpeg.av_buffer_ref(Component.HardwareDeviceContext); + Component.CodecContext->get_format = GetFormatCallback; + } + /// /// Gets the pixel format. /// Port of (get_format) method in ffmpeg.c diff --git a/Unosquare.FFME.Common/Decoding/MediaComponent.cs b/Unosquare.FFME.Common/Decoding/MediaComponent.cs index 69048c271..0a3d90017 100644 --- a/Unosquare.FFME.Common/Decoding/MediaComponent.cs +++ b/Unosquare.FFME.Common/Decoding/MediaComponent.cs @@ -52,11 +52,6 @@ internal abstract unsafe class MediaComponent : IDisposable /// private bool IsDisposed = false; - /// - /// Holds total bytes read in the lifetime of this object - /// - private ulong m_LifetimeBytesRead = 0; - #endregion #region Constructor @@ -70,6 +65,8 @@ internal abstract unsafe class MediaComponent : IDisposable /// The container exception. 
protected MediaComponent(MediaContainer container, int streamIndex) { + // Parted from: https://github.com/FFmpeg/FFmpeg/blob/master/fftools/ffplay.c#L2559 + // avctx = avcodec_alloc_context3(NULL); Container = container ?? throw new ArgumentNullException(nameof(container)); CodecContext = ffmpeg.avcodec_alloc_context3(null); RC.Current.Add(CodecContext, $"134: {nameof(MediaComponent)}[{MediaType}].ctor()"); @@ -77,88 +74,121 @@ protected MediaComponent(MediaContainer container, int streamIndex) Stream = container.InputContext->streams[StreamIndex]; StreamInfo = container.MediaInfo.Streams[StreamIndex]; - // Set codec options + // Set default codec context options from probed stream var setCodecParamsResult = ffmpeg.avcodec_parameters_to_context(CodecContext, Stream->codecpar); if (setCodecParamsResult < 0) Container.Parent?.Log(MediaLogMessageType.Warning, $"Could not set codec parameters. Error code: {setCodecParamsResult}"); // We set the packet timebase in the same timebase as the stream as opposed to the tpyical AV_TIME_BASE - if (this is VideoComponent && Container.MediaOptions.VideoForcedFps != null) + if (this is VideoComponent && Container.MediaOptions.VideoForcedFps > 0) { - ffmpeg.av_codec_set_pkt_timebase(CodecContext, Container.MediaOptions.VideoForcedFps.Value); - ffmpeg.av_stream_set_r_frame_rate(Stream, Container.MediaOptions.VideoForcedFps.Value); + var fpsRational = ffmpeg.av_d2q(Container.MediaOptions.VideoForcedFps, 1000000); + Stream->r_frame_rate = fpsRational; + CodecContext->pkt_timebase = new AVRational { num = fpsRational.den, den = fpsRational.num }; } else { - ffmpeg.av_codec_set_pkt_timebase(CodecContext, Stream->time_base); + CodecContext->pkt_timebase = Stream->time_base; + } + + // Find the default decoder codec from the stream and set it. + var defaultCodec = ffmpeg.avcodec_find_decoder(Stream->codec->codec_id); + AVCodec* forcedCodec = null; + + // If set, change the codec to the forced codec. 
+ if (Container.MediaOptions.DecoderCodec.ContainsKey(StreamIndex) && + string.IsNullOrWhiteSpace(Container.MediaOptions.DecoderCodec[StreamIndex]) == false) + { + var forcedCodecName = Container.MediaOptions.DecoderCodec[StreamIndex]; + forcedCodec = ffmpeg.avcodec_find_decoder_by_name(forcedCodecName); + if (forcedCodec == null) + { + Container.Parent?.Log(MediaLogMessageType.Warning, + $"COMP {MediaType.ToString().ToUpperInvariant()}: Unable to set decoder codec to '{forcedCodecName}' on stream index {StreamIndex}"); + } } - // Find the codec and set it. - var codec = ffmpeg.avcodec_find_decoder(Stream->codec->codec_id); - if (codec == null) + // Check we have a valid codec to open and process the stream. + if (defaultCodec == null && forcedCodec == null) { var errorMessage = $"Fatal error. Unable to find suitable decoder for {Stream->codec->codec_id.ToString()}"; CloseComponent(); throw new MediaContainerException(errorMessage); } - CodecContext->codec_id = codec->id; + var codecCandidates = new AVCodec*[] { forcedCodec, defaultCodec }; + AVCodec* selectedCodec = null; + var codecOpenResult = 0; - // Process the low res index option - var lowResIndex = ffmpeg.av_codec_get_max_lowres(codec); - if (Container.MediaOptions.EnableLowRes) + foreach (var codec in codecCandidates) { - ffmpeg.av_codec_set_lowres(CodecContext, lowResIndex); - CodecContext->flags |= ffmpeg.CODEC_FLAG_EMU_EDGE; - } - else - { - lowResIndex = 0; - } + if (codec == null) + continue; - // Configure the codec context flags - if (Container.MediaOptions.EnableFastDecoding) CodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST; - if (Container.MediaOptions.EnableLowDelay) CodecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY; - if ((codec->capabilities & ffmpeg.AV_CODEC_CAP_DR1) != 0) CodecContext->flags |= ffmpeg.CODEC_FLAG_EMU_EDGE; - if ((codec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0) CodecContext->flags |= ffmpeg.AV_CODEC_CAP_TRUNCATED; - if ((codec->capabilities & 
ffmpeg.CODEC_FLAG2_CHUNKS) != 0) CodecContext->flags |= ffmpeg.CODEC_FLAG2_CHUNKS; + // Pass default codec stuff to the codec contect + CodecContext->codec_id = codec->id; + if ((codec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0) CodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED; + if ((codec->capabilities & ffmpeg.AV_CODEC_FLAG2_CHUNKS) != 0) CodecContext->flags |= ffmpeg.AV_CODEC_FLAG2_CHUNKS; - // Setup additional settings. The most important one is Threads -- Setting it to 1 decoding is very slow. Setting it to auto - // decoding is very fast in most scenarios. - var codecOptions = Container.MediaOptions.CodecOptions.FilterOptions(CodecContext->codec_id, Container.InputContext, Stream, codec); - if (codecOptions.HasKey(MediaCodecOptions.Names.Threads) == false) - codecOptions[MediaCodecOptions.Names.Threads] = "auto"; + // Process the decoder options + { + var decoderOptions = Container.MediaOptions.DecoderParams; - if (lowResIndex != 0) codecOptions[MediaCodecOptions.Names.LowRes] = lowResIndex.ToString(CultureInfo.InvariantCulture); - if (CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO || CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO) - codecOptions[MediaCodecOptions.Names.RefCountedFrames] = 1.ToString(CultureInfo.InvariantCulture); + // Configure the codec context flags + if (decoderOptions.EnableFastDecoding) CodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST; + if (decoderOptions.EnableLowDelay) CodecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY; - // Enable Hardware acceleration if requested - if (this is VideoComponent && container.MediaOptions.EnableHardwareAcceleration) - HardwareAccelerator.Cuda.AttachDevice(this as VideoComponent); + // process the low res option + if (decoderOptions.EnableLowRes && codec->max_lowres > 0) + decoderOptions.LowResIndex = codec->max_lowres.ToString(CultureInfo.InvariantCulture); - // Open the CodecContext. 
This requires exclusive FFmpeg access - var codecOpenResult = 0; - lock (CodecOpenLock) - { - fixed (AVDictionary** reference = &codecOptions.Pointer) - codecOpenResult = ffmpeg.avcodec_open2(CodecContext, codec, reference); - } + // Ensure ref counted frames for audio and video decoding + if (CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO || CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO) + decoderOptions.RefCountedFrames = "1"; + } - // Check if the codec opened successfully - if (codecOpenResult < 0) - { - CloseComponent(); - throw new MediaContainerException($"Unable to open codec. Error code {codecOpenResult}"); + // Setup additional settings. The most important one is Threads -- Setting it to 1 decoding is very slow. Setting it to auto + // decoding is very fast in most scenarios. + var codecOptions = Container.MediaOptions.DecoderParams.GetStreamCodecOptions(Stream->index); + + // Enable Hardware acceleration if requested + if (this is VideoComponent && container.MediaOptions.VideoHardwareDevice != null) + HardwareAccelerator.Attach(this as VideoComponent, container.MediaOptions.VideoHardwareDevice); + + // Open the CodecContext. 
This requires exclusive FFmpeg access + lock (CodecOpenLock) + { + fixed (AVDictionary** codecOptionsRef = &codecOptions.Pointer) + codecOpenResult = ffmpeg.avcodec_open2(CodecContext, codec, codecOptionsRef); + } + + // Check if the codec opened successfully + if (codecOpenResult < 0) + { + Container.Parent?.Log(MediaLogMessageType.Warning, + $"Unable to open codec '{FFInterop.PtrToStringUTF8(codec->name)}' on stream {streamIndex}"); + + continue; + } + + // If there are any codec options left over from passing them, it means they were not consumed + var currentEntry = codecOptions.First(); + while (currentEntry != null && currentEntry?.Key != null) + { + Container.Parent?.Log(MediaLogMessageType.Warning, + $"Invalid codec option: '{currentEntry.Key}' for codec '{FFInterop.PtrToStringUTF8(codec->name)}', stream {streamIndex}"); + currentEntry = codecOptions.Next(currentEntry); + } + + selectedCodec = codec; + break; } - // If there are any codec options left over from passing them, it means they were not consumed - var currentEntry = codecOptions.First(); - while (currentEntry != null && currentEntry?.Key != null) + if (selectedCodec == null) { - Container.Parent?.Log(MediaLogMessageType.Warning, $"Invalid codec option: '{currentEntry.Key}'"); - currentEntry = codecOptions.Next(currentEntry); + CloseComponent(); + throw new MediaContainerException($"Unable to find suitable decoder codec for stream {streamIndex}. Error code {codecOpenResult}"); } // Startup done. Set some options. @@ -178,7 +208,7 @@ protected MediaComponent(MediaContainer container, int streamIndex) Duration = Stream->duration.ToTimeSpan(Stream->time_base); CodecId = Stream->codec->codec_id; - CodecName = ffmpeg.avcodec_get_name(CodecId); + CodecName = FFInterop.PtrToStringUTF8(selectedCodec->name); Bitrate = Stream->codec->bit_rate < 0 ? 
0 : Convert.ToUInt64(Stream->codec->bit_rate); Container.Parent?.Log(MediaLogMessageType.Debug, $"COMP {MediaType.ToString().ToUpperInvariant()}: Start Offset: {StartTimeOffset.Format()}; Duration: {Duration.Format()}"); @@ -244,11 +274,7 @@ protected MediaComponent(MediaContainer container, int streamIndex) /// /// Gets the total amount of bytes read by this component in the lifetime of this component. /// - public ulong LifetimeBytesRead - { - get => m_LifetimeBytesRead; - private set => m_LifetimeBytesRead = value; - } + public ulong LifetimeBytesRead { get; private set; } = 0; /// /// Gets the ID of the codec for this component. @@ -454,7 +480,9 @@ private List DecodeNextPacketInternal() if (MediaType == MediaType.Audio || MediaType == MediaType.Video) { // If it's audio or video, we use the new API and the decoded frames are stored in AVFrame - // Let us send the packet to the codec for decoding a frame of uncompressed data later + // Let us send the packet to the codec for decoding a frame of uncompressed data later. + // TODO: sendPacketResult is never checked for errors... We requires ome error handling. + // for example when using h264_qsv codec, this returns -40 (Function not implemented) var sendPacketResult = ffmpeg.avcodec_send_packet(CodecContext, IsEmptyPacket(packet) ? null : packet); // Let's check and see if we can get 1 or more frames from the packet we just sent to the decoder. diff --git a/Unosquare.FFME.Common/Decoding/MediaContainer.cs b/Unosquare.FFME.Common/Decoding/MediaContainer.cs index 3947a3f82..44b6ceb54 100644 --- a/Unosquare.FFME.Common/Decoding/MediaContainer.cs +++ b/Unosquare.FFME.Common/Decoding/MediaContainer.cs @@ -83,10 +83,10 @@ internal sealed unsafe class MediaContainer : IDisposable /// Initializes a new instance of the class. /// /// The media URL. - /// The stream options. + /// The container configuration options. /// The logger. 
/// mediaUrl - public MediaContainer(string mediaUrl, StreamOptions streamOptions, IMediaLogger parent) + public MediaContainer(string mediaUrl, ContainerConfiguration config, IMediaLogger parent) { // Argument Validation if (string.IsNullOrWhiteSpace(mediaUrl)) @@ -98,17 +98,17 @@ public MediaContainer(string mediaUrl, StreamOptions streamOptions, IMediaLogger // Create the options object Parent = parent; MediaUrl = mediaUrl; - StreamOptions = streamOptions ?? new StreamOptions(); + Configuration = config ?? new ContainerConfiguration(); // drop the protocol prefix if it is redundant - var protocolPrefix = StreamOptions.ProtocolPrefix; + var protocolPrefix = Configuration.ProtocolPrefix; if (string.IsNullOrWhiteSpace(MediaUrl) == false && string.IsNullOrWhiteSpace(protocolPrefix) == false && MediaUrl.ToLowerInvariant().Trim().StartsWith(protocolPrefix.ToLowerInvariant() + ":")) { protocolPrefix = null; } - StreamOptions.ProtocolPrefix = protocolPrefix; + Configuration.ProtocolPrefix = protocolPrefix; StreamInitialize(); } @@ -133,11 +133,12 @@ public MediaContainer(string mediaUrl, StreamOptions streamOptions, IMediaLogger public string MediaUrl { get; } /// - /// The stream initialization options. - /// Options are applied when creating the container. - /// After initialization, changing the options has no effect. + /// The container and demuxer initialization and configuration options. + /// Options are applied when creating an instance of the container. + /// After container creation, changing the configuration options passed in + /// the constructor has no effect. 
/// - public StreamOptions StreamOptions { get; } + public ContainerConfiguration Configuration { get; } /// /// Represetnts options that applied before initializing media components and their corresponding @@ -526,23 +527,23 @@ private void StreamInitialize() // Retrieve the input format (null = auto for default) AVInputFormat* inputFormat = null; - if (string.IsNullOrWhiteSpace(StreamOptions.Input.ForcedInputFormat) == false) + if (string.IsNullOrWhiteSpace(Configuration.ForcedInputFormat) == false) { - inputFormat = ffmpeg.av_find_input_format(StreamOptions.Input.ForcedInputFormat); + inputFormat = ffmpeg.av_find_input_format(Configuration.ForcedInputFormat); if (inputFormat == null) { Parent?.Log(MediaLogMessageType.Warning, - $"Format '{StreamOptions.Input.ForcedInputFormat}' not found. Will use automatic format detection."); + $"Format '{Configuration.ForcedInputFormat}' not found. Will use automatic format detection."); } } try { // Create the input format context, and open the input based on the provided format options. - using (var inputOptions = new FFDictionary(StreamOptions.Input)) + using (var privateOptions = new FFDictionary(Configuration.PrivateOptions)) { - if (inputOptions.HasKey(StreamInputOptions.Names.ScanAllPmts) == false) - inputOptions.Set(StreamInputOptions.Names.ScanAllPmts, "1", true); + if (privateOptions.HasKey(ContainerConfiguration.ScanAllPmts) == false) + privateOptions.Set(ContainerConfiguration.ScanAllPmts, "1", true); // Create the input context StreamInitializeInputContext(); @@ -554,12 +555,15 @@ private void StreamInitialize() var openResult = 0; // We set the start of the read operation time so tiomeouts can be detected + // and we open the URL so the input context can be initialized. 
StreamReadInterruptStartTime.Value = DateTime.UtcNow.Ticks; - fixed (AVDictionary** inputOptionsRef = &inputOptions.Pointer) + fixed (AVDictionary** privateOptionsRef = &privateOptions.Pointer) { - var prefix = string.IsNullOrWhiteSpace(StreamOptions.ProtocolPrefix) ? - string.Empty : $"{StreamOptions.ProtocolPrefix.Trim()}:"; - openResult = ffmpeg.avformat_open_input(&inputContextPtr, $"{prefix}{MediaUrl}", inputFormat, inputOptionsRef); + var prefix = string.IsNullOrWhiteSpace(Configuration.ProtocolPrefix) ? + string.Empty : $"{Configuration.ProtocolPrefix.Trim()}:"; + + // Pass the private options dictionary + openResult = ffmpeg.avformat_open_input(&inputContextPtr, $"{prefix}{MediaUrl}", inputFormat, privateOptionsRef); InputContext = inputContextPtr; } @@ -573,15 +577,14 @@ private void StreamInitialize() // Set some general properties MediaFormatName = FFInterop.PtrToStringUTF8(InputContext->iformat->name); - // If there are any optins left in the dictionary, it means they did not get used (invalid options). - inputOptions.Remove(StreamInputOptions.Names.ScanAllPmts); - + // If there are any options left in the dictionary, it means they did not get used (invalid options). 
// Output the invalid options as warnings - var currentEntry = inputOptions.First(); + privateOptions.Remove(ContainerConfiguration.ScanAllPmts); + var currentEntry = privateOptions.First(); while (currentEntry != null && currentEntry?.Key != null) { Parent?.Log(MediaLogMessageType.Warning, $"Invalid input option: '{currentEntry.Key}'"); - currentEntry = inputOptions.Next(currentEntry); + currentEntry = privateOptions.Next(currentEntry); } } @@ -675,12 +678,12 @@ private void StreamInitializeInputContext() InputContext->interrupt_callback.opaque = InputContext; // Acquire the format options to be applied - var opts = StreamOptions.Format; + var opts = Configuration.GlobalOptions; // Apply the options if (opts.EnableReducedBuffering) InputContext->avio_flags |= ffmpeg.AVIO_FLAG_DIRECT; if (opts.PacketSize != default) InputContext->packet_size = System.Convert.ToUInt32(opts.PacketSize); - if (opts.ProbeSize != default) InputContext->probesize = StreamOptions.Format.ProbeSize <= 32 ? 32 : opts.ProbeSize; + if (opts.ProbeSize != default) InputContext->probesize = Configuration.GlobalOptions.ProbeSize <= 32 ? 32 : opts.ProbeSize; // Flags InputContext->flags |= opts.FlagDiscardCorrupt ? 
ffmpeg.AVFMT_FLAG_DISCARD_CORRUPT : InputContext->flags; @@ -909,7 +912,7 @@ private unsafe int OnStreamReadInterrupt(void* opaque) var startTicks = StreamReadInterruptStartTime.Value; var timeDifference = TimeSpan.FromTicks(nowTicks - startTicks); - if (StreamOptions.Input.ReadTimeout.Ticks >= 0 && timeDifference.Ticks > StreamOptions.Input.ReadTimeout.Ticks) + if (Configuration.ReadTimeout.Ticks >= 0 && timeDifference.Ticks > Configuration.ReadTimeout.Ticks) { Parent?.Log(MediaLogMessageType.Error, $"{nameof(OnStreamReadInterrupt)} timed out with {timeDifference.Format()}"); return ErrorResult; diff --git a/Unosquare.FFME.Common/Decoding/StreamSpecifier.cs b/Unosquare.FFME.Common/Decoding/StreamSpecifier.cs deleted file mode 100644 index 6c9628c5c..000000000 --- a/Unosquare.FFME.Common/Decoding/StreamSpecifier.cs +++ /dev/null @@ -1,128 +0,0 @@ -namespace Unosquare.FFME.Decoding -{ - using Shared; - using System; - using System.Collections.Generic; - using System.Collections.ObjectModel; - using System.Globalization; - - /// - /// A managed representation of an FFmpeg stream specifier - /// - internal class StreamSpecifier - { - #region Constructors - - /// - /// Initializes a new instance of the class. - /// - public StreamSpecifier() - { - StreamSuffix = string.Empty; - StreamId = -1; - } - - /// - /// Initializes a new instance of the class. - /// - /// The stream identifier. - /// streamId - public StreamSpecifier(int streamId) - { - if (streamId < 0) - throw new ArgumentException($"{nameof(streamId)} must be greater than or equal to 0"); - - StreamSuffix = string.Empty; - StreamId = streamId; - } - - /// - /// Initializes a new instance of the class. - /// - /// Type of the media. 
- /// streamType - public StreamSpecifier(MediaType mediaType) - { - var streamType = Types[mediaType]; - if (streamType != 'a' && streamType != 'v' && streamType != 's') - throw new ArgumentException($"{nameof(streamType)} must be either a, v, or s"); - - StreamSuffix = new string(streamType, 1); - StreamId = -1; - } - - /// - /// Initializes a new instance of the class. - /// - /// Type of the media. - /// The stream identifier. - /// - /// streamType - /// or - /// streamId - /// - public StreamSpecifier(MediaType mediaType, int streamId) - { - var streamType = Types[mediaType]; - if (streamType != 'a' && streamType != 'v' && streamType != 's') - throw new ArgumentException($"{nameof(streamType)} must be either a, v, or s"); - - if (streamId < 0) - throw new ArgumentException($"{nameof(streamId)} must be greater than or equal to 0"); - - StreamSuffix = new string(streamType, 1); - StreamId = streamId; - } - - #endregion - - #region Properties - - /// - /// Provides suffixes for the different media types. - /// - public static ReadOnlyDictionary Types { get; } - = new ReadOnlyDictionary(new Dictionary - { - { MediaType.Audio, 'a' }, - { MediaType.Video, 'v' }, - { MediaType.Subtitle, 's' }, - }); - - /// - /// Gets the stream identifier. - /// - public int StreamId { get; } - - /// - /// Gets the stream suffix. - /// - public string StreamSuffix { get; } - - #endregion - - #region Methods - - /// - /// Returns a that represents this stream specifier. - /// - /// - /// A that represents this instance. 
- /// - public override string ToString() - { - if (string.IsNullOrWhiteSpace(StreamSuffix) == false && StreamId >= 0) - return $"{StreamSuffix}:{StreamId}"; - - if (string.IsNullOrWhiteSpace(StreamSuffix) == false) - return StreamSuffix; - - if (StreamId >= 0) - return StreamId.ToString(CultureInfo.InvariantCulture); - - return string.Empty; - } - - #endregion - } -} diff --git a/Unosquare.FFME.Common/Decoding/SubtitleComponent.cs b/Unosquare.FFME.Common/Decoding/SubtitleComponent.cs index 4a91b9211..d08c21cf7 100644 --- a/Unosquare.FFME.Common/Decoding/SubtitleComponent.cs +++ b/Unosquare.FFME.Common/Decoding/SubtitleComponent.cs @@ -20,9 +20,15 @@ internal sealed unsafe class SubtitleComponent : MediaComponent internal SubtitleComponent(MediaContainer container, int streamIndex) : base(container, streamIndex) { - // placeholder. Nothing else to change here. + // Adjust the offset according to options + Delay = container.MediaOptions.SubtitlesDelay; } + /// + /// Gets the amount of time to offset the subtitles by for this component + /// + public TimeSpan Delay { get; } = TimeSpan.Zero; + /// /// Converts decoded, raw frame data in the frame source into a a usable frame.
/// The process includes performing picture, samples or text conversions @@ -50,6 +56,14 @@ public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock out target.Duration = source.Duration; target.StreamIndex = input.StreamIndex; + // Process time offsets + if (Delay != TimeSpan.Zero) + { + target.StartTime = TimeSpan.FromTicks(target.StartTime.Ticks + Delay.Ticks); + target.EndTime = TimeSpan.FromTicks(target.EndTime.Ticks + Delay.Ticks); + target.Duration = TimeSpan.FromTicks(target.EndTime.Ticks - target.StartTime.Ticks); + } + target.OriginalText.Clear(); if (source.Text.Count > 0) target.OriginalText.AddRange(source.Text); diff --git a/Unosquare.FFME.Common/Decoding/VideoComponent.cs b/Unosquare.FFME.Common/Decoding/VideoComponent.cs index fd9666039..d466deaf8 100644 --- a/Unosquare.FFME.Common/Decoding/VideoComponent.cs +++ b/Unosquare.FFME.Common/Decoding/VideoComponent.cs @@ -6,7 +6,6 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; - using System.Runtime.InteropServices; /// /// Performs video picture decoding, scaling and extraction logic. 
@@ -160,30 +159,22 @@ public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock out } // Perform scaling and save the data to our unmanaged buffer pointer - var targetBufferStride = ffmpeg.av_image_get_linesize( - Constants.Video.VideoPixelFormat, source.Pointer->width, 0); - var targetStride = new int[] { targetBufferStride }; - var targetLength = ffmpeg.av_image_get_buffer_size( - Constants.Video.VideoPixelFormat, source.Pointer->width, source.Pointer->height, 1); - - // Ensure proper allocation of the buffer - // If there is a size mismatch between the wanted buffer length and the existing one, - // then let's reallocate the buffer and set the new size (dispose of the existing one if any) - if (target.PictureBufferLength != targetLength) - { - if (target.PictureBuffer != IntPtr.Zero) - Marshal.FreeHGlobal(target.PictureBuffer); - - target.PictureBufferLength = targetLength; - target.PictureBuffer = Marshal.AllocHGlobal(target.PictureBufferLength); - } - + target.EnsureAllocated(source, Constants.Video.VideoPixelFormat); + var targetStride = new int[] { target.PictureBufferStride }; var targetScan = default(byte_ptrArray8); targetScan[0] = (byte*)target.PictureBuffer; // The scaling is done here - var outputHeight = ffmpeg.sws_scale(Scaler, source.Pointer->data, source.Pointer->linesize, 0, source.Pointer->height, targetScan, targetStride); + var outputHeight = ffmpeg.sws_scale( + Scaler, + source.Pointer->data, + source.Pointer->linesize, + 0, + source.Pointer->height, + targetScan, + targetStride); + // After scaling, we need to copy and guess some of the block properties // Flag the block if we have to target.IsStartTimeGuessed = source.HasValidStartTime == false; @@ -215,14 +206,12 @@ public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock out target.SmtpeTimecode = source.SmtpeTimecode; } + // Fill out other properties target.CodedPictureNumber = source.CodedPictureNumber; - target.StreamIndex = input.StreamIndex; + 
target.StreamIndex = source.StreamIndex; target.ClosedCaptions = new ReadOnlyCollection(source.ClosedCaptions); - target.BufferStride = targetStride[0]; - - target.PixelHeight = source.Pointer->height; - target.PixelWidth = source.Pointer->width; + // Process the aspect ratio var aspectRatio = source.Pointer->sample_aspect_ratio; if (aspectRatio.num == 0 || aspectRatio.den == 0) { @@ -245,12 +234,14 @@ public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock out /// Create a managed fraome from an unmanaged one. protected override unsafe MediaFrame CreateFrameSource(ref AVFrame* frame) { - if (string.IsNullOrWhiteSpace(FilterString) == false) - InitializeFilterGraph(frame); - + // Move the frame from hardware (GPU) memory to RAM (CPU) if (HardwareAccelerator != null) frame = HardwareAccelerator.ExchangeFrame(CodecContext, frame, out IsUsingHardwareDecoding); + // Init the filtergraph for the frame + if (string.IsNullOrWhiteSpace(FilterString) == false) + InitializeFilterGraph(frame); + AVFrame* outputFrame; // TODO: (Floyd) Support real-time changes in Video Filtergraph by checking if MediaOptions.VideoFilterGraph has changed @@ -310,7 +301,7 @@ protected override void Dispose(bool alsoManaged) } DestroyFiltergraph(); - HardwareAccelerator?.DetachDevice(this); + HardwareAccelerator?.Release(); base.Dispose(alsoManaged); } diff --git a/Unosquare.FFME.Common/Decoding/VideoFrame.cs b/Unosquare.FFME.Common/Decoding/VideoFrame.cs index d43e9bbd8..d37af8c1d 100644 --- a/Unosquare.FFME.Common/Decoding/VideoFrame.cs +++ b/Unosquare.FFME.Common/Decoding/VideoFrame.cs @@ -38,7 +38,7 @@ internal VideoFrame(AVFrame* frame, MediaComponent component) // for video frames, we always get the best effort timestamp as dts and pts might // contain different times. 
- frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame); + frame->pts = frame->best_effort_timestamp; HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE; StartTime = frame->pts == ffmpeg.AV_NOPTS_VALUE ? diff --git a/Unosquare.FFME.Common/MediaEngine.Connector.cs b/Unosquare.FFME.Common/MediaEngine.Connector.cs index 3ec78b42f..6b69ffa82 100644 --- a/Unosquare.FFME.Common/MediaEngine.Connector.cs +++ b/Unosquare.FFME.Common/MediaEngine.Connector.cs @@ -52,13 +52,13 @@ internal Task SendOnMediaOpened() /// /// Raises the media initializing event. /// - /// The options. + /// The container configuration options. /// The URL. /// A representing the asynchronous operation. [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal Task SendOnMediaInitializing(StreamOptions options, string url) + internal Task SendOnMediaInitializing(ContainerConfiguration config, string url) { - return Connector != null ? Connector.OnMediaInitializing(this, options, url) : Task.CompletedTask; + return Connector != null ? 
Connector.OnMediaInitializing(this, config, url) : Task.CompletedTask; } /// diff --git a/Unosquare.FFME.Common/MediaEngine.Static.cs b/Unosquare.FFME.Common/MediaEngine.Static.cs index 04b65dab3..77fb17e9c 100644 --- a/Unosquare.FFME.Common/MediaEngine.Static.cs +++ b/Unosquare.FFME.Common/MediaEngine.Static.cs @@ -2,12 +2,20 @@ { using Core; using Decoding; + using FFmpeg.AutoGen; using Primitives; using Shared; + using System; using System.Collections.Generic; + using System.Collections.ObjectModel; + using System.Linq; public partial class MediaEngine { + #region Private Fields + + private static readonly string NotInitializedErrorMessage = $"{nameof(MediaEngine)} not initialized"; + /// /// The initialize lock /// @@ -28,6 +36,24 @@ public partial class MediaEngine /// private static int m_FFmpegLoadModeFlags = FFmpegLoadMode.FullFeatures; + private static ReadOnlyCollection m_InputFormatNames; + + private static ReadOnlyCollection m_GlobalInputFormatOptions; + + private static ReadOnlyDictionary> m_InputFormatOptions; + + private static ReadOnlyCollection m_DecoderNames; + + private static ReadOnlyCollection m_GlobalDecoderOptions; + + private static ReadOnlyDictionary> m_DecoderOptions; + + private static unsafe AVCodec*[] m_AllCodecs; + + #endregion + + #region Properties + /// /// Gets the platform-specific implementation requirements. /// @@ -69,6 +95,184 @@ public static int FFmpegLoadModeFlags } } + /// + /// Gets the registered FFmpeg input format names. + /// + /// When the MediaEngine has not been initialized + public static ReadOnlyCollection InputFormatNames + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_InputFormatNames == null) + m_InputFormatNames = new ReadOnlyCollection(FFInterop.RetrieveInputFormatNames()); + + return m_InputFormatNames; + } + } + } + + /// + /// Gets the global input format options information. 
+ /// + /// When the MediaEngine has not been initialized + public static ReadOnlyCollection InputFormatOptionsGlobal + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_GlobalInputFormatOptions == null) + { + m_GlobalInputFormatOptions = new ReadOnlyCollection( + FFInterop.RetrieveGlobalFormatOptions().ToArray()); + } + + return m_GlobalInputFormatOptions; + } + } + } + + /// + /// Gets the input format options. + /// + /// When the MediaEngine has not been initialized + public static ReadOnlyDictionary> InputFormatOptions + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_InputFormatOptions == null) + { + var result = new Dictionary>(InputFormatNames.Count); + foreach (var formatName in InputFormatNames) + { + var optionsInfo = FFInterop.RetrieveInputFormatOptions(formatName); + result[formatName] = new ReadOnlyCollection(optionsInfo); + } + + m_InputFormatOptions = new ReadOnlyDictionary>(result); + } + + return m_InputFormatOptions; + } + } + } + + /// + /// Gets the registered FFmpeg decoder codec names. 
+ /// + /// When the MediaEngine has not been initialized + public static unsafe ReadOnlyCollection DecoderNames + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_DecoderNames == null) + m_DecoderNames = new ReadOnlyCollection(FFInterop.RetrieveDecoderNames(AllCodecs)); + + return m_DecoderNames; + } + } + } + + /// + /// Gets the global options that apply to all decoders + /// + /// When the MediaEngine has not been initialized + public static ReadOnlyCollection DecoderOptionsGlobal + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_GlobalDecoderOptions == null) + { + m_GlobalDecoderOptions = new ReadOnlyCollection( + FFInterop.RetrieveGlobalCodecOptions().Where(o => o.IsDecodingOption).ToArray()); + } + + return m_GlobalDecoderOptions; + } + } + } + + /// + /// Gets the decoder specific options. + /// + /// When the MediaEngine has not been initialized + public static unsafe ReadOnlyDictionary> DecoderOptions + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_DecoderOptions == null) + { + var result = new Dictionary>(DecoderNames.Count); + foreach (var c in AllCodecs) + { + if (c->decode.Pointer == IntPtr.Zero) + continue; + + result[FFInterop.PtrToStringUTF8(c->name)] = + new ReadOnlyCollection(FFInterop.RetrieveCodecOptions(c)); + } + + m_DecoderOptions = new ReadOnlyDictionary>(result); + } + + return m_DecoderOptions; + } + } + } + + /// + /// Gets all registered encoder and decoder codecs. 
+ /// + /// When the MediaEngine has not been initialized + internal static unsafe AVCodec*[] AllCodecs + { + get + { + lock (InitLock) + { + if (IsIntialized == false) + throw new InvalidOperationException(NotInitializedErrorMessage); + + if (m_AllCodecs == null) + m_AllCodecs = FFInterop.RetriveCodecs(); + + return m_AllCodecs; + } + } + } + + #endregion + + #region Methods + /// /// Initializes the MedieElementCore. /// @@ -145,5 +349,7 @@ internal static MediaBlockBuffer LoadBlocks(string sourceUrl, MediaType sourceTy return result; } } + + #endregion } } diff --git a/Unosquare.FFME.Common/MediaEngine.Workers.Decoding.cs b/Unosquare.FFME.Common/MediaEngine.Workers.Decoding.cs index c26f03791..f9527580f 100644 --- a/Unosquare.FFME.Common/MediaEngine.Workers.Decoding.cs +++ b/Unosquare.FFME.Common/MediaEngine.Workers.Decoding.cs @@ -211,6 +211,11 @@ internal void RunFrameDecodingWorker() blocks = Blocks[t]; isInRange = blocks.IsInRange(wallClock); + // wait for component to get there if we only have furutre blocks + // in auxiliary component. + if (blocks.Count > 0 && blocks.RangeStartTime > wallClock) + continue; + // Invalidate the renderer if we don't have the block. if (isInRange == false) { @@ -218,11 +223,6 @@ internal void RunFrameDecodingWorker() Renderers[t].Seek(); } - // wait for component to get there if we only have furutre blocks - // in auxiliary component. 
- if (blocks.Count > 0 && blocks.RangeStartTime > wallClock) - continue; - // Try to catch up with the wall clock while (blocks.Count == 0 || blocks.RangeEndTime <= wallClock) { diff --git a/Unosquare.FFME.Common/MediaEngine.Workers.cs b/Unosquare.FFME.Common/MediaEngine.Workers.cs index 83a380357..eb15b817d 100644 --- a/Unosquare.FFME.Common/MediaEngine.Workers.cs +++ b/Unosquare.FFME.Common/MediaEngine.Workers.cs @@ -223,6 +223,20 @@ internal void PreloadSubtitles() try { m_PreloadedSubtitles = LoadBlocks(subtitlesUrl, MediaType.Subtitle, this); + + // Process and adjust subtitle delays if necessary + if (Container.MediaOptions.SubtitlesDelay != TimeSpan.Zero) + { + var delay = Container.MediaOptions.SubtitlesDelay; + for (var i = 0; i < m_PreloadedSubtitles.Count; i++) + { + var target = m_PreloadedSubtitles[i]; + target.StartTime = TimeSpan.FromTicks(target.StartTime.Ticks + delay.Ticks); + target.EndTime = TimeSpan.FromTicks(target.EndTime.Ticks + delay.Ticks); + target.Duration = TimeSpan.FromTicks(target.EndTime.Ticks - target.StartTime.Ticks); + } + } + Container.MediaOptions.IsSubtitleDisabled = true; } catch (MediaContainerException mex) @@ -293,11 +307,11 @@ private int SendBlockToRenderer(MediaBlock block, TimeSpan clockPosition) // Send the block to its corresponding renderer Renderers[block.MediaType]?.Render(block, clockPosition); + LastRenderTime[block.MediaType] = block.StartTime; // Extension method for logging var blockIndex = Blocks.ContainsKey(block.MediaType) ? 
Blocks[block.MediaType].IndexOf(clockPosition) : 0; this.LogRenderBlock(block, clockPosition, blockIndex); - LastRenderTime[block.MediaType] = block.StartTime; return 1; } diff --git a/Unosquare.FFME.Common/Primitives/MediaBlockBuffer.cs b/Unosquare.FFME.Common/Primitives/MediaBlockBuffer.cs index 16badff58..5a4aa49a0 100644 --- a/Unosquare.FFME.Common/Primitives/MediaBlockBuffer.cs +++ b/Unosquare.FFME.Common/Primitives/MediaBlockBuffer.cs @@ -255,8 +255,11 @@ public MediaBlock Next(MediaBlock current) { using (Locker.AcquireReaderLock()) { - var currentIndex = PlaybackBlocks.IndexOf(current); - if (currentIndex < 0) return null; + var currentIndex = current == null && PlaybackBlocks.Count > 0 ? + 0 : PlaybackBlocks.IndexOf(current); + + if (currentIndex < 0) + return null; if (currentIndex + 1 < PlaybackBlocks.Count) return PlaybackBlocks[currentIndex + 1]; diff --git a/Unosquare.FFME.Common/Shared/ContainerConfiguration.cs b/Unosquare.FFME.Common/Shared/ContainerConfiguration.cs new file mode 100644 index 000000000..9ecd751ef --- /dev/null +++ b/Unosquare.FFME.Common/Shared/ContainerConfiguration.cs @@ -0,0 +1,67 @@ +namespace Unosquare.FFME.Shared +{ + using System; + using System.Collections.Generic; + using System.Collections.ObjectModel; + + /// + /// Represents a set of options that are used to initialize a media container before opening the stream. + /// This includes both, demuxer and decoder options. + /// + public sealed class ContainerConfiguration + { + /// + /// The scan all PMTS private option name + /// + internal const string ScanAllPmts = "scan_all_pmts"; + + /// + /// Initializes a new instance of the class. + /// + internal ContainerConfiguration() + { + // placeholder + } + + /// + /// Gets or sets the forced input format. If let null or empty, + /// the input format will be selected automatically. + /// + public string ForcedInputFormat { get; set; } + + /// + /// Gets the protocol prefix. 
+ /// Typically async for local files and empty for other types. + /// + public string ProtocolPrefix { get; set; } = null; + + /// + /// Gets or sets the amount of time to wait for a an open or read + /// operation to complete before it times out. It is 30 seconds by default + /// + public TimeSpan ReadTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Contains global options for the demuxer. For additional info + /// please see: https://ffmpeg.org/ffmpeg-formats.html#Format-Options + /// + public DemuxerGlobalOptions GlobalOptions { get; } = new DemuxerGlobalOptions(); + + /// + /// Contains private demuxer options. For additional info + /// please see: https://ffmpeg.org/ffmpeg-all.html#Demuxers + /// + public Dictionary PrivateOptions { get; } = + new Dictionary(512, StringComparer.InvariantCultureIgnoreCase); + + /// + /// Gets a list of global options for the demuxers. These options apply to all demuxers. + /// + public ReadOnlyCollection GlobalOptionsMeta => MediaEngine.InputFormatOptionsGlobal; + + /// + /// Retrieves a dictionary of private options available to each of the registered demuxers. + /// + public ReadOnlyDictionary> PrivateOptionsMeta => MediaEngine.InputFormatOptions; + } +} diff --git a/Unosquare.FFME.Common/Shared/DecoderOptions.cs b/Unosquare.FFME.Common/Shared/DecoderOptions.cs new file mode 100644 index 000000000..e49d1ac1a --- /dev/null +++ b/Unosquare.FFME.Common/Shared/DecoderOptions.cs @@ -0,0 +1,145 @@ +namespace Unosquare.FFME.Shared +{ + using Core; + using System.Collections.Generic; + + /// + /// Represents decoder global and private options for all streams + /// See https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options + /// + public sealed class DecoderOptions + { + private readonly Dictionary GlobalOptions = new Dictionary(64); + private readonly Dictionary> PrivateOptions = new Dictionary>(); + + /// + /// Initializes a new instance of the class. 
+ /// + internal DecoderOptions() + { + Threads = "auto"; + } + + /// + /// Gets or sets a value indicating whether [enable low resource]. + /// In theroy this should be 0,1,2,3 for 1, 1/2, 1,4 and 1/8 resolutions. + /// TODO: We are for now just supporting 1/2 resolution (true value) + /// Port of lowres. + /// + public bool EnableLowRes { get; set; } = false; + + /// + /// Gets or sets a value indicating whether [enable fast decoding]. + /// Port of fast + /// + public bool EnableFastDecoding { get; set; } = false; + + /// + /// Enables low_delay flag for low latency streaming. + /// + public bool EnableLowDelay { get; set; } = false; + + /// + /// Gets or sets the threads. + /// + public string Threads + { + get => this[GlobalOptionNames.Threads]; + set => this[GlobalOptionNames.Threads] = value; + } + + /// + /// Gets or sets whether to use reference counted frames. + /// + public string RefCountedFrames + { + get => this[GlobalOptionNames.RefCountedFrames]; + set => this[GlobalOptionNames.RefCountedFrames] = value; + } + + /// + /// Gets or sets the index of the low resolution index. + /// + internal string LowResIndex + { + get => this[GlobalOptionNames.LowRes]; + set => this[GlobalOptionNames.LowRes] = value; + } + + /// + /// Gets or sets the specified global option. + /// See: https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options + /// + /// Name of the global option. + /// The value of the option + public string this[string globalOptionName] + { + get => GlobalOptions.ContainsKey(globalOptionName) ? GlobalOptions[globalOptionName] : null; + set => GlobalOptions[globalOptionName] = value; + } + + /// + /// Gets or sets the specified private option + /// See: https://www.ffmpeg.org/ffmpeg-codecs.html#toc-Decoders + /// + /// Index of the stream. + /// Name of the private option. 
+ /// The private option value + public string this[int streamIndex, string privateOptionName] + { + get + { + if (PrivateOptions.ContainsKey(streamIndex) == false) return null; + if (PrivateOptions[streamIndex].ContainsKey(privateOptionName) == false) return null; + return PrivateOptions[streamIndex][privateOptionName]; + } + set + { + if (PrivateOptions.ContainsKey(streamIndex) == false) + PrivateOptions[streamIndex] = new Dictionary(); + + PrivateOptions[streamIndex][privateOptionName] = value; + } + } + + /// + /// Gets the combined global and private stream codec options as a dictionary. + /// + /// Index of the stream. + /// An options dictionary + internal FFDictionary GetStreamCodecOptions(int streamIndex) + { + var result = new Dictionary(GlobalOptions); + if (PrivateOptions.ContainsKey(streamIndex)) + { + foreach (var kvp in PrivateOptions[streamIndex]) + { + result[kvp.Key] = kvp.Value; + } + } + + return new FFDictionary(result); + } + + /// + /// Well-known codec option names + /// + private static class GlobalOptionNames + { + /// + /// The threads + /// + public const string Threads = "threads"; + + /// + /// The reference counted frames + /// + public const string RefCountedFrames = "refcounted_frames"; + + /// + /// The low resource + /// + public const string LowRes = "lowres"; + } + } +} diff --git a/Unosquare.FFME.Common/Shared/StreamFormatOptions.cs b/Unosquare.FFME.Common/Shared/DemuxerGlobalOptions.cs similarity index 89% rename from Unosquare.FFME.Common/Shared/StreamFormatOptions.cs rename to Unosquare.FFME.Common/Shared/DemuxerGlobalOptions.cs index 1092fb216..711765bea 100644 --- a/Unosquare.FFME.Common/Shared/StreamFormatOptions.cs +++ b/Unosquare.FFME.Common/Shared/DemuxerGlobalOptions.cs @@ -3,16 +3,15 @@ using System; /// - /// Contains options for the format context as documented: - /// https://ffmpeg.org/ffmpeg-formats.html#Format-Options - /// TODO: (Floyd) There are still quite a bit of options that have not been implemented. 
+ /// The libavformat library provides some generic global options, which can be set on all the muxers and demuxers. + /// For additional information, please see: https://ffmpeg.org/ffmpeg-formats.html#Format-Options /// - public sealed class StreamFormatOptions + public sealed class DemuxerGlobalOptions { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - internal StreamFormatOptions() + internal DemuxerGlobalOptions() { // placeholder } diff --git a/Unosquare.FFME.Common/Shared/HardwareDeviceInfo.cs b/Unosquare.FFME.Common/Shared/HardwareDeviceInfo.cs new file mode 100644 index 000000000..31215112f --- /dev/null +++ b/Unosquare.FFME.Common/Shared/HardwareDeviceInfo.cs @@ -0,0 +1,53 @@ +namespace Unosquare.FFME.Shared +{ + using FFmpeg.AutoGen; + + /// + /// Represents a hardware configuration pair of device and pixel format + /// + public sealed unsafe class HardwareDeviceInfo + { + /// + /// Initializes a new instance of the class. + /// + /// The source configuration. + internal HardwareDeviceInfo(AVCodecHWConfig* config) + { + DeviceType = config->device_type; + PixelFormat = config->pix_fmt; + DeviceTypeName = ffmpeg.av_hwdevice_get_type_name(DeviceType); + PixelFormatName = ffmpeg.av_get_pix_fmt_name(PixelFormat); + } + + /// + /// Gets the type of hardware device. + /// + public AVHWDeviceType DeviceType { get; } + + /// + /// Gets the name of the device type. + /// + public string DeviceTypeName { get; } + + /// + /// Gets the hardware output pixel format. + /// + public AVPixelFormat PixelFormat { get; } + + /// + /// Gets the name of the pixel format. + /// + public string PixelFormatName { get; } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. 
+ /// + public override string ToString() + { + return $"Device {DeviceTypeName}: {PixelFormatName}"; + } + } +} diff --git a/Unosquare.FFME.Common/Shared/IMediaConnector.cs b/Unosquare.FFME.Common/Shared/IMediaConnector.cs index 5bd3bc3b2..c1eab256b 100644 --- a/Unosquare.FFME.Common/Shared/IMediaConnector.cs +++ b/Unosquare.FFME.Common/Shared/IMediaConnector.cs @@ -12,10 +12,10 @@ public interface IMediaConnector /// Called when [media initializing]. /// /// The sender. - /// The stream options. + /// The container configuration options. /// The media URL. /// A representing the asynchronous operation. - Task OnMediaInitializing(MediaEngine sender, StreamOptions streamOptions, string mediaUrl); + Task OnMediaInitializing(MediaEngine sender, ContainerConfiguration config, string mediaUrl); /// /// Called when [media opening]. diff --git a/Unosquare.FFME.Common/Shared/MediaCodecOptions.cs b/Unosquare.FFME.Common/Shared/MediaCodecOptions.cs deleted file mode 100644 index 22058bcb8..000000000 --- a/Unosquare.FFME.Common/Shared/MediaCodecOptions.cs +++ /dev/null @@ -1,191 +0,0 @@ -namespace Unosquare.FFME.Shared -{ - using Core; - using Decoding; - using FFmpeg.AutoGen; - using System.Collections.Generic; - - /// - /// Represents a set of codec options associated with a stream specifier. - /// - public class MediaCodecOptions - { - #region Private Members - - /// - /// Holds the internal list of option items - /// - private readonly List Options = new List(); - - #endregion - - #region Constructor - - /// - /// Initializes a new instance of the class. - /// - public MediaCodecOptions() - { - // Placeholder - } - - #endregion - - #region Methods - - /// - /// Adds an option - /// - /// The key. - /// The value. - /// Type of the stream. - public void Add(string key, string value, char streamType) - { - var option = new CodecOption(new StreamSpecifier(CharToMediaType(streamType)), key, value); - Options.Add(option); - } - - /// - /// Adds an option - /// - /// The key. 
- /// The value. - /// Index of the stream. - public void Add(string key, string value, int streamIndex) - { - var option = new CodecOption(new StreamSpecifier(streamIndex), key, value); - Options.Add(option); - } - - /// - /// Adds an option - /// - /// The key. - /// The value. - /// Type of the stream. - /// Index of the stream. - public void Add(string key, string value, char streamType, int streamIndex) - { - var option = new CodecOption(new StreamSpecifier(CharToMediaType(streamType), streamIndex), key, value); - Options.Add(option); - } - - /// - /// Retrieves a dictionary with the options for the specified codec. - /// Port of filter_codec_opts - /// - /// The codec identifier. - /// The format. - /// The stream. - /// The codec. - /// The filtered options - internal unsafe FFDictionary FilterOptions(AVCodecID codecId, AVFormatContext* format, AVStream* stream, AVCodec* codec) - { - var result = new FFDictionary(); - - if (codec == null) - { - codec = (format->oformat != null) ? - ffmpeg.avcodec_find_encoder(codecId) : ffmpeg.avcodec_find_decoder(codecId); - } - - var codecClass = ffmpeg.avcodec_get_class(); - - var flags = format->oformat != null ? 
- ffmpeg.AV_OPT_FLAG_ENCODING_PARAM : ffmpeg.AV_OPT_FLAG_DECODING_PARAM; - - var streamType = (char)0; - - switch (stream->codecpar->codec_type) - { - case AVMediaType.AVMEDIA_TYPE_VIDEO: - streamType = 'v'; - flags |= ffmpeg.AV_OPT_FLAG_VIDEO_PARAM; - break; - case AVMediaType.AVMEDIA_TYPE_AUDIO: - streamType = 'a'; - flags |= ffmpeg.AV_OPT_FLAG_AUDIO_PARAM; - break; - case AVMediaType.AVMEDIA_TYPE_SUBTITLE: - streamType = 's'; - flags |= ffmpeg.AV_OPT_FLAG_SUBTITLE_PARAM; - break; - } - - foreach (var optionItem in Options) - { - // Inline port of check_stream_specifier - var matched = ffmpeg.avformat_match_stream_specifier(format, stream, optionItem.StreamSpecifier.ToString()) > 0; - if (matched == false) continue; - - if (ffmpeg.av_opt_find(&codecClass, optionItem.Key, null, flags, ffmpeg.AV_OPT_SEARCH_FAKE_OBJ) != null || codec == null - || (codec->priv_class != null && ffmpeg.av_opt_find(&codec->priv_class, optionItem.Key, null, flags, ffmpeg.AV_OPT_SEARCH_FAKE_OBJ) != null)) - { - result[optionItem.Key] = optionItem.Value; - } - else if (optionItem.StreamSpecifier.StreamSuffix[0] == streamType && ffmpeg.av_opt_find(&codecClass, optionItem.Key, null, flags, ffmpeg.AV_OPT_SEARCH_FAKE_OBJ) != null) - { - result[optionItem.Key] = optionItem.Value; - } - } - - return result; - } - - /// - /// Retrieves an array of dictionaries, one for each stream index - /// https://ffmpeg.org/ffplay.html#toc-Options - /// Port of setup_find_stream_info_opts. - /// - /// The format. - /// The options per stream - internal unsafe FFDictionary[] GetPerStreamOptions(AVFormatContext* format) - { - if (format->nb_streams == 0) - return null; - - var result = new FFDictionary[format->nb_streams]; - for (var i = 0; i < format->nb_streams; i++) - result[i] = FilterOptions(format->streams[i]->codecpar->codec_id, format, format->streams[i], null); - - return result; - } - - /// - /// Converts a character to a media type. - /// - /// The c. 
- /// The media type - private static MediaType CharToMediaType(char c) - { - if (c == 'v') return MediaType.Video; - if (c == 'a') return MediaType.Audio; - if (c == 's') return MediaType.Subtitle; - - return MediaType.None; - } - - #endregion - - /// - /// Well-known codec option names - /// - public static class Names - { - /// - /// The threads - /// - public const string Threads = "threads"; - - /// - /// The reference counted frames - /// - public const string RefCountedFrames = "refcounted_frames"; - - /// - /// The low resource - /// - public const string LowRes = "lowres"; - } - } -} diff --git a/Unosquare.FFME.Common/Shared/MediaInfo.cs b/Unosquare.FFME.Common/Shared/MediaInfo.cs index 6cd3eeda2..71f87a27a 100644 --- a/Unosquare.FFME.Common/Shared/MediaInfo.cs +++ b/Unosquare.FFME.Common/Shared/MediaInfo.cs @@ -183,6 +183,13 @@ private static List ExtractStreams(AVFormatContext* ic) TBC = 1d / s->codec->time_base.ToDouble(), }; + // Extract valid hardwar configurations + stream.HardwareDevices = new ReadOnlyCollection( + HardwareAccelerator.GetCompatibleDevices(stream.Codec)); + + stream.HardwareDecoders = new ReadOnlyCollection( + GetHardwareDecoders(stream.Codec)); + // TODO: I chose not to include Side data but I could easily do so // https://ffmpeg.org/doxygen/3.2/dump_8c_source.html // See function: dump_sidedata @@ -318,6 +325,33 @@ private static List ExtractPrograms(AVFormatContext* ic, ReadOnlyDi return result; } + /// + /// Gets the available hardware decoder codecs for the given codec id (codec family). + /// + /// The codec family. 
+ /// A list of hardware-enabled decoder codec names + private static List GetHardwareDecoders(AVCodecID codecFamily) + { + var result = new List(); + + foreach (var c in MediaEngine.AllCodecs) + { + if (ffmpeg.av_codec_is_decoder(c) == 0) + continue; + + if (c->id != codecFamily) + continue; + + if ((c->capabilities & ffmpeg.AV_CODEC_CAP_HARDWARE) != 0 + || (c->capabilities & ffmpeg.AV_CODEC_CAP_HYBRID) != 0) + { + result.Add(FFInterop.PtrToStringUTF8(c->name)); + } + } + + return result; + } + #endregion } @@ -493,6 +527,16 @@ public class StreamInfo /// public ReadOnlyDictionary Metadata { get; internal set; } + /// + /// Gets the compatible hardware device configurations for the stream's codec. + /// + public ReadOnlyCollection HardwareDevices { get; internal set; } + + /// + /// Gets a list of compatible hardware decoder names. + /// + public ReadOnlyCollection HardwareDecoders { get; internal set; } + /// /// Gets the language string from the stream's metadata. /// diff --git a/Unosquare.FFME.Common/Shared/MediaOptions.cs b/Unosquare.FFME.Common/Shared/MediaOptions.cs index 58c0f6a3c..b7a162b79 100644 --- a/Unosquare.FFME.Common/Shared/MediaOptions.cs +++ b/Unosquare.FFME.Common/Shared/MediaOptions.cs @@ -1,10 +1,13 @@ namespace Unosquare.FFME.Shared { - using FFmpeg.AutoGen; + using System; + using System.Collections.Generic; /// - /// Represetnts options that applied before initializing media components and their corresponding - /// codecs. Once the container has created the media components, changing these options will have no effect. + /// Represetnts options that applied creating the individual media stream components. + /// Once the container has created the media components, changing these options will have no effect. 
+ /// See: https://www.ffmpeg.org/ffmpeg-all.html#Main-options + /// Partly a port of https://github.com/FFmpeg/FFmpeg/blob/master/fftools/ffmpeg_opt.c /// public sealed class MediaOptions { @@ -14,36 +17,29 @@ internal MediaOptions() } /// - /// Gets the codec options. - /// Codec options are documented here: https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options - /// Port of codec_opts + /// Provides access to the global and per-stream decoder options + /// See https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options /// - public MediaCodecOptions CodecOptions { get; } = new MediaCodecOptions(); + public DecoderOptions DecoderParams { get; } = new DecoderOptions(); /// - /// Gets or sets a value indicating whether [enable low resource]. - /// In theroy this should be 0,1,2,3 for 1, 1/2, 1,4 and 1/8 resolutions. - /// TODO: We are for now just supporting 1/2 resolution (true value) - /// Port of lowres. + /// A dictionary of stream indexes and force decoder codec names. + /// This is equivalent to the -codec Main option. + /// See: https://www.ffmpeg.org/ffmpeg-all.html#Main-options (-codec option) /// - public bool EnableLowRes { get; set; } = false; + public Dictionary DecoderCodec { get; } = new Dictionary(32); /// - /// Gets or sets a value indicating whether [enable fast decoding]. - /// Port of fast + /// Gets or sets the amount of time to offset the subtitles by + /// This is an FFME-only property -- Not a port of ffmpeg. /// - public bool EnableFastDecoding { get; set; } = false; + public TimeSpan SubtitlesDelay { get; set; } = TimeSpan.Zero; /// - /// Enables low_delay flag for low latency streaming. + /// Use Stream's HardwareDevices property to get a list of + /// compatible hardware accelerators. /// - public bool EnableLowDelay { get; set; } = false; - - /// - /// Gets or sets a value indicating whether experimental hardware acceleration is enabled. - /// Defaults to false. This feature is experimental. 
- /// - public bool EnableHardwareAcceleration { get; set; } + public HardwareDeviceInfo VideoHardwareDevice { get; set; } /// /// Prevent reading from audio stream components. @@ -74,7 +70,7 @@ internal MediaOptions() /// /// Specifies a forced FPS value for the input video stream. /// - public AVRational? VideoForcedFps { get; set; } = null; + public double VideoForcedFps { get; set; } = default; /// /// Initially contains the best suitable video stream. diff --git a/Unosquare.FFME.Common/Shared/OptionMeta.cs b/Unosquare.FFME.Common/Shared/OptionMeta.cs new file mode 100644 index 000000000..088ca7d79 --- /dev/null +++ b/Unosquare.FFME.Common/Shared/OptionMeta.cs @@ -0,0 +1,143 @@ +namespace Unosquare.FFME.Shared +{ + using Core; + using FFmpeg.AutoGen; + + /// + /// Provides information about a named option for a demuxer or a codec. + /// + public sealed class OptionMeta + { + /// + /// Initializes a new instance of the class. + /// + /// The option. + internal unsafe OptionMeta(AVOption* option) + { + OptionType = option->type; + Flags = option->flags; + HelpText = FFInterop.PtrToStringUTF8(option->help); + Name = FFInterop.PtrToStringUTF8(option->name); + Min = option->min; + Max = option->max; + + // Default values + // DefaultString = FFInterop.PtrToStringUTF8(option->default_val.str); // TODO: This throws a memory violation for some reason + DefaultDouble = option->default_val.dbl; + DefaultLong = option->default_val.i64; + DefaultRational = option->default_val.q; + + // Flag Parsing + IsAudioOption = (option->flags & ffmpeg.AV_OPT_FLAG_AUDIO_PARAM) > 0; + IsBsfOption = (option->flags & ffmpeg.AV_OPT_FLAG_BSF_PARAM) > 0; + IsDecodingOption = (option->flags & ffmpeg.AV_OPT_FLAG_DECODING_PARAM) > 0; + IsEncodingOption = (option->flags & ffmpeg.AV_OPT_FLAG_ENCODING_PARAM) > 0; + IsExported = (option->flags & ffmpeg.AV_OPT_FLAG_EXPORT) > 0; + IsFilteringOption = (option->flags & ffmpeg.AV_OPT_FLAG_FILTERING_PARAM) > 0; + IsReadonly = (option->flags & 
ffmpeg.AV_OPT_FLAG_READONLY) > 0; + IsSubtitleOption = (option->flags & ffmpeg.AV_OPT_FLAG_SUBTITLE_PARAM) > 0; + IsVideoOption = (option->flags & ffmpeg.AV_OPT_FLAG_VIDEO_PARAM) > 0; + } + + /// + /// Gets the name. + /// + public string Name { get; } + + /// + /// Gets the flags. + /// + public int Flags { get; } + + /// + /// Gets a value indicating whether this instance is encoding option. + /// + public bool IsEncodingOption { get; } + + /// + /// Gets a value indicating whether this instance is decoding option. + /// + public bool IsDecodingOption { get; } + + /// + /// Gets a value indicating whether this instance is filtering option. + /// + public bool IsFilteringOption { get; } + + /// + /// Gets a value indicating whether this instance is audio option. + /// + public bool IsAudioOption { get; } + + /// + /// Gets a value indicating whether this instance is subtitle option. + /// + public bool IsSubtitleOption { get; } + + /// + /// Gets a value indicating whether this instance is video option. + /// + public bool IsVideoOption { get; } + + /// + /// Gets a value indicating whether this instance is BSF option. + /// + public bool IsBsfOption { get; } + + /// + /// Gets a value indicating whether this instance is readonly. + /// + public bool IsReadonly { get; } + + /// + /// Gets a value indicating whether this instance is exported. + /// + public bool IsExported { get; } + + /// + /// Gets the type of the option. + /// + public AVOptionType OptionType { get; } + + /// + /// Gets the default long. + /// + public long DefaultLong { get; } + + /// + /// Gets the default double. + /// + public double DefaultDouble { get; } + + /// + /// Gets the default rational. + /// + public AVRational DefaultRational { get; } + + /// + /// Gets the help text. + /// + public string HelpText { get; } + + /// + /// Gets the minimum. + /// + public double Min { get; } + + /// + /// Gets the maximum. 
+ /// + public double Max { get; } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return $"{Name} {OptionType.ToString().Replace("AV_OPT_TYPE_", string.Empty)}: {HelpText} "; + } + } +} diff --git a/Unosquare.FFME.Common/Shared/StreamInputOptions.cs b/Unosquare.FFME.Common/Shared/StreamInputOptions.cs deleted file mode 100644 index f9d68830b..000000000 --- a/Unosquare.FFME.Common/Shared/StreamInputOptions.cs +++ /dev/null @@ -1,50 +0,0 @@ -namespace Unosquare.FFME.Shared -{ - using System; - using System.Collections.Generic; - - /// - /// A dictionary containing generic input options for both: - /// Global Codec Options: https://www.ffmpeg.org/ffmpeg-all.html#Codec-Options - /// Demuxer-Private options: https://ffmpeg.org/ffmpeg-all.html#Demuxers - /// - public sealed class StreamInputOptions : Dictionary - { - /// - /// Initializes a new instance of the class. - /// - internal StreamInputOptions() - : base(512, StringComparer.InvariantCultureIgnoreCase) - { - // placeholder - } - - /// - /// Gets or sets the forced input format. If let null or empty, - /// the input format will be selected automatically. - /// - public string ForcedInputFormat { get; set; } - - /// - /// Gets or sets the amount of time to wait for a an open or read operation to complete. - /// - public TimeSpan ReadTimeout { get; set; } = TimeSpan.FromSeconds(30); - - /// - /// A collection of well-known demuxer-specific, non-global format options - /// TODO: (Floyd) Implement some of the more common names maybe? 
- /// - public static class Names - { - /// - /// mpegts - /// - public const string ScanAllPmts = "scan_all_pmts"; - - /// - /// The frame rate input setting - /// - public const string FrameRate = "framerate"; - } - } -} diff --git a/Unosquare.FFME.Common/Shared/StreamOptions.cs b/Unosquare.FFME.Common/Shared/StreamOptions.cs deleted file mode 100644 index 6a10e4a55..000000000 --- a/Unosquare.FFME.Common/Shared/StreamOptions.cs +++ /dev/null @@ -1,35 +0,0 @@ -namespace Unosquare.FFME.Shared -{ - /// - /// Represents a set of options that are used to initialize a media container before opening the stream. - /// - public sealed class StreamOptions - { - /// - /// Initializes a new instance of the class. - /// - internal StreamOptions() - { - // placeholder - } - - /// - /// Contains options for the format context as documented: - /// https://ffmpeg.org/ffmpeg-formats.html#Format-Options - /// - public StreamFormatOptions Format { get; } = new StreamFormatOptions(); - - /// - /// A dictionary containing generic input options for both: - /// Global Codec Options: https://www.ffmpeg.org/ffmpeg-all.html#Codec-Options - /// Demuxer-Private Options: https://ffmpeg.org/ffmpeg-all.html#Demuxers - /// - public StreamInputOptions Input { get; } = new StreamInputOptions(); - - /// - /// Gets the protocol prefix. - /// Typically async for local files and empty for other types. - /// - public string ProtocolPrefix { get; set; } = null; - } -} diff --git a/Unosquare.FFME.Common/Shared/VideoBlock.cs b/Unosquare.FFME.Common/Shared/VideoBlock.cs index 4bce2ac23..867a5c117 100644 --- a/Unosquare.FFME.Common/Shared/VideoBlock.cs +++ b/Unosquare.FFME.Common/Shared/VideoBlock.cs @@ -1,9 +1,10 @@ namespace Unosquare.FFME.Shared { using ClosedCaptions; + using Decoding; + using FFmpeg.AutoGen; using System; using System.Collections.ObjectModel; - using System.Runtime.InteropServices; /// /// A pre-allocated, scaled video block. 
The buffer is in BGR, 24-bit format @@ -44,17 +45,17 @@ public sealed class VideoBlock : MediaBlock, IDisposable /// The picture buffer stride. /// Pixel Width * 32-bit color (4 byes) + alignment (typically 0 for modern hw). /// - public int BufferStride { get; internal set; } + public int BufferStride => PictureBufferStride; /// /// Gets the number of horizontal pixels in the image. /// - public int PixelWidth { get; internal set; } + public int PixelWidth { get; private set; } /// /// Gets the number of vertical pixels in the image. /// - public int PixelHeight { get; internal set; } + public int PixelHeight { get; private set; } /// /// Gets the width of the aspect ratio. @@ -91,12 +92,17 @@ public sealed class VideoBlock : MediaBlock, IDisposable /// /// The picture buffer length of the last allocated buffer /// - internal int PictureBufferLength { get; set; } + internal int PictureBufferLength { get; private set; } /// /// Holds a reference to the last allocated buffer /// - internal IntPtr PictureBuffer { get; set; } + internal IntPtr PictureBuffer { get; private set; } + + /// + /// Gets the picture buffer stride. + /// + internal int PictureBufferStride { get; private set; } #endregion @@ -111,6 +117,46 @@ public override void Dispose() GC.SuppressFinalize(this); } + /// + /// Allocates a block of memory suitable for a picture buffer + /// and sets the corresponding properties. + /// + /// The source. + /// The pixel format. 
+ internal unsafe void EnsureAllocated(VideoFrame source, AVPixelFormat pixelFormat) + { + // Ensure proper allocation of the buffer + // If there is a size mismatch between the wanted buffer length and the existing one, + // then let's reallocate the buffer and set the new size (dispose of the existing one if any) + var targetLength = ffmpeg.av_image_get_buffer_size(pixelFormat, source.Pointer->width, source.Pointer->height, 1); + if (PictureBufferLength != targetLength) + { + Deallocate(); + PictureBuffer = new IntPtr(ffmpeg.av_malloc((uint)targetLength)); + PictureBufferLength = targetLength; + } + + // Update related properties + PictureBufferStride = ffmpeg.av_image_get_linesize(pixelFormat, source.Pointer->width, 0); + PixelWidth = source.Pointer->width; + PixelHeight = source.Pointer->height; + } + + /// + /// Deallocates the picture buffer and resets the related buffer properties + /// + private unsafe void Deallocate() + { + if (PictureBuffer == IntPtr.Zero) return; + + ffmpeg.av_free(PictureBuffer.ToPointer()); + PictureBuffer = IntPtr.Zero; + PictureBufferLength = 0; + PictureBufferStride = 0; + PixelWidth = 0; + PixelHeight = 0; + } + /// /// Releases unmanaged and - optionally - managed resources. 
/// @@ -124,13 +170,7 @@ private void Dispose(bool alsoManaged) // no code for managed dispose } - if (PictureBuffer != IntPtr.Zero) - { - Marshal.FreeHGlobal(PictureBuffer); - PictureBuffer = IntPtr.Zero; - PictureBufferLength = 0; - } - + Deallocate(); IsDisposed = true; } } diff --git a/Unosquare.FFME.Common/Unosquare.FFME.Common.csproj b/Unosquare.FFME.Common/Unosquare.FFME.Common.csproj index 7746d1003..134710ee7 100644 --- a/Unosquare.FFME.Common/Unosquare.FFME.Common.csproj +++ b/Unosquare.FFME.Common/Unosquare.FFME.Common.csproj @@ -4,7 +4,7 @@ netstandard2.0;net461 Unosquare.FFME - 3.4.2.250 + 4.0.2.250 Mario Di Vece and Contributors to the FFME project Unosquare SA de CV FFME Cross-Platform Media Engine @@ -21,8 +21,8 @@ true ..\StyleCop.Analyzers.ruleset false - 3.4.2.240 - 3.4.2.250 + 4.0.2.250 + 4.0.2.250 latest bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml @@ -34,7 +34,7 @@ - + diff --git a/Unosquare.FFME.MacOS.Sample/Unosquare.FFME.MacOS.Sample.csproj b/Unosquare.FFME.MacOS.Sample/Unosquare.FFME.MacOS.Sample.csproj index fa8640c0f..ce1bb8b89 100644 --- a/Unosquare.FFME.MacOS.Sample/Unosquare.FFME.MacOS.Sample.csproj +++ b/Unosquare.FFME.MacOS.Sample/Unosquare.FFME.MacOS.Sample.csproj @@ -58,8 +58,8 @@ - - ..\packages\FFmpeg.AutoGen.3.4.0.6\lib\netstandard2.0\FFmpeg.AutoGen.dll + + ..\packages\FFmpeg.AutoGen.4.0.0.2\lib\netstandard2.0\FFmpeg.AutoGen.dll @@ -87,7 +87,9 @@ - + + Designer + diff --git a/Unosquare.FFME.MacOS.Sample/packages.config b/Unosquare.FFME.MacOS.Sample/packages.config index 9fdc5bdef..90d1924bd 100644 --- a/Unosquare.FFME.MacOS.Sample/packages.config +++ b/Unosquare.FFME.MacOS.Sample/packages.config @@ -1,4 +1,4 @@  - + \ No newline at end of file diff --git a/Unosquare.FFME.MacOS/Platform/MacMediaConnector.cs b/Unosquare.FFME.MacOS/Platform/MacMediaConnector.cs index 7135bac9c..f82213432 100644 --- a/Unosquare.FFME.MacOS/Platform/MacMediaConnector.cs +++ b/Unosquare.FFME.MacOS/Platform/MacMediaConnector.cs @@ -49,7 
+49,7 @@ public Task OnMediaOpening(MediaEngine sender, MediaOptions options, MediaInfo m return Task.CompletedTask; } - public Task OnMediaInitializing(MediaEngine sender, StreamOptions options, string url) + public Task OnMediaInitializing(MediaEngine sender, ContainerConfiguration config, string url) { return Task.CompletedTask; } diff --git a/Unosquare.FFME.MacOS/Unosquare.FFME.MacOS.csproj b/Unosquare.FFME.MacOS/Unosquare.FFME.MacOS.csproj index a7fb40bc3..e2abfb6e5 100644 --- a/Unosquare.FFME.MacOS/Unosquare.FFME.MacOS.csproj +++ b/Unosquare.FFME.MacOS/Unosquare.FFME.MacOS.csproj @@ -52,8 +52,8 @@ - - ..\packages\FFmpeg.AutoGen.3.4.0.6\lib\netstandard2.0\FFmpeg.AutoGen.dll + + ..\packages\FFmpeg.AutoGen.4.0.0.2\lib\netstandard2.0\FFmpeg.AutoGen.dll @@ -79,7 +79,9 @@ - + + Designer + \ No newline at end of file diff --git a/Unosquare.FFME.MacOS/packages.config b/Unosquare.FFME.MacOS/packages.config index 9fdc5bdef..90d1924bd 100644 --- a/Unosquare.FFME.MacOS/packages.config +++ b/Unosquare.FFME.MacOS/packages.config @@ -1,4 +1,4 @@  - + \ No newline at end of file diff --git a/Unosquare.FFME.Windows.Sample/App.xaml.cs b/Unosquare.FFME.Windows.Sample/App.xaml.cs index aad0697c9..8d8214ffc 100644 --- a/Unosquare.FFME.Windows.Sample/App.xaml.cs +++ b/Unosquare.FFME.Windows.Sample/App.xaml.cs @@ -16,7 +16,7 @@ public App() : base() { // Change the default location of the ffmpeg binaries - // You can get the binaries here: https://ffmpeg.zeranoe.com/builds/win32/shared/ffmpeg-3.4.2-win32-shared.zip + // You can get the binaries here: https://ffmpeg.zeranoe.com/builds/win32/shared/ffmpeg-4.0-win32-shared.zip MediaElement.FFmpegDirectory = @"c:\ffmpeg"; // You can pick which FFmpeg binaries are loaded. 
See issue #28 diff --git a/Unosquare.FFME.Windows.Sample/MainWindow.MediaEvents.cs b/Unosquare.FFME.Windows.Sample/MainWindow.MediaEvents.cs index 72df965e7..a96841e63 100644 --- a/Unosquare.FFME.Windows.Sample/MainWindow.MediaEvents.cs +++ b/Unosquare.FFME.Windows.Sample/MainWindow.MediaEvents.cs @@ -6,6 +6,7 @@ using System; using System.Diagnostics; using System.Linq; + using System.Text; using System.Windows; using System.Windows.Controls; @@ -59,7 +60,7 @@ private void OnMediaFailed(object sender, ExceptionRoutedEventArgs e) #endregion - #region Media Stream Events + #region Media Stream Opening Event Handlers /// /// Handles the MediaInitializing event of the Media control. @@ -71,13 +72,13 @@ private void OnMediaInitializing(object sender, MediaInitializingRoutedEventArgs // An example of injecting input options for http/https streams if (e.Url.StartsWith("http://") || e.Url.StartsWith("https://")) { - e.Options.Input["user_agent"] = $"{typeof(StreamOptions).Namespace}/{typeof(StreamOptions).Assembly.GetName().Version}"; - e.Options.Input["headers"] = $"Referer:https://www.unosquare.com"; - e.Options.Input["multiple_requests"] = "1"; - e.Options.Input["reconnect"] = "1"; - e.Options.Input["reconnect_at_eof"] = "1"; - e.Options.Input["reconnect_streamed"] = "1"; - e.Options.Input["reconnect_delay_max"] = "10"; // in seconds + e.Configuration.PrivateOptions["user_agent"] = $"{typeof(ContainerConfiguration).Namespace}/{typeof(ContainerConfiguration).Assembly.GetName().Version}"; + e.Configuration.PrivateOptions["headers"] = $"Referer:https://www.unosquare.com"; + e.Configuration.PrivateOptions["multiple_requests"] = "1"; + e.Configuration.PrivateOptions["reconnect"] = "1"; + e.Configuration.PrivateOptions["reconnect_at_eof"] = "1"; + e.Configuration.PrivateOptions["reconnect_streamed"] = "1"; + e.Configuration.PrivateOptions["reconnect_delay_max"] = "10"; // in seconds } // Example of forcing tcp transport on rtsp feeds @@ -86,14 +87,14 @@ private void 
OnMediaInitializing(object sender, MediaInitializingRoutedEventArgs // TCP provides reliable communication while UDP does not if (e.Url.StartsWith("rtsp://")) { - e.Options.Input["rtsp_transport"] = "tcp"; - e.Options.Format.FlagNoBuffer = true; + e.Configuration.PrivateOptions["rtsp_transport"] = "tcp"; + e.Configuration.GlobalOptions.FlagNoBuffer = true; } // In realtime streams these settings can be used to reduce latency (see example from issue #152) - // e.Options.Format.FlagNoBuffer = true; - // e.Options.Format.ProbeSize = 8192; - // e.Options.Format.MaxAnalyzeDuration = System.TimeSpan.FromSeconds(1); + // e.Options.GlobalOptions.FlagNoBuffer = true; + // e.Options.GlobalOptions.ProbeSize = 8192; + // e.Options.GlobalOptions.MaxAnalyzeDuration = System.TimeSpan.FromSeconds(1); } /// @@ -103,6 +104,9 @@ private void OnMediaInitializing(object sender, MediaInitializingRoutedEventArgs /// The instance containing the event data. private void OnMediaOpening(object sender, MediaOpeningRoutedEventArgs e) { + // You can start off by adjusting subtitles delay + // e.Options.SubtitlesDelay = TimeSpan.FromSeconds(7); // See issue #216 + // Example of automatically side-loading SRT subs try { @@ -119,28 +123,61 @@ private void OnMediaOpening(object sender, MediaOpeningRoutedEventArgs e) // You can force video FPS if necessary // see: https://github.com/unosquare/ffmediaelement/issues/212 - // e.Options.VideoForcedFps = new AVRational { num = 25, den = 1 }; + // e.Options.VideoForcedFps = 25; - // An example of switching to a different stream + // An example of specifically selecting a playback stream var subtitleStreams = e.Info.Streams.Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_SUBTITLE).Select(kvp => kvp.Value); var englishSubtitleStream = subtitleStreams.FirstOrDefault(s => s.Language.StartsWith("en")); if (englishSubtitleStream != null) + { e.Options.SubtitleStream = englishSubtitleStream; + } - // The yadif filter deinterlaces the video; we check 
the field order if we need - // to deinterlace the video automatically - if (e.Options.VideoStream != null - && e.Options.VideoStream.FieldOrder != AVFieldOrder.AV_FIELD_PROGRESSIVE - && e.Options.VideoStream.FieldOrder != AVFieldOrder.AV_FIELD_UNKNOWN) + var videoStream = e.Options.VideoStream; + if (videoStream != null) { - e.Options.VideoFilter = "yadif"; + // Check if the video requires deinterlacing + var requiresDeinterlace = videoStream.FieldOrder != AVFieldOrder.AV_FIELD_PROGRESSIVE + && videoStream.FieldOrder != AVFieldOrder.AV_FIELD_UNKNOWN; - // When enabling HW acceleration, the filtering does not seem to get applied for some reason. - e.Options.EnableHardwareAcceleration = false; - } + // Hardware device priorities + var deviceCandidates = new AVHWDeviceType[] + { + AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA, + AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA, + AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2 + }; + + // Hardware device selection + if (videoStream.FPS <= 30) + { + foreach (var deviceType in deviceCandidates) + { + var accelerator = videoStream.HardwareDevices.FirstOrDefault(d => d.DeviceType == deviceType); + if (accelerator != null) + { + e.Options.VideoHardwareDevice = accelerator; + break; + } + } + } + + var videoFilter = new StringBuilder(); - // Experimetal HW acceleration support. Remove if not needed. - e.Options.EnableHardwareAcceleration = false; + // The yadif filter deinterlaces the video; we check the field order if we need + // to deinterlace the video automatically + if (requiresDeinterlace) + videoFilter.Append("yadif,"); + + // Scale down to maximum 1080p screen resolution. 
+ if (videoStream.PixelHeight > 1080) + { + // e.Options.VideoHardwareDevice = null; + videoFilter.Append($"scale=-1:1080,"); + } + + e.Options.VideoFilter = videoFilter.ToString().TrimEnd(','); + } // e.Options.AudioFilter = "aecho=0.8:0.9:1000:0.3"; // e.Options.AudioFilter = "chorus=0.5:0.9:50|60|40:0.4|0.32|0.3:0.25|0.4|0.3:2|2.3|1.3"; @@ -160,7 +197,7 @@ private void OnMediaOpened(object sender, RoutedEventArgs e) #endregion - #region Methods: Event Handlers + #region Other Media Event Handlers /// /// Handles the PositionChanged event of the Media control. @@ -174,4 +211,4 @@ private void OnMediaPositionChanged(object sender, PositionChangedRoutedEventArg #endregion } -} +} \ No newline at end of file diff --git a/Unosquare.FFME.Windows.Sample/MainWindow.xaml b/Unosquare.FFME.Windows.Sample/MainWindow.xaml index 28626f313..370231fb8 100644 --- a/Unosquare.FFME.Windows.Sample/MainWindow.xaml +++ b/Unosquare.FFME.Windows.Sample/MainWindow.xaml @@ -6,7 +6,7 @@ xmlns:ffme="clr-namespace:Unosquare.FFME;assembly=ffme.win" xmlns:controls="clr-namespace:Unosquare.FFME.Windows.Sample.Controls" mc:Ignorable="d" DataContext="{StaticResource ViewModel}" - WindowStartupLocation="CenterScreen" MinHeight="720" Height="0" MinWidth="1280" Width="0" + WindowStartupLocation="Manual" MinHeight="720" Height="0" MinWidth="1280" Width="0" Title="{Binding WindowTitle}" Background="Black"> diff --git a/Unosquare.FFME.Windows.Sample/MainWindow.xaml.cs b/Unosquare.FFME.Windows.Sample/MainWindow.xaml.cs index bd7d7d2bb..f0f4240d9 100644 --- a/Unosquare.FFME.Windows.Sample/MainWindow.xaml.cs +++ b/Unosquare.FFME.Windows.Sample/MainWindow.xaml.cs @@ -1,5 +1,6 @@ namespace Unosquare.FFME.Windows.Sample { + using Platform; using Shared; using System; using System.Linq; @@ -32,6 +33,14 @@ public partial class MainWindow : Window /// public MainWindow() { + // During runtime, let's hide the window. The loaded event handler will + // compute the final placement of our window. 
+ if (GuiContext.Current.IsInDesignTime == false) + { + Left = int.MinValue; + Top = int.MinValue; + } + // Load up WPF resources InitializeComponent(); @@ -153,14 +162,32 @@ private void OnWindowLoaded(object sender, RoutedEventArgs e) Loaded -= OnWindowLoaded; // Compute and Apply Sizing Properties - var presenter = VisualTreeHelper.GetParent(Content as UIElement) as ContentPresenter; - presenter.MinWidth = MinWidth; - presenter.MinHeight = MinHeight; - - SizeToContent = SizeToContent.WidthAndHeight; - MinWidth = ActualWidth; - MinHeight = ActualHeight; - SizeToContent = SizeToContent.Manual; + { + var presenter = VisualTreeHelper.GetParent(Content as UIElement) as ContentPresenter; + presenter.MinWidth = MinWidth; + presenter.MinHeight = MinHeight; + + SizeToContent = SizeToContent.WidthAndHeight; + MinWidth = ActualWidth; + MinHeight = ActualHeight; + SizeToContent = SizeToContent.Manual; + } + + // Place on secondary screen by default if there is one + { + var screenOffsetX = 0d; + var screenWidth = SystemParameters.PrimaryScreenWidth; + var screenHeight = SystemParameters.PrimaryScreenHeight; + + if (SystemParameters.VirtualScreenWidth != SystemParameters.FullPrimaryScreenWidth) + { + screenOffsetX = SystemParameters.PrimaryScreenWidth; + screenWidth = SystemParameters.VirtualScreenWidth - SystemParameters.PrimaryScreenWidth; + } + + Left = screenOffsetX + ((screenWidth - ActualWidth) / 2d); + Top = (SystemParameters.PrimaryScreenHeight - ActualHeight) / 2d; + } // Open a file if it is specified in the arguments var args = Environment.GetCommandLineArgs(); diff --git a/Unosquare.FFME.Windows.Sample/Properties/AssemblyInfo.cs b/Unosquare.FFME.Windows.Sample/Properties/AssemblyInfo.cs index 810ff6c7e..59b58c8ff 100644 --- a/Unosquare.FFME.Windows.Sample/Properties/AssemblyInfo.cs +++ b/Unosquare.FFME.Windows.Sample/Properties/AssemblyInfo.cs @@ -40,5 +40,5 @@ // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' 
as shown below: // [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("2018.4.25.*")] -[assembly: AssemblyFileVersion("3.4.250")] +[assembly: AssemblyVersion("2018.5.1.*")] +[assembly: AssemblyFileVersion("4.0.250")] diff --git a/Unosquare.FFME.Windows.Sample/Unosquare.FFME.Windows.Sample.csproj b/Unosquare.FFME.Windows.Sample/Unosquare.FFME.Windows.Sample.csproj index bae45626e..84a12e585 100644 --- a/Unosquare.FFME.Windows.Sample/Unosquare.FFME.Windows.Sample.csproj +++ b/Unosquare.FFME.Windows.Sample/Unosquare.FFME.Windows.Sample.csproj @@ -51,8 +51,8 @@ TRACE - - ..\packages\FFmpeg.AutoGen.3.4.0.6\lib\net45\FFmpeg.AutoGen.dll + + ..\packages\FFmpeg.AutoGen.4.0.0.2\lib\net45\FFmpeg.AutoGen.dll diff --git a/Unosquare.FFME.Windows.Sample/ViewModels/PlaylistViewModel.cs b/Unosquare.FFME.Windows.Sample/ViewModels/PlaylistViewModel.cs index 2fef8ae85..6fab7daf2 100644 --- a/Unosquare.FFME.Windows.Sample/ViewModels/PlaylistViewModel.cs +++ b/Unosquare.FFME.Windows.Sample/ViewModels/PlaylistViewModel.cs @@ -182,14 +182,18 @@ private void OnMediaOpened(object sender, System.Windows.RoutedEventArgs e) private void OnRenderingVideo(object sender, RenderingVideoEventArgs e) { if (HasTakenThumbnail) return; - var m = Root.App.MediaElement; - - if (m.HasMediaEnded || m.Position.TotalSeconds >= 3 || (m.NaturalDuration.HasTimeSpan && m.NaturalDuration.TimeSpan.TotalSeconds <= 3)) + GuiContext.Current.InvokeAsync(() => { - HasTakenThumbnail = true; - Entries.AddOrUpdateEntryThumbnail(m.Source.ToString(), e.Bitmap); - Entries.SaveEntries(); - } + var m = Root.App?.MediaElement; + if (m == null) return; + + if (m.HasMediaEnded || m.Position.TotalSeconds >= 3 || (m.NaturalDuration.HasTimeSpan && m.NaturalDuration.TimeSpan.TotalSeconds <= 3)) + { + HasTakenThumbnail = true; + Entries.AddOrUpdateEntryThumbnail(m.Source.ToString(), e.Bitmap); + Entries.SaveEntries(); + } + }); } } } diff --git a/Unosquare.FFME.Windows.Sample/packages.config 
b/Unosquare.FFME.Windows.Sample/packages.config index 8274ac6eb..8465f1d04 100644 --- a/Unosquare.FFME.Windows.Sample/packages.config +++ b/Unosquare.FFME.Windows.Sample/packages.config @@ -1,5 +1,5 @@  - + \ No newline at end of file diff --git a/Unosquare.FFME.Windows/Events/MediaInitializingRoutedEventArgs.cs b/Unosquare.FFME.Windows/Events/MediaInitializingRoutedEventArgs.cs index 5133b5595..10fde3863 100644 --- a/Unosquare.FFME.Windows/Events/MediaInitializingRoutedEventArgs.cs +++ b/Unosquare.FFME.Windows/Events/MediaInitializingRoutedEventArgs.cs @@ -14,19 +14,19 @@ public class MediaInitializingRoutedEventArgs : RoutedEventArgs /// /// The routed event. /// The source. - /// The options. + /// The container configuration options. /// The URL. - public MediaInitializingRoutedEventArgs(RoutedEvent routedEvent, object source, StreamOptions options, string url) + public MediaInitializingRoutedEventArgs(RoutedEvent routedEvent, object source, ContainerConfiguration config, string url) : base(routedEvent, source) { - Options = options; + Configuration = config; Url = url; } /// - /// Set or change the options before the media is opened. + /// Set or change the container configuration options before the media is opened. /// - public StreamOptions Options { get; } + public ContainerConfiguration Configuration { get; } /// /// Gets the URL. diff --git a/Unosquare.FFME.Windows/MediaElement.RoutedEvents.cs b/Unosquare.FFME.Windows/MediaElement.RoutedEvents.cs index f34e7c914..6a5572877 100644 --- a/Unosquare.FFME.Windows/MediaElement.RoutedEvents.cs +++ b/Unosquare.FFME.Windows/MediaElement.RoutedEvents.cs @@ -362,11 +362,11 @@ internal Task RaiseMediaOpeningEvent(MediaOptions options, MediaInfo mediaInfo) /// /// Raises the media opening event. /// - /// The options. + /// The container configuration options. /// The URL. /// A representing the asynchronous operation. 
[MethodImpl(MethodImplOptions.AggressiveInlining)] - internal Task RaiseMediaInitializingEvent(StreamOptions options, string url) + internal Task RaiseMediaInitializingEvent(ContainerConfiguration config, string url) { LogEventStart(MediaInitializingEvent); return GuiContext.Current.EnqueueInvoke(() => @@ -374,7 +374,7 @@ internal Task RaiseMediaInitializingEvent(StreamOptions options, string url) RaiseEvent(new MediaInitializingRoutedEventArgs( MediaInitializingEvent, this, - options, + config, url)); LogEventDone(MediaInitializingEvent); diff --git a/Unosquare.FFME.Windows/MediaElement.cs b/Unosquare.FFME.Windows/MediaElement.cs index 0ae558677..5deebcdf5 100644 --- a/Unosquare.FFME.Windows/MediaElement.cs +++ b/Unosquare.FFME.Windows/MediaElement.cs @@ -161,7 +161,7 @@ Uri IUriContext.BaseUri /// /// This is the image that holds video bitmaps /// - internal Image VideoView { get; } = new Image { Name = nameof(VideoView) }; + internal ImageHost VideoView { get; } = new ImageHost { Name = nameof(VideoView) }; /// /// Gets the closed captions view control. 
@@ -311,8 +311,8 @@ private void InitializeComponent() ContentGrid.VerticalAlignment = VerticalAlignment.Stretch; // Initialize dependency properties to those of the Video view - Stretch = VideoView.Stretch; - StretchDirection = VideoView.StretchDirection; + // Stretch = VideoView.Stretch; + // StretchDirection = VideoView.StretchDirection; // Add the child video view and bind the alignment properties BindProperty(VideoView, HorizontalAlignmentProperty, this, nameof(HorizontalAlignment), BindingMode.OneWay); @@ -383,21 +383,20 @@ private void InitializeComponent() ContentGrid.Children.Add(CaptionsView); // Display the control (or not) - if (WindowsPlatform.Instance.IsInDesignTime) + if (WindowsPlatform.Instance.IsInDesignTime == false) + { + // Setup the media engine and associated property updates worker + MediaCore = new MediaEngine(this, new WindowsMediaConnector(this)); + StartPropertyUpdatesWorker(); + } + else { - // Shows an FFmpeg image if we are in design-time var bitmap = Properties.Resources.FFmpegMediaElementBackground; var bitmapSource = Imaging.CreateBitmapSourceFromHBitmap( bitmap.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions()); var controlBitmap = new WriteableBitmap(bitmapSource); VideoView.Source = controlBitmap; } - else - { - // Setup the media engine and associated property updates worker - MediaCore = new MediaEngine(this, new WindowsMediaConnector(this)); - StartPropertyUpdatesWorker(); - } } #endregion diff --git a/Unosquare.FFME.Windows/Platform/WindowsMediaConnector.cs b/Unosquare.FFME.Windows/Platform/WindowsMediaConnector.cs index ee05d7d8d..7bc61e590 100644 --- a/Unosquare.FFME.Windows/Platform/WindowsMediaConnector.cs +++ b/Unosquare.FFME.Windows/Platform/WindowsMediaConnector.cs @@ -158,12 +158,12 @@ public Task OnMediaOpening(MediaEngine sender, MediaOptions options, MediaInfo m /// Called when [media initializing]. /// /// The sender. - /// The options. + /// The container configuration options. 
/// The URL. /// A representing the asynchronous operation. - public Task OnMediaInitializing(MediaEngine sender, StreamOptions options, string url) + public Task OnMediaInitializing(MediaEngine sender, ContainerConfiguration config, string url) { - return Parent != null ? Parent.RaiseMediaInitializingEvent(options, url) : Task.CompletedTask; + return Parent != null ? Parent.RaiseMediaInitializingEvent(config, url) : Task.CompletedTask; } /// diff --git a/Unosquare.FFME.Windows/Properties/AssemblyInfo.cs b/Unosquare.FFME.Windows/Properties/AssemblyInfo.cs index 67bd1d18f..818c03e66 100644 --- a/Unosquare.FFME.Windows/Properties/AssemblyInfo.cs +++ b/Unosquare.FFME.Windows/Properties/AssemblyInfo.cs @@ -31,5 +31,5 @@ // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("2018.4.25.*")] -[assembly: AssemblyFileVersion("3.4.250")] +[assembly: AssemblyVersion("2018.5.1.*")] +[assembly: AssemblyFileVersion("4.0.250")] diff --git a/Unosquare.FFME.Windows/Rendering/ElementHostBase.cs b/Unosquare.FFME.Windows/Rendering/ElementHostBase.cs new file mode 100644 index 000000000..87070e7e8 --- /dev/null +++ b/Unosquare.FFME.Windows/Rendering/ElementHostBase.cs @@ -0,0 +1,413 @@ +namespace Unosquare.FFME.Rendering +{ + using System; + using System.Collections; + using System.ComponentModel; + using System.Runtime.CompilerServices; + using System.Threading; + using System.Windows; + using System.Windows.Media; + using System.Windows.Threading; + + /// + /// Provides a base class for a framework element that is capable of + /// being hosted on its own dispatcher. This allows for multithreaded + /// UI composition. 
+ /// + /// The contained framework element + /// + internal abstract class ElementHostBase : FrameworkElement + where T : FrameworkElement + { + /// + /// The thread separated control loaded event + /// + public static readonly RoutedEvent ElementLoadedEvent = EventManager.RegisterRoutedEvent( + nameof(ElementLoaded), + RoutingStrategy.Bubble, + typeof(RoutedEventHandler), + typeof(ElementHostBase)); + + /// + /// Initializes a new instance of the class. + /// + /// if set to true, it creates its own separate thread and associated dispatcher + protected ElementHostBase(bool hasOwnDispatcher) + { + var isInDesignMode = DesignerProperties.GetIsInDesignMode(this); + HasOwnDispatcher = isInDesignMode ? false : hasOwnDispatcher; + } + + /// + /// Occurs when the thread separated control loads. + /// + public event RoutedEventHandler ElementLoaded + { + add { AddHandler(ElementLoadedEvent, value); } + remove { RemoveHandler(ElementLoadedEvent, value); } + } + + /// + /// Gets a value indicating whether this instance is running on its own dispatcher. + /// + public bool HasOwnDispatcher { get; } + + /// + /// Gets the dispatcher this element is hosted on. + /// + public Dispatcher ElementDispatcher { get; private set; } + + /// + /// Provides access to the framework element hosted within this element + /// + public T Element { get; private set; } + + /// + /// Gets the host visual. This becomes the root element of this control + /// that glues the presentation source running on a different dispatcher + /// to the main UI dispatcher. + /// + protected HostVisual Host { get; private set; } + + /// + /// Gets the number of visual child elements within this element. + /// + protected override int VisualChildrenCount + { + get + { + if (HasOwnDispatcher) + return Host == null ? 0 : 1; + else + return Element == null ? 0 : 1; + } + } + + /// + /// Gets an enumerator for logical child elements of this element. 
+ /// + protected override IEnumerator LogicalChildren + { + get + { + if (HasOwnDispatcher) + { + if (Host != null) + yield return Host; + } + else + { + if (Element != null) + yield return Element; + } + } + } + + /// + /// Gets or sets the presentation source which roots the visual elements on + /// the independent dispatcher. + /// + private HostedPresentationSource PresentationSource { get; set; } + + /// + /// Invokes the specified action on the hosted visual element's dispatcher. + /// + /// The action. + /// The awaitable operation + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public DispatcherOperation Invoke(Action action) + { + return Invoke(DispatcherPriority.Normal, action); + } + + /// + /// Invokes the specified action on the hosted visual element's dispatcher. + /// + /// The priority. + /// The action. + /// The awaitable operation + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public DispatcherOperation Invoke(DispatcherPriority priority, Action action) + { + if (ElementDispatcher == null || ElementDispatcher.HasShutdownStarted || ElementDispatcher.HasShutdownFinished) + return null; + + if (action == null) + return null; + + return ElementDispatcher?.BeginInvoke(action, priority); + } + + /// + /// Raises the event. This method is invoked whenever is set to true internally. + /// + /// The that contains the event data. 
+ protected override void OnInitialized(EventArgs e) + { + if (HasOwnDispatcher) + { + Host = new HostVisual(); + AddVisualChild(Host); + AddLogicalChild(Host); + Loaded += HandleLoadedEvent; + Unloaded += HandleUnloadedEvent; + LayoutUpdated += HandleLayoutUpdatedEvent; + } + else + { + ElementDispatcher = Dispatcher.CurrentDispatcher; + Element = CreateHostedElement(); + Element.Loaded += (sender, args) => + { + InvalidateMeasure(); + RaiseEvent(new RoutedEventArgs(ElementLoadedEvent, this)); + }; + + AddVisualChild(Element); + AddLogicalChild(Element); + } + + base.OnInitialized(e); + } + + /// + /// Overrides , and returns a child at the specified index from a collection of child elements. + /// + /// The zero-based index of the requested child element in the collection. + /// + /// The requested child element. This should not return null; if the provided index is out of range, an exception is thrown. + /// + protected override Visual GetVisualChild(int index) + { + if (HasOwnDispatcher) + return Host; + else + return Element; + } + + /// + /// Creates the element contained by this host + /// + /// An instance of the framework element to be hosted + protected abstract T CreateHostedElement(); + + /// + /// When overridden in a derived class, positions child elements and determines a size for a derived class. + /// + /// The final area within the parent that this element should use to arrange itself and its children. + /// + /// The actual size used. + /// + /// + protected override Size ArrangeOverride(Size finalSize) + { + if (HasOwnDispatcher) + { + Invoke(() => { Element?.Arrange(new Rect(finalSize)); }); + return finalSize; + } + + Element?.Arrange(new Rect(finalSize)); + return finalSize; + } + + /// + /// When overridden in a derived class, measures the size in layout required for child elements and determines a size for the -derived class. + /// + /// The available size that this element can give to child elements. 
Infinity can be specified as a value to indicate that the element will size to whatever content is available. + /// + /// The size that this element determines it needs during layout, based on its calculations of child element sizes. + /// + protected override Size MeasureOverride(Size availableSize) + { + if (HasOwnDispatcher) + { + return default; + } + + Element.Measure(availableSize); + return Element.DesiredSize; + } + + /// + /// Handles the unloaded event. + /// + /// The sender. + /// The instance containing the event data. + protected virtual void HandleUnloadedEvent(object sender, EventArgs e) + { + if (ElementDispatcher == null) + return; + + ElementDispatcher.InvokeShutdown(); + RemoveLogicalChild(Host); + RemoveVisualChild(Host); + ElementDispatcher = null; + Element = null; + } + + /// + /// Gets the element property. + /// + /// The property type + /// The property. + /// The value + protected V GetElementProperty(DependencyProperty property) + { + if (HasOwnDispatcher) + { + var result = default(V); + Invoke(() => { result = (V)Element.GetValue(property); }).Wait(); + return result; + } + + return (V)Element.GetValue(property); + } + + /// + /// Sets the element property. + /// + /// The value type + /// The property. + /// The value. + protected void SetElementProperty(DependencyProperty property, V value) + { + if (HasOwnDispatcher) + { + Invoke(() => { Element.SetValue(property, value); }); + return; + } + + Element.SetValue(property, value); + } + + /// + /// Handles the layout updated event. + /// + /// The sender. + /// The instance containing the event data. + private void HandleLayoutUpdatedEvent(object sender, EventArgs e) + { + Invoke(() => { Element?.Measure(DesiredSize); }); + } + + /// + /// Handles the loaded event. + /// + /// The sender. + /// The instance containing the event data. 
+ private void HandleLoadedEvent(object sender, RoutedEventArgs e) + { + Loaded -= HandleLoadedEvent; + + var doneCreating = new ManualResetEvent(false); + var thread = new Thread(() => + { + PresentationSource = new HostedPresentationSource(Host); + doneCreating.Set(); + Element = CreateHostedElement(); + PresentationSource.RootVisual = Element; + Dispatcher.Run(); + PresentationSource.Dispose(); + }); + + thread.SetApartmentState(ApartmentState.STA); + thread.IsBackground = true; + thread.Priority = ThreadPriority.Highest; + thread.Start(); + doneCreating.WaitOne(); + doneCreating.Dispose(); + + while (Dispatcher.FromThread(thread) == null) + { + Thread.Sleep(50); + } + + ElementDispatcher = Dispatcher.FromThread(thread); + Dispatcher.BeginInvoke(new Action(() => { InvalidateMeasure(); })); + RaiseEvent(new RoutedEventArgs(ElementLoadedEvent, this)); + } + + /// + /// A presentation source class to root a Visual (a HostVisual) on to its own visual tree + /// + /// + private sealed class HostedPresentationSource : PresentationSource, IDisposable + { + private readonly VisualTarget HostConnector; + private bool m_IsDisposed = false; + + /// + /// Initializes a new instance of the class. + /// + /// The host. + public HostedPresentationSource(HostVisual host) + { + HostConnector = new VisualTarget(host); + } + + /// + /// When overridden in a derived class, gets or sets the root visual being presented in the source. + /// + public override Visual RootVisual + { + get + { + return HostConnector.RootVisual; + } + set + { + var oldRoot = HostConnector.RootVisual; + + // Set the root visual of the VisualTarget. This visual will + // now be used to visually compose the scene. + HostConnector.RootVisual = value; + + // Tell the PresentationSource that the root visual has + // changed. This kicks off a bunch of stuff like the + // Loaded event. + RootChanged(oldRoot, value); + + // Kickoff layout... 
+ if (value is UIElement rootElement) + { + rootElement.Measure(new Size(double.PositiveInfinity, double.PositiveInfinity)); + rootElement.Arrange(new Rect(rootElement.DesiredSize)); + } + } + } + + /// + /// When overridden in a derived class, gets a value that declares whether the object is disposed. + /// + public override bool IsDisposed => m_IsDisposed; + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public void Dispose() => Dispose(true); + + /// + /// When overridden in a derived class, returns a visual target for the given source. + /// + /// + /// Returns a that is target for rendering the visual. + /// + protected override CompositionTarget GetCompositionTargetCore() => HostConnector; + + /// + /// Releases unmanaged and - optionally - managed resources. + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + private void Dispose(bool alsoManaged) + { + if (m_IsDisposed) return; + if (alsoManaged) + { + m_IsDisposed = true; + HostConnector.Dispose(); + } + } + } + } +} diff --git a/Unosquare.FFME.Windows/Rendering/ImageHost.cs b/Unosquare.FFME.Windows/Rendering/ImageHost.cs new file mode 100644 index 000000000..bd3a56637 --- /dev/null +++ b/Unosquare.FFME.Windows/Rendering/ImageHost.cs @@ -0,0 +1,132 @@ +namespace Unosquare.FFME.Rendering +{ + using System.Windows; + using System.Windows.Controls; + using System.Windows.Media; + + /// + /// Implements an Image control that is hosted on its own independent dispatcher + /// but maintains composability with the main UI. 
+ /// + internal sealed class ImageHost : ElementHostBase + { + #region Dependency Property Registrations + + /// + /// The source property + /// + public static readonly DependencyProperty SourceProperty = Image.SourceProperty.AddOwner(typeof(ImageHost)); + + /// + /// The stretch property + /// + public static readonly DependencyProperty StretchProperty = Image.StretchProperty.AddOwner(typeof(ImageHost)); + + /// + /// The stretch direction property + /// + public static readonly DependencyProperty StretchDirectionProperty = Image.StretchDirectionProperty.AddOwner(typeof(ImageHost)); + + /// + /// The horizontal alignment property + /// + public static new readonly DependencyProperty HorizontalAlignmentProperty = FrameworkElement.HorizontalAlignmentProperty.AddOwner(typeof(ImageHost)); + + /// + /// The vertical alignment property + /// + public static new readonly DependencyProperty VerticalAlignmentProperty = FrameworkElement.VerticalAlignmentProperty.AddOwner(typeof(ImageHost)); + + /// + /// The layout transform property + /// + public static new readonly DependencyProperty LayoutTransformProperty = FrameworkElement.LayoutTransformProperty.AddOwner(typeof(ImageHost)); + + #endregion + + /// + /// Initializes a new instance of the class. + /// + public ImageHost() + : base(true) + { + // placeholder + } + + public ImageHost(bool hasOwnDispatcher) + : base(hasOwnDispatcher) + { + // placeholder + } + + /// + /// Gets or sets the source. + /// + public ImageSource Source + { + get => GetElementProperty(SourceProperty); + set => SetElementProperty(SourceProperty, value); + } + + /// + /// Gets or sets the stretch. + /// + public Stretch Stretch + { + get => GetElementProperty(StretchProperty); + set => SetElementProperty(StretchProperty, value); + } + + /// + /// Gets or sets the stretch direction. 
+ /// + public StretchDirection StretchDirection + { + get => GetElementProperty(StretchDirectionProperty); + set => SetElementProperty(StretchDirectionProperty, value); + } + + /// + /// Gets or sets the horizontal alignment characteristics applied to this element when it is composed within a parent element, such as a panel or items control. + /// + public new HorizontalAlignment HorizontalAlignment + { + get => GetElementProperty(HorizontalAlignmentProperty); + set => SetElementProperty(HorizontalAlignmentProperty, value); + } + + /// + /// Gets or sets the vertical alignment characteristics applied to this element when it is composed within a parent element such as a panel or items control. + /// + public new VerticalAlignment VerticalAlignment + { + get => GetElementProperty(VerticalAlignmentProperty); + set => SetElementProperty(VerticalAlignmentProperty, value); + } + + /// + /// Gets or sets a graphics transformation that should apply to this element when layout is performed. + /// + public new Transform LayoutTransform + { + get => GetElementProperty(LayoutTransformProperty); + set => SetElementProperty(LayoutTransformProperty, value); + } + + /// + /// Creates the element contained by this host + /// + /// + /// An instance of the framework element to be hosted + /// + protected override Image CreateHostedElement() + { + var control = new Image(); + control.BeginInit(); + control.HorizontalAlignment = HorizontalAlignment.Stretch; + control.VerticalAlignment = VerticalAlignment.Stretch; + control.EndInit(); + return control; + } + } +} diff --git a/Unosquare.FFME.Windows/Rendering/SubtitleRenderer.cs b/Unosquare.FFME.Windows/Rendering/SubtitleRenderer.cs index 0918f2878..ef2516f40 100644 --- a/Unosquare.FFME.Windows/Rendering/SubtitleRenderer.cs +++ b/Unosquare.FFME.Windows/Rendering/SubtitleRenderer.cs @@ -109,15 +109,25 @@ public void Render(MediaBlock mediaBlock, TimeSpan clockPosition) var subtitleBlock = mediaBlock as SubtitleBlock; if (subtitleBlock 
== null) return; - // Save the start and end times. We will need - // them in order to make the subtitles disappear - StartTime = subtitleBlock.StartTime; - EndTime = subtitleBlock.EndTime; - // Raise the subtitles event and keep track of the text. - BlockText = MediaElement.RaiseRenderingSubtitlesEvent(subtitleBlock, clockPosition) - ? string.Empty - : string.Join("\r\n", subtitleBlock.Text); + var cancelRender = MediaElement.RaiseRenderingSubtitlesEvent(subtitleBlock, clockPosition); + + if (cancelRender) + { + BlockText = string.Empty; + StartTime = null; + EndTime = null; + } + else + { + // Save the block text lines to display + BlockText = string.Join("\r\n", subtitleBlock.Text); + + // Save the start and end times. We will need + // them in order to make the subtitles disappear + StartTime = subtitleBlock.StartTime; + EndTime = subtitleBlock.EndTime; + } // Call the selective update method Update(clockPosition); diff --git a/Unosquare.FFME.Windows/Rendering/VideoRenderer.cs b/Unosquare.FFME.Windows/Rendering/VideoRenderer.cs index b249e3a81..fc968a710 100644 --- a/Unosquare.FFME.Windows/Rendering/VideoRenderer.cs +++ b/Unosquare.FFME.Windows/Rendering/VideoRenderer.cs @@ -177,8 +177,13 @@ public void Render(MediaBlock mediaBlock, TimeSpan clockPosition) // Flag the start of a rendering cycle IsRenderingInProgress.Value = true; + // Send the block to the captions renderer + if (block.ClosedCaptions.Count > 0) + GuiContext.Current.EnqueueInvoke(() => MediaElement.CaptionsView.RenderPacket(block, MediaCore)); + // Ensure the target bitmap can be loaded - GuiContext.Current.EnqueueInvoke(DispatcherPriority.Render, () => + // GuiContext.Current.EnqueueInvoke(DispatcherPriority.Render, () => + MediaElement.VideoView.Invoke(DispatcherPriority.Render, () => { if (block.IsDisposed) { @@ -188,17 +193,21 @@ public void Render(MediaBlock mediaBlock, TimeSpan clockPosition) try { - MediaElement.CaptionsView.RenderPacket(block, MediaCore); - var bitmapData = 
LockTargetBitmap(block); if (bitmapData != null) { LoadTargetBitmapBuffer(bitmapData, block); MediaElement.RaiseRenderingVideoEvent(block, bitmapData, clockPosition); RenderTargetBitmap(block, bitmapData, clockPosition); + ApplyLayoutTransforms(block); } } - catch { /* swallow */ } + catch (Exception ex) + { + MediaElement?.MediaCore?.Log( + MediaLogMessageType.Error, + $"{nameof(VideoRenderer)} {ex.GetType()}: {nameof(Render)} failed. {ex.Message}."); + } finally { IsRenderingInProgress.Value = false; @@ -236,7 +245,6 @@ private void RenderTargetBitmap(VideoBlock block, BitmapDataBuffer bitmapData, T // Signal an update on the rendering surface TargetBitmap?.AddDirtyRect(bitmapData.UpdateRect); TargetBitmap?.Unlock(); - ApplyLayoutTransforms(block); } catch (Exception ex) { @@ -260,8 +268,8 @@ private BitmapDataBuffer LockTargetBitmap(VideoBlock block) BitmapDataBuffer result = null; // Skip the locking if scrubbing is not enabled - if (MediaElement.ScrubbingEnabled == false && (MediaElement.IsPlaying == false || MediaElement.IsSeeking)) - return result; + // if (MediaElement.ScrubbingEnabled == false && (MediaElement.IsPlaying == false || MediaElement.IsSeeking)) + // return result; // Figure out what we need to do var needsCreation = TargetBitmap == null && MediaElement.HasVideo; diff --git a/Unosquare.FFME.Windows/Unosquare.FFME.Windows.csproj b/Unosquare.FFME.Windows/Unosquare.FFME.Windows.csproj index e0712d34b..301bc661e 100644 --- a/Unosquare.FFME.Windows/Unosquare.FFME.Windows.csproj +++ b/Unosquare.FFME.Windows/Unosquare.FFME.Windows.csproj @@ -40,8 +40,8 @@ - - ..\packages\FFmpeg.AutoGen.3.4.0.6\lib\net45\FFmpeg.AutoGen.dll + + ..\packages\FFmpeg.AutoGen.4.0.0.2\lib\net45\FFmpeg.AutoGen.dll @@ -66,6 +66,8 @@ + + @@ -111,7 +113,9 @@ - + + Designer + diff --git a/Unosquare.FFME.Windows/packages.config b/Unosquare.FFME.Windows/packages.config index f864ac337..d1b29aebd 100644 --- a/Unosquare.FFME.Windows/packages.config +++ 
b/Unosquare.FFME.Windows/packages.config @@ -1,5 +1,5 @@  - + \ No newline at end of file