diff --git a/src/org/jitsi/examples/AVReceive2.java b/src/org/jitsi/examples/AVReceive2.java index 68265b0a5..44245b567 100644 --- a/src/org/jitsi/examples/AVReceive2.java +++ b/src/org/jitsi/examples/AVReceive2.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,393 +13,393 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.examples; - -import java.io.*; -import java.net.*; -import java.util.*; - -import org.jitsi.service.libjitsi.*; -import org.jitsi.service.neomedia.*; -import org.jitsi.service.neomedia.device.*; -import org.jitsi.service.neomedia.format.*; - -/** - * Implements an example application in the fashion of JMF's AVReceive2 example - * which demonstrates the use of the libjitsi library for the purposes - * of receiving audio and video via RTP means. - * - * @author Lyubomir Marinov - */ -public class AVReceive2 -{ - /** - * The port which is the target of the transmission i.e. on which the media - * is to be received. - * - * @see #LOCAL_PORT_BASE_ARG_NAME - */ - private int localPortBase; - - /** - * The MediaStream instances initialized by this instance indexed - * by their respective MediaType ordinal. - */ - private MediaStream[] mediaStreams; - - /** - * The InetAddress of the host which is the target of the receipt - * i.e. from which the media is to be received. - * - * @see #REMOTE_HOST_ARG_NAME - */ - private InetAddress remoteAddr; - - /** - * The port which is the target of the receipt i.e. from which the media is - * to be received. - * - * @see #REMOTE_PORT_BASE_ARG_NAME - */ - private int remotePortBase; - - /** - * Initializes a new AVReceive2 instance which is to receive audio - * and video from a specific host and a specific port. 
- * - * @param localPortBase the port on which the audio and video are to be - * received - * @param remoteHost the name of the host from which the media is - * transmitted - * @param remotePortBase the port from which the media is transmitted - * @throws Exception if any error arises during the parsing of the specified - * localPortBase, remoteHost and remotePortBase - */ - private AVReceive2( - String localPortBase, - String remoteHost, String remotePortBase) - throws Exception - { - this.localPortBase - = (localPortBase == null) - ? -1 - : Integer.valueOf(localPortBase).intValue(); - this.remoteAddr = InetAddress.getByName(remoteHost); - this.remotePortBase = Integer.valueOf(remotePortBase).intValue(); - } - - /** - * Initializes the receipt of audio and video. - * - * @return true if this instance has been successfully initialized - * to receive audio and video - * @throws Exception if anything goes wrong while initializing this instance - * for the receipt of audio and video - */ - private boolean initialize() - throws Exception - { - /* - * Prepare for the start of the transmission i.e. initialize the - * MediaStream instances. - */ - MediaType[] mediaTypes = MediaType.values(); - MediaService mediaService = LibJitsi.getMediaService(); - int localPort = localPortBase; - int remotePort = remotePortBase; - - mediaStreams = new MediaStream[mediaTypes.length]; - for (MediaType mediaType : mediaTypes) - { - /* - * The default MediaDevice (for a specific MediaType) is configured - * (by the user of the application via some sort of UI) into the - * ConfigurationService. If there is no ConfigurationService - * instance known to LibJitsi, the first available MediaDevice of - * the specified MediaType will be chosen by MediaService. 
- */ - MediaDevice device - = mediaService.getDefaultDevice(mediaType, MediaUseCase.CALL); - MediaStream mediaStream = mediaService.createMediaStream(device); - - // direction - /* - * The AVTransmit2 example sends only and the AVReceive2 receives - * only. In a call, the MediaStream's direction will most commonly - * be set to SENDRECV. - */ - mediaStream.setDirection(MediaDirection.RECVONLY); - - // format - String encoding; - double clockRate; - /* - * The AVTransmit2 and AVReceive2 examples use the H.264 video - * codec. Its RTP transmission has no static RTP payload type number - * assigned. - */ - byte dynamicRTPPayloadType; - - switch (device.getMediaType()) - { - case AUDIO: - encoding = "PCMU"; - clockRate = 8000; - /* PCMU has a static RTP payload type number assigned. */ - dynamicRTPPayloadType = -1; - break; - case VIDEO: - encoding = "H264"; - clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; - /* - * The dymanic RTP payload type numbers are usually negotiated - * in the signaling functionality. - */ - dynamicRTPPayloadType = 99; - break; - default: - encoding = null; - clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; - dynamicRTPPayloadType = -1; - } - - if (encoding != null) - { - MediaFormat format - = mediaService.getFormatFactory().createMediaFormat( - encoding, - clockRate); - - /* - * The MediaFormat instances which do not have a static RTP - * payload type number association must be explicitly assigned - * a dynamic RTP payload type number. 
- */ - if (dynamicRTPPayloadType != -1) - { - mediaStream.addDynamicRTPPayloadType( - dynamicRTPPayloadType, - format); - } - - mediaStream.setFormat(format); - } - - // connector - StreamConnector connector; - - if (localPortBase == -1) - { - connector = new DefaultStreamConnector(); - } - else - { - int localRTPPort = localPort++; - int localRTCPPort = localPort++; - - connector - = new DefaultStreamConnector( - new DatagramSocket(localRTPPort), - new DatagramSocket(localRTCPPort)); - } - mediaStream.setConnector(connector); - - // target - /* - * The AVTransmit2 and AVReceive2 examples follow the common - * practice that the RTCP port is right after the RTP port. - */ - int remoteRTPPort = remotePort++; - int remoteRTCPPort = remotePort++; - - mediaStream.setTarget( - new MediaStreamTarget( - new InetSocketAddress(remoteAddr, remoteRTPPort), - new InetSocketAddress(remoteAddr, remoteRTCPPort))); - - // name - /* - * The name is completely optional and it is not being used by the - * MediaStream implementation at this time, it is just remembered so - * that it can be retrieved via MediaStream#getName(). It may be - * integrated with the signaling functionality if necessary. - */ - mediaStream.setName(mediaType.toString()); - - mediaStreams[mediaType.ordinal()] = mediaStream; - } - - /* - * Do start the transmission i.e. start the initialized MediaStream - * instances. - */ - for (MediaStream mediaStream : mediaStreams) - if (mediaStream != null) - mediaStream.start(); - - return true; - } - - /** - * Close the MediaStreams. 
- */ - private void close() - { - if (mediaStreams != null) - { - for (int i = 0; i < mediaStreams.length; i++) - { - MediaStream mediaStream = mediaStreams[i]; - - if (mediaStream != null) - { - try - { - mediaStream.stop(); - } - finally - { - mediaStream.close(); - mediaStreams[i] = null; - } - } - } - - mediaStreams = null; - } - } - - /** - * The name of the command-line argument which specifies the port on which - * the media is to be received. The command-line argument value will be used - * as the port to receive the audio RTP on, the next port after it will be - * used to receive the audio RTCP on. Respectively, the subsequent ports - * ports will be used to transmit the video RTP and RTCP on." - */ - private static final String LOCAL_PORT_BASE_ARG_NAME - = "--local-port-base="; - - /** - * The name of the command-line argument which specifies the name of the - * host from which the media is to be received. - */ - private static final String REMOTE_HOST_ARG_NAME = "--remote-host="; - - /** - * The name of the command-line argument which specifies the port from which - * the media is to be received. The command-line argument value will be - * used as the port to receive the audio RTP from, the next port after it - * will be to receive the audio RTCP from. Respectively, the subsequent - * ports will be used to receive the video RTP and RTCP from." - */ - private static final String REMOTE_PORT_BASE_ARG_NAME - = "--remote-port-base="; - - /** - * The list of command-line arguments accepted as valid by the - * AVReceive2 application along with their human-readable usage - * descriptions. - */ - private static final String[][] ARGS - = { - { - LOCAL_PORT_BASE_ARG_NAME, - "The port on which media is to be received. The specified value" - + " will be used as the port to receive the audio RTP on," - + " the next port after it will be used to receive the" - + " audio RTCP on. 
Respectively, the subsequent ports will" - + " be used to receive the video RTP and RTCP on." - }, - { - REMOTE_HOST_ARG_NAME, - "The name of the host from which the media is to be received." - }, - { - REMOTE_PORT_BASE_ARG_NAME, - "The port from which media is to be received. The specified" - + " vaue will be used as the port to receive the audio RTP" - + " from, the next port after it will be used to receive" - + " the audio RTCP from. Respectively, the subsequent ports" - + " will be used to receive the video RTP and RTCP from." - } - }; - - public static void main(String[] args) - throws Exception - { - // We need three parameters to do the transmission. For example, - // ant run-example -Drun.example.name=AVReceive2 -Drun.example.arg.line="--local-port-base=10000 --remote-host=129.130.131.132 --remote-port-base=5000" - if (args.length < 3) - { - prUsage(); - } - else - { - Map argMap = AVTransmit2.parseCommandLineArgs(args); - - LibJitsi.start(); - try - { - AVReceive2 avReceive - = new AVReceive2( - argMap.get(LOCAL_PORT_BASE_ARG_NAME), - argMap.get(REMOTE_HOST_ARG_NAME), - argMap.get(REMOTE_PORT_BASE_ARG_NAME)); - - if (avReceive.initialize()) - { - try - { - /* - * Wait for the media to be received and played back. - * AVTransmit2 transmits for 1 minute so AVReceive2 - * waits for 2 minutes to allow AVTransmit2 to start the - * tranmission with a bit of a delay (if necessary). 
- */ - long then = System.currentTimeMillis(); - long waitingPeriod = 2 * 60000; - - try - { - while ((System.currentTimeMillis() - then) - < waitingPeriod) - Thread.sleep(1000); - } - catch (InterruptedException ie) - { - } - } - finally - { - avReceive.close(); - } - - System.err.println("Exiting AVReceive2"); - } - else - { - System.err.println("Failed to initialize the sessions."); - } - } - finally - { - LibJitsi.stop(); - } - } - } - - /** - * Outputs human-readable description about the usage of the - * AVReceive2 application and the command-line arguments it - * accepts as valid. - */ - private static void prUsage() - { - PrintStream err = System.err; - - err.println("Usage: " + AVReceive2.class.getName() + " "); - err.println("Valid args:"); - for (String[] arg : ARGS) - err.println(" " + arg[0] + " " + arg[1]); - } -} +package org.jitsi.examples; + +import java.io.*; +import java.net.*; +import java.util.*; + +import org.jitsi.service.libjitsi.*; +import org.jitsi.service.neomedia.*; +import org.jitsi.service.neomedia.device.*; +import org.jitsi.service.neomedia.format.*; + +/** + * Implements an example application in the fashion of JMF's AVReceive2 example + * which demonstrates the use of the libjitsi library for the purposes + * of receiving audio and video via RTP means. + * + * @author Lyubomir Marinov + */ +public class AVReceive2 +{ + /** + * The port which is the target of the transmission i.e. on which the media + * is to be received. + * + * @see #LOCAL_PORT_BASE_ARG_NAME + */ + private int localPortBase; + + /** + * The MediaStream instances initialized by this instance indexed + * by their respective MediaType ordinal. + */ + private MediaStream[] mediaStreams; + + /** + * The InetAddress of the host which is the target of the receipt + * i.e. from which the media is to be received. + * + * @see #REMOTE_HOST_ARG_NAME + */ + private InetAddress remoteAddr; + + /** + * The port which is the target of the receipt i.e. 
from which the media is + * to be received. + * + * @see #REMOTE_PORT_BASE_ARG_NAME + */ + private int remotePortBase; + + /** + * Initializes a new AVReceive2 instance which is to receive audio + * and video from a specific host and a specific port. + * + * @param localPortBase the port on which the audio and video are to be + * received + * @param remoteHost the name of the host from which the media is + * transmitted + * @param remotePortBase the port from which the media is transmitted + * @throws Exception if any error arises during the parsing of the specified + * localPortBase, remoteHost and remotePortBase + */ + private AVReceive2( + String localPortBase, + String remoteHost, String remotePortBase) + throws Exception + { + this.localPortBase + = (localPortBase == null) + ? -1 + : Integer.valueOf(localPortBase).intValue(); + this.remoteAddr = InetAddress.getByName(remoteHost); + this.remotePortBase = Integer.valueOf(remotePortBase).intValue(); + } + + /** + * Initializes the receipt of audio and video. + * + * @return true if this instance has been successfully initialized + * to receive audio and video + * @throws Exception if anything goes wrong while initializing this instance + * for the receipt of audio and video + */ + private boolean initialize() + throws Exception + { + /* + * Prepare for the start of the transmission i.e. initialize the + * MediaStream instances. + */ + MediaType[] mediaTypes = MediaType.values(); + MediaService mediaService = LibJitsi.getMediaService(); + int localPort = localPortBase; + int remotePort = remotePortBase; + + mediaStreams = new MediaStream[mediaTypes.length]; + for (MediaType mediaType : mediaTypes) + { + /* + * The default MediaDevice (for a specific MediaType) is configured + * (by the user of the application via some sort of UI) into the + * ConfigurationService. 
If there is no ConfigurationService + * instance known to LibJitsi, the first available MediaDevice of + * the specified MediaType will be chosen by MediaService. + */ + MediaDevice device + = mediaService.getDefaultDevice(mediaType, MediaUseCase.CALL); + MediaStream mediaStream = mediaService.createMediaStream(device); + + // direction + /* + * The AVTransmit2 example sends only and the AVReceive2 receives + * only. In a call, the MediaStream's direction will most commonly + * be set to SENDRECV. + */ + mediaStream.setDirection(MediaDirection.RECVONLY); + + // format + String encoding; + double clockRate; + /* + * The AVTransmit2 and AVReceive2 examples use the H.264 video + * codec. Its RTP transmission has no static RTP payload type number + * assigned. + */ + byte dynamicRTPPayloadType; + + switch (device.getMediaType()) + { + case AUDIO: + encoding = "PCMU"; + clockRate = 8000; + /* PCMU has a static RTP payload type number assigned. */ + dynamicRTPPayloadType = -1; + break; + case VIDEO: + encoding = "H264"; + clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; + /* + * The dymanic RTP payload type numbers are usually negotiated + * in the signaling functionality. + */ + dynamicRTPPayloadType = 99; + break; + default: + encoding = null; + clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; + dynamicRTPPayloadType = -1; + } + + if (encoding != null) + { + MediaFormat format + = mediaService.getFormatFactory().createMediaFormat( + encoding, + clockRate); + + /* + * The MediaFormat instances which do not have a static RTP + * payload type number association must be explicitly assigned + * a dynamic RTP payload type number. 
+ */ + if (dynamicRTPPayloadType != -1) + { + mediaStream.addDynamicRTPPayloadType( + dynamicRTPPayloadType, + format); + } + + mediaStream.setFormat(format); + } + + // connector + StreamConnector connector; + + if (localPortBase == -1) + { + connector = new DefaultStreamConnector(); + } + else + { + int localRTPPort = localPort++; + int localRTCPPort = localPort++; + + connector + = new DefaultStreamConnector( + new DatagramSocket(localRTPPort), + new DatagramSocket(localRTCPPort)); + } + mediaStream.setConnector(connector); + + // target + /* + * The AVTransmit2 and AVReceive2 examples follow the common + * practice that the RTCP port is right after the RTP port. + */ + int remoteRTPPort = remotePort++; + int remoteRTCPPort = remotePort++; + + mediaStream.setTarget( + new MediaStreamTarget( + new InetSocketAddress(remoteAddr, remoteRTPPort), + new InetSocketAddress(remoteAddr, remoteRTCPPort))); + + // name + /* + * The name is completely optional and it is not being used by the + * MediaStream implementation at this time, it is just remembered so + * that it can be retrieved via MediaStream#getName(). It may be + * integrated with the signaling functionality if necessary. + */ + mediaStream.setName(mediaType.toString()); + + mediaStreams[mediaType.ordinal()] = mediaStream; + } + + /* + * Do start the transmission i.e. start the initialized MediaStream + * instances. + */ + for (MediaStream mediaStream : mediaStreams) + if (mediaStream != null) + mediaStream.start(); + + return true; + } + + /** + * Close the MediaStreams. 
+ */ + private void close() + { + if (mediaStreams != null) + { + for (int i = 0; i < mediaStreams.length; i++) + { + MediaStream mediaStream = mediaStreams[i]; + + if (mediaStream != null) + { + try + { + mediaStream.stop(); + } + finally + { + mediaStream.close(); + mediaStreams[i] = null; + } + } + } + + mediaStreams = null; + } + } + + /** + * The name of the command-line argument which specifies the port on which + * the media is to be received. The command-line argument value will be used + * as the port to receive the audio RTP on, the next port after it will be + * used to receive the audio RTCP on. Respectively, the subsequent ports + * ports will be used to transmit the video RTP and RTCP on." + */ + private static final String LOCAL_PORT_BASE_ARG_NAME + = "--local-port-base="; + + /** + * The name of the command-line argument which specifies the name of the + * host from which the media is to be received. + */ + private static final String REMOTE_HOST_ARG_NAME = "--remote-host="; + + /** + * The name of the command-line argument which specifies the port from which + * the media is to be received. The command-line argument value will be + * used as the port to receive the audio RTP from, the next port after it + * will be to receive the audio RTCP from. Respectively, the subsequent + * ports will be used to receive the video RTP and RTCP from." + */ + private static final String REMOTE_PORT_BASE_ARG_NAME + = "--remote-port-base="; + + /** + * The list of command-line arguments accepted as valid by the + * AVReceive2 application along with their human-readable usage + * descriptions. + */ + private static final String[][] ARGS + = { + { + LOCAL_PORT_BASE_ARG_NAME, + "The port on which media is to be received. The specified value" + + " will be used as the port to receive the audio RTP on," + + " the next port after it will be used to receive the" + + " audio RTCP on. 
Respectively, the subsequent ports will" + + " be used to receive the video RTP and RTCP on." + }, + { + REMOTE_HOST_ARG_NAME, + "The name of the host from which the media is to be received." + }, + { + REMOTE_PORT_BASE_ARG_NAME, + "The port from which media is to be received. The specified" + + " vaue will be used as the port to receive the audio RTP" + + " from, the next port after it will be used to receive" + + " the audio RTCP from. Respectively, the subsequent ports" + + " will be used to receive the video RTP and RTCP from." + } + }; + + public static void main(String[] args) + throws Exception + { + // We need three parameters to do the transmission. For example, + // ant run-example -Drun.example.name=AVReceive2 -Drun.example.arg.line="--local-port-base=10000 --remote-host=129.130.131.132 --remote-port-base=5000" + if (args.length < 3) + { + prUsage(); + } + else + { + Map argMap = AVTransmit2.parseCommandLineArgs(args); + + LibJitsi.start(); + try + { + AVReceive2 avReceive + = new AVReceive2( + argMap.get(LOCAL_PORT_BASE_ARG_NAME), + argMap.get(REMOTE_HOST_ARG_NAME), + argMap.get(REMOTE_PORT_BASE_ARG_NAME)); + + if (avReceive.initialize()) + { + try + { + /* + * Wait for the media to be received and played back. + * AVTransmit2 transmits for 1 minute so AVReceive2 + * waits for 2 minutes to allow AVTransmit2 to start the + * tranmission with a bit of a delay (if necessary). 
+ */ + long then = System.currentTimeMillis(); + long waitingPeriod = 2 * 60000; + + try + { + while ((System.currentTimeMillis() - then) + < waitingPeriod) + Thread.sleep(1000); + } + catch (InterruptedException ie) + { + } + } + finally + { + avReceive.close(); + } + + System.err.println("Exiting AVReceive2"); + } + else + { + System.err.println("Failed to initialize the sessions."); + } + } + finally + { + LibJitsi.stop(); + } + } + } + + /** + * Outputs human-readable description about the usage of the + * AVReceive2 application and the command-line arguments it + * accepts as valid. + */ + private static void prUsage() + { + PrintStream err = System.err; + + err.println("Usage: " + AVReceive2.class.getName() + " "); + err.println("Valid args:"); + for (String[] arg : ARGS) + err.println(" " + arg[0] + " " + arg[1]); + } +} diff --git a/src/org/jitsi/examples/AVTransmit2.java b/src/org/jitsi/examples/AVTransmit2.java index b086cb956..99cc32306 100644 --- a/src/org/jitsi/examples/AVTransmit2.java +++ b/src/org/jitsi/examples/AVTransmit2.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,426 +13,426 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.examples; - -import java.io.*; -import java.net.*; -import java.util.*; - -import org.jitsi.service.libjitsi.*; -import org.jitsi.service.neomedia.*; -import org.jitsi.service.neomedia.device.*; -import org.jitsi.service.neomedia.format.*; - -/** - * Implements an example application in the fashion of JMF's AVTransmit2 example - * which demonstrates the use of the libjitsi library for the purposes - * of transmitting audio and video via RTP means. - * - * @author Lyubomir Marinov - */ -public class AVTransmit2 -{ - /** - * The port which is the source of the transmission i.e. from which the - * media is to be transmitted. 
- * - * @see #LOCAL_PORT_BASE_ARG_NAME - */ - private int localPortBase; - - /** - * The MediaStream instances initialized by this instance indexed - * by their respective MediaType ordinal. - */ - private MediaStream[] mediaStreams; - - /** - * The InetAddress of the host which is the target of the - * transmission i.e. to which the media is to be transmitted. - * - * @see #REMOTE_HOST_ARG_NAME - */ - private InetAddress remoteAddr; - - /** - * The port which is the target of the transmission i.e. to which the media - * is to be transmitted. - * - * @see #REMOTE_PORT_BASE_ARG_NAME - */ - private int remotePortBase; - - /** - * Initializes a new AVTransmit2 instance which is to transmit - * audio and video to a specific host and a specific port. - * - * @param localPortBase the port which is the source of the transmission - * i.e. from which the media is to be transmitted - * @param remoteHost the name of the host which is the target of the - * transmission i.e. to which the media is to be transmitted - * @param remotePortBase the port which is the target of the transmission - * i.e. to which the media is to be transmitted - * @throws Exception if any error arises during the parsing of the specified - * localPortBase, remoteHost and remotePortBase - */ - private AVTransmit2( - String localPortBase, - String remoteHost, String remotePortBase) - throws Exception - { - this.localPortBase - = (localPortBase == null) - ? -1 - : Integer.valueOf(localPortBase).intValue(); - this.remoteAddr = InetAddress.getByName(remoteHost); - this.remotePortBase = Integer.valueOf(remotePortBase).intValue(); - } - - /** - * Starts the transmission. Returns null if transmission started ok. - * Otherwise it returns a string with the reason why the setup failed. - */ - private String start() - throws Exception - { - /* - * Prepare for the start of the transmission i.e. initialize the - * MediaStream instances. 
- */ - MediaType[] mediaTypes = MediaType.values(); - MediaService mediaService = LibJitsi.getMediaService(); - int localPort = localPortBase; - int remotePort = remotePortBase; - - mediaStreams = new MediaStream[mediaTypes.length]; - for (MediaType mediaType : mediaTypes) - { - /* - * The default MediaDevice (for a specific MediaType) is configured - * (by the user of the application via some sort of UI) into the - * ConfigurationService. If there is no ConfigurationService - * instance known to LibJitsi, the first available MediaDevice of - * the specified MediaType will be chosen by MediaService. - */ - MediaDevice device - = mediaService.getDefaultDevice(mediaType, MediaUseCase.CALL); - MediaStream mediaStream = mediaService.createMediaStream(device); - - // direction - /* - * The AVTransmit2 example sends only and the AVReceive2 receives - * only. In a call, the MediaStream's direction will most commonly - * be set to SENDRECV. - */ - mediaStream.setDirection(MediaDirection.SENDONLY); - - // format - String encoding; - double clockRate; - /* - * The AVTransmit2 and AVReceive2 examples use the H.264 video - * codec. Its RTP transmission has no static RTP payload type number - * assigned. - */ - byte dynamicRTPPayloadType; - - switch (device.getMediaType()) - { - case AUDIO: - encoding = "PCMU"; - clockRate = 8000; - /* PCMU has a static RTP payload type number assigned. */ - dynamicRTPPayloadType = -1; - break; - case VIDEO: - encoding = "H264"; - clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; - /* - * The dymanic RTP payload type numbers are usually negotiated - * in the signaling functionality. 
- */ - dynamicRTPPayloadType = 99; - break; - default: - encoding = null; - clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; - dynamicRTPPayloadType = -1; - } - - if (encoding != null) - { - MediaFormat format - = mediaService.getFormatFactory().createMediaFormat( - encoding, - clockRate); - - /* - * The MediaFormat instances which do not have a static RTP - * payload type number association must be explicitly assigned - * a dynamic RTP payload type number. - */ - if (dynamicRTPPayloadType != -1) - { - mediaStream.addDynamicRTPPayloadType( - dynamicRTPPayloadType, - format); - } - - mediaStream.setFormat(format); - } - - // connector - StreamConnector connector; - - if (localPortBase == -1) - { - connector = new DefaultStreamConnector(); - } - else - { - int localRTPPort = localPort++; - int localRTCPPort = localPort++; - - connector - = new DefaultStreamConnector( - new DatagramSocket(localRTPPort), - new DatagramSocket(localRTCPPort)); - } - mediaStream.setConnector(connector); - - // target - /* - * The AVTransmit2 and AVReceive2 examples follow the common - * practice that the RTCP port is right after the RTP port. - */ - int remoteRTPPort = remotePort++; - int remoteRTCPPort = remotePort++; - - mediaStream.setTarget( - new MediaStreamTarget( - new InetSocketAddress(remoteAddr, remoteRTPPort), - new InetSocketAddress(remoteAddr, remoteRTCPPort))); - - // name - /* - * The name is completely optional and it is not being used by the - * MediaStream implementation at this time, it is just remembered so - * that it can be retrieved via MediaStream#getName(). It may be - * integrated with the signaling functionality if necessary. - */ - mediaStream.setName(mediaType.toString()); - - mediaStreams[mediaType.ordinal()] = mediaStream; - } - - /* - * Do start the transmission i.e. start the initialized MediaStream - * instances. 
- */ - for (MediaStream mediaStream : mediaStreams) - if (mediaStream != null) - mediaStream.start(); - - return null; - } - - /** - * Stops the transmission if already started - */ - private void stop() - { - if (mediaStreams != null) - { - for (int i = 0; i < mediaStreams.length; i++) - { - MediaStream mediaStream = mediaStreams[i]; - - if (mediaStream != null) - { - try - { - mediaStream.stop(); - } - finally - { - mediaStream.close(); - mediaStreams[i] = null; - } - } - } - - mediaStreams = null; - } - } - - /** - * The name of the command-line argument which specifies the port from which - * the media is to be transmitted. The command-line argument value will be - * used as the port to transmit the audio RTP from, the next port after it - * will be to transmit the audio RTCP from. Respectively, the subsequent - * ports will be used to transmit the video RTP and RTCP from." - */ - private static final String LOCAL_PORT_BASE_ARG_NAME - = "--local-port-base="; - - /** - * The name of the command-line argument which specifies the name of the - * host to which the media is to be transmitted. - */ - private static final String REMOTE_HOST_ARG_NAME = "--remote-host="; - - /** - * The name of the command-line argument which specifies the port to which - * the media is to be transmitted. The command-line argument value will be - * used as the port to transmit the audio RTP to, the next port after it - * will be to transmit the audio RTCP to. Respectively, the subsequent ports - * will be used to transmit the video RTP and RTCP to." - */ - private static final String REMOTE_PORT_BASE_ARG_NAME - = "--remote-port-base="; - - /** - * The list of command-line arguments accepted as valid by the - * AVTransmit2 application along with their human-readable usage - * descriptions. - */ - private static final String[][] ARGS - = { - { - LOCAL_PORT_BASE_ARG_NAME, - "The port which is the source of the transmission i.e. from" - + " which the media is to be transmitted. 
The specified" - + " value will be used as the port to transmit the audio" - + " RTP from, the next port after it will be used to" - + " transmit the audio RTCP from. Respectively, the" - + " subsequent ports will be used to transmit the video RTP" - + " and RTCP from." - }, - { - REMOTE_HOST_ARG_NAME, - "The name of the host which is the target of the transmission" - + " i.e. to which the media is to be transmitted" - }, - { - REMOTE_PORT_BASE_ARG_NAME, - "The port which is the target of the transmission i.e. to which" - + " the media is to be transmitted. The specified value" - + " will be used as the port to transmit the audio RTP to" - + " the next port after it will be used to transmit the" - + " audio RTCP to. Respectively, the subsequent ports will" - + " be used to transmit the video RTP and RTCP to." - } - }; - - public static void main(String[] args) - throws Exception - { - // We need two parameters to do the transmission. For example, - // ant run-example -Drun.example.name=AVTransmit2 -Drun.example.arg.line="--remote-host=127.0.0.1 --remote-port-base=10000" - if (args.length < 2) - { - prUsage(); - } - else - { - Map argMap = parseCommandLineArgs(args); - - LibJitsi.start(); - try - { - // Create a audio transmit object with the specified params. - AVTransmit2 at - = new AVTransmit2( - argMap.get(LOCAL_PORT_BASE_ARG_NAME), - argMap.get(REMOTE_HOST_ARG_NAME), - argMap.get(REMOTE_PORT_BASE_ARG_NAME)); - // Start the transmission - String result = at.start(); - - // result will be non-null if there was an error. The return - // value is a String describing the possible error. Print it. - if (result == null) - { - System.err.println("Start transmission for 60 seconds..."); - - // Transmit for 60 seconds and then close the processor - // This is a safeguard when using a capture data source - // so that the capture device will be properly released - // before quitting. 
- // The right thing to do would be to have a GUI with a - // "Stop" button that would call stop on AVTransmit2 - try - { - Thread.sleep(60000); - } catch (InterruptedException ie) - { - } - - // Stop the transmission - at.stop(); - - System.err.println("...transmission ended."); - } - else - { - System.err.println("Error : " + result); - } - } - finally - { - LibJitsi.stop(); - } - } - } - - /** - * Parses the arguments specified to the AVTransmit2 application on - * the command line. - * - * @param args the arguments specified to the AVTransmit2 - * application on the command line - * @return a Map containing the arguments specified to the - * AVTransmit2 application on the command line in the form of - * name-value associations - */ - static Map parseCommandLineArgs(String[] args) - { - Map argMap = new HashMap(); - - for (String arg : args) - { - int keyEndIndex = arg.indexOf('='); - String key; - String value; - - if (keyEndIndex == -1) - { - key = arg; - value = null; - } - else - { - key = arg.substring(0, keyEndIndex + 1); - value = arg.substring(keyEndIndex + 1); - } - argMap.put(key, value); - } - return argMap; - } - - /** - * Outputs human-readable description about the usage of the - * AVTransmit2 application and the command-line arguments it - * accepts as valid. 
- */ - private static void prUsage() - { - PrintStream err = System.err; - - err.println("Usage: " + AVTransmit2.class.getName() + " "); - err.println("Valid args:"); - for (String[] arg : ARGS) - err.println(" " + arg[0] + " " + arg[1]); - } -} +package org.jitsi.examples; + +import java.io.*; +import java.net.*; +import java.util.*; + +import org.jitsi.service.libjitsi.*; +import org.jitsi.service.neomedia.*; +import org.jitsi.service.neomedia.device.*; +import org.jitsi.service.neomedia.format.*; + +/** + * Implements an example application in the fashion of JMF's AVTransmit2 example + * which demonstrates the use of the libjitsi library for the purposes + * of transmitting audio and video via RTP means. + * + * @author Lyubomir Marinov + */ +public class AVTransmit2 +{ + /** + * The port which is the source of the transmission i.e. from which the + * media is to be transmitted. + * + * @see #LOCAL_PORT_BASE_ARG_NAME + */ + private int localPortBase; + + /** + * The MediaStream instances initialized by this instance indexed + * by their respective MediaType ordinal. + */ + private MediaStream[] mediaStreams; + + /** + * The InetAddress of the host which is the target of the + * transmission i.e. to which the media is to be transmitted. + * + * @see #REMOTE_HOST_ARG_NAME + */ + private InetAddress remoteAddr; + + /** + * The port which is the target of the transmission i.e. to which the media + * is to be transmitted. + * + * @see #REMOTE_PORT_BASE_ARG_NAME + */ + private int remotePortBase; + + /** + * Initializes a new AVTransmit2 instance which is to transmit + * audio and video to a specific host and a specific port. + * + * @param localPortBase the port which is the source of the transmission + * i.e. from which the media is to be transmitted + * @param remoteHost the name of the host which is the target of the + * transmission i.e. to which the media is to be transmitted + * @param remotePortBase the port which is the target of the transmission + * i.e. 
to which the media is to be transmitted + * @throws Exception if any error arises during the parsing of the specified + * localPortBase, remoteHost and remotePortBase + */ + private AVTransmit2( + String localPortBase, + String remoteHost, String remotePortBase) + throws Exception + { + this.localPortBase + = (localPortBase == null) + ? -1 + : Integer.valueOf(localPortBase).intValue(); + this.remoteAddr = InetAddress.getByName(remoteHost); + this.remotePortBase = Integer.valueOf(remotePortBase).intValue(); + } + + /** + * Starts the transmission. Returns null if transmission started ok. + * Otherwise it returns a string with the reason why the setup failed. + */ + private String start() + throws Exception + { + /* + * Prepare for the start of the transmission i.e. initialize the + * MediaStream instances. + */ + MediaType[] mediaTypes = MediaType.values(); + MediaService mediaService = LibJitsi.getMediaService(); + int localPort = localPortBase; + int remotePort = remotePortBase; + + mediaStreams = new MediaStream[mediaTypes.length]; + for (MediaType mediaType : mediaTypes) + { + /* + * The default MediaDevice (for a specific MediaType) is configured + * (by the user of the application via some sort of UI) into the + * ConfigurationService. If there is no ConfigurationService + * instance known to LibJitsi, the first available MediaDevice of + * the specified MediaType will be chosen by MediaService. + */ + MediaDevice device + = mediaService.getDefaultDevice(mediaType, MediaUseCase.CALL); + MediaStream mediaStream = mediaService.createMediaStream(device); + + // direction + /* + * The AVTransmit2 example sends only and the AVReceive2 receives + * only. In a call, the MediaStream's direction will most commonly + * be set to SENDRECV. + */ + mediaStream.setDirection(MediaDirection.SENDONLY); + + // format + String encoding; + double clockRate; + /* + * The AVTransmit2 and AVReceive2 examples use the H.264 video + * codec. 
Its RTP transmission has no static RTP payload type number + * assigned. + */ + byte dynamicRTPPayloadType; + + switch (device.getMediaType()) + { + case AUDIO: + encoding = "PCMU"; + clockRate = 8000; + /* PCMU has a static RTP payload type number assigned. */ + dynamicRTPPayloadType = -1; + break; + case VIDEO: + encoding = "H264"; + clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; + /* + * The dymanic RTP payload type numbers are usually negotiated + * in the signaling functionality. + */ + dynamicRTPPayloadType = 99; + break; + default: + encoding = null; + clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED; + dynamicRTPPayloadType = -1; + } + + if (encoding != null) + { + MediaFormat format + = mediaService.getFormatFactory().createMediaFormat( + encoding, + clockRate); + + /* + * The MediaFormat instances which do not have a static RTP + * payload type number association must be explicitly assigned + * a dynamic RTP payload type number. + */ + if (dynamicRTPPayloadType != -1) + { + mediaStream.addDynamicRTPPayloadType( + dynamicRTPPayloadType, + format); + } + + mediaStream.setFormat(format); + } + + // connector + StreamConnector connector; + + if (localPortBase == -1) + { + connector = new DefaultStreamConnector(); + } + else + { + int localRTPPort = localPort++; + int localRTCPPort = localPort++; + + connector + = new DefaultStreamConnector( + new DatagramSocket(localRTPPort), + new DatagramSocket(localRTCPPort)); + } + mediaStream.setConnector(connector); + + // target + /* + * The AVTransmit2 and AVReceive2 examples follow the common + * practice that the RTCP port is right after the RTP port. 
+ */ + int remoteRTPPort = remotePort++; + int remoteRTCPPort = remotePort++; + + mediaStream.setTarget( + new MediaStreamTarget( + new InetSocketAddress(remoteAddr, remoteRTPPort), + new InetSocketAddress(remoteAddr, remoteRTCPPort))); + + // name + /* + * The name is completely optional and it is not being used by the + * MediaStream implementation at this time, it is just remembered so + * that it can be retrieved via MediaStream#getName(). It may be + * integrated with the signaling functionality if necessary. + */ + mediaStream.setName(mediaType.toString()); + + mediaStreams[mediaType.ordinal()] = mediaStream; + } + + /* + * Do start the transmission i.e. start the initialized MediaStream + * instances. + */ + for (MediaStream mediaStream : mediaStreams) + if (mediaStream != null) + mediaStream.start(); + + return null; + } + + /** + * Stops the transmission if already started + */ + private void stop() + { + if (mediaStreams != null) + { + for (int i = 0; i < mediaStreams.length; i++) + { + MediaStream mediaStream = mediaStreams[i]; + + if (mediaStream != null) + { + try + { + mediaStream.stop(); + } + finally + { + mediaStream.close(); + mediaStreams[i] = null; + } + } + } + + mediaStreams = null; + } + } + + /** + * The name of the command-line argument which specifies the port from which + * the media is to be transmitted. The command-line argument value will be + * used as the port to transmit the audio RTP from, the next port after it + * will be to transmit the audio RTCP from. Respectively, the subsequent + * ports will be used to transmit the video RTP and RTCP from." + */ + private static final String LOCAL_PORT_BASE_ARG_NAME + = "--local-port-base="; + + /** + * The name of the command-line argument which specifies the name of the + * host to which the media is to be transmitted. 
+ */ + private static final String REMOTE_HOST_ARG_NAME = "--remote-host="; + + /** + * The name of the command-line argument which specifies the port to which + * the media is to be transmitted. The command-line argument value will be + * used as the port to transmit the audio RTP to, the next port after it + * will be to transmit the audio RTCP to. Respectively, the subsequent ports + * will be used to transmit the video RTP and RTCP to." + */ + private static final String REMOTE_PORT_BASE_ARG_NAME + = "--remote-port-base="; + + /** + * The list of command-line arguments accepted as valid by the + * AVTransmit2 application along with their human-readable usage + * descriptions. + */ + private static final String[][] ARGS + = { + { + LOCAL_PORT_BASE_ARG_NAME, + "The port which is the source of the transmission i.e. from" + + " which the media is to be transmitted. The specified" + + " value will be used as the port to transmit the audio" + + " RTP from, the next port after it will be used to" + + " transmit the audio RTCP from. Respectively, the" + + " subsequent ports will be used to transmit the video RTP" + + " and RTCP from." + }, + { + REMOTE_HOST_ARG_NAME, + "The name of the host which is the target of the transmission" + + " i.e. to which the media is to be transmitted" + }, + { + REMOTE_PORT_BASE_ARG_NAME, + "The port which is the target of the transmission i.e. to which" + + " the media is to be transmitted. The specified value" + + " will be used as the port to transmit the audio RTP to" + + " the next port after it will be used to transmit the" + + " audio RTCP to. Respectively, the subsequent ports will" + + " be used to transmit the video RTP and RTCP to." + } + }; + + public static void main(String[] args) + throws Exception + { + // We need two parameters to do the transmission. 
For example, + // ant run-example -Drun.example.name=AVTransmit2 -Drun.example.arg.line="--remote-host=127.0.0.1 --remote-port-base=10000" + if (args.length < 2) + { + prUsage(); + } + else + { + Map argMap = parseCommandLineArgs(args); + + LibJitsi.start(); + try + { + // Create a audio transmit object with the specified params. + AVTransmit2 at + = new AVTransmit2( + argMap.get(LOCAL_PORT_BASE_ARG_NAME), + argMap.get(REMOTE_HOST_ARG_NAME), + argMap.get(REMOTE_PORT_BASE_ARG_NAME)); + // Start the transmission + String result = at.start(); + + // result will be non-null if there was an error. The return + // value is a String describing the possible error. Print it. + if (result == null) + { + System.err.println("Start transmission for 60 seconds..."); + + // Transmit for 60 seconds and then close the processor + // This is a safeguard when using a capture data source + // so that the capture device will be properly released + // before quitting. + // The right thing to do would be to have a GUI with a + // "Stop" button that would call stop on AVTransmit2 + try + { + Thread.sleep(60000); + } catch (InterruptedException ie) + { + } + + // Stop the transmission + at.stop(); + + System.err.println("...transmission ended."); + } + else + { + System.err.println("Error : " + result); + } + } + finally + { + LibJitsi.stop(); + } + } + } + + /** + * Parses the arguments specified to the AVTransmit2 application on + * the command line. 
+ * + * @param args the arguments specified to the AVTransmit2 + * application on the command line + * @return a Map containing the arguments specified to the + * AVTransmit2 application on the command line in the form of + * name-value associations + */ + static Map parseCommandLineArgs(String[] args) + { + Map argMap = new HashMap(); + + for (String arg : args) + { + int keyEndIndex = arg.indexOf('='); + String key; + String value; + + if (keyEndIndex == -1) + { + key = arg; + value = null; + } + else + { + key = arg.substring(0, keyEndIndex + 1); + value = arg.substring(keyEndIndex + 1); + } + argMap.put(key, value); + } + return argMap; + } + + /** + * Outputs human-readable description about the usage of the + * AVTransmit2 application and the command-line arguments it + * accepts as valid. + */ + private static void prUsage() + { + PrintStream err = System.err; + + err.println("Usage: " + AVTransmit2.class.getName() + " "); + err.println("Valid args:"); + for (String[] arg : ARGS) + err.println(" " + arg[0] + " " + arg[1]); + } +} diff --git a/src/org/jitsi/impl/configuration/ConfigurationStore.java b/src/org/jitsi/impl/configuration/ConfigurationStore.java index 73b413736..5ed62d0e2 100644 --- a/src/org/jitsi/impl/configuration/ConfigurationStore.java +++ b/src/org/jitsi/impl/configuration/ConfigurationStore.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,119 +13,119 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.configuration; - -import java.io.*; - -import org.jitsi.util.xml.*; - -/** - * Abstracts the runtime storage, the serialization and deserialization of the - * configuration properties and their associated values of - * ConfigurationServiceImpl and the format of the configuration file. 
- * Thus ConfigurationServiceImpl can operate regardless of these - * specifics and takes care of asking the VetoableChangeListeners, - * converting the property values to the requested types and notifying the - * PropertyChangeListeners. - * - * @author Lyubomir Marinov - */ -public interface ConfigurationStore -{ - - /** - * Gets the value in this ConfigurationStore of a property with a - * specific name. - * - * @param name the name of the property to get the value of - * @return the value in this ConfigurationStore of the property - * with the specified name; null if the property with the specified - * name does not have an association with a value in this - * ConfigurationStore - */ - public Object getProperty(String name); - - /** - * Gets the names of the properties which have values associated in this - * ConfigurationStore. - * - * @return an array of Strings which specify the names of the - * properties that have values associated in this - * ConfigurationStore; an empty array if this instance contains no - * property values - */ - public String[] getPropertyNames(); - - /** - * Determines whether a specific name stands for a system property. - * - * @param name the name of a property which is to be determined whether it - * is a system property - * @return true if the specified name stands for a system property; - * false, otherwise - */ - public boolean isSystemProperty(String name); - - /** - * Removes all property name-value associations currently present in this - * ConfigurationStore and deserializes new property name-value - * associations from a specific File which presumably is in the - * format represented by this instance. 
- * - * @param file the File to be read and to deserialize new property - * name-value associations from into this instance - * @throws IOException if there is an input error while reading from the - * specified file - * @throws XMLException if parsing the contents of the specified - * file fails - */ - public void reloadConfiguration(File file) - throws IOException, - XMLException; - - /** - * Removes the value association in this ConfigurationStore of the - * property with a specific name. If the property with the specified name is - * not associated with a value in this ConfigurationStore, does - * nothing. - * - * @param name the name of the property which is to have its value - * association in this ConfigurationStore removed - */ - public void removeProperty(String name); - - /** - * Sets the value of a non-system property with a specific name to a - * specific value in this ConfigurationStore. - * - * @param name the name of the non-system property to be set to the - * specified value in this ConfigurationStore - * @param value the value to be assigned to the non-system property with the - * specified name in this ConfigurationStore - */ - public void setNonSystemProperty(String name, Object value); - - /** - * Sets a property with a specific name to be considered a system property - * by the ConfigurationStore. - * - * @param name the name of the property to be set as a system property in - * this ConfigurationStore - */ - public void setSystemProperty(String name); - - /** - * Stores/serializes the property name-value associations currently present - * in this ConfigurationStore into a specific OutputStream - * in the format represented by this instance. 
- * - * @param out the OutputStream to receive the serialized form of - * the property name-value associations currently present in this - * ConfigurationStore - * @throws IOException if there is an output error while storing the - * properties managed by this ConfigurationStore into the specified - * file - */ - public void storeConfiguration(OutputStream out) - throws IOException; -} +package org.jitsi.impl.configuration; + +import java.io.*; + +import org.jitsi.util.xml.*; + +/** + * Abstracts the runtime storage, the serialization and deserialization of the + * configuration properties and their associated values of + * ConfigurationServiceImpl and the format of the configuration file. + * Thus ConfigurationServiceImpl can operate regardless of these + * specifics and takes care of asking the VetoableChangeListeners, + * converting the property values to the requested types and notifying the + * PropertyChangeListeners. + * + * @author Lyubomir Marinov + */ +public interface ConfigurationStore +{ + + /** + * Gets the value in this ConfigurationStore of a property with a + * specific name. + * + * @param name the name of the property to get the value of + * @return the value in this ConfigurationStore of the property + * with the specified name; null if the property with the specified + * name does not have an association with a value in this + * ConfigurationStore + */ + public Object getProperty(String name); + + /** + * Gets the names of the properties which have values associated in this + * ConfigurationStore. + * + * @return an array of Strings which specify the names of the + * properties that have values associated in this + * ConfigurationStore; an empty array if this instance contains no + * property values + */ + public String[] getPropertyNames(); + + /** + * Determines whether a specific name stands for a system property. 
+ * + * @param name the name of a property which is to be determined whether it + * is a system property + * @return true if the specified name stands for a system property; + * false, otherwise + */ + public boolean isSystemProperty(String name); + + /** + * Removes all property name-value associations currently present in this + * ConfigurationStore and deserializes new property name-value + * associations from a specific File which presumably is in the + * format represented by this instance. + * + * @param file the File to be read and to deserialize new property + * name-value associations from into this instance + * @throws IOException if there is an input error while reading from the + * specified file + * @throws XMLException if parsing the contents of the specified + * file fails + */ + public void reloadConfiguration(File file) + throws IOException, + XMLException; + + /** + * Removes the value association in this ConfigurationStore of the + * property with a specific name. If the property with the specified name is + * not associated with a value in this ConfigurationStore, does + * nothing. + * + * @param name the name of the property which is to have its value + * association in this ConfigurationStore removed + */ + public void removeProperty(String name); + + /** + * Sets the value of a non-system property with a specific name to a + * specific value in this ConfigurationStore. + * + * @param name the name of the non-system property to be set to the + * specified value in this ConfigurationStore + * @param value the value to be assigned to the non-system property with the + * specified name in this ConfigurationStore + */ + public void setNonSystemProperty(String name, Object value); + + /** + * Sets a property with a specific name to be considered a system property + * by the ConfigurationStore. 
+ * + * @param name the name of the property to be set as a system property in + * this ConfigurationStore + */ + public void setSystemProperty(String name); + + /** + * Stores/serializes the property name-value associations currently present + * in this ConfigurationStore into a specific OutputStream + * in the format represented by this instance. + * + * @param out the OutputStream to receive the serialized form of + * the property name-value associations currently present in this + * ConfigurationStore + * @throws IOException if there is an output error while storing the + * properties managed by this ConfigurationStore into the specified + * file + */ + public void storeConfiguration(OutputStream out) + throws IOException; +} diff --git a/src/org/jitsi/impl/configuration/DatabaseConfigurationStore.java b/src/org/jitsi/impl/configuration/DatabaseConfigurationStore.java index e49c2b4a3..de4b71e03 100644 --- a/src/org/jitsi/impl/configuration/DatabaseConfigurationStore.java +++ b/src/org/jitsi/impl/configuration/DatabaseConfigurationStore.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,105 +13,105 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.configuration; - -import java.io.*; -import java.util.*; - -import org.jitsi.util.xml.*; - -/** - * - * @author Lyubomir Marinov - */ -@SuppressWarnings("rawtypes") -public abstract class DatabaseConfigurationStore - extends HashtableConfigurationStore -{ - /** - * Initializes a new DatabaseConfigurationStore instance. - */ - protected DatabaseConfigurationStore() - { - this(new Hashtable()); - } - - /** - * Initializes a new DatabaseConfigurationStore instance with a - * specific runtime Hashtable storage. 
- * - * @param properties the Hashtable which is to become the runtime - * storage of the new instance - */ - protected DatabaseConfigurationStore(Hashtable properties) - { - super(properties); - } - - /** - * Removes all property name-value associations currently present in this - * ConfigurationStore instance and deserializes new property - * name-value associations from its underlying database (storage). - * - * @throws IOException if there is an input error while reading from the - * underlying database (storage) - */ - protected abstract void reloadConfiguration() - throws IOException; - - /** - * Removes all property name-value associations currently present in this - * ConfigurationStore and deserializes new property name-value - * associations from a specific File which presumably is in the - * format represented by this instance. - * - * @param file the File to be read and to deserialize new property - * name-value associations from into this instance - * @throws IOException if there is an input error while reading from the - * specified file - * @throws XMLException if parsing the contents of the specified - * file fails - * @see ConfigurationStore#reloadConfiguration(File) - */ - public void reloadConfiguration(File file) - throws IOException, - XMLException - { - properties.clear(); - - reloadConfiguration(); - } - - /** - * Stores/serializes the property name-value associations currently present - * in this ConfigurationStore instance into its underlying database - * (storage). - * - * @throws IOException if there is an output error while storing the - * properties managed by this ConfigurationStore instance into its - * underlying database (storage) - */ - protected void storeConfiguration() - throws IOException - { - } - - /** - * Stores/serializes the property name-value associations currently present - * in this ConfigurationStore into a specific OutputStream - * in the format represented by this instance. 
- * - * @param out the OutputStream to receive the serialized form of - * the property name-value associations currently present in this - * ConfigurationStore - * @throws IOException if there is an output error while storing the - * properties managed by this ConfigurationStore into the specified - * file - * @see ConfigurationStore#storeConfiguration(OutputStream) - */ - public void storeConfiguration(OutputStream out) - throws IOException - { - storeConfiguration(); - } -} +package org.jitsi.impl.configuration; + +import java.io.*; +import java.util.*; + +import org.jitsi.util.xml.*; + +/** + * + * @author Lyubomir Marinov + */ +@SuppressWarnings("rawtypes") +public abstract class DatabaseConfigurationStore + extends HashtableConfigurationStore +{ + /** + * Initializes a new DatabaseConfigurationStore instance. + */ + protected DatabaseConfigurationStore() + { + this(new Hashtable()); + } + + /** + * Initializes a new DatabaseConfigurationStore instance with a + * specific runtime Hashtable storage. + * + * @param properties the Hashtable which is to become the runtime + * storage of the new instance + */ + protected DatabaseConfigurationStore(Hashtable properties) + { + super(properties); + } + + /** + * Removes all property name-value associations currently present in this + * ConfigurationStore instance and deserializes new property + * name-value associations from its underlying database (storage). + * + * @throws IOException if there is an input error while reading from the + * underlying database (storage) + */ + protected abstract void reloadConfiguration() + throws IOException; + + /** + * Removes all property name-value associations currently present in this + * ConfigurationStore and deserializes new property name-value + * associations from a specific File which presumably is in the + * format represented by this instance. 
+ * + * @param file the File to be read and to deserialize new property + * name-value associations from into this instance + * @throws IOException if there is an input error while reading from the + * specified file + * @throws XMLException if parsing the contents of the specified + * file fails + * @see ConfigurationStore#reloadConfiguration(File) + */ + public void reloadConfiguration(File file) + throws IOException, + XMLException + { + properties.clear(); + + reloadConfiguration(); + } + + /** + * Stores/serializes the property name-value associations currently present + * in this ConfigurationStore instance into its underlying database + * (storage). + * + * @throws IOException if there is an output error while storing the + * properties managed by this ConfigurationStore instance into its + * underlying database (storage) + */ + protected void storeConfiguration() + throws IOException + { + } + + /** + * Stores/serializes the property name-value associations currently present + * in this ConfigurationStore into a specific OutputStream + * in the format represented by this instance. 
+ * + * @param out the OutputStream to receive the serialized form of + * the property name-value associations currently present in this + * ConfigurationStore + * @throws IOException if there is an output error while storing the + * properties managed by this ConfigurationStore into the specified + * file + * @see ConfigurationStore#storeConfiguration(OutputStream) + */ + public void storeConfiguration(OutputStream out) + throws IOException + { + storeConfiguration(); + } +} diff --git a/src/org/jitsi/impl/configuration/HashtableConfigurationStore.java b/src/org/jitsi/impl/configuration/HashtableConfigurationStore.java index 5e198b235..332b08b81 100644 --- a/src/org/jitsi/impl/configuration/HashtableConfigurationStore.java +++ b/src/org/jitsi/impl/configuration/HashtableConfigurationStore.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,143 +13,143 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.configuration; - -import java.util.*; - -/** - * A simple in-memory {@link ConfigurationStore} implementation that only uses - * a hashtable. - * - * @param the hashtable extension that descendants are going to use. - * @author Lyubomir Marinov - */ -@SuppressWarnings("rawtypes") -public abstract class HashtableConfigurationStore - implements ConfigurationStore -{ - - /** - * The Hashtable instance which stores the property name-value - * associations of this ConfigurationStore instance and which is - * effectively adapted by this instance to ConfigurationStore. - */ - protected final T properties; - - /** - * Creates an instance of this class using properties as the set - * of properties where it will be storing an retrieving properties. - * - * @param properties the map that this store will use for storing and - * retrieving properties. 
- */ - protected HashtableConfigurationStore(T properties) - { - this.properties = properties; - } - - /** - * Implements {@link ConfigurationStore#getProperty(String)}. If this - * ConfigurationStore contains a value associated with the - * specified property name, returns it. Otherwise, searches for a system - * property with the specified name and returns its value. - * - * @param name the name of the property to get the value of - * @return the value in this ConfigurationStore of the property - * with the specified name; null if the property with the specified - * name does not have an association with a value in this - * ConfigurationStore - * @see ConfigurationStore#getProperty(String) - */ - public Object getProperty(String name) - { - Object value = properties.get(name); - - return (value != null) ? value : System.getProperty(name); - } - - /** - * Implements {@link ConfigurationStore#getPropertyNames()}. Gets the names - * of the properties which have values associated in this - * ConfigurationStore. - * - * @return an array of Strings which specify the names of the - * properties that have values associated in this - * ConfigurationStore; an empty array if this instance contains no - * property values - * @see ConfigurationStore#getPropertyNames() - */ - public String[] getPropertyNames() - { - synchronized (properties) - { - Set propertyNames = properties.keySet(); - - return propertyNames.toArray(new String[propertyNames.size()]); - } - } - - /** - * Implements {@link ConfigurationStore#isSystemProperty(String)}. Considers - * a property to be system if the system properties contain a value - * associated with its name. 
- * - * @param name the name of a property which is to be determined whether it - * is a system property - * @return true if the specified name stands for a system property; - * false, otherwise - * @see ConfigurationStore#isSystemProperty(String) - */ - public boolean isSystemProperty(String name) - { - return (System.getProperty(name) != null); - } - - /** - * Implements {@link ConfigurationStore#removeProperty(String)}. Removes the - * value association in this ConfigurationStore of the property - * with a specific name. If the property with the specified name is not - * associated with a value in this ConfigurationStore, does - * nothing. - * - * @param name the name of the property which is to have its value - * association in this ConfigurationStore removed - * @see ConfigurationStore#removeProperty(String) - */ - public void removeProperty(String name) - { - properties.remove(name); - } - - /** - * Implements - * {@link ConfigurationStore#setNonSystemProperty(String, Object)}. - * - * @param name the name of the non-system property to be set to the - * specified value in this ConfigurationStore - * @param value the value to be assigned to the non-system property with the - * specified name in this ConfigurationStore - * @see ConfigurationStore#setNonSystemProperty(String, Object) - */ - @SuppressWarnings("unchecked") - public void setNonSystemProperty(String name, Object value) - { - properties.put(name, value); - } - - /** - * Implements {@link ConfigurationStore#setSystemProperty(String)}. Since - * system properties are managed through the System class, setting - * a property as system in this ConfigurationStore effectively - * removes any existing value associated with the specified property name - * from this instance. 
- * - * @param name the name of the property to be set as a system property in - * this ConfigurationStore - * @see ConfigurationStore#setSystemProperty(String) - */ - public void setSystemProperty(String name) - { - removeProperty(name); - } -} +package org.jitsi.impl.configuration; + +import java.util.*; + +/** + * A simple in-memory {@link ConfigurationStore} implementation that only uses + * a hashtable. + * + * @param the hashtable extension that descendants are going to use. + * @author Lyubomir Marinov + */ +@SuppressWarnings("rawtypes") +public abstract class HashtableConfigurationStore + implements ConfigurationStore +{ + + /** + * The Hashtable instance which stores the property name-value + * associations of this ConfigurationStore instance and which is + * effectively adapted by this instance to ConfigurationStore. + */ + protected final T properties; + + /** + * Creates an instance of this class using properties as the set + * of properties where it will be storing an retrieving properties. + * + * @param properties the map that this store will use for storing and + * retrieving properties. + */ + protected HashtableConfigurationStore(T properties) + { + this.properties = properties; + } + + /** + * Implements {@link ConfigurationStore#getProperty(String)}. If this + * ConfigurationStore contains a value associated with the + * specified property name, returns it. Otherwise, searches for a system + * property with the specified name and returns its value. + * + * @param name the name of the property to get the value of + * @return the value in this ConfigurationStore of the property + * with the specified name; null if the property with the specified + * name does not have an association with a value in this + * ConfigurationStore + * @see ConfigurationStore#getProperty(String) + */ + public Object getProperty(String name) + { + Object value = properties.get(name); + + return (value != null) ? 
value : System.getProperty(name); + } + + /** + * Implements {@link ConfigurationStore#getPropertyNames()}. Gets the names + * of the properties which have values associated in this + * ConfigurationStore. + * + * @return an array of Strings which specify the names of the + * properties that have values associated in this + * ConfigurationStore; an empty array if this instance contains no + * property values + * @see ConfigurationStore#getPropertyNames() + */ + public String[] getPropertyNames() + { + synchronized (properties) + { + Set propertyNames = properties.keySet(); + + return propertyNames.toArray(new String[propertyNames.size()]); + } + } + + /** + * Implements {@link ConfigurationStore#isSystemProperty(String)}. Considers + * a property to be system if the system properties contain a value + * associated with its name. + * + * @param name the name of a property which is to be determined whether it + * is a system property + * @return true if the specified name stands for a system property; + * false, otherwise + * @see ConfigurationStore#isSystemProperty(String) + */ + public boolean isSystemProperty(String name) + { + return (System.getProperty(name) != null); + } + + /** + * Implements {@link ConfigurationStore#removeProperty(String)}. Removes the + * value association in this ConfigurationStore of the property + * with a specific name. If the property with the specified name is not + * associated with a value in this ConfigurationStore, does + * nothing. + * + * @param name the name of the property which is to have its value + * association in this ConfigurationStore removed + * @see ConfigurationStore#removeProperty(String) + */ + public void removeProperty(String name) + { + properties.remove(name); + } + + /** + * Implements + * {@link ConfigurationStore#setNonSystemProperty(String, Object)}. 
+ * + * @param name the name of the non-system property to be set to the + * specified value in this ConfigurationStore + * @param value the value to be assigned to the non-system property with the + * specified name in this ConfigurationStore + * @see ConfigurationStore#setNonSystemProperty(String, Object) + */ + @SuppressWarnings("unchecked") + public void setNonSystemProperty(String name, Object value) + { + properties.put(name, value); + } + + /** + * Implements {@link ConfigurationStore#setSystemProperty(String)}. Since + * system properties are managed through the System class, setting + * a property as system in this ConfigurationStore effectively + * removes any existing value associated with the specified property name + * from this instance. + * + * @param name the name of the property to be set as a system property in + * this ConfigurationStore + * @see ConfigurationStore#setSystemProperty(String) + */ + public void setSystemProperty(String name) + { + removeProperty(name); + } +} diff --git a/src/org/jitsi/impl/configuration/PropertyConfigurationStore.java b/src/org/jitsi/impl/configuration/PropertyConfigurationStore.java index d066ca085..f349232aa 100644 --- a/src/org/jitsi/impl/configuration/PropertyConfigurationStore.java +++ b/src/org/jitsi/impl/configuration/PropertyConfigurationStore.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,99 +13,99 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.configuration; - -import java.io.*; -import java.util.*; - -/** - * Implements a ConfigurationStore which stores property name-value - * associations in a Properties instance and supports its - * serialization format for the configuration file of - * ConfigurationServiceImpl. 
Because of the Properties - * backend which can associate names only String values, instances - * of PropertyConfigurationStore convert property values to - * String using Object#toString(). - * - * @author Lyubomir Marinov - */ -public class PropertyConfigurationStore - extends HashtableConfigurationStore -{ - /** - * Initializes a new PropertyConfigurationStore instance. - */ - public PropertyConfigurationStore() - { - super(new SortedProperties()); - } - - /** - * Implements {@link ConfigurationStore#reloadConfiguration(File)}. Removes - * all property name-value associations currently present in this - * ConfigurationStore and deserializes new property name-value - * associations from a specific File which presumably is in the - * format represented by this instance. - * - * @param file the File to be read and to deserialize new property - * name-value associations from into this instance - * @throws IOException if there is an input error while reading from the - * specified file - * @see ConfigurationStore#reloadConfiguration(File) - */ - public void reloadConfiguration(File file) - throws IOException - { - properties.clear(); - - InputStream in = new BufferedInputStream(new FileInputStream(file)); - try - { - properties.load(in); - } - finally - { - in.close(); - } - } - - /** - * Overrides - * {@link HashtableConfigurationStore#setNonSystemProperty(String, Object)}. - * As the backend of this instance is a Properties instance, it can - * only store String values and the specified value to be - * associated with the specified property name is converted to a - * String. 
- * - * @param name the name of the non-system property to be set to the - * specified value in this ConfigurationStore - * @param value the value to be assigned to the non-system property with the - * specified name in this ConfigurationStore - * @see ConfigurationStore#setNonSystemProperty(String, Object) - */ - @Override - public void setNonSystemProperty(String name, Object value) - { - properties.setProperty(name, value.toString()); - } - - /** - * Implements {@link ConfigurationStore#storeConfiguration(OutputStream)}. - * Stores/serializes the property name-value associations currently present - * in this ConfigurationStore into a specific OutputStream - * in the format represented by this instance. - * - * @param out the OutputStream to receive the serialized form of - * the property name-value associations currently present in this - * ConfigurationStore - * @throws IOException if there is an output error while storing the - * properties managed by this ConfigurationStore into the specified - * file - * @see ConfigurationStore#storeConfiguration(OutputStream) - */ - public void storeConfiguration(OutputStream out) - throws IOException - { - properties.store(out, null); - } -} +package org.jitsi.impl.configuration; + +import java.io.*; +import java.util.*; + +/** + * Implements a ConfigurationStore which stores property name-value + * associations in a Properties instance and supports its + * serialization format for the configuration file of + * ConfigurationServiceImpl. Because of the Properties + * backend which can associate names only String values, instances + * of PropertyConfigurationStore convert property values to + * String using Object#toString(). + * + * @author Lyubomir Marinov + */ +public class PropertyConfigurationStore + extends HashtableConfigurationStore +{ + /** + * Initializes a new PropertyConfigurationStore instance. 
+ */ + public PropertyConfigurationStore() + { + super(new SortedProperties()); + } + + /** + * Implements {@link ConfigurationStore#reloadConfiguration(File)}. Removes + * all property name-value associations currently present in this + * ConfigurationStore and deserializes new property name-value + * associations from a specific File which presumably is in the + * format represented by this instance. + * + * @param file the File to be read and to deserialize new property + * name-value associations from into this instance + * @throws IOException if there is an input error while reading from the + * specified file + * @see ConfigurationStore#reloadConfiguration(File) + */ + public void reloadConfiguration(File file) + throws IOException + { + properties.clear(); + + InputStream in = new BufferedInputStream(new FileInputStream(file)); + try + { + properties.load(in); + } + finally + { + in.close(); + } + } + + /** + * Overrides + * {@link HashtableConfigurationStore#setNonSystemProperty(String, Object)}. + * As the backend of this instance is a Properties instance, it can + * only store String values and the specified value to be + * associated with the specified property name is converted to a + * String. + * + * @param name the name of the non-system property to be set to the + * specified value in this ConfigurationStore + * @param value the value to be assigned to the non-system property with the + * specified name in this ConfigurationStore + * @see ConfigurationStore#setNonSystemProperty(String, Object) + */ + @Override + public void setNonSystemProperty(String name, Object value) + { + properties.setProperty(name, value.toString()); + } + + /** + * Implements {@link ConfigurationStore#storeConfiguration(OutputStream)}. + * Stores/serializes the property name-value associations currently present + * in this ConfigurationStore into a specific OutputStream + * in the format represented by this instance. 
+ * + * @param out the OutputStream to receive the serialized form of + * the property name-value associations currently present in this + * ConfigurationStore + * @throws IOException if there is an output error while storing the + * properties managed by this ConfigurationStore into the specified + * file + * @see ConfigurationStore#storeConfiguration(OutputStream) + */ + public void storeConfiguration(OutputStream out) + throws IOException + { + properties.store(out, null); + } +} diff --git a/src/org/jitsi/impl/configuration/SortedProperties.java b/src/org/jitsi/impl/configuration/SortedProperties.java index 203ef8379..1a5d4270c 100644 --- a/src/org/jitsi/impl/configuration/SortedProperties.java +++ b/src/org/jitsi/impl/configuration/SortedProperties.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,77 +13,77 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.configuration; - -import java.util.*; - -/** - * This class is a sorted version of classical java.util.Properties. It - * is strongly inspired by http://forums.sun.com/thread.jspa?threadID=141144. - * - * @author Sebastien Vincent - * @author Damian Minkov - */ -public class SortedProperties - extends Properties -{ - /** - * Serial version UID. - */ - private static final long serialVersionUID = 0L; - - /** - * Gets an Enumeration of the keys in this Properties - * object. Contrary to the original Properties implementation, it - * forces the keys to be alphabetically sorted. 
- * - * @return an Enumeration of the keys in this Properties - * object - */ - @Override - public synchronized Enumeration keys() - { - final Object[] keys = keySet().toArray(); - - Arrays.sort(keys); - return - new Enumeration() - { - private int i = 0; - - public boolean hasMoreElements() - { - return i < keys.length; - } - - public Object nextElement() - { - return keys[i++]; - } - }; - } - - /** - * Does not allow putting empty String keys in this - * Properties object. - * - * @param key the key - * @param value the value - * @return the previous value of the specified key in this - * Hashtable, or null if it did not have one - */ - @Override - public synchronized Object put(Object key, Object value) - { - /* - * We discovered a special case related to the Properties - * ConfigurationService implementation during testing in which the key - * was a String composed of null characters only (which would be - * trimmed) consumed megabytes of heap. Do now allow such keys. - */ - if (key.toString().trim().length() == 0) - return null; - - return super.put(key, value); - } -} +package org.jitsi.impl.configuration; + +import java.util.*; + +/** + * This class is a sorted version of classical java.util.Properties. It + * is strongly inspired by http://forums.sun.com/thread.jspa?threadID=141144. + * + * @author Sebastien Vincent + * @author Damian Minkov + */ +public class SortedProperties + extends Properties +{ + /** + * Serial version UID. + */ + private static final long serialVersionUID = 0L; + + /** + * Gets an Enumeration of the keys in this Properties + * object. Contrary to the original Properties implementation, it + * forces the keys to be alphabetically sorted. 
+ * + * @return an Enumeration of the keys in this Properties + * object + */ + @Override + public synchronized Enumeration keys() + { + final Object[] keys = keySet().toArray(); + + Arrays.sort(keys); + return + new Enumeration() + { + private int i = 0; + + public boolean hasMoreElements() + { + return i < keys.length; + } + + public Object nextElement() + { + return keys[i++]; + } + }; + } + + /** + * Does not allow putting empty String keys in this + * Properties object. + * + * @param key the key + * @param value the value + * @return the previous value of the specified key in this + * Hashtable, or null if it did not have one + */ + @Override + public synchronized Object put(Object key, Object value) + { + /* + * We discovered a special case related to the Properties + * ConfigurationService implementation during testing in which the key + * was a String composed of null characters only (which would be + * trimmed) consumed megabytes of heap. Do now allow such keys. + */ + if (key.toString().trim().length() == 0) + return null; + + return super.put(key, value); + } +} diff --git a/src/org/jitsi/impl/neomedia/codec/AbstractCodec2.java b/src/org/jitsi/impl/neomedia/codec/AbstractCodec2.java index 7c2ca9ac7..2a4b53fa1 100644 --- a/src/org/jitsi/impl/neomedia/codec/AbstractCodec2.java +++ b/src/org/jitsi/impl/neomedia/codec/AbstractCodec2.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,541 +13,541 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.codec; - -import java.awt.*; - -import javax.media.*; -import javax.media.format.*; - -import net.sf.fmj.media.*; - -/** - * Extends FMJ's AbstractCodec to make it even easier to implement a - * Codec. 
- * - * @author Lyubomir Marinov - */ -public abstract class AbstractCodec2 - extends AbstractCodec -{ - /** - * The Buffer flag which indicates that the respective - * Buffer contains audio data which has been decoded as a result of - * the operation of FEC. - */ - public static final int BUFFER_FLAG_FEC = (1 << 24); - - /** - * The Buffer flag which indicates that the respective - * Buffer contains audio data which has been decoded as a result of - * the operation of PLC. - */ - public static final int BUFFER_FLAG_PLC = (1 << 25); - - /** - * An empty array of Format element type. Explicitly defined to - * reduce unnecessary allocations. - */ - public static final Format[] EMPTY_FORMATS = new Format[0]; - - /** - * The maximum number of lost sequence numbers to conceal with packet loss - * mitigation techniques such as Forward Error Correction (FEC) and Packet - * Loss Concealment (PLC) when dealing with audio. - */ - public static final int MAX_AUDIO_SEQUENCE_NUMBERS_TO_PLC = 3; - - /** - * The maximum (RTP) sequence number value. - */ - public static final int SEQUENCE_MAX = 65535; - - /** - * The minimum (RTP) sequence number value. - */ - public static final int SEQUENCE_MIN = 0; - - /** - * Calculates the number of sequences which have been lost i.e. which have - * not been received. - * - * @param lastSeqNo the last received sequence number (prior to the current - * sequence number represented by seqNo.) May be - * {@link Buffer#SEQUENCE_UNKNOWN}. May be equal to seqNo for the - * purposes of Codec implementations which repeatedly process one and the - * same input Buffer multiple times. - * @param seqNo the current sequence number. May be equal to - * lastSeqNo for the purposes of Codec implementations which - * repeatedly process one and the same input Buffer multiple times. - * @return the number of sequences (between lastSeqNo and - * seqNo) which have been lost i.e. 
which have not been received - */ - public static int calculateLostSeqNoCount(long lastSeqNo, long seqNo) - { - if (lastSeqNo == Buffer.SEQUENCE_UNKNOWN) - return 0; - - int delta = (int) (seqNo - lastSeqNo); - - /* - * We explicitly allow the same sequence number to be received multiple - * times for the purposes of Codec implementations which repeatedly - * process one and the same input Buffer multiple times. - */ - if (delta == 0) - return 0; - else if (delta > 0) - return delta - 1; // The sequence number has not wrapped yet. - else - return delta + SEQUENCE_MAX; // The sequence number has wrapped. - } - - /** - * Increments a specific sequence number and makes sure that the result - * stays within the range of valid RTP sequence number values. - * - * @param seqNo the sequence number to increment - * @return a sequence number which represents an increment over the - * specified seqNo within the range of valid RTP sequence number - * values - */ - public static long incrementSeqNo(long seqNo) - { - seqNo++; - if (seqNo > SEQUENCE_MAX) - seqNo = SEQUENCE_MIN; - return seqNo; - } - - /** - * Utility to perform format matching. 
- * - * @param in input format - * @param outs array of output formats - * @return the first output format that is supported - */ - public static Format matches(Format in, Format outs[]) - { - for (Format out : outs) - if (in.matches(out)) - return out; - return null; - } - - public static YUVFormat specialize(YUVFormat yuvFormat, Class dataType) - { - Dimension size = yuvFormat.getSize(); - int strideY = yuvFormat.getStrideY(); - - if ((strideY == Format.NOT_SPECIFIED) && (size != null)) - strideY = size.width; - - int strideUV = yuvFormat.getStrideUV(); - - if ((strideUV == Format.NOT_SPECIFIED) - && (strideY != Format.NOT_SPECIFIED)) - strideUV = (strideY + 1) / 2; - - int offsetY = yuvFormat.getOffsetY(); - - if (offsetY == Format.NOT_SPECIFIED) - offsetY = 0; - - int offsetU = yuvFormat.getOffsetU(); - - if ((offsetU == Format.NOT_SPECIFIED) - && (strideY != Format.NOT_SPECIFIED) - && (size != null)) - offsetU = offsetY + strideY * size.height; - - int offsetV = yuvFormat.getOffsetV(); - - if ((offsetV == Format.NOT_SPECIFIED) - && (offsetU != Format.NOT_SPECIFIED) - && (strideUV != Format.NOT_SPECIFIED) - && (size != null)) - offsetV = offsetU + strideUV * ((size.height + 1) / 2); - - int maxDataLength - = ((strideY != Format.NOT_SPECIFIED) - && (strideUV != Format.NOT_SPECIFIED)) - && (size != null) - ? (strideY * size.height - + 2 * strideUV * ((size.height + 1) / 2) - + FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE) - : Format.NOT_SPECIFIED; - - return - new YUVFormat( - size, - maxDataLength, - (dataType == null) ? yuvFormat.getDataType() : dataType, - yuvFormat.getFrameRate(), - YUVFormat.YUV_420, - strideY, strideUV, - offsetY, offsetU, offsetV); - } - - /** - * Ensures that the value of the data property of a specific - * Buffer is an array of bytes whose length is at least a - * specific number of bytes. 
- * - * @param buffer the Buffer whose data property value is - * to be validated - * @param newSize the minimum length of the array of byte which is - * to be the value of the data property of buffer - * @param arraycopy true to copy the bytes which are in the - * value of the data property of buffer at the time of the - * invocation of the method if the value of the data property of - * buffer is an array of byte whose length is less than - * newSize; otherwise, false - * @return an array of bytes which is the value of the - * data property of buffer and whose length is at least - * newSize number of bytes - */ - public static byte[] validateByteArraySize( - Buffer buffer, - int newSize, - boolean arraycopy) - { - Object data = buffer.getData(); - byte[] newBytes; - - if (data instanceof byte[]) - { - byte[] bytes = (byte[]) data; - - if (bytes.length < newSize) - { - newBytes = new byte[newSize]; - buffer.setData(newBytes); - if (arraycopy) - { - System.arraycopy(bytes, 0, newBytes, 0, bytes.length); - } - else - { - buffer.setLength(0); - buffer.setOffset(0); - } - } - else - { - newBytes = bytes; - } - } - else - { - newBytes = new byte[newSize]; - buffer.setData(newBytes); - buffer.setLength(0); - buffer.setOffset(0); - } - return newBytes; - } - - /** - * The bitmap/flag mask of optional features supported by this - * AbstractCodec2 such as {@link #BUFFER_FLAG_FEC} and - * {@link #BUFFER_FLAG_PLC}. - */ - protected int features; - - private final Class formatClass; - - /** - * The total input length processed by all invocations of - * {@link #process(Buffer,Buffer)}. Introduced for the purposes of debugging - * at the time of this writing. - */ - private long inLenProcessed; - - /** - * The name of this PlugIn. - */ - private final String name; - - /** - * The total output length processed by all invocations of - * {@link #process(Buffer,Buffer)}. Introduced for the purposes of debugging - * at the time of this writing. 
- */ - private long outLenProcessed; - - private final Format[] supportedOutputFormats; - - /** - * Initializes a new AbstractCodec2 instance with a specific - * PlugIn name, a specific Class of input and output - * Formats and a specific list of Formats supported as - * output. - * - * @param name the PlugIn name of the new instance - * @param formatClass the Class of input and output - * Formats supported by the new instance - * @param supportedOutputFormats the list of Formats supported by - * the new instance as output - */ - protected AbstractCodec2( - String name, - Class formatClass, - Format[] supportedOutputFormats) - { - this.formatClass = formatClass; - this.name = name; - this.supportedOutputFormats = supportedOutputFormats; - - /* - * An Effect is a Codec that does not modify the Format of the data, it - * modifies the contents. - */ - if (this instanceof Effect) - inputFormats = this.supportedOutputFormats; - } - - @Override - public void close() - { - if (!opened) - return; - - doClose(); - - opened = false; - super.close(); - } - - protected void discardOutputBuffer(Buffer outputBuffer) - { - outputBuffer.setDiscard(true); - } - - protected abstract void doClose(); - - /** - * Opens this Codec and acquires the resources that it needs to - * operate. A call to {@link PlugIn#open()} on this instance will result in - * a call to doOpen only if {@link AbstractCodec#opened} is - * false. All required input and/or output formats are assumed to - * have been set on this Codec before doOpen is called. - * - * @throws ResourceUnavailableException if any of the resources that this - * Codec needs to operate cannot be acquired - */ - protected abstract void doOpen() - throws ResourceUnavailableException; - - protected abstract int doProcess(Buffer inBuf, Buffer outBuf); - - /** - * Gets the Formats which are supported by this Codec as - * output when the input is in a specific Format. 
- * - * @param inputFormat the Format of the input for which the - * supported output Formats are to be returned - * @return an array of Formats supported by this Codec as - * output when the input is in the specified inputFormat - */ - protected Format[] getMatchingOutputFormats(Format inputFormat) - { - /* - * An Effect is a Codec that does not modify the Format of the data, it - * modifies the contents. - */ - if (this instanceof Effect) - return new Format[] { inputFormat }; - - return - (supportedOutputFormats == null) - ? EMPTY_FORMATS - : supportedOutputFormats.clone(); - } - - @Override - public String getName() - { - return (name == null) ? super.getName() : name; - } - - /** - * Implements {@link AbstractCodec#getSupportedOutputFormats(Format)}. - * - * @param inputFormat input format - * @return array of supported output format - * @see AbstractCodec#getSupportedOutputFormats(Format) - */ - @Override - public Format[] getSupportedOutputFormats(Format inputFormat) - { - if (inputFormat == null) - return supportedOutputFormats; - - if (!formatClass.isInstance(inputFormat) - || (matches(inputFormat, inputFormats) == null)) - return EMPTY_FORMATS; - - return getMatchingOutputFormats(inputFormat); - } - - /** - * Opens this PlugIn software or hardware component and acquires - * the resources that it needs to operate. All required input and/or output - * formats have to be set on this PlugIn before open is - * called. Buffers should not be passed into this PlugIn without - * first calling open. - * - * @throws ResourceUnavailableException if any of the resources that this - * PlugIn needs to operate cannot be acquired - * @see AbstractPlugIn#open() - */ - @Override - public void open() - throws ResourceUnavailableException - { - if (opened) - return; - - doOpen(); - - opened = true; - super.open(); - } - - /** - * Implements AbstractCodec#process(Buffer, Buffer). 
- * - * @param inBuf - * @param outBuf - * @return BUFFER_PROCESSED_OK if the specified inBuff was - * successfully processed or BUFFER_PROCESSED_FAILED if the - * specified was not successfully processed - * @see AbstractCodec#process(Buffer, Buffer) - */ - @Override - public int process(Buffer inBuf, Buffer outBuf) - { - if (!checkInputBuffer(inBuf)) - return BUFFER_PROCESSED_FAILED; - if (isEOM(inBuf)) - { - propagateEOM(outBuf); - return BUFFER_PROCESSED_OK; - } - if (inBuf.isDiscard()) - { - discardOutputBuffer(outBuf); - return BUFFER_PROCESSED_OK; - } - - int process; - int inLenProcessed = inBuf.getLength(); - - // Buffer.FLAG_SILENCE is set only when the intention is to drop the - // specified input Buffer but to note that it has not been lost. The - // latter is usually necessary if this AbstractCodec2 does Forward Error - // Correction (FEC) and/or Packet Loss Concealment (PLC) and may cause - // noticeable artifacts otherwise. - if ((((BUFFER_FLAG_FEC | BUFFER_FLAG_PLC) & features) == 0) - && ((Buffer.FLAG_SILENCE & inBuf.getFlags()) != 0)) - { - process = OUTPUT_BUFFER_NOT_FILLED; - } - else - { - process = doProcess(inBuf, outBuf); - } - - // Keep track of additional information for the purposes of debugging. 
- if ((process & INPUT_BUFFER_NOT_CONSUMED) != 0) - inLenProcessed -= inBuf.getLength(); - if (inLenProcessed < 0) - inLenProcessed = 0; - - int outLenProcessed; - - if (((process & BUFFER_PROCESSED_FAILED) != 0) - || ((process & OUTPUT_BUFFER_NOT_FILLED)) != 0) - { - outLenProcessed = 0; - } - else - { - outLenProcessed = outBuf.getLength(); - if (outLenProcessed < 0) - outLenProcessed = 0; - } - - this.inLenProcessed += inLenProcessed; - this.outLenProcessed += outLenProcessed; - - return process; - } - - @Override - public Format setInputFormat(Format format) - { - if (!formatClass.isInstance(format) - || (matches(format, inputFormats) == null)) - return null; - - return super.setInputFormat(format); - } - - @Override - public Format setOutputFormat(Format format) - { - if (!formatClass.isInstance(format) - || (matches(format, getMatchingOutputFormats(inputFormat)) - == null)) - return null; - - return super.setOutputFormat(format); - } - - /** - * Updates the format, length and offset of a - * specific output Buffer to specific values. 
- * - * @param outputBuffer the output Buffer to update the properties - * of - * @param format the Format to set on outputBuffer - * @param length the length to set on outputBuffer - * @param offset the offset to set on outputBuffer - */ - protected void updateOutput( - Buffer outputBuffer, - Format format, int length, int offset) - { - outputBuffer.setFormat(format); - outputBuffer.setLength(length); - outputBuffer.setOffset(offset); - } - - protected short[] validateShortArraySize(Buffer buffer, int newSize) - { - Object data = buffer.getData(); - short[] newShorts; - - if (data instanceof short[]) - { - short[] shorts = (short[]) data; - - if (shorts.length >= newSize) - return shorts; - - newShorts = new short[newSize]; - System.arraycopy(shorts, 0, newShorts, 0, shorts.length); - } - else - { - newShorts = new short[newSize]; - buffer.setLength(0); - buffer.setOffset(0); - } - - buffer.setData(newShorts); - return newShorts; - } -} +package org.jitsi.impl.neomedia.codec; + +import java.awt.*; + +import javax.media.*; +import javax.media.format.*; + +import net.sf.fmj.media.*; + +/** + * Extends FMJ's AbstractCodec to make it even easier to implement a + * Codec. + * + * @author Lyubomir Marinov + */ +public abstract class AbstractCodec2 + extends AbstractCodec +{ + /** + * The Buffer flag which indicates that the respective + * Buffer contains audio data which has been decoded as a result of + * the operation of FEC. + */ + public static final int BUFFER_FLAG_FEC = (1 << 24); + + /** + * The Buffer flag which indicates that the respective + * Buffer contains audio data which has been decoded as a result of + * the operation of PLC. + */ + public static final int BUFFER_FLAG_PLC = (1 << 25); + + /** + * An empty array of Format element type. Explicitly defined to + * reduce unnecessary allocations. 
+ */ + public static final Format[] EMPTY_FORMATS = new Format[0]; + + /** + * The maximum number of lost sequence numbers to conceal with packet loss + * mitigation techniques such as Forward Error Correction (FEC) and Packet + * Loss Concealment (PLC) when dealing with audio. + */ + public static final int MAX_AUDIO_SEQUENCE_NUMBERS_TO_PLC = 3; + + /** + * The maximum (RTP) sequence number value. + */ + public static final int SEQUENCE_MAX = 65535; + + /** + * The minimum (RTP) sequence number value. + */ + public static final int SEQUENCE_MIN = 0; + + /** + * Calculates the number of sequences which have been lost i.e. which have + * not been received. + * + * @param lastSeqNo the last received sequence number (prior to the current + * sequence number represented by seqNo.) May be + * {@link Buffer#SEQUENCE_UNKNOWN}. May be equal to seqNo for the + * purposes of Codec implementations which repeatedly process one and the + * same input Buffer multiple times. + * @param seqNo the current sequence number. May be equal to + * lastSeqNo for the purposes of Codec implementations which + * repeatedly process one and the same input Buffer multiple times. + * @return the number of sequences (between lastSeqNo and + * seqNo) which have been lost i.e. which have not been received + */ + public static int calculateLostSeqNoCount(long lastSeqNo, long seqNo) + { + if (lastSeqNo == Buffer.SEQUENCE_UNKNOWN) + return 0; + + int delta = (int) (seqNo - lastSeqNo); + + /* + * We explicitly allow the same sequence number to be received multiple + * times for the purposes of Codec implementations which repeatedly + * process one and the same input Buffer multiple times. + */ + if (delta == 0) + return 0; + else if (delta > 0) + return delta - 1; // The sequence number has not wrapped yet. + else + return delta + SEQUENCE_MAX; // The sequence number has wrapped. 
+ } + + /** + * Increments a specific sequence number and makes sure that the result + * stays within the range of valid RTP sequence number values. + * + * @param seqNo the sequence number to increment + * @return a sequence number which represents an increment over the + * specified seqNo within the range of valid RTP sequence number + * values + */ + public static long incrementSeqNo(long seqNo) + { + seqNo++; + if (seqNo > SEQUENCE_MAX) + seqNo = SEQUENCE_MIN; + return seqNo; + } + + /** + * Utility to perform format matching. + * + * @param in input format + * @param outs array of output formats + * @return the first output format that is supported + */ + public static Format matches(Format in, Format outs[]) + { + for (Format out : outs) + if (in.matches(out)) + return out; + return null; + } + + public static YUVFormat specialize(YUVFormat yuvFormat, Class dataType) + { + Dimension size = yuvFormat.getSize(); + int strideY = yuvFormat.getStrideY(); + + if ((strideY == Format.NOT_SPECIFIED) && (size != null)) + strideY = size.width; + + int strideUV = yuvFormat.getStrideUV(); + + if ((strideUV == Format.NOT_SPECIFIED) + && (strideY != Format.NOT_SPECIFIED)) + strideUV = (strideY + 1) / 2; + + int offsetY = yuvFormat.getOffsetY(); + + if (offsetY == Format.NOT_SPECIFIED) + offsetY = 0; + + int offsetU = yuvFormat.getOffsetU(); + + if ((offsetU == Format.NOT_SPECIFIED) + && (strideY != Format.NOT_SPECIFIED) + && (size != null)) + offsetU = offsetY + strideY * size.height; + + int offsetV = yuvFormat.getOffsetV(); + + if ((offsetV == Format.NOT_SPECIFIED) + && (offsetU != Format.NOT_SPECIFIED) + && (strideUV != Format.NOT_SPECIFIED) + && (size != null)) + offsetV = offsetU + strideUV * ((size.height + 1) / 2); + + int maxDataLength + = ((strideY != Format.NOT_SPECIFIED) + && (strideUV != Format.NOT_SPECIFIED)) + && (size != null) + ? 
(strideY * size.height + + 2 * strideUV * ((size.height + 1) / 2) + + FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE) + : Format.NOT_SPECIFIED; + + return + new YUVFormat( + size, + maxDataLength, + (dataType == null) ? yuvFormat.getDataType() : dataType, + yuvFormat.getFrameRate(), + YUVFormat.YUV_420, + strideY, strideUV, + offsetY, offsetU, offsetV); + } + + /** + * Ensures that the value of the data property of a specific + * Buffer is an array of bytes whose length is at least a + * specific number of bytes. + * + * @param buffer the Buffer whose data property value is + * to be validated + * @param newSize the minimum length of the array of byte which is + * to be the value of the data property of buffer + * @param arraycopy true to copy the bytes which are in the + * value of the data property of buffer at the time of the + * invocation of the method if the value of the data property of + * buffer is an array of byte whose length is less than + * newSize; otherwise, false + * @return an array of bytes which is the value of the + * data property of buffer and whose length is at least + * newSize number of bytes + */ + public static byte[] validateByteArraySize( + Buffer buffer, + int newSize, + boolean arraycopy) + { + Object data = buffer.getData(); + byte[] newBytes; + + if (data instanceof byte[]) + { + byte[] bytes = (byte[]) data; + + if (bytes.length < newSize) + { + newBytes = new byte[newSize]; + buffer.setData(newBytes); + if (arraycopy) + { + System.arraycopy(bytes, 0, newBytes, 0, bytes.length); + } + else + { + buffer.setLength(0); + buffer.setOffset(0); + } + } + else + { + newBytes = bytes; + } + } + else + { + newBytes = new byte[newSize]; + buffer.setData(newBytes); + buffer.setLength(0); + buffer.setOffset(0); + } + return newBytes; + } + + /** + * The bitmap/flag mask of optional features supported by this + * AbstractCodec2 such as {@link #BUFFER_FLAG_FEC} and + * {@link #BUFFER_FLAG_PLC}. 
+ */ + protected int features; + + private final Class formatClass; + + /** + * The total input length processed by all invocations of + * {@link #process(Buffer,Buffer)}. Introduced for the purposes of debugging + * at the time of this writing. + */ + private long inLenProcessed; + + /** + * The name of this PlugIn. + */ + private final String name; + + /** + * The total output length processed by all invocations of + * {@link #process(Buffer,Buffer)}. Introduced for the purposes of debugging + * at the time of this writing. + */ + private long outLenProcessed; + + private final Format[] supportedOutputFormats; + + /** + * Initializes a new AbstractCodec2 instance with a specific + * PlugIn name, a specific Class of input and output + * Formats and a specific list of Formats supported as + * output. + * + * @param name the PlugIn name of the new instance + * @param formatClass the Class of input and output + * Formats supported by the new instance + * @param supportedOutputFormats the list of Formats supported by + * the new instance as output + */ + protected AbstractCodec2( + String name, + Class formatClass, + Format[] supportedOutputFormats) + { + this.formatClass = formatClass; + this.name = name; + this.supportedOutputFormats = supportedOutputFormats; + + /* + * An Effect is a Codec that does not modify the Format of the data, it + * modifies the contents. + */ + if (this instanceof Effect) + inputFormats = this.supportedOutputFormats; + } + + @Override + public void close() + { + if (!opened) + return; + + doClose(); + + opened = false; + super.close(); + } + + protected void discardOutputBuffer(Buffer outputBuffer) + { + outputBuffer.setDiscard(true); + } + + protected abstract void doClose(); + + /** + * Opens this Codec and acquires the resources that it needs to + * operate. A call to {@link PlugIn#open()} on this instance will result in + * a call to doOpen only if {@link AbstractCodec#opened} is + * false. 
All required input and/or output formats are assumed to + * have been set on this Codec before doOpen is called. + * + * @throws ResourceUnavailableException if any of the resources that this + * Codec needs to operate cannot be acquired + */ + protected abstract void doOpen() + throws ResourceUnavailableException; + + protected abstract int doProcess(Buffer inBuf, Buffer outBuf); + + /** + * Gets the Formats which are supported by this Codec as + * output when the input is in a specific Format. + * + * @param inputFormat the Format of the input for which the + * supported output Formats are to be returned + * @return an array of Formats supported by this Codec as + * output when the input is in the specified inputFormat + */ + protected Format[] getMatchingOutputFormats(Format inputFormat) + { + /* + * An Effect is a Codec that does not modify the Format of the data, it + * modifies the contents. + */ + if (this instanceof Effect) + return new Format[] { inputFormat }; + + return + (supportedOutputFormats == null) + ? EMPTY_FORMATS + : supportedOutputFormats.clone(); + } + + @Override + public String getName() + { + return (name == null) ? super.getName() : name; + } + + /** + * Implements {@link AbstractCodec#getSupportedOutputFormats(Format)}. + * + * @param inputFormat input format + * @return array of supported output format + * @see AbstractCodec#getSupportedOutputFormats(Format) + */ + @Override + public Format[] getSupportedOutputFormats(Format inputFormat) + { + if (inputFormat == null) + return supportedOutputFormats; + + if (!formatClass.isInstance(inputFormat) + || (matches(inputFormat, inputFormats) == null)) + return EMPTY_FORMATS; + + return getMatchingOutputFormats(inputFormat); + } + + /** + * Opens this PlugIn software or hardware component and acquires + * the resources that it needs to operate. All required input and/or output + * formats have to be set on this PlugIn before open is + * called. 
Buffers should not be passed into this PlugIn without + * first calling open. + * + * @throws ResourceUnavailableException if any of the resources that this + * PlugIn needs to operate cannot be acquired + * @see AbstractPlugIn#open() + */ + @Override + public void open() + throws ResourceUnavailableException + { + if (opened) + return; + + doOpen(); + + opened = true; + super.open(); + } + + /** + * Implements AbstractCodec#process(Buffer, Buffer). + * + * @param inBuf + * @param outBuf + * @return BUFFER_PROCESSED_OK if the specified inBuff was + * successfully processed or BUFFER_PROCESSED_FAILED if the + * specified was not successfully processed + * @see AbstractCodec#process(Buffer, Buffer) + */ + @Override + public int process(Buffer inBuf, Buffer outBuf) + { + if (!checkInputBuffer(inBuf)) + return BUFFER_PROCESSED_FAILED; + if (isEOM(inBuf)) + { + propagateEOM(outBuf); + return BUFFER_PROCESSED_OK; + } + if (inBuf.isDiscard()) + { + discardOutputBuffer(outBuf); + return BUFFER_PROCESSED_OK; + } + + int process; + int inLenProcessed = inBuf.getLength(); + + // Buffer.FLAG_SILENCE is set only when the intention is to drop the + // specified input Buffer but to note that it has not been lost. The + // latter is usually necessary if this AbstractCodec2 does Forward Error + // Correction (FEC) and/or Packet Loss Concealment (PLC) and may cause + // noticeable artifacts otherwise. + if ((((BUFFER_FLAG_FEC | BUFFER_FLAG_PLC) & features) == 0) + && ((Buffer.FLAG_SILENCE & inBuf.getFlags()) != 0)) + { + process = OUTPUT_BUFFER_NOT_FILLED; + } + else + { + process = doProcess(inBuf, outBuf); + } + + // Keep track of additional information for the purposes of debugging. 
+ if ((process & INPUT_BUFFER_NOT_CONSUMED) != 0) + inLenProcessed -= inBuf.getLength(); + if (inLenProcessed < 0) + inLenProcessed = 0; + + int outLenProcessed; + + if (((process & BUFFER_PROCESSED_FAILED) != 0) + || ((process & OUTPUT_BUFFER_NOT_FILLED)) != 0) + { + outLenProcessed = 0; + } + else + { + outLenProcessed = outBuf.getLength(); + if (outLenProcessed < 0) + outLenProcessed = 0; + } + + this.inLenProcessed += inLenProcessed; + this.outLenProcessed += outLenProcessed; + + return process; + } + + @Override + public Format setInputFormat(Format format) + { + if (!formatClass.isInstance(format) + || (matches(format, inputFormats) == null)) + return null; + + return super.setInputFormat(format); + } + + @Override + public Format setOutputFormat(Format format) + { + if (!formatClass.isInstance(format) + || (matches(format, getMatchingOutputFormats(inputFormat)) + == null)) + return null; + + return super.setOutputFormat(format); + } + + /** + * Updates the format, length and offset of a + * specific output Buffer to specific values. 
+ * + * @param outputBuffer the output Buffer to update the properties + * of + * @param format the Format to set on outputBuffer + * @param length the length to set on outputBuffer + * @param offset the offset to set on outputBuffer + */ + protected void updateOutput( + Buffer outputBuffer, + Format format, int length, int offset) + { + outputBuffer.setFormat(format); + outputBuffer.setLength(length); + outputBuffer.setOffset(offset); + } + + protected short[] validateShortArraySize(Buffer buffer, int newSize) + { + Object data = buffer.getData(); + short[] newShorts; + + if (data instanceof short[]) + { + short[] shorts = (short[]) data; + + if (shorts.length >= newSize) + return shorts; + + newShorts = new short[newSize]; + System.arraycopy(shorts, 0, newShorts, 0, shorts.length); + } + else + { + newShorts = new short[newSize]; + buffer.setLength(0); + buffer.setOffset(0); + } + + buffer.setData(newShorts); + return newShorts; + } +} diff --git a/src/org/jitsi/impl/neomedia/codec/audio/ilbc/JavaEncoder.java b/src/org/jitsi/impl/neomedia/codec/audio/ilbc/JavaEncoder.java index 62452485d..e9f71f263 100644 --- a/src/org/jitsi/impl/neomedia/codec/audio/ilbc/JavaEncoder.java +++ b/src/org/jitsi/impl/neomedia/codec/audio/ilbc/JavaEncoder.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,294 +13,294 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.codec.audio.ilbc; - -import javax.media.*; -import javax.media.format.*; - -import org.jitsi.impl.neomedia.codec.*; -import org.jitsi.service.neomedia.codec.*; -import org.jitsi.service.neomedia.control.*; - -import java.awt.*; -import java.util.*; - -/** - * Implements an iLBC encoder and RTP packetizer as a {@link Codec}. 
- * - * @author Damian Minkov - * @author Lyubomir Marinov - */ -public class JavaEncoder - extends AbstractCodec2 - implements FormatParametersAwareCodec -{ - /** - * The duration an output Buffer produced by this Codec. - */ - private int duration = 0; - - /** - * The ilbc_encoder adapted to Codec by this instance. - */ - private ilbc_encoder enc = null; - - /** - * The input length in bytes with which {@link #enc} has been initialized. - */ - private int inLen; - - /** - * The output length in bytes with which {@link #enc} has been initialized. - */ - private int outLen; - - /** - * The input from previous calls to {@link #doProcess(Buffer, Buffer)} which - * has not been consumed yet. - */ - private byte[] prevIn; - - /** - * The number of bytes in {@link #prevIn} which have not been consumed yet. - */ - private int prevInLen; - - /** - * Initializes a new iLBC JavaEncoder instance. - */ - public JavaEncoder() - { - super( - "iLBC Encoder", - AudioFormat.class, - new Format[] - { - new AudioFormat( - Constants.ILBC_RTP, - 8000, - 16, - 1, - AudioFormat.LITTLE_ENDIAN, - AudioFormat.SIGNED) - }); - - inputFormats - = new Format[] - { - new AudioFormat( - AudioFormat.LINEAR, - 8000, - 16, - 1, - AudioFormat.LITTLE_ENDIAN, - AudioFormat.SIGNED) - }; - - addControl( - new com.sun.media.controls.SilenceSuppressionAdapter( - this, - false, - false)); - - addControl(this); - } - - /** - * Implements {@link AbstractCodec2#doClose()}. - * - * @see AbstractCodec2#doClose() - */ - @Override - protected void doClose() - { - enc = null; - outLen = 0; - inLen = 0; - prevIn = null; - prevInLen = 0; - duration = 0; - } - - /** - * Implements {@link AbstractCodec2#doOpen()}. - * - * @see AbstractCodec2#doOpen() - */ - @Override - protected void doOpen() - { - // if not already initialised, use the default value (30). - if(enc == null) - initEncoder(Constants.ILBC_MODE); - } - - /** - * Implements {@link AbstractCodec2#doProcess(Buffer, Buffer)}. 
- * - * @param inBuffer the input buffer - * @param outBuffer the output buffer - * @return the status of the processing, whether buffer is consumed/filled.. - * @see AbstractCodec2#doProcess(Buffer, Buffer) - */ - @Override - protected int doProcess(Buffer inBuffer, Buffer outBuffer) - { - int inLen = inBuffer.getLength(); - byte[] in = (byte[]) inBuffer.getData(); - int inOff = inBuffer.getOffset(); - - if ((prevInLen != 0) || (inLen < this.inLen)) - { - int bytesToCopy = this.inLen - prevInLen; - - if (bytesToCopy > inLen) - bytesToCopy = inLen; - System.arraycopy(in, inOff, prevIn, prevInLen, bytesToCopy); - prevInLen += bytesToCopy; - - inBuffer.setLength(inLen - bytesToCopy); - inBuffer.setOffset(inOff + bytesToCopy); - - inLen = prevInLen; - in = prevIn; - inOff = 0; - } - else - { - inBuffer.setLength(inLen - this.inLen); - inBuffer.setOffset(inOff + this.inLen); - } - - int ret; - - if (inLen >= this.inLen) - { - /* - * If we are about to encode from prevInput, we already have - * prevInputLength taken into consideration by using prevInput in - * the first place and we have to make sure that we will not use the - * same prevInput more than once. - */ - prevInLen = 0; - - int outOff = 0; - byte[] out - = validateByteArraySize(outBuffer, outOff + outLen, true); - - enc.encode(out, outOff, in, inOff); - - updateOutput(outBuffer, getOutputFormat(), outLen, outOff); - outBuffer.setDuration(duration); - ret = BUFFER_PROCESSED_OK; - } - else - { - ret = OUTPUT_BUFFER_NOT_FILLED; - } - - if (inBuffer.getLength() > 0) - ret |= INPUT_BUFFER_NOT_CONSUMED; - return ret; - } - - /** - * Implements {@link javax.media.Control#getControlComponent()}. - */ - @Override - public Component getControlComponent() - { - return null; - } - - /** - * Get the output format. 
- * - * @return output format - * @see net.sf.fmj.media.AbstractCodec#getOutputFormat() - */ - @Override - @SuppressWarnings("serial") - public Format getOutputFormat() - { - Format f = super.getOutputFormat(); - - if ((f != null) && (f.getClass() == AudioFormat.class)) - { - AudioFormat af = (AudioFormat) f; - - f - = setOutputFormat( - new AudioFormat( - af.getEncoding(), - af.getSampleRate(), - af.getSampleSizeInBits(), - af.getChannels(), - af.getEndian(), - af.getSigned(), - af.getFrameSizeInBits(), - af.getFrameRate(), - af.getDataType()) - { - @Override - public long computeDuration(long length) - { - return JavaEncoder.this.duration; - } - }); - } - return f; - } - - /** - * Init encoder with specified mode. - * @param mode the mode to use. - */ - private void initEncoder(int mode) - { - enc = new ilbc_encoder(mode); - - switch (mode) - { - case 20: - outLen = ilbc_constants.NO_OF_BYTES_20MS; - break; - case 30: - outLen = ilbc_constants.NO_OF_BYTES_30MS; - break; - default: - throw new IllegalStateException("mode"); - } - /* mode is 20 or 30 ms, duration must be in nanoseconds */ - duration = mode * 1000000; - inLen = enc.ULP_inst.blockl * 2; - prevIn = new byte[inLen]; - prevInLen = 0; - } - - /** - * Sets the format parameters to fmtps - * - * @param fmtps The format parameters to set - */ - @Override - public void setFormatParameters(Map fmtps) - { - String modeStr = fmtps.get("mode"); - - if(modeStr != null) - { - try - { - int mode = Integer.valueOf(modeStr); - - // supports only mode 20 or 30 - if(mode == 20 || mode == 30) - initEncoder(mode); - } - catch(Throwable t) - { - } - } - } -} +package org.jitsi.impl.neomedia.codec.audio.ilbc; + +import javax.media.*; +import javax.media.format.*; + +import org.jitsi.impl.neomedia.codec.*; +import org.jitsi.service.neomedia.codec.*; +import org.jitsi.service.neomedia.control.*; + +import java.awt.*; +import java.util.*; + +/** + * Implements an iLBC encoder and RTP packetizer as a {@link Codec}. 
+ * + * @author Damian Minkov + * @author Lyubomir Marinov + */ +public class JavaEncoder + extends AbstractCodec2 + implements FormatParametersAwareCodec +{ + /** + * The duration an output Buffer produced by this Codec. + */ + private int duration = 0; + + /** + * The ilbc_encoder adapted to Codec by this instance. + */ + private ilbc_encoder enc = null; + + /** + * The input length in bytes with which {@link #enc} has been initialized. + */ + private int inLen; + + /** + * The output length in bytes with which {@link #enc} has been initialized. + */ + private int outLen; + + /** + * The input from previous calls to {@link #doProcess(Buffer, Buffer)} which + * has not been consumed yet. + */ + private byte[] prevIn; + + /** + * The number of bytes in {@link #prevIn} which have not been consumed yet. + */ + private int prevInLen; + + /** + * Initializes a new iLBC JavaEncoder instance. + */ + public JavaEncoder() + { + super( + "iLBC Encoder", + AudioFormat.class, + new Format[] + { + new AudioFormat( + Constants.ILBC_RTP, + 8000, + 16, + 1, + AudioFormat.LITTLE_ENDIAN, + AudioFormat.SIGNED) + }); + + inputFormats + = new Format[] + { + new AudioFormat( + AudioFormat.LINEAR, + 8000, + 16, + 1, + AudioFormat.LITTLE_ENDIAN, + AudioFormat.SIGNED) + }; + + addControl( + new com.sun.media.controls.SilenceSuppressionAdapter( + this, + false, + false)); + + addControl(this); + } + + /** + * Implements {@link AbstractCodec2#doClose()}. + * + * @see AbstractCodec2#doClose() + */ + @Override + protected void doClose() + { + enc = null; + outLen = 0; + inLen = 0; + prevIn = null; + prevInLen = 0; + duration = 0; + } + + /** + * Implements {@link AbstractCodec2#doOpen()}. + * + * @see AbstractCodec2#doOpen() + */ + @Override + protected void doOpen() + { + // if not already initialised, use the default value (30). + if(enc == null) + initEncoder(Constants.ILBC_MODE); + } + + /** + * Implements {@link AbstractCodec2#doProcess(Buffer, Buffer)}. 
+ * + * @param inBuffer the input buffer + * @param outBuffer the output buffer + * @return the status of the processing, whether buffer is consumed/filled.. + * @see AbstractCodec2#doProcess(Buffer, Buffer) + */ + @Override + protected int doProcess(Buffer inBuffer, Buffer outBuffer) + { + int inLen = inBuffer.getLength(); + byte[] in = (byte[]) inBuffer.getData(); + int inOff = inBuffer.getOffset(); + + if ((prevInLen != 0) || (inLen < this.inLen)) + { + int bytesToCopy = this.inLen - prevInLen; + + if (bytesToCopy > inLen) + bytesToCopy = inLen; + System.arraycopy(in, inOff, prevIn, prevInLen, bytesToCopy); + prevInLen += bytesToCopy; + + inBuffer.setLength(inLen - bytesToCopy); + inBuffer.setOffset(inOff + bytesToCopy); + + inLen = prevInLen; + in = prevIn; + inOff = 0; + } + else + { + inBuffer.setLength(inLen - this.inLen); + inBuffer.setOffset(inOff + this.inLen); + } + + int ret; + + if (inLen >= this.inLen) + { + /* + * If we are about to encode from prevInput, we already have + * prevInputLength taken into consideration by using prevInput in + * the first place and we have to make sure that we will not use the + * same prevInput more than once. + */ + prevInLen = 0; + + int outOff = 0; + byte[] out + = validateByteArraySize(outBuffer, outOff + outLen, true); + + enc.encode(out, outOff, in, inOff); + + updateOutput(outBuffer, getOutputFormat(), outLen, outOff); + outBuffer.setDuration(duration); + ret = BUFFER_PROCESSED_OK; + } + else + { + ret = OUTPUT_BUFFER_NOT_FILLED; + } + + if (inBuffer.getLength() > 0) + ret |= INPUT_BUFFER_NOT_CONSUMED; + return ret; + } + + /** + * Implements {@link javax.media.Control#getControlComponent()}. + */ + @Override + public Component getControlComponent() + { + return null; + } + + /** + * Get the output format. 
+ * + * @return output format + * @see net.sf.fmj.media.AbstractCodec#getOutputFormat() + */ + @Override + @SuppressWarnings("serial") + public Format getOutputFormat() + { + Format f = super.getOutputFormat(); + + if ((f != null) && (f.getClass() == AudioFormat.class)) + { + AudioFormat af = (AudioFormat) f; + + f + = setOutputFormat( + new AudioFormat( + af.getEncoding(), + af.getSampleRate(), + af.getSampleSizeInBits(), + af.getChannels(), + af.getEndian(), + af.getSigned(), + af.getFrameSizeInBits(), + af.getFrameRate(), + af.getDataType()) + { + @Override + public long computeDuration(long length) + { + return JavaEncoder.this.duration; + } + }); + } + return f; + } + + /** + * Init encoder with specified mode. + * @param mode the mode to use. + */ + private void initEncoder(int mode) + { + enc = new ilbc_encoder(mode); + + switch (mode) + { + case 20: + outLen = ilbc_constants.NO_OF_BYTES_20MS; + break; + case 30: + outLen = ilbc_constants.NO_OF_BYTES_30MS; + break; + default: + throw new IllegalStateException("mode"); + } + /* mode is 20 or 30 ms, duration must be in nanoseconds */ + duration = mode * 1000000; + inLen = enc.ULP_inst.blockl * 2; + prevIn = new byte[inLen]; + prevInLen = 0; + } + + /** + * Sets the format parameters to fmtps + * + * @param fmtps The format parameters to set + */ + @Override + public void setFormatParameters(Map fmtps) + { + String modeStr = fmtps.get("mode"); + + if(modeStr != null) + { + try + { + int mode = Integer.valueOf(modeStr); + + // supports only mode 20 or 30 + if(mode == 20 || mode == 30) + initEncoder(mode); + } + catch(Throwable t) + { + } + } + } +} diff --git a/src/org/jitsi/impl/neomedia/codec/video/HFlip.java b/src/org/jitsi/impl/neomedia/codec/video/HFlip.java index fd5699108..557edc600 100644 --- a/src/org/jitsi/impl/neomedia/codec/video/HFlip.java +++ b/src/org/jitsi/impl/neomedia/codec/video/HFlip.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, 
Version 2.0 (the "License"); @@ -13,372 +13,372 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.codec.video; - -import java.awt.*; - -import javax.media.*; - -import org.jitsi.impl.neomedia.codec.*; -import org.jitsi.util.*; - -/** - * Implements a video Effect which horizontally flips - * AVFrames. - * - * @author Sebastien Vincent - * @author Lyubomir Marinov - */ -public class HFlip - extends AbstractCodec2 - implements Effect -{ - /** - * The Logger used by the HFlip class and its instances - * for logging output. - */ - private static final Logger logger = Logger.getLogger(HFlip.class); - - /** - * The list of Formats supported by HFlip instances as - * input and output. - */ - private static final Format[] SUPPORTED_FORMATS - = new Format[] { new AVFrameFormat() }; - - /** - * The name of the FFmpeg ffsink video source AVFilter used by - * HFlip. - */ - private static final String VSINK_FFSINK_NAME = "nullsink"; - - /** - * The name of the FFmpeg buffer video source AVFilter used by - * HFlip. - */ - private static final String VSRC_BUFFER_NAME = "buffer"; - - /** - * The pointer to the AVFilterContext in {@link #graph} of the - * FFmpeg video source with the name {@link #VSRC_BUFFER_NAME}. - */ - private long buffer; - - /** - * The pointer to the AVFilterContext in {@link #graph} of the - * FFmpeg video sink with the name {@link #VSINK_FFSINK_NAME}. - */ - private long ffsink; - - /** - * The pointer to the AVFilterGraph instance which contains the - * FFmpeg hflip filter represented by this Effect. - */ - private long graph = 0; - - /** - * The indicator which determines whether the fact that {@link #graph} is - * equal to zero means that an attempt to initialize it is to be made. If - * false, indicates that such an attempt has already been made and - * has failed. In other words, prevents multiple initialization attempts - * with the same parameters. 
- */ - private boolean graphIsPending = true; - - /** - * The height of {@link #graph}. - */ - private int height; - - /** - * The pointer to the AVFilterBufferRef instance represented as an - * AVFrame by {@link #outputFrame}. - */ - private long outputFilterBufferRef; - - /** - * The pointer to the AVFrame instance which is the output (data) - * of this Effect. - */ - private long outputFrame; - - /** - * The FFmpeg pixel format of {@link #graph}. - */ - private int pixFmt = FFmpeg.PIX_FMT_NONE; - - /** - * The width of {@link #graph}. - */ - private int width; - - /** - * Initializes a new HFlip instance. - */ - public HFlip() - { - super("FFmpeg HFlip Filter", AVFrameFormat.class, SUPPORTED_FORMATS); - } - - /** - * Closes this Effect. - * - * @see AbstractCodecExt#doClose() - */ - @Override - protected synchronized void doClose() - { - try - { - if (outputFrame != 0) - { - FFmpeg.avcodec_free_frame(outputFrame); - outputFrame = 0; - } - } - finally - { - reset(); - } - } - - /** - * Opens this Effect. - * - * @throws ResourceUnavailableException if any of the required resource - * cannot be allocated - * @see AbstractCodecExt#doOpen() - */ - @Override - protected synchronized void doOpen() - throws ResourceUnavailableException - { - outputFrame = FFmpeg.avcodec_alloc_frame(); - if (outputFrame == 0) - { - String reason = "avcodec_alloc_frame: " + outputFrame; - - logger.error(reason); - throw new ResourceUnavailableException(reason); - } - } - - /** - * Performs the media processing defined by this Effect. 
- * - * @param inputBuffer the Buffer that contains the media data to be - * processed - * @param outputBuffer the Buffer in which to store the processed - * media data - * @return BUFFER_PROCESSED_OK if the processing is successful - * @see AbstractCodecExt#doProcess(Buffer, Buffer) - */ - @Override - protected synchronized int doProcess( - Buffer inputBuffer, - Buffer outputBuffer) - { - /* - * A new frame is about to be output so the old frame is no longer - * necessary. - */ - if (outputFilterBufferRef != 0) - { - FFmpeg.avfilter_unref_buffer(outputFilterBufferRef); - outputFilterBufferRef = 0; - } - - /* - * Make sure the graph is configured with the current Format i.e. size - * and pixFmt. - */ - AVFrameFormat format = (AVFrameFormat) inputBuffer.getFormat(); - Dimension size = format.getSize(); - int pixFmt = format.getPixFmt(); - - if ((this.width != size.width) - || (this.height != size.height) - || (this.pixFmt != pixFmt)) - reset(); - if (graph == 0) - { - String errorReason = null; - int error = 0; - long buffer = 0; - long ffsink = 0; - - if (graphIsPending) - { - graphIsPending = false; - - graph = FFmpeg.avfilter_graph_alloc(); - if (graph == 0) - errorReason = "avfilter_graph_alloc"; - else - { - String filters - = VSRC_BUFFER_NAME + "=" + size.width + ":" + size.height - + ":" + pixFmt + ":1:1000000:1:1,hflip," - + VSINK_FFSINK_NAME; - long log_ctx = 0; - - error - = FFmpeg.avfilter_graph_parse( - graph, - filters, - 0, 0, - log_ctx); - if (error == 0) - { - /* - * Unfortunately, the name of an AVFilterContext created by - * avfilter_graph_parse is not the name of the AVFilter. 
- */ - String parsedFilterNameFormat = "Parsed_%2$s_%1$d"; - String parsedFilterName - = String.format( - parsedFilterNameFormat, - 0, VSRC_BUFFER_NAME); - - buffer - = FFmpeg.avfilter_graph_get_filter( - graph, - parsedFilterName); - if (buffer == 0) - { - errorReason - = "avfilter_graph_get_filter: " - + VSRC_BUFFER_NAME - + "/" - + parsedFilterName; - } - else - { - parsedFilterName - = String.format( - parsedFilterNameFormat, - 2, - VSINK_FFSINK_NAME); - ffsink - = FFmpeg.avfilter_graph_get_filter( - graph, - parsedFilterName); - if (ffsink == 0) - { - errorReason - = "avfilter_graph_get_filter: " - + VSINK_FFSINK_NAME - + "/" - + parsedFilterName; - } - else - { - error - = FFmpeg.avfilter_graph_config(graph, log_ctx); - if (error != 0) - errorReason = "avfilter_graph_config"; - } - } - } - else - errorReason = "avfilter_graph_parse"; - if ((errorReason != null) || (error != 0)) - { - FFmpeg.avfilter_graph_free(graph); - graph = 0; - } - } - } - if (graph == 0) - { - if (errorReason != null) - { - StringBuilder msg = new StringBuilder(errorReason); - - if (error != 0) - msg.append(": ").append(error); - msg.append(", format ").append(format); - logger.error(msg); - } - return BUFFER_PROCESSED_FAILED; - } - else - { - this.width = size.width; - this.height = size.height; - this.pixFmt = pixFmt; - this.buffer = buffer; - this.ffsink = ffsink; - } - } - - /* - * The graph is configured for the current Format, apply its filters to - * the inputFrame. - */ - long inputFrame = ((AVFrame) inputBuffer.getData()).getPtr(); - - outputFilterBufferRef - = FFmpeg.get_filtered_video_frame( - inputFrame, this.width, this.height, this.pixFmt, - buffer, - ffsink, - outputFrame); - if(outputFilterBufferRef == 0) - { - /* - * If get_filtered_video_frame fails, it is likely to fail for any - * frame. Consequently, printing that it has failed will result in a - * lot of repeating logging output. Since the failure in question - * will be visible in the UI anyway, just debug it. 
- */ - if (logger.isDebugEnabled()) - logger.debug("get_filtered_video_frame"); - return BUFFER_PROCESSED_FAILED; - } - - Object out = outputBuffer.getData(); - - if (!(out instanceof AVFrame) - || (((AVFrame) out).getPtr() != outputFrame)) - { - outputBuffer.setData(new AVFrame(outputFrame)); - } - - outputBuffer.setDiscard(inputBuffer.isDiscard()); - outputBuffer.setDuration(inputBuffer.getDuration()); - outputBuffer.setEOM(inputBuffer.isEOM()); - outputBuffer.setFlags(inputBuffer.getFlags()); - outputBuffer.setFormat(format); - outputBuffer.setHeader(inputBuffer.getHeader()); - outputBuffer.setLength(inputBuffer.getLength()); - outputBuffer.setSequenceNumber(inputBuffer.getSequenceNumber()); - outputBuffer.setTimeStamp(inputBuffer.getTimeStamp()); - return BUFFER_PROCESSED_OK; - } - - /** - * Resets the state of this PlugIn. - */ - @Override - public synchronized void reset() - { - if (outputFilterBufferRef != 0) - { - FFmpeg.avfilter_unref_buffer(outputFilterBufferRef); - outputFilterBufferRef = 0; - } - if (graph != 0) - { - FFmpeg.avfilter_graph_free(graph); - graph = 0; - graphIsPending = true; - - width = 0; - height = 0; - pixFmt = FFmpeg.PIX_FMT_NONE; - buffer = 0; - ffsink = 0; - } - } -} +package org.jitsi.impl.neomedia.codec.video; + +import java.awt.*; + +import javax.media.*; + +import org.jitsi.impl.neomedia.codec.*; +import org.jitsi.util.*; + +/** + * Implements a video Effect which horizontally flips + * AVFrames. + * + * @author Sebastien Vincent + * @author Lyubomir Marinov + */ +public class HFlip + extends AbstractCodec2 + implements Effect +{ + /** + * The Logger used by the HFlip class and its instances + * for logging output. + */ + private static final Logger logger = Logger.getLogger(HFlip.class); + + /** + * The list of Formats supported by HFlip instances as + * input and output. 
+ */ + private static final Format[] SUPPORTED_FORMATS + = new Format[] { new AVFrameFormat() }; + + /** + * The name of the FFmpeg ffsink video source AVFilter used by + * HFlip. + */ + private static final String VSINK_FFSINK_NAME = "nullsink"; + + /** + * The name of the FFmpeg buffer video source AVFilter used by + * HFlip. + */ + private static final String VSRC_BUFFER_NAME = "buffer"; + + /** + * The pointer to the AVFilterContext in {@link #graph} of the + * FFmpeg video source with the name {@link #VSRC_BUFFER_NAME}. + */ + private long buffer; + + /** + * The pointer to the AVFilterContext in {@link #graph} of the + * FFmpeg video sink with the name {@link #VSINK_FFSINK_NAME}. + */ + private long ffsink; + + /** + * The pointer to the AVFilterGraph instance which contains the + * FFmpeg hflip filter represented by this Effect. + */ + private long graph = 0; + + /** + * The indicator which determines whether the fact that {@link #graph} is + * equal to zero means that an attempt to initialize it is to be made. If + * false, indicates that such an attempt has already been made and + * has failed. In other words, prevents multiple initialization attempts + * with the same parameters. + */ + private boolean graphIsPending = true; + + /** + * The height of {@link #graph}. + */ + private int height; + + /** + * The pointer to the AVFilterBufferRef instance represented as an + * AVFrame by {@link #outputFrame}. + */ + private long outputFilterBufferRef; + + /** + * The pointer to the AVFrame instance which is the output (data) + * of this Effect. + */ + private long outputFrame; + + /** + * The FFmpeg pixel format of {@link #graph}. + */ + private int pixFmt = FFmpeg.PIX_FMT_NONE; + + /** + * The width of {@link #graph}. + */ + private int width; + + /** + * Initializes a new HFlip instance. + */ + public HFlip() + { + super("FFmpeg HFlip Filter", AVFrameFormat.class, SUPPORTED_FORMATS); + } + + /** + * Closes this Effect. 
+ * + * @see AbstractCodecExt#doClose() + */ + @Override + protected synchronized void doClose() + { + try + { + if (outputFrame != 0) + { + FFmpeg.avcodec_free_frame(outputFrame); + outputFrame = 0; + } + } + finally + { + reset(); + } + } + + /** + * Opens this Effect. + * + * @throws ResourceUnavailableException if any of the required resource + * cannot be allocated + * @see AbstractCodecExt#doOpen() + */ + @Override + protected synchronized void doOpen() + throws ResourceUnavailableException + { + outputFrame = FFmpeg.avcodec_alloc_frame(); + if (outputFrame == 0) + { + String reason = "avcodec_alloc_frame: " + outputFrame; + + logger.error(reason); + throw new ResourceUnavailableException(reason); + } + } + + /** + * Performs the media processing defined by this Effect. + * + * @param inputBuffer the Buffer that contains the media data to be + * processed + * @param outputBuffer the Buffer in which to store the processed + * media data + * @return BUFFER_PROCESSED_OK if the processing is successful + * @see AbstractCodecExt#doProcess(Buffer, Buffer) + */ + @Override + protected synchronized int doProcess( + Buffer inputBuffer, + Buffer outputBuffer) + { + /* + * A new frame is about to be output so the old frame is no longer + * necessary. + */ + if (outputFilterBufferRef != 0) + { + FFmpeg.avfilter_unref_buffer(outputFilterBufferRef); + outputFilterBufferRef = 0; + } + + /* + * Make sure the graph is configured with the current Format i.e. size + * and pixFmt. 
+ */ + AVFrameFormat format = (AVFrameFormat) inputBuffer.getFormat(); + Dimension size = format.getSize(); + int pixFmt = format.getPixFmt(); + + if ((this.width != size.width) + || (this.height != size.height) + || (this.pixFmt != pixFmt)) + reset(); + if (graph == 0) + { + String errorReason = null; + int error = 0; + long buffer = 0; + long ffsink = 0; + + if (graphIsPending) + { + graphIsPending = false; + + graph = FFmpeg.avfilter_graph_alloc(); + if (graph == 0) + errorReason = "avfilter_graph_alloc"; + else + { + String filters + = VSRC_BUFFER_NAME + "=" + size.width + ":" + size.height + + ":" + pixFmt + ":1:1000000:1:1,hflip," + + VSINK_FFSINK_NAME; + long log_ctx = 0; + + error + = FFmpeg.avfilter_graph_parse( + graph, + filters, + 0, 0, + log_ctx); + if (error == 0) + { + /* + * Unfortunately, the name of an AVFilterContext created by + * avfilter_graph_parse is not the name of the AVFilter. + */ + String parsedFilterNameFormat = "Parsed_%2$s_%1$d"; + String parsedFilterName + = String.format( + parsedFilterNameFormat, + 0, VSRC_BUFFER_NAME); + + buffer + = FFmpeg.avfilter_graph_get_filter( + graph, + parsedFilterName); + if (buffer == 0) + { + errorReason + = "avfilter_graph_get_filter: " + + VSRC_BUFFER_NAME + + "/" + + parsedFilterName; + } + else + { + parsedFilterName + = String.format( + parsedFilterNameFormat, + 2, + VSINK_FFSINK_NAME); + ffsink + = FFmpeg.avfilter_graph_get_filter( + graph, + parsedFilterName); + if (ffsink == 0) + { + errorReason + = "avfilter_graph_get_filter: " + + VSINK_FFSINK_NAME + + "/" + + parsedFilterName; + } + else + { + error + = FFmpeg.avfilter_graph_config(graph, log_ctx); + if (error != 0) + errorReason = "avfilter_graph_config"; + } + } + } + else + errorReason = "avfilter_graph_parse"; + if ((errorReason != null) || (error != 0)) + { + FFmpeg.avfilter_graph_free(graph); + graph = 0; + } + } + } + if (graph == 0) + { + if (errorReason != null) + { + StringBuilder msg = new StringBuilder(errorReason); + + if 
(error != 0) + msg.append(": ").append(error); + msg.append(", format ").append(format); + logger.error(msg); + } + return BUFFER_PROCESSED_FAILED; + } + else + { + this.width = size.width; + this.height = size.height; + this.pixFmt = pixFmt; + this.buffer = buffer; + this.ffsink = ffsink; + } + } + + /* + * The graph is configured for the current Format, apply its filters to + * the inputFrame. + */ + long inputFrame = ((AVFrame) inputBuffer.getData()).getPtr(); + + outputFilterBufferRef + = FFmpeg.get_filtered_video_frame( + inputFrame, this.width, this.height, this.pixFmt, + buffer, + ffsink, + outputFrame); + if(outputFilterBufferRef == 0) + { + /* + * If get_filtered_video_frame fails, it is likely to fail for any + * frame. Consequently, printing that it has failed will result in a + * lot of repeating logging output. Since the failure in question + * will be visible in the UI anyway, just debug it. + */ + if (logger.isDebugEnabled()) + logger.debug("get_filtered_video_frame"); + return BUFFER_PROCESSED_FAILED; + } + + Object out = outputBuffer.getData(); + + if (!(out instanceof AVFrame) + || (((AVFrame) out).getPtr() != outputFrame)) + { + outputBuffer.setData(new AVFrame(outputFrame)); + } + + outputBuffer.setDiscard(inputBuffer.isDiscard()); + outputBuffer.setDuration(inputBuffer.getDuration()); + outputBuffer.setEOM(inputBuffer.isEOM()); + outputBuffer.setFlags(inputBuffer.getFlags()); + outputBuffer.setFormat(format); + outputBuffer.setHeader(inputBuffer.getHeader()); + outputBuffer.setLength(inputBuffer.getLength()); + outputBuffer.setSequenceNumber(inputBuffer.getSequenceNumber()); + outputBuffer.setTimeStamp(inputBuffer.getTimeStamp()); + return BUFFER_PROCESSED_OK; + } + + /** + * Resets the state of this PlugIn. 
+ */ + @Override + public synchronized void reset() + { + if (outputFilterBufferRef != 0) + { + FFmpeg.avfilter_unref_buffer(outputFilterBufferRef); + outputFilterBufferRef = 0; + } + if (graph != 0) + { + FFmpeg.avfilter_graph_free(graph); + graph = 0; + graphIsPending = true; + + width = 0; + height = 0; + pixFmt = FFmpeg.PIX_FMT_NONE; + buffer = 0; + ffsink = 0; + } + } +} diff --git a/src/org/jitsi/impl/neomedia/codec/video/h264/JNIDecoder.java b/src/org/jitsi/impl/neomedia/codec/video/h264/JNIDecoder.java index 3ab3705fe..ab05e9b3d 100644 --- a/src/org/jitsi/impl/neomedia/codec/video/h264/JNIDecoder.java +++ b/src/org/jitsi/impl/neomedia/codec/video/h264/JNIDecoder.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,486 +13,486 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.codec.video.h264; - -import java.awt.*; -import java.io.*; - -import javax.media.*; -import javax.media.format.*; - -import net.iharder.*; -import net.sf.fmj.media.*; - -import org.jitsi.impl.neomedia.codec.*; -import org.jitsi.impl.neomedia.codec.video.*; -import org.jitsi.impl.neomedia.format.*; -import org.jitsi.service.neomedia.codec.*; -import org.jitsi.service.neomedia.control.*; -import org.jitsi.util.*; - -/** - * Decodes H.264 NAL units and returns the resulting frames as FFmpeg - * AVFrames (i.e. in YUV format). - * - * @author Damian Minkov - * @author Lyubomir Marinov - * @author Sebastien Vincent - */ -public class JNIDecoder - extends AbstractCodec -{ - /** - * The default output VideoFormat. - */ - private static final VideoFormat[] DEFAULT_OUTPUT_FORMATS - = new VideoFormat[] { new AVFrameFormat(FFmpeg.PIX_FMT_YUV420P) }; - - /** - * The Logger used by the JNIDecoder class and its - * instances to print debug-related information. 
- */ - private static final Logger logger = Logger.getLogger(JNIDecoder.class); - - /** - * Plugin name. - */ - private static final String PLUGIN_NAME = "H.264 Decoder"; - - /** - * The codec context native pointer we will use. - */ - private long avctx; - - /** - * The AVFrame in which the video frame decoded from the encoded - * media data is stored. - */ - private AVFrame avframe; - - /** - * If decoder has got a picture. - */ - private final boolean[] got_picture = new boolean[1]; - - private boolean gotPictureAtLeastOnce; - - /** - * The last known height of {@link #avctx} i.e. the video output by this - * JNIDecoder. Used to detect changes in the output size. - */ - private int height; - - /** - * The KeyFrameControl used by this JNIDecoder to - * control its key frame-related logic. - */ - private KeyFrameControl keyFrameControl; - - /** - * Array of output VideoFormats. - */ - private final VideoFormat[] outputFormats; - - /** - * The last known width of {@link #avctx} i.e. the video output by this - * JNIDecoder. Used to detect changes in the output size. - */ - private int width; - - /** - * Initializes a new JNIDecoder instance which is to decode H.264 - * NAL units into frames in YUV format. - */ - public JNIDecoder() - { - inputFormats - = new VideoFormat[] - { - /* - * Explicitly state both ParameterizedVideoFormat (to - * receive any format parameters which may be of concern - * to this JNIDecoder) and VideoFormat (to make sure - * that nothing breaks because of equality and/or - * matching tests involving ParameterizedVideoFormat). - */ - new ParameterizedVideoFormat(Constants.H264), - new VideoFormat(Constants.H264) - }; - outputFormats = DEFAULT_OUTPUT_FORMATS; - } - - /** - * Check Format. - * - * @param format Format to check - * @return true if Format is H264_RTP - */ - public boolean checkFormat(Format format) - { - return format.getEncoding().equals(Constants.H264_RTP); - } - - /** - * Close Codec. 
- */ - @Override - public synchronized void close() - { - if (opened) - { - opened = false; - super.close(); - - FFmpeg.avcodec_close(avctx); - FFmpeg.av_free(avctx); - avctx = 0; - - if (avframe != null) - { - avframe.free(); - avframe = null; - } - - gotPictureAtLeastOnce = false; - } - } - - /** - * Ensure frame rate. - * - * @param frameRate frame rate - * @return frame rate - */ - private float ensureFrameRate(float frameRate) - { - return frameRate; - } - - /** - * Get matching outputs for a specified input Format. - * - * @param inputFormat input Format - * @return array of matching outputs or null if there are no matching - * outputs. - */ - protected Format[] getMatchingOutputFormats(Format inputFormat) - { - VideoFormat inputVideoFormat = (VideoFormat) inputFormat; - - return - new Format[] - { - new AVFrameFormat( - inputVideoFormat.getSize(), - ensureFrameRate(inputVideoFormat.getFrameRate()), - FFmpeg.PIX_FMT_YUV420P) - }; - } - - /** - * Get plugin name. - * - * @return "H.264 Decoder" - */ - @Override - public String getName() - { - return PLUGIN_NAME; - } - - /** - * Get all supported output Formats. - * - * @param inputFormat input Format to determine corresponding - * output Format/tt>s - * @return an array of supported output Formats - */ - @Override - public Format[] getSupportedOutputFormats(Format inputFormat) - { - Format[] supportedOutputFormats; - - if (inputFormat == null) - { - supportedOutputFormats = outputFormats; - } - else - { - // mismatch input format - if (!(inputFormat instanceof VideoFormat) - || (AbstractCodec2.matches(inputFormat, inputFormats) - == null)) - { - supportedOutputFormats = AbstractCodec2.EMPTY_FORMATS; - } - else - { - // match input format - supportedOutputFormats = getMatchingOutputFormats(inputFormat); - } - } - return supportedOutputFormats; - } - - /** - * Handles any format parameters of the input and/or output Formats - * with which this JNIDecoder has been configured. 
For example, - * takes into account the format parameter sprop-parameter-sets if - * it is specified by the input Format. - */ - private void handleFmtps() - { - try - { - - Format f = getInputFormat(); - - if (f instanceof ParameterizedVideoFormat) - { - ParameterizedVideoFormat pvf = (ParameterizedVideoFormat) f; - String spropParameterSets - = pvf.getFormatParameter( - VideoMediaFormatImpl.H264_SPROP_PARAMETER_SETS_FMTP); - - if (spropParameterSets != null) - { - ByteArrayOutputStream nals = new ByteArrayOutputStream(); - - for (String s : spropParameterSets.split(",")) - { - if ((s != null) && (s.length() != 0)) - { - byte[] nal = Base64.decode(s); - - if ((nal != null) && (nal.length != 0)) - { - nals.write(DePacketizer.NAL_PREFIX); - nals.write(nal); - } - } - } - if (nals.size() != 0) - { - // Add padding because it seems to be required by FFmpeg. - for (int i = 0; - i < FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE; - i++) - { - nals.write(0); - } - - /* - * In accord with RFC 6184 "RTP Payload Format for H.264 - * Video", place the NAL units conveyed by - * sprop-parameter-sets in the NAL unit stream to precede - * any other NAL units in decoding order. - */ - FFmpeg.avcodec_decode_video( - avctx, - avframe.getPtr(), - got_picture, - nals.toByteArray(), nals.size()); - } - } - } - - /* - * Because the handling of format parameter is new at the time of this - * writing and it currently handles only the format parameter - * sprop-parameter-sets the failed handling of which will be made - * visible later on anyway, do not let it kill this JNIDecoder. - */ - } - catch (Throwable t) - { - if (t instanceof InterruptedException) - Thread.currentThread().interrupt(); - else if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - logger.error("Failed to handle format parameters", t); - } - } - - /** - * Inits the codec instances. 
- * - * @throws ResourceUnavailableException if codec initialization failed - */ - @Override - public synchronized void open() - throws ResourceUnavailableException - { - if (opened) - return; - - if (avframe != null) - { - avframe.free(); - avframe = null; - } - avframe = new AVFrame(); - - long avcodec = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_H264); - - if (avcodec == 0) - { - throw new ResourceUnavailableException( - "Could not find H.264 decoder."); - } - - avctx = FFmpeg.avcodec_alloc_context3(avcodec); - FFmpeg.avcodeccontext_set_workaround_bugs(avctx, - FFmpeg.FF_BUG_AUTODETECT); - - /* allow to pass incomplete frame to decoder */ - FFmpeg.avcodeccontext_add_flags2(avctx, - FFmpeg.CODEC_FLAG2_CHUNKS); - - if (FFmpeg.avcodec_open2(avctx, avcodec) < 0) - throw new RuntimeException("Could not open H.264 decoder."); - - gotPictureAtLeastOnce = false; - - opened = true; - super.open(); - - /* - * After this JNIDecoder has been opened, handle format parameters such - * as sprop-parameter-sets which require this JNIDecoder to be in the - * opened state. - */ - handleFmtps(); - } - - /** - * Decodes H.264 media data read from a specific input Buffer into - * a specific output Buffer. - * - * @param in input Buffer - * @param out output Buffer - * @return BUFFER_PROCESSED_OK if in has been successfully - * processed - */ - @Override - public synchronized int process(Buffer in, Buffer out) - { - if (!checkInputBuffer(in)) - return BUFFER_PROCESSED_FAILED; - if (isEOM(in) || !opened) - { - propagateEOM(out); - return BUFFER_PROCESSED_OK; - } - if (in.isDiscard()) - { - out.setDiscard(true); - return BUFFER_PROCESSED_OK; - } - - // Ask FFmpeg to decode. - got_picture[0] = false; - // TODO Take into account the offset of the input Buffer. 
- FFmpeg.avcodec_decode_video( - avctx, - avframe.getPtr(), - got_picture, - (byte[]) in.getData(), in.getLength()); - - if (!got_picture[0]) - { - if ((in.getFlags() & Buffer.FLAG_RTP_MARKER) != 0) - { - if (keyFrameControl != null) - keyFrameControl.requestKeyFrame(!gotPictureAtLeastOnce); - } - - out.setDiscard(true); - return BUFFER_PROCESSED_OK; - } - gotPictureAtLeastOnce = true; - - // format - int width = FFmpeg.avcodeccontext_get_width(avctx); - int height = FFmpeg.avcodeccontext_get_height(avctx); - - if ((width > 0) - && (height > 0) - && ((this.width != width) || (this.height != height))) - { - this.width = width; - this.height = height; - - // Output in same size and frame rate as input. - Dimension outSize = new Dimension(this.width, this.height); - VideoFormat inFormat = (VideoFormat) in.getFormat(); - float outFrameRate = ensureFrameRate(inFormat.getFrameRate()); - - outputFormat - = new AVFrameFormat( - outSize, - outFrameRate, - FFmpeg.PIX_FMT_YUV420P); - } - out.setFormat(outputFormat); - - // data - if (out.getData() != avframe) - out.setData(avframe); - - // timeStamp - long pts = FFmpeg.AV_NOPTS_VALUE; // TODO avframe_get_pts(avframe); - - if (pts == FFmpeg.AV_NOPTS_VALUE) - { - out.setTimeStamp(in.getTimeStamp()); - } - else - { - out.setTimeStamp(pts); - - int outFlags = out.getFlags(); - - outFlags |= Buffer.FLAG_RELATIVE_TIME; - outFlags &= ~(Buffer.FLAG_RTP_TIME | Buffer.FLAG_SYSTEM_TIME); - out.setFlags(outFlags); - } - - return BUFFER_PROCESSED_OK; - } - - /** - * Sets the Format of the media data to be input for processing in - * this Codec. 
- * - * @param format the Format of the media data to be input for - * processing in this Codec - * @return the Format of the media data to be input for processing - * in this Codec if format is compatible with this - * Codec; otherwise, null - */ - @Override - public Format setInputFormat(Format format) - { - Format setFormat = super.setInputFormat(format); - - if (setFormat != null) - reset(); - return setFormat; - } - - /** - * Sets the KeyFrameControl to be used by this - * DePacketizer as a means of control over its key frame-related - * logic. - * - * @param keyFrameControl the KeyFrameControl to be used by this - * DePacketizer as a means of control over its key frame-related - * logic - */ - public void setKeyFrameControl(KeyFrameControl keyFrameControl) - { - this.keyFrameControl = keyFrameControl; - } -} +package org.jitsi.impl.neomedia.codec.video.h264; + +import java.awt.*; +import java.io.*; + +import javax.media.*; +import javax.media.format.*; + +import net.iharder.*; +import net.sf.fmj.media.*; + +import org.jitsi.impl.neomedia.codec.*; +import org.jitsi.impl.neomedia.codec.video.*; +import org.jitsi.impl.neomedia.format.*; +import org.jitsi.service.neomedia.codec.*; +import org.jitsi.service.neomedia.control.*; +import org.jitsi.util.*; + +/** + * Decodes H.264 NAL units and returns the resulting frames as FFmpeg + * AVFrames (i.e. in YUV format). + * + * @author Damian Minkov + * @author Lyubomir Marinov + * @author Sebastien Vincent + */ +public class JNIDecoder + extends AbstractCodec +{ + /** + * The default output VideoFormat. + */ + private static final VideoFormat[] DEFAULT_OUTPUT_FORMATS + = new VideoFormat[] { new AVFrameFormat(FFmpeg.PIX_FMT_YUV420P) }; + + /** + * The Logger used by the JNIDecoder class and its + * instances to print debug-related information. + */ + private static final Logger logger = Logger.getLogger(JNIDecoder.class); + + /** + * Plugin name. 
+ */ + private static final String PLUGIN_NAME = "H.264 Decoder"; + + /** + * The codec context native pointer we will use. + */ + private long avctx; + + /** + * The AVFrame in which the video frame decoded from the encoded + * media data is stored. + */ + private AVFrame avframe; + + /** + * If decoder has got a picture. + */ + private final boolean[] got_picture = new boolean[1]; + + private boolean gotPictureAtLeastOnce; + + /** + * The last known height of {@link #avctx} i.e. the video output by this + * JNIDecoder. Used to detect changes in the output size. + */ + private int height; + + /** + * The KeyFrameControl used by this JNIDecoder to + * control its key frame-related logic. + */ + private KeyFrameControl keyFrameControl; + + /** + * Array of output VideoFormats. + */ + private final VideoFormat[] outputFormats; + + /** + * The last known width of {@link #avctx} i.e. the video output by this + * JNIDecoder. Used to detect changes in the output size. + */ + private int width; + + /** + * Initializes a new JNIDecoder instance which is to decode H.264 + * NAL units into frames in YUV format. + */ + public JNIDecoder() + { + inputFormats + = new VideoFormat[] + { + /* + * Explicitly state both ParameterizedVideoFormat (to + * receive any format parameters which may be of concern + * to this JNIDecoder) and VideoFormat (to make sure + * that nothing breaks because of equality and/or + * matching tests involving ParameterizedVideoFormat). + */ + new ParameterizedVideoFormat(Constants.H264), + new VideoFormat(Constants.H264) + }; + outputFormats = DEFAULT_OUTPUT_FORMATS; + } + + /** + * Check Format. + * + * @param format Format to check + * @return true if Format is H264_RTP + */ + public boolean checkFormat(Format format) + { + return format.getEncoding().equals(Constants.H264_RTP); + } + + /** + * Close Codec. 
+ */ + @Override + public synchronized void close() + { + if (opened) + { + opened = false; + super.close(); + + FFmpeg.avcodec_close(avctx); + FFmpeg.av_free(avctx); + avctx = 0; + + if (avframe != null) + { + avframe.free(); + avframe = null; + } + + gotPictureAtLeastOnce = false; + } + } + + /** + * Ensure frame rate. + * + * @param frameRate frame rate + * @return frame rate + */ + private float ensureFrameRate(float frameRate) + { + return frameRate; + } + + /** + * Get matching outputs for a specified input Format. + * + * @param inputFormat input Format + * @return array of matching outputs or null if there are no matching + * outputs. + */ + protected Format[] getMatchingOutputFormats(Format inputFormat) + { + VideoFormat inputVideoFormat = (VideoFormat) inputFormat; + + return + new Format[] + { + new AVFrameFormat( + inputVideoFormat.getSize(), + ensureFrameRate(inputVideoFormat.getFrameRate()), + FFmpeg.PIX_FMT_YUV420P) + }; + } + + /** + * Get plugin name. + * + * @return "H.264 Decoder" + */ + @Override + public String getName() + { + return PLUGIN_NAME; + } + + /** + * Get all supported output Formats. + * + * @param inputFormat input Format to determine corresponding + * output Format/tt>s + * @return an array of supported output Formats + */ + @Override + public Format[] getSupportedOutputFormats(Format inputFormat) + { + Format[] supportedOutputFormats; + + if (inputFormat == null) + { + supportedOutputFormats = outputFormats; + } + else + { + // mismatch input format + if (!(inputFormat instanceof VideoFormat) + || (AbstractCodec2.matches(inputFormat, inputFormats) + == null)) + { + supportedOutputFormats = AbstractCodec2.EMPTY_FORMATS; + } + else + { + // match input format + supportedOutputFormats = getMatchingOutputFormats(inputFormat); + } + } + return supportedOutputFormats; + } + + /** + * Handles any format parameters of the input and/or output Formats + * with which this JNIDecoder has been configured. 
For example, + * takes into account the format parameter sprop-parameter-sets if + * it is specified by the input Format. + */ + private void handleFmtps() + { + try + { + + Format f = getInputFormat(); + + if (f instanceof ParameterizedVideoFormat) + { + ParameterizedVideoFormat pvf = (ParameterizedVideoFormat) f; + String spropParameterSets + = pvf.getFormatParameter( + VideoMediaFormatImpl.H264_SPROP_PARAMETER_SETS_FMTP); + + if (spropParameterSets != null) + { + ByteArrayOutputStream nals = new ByteArrayOutputStream(); + + for (String s : spropParameterSets.split(",")) + { + if ((s != null) && (s.length() != 0)) + { + byte[] nal = Base64.decode(s); + + if ((nal != null) && (nal.length != 0)) + { + nals.write(DePacketizer.NAL_PREFIX); + nals.write(nal); + } + } + } + if (nals.size() != 0) + { + // Add padding because it seems to be required by FFmpeg. + for (int i = 0; + i < FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE; + i++) + { + nals.write(0); + } + + /* + * In accord with RFC 6184 "RTP Payload Format for H.264 + * Video", place the NAL units conveyed by + * sprop-parameter-sets in the NAL unit stream to precede + * any other NAL units in decoding order. + */ + FFmpeg.avcodec_decode_video( + avctx, + avframe.getPtr(), + got_picture, + nals.toByteArray(), nals.size()); + } + } + } + + /* + * Because the handling of format parameter is new at the time of this + * writing and it currently handles only the format parameter + * sprop-parameter-sets the failed handling of which will be made + * visible later on anyway, do not let it kill this JNIDecoder. + */ + } + catch (Throwable t) + { + if (t instanceof InterruptedException) + Thread.currentThread().interrupt(); + else if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + logger.error("Failed to handle format parameters", t); + } + } + + /** + * Inits the codec instances. 
+ * + * @throws ResourceUnavailableException if codec initialization failed + */ + @Override + public synchronized void open() + throws ResourceUnavailableException + { + if (opened) + return; + + if (avframe != null) + { + avframe.free(); + avframe = null; + } + avframe = new AVFrame(); + + long avcodec = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_H264); + + if (avcodec == 0) + { + throw new ResourceUnavailableException( + "Could not find H.264 decoder."); + } + + avctx = FFmpeg.avcodec_alloc_context3(avcodec); + FFmpeg.avcodeccontext_set_workaround_bugs(avctx, + FFmpeg.FF_BUG_AUTODETECT); + + /* allow to pass incomplete frame to decoder */ + FFmpeg.avcodeccontext_add_flags2(avctx, + FFmpeg.CODEC_FLAG2_CHUNKS); + + if (FFmpeg.avcodec_open2(avctx, avcodec) < 0) + throw new RuntimeException("Could not open H.264 decoder."); + + gotPictureAtLeastOnce = false; + + opened = true; + super.open(); + + /* + * After this JNIDecoder has been opened, handle format parameters such + * as sprop-parameter-sets which require this JNIDecoder to be in the + * opened state. + */ + handleFmtps(); + } + + /** + * Decodes H.264 media data read from a specific input Buffer into + * a specific output Buffer. + * + * @param in input Buffer + * @param out output Buffer + * @return BUFFER_PROCESSED_OK if in has been successfully + * processed + */ + @Override + public synchronized int process(Buffer in, Buffer out) + { + if (!checkInputBuffer(in)) + return BUFFER_PROCESSED_FAILED; + if (isEOM(in) || !opened) + { + propagateEOM(out); + return BUFFER_PROCESSED_OK; + } + if (in.isDiscard()) + { + out.setDiscard(true); + return BUFFER_PROCESSED_OK; + } + + // Ask FFmpeg to decode. + got_picture[0] = false; + // TODO Take into account the offset of the input Buffer. 
+ FFmpeg.avcodec_decode_video( + avctx, + avframe.getPtr(), + got_picture, + (byte[]) in.getData(), in.getLength()); + + if (!got_picture[0]) + { + if ((in.getFlags() & Buffer.FLAG_RTP_MARKER) != 0) + { + if (keyFrameControl != null) + keyFrameControl.requestKeyFrame(!gotPictureAtLeastOnce); + } + + out.setDiscard(true); + return BUFFER_PROCESSED_OK; + } + gotPictureAtLeastOnce = true; + + // format + int width = FFmpeg.avcodeccontext_get_width(avctx); + int height = FFmpeg.avcodeccontext_get_height(avctx); + + if ((width > 0) + && (height > 0) + && ((this.width != width) || (this.height != height))) + { + this.width = width; + this.height = height; + + // Output in same size and frame rate as input. + Dimension outSize = new Dimension(this.width, this.height); + VideoFormat inFormat = (VideoFormat) in.getFormat(); + float outFrameRate = ensureFrameRate(inFormat.getFrameRate()); + + outputFormat + = new AVFrameFormat( + outSize, + outFrameRate, + FFmpeg.PIX_FMT_YUV420P); + } + out.setFormat(outputFormat); + + // data + if (out.getData() != avframe) + out.setData(avframe); + + // timeStamp + long pts = FFmpeg.AV_NOPTS_VALUE; // TODO avframe_get_pts(avframe); + + if (pts == FFmpeg.AV_NOPTS_VALUE) + { + out.setTimeStamp(in.getTimeStamp()); + } + else + { + out.setTimeStamp(pts); + + int outFlags = out.getFlags(); + + outFlags |= Buffer.FLAG_RELATIVE_TIME; + outFlags &= ~(Buffer.FLAG_RTP_TIME | Buffer.FLAG_SYSTEM_TIME); + out.setFlags(outFlags); + } + + return BUFFER_PROCESSED_OK; + } + + /** + * Sets the Format of the media data to be input for processing in + * this Codec. 
+ * + * @param format the Format of the media data to be input for + * processing in this Codec + * @return the Format of the media data to be input for processing + * in this Codec if format is compatible with this + * Codec; otherwise, null + */ + @Override + public Format setInputFormat(Format format) + { + Format setFormat = super.setInputFormat(format); + + if (setFormat != null) + reset(); + return setFormat; + } + + /** + * Sets the KeyFrameControl to be used by this + * DePacketizer as a means of control over its key frame-related + * logic. + * + * @param keyFrameControl the KeyFrameControl to be used by this + * DePacketizer as a means of control over its key frame-related + * logic + */ + public void setKeyFrameControl(KeyFrameControl keyFrameControl) + { + this.keyFrameControl = keyFrameControl; + } +} diff --git a/src/org/jitsi/impl/neomedia/control/DiagnosticsControl.java b/src/org/jitsi/impl/neomedia/control/DiagnosticsControl.java index 30704a544..2be1b2608 100644 --- a/src/org/jitsi/impl/neomedia/control/DiagnosticsControl.java +++ b/src/org/jitsi/impl/neomedia/control/DiagnosticsControl.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,42 +13,42 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.control; - -import javax.media.*; - -/** - * Defines an FMJ Control which allows the diagnosis of the functional - * health of a procedure/process. - * - * @author Lyubomir Marinov - */ -public interface DiagnosticsControl - extends Control -{ - /** - * The constant which expresses a non-existent time in milliseconds for the - * purposes of {@link #getMalfuntioningSince()}. Explicitly chosen to be - * 0 rather than -1 in the name of efficiency. 
- */ - public static final long NEVER = 0; - - /** - * Gets the time in milliseconds at which the associated procedure/process - * has started malfunctioning. - * - * @return the time in milliseconds at which the associated - * procedure/process has started malfunctioning or NEVER if the - * associated procedure/process is functioning normally - */ - public long getMalfunctioningSince(); - - /** - * Returns a human-readable String representation of the associated - * procedure/process. - * - * @return a human-readable String representation of the associated - * procedure/process - */ - public String toString(); -} +package org.jitsi.impl.neomedia.control; + +import javax.media.*; + +/** + * Defines an FMJ Control which allows the diagnosis of the functional + * health of a procedure/process. + * + * @author Lyubomir Marinov + */ +public interface DiagnosticsControl + extends Control +{ + /** + * The constant which expresses a non-existent time in milliseconds for the + * purposes of {@link #getMalfuntioningSince()}. Explicitly chosen to be + * 0 rather than -1 in the name of efficiency. + */ + public static final long NEVER = 0; + + /** + * Gets the time in milliseconds at which the associated procedure/process + * has started malfunctioning. + * + * @return the time in milliseconds at which the associated + * procedure/process has started malfunctioning or NEVER if the + * associated procedure/process is functioning normally + */ + public long getMalfunctioningSince(); + + /** + * Returns a human-readable String representation of the associated + * procedure/process. 
+ * + * @return a human-readable String representation of the associated + * procedure/process + */ + public String toString(); +} diff --git a/src/org/jitsi/impl/neomedia/device/DirectShowSystem.java b/src/org/jitsi/impl/neomedia/device/DirectShowSystem.java index 461f3aa21..0e06713ba 100644 --- a/src/org/jitsi/impl/neomedia/device/DirectShowSystem.java +++ b/src/org/jitsi/impl/neomedia/device/DirectShowSystem.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,133 +13,133 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.device; - -import java.util.*; - -import javax.media.*; - -import org.jitsi.impl.neomedia.*; -import org.jitsi.impl.neomedia.codec.*; -import org.jitsi.impl.neomedia.codec.video.*; -import org.jitsi.impl.neomedia.jmfext.media.protocol.directshow.*; -import org.jitsi.service.neomedia.*; -import org.jitsi.util.*; - -/** - * Discovers and registers DirectShow video capture devices with JMF. - * - * @author Sebastien Vincent - * @author Lyubomir Marinov - */ -public class DirectShowSystem - extends DeviceSystem -{ - /** - * The protocol of the MediaLocators identifying QuickTime/QTKit - * capture devices. - */ - private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_DIRECTSHOW; - - /** - * The Logger used by the DirectShowSystem class and its - * instances for logging output. - */ - private static final Logger logger - = Logger.getLogger(DirectShowSystem.class); - - /** - * Constructor. Discover and register DirectShow capture devices - * with JMF. 
- * - * @throws Exception if anything goes wrong while discovering and - * registering DirectShow capture defines with JMF - */ - public DirectShowSystem() - throws Exception - { - super(MediaType.VIDEO, LOCATOR_PROTOCOL); - } - - @Override - protected void doInitialize() - throws Exception - { - DSManager manager = new DSManager(); - - try - { - DSCaptureDevice devices[] = manager.getCaptureDevices(); - boolean captureDeviceInfoIsAdded = false; - - for(int i = 0, count = (devices == null) ? 0 : devices.length; - i < count; - i++) - { - DSCaptureDevice device = devices[i]; - DSFormat[] dsFormats = device.getSupportedFormats(); - String name = device.getName(); - - if (dsFormats.length == 0) - { - logger.warn( - "Camera '" + name - + "' reported no supported formats."); - continue; - } - - List formats - = new ArrayList(dsFormats.length); - - for (DSFormat dsFormat : dsFormats) - { - int pixelFormat = dsFormat.getPixelFormat(); - int ffmpegPixFmt = DataSource.getFFmpegPixFmt(pixelFormat); - - if (ffmpegPixFmt != FFmpeg.PIX_FMT_NONE) - { - Format format - = new AVFrameFormat(ffmpegPixFmt, pixelFormat); - - if (!formats.contains(format)) - formats.add(format); - } - } - if (formats.isEmpty()) - { - logger.warn( - "No support for the formats of camera '" + name - + "': " + Arrays.toString(dsFormats)); - continue; - } - - Format[] formatsArray - = formats.toArray(new Format[formats.size()]); - - if(logger.isInfoEnabled()) - { - logger.info( - "Support for the formats of camera '" + name - + "': " + Arrays.toString(formatsArray)); - } - - CaptureDeviceInfo cdi - = new CaptureDeviceInfo( - name, - new MediaLocator(LOCATOR_PROTOCOL + ':' + name), - formatsArray); - - CaptureDeviceManager.addDevice(cdi); - captureDeviceInfoIsAdded = true; - } - - if (captureDeviceInfoIsAdded - && !MediaServiceImpl.isJmfRegistryDisableLoad()) - CaptureDeviceManager.commit(); - } - finally - { - manager.dispose(); - } - } -} +package org.jitsi.impl.neomedia.device; + +import java.util.*; + 
+import javax.media.*; + +import org.jitsi.impl.neomedia.*; +import org.jitsi.impl.neomedia.codec.*; +import org.jitsi.impl.neomedia.codec.video.*; +import org.jitsi.impl.neomedia.jmfext.media.protocol.directshow.*; +import org.jitsi.service.neomedia.*; +import org.jitsi.util.*; + +/** + * Discovers and registers DirectShow video capture devices with JMF. + * + * @author Sebastien Vincent + * @author Lyubomir Marinov + */ +public class DirectShowSystem + extends DeviceSystem +{ + /** + * The protocol of the MediaLocators identifying QuickTime/QTKit + * capture devices. + */ + private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_DIRECTSHOW; + + /** + * The Logger used by the DirectShowSystem class and its + * instances for logging output. + */ + private static final Logger logger + = Logger.getLogger(DirectShowSystem.class); + + /** + * Constructor. Discover and register DirectShow capture devices + * with JMF. + * + * @throws Exception if anything goes wrong while discovering and + * registering DirectShow capture defines with JMF + */ + public DirectShowSystem() + throws Exception + { + super(MediaType.VIDEO, LOCATOR_PROTOCOL); + } + + @Override + protected void doInitialize() + throws Exception + { + DSManager manager = new DSManager(); + + try + { + DSCaptureDevice devices[] = manager.getCaptureDevices(); + boolean captureDeviceInfoIsAdded = false; + + for(int i = 0, count = (devices == null) ? 
0 : devices.length; + i < count; + i++) + { + DSCaptureDevice device = devices[i]; + DSFormat[] dsFormats = device.getSupportedFormats(); + String name = device.getName(); + + if (dsFormats.length == 0) + { + logger.warn( + "Camera '" + name + + "' reported no supported formats."); + continue; + } + + List formats + = new ArrayList(dsFormats.length); + + for (DSFormat dsFormat : dsFormats) + { + int pixelFormat = dsFormat.getPixelFormat(); + int ffmpegPixFmt = DataSource.getFFmpegPixFmt(pixelFormat); + + if (ffmpegPixFmt != FFmpeg.PIX_FMT_NONE) + { + Format format + = new AVFrameFormat(ffmpegPixFmt, pixelFormat); + + if (!formats.contains(format)) + formats.add(format); + } + } + if (formats.isEmpty()) + { + logger.warn( + "No support for the formats of camera '" + name + + "': " + Arrays.toString(dsFormats)); + continue; + } + + Format[] formatsArray + = formats.toArray(new Format[formats.size()]); + + if(logger.isInfoEnabled()) + { + logger.info( + "Support for the formats of camera '" + name + + "': " + Arrays.toString(formatsArray)); + } + + CaptureDeviceInfo cdi + = new CaptureDeviceInfo( + name, + new MediaLocator(LOCATOR_PROTOCOL + ':' + name), + formatsArray); + + CaptureDeviceManager.addDevice(cdi); + captureDeviceInfoIsAdded = true; + } + + if (captureDeviceInfoIsAdded + && !MediaServiceImpl.isJmfRegistryDisableLoad()) + CaptureDeviceManager.commit(); + } + finally + { + manager.dispose(); + } + } +} diff --git a/src/org/jitsi/impl/neomedia/device/WASAPISystem.java b/src/org/jitsi/impl/neomedia/device/WASAPISystem.java index d7ed5c601..bfc284bf2 100644 --- a/src/org/jitsi/impl/neomedia/device/WASAPISystem.java +++ b/src/org/jitsi/impl/neomedia/device/WASAPISystem.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,1684 +13,1684 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.impl.neomedia.device; - -import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.VoiceCaptureDSP.*; -import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*; - -import java.util.*; - -import javax.media.*; -import javax.media.format.*; - -import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*; -import org.jitsi.impl.neomedia.jmfext.media.renderer.audio.*; -import org.jitsi.service.neomedia.codec.*; -import org.jitsi.util.*; - -/** - * Implements an AudioSystem using Windows Audio Session API (WASAPI) - * and related Core Audio APIs such as Multimedia Device (MMDevice) API. - * - * @author Lyubomir Marinov - */ -public class WASAPISystem - extends AudioSystem -{ - /** - * A GUID which identifies the audio session that streams belong to. - */ - private static String audioSessionGuid; - - /** - * The default duration of audio data in milliseconds to be read from - * WASAPIStream in an invocation of - * {@link WASAPIStream#read(Buffer)} or to be processed by - * WASAPIRenderer in an invocation of - * {@link WASAPIRenderer#process(Buffer)}. - */ - public static final long DEFAULT_BUFFER_DURATION = 20; - - /** - * The default interval in milliseconds between periodic processing passes - * by the audio engine. - */ - public static final long DEFAULT_DEVICE_PERIOD = 10; - - /** - * The protocol of the MediaLocator identifying - * CaptureDeviceInfo contributed by WASAPISystem. - */ - private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_WASAPI; - - /** - * The logger used by the WASAPISystem class and its instances to - * log debugging information. - */ - private static final Logger logger = Logger.getLogger(WASAPISystem.class); - - /** - * Invokes the Windows API function CoInitializeEx (by way of - * {@link WASAPI#CoInitializeEx(long, int)}) with arguments suitable to the - * operation of WASAPIRenderer, WASAPIStream and - * WASAPISystem. - *

- * Generally, the WASAPI integration is designed with - * COINIT_MULTITHREADED in mind. However, it may turn out that it - * works with COINIT_APARTMENTTHREADED as well. - *

- * - * @return the value returned by the invocation of the Windows API function - * CoInitializeEx - * @throws HResultException if the invocation of the method - * WASAPI.CoInitializeEx throws such an exception - */ - public static int CoInitializeEx() - throws HResultException - { - int hr; - - try - { - hr = WASAPI.CoInitializeEx(0, COINIT_MULTITHREADED); - } - catch (HResultException hre) - { - hr = hre.getHResult(); - switch (hr) - { - case RPC_E_CHANGED_MODE: - hr = S_FALSE; - // Do fall through. - case S_FALSE: - case S_OK: - break; - default: - throw hre; - } - } - return hr; - } - - /** - * Gets an array of alternative AudioFormats based on - * format with which an attempt is to be made to initialize a new - * IAudioClient instance. - * - * @param format the AudioFormat on which the alternative - * AudioFormats are to be based - * @return an array of alternative AudioFormats based on - * format with which an attempt is to be made to initialize a new - * IAudioClient instance - */ - public static AudioFormat[] getFormatsToInitializeIAudioClient( - AudioFormat format) - { - // We are able to convert between mono and stereo. - int channels; - - switch (format.getChannels()) - { - case 1: - channels = 2; - break; - case 2: - channels = 1; - break; - default: - return new AudioFormat[] { format }; - } - return - new AudioFormat[] - { - /* - * Regardless of the differences in the states of the - * support of mono and stereo in the library at the time - * of this writing, try to initialize a new IAudioClient - * instance with a format which will not require - * conversion between mono and stereo. 
- */ - format, - new AudioFormat( - format.getEncoding(), - format.getSampleRate(), - format.getSampleSizeInBits(), - channels, - AudioFormat.LITTLE_ENDIAN, - AudioFormat.SIGNED, - Format.NOT_SPECIFIED /* frameSizeInBits */, - Format.NOT_SPECIFIED /* frameRate */, - format.getDataType()) - }; - } - - /** - * Gets the size in bytes of an audio sample of a specific - * AudioFormat. - * - * @param format the AudioFormat to get the size in bytes of an - * audio sample of - * @return the size in bytes of an audio sample of the specified - * format - */ - public static int getSampleSizeInBytes(AudioFormat format) - { - int sampleSizeInBits = format.getSampleSizeInBits(); - - switch (sampleSizeInBits) - { - case 8: - return 1; - case 16: - return 2; - default: - return sampleSizeInBits / 8; - } - } - - /** - * Sets the fields of a specific WAVEFORMATEX instance from a - * specific AudioFormat instance so that the two of them are - * equivalent in terms of the formats of audio data that they describe. 
- * - * @param waveformatex the WAVEFORMATEX instance to set the fields - * of from the specified audioFormat - * @param audioFormat the AudioFormat instance to set the fields of - * the specified waveformatex from - */ - public static void WAVEFORMATEX_fill( - long waveformatex, - AudioFormat audioFormat) - { - if (!AudioFormat.LINEAR.equals(audioFormat.getEncoding())) - throw new IllegalArgumentException("audioFormat.encoding"); - - int channels = audioFormat.getChannels(); - - if (channels == Format.NOT_SPECIFIED) - throw new IllegalArgumentException("audioFormat.channels"); - - int sampleRate = (int) audioFormat.getSampleRate(); - - if (sampleRate == Format.NOT_SPECIFIED) - throw new IllegalArgumentException("audioFormat.sampleRate"); - - int sampleSizeInBits = audioFormat.getSampleSizeInBits(); - - if (sampleSizeInBits == Format.NOT_SPECIFIED) - throw new IllegalArgumentException("audioFormat.sampleSizeInBits"); - - char nBlockAlign = (char) ((channels * sampleSizeInBits) / 8); - - WASAPI.WAVEFORMATEX_fill( - waveformatex, - WAVE_FORMAT_PCM, - (char) channels, - sampleRate, - sampleRate * nBlockAlign, - nBlockAlign, - (char) sampleSizeInBits, - /* cbSize */ (char) 0); - } - - /** - * The pointer to the native IMediaObject interface instance of the - * voice capture DMO that supports/implements the acoustic echo cancellation - * (AEC) feature. - */ - private long aecIMediaObject; - - /** - * The List of AudioFormats supported by the voice capture - * DMO that supports/implements the acoustic echo cancellation (AEC) feature - * i.e. {@link #aecIMediaObject}. - */ - private List aecSupportedFormats; - - /** - * The pointer to the native IMMDeviceEnumerator interface instance - * which this WASAPISystem uses to enumerate the audio endpoint - * devices. 
- */ - private long iMMDeviceEnumerator; - - /** - * The IMMNotificationClient which is to notify this - * WASAPISystem when an audio endpoint device is added or removed, - * when the state or properties of an endpoint device change, or when there - * is a change in the default role assigned to an endpoint device. - */ - private IMMNotificationClient pNotify; - - /** - * A WAVEFORMATEX instance allocated in {@link #preInitialize()}, - * freed in {@link #postInitialize()} and made available during the - * execution of {@link #doInitialize()} in order to minimize memory - * fragmentation. - */ - private long waveformatex; - - /** - * Initializes a new WASAPISystem instance. - * - * @throws Exception if anything goes wrong while initializing the new - * WASAPISystem instance - */ - WASAPISystem() - throws Exception - { - super( - LOCATOR_PROTOCOL, - FEATURE_AGC - | FEATURE_DENOISE - | FEATURE_ECHO_CANCELLATION - | FEATURE_NOTIFY_AND_PLAYBACK_DEVICES - | FEATURE_REINITIALIZE); - } - - /** - * Invoked after determining the AudioFormats supported by an - * IAudioClient with a specific dataFlow and before - * registering a respective CaptureDeviceInfo2 to represent that - * IAudioClient. Allows this instance to add and/or remove - * AudioFormats that it will and/or will not support in addition to - * the support of the very IAudioClient. - * - * @param dataFlow the flow of the media supported by the associated - * IAudioClient - * @param formats the List of AudioFormats supported by - * the associated IAudioClient - */ - private void configureSupportedFormats( - int dataFlow, - List formats) - { - switch (dataFlow) - { - case eCapture: - /* - * If acoustic echo cancellation (AEC) is used later on, the - * CaptureDevice/DataSource implementation will support its - * formats. 
- */ - List aecSupportedFormats - = getAECSupportedFormats(); - - if (!aecSupportedFormats.isEmpty()) - { - for (AudioFormat format : aecSupportedFormats) - if (!formats.contains(format)) - formats.add(format); - } - break; - - case eRender: - /* - * WASAPIRenderer has to be able to change its render endpoint - * device on the fly. Since the new render endpoint device may not - * support the inputFormat of the WASAPIRenderer which has been - * negotiated based on the old render endpoint device, - * WASAPIRenderer has to be able to resample. Expand the list of - * supported formats with the supported input formats of - * appropriate resamplers. - */ - for (int i = 0, count = formats.size(); i < count; i++) - { - AudioFormat outFormat = formats.get(i); - /* - * The resamplers are not expected to convert between mono and - * stereo. - */ - AudioFormat inFormat - = new AudioFormat( - AudioFormat.LINEAR, - /* sampleRate */ Format.NOT_SPECIFIED, - /* sampleSizeInBits */ Format.NOT_SPECIFIED, - outFormat.getChannels(), - AbstractAudioRenderer.NATIVE_AUDIO_FORMAT_ENDIAN, - AudioFormat.SIGNED, - /* frameSizeInBits */ Format.NOT_SPECIFIED, - /* frameRate */ Format.NOT_SPECIFIED, - Format.byteArray); - @SuppressWarnings("unchecked") - List classNames - = PlugInManager.getPlugInList( - inFormat, - outFormat, - PlugInManager.CODEC); - - if ((classNames != null) && !classNames.isEmpty()) - { - for (String className : classNames) - { - try - { - Codec codec - = (Codec) - Class.forName(className).newInstance(); - Format[] inFormats - = codec.getSupportedInputFormats(); - - if (inFormats != null) - { - for (Format aInFormat : inFormats) - { - if (!(aInFormat instanceof AudioFormat) - || !inFormat.matches(aInFormat)) - continue; - - Format[] outFormats - = codec.getSupportedOutputFormats( - aInFormat); - boolean add = false; - - if (outFormats != null) - { - for (Format aOutFormat : outFormats) - { - if (outFormat.matches(aOutFormat)) - { - add = true; - break; - } - } - } - if (add 
&& !formats.contains(aInFormat)) - formats.add((AudioFormat) aInFormat); - } - } - } - catch (Throwable t) - { - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - /* - * The failings of a resampler are of no concern - * here. - */ - } - } - } - } - break; - - default: - throw new IllegalArgumentException("dataFlow"); - } - } - - /** - * {@inheritDoc} - */ - @Override - protected void doInitialize() - throws Exception - { - List captureDevices; - List playbackDevices; - - /* - * We want to protect iMMDeviceEnumerator because it may be accessed by - * multiple threads. Which the method doInitialize will not be invoked - * more than once at a time, it may be concurrently invoked along with - * other methods. We do not want the methods setCaptureDevices and - * setPlaybackDevices in the synchronized block because they may fire - * events which may in turn lead to deadlocks. - */ - synchronized (this) - { - - /* - * XXX Multiple threads may invoke the initialization of a DeviceSystem - * so we cannot be sure that the COM library has been initialized for - * the current thread. - */ - WASAPISystem.CoInitializeEx(); - - if (iMMDeviceEnumerator == 0) - { - iMMDeviceEnumerator - = CoCreateInstance( - CLSID_MMDeviceEnumerator, - 0, - CLSCTX_ALL, - IID_IMMDeviceEnumerator); - if (iMMDeviceEnumerator == 0) - throw new IllegalStateException("iMMDeviceEnumerator"); - - /* - * Register this DeviceSystem to be notified when an audio endpoint - * device is added or removed, when the state or properties of an - * endpoint device change, or when there is a change in the default - * role assigned to an endpoint device. 
- */ - MMNotificationClient.RegisterEndpointNotificationCallback(pNotify); - } - - long iMMDeviceCollection - = IMMDeviceEnumerator_EnumAudioEndpoints( - iMMDeviceEnumerator, - eAll, - DEVICE_STATE_ACTIVE); - - if (iMMDeviceCollection == 0) - { - throw new RuntimeException( - "IMMDeviceEnumerator_EnumAudioEndpoints"); - } - try - { - int count = IMMDeviceCollection_GetCount(iMMDeviceCollection); - - captureDevices = new ArrayList(count); - playbackDevices = new ArrayList(count); - - if (count > 0) - { - // The acoustic echo cancellation (AEC) feature is optional. - maybeInitializeAEC(); - try - { - for (int i = 0; i < count; i++) - { - long iMMDevice - = IMMDeviceCollection_Item(iMMDeviceCollection, i); - - if (iMMDevice == 0) - { - throw new RuntimeException( - "IMMDeviceCollection_Item"); - } - try - { - doInitializeIMMDevice( - iMMDevice, - captureDevices, playbackDevices); - } - catch (Throwable t) - { - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - /* - * We do not want the initialization of one - * IMMDevice to prevent the initialization of other - * IMMDevices. - */ - logger.error( - "Failed to doInitialize for IMMDevice" - + " at index " + i, - t); - } - finally - { - IMMDevice_Release(iMMDevice); - } - } - } - finally - { - maybeUninitializeAEC(); - } - } - } - finally - { - IMMDeviceCollection_Release(iMMDeviceCollection); - } - - } // synchronized (this) - - setCaptureDevices(captureDevices); - setPlaybackDevices(playbackDevices); - } - - /** - * Implements the part of {@link #doInitialize()} related to a specific - * IMMDevice. 
- * - * @param iMMDevice the IMMDevice to initialize as part of the - * invocation of doInitialize() on this instance - * @throws HResultException if an error occurs while initializing the - * specified iMMDevice in a native WASAPI function which returns an - * HRESULT value - * @param captureDevices the state of the execution of - * doInitialize() which stores the CaptureDeviceInfo2s of - * the capture devices discovered by this WASAPISystem - * @param playbackDevices the state of the execution of - * doInitialize() which stores the CaptureDeviceInfo2s of - * the playback devices discovered by this WASAPISystem - */ - private void doInitializeIMMDevice( - long iMMDevice, - List captureDevices, - List playbackDevices) - throws HResultException - { - String id = IMMDevice_GetId(iMMDevice); - - /* - * The ID of the IMMDevice is required because it will be used within - * the MediaLocator of its representative CaptureDeviceInfo. - */ - if (id == null) - throw new RuntimeException("IMMDevice_GetId"); - - long iAudioClient - = IMMDevice_Activate(iMMDevice, IID_IAudioClient, CLSCTX_ALL, 0); - List formats; - - if (iAudioClient == 0) - throw new RuntimeException("IMMDevice_Activate"); - try - { - formats = getIAudioClientSupportedFormats(iAudioClient); - } - finally - { - IAudioClient_Release(iAudioClient); - } - if ((formats != null) && !formats.isEmpty()) - { - String name = null; - - try - { - name = getIMMDeviceFriendlyName(iMMDevice); - } - catch (Throwable t) - { - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - logger.warn( - "Failed to retrieve the PKEY_Device_FriendlyName" - + " of IMMDevice " + id, - t); - } - if ((name == null) || (name.length() == 0)) - name = id; - - int dataFlow = getIMMDeviceDataFlow(iMMDevice); - List devices; - - switch (dataFlow) - { - case eCapture: - devices = captureDevices; - break; - case eRender: - devices = playbackDevices; - break; - default: - devices = null; - logger.error( - "Failed to retrieve dataFlow from 
IMMEndpoint " + id); - break; - } - if (devices != null) - { - configureSupportedFormats(dataFlow, formats); - if (!formats.isEmpty()) - { - CaptureDeviceInfo2 cdi2 - = new CaptureDeviceInfo2( - name, - new MediaLocator(LOCATOR_PROTOCOL + ":" + id), - formats.toArray(new Format[formats.size()]), - id, - /* transportType */ null, - /* modelIdentifier */ null); - - devices.add(cdi2); - } - } - } - } - - /** - * {@inheritDoc} - */ - @Override - protected void finalize() - throws Throwable - { - try - { - synchronized (this) - { - if (iMMDeviceEnumerator != 0) - { - IMMDeviceEnumerator_Release(iMMDeviceEnumerator); - iMMDeviceEnumerator = 0; - } - } - } - finally - { - super.finalize(); - } - } - - /** - * Gets the List of AudioFormats supported by the voice - * capture DMO that supports/implements the acoustic echo cancellation (AEC) - * feature. - *

- * If an AudioFormat instance contained in the returned - * List is one of the formats of a - * CaptureDeviceInfo2 or the supportedFormats of a - * FormatControl associated with a WASAPI - * CaptureDevice/DataSource or SourceStream, it - * signals that the AudioFormat in question has been included in - * that formats or supportedFormats only because it is - * supported by the voice capture DMO supporting/implementing the acoustic - * echo cancellation (AEC) feature. - *

- * - * @return the List of AudioFormats supported by the voice - * capture DMO that supports/implements the acoustic echo cancellation (AEC) - * feature - */ - public List getAECSupportedFormats() - { - List aecSupportedFormats = this.aecSupportedFormats; - - if (aecSupportedFormats == null) - aecSupportedFormats = Collections.emptyList(); - return aecSupportedFormats; - } - - /** - * Gets a List of the AudioFormats supported by a specific - * IAudioClient. - * - * @param iAudioClient the IAudioClient to get the List of - * supported AudioFormats of - * @return a List of the AudioFormats supported by the - * specified iAudioClient - * @throws HResultException if an error occurs while retrieving the - * List of AudioFormats supported by the specified - * iAudioClient in a native WASAPI function which returns an - * HRESULT value - */ - private List getIAudioClientSupportedFormats(long iAudioClient) - throws HResultException - { - char cbSize = 0; - List supportedFormats = new ArrayList(); - - for (char nChannels = 1; nChannels <= 2; nChannels++) - { - for (int i = 0; i < Constants.AUDIO_SAMPLE_RATES.length; i++) - { - int nSamplesPerSec = (int) Constants.AUDIO_SAMPLE_RATES[i]; - - for (char wBitsPerSample = 16; - wBitsPerSample > 0; - wBitsPerSample -= 8) - { - char nBlockAlign - = (char) ((nChannels * wBitsPerSample) / 8); - - WASAPI.WAVEFORMATEX_fill( - waveformatex, - WAVE_FORMAT_PCM, - nChannels, - nSamplesPerSec, - nSamplesPerSec * nBlockAlign, - nBlockAlign, - wBitsPerSample, - cbSize); - - long pClosestMatch - = IAudioClient_IsFormatSupported( - iAudioClient, - AUDCLNT_SHAREMODE_SHARED, - waveformatex); - - if (pClosestMatch == 0) // not supported - continue; - try - { - /* - * Succeeded with a closest match to the specified - * format? - */ - if (pClosestMatch != waveformatex) - { - // We support AutioFormat.LINEAR only. 
- if (WAVEFORMATEX_getWFormatTag(pClosestMatch) - != WAVE_FORMAT_PCM) - continue; - - nChannels - = WAVEFORMATEX_getNChannels(pClosestMatch); - nSamplesPerSec - = WAVEFORMATEX_getNSamplesPerSec( - pClosestMatch); - wBitsPerSample - = WAVEFORMATEX_getWBitsPerSample( - pClosestMatch); - } - - AudioFormat supportedFormat; - - /* - * We are able to convert between mono and stereo. - * Additionally, the stereo support within the library - * is not as advanced as the mono support at the time of - * this writing. - */ - if (nChannels == 2) - { - supportedFormat - = new NativelySupportedAudioFormat( - AudioFormat.LINEAR, - nSamplesPerSec, - wBitsPerSample, - /* channels */ 1, - AbstractAudioRenderer - .NATIVE_AUDIO_FORMAT_ENDIAN, - AudioFormat.SIGNED, - /* frameSizeInBits */ - Format.NOT_SPECIFIED, - /* frameRate */ Format.NOT_SPECIFIED, - Format.byteArray); - if (!supportedFormats.contains(supportedFormat)) - supportedFormats.add(supportedFormat); - } - supportedFormat - = new NativelySupportedAudioFormat( - AudioFormat.LINEAR, - nSamplesPerSec, - wBitsPerSample, - nChannels, - AbstractAudioRenderer - .NATIVE_AUDIO_FORMAT_ENDIAN, - AudioFormat.SIGNED, - /* frameSizeInBits */ Format.NOT_SPECIFIED, - /* frameRate */ Format.NOT_SPECIFIED, - Format.byteArray); - if (!supportedFormats.contains(supportedFormat)) - supportedFormats.add(supportedFormat); - } - finally - { - if (pClosestMatch != waveformatex) - CoTaskMemFree(pClosestMatch); - } - } - } - } - return supportedFormats; - } - - /** - * Gets a List of the AudioFormats supported by a specific - * IMediaObject. 
- * - * @param iMediaObject the IMediaObject to get the List of - * supported AudioFormats of - * @return a List of the AudioFormats supported by the - * specified iMediaObject - * @throws HResultException if an error occurs while retrieving the - * List of AudioFormats supported by the specified - * iMediaObject in a native WASAPI function which returns an - * HRESULT value - */ - private List getIMediaObjectSupportedFormats(long iMediaObject) - throws HResultException - { - List supportedFormats = new ArrayList(); - long pmt = MoCreateMediaType(/* cbFormat */ 0); - - if (pmt == 0) - throw new OutOfMemoryError("MoCreateMediaType"); - try - { - char cbSize = 0; - int cbFormat = WAVEFORMATEX_sizeof() + cbSize; - int hresult - = DMO_MEDIA_TYPE_fill( - pmt, - /* majortype */ MEDIATYPE_Audio, - /* subtype */ MEDIASUBTYPE_PCM, - /* bFixedSizeSamples */ true, - /* bTemporalCompression */ false, - /* lSampleSize */ 0, - /* formattype */ FORMAT_WaveFormatEx, - /* pUnk */ 0, - cbFormat, - waveformatex); - - if (FAILED(hresult)) - throw new HResultException(hresult, "DMO_MEDIA_TYPE_fill"); - - for (char nChannels = 1; nChannels <= 2; nChannels++) - { - for (int i = 0; i < Constants.AUDIO_SAMPLE_RATES.length; i++) - { - int nSamplesPerSec = (int) Constants.AUDIO_SAMPLE_RATES[i]; - - for (char wBitsPerSample = 16; - wBitsPerSample > 0; - wBitsPerSample -= 8) - { - char nBlockAlign - = (char) ((nChannels * wBitsPerSample) / 8); - - WASAPI.WAVEFORMATEX_fill( - waveformatex, - WAVE_FORMAT_PCM, - nChannels, - nSamplesPerSec, - nSamplesPerSec * nBlockAlign, - nBlockAlign, - wBitsPerSample, - cbSize); - DMO_MEDIA_TYPE_setLSampleSize(pmt, wBitsPerSample / 8); - - try - { - hresult - = IMediaObject_SetOutputType( - iMediaObject, - /* dwOutputStreamIndex */ 0, - pmt, - /* dwFlags */ DMO_SET_TYPEF_TEST_ONLY); - } - catch (HResultException hre) - { - /* - * If the specified media type is not acceptable, - * IMediaObject::SetOutputType should return - * S_FALSE. 
Anyway, continue testing the other media - * types. - */ - hresult = hre.getHResult(); - } - if (S_OK == hresult) - { - AudioFormat supportedFormat - = new AudioFormat( - AudioFormat.LINEAR, - nSamplesPerSec, - wBitsPerSample, - nChannels, - AbstractAudioRenderer - .NATIVE_AUDIO_FORMAT_ENDIAN, - AudioFormat.SIGNED, - /* frameSizeInBits */ - Format.NOT_SPECIFIED, - /* frameRate */ Format.NOT_SPECIFIED, - Format.byteArray); - - if (!supportedFormats.contains(supportedFormat)) - supportedFormats.add(supportedFormat); - } - } - } - } - } - finally - { - /* - * XXX MoDeleteMediaType is documented to internally call - * MoFreeMediaType to free the format block but the format block has - * not been internally allocated by MoInitMediaType. - */ - DMO_MEDIA_TYPE_setCbFormat(pmt, 0); - DMO_MEDIA_TYPE_setFormattype(pmt, FORMAT_None); - DMO_MEDIA_TYPE_setPbFormat(pmt, 0); - MoDeleteMediaType(pmt); - } - - return supportedFormats; - } - - /** - * Gets an audio endpoint device that is identified by a specific endpoint - * ID string. - * - * @param id the endpoint ID string which identifies the audio endpoint - * device to be retrieved - * @return an IMMDevice instance which represents the audio - * endpoint device that is identified by the specified endpoint ID string - * @throws HResultException if an error occurs while retrieving the audio - * endpoint device that is identified by the specified endpoint ID string in - * a native WASAPI function which returns an HRESULT value - */ - public synchronized long getIMMDevice(String id) - throws HResultException - { - long iMMDeviceEnumerator = this.iMMDeviceEnumerator; - - if (iMMDeviceEnumerator == 0) - throw new IllegalStateException("iMMDeviceEnumerator"); - else - return IMMDeviceEnumerator_GetDevice(iMMDeviceEnumerator, id); - } - - /** - * Gets the data flow of a specific IMMDevice in the form of an - * EDataFlow value. 
- * - * @param iMMDevice the IMMDevice to get the data flow of - * @return an EDataFlow value which represents the data flow of the - * specified IMMDevice - * @throws HResultException if an error occurs while retrieving the data - * flow of the specified iMMDevice in a native WASAPI function - * which returns an HRESULT value - */ - public int getIMMDeviceDataFlow(long iMMDevice) - throws HResultException - { - long iMMEndpoint = IMMDevice_QueryInterface(iMMDevice, IID_IMMEndpoint); - int dataFlow; - - if (iMMEndpoint == 0) - throw new RuntimeException("IMMDevice_QueryInterface"); - try - { - dataFlow = IMMEndpoint_GetDataFlow(iMMEndpoint); - } - finally - { - IMMEndpoint_Release(iMMEndpoint); - } - switch (dataFlow) - { - case eAll: - case eCapture: - case eRender: - return dataFlow; - default: - throw new RuntimeException("IMMEndpoint_GetDataFlow"); - } - } - - /** - * Gets the PKEY_Device_FriendlyName of a specific - * IMMDevice which represents the human-readable name of the device - * (interface). 
- * - * @param iMMDevice the IMMDevice to get the - * friendly/human-readable name of - * @return the friendly/human-readable name of the specified - * iMMDevice - * @throws HResultException if an error occurs while retrieving the friendly - * name of the specified iMMDevice in a native WASAPI function - * which returns an HRESULT value - */ - private String getIMMDeviceFriendlyName(long iMMDevice) - throws HResultException - { - long iPropertyStore = IMMDevice_OpenPropertyStore(iMMDevice, STGM_READ); - - if (iPropertyStore == 0) - throw new RuntimeException("IMMDevice_OpenPropertyStore"); - - String deviceFriendlyName; - - try - { - deviceFriendlyName - = IPropertyStore_GetString( - iPropertyStore, - PKEY_Device_FriendlyName); - } - finally - { - IPropertyStore_Release(iPropertyStore); - } - return deviceFriendlyName; - } - - /** - * Gets the zero-based index within the IMMDeviceCollection - * interface of an audio endpoint device specified by an endpoint ID string. - * - * @param id the endpoint ID string which specifies the audio endpoint - * device whose zero-based index within the IMMDeviceCollection - * interface is to be retrieved - * @return the zero-based index within the IMMDeviceCollection - * interface of an audio endpoint device identified by the specified - * endpoint ID string if the specified endpoint ID string identifies an - * actual audio endpoint device within the IMMDeviceCollection - * interface; otherwise, -1 - * @throws HResultException if an error occurs while determining the - * zero-based index within the IMMDeviceCollection interface of the - * audio endpoint device identified by the specified endpoint ID string in a - * native WASAPI function which returns an HRESULT value - */ - public synchronized int getIMMDeviceIndex(String id, int dataFlow) - throws HResultException - { - long iMMDeviceEnumerator = this.iMMDeviceEnumerator; - - if (iMMDeviceEnumerator == 0) - throw new IllegalStateException("iMMDeviceEnumerator"); - - long 
iMMDeviceCollection - = IMMDeviceEnumerator_EnumAudioEndpoints( - iMMDeviceEnumerator, - dataFlow, - DEVICE_STATE_ACTIVE); - - if (iMMDeviceCollection == 0) - { - throw new RuntimeException( - "IMMDeviceEnumerator_EnumAudioEndpoints"); - } - - int iMMDeviceIndex = -1; - - try - { - int count = IMMDeviceCollection_GetCount(iMMDeviceCollection); - - if (count > 0) - { - for (int i = 0; i < count; i++) - { - long iMMDevice - = IMMDeviceCollection_Item(iMMDeviceCollection, i); - - if (iMMDevice == 0) - { - throw new RuntimeException( - "IMMDeviceCollection_Item"); - } - - String iMMDeviceID; - - try - { - iMMDeviceID = IMMDevice_GetId(iMMDevice); - } - finally - { - IMMDevice_Release(iMMDevice); - } - /* - * The endpoint ID strings include GUIDs so case insensitive - * comparison should be appropriate. If we wanted to be more - * strict, we would've invoked IMMDeviceCollection_GetDevice - * in order to have Windows Audio Session API (WASAPI) make - * the comparison of the enpoint ID strings. - */ - if (id.equalsIgnoreCase(iMMDeviceID)) - { - iMMDeviceIndex = i; - break; - } - } - } - } - finally - { - IMMDeviceCollection_Release(iMMDeviceCollection); - } - return iMMDeviceIndex; - } - - /** - * {@inheritDoc} - */ - @Override - protected String getRendererClassName() - { - return WASAPIRenderer.class.getName(); - } - - /** - * Initializes a new IMediaObject instance which represents a Voice - * Capture DSP implementing acoustic echo cancellation (AEC). - * - * @return a new IMediaObject instance which represents a Voice - * Capture DSP implementing acoustic echo cancellation (AEC) - * @throws Exception if initializing the new instance fails - */ - public long initializeAEC() - throws Exception - { - long iMediaObject = 0; - long iPropertyStore = 0; - long aecIMediaObject = 0; - - /* - * XXX Multiple threads may invoke the initialization of a DeviceSystem - * so we cannot be sure that the COM library has been initialized for - * the current thread. 
- */ - WASAPISystem.CoInitializeEx(); - - try - { - iMediaObject - = CoCreateInstance( - CLSID_CWMAudioAEC, - /* pUnkOuter */ 0, - CLSCTX_ALL, - IID_IMediaObject); - if (iMediaObject == 0) - throw new RuntimeException("CoCreateInstance"); - else - { - iPropertyStore - = IMediaObject_QueryInterface( - iMediaObject, - IID_IPropertyStore); - if (iPropertyStore == 0) - throw new RuntimeException("IMediaObject_QueryInterface"); - else - { - int hresult - = IPropertyStore_SetValue( - iPropertyStore, - MFPKEY_WMAAECMA_SYSTEM_MODE, - SINGLE_CHANNEL_AEC); - - if (FAILED(hresult)) - { - throw new HResultException( - hresult, - "IPropertyStore_SetValue" - + " MFPKEY_WMAAECMA_SYSTEM_MODE"); - } - else - { - aecIMediaObject = iMediaObject; - iMediaObject = 0; - } - } - } - } - finally - { - if (iPropertyStore != 0) - IPropertyStore_Release(iPropertyStore); - if (iMediaObject != 0) - IMediaObject_Release(iMediaObject); - } - return aecIMediaObject; - } - - /** - * Initializes a new IAudioClient instance for an audio endpoint - * device identified by a specific MediaLocator. The initialization - * is performed to an extent suitable for the operation of - * {@link WASAPIRenderer} and {@link WASAPIStream}. - * - * @param locator a MediaLocator which identifies the audio - * endpoint device to initialize a new IAudioClient instance for - * @param dataFlow the flow of media data to be supported by the audio - * endpoint device identified by the specified locator - * @param streamFlags - * @param eventHandle - * @param hnsBufferDuration the base of the duration in milliseconds of the - * buffer that the audio application will share with the audio engine. If - * {@link Format#NOT_SPECIFIED}, the method uses the default interval - * between periodic passes by the audio engine. - * @param formats an array of alternative AudioFormats with which - * initialization of a new IAudioClient instance is to be - * attempted. 
The first element of the formats array which is - * supported by the new IAudioClient instance is used to initialize - * it and any preceding elements are set to null to signify that - * they are not supported and to make it possible to retrieve the - * AudioFormat with which the new IAudioClient instance - * has been initialized. - * @return a new IAudioClient instance initialized for the audio - * endpoint device identified by the specified locator - * @throws HResultException if an error occurs while initializing a new - * IAudioClient for the audio endpoint device identified by the - * specified locator in a native WASAPI function which returns an - * HRESULT value - */ - public long initializeIAudioClient( - MediaLocator locator, - DataFlow dataFlow, - int streamFlags, - long eventHandle, - long hnsBufferDuration, - AudioFormat[] formats) - throws HResultException - { - - /* - * The Windows API function CoInitializeEx must be invoked on the - * current thread. Generally, the COM library must be initialized on a - * thread before calling any of the library functions (with a few - * exceptions) on that thread. Technically, that general requirement is - * not trivial to implement in the multi-threaded architecture of FMJ. - * Practically, we will perform the invocations where we have seen the - * return value CO_E_NOTINITIALIZED. - */ - WASAPISystem.CoInitializeEx(); - - String id = locator.getRemainder(); - long iMMDevice = getIMMDevice(id); - - if (iMMDevice == 0) - { - throw new RuntimeException( - "Failed to retrieve audio endpoint device " - + "with endpoint ID string " + id); - } - - long ret = 0; - - try - { - /* - * Assert that the audio endpoint device identified by the specified - * locator supports the specified dataFlow. 
- */ - int iMMDeviceDataFlow = getIMMDeviceDataFlow(iMMDevice); - - switch (dataFlow) - { - case CAPTURE: - if ((iMMDeviceDataFlow != eAll) - && (iMMDeviceDataFlow != eCapture)) - throw new IllegalArgumentException("dataFlow"); - break; - case NOTIFY: - case PLAYBACK: - if ((iMMDeviceDataFlow != eAll) - && (iMMDeviceDataFlow != eRender)) - throw new IllegalArgumentException("dataFlow"); - break; - } - - long iAudioClient - = IMMDevice_Activate( - iMMDevice, - IID_IAudioClient, - CLSCTX_ALL, - 0); - - if (iAudioClient == 0) - throw new RuntimeException("IMMDevice_Activate"); - try - { - long waveformatex = WAVEFORMATEX_alloc(); - - if (waveformatex == 0) - throw new OutOfMemoryError("WAVEFORMATEX_alloc"); - try - { - int shareMode = AUDCLNT_SHAREMODE_SHARED; - int waveformatexIsInitialized = Format.NOT_SPECIFIED; - - for (int i = 0; i < formats.length; i++) - { - WAVEFORMATEX_fill(waveformatex, formats[i]); - - long pClosestMatch - = IAudioClient_IsFormatSupported( - iAudioClient, - shareMode, - waveformatex); - - if (pClosestMatch == 0) - { - // not supported - } - else - { - try - { - if (pClosestMatch == waveformatex) - { - waveformatexIsInitialized = i; - break; - } - else - { - /* - * Succeeded with a closest match to the - * specified format. 
- */ - } - } - finally - { - if (pClosestMatch != waveformatex) - CoTaskMemFree(pClosestMatch); - } - } - } - if ((waveformatexIsInitialized < 0) - || (waveformatexIsInitialized >= formats.length)) - { - logUnsupportedFormats(dataFlow, locator, formats); - throw new IllegalArgumentException("formats"); - } - Arrays.fill(formats, 0, waveformatexIsInitialized, null); - - streamFlags |= AUDCLNT_STREAMFLAGS_NOPERSIST; - if (eventHandle != 0) - streamFlags |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK; - - if (hnsBufferDuration == Format.NOT_SPECIFIED) - { - hnsBufferDuration - = IAudioClient_GetDefaultDevicePeriod(iAudioClient) - / 10000; - if (hnsBufferDuration <= 1) - { - hnsBufferDuration - = WASAPISystem.DEFAULT_DEVICE_PERIOD; - } - } - - int hresult - = IAudioClient_Initialize( - iAudioClient, - shareMode, - streamFlags, - 3 * hnsBufferDuration * 10000, - /* hnsPeriodicity */ 0, - waveformatex, - audioSessionGuid); - - if (hresult != S_OK) - { - /* - * The execution is not expected to reach here. Anyway, - * be prepared to handle even such a case for the sake - * of completeness. - */ - throw new HResultException(hresult); - } - if (((streamFlags & AUDCLNT_STREAMFLAGS_EVENTCALLBACK) - == AUDCLNT_STREAMFLAGS_EVENTCALLBACK) - && (eventHandle != 0)) - { - IAudioClient_SetEventHandle(iAudioClient, eventHandle); - } - - ret = iAudioClient; - iAudioClient = 0; - } - finally - { - CoTaskMemFree(waveformatex); - } - } - finally - { - if (iAudioClient != 0) - IAudioClient_Release(iAudioClient); - } - } - finally - { - if (iMMDevice != 0) - IMMDevice_Release(iMMDevice); - } - return ret; - } - - /** - * Logs an error message describing that a device identified by a specific - * DataFlow and a specific MediaLocator does not support - * a specific list of Formats. 
- * - * @param dataFlow the flow of the media supported by the device which does - * not support the specified Formats - * @param locator the MediaLocator identifying the device which - * does not support the specified Formats - * @param unsupportedFormats the list of Format which are not - * supported by the device identified by the specified dataFlow and - * locator - */ - private void logUnsupportedFormats( - DataFlow dataFlow, MediaLocator locator, - Format[] unsupportedFormats) - { - StringBuilder msg = new StringBuilder(); - - msg.append("Unsupported formats: "); - msg.append(Arrays.toString(unsupportedFormats)); - msg.append('.'); - - Format[] supportedFormats; - - try - { - supportedFormats = getDevice(dataFlow, locator).getFormats(); - } - catch (Throwable t) - { - /* - * The supported formats are less important than the unsupported - * formats. - */ - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - supportedFormats = null; - } - msg.append("Supported formats: "); - msg.append(Arrays.toString(supportedFormats)); - msg.append('.'); - - logger.error(msg); - } - - /** - * Initializes the acoustic echo cancellation (AEC) feature if possible and - * if it has not been initialized yet. The method swallows any exceptions - * because the feature in question is optional. 
- */ - private void maybeInitializeAEC() - { - if ((aecIMediaObject != 0) || (aecSupportedFormats != null)) - return; - - try - { - long iMediaObject = initializeAEC(); - - try - { - List supportedFormats - = getIMediaObjectSupportedFormats(iMediaObject); - - if (!supportedFormats.isEmpty()) - { - aecIMediaObject = iMediaObject; - iMediaObject = 0; - aecSupportedFormats - = Collections.unmodifiableList( - supportedFormats); - } - } - finally - { - if (iMediaObject != 0) - IMediaObject_Release(iMediaObject); - } - } - catch (Throwable t) - { - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.error( - "Failed to initialize acoustic echo cancellation (AEC)", - t); - } - } - } - - /** - * Uninitializes the acoustic echo cancellation (AEC) feature if it has been - * initialized. The method swallows any exceptions because the feature in - * question is optional. - */ - private void maybeUninitializeAEC() - { - try - { - if (aecIMediaObject != 0) - { - IMediaObject_Release(aecIMediaObject); - aecIMediaObject = 0; - } - } - catch (Throwable t) - { - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.error( - "Failed to uninitialize acoustic echo cancellation (AEC)", - t); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - protected void postInitialize() - throws Exception - { - try - { - super.postInitialize(); - } - finally - { - if (waveformatex != 0) - { - CoTaskMemFree(waveformatex); - waveformatex = 0; - } - } - } - - /** - * {@inheritDoc} - */ - @Override - protected void preInitialize() - throws Exception - { - super.preInitialize(); - - /* - * Make sure a WAVEFORMATEX instance is available during the execution - * of doInitialize(). The field has been introduced to minimize memory - * fragmentation. 
- */ - if (waveformatex != 0) - { - CoTaskMemFree(waveformatex); - waveformatex = 0; - } - waveformatex = WAVEFORMATEX_alloc(); - if (waveformatex == 0) - throw new OutOfMemoryError("WAVEFORMATEX_alloc"); - - if (pNotify == null) - { - pNotify - = new IMMNotificationClient() - { - public void OnDefaultDeviceChanged( - int flow, - int role, - String pwstrDefaultDevice) - { - } - - public void OnDeviceAdded(String pwstrDeviceId) - { - reinitialize(pwstrDeviceId); - } - - public void OnDeviceRemoved(String pwstrDeviceId) - { - reinitialize(pwstrDeviceId); - } - - public void OnDeviceStateChanged( - String pwstrDeviceId, - int dwNewState) - { - reinitialize(pwstrDeviceId); - } - - public void OnPropertyValueChanged( - String pwstrDeviceId, - long key) - { - } - }; - } - - /* - * Generate a GUID to identify an audio session that steams to be - * initialized will belong to. - */ - if (audioSessionGuid == null) - { - try - { - audioSessionGuid = CoCreateGuid(); - } - catch (HResultException hre) - { - /* - * The application/library will work with the default audio - * session GUID. - */ - logger.warn("Failed to generate a new audio session GUID", hre); - } - } - } - - /** - * Reinitializes this WASAPISystem. The implementation assumes that - * the invocation is performed by the Multimedia Device (MMDevice) API and - * swallows any thrown Exception. - * - * @param deviceId the endpoint ID string that identifies the audio endpoint - * device which is related to the decision to reinitialize this - * WASAPISystem - */ - private void reinitialize(String deviceId) - { - try - { - /* - * XXX Invoke the initialize() method asynchronously in order to - * allow the Multimedia Device (MMDevice) callback to return - * immediately. Otherwise, the execution will freeze in the - * IAudioClient_Release function will freeze. Besides, the callback - * dispatches the notifications after the respective changes have - * been realized anyway. 
- */ - invokeDeviceSystemInitialize(this, true); - } - catch (Exception e) - { - logger.error("Failed to reinitialize " + getClass().getName(), e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public String toString() - { - return "Windows Audio Session API (WASAPI)"; - } -} +package org.jitsi.impl.neomedia.device; + +import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.VoiceCaptureDSP.*; +import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*; + +import java.util.*; + +import javax.media.*; +import javax.media.format.*; + +import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*; +import org.jitsi.impl.neomedia.jmfext.media.renderer.audio.*; +import org.jitsi.service.neomedia.codec.*; +import org.jitsi.util.*; + +/** + * Implements an AudioSystem using Windows Audio Session API (WASAPI) + * and related Core Audio APIs such as Multimedia Device (MMDevice) API. + * + * @author Lyubomir Marinov + */ +public class WASAPISystem + extends AudioSystem +{ + /** + * A GUID which identifies the audio session that streams belong to. + */ + private static String audioSessionGuid; + + /** + * The default duration of audio data in milliseconds to be read from + * WASAPIStream in an invocation of + * {@link WASAPIStream#read(Buffer)} or to be processed by + * WASAPIRenderer in an invocation of + * {@link WASAPIRenderer#process(Buffer)}. + */ + public static final long DEFAULT_BUFFER_DURATION = 20; + + /** + * The default interval in milliseconds between periodic processing passes + * by the audio engine. + */ + public static final long DEFAULT_DEVICE_PERIOD = 10; + + /** + * The protocol of the MediaLocator identifying + * CaptureDeviceInfo contributed by WASAPISystem. + */ + private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_WASAPI; + + /** + * The logger used by the WASAPISystem class and its instances to + * log debugging information. 
+ */ + private static final Logger logger = Logger.getLogger(WASAPISystem.class); + + /** + * Invokes the Windows API function CoInitializeEx (by way of + * {@link WASAPI#CoInitializeEx(long, int)}) with arguments suitable to the + * operation of WASAPIRenderer, WASAPIStream and + * WASAPISystem. + *

+ * Generally, the WASAPI integration is designed with + * COINIT_MULTITHREADED in mind. However, it may turn out that it + * works with COINIT_APARTMENTTHREADED as well. + *

+ * + * @return the value returned by the invocation of the Windows API function + * CoInitializeEx + * @throws HResultException if the invocation of the method + * WASAPI.CoInitializeEx throws such an exception + */ + public static int CoInitializeEx() + throws HResultException + { + int hr; + + try + { + hr = WASAPI.CoInitializeEx(0, COINIT_MULTITHREADED); + } + catch (HResultException hre) + { + hr = hre.getHResult(); + switch (hr) + { + case RPC_E_CHANGED_MODE: + hr = S_FALSE; + // Do fall through. + case S_FALSE: + case S_OK: + break; + default: + throw hre; + } + } + return hr; + } + + /** + * Gets an array of alternative AudioFormats based on + * format with which an attempt is to be made to initialize a new + * IAudioClient instance. + * + * @param format the AudioFormat on which the alternative + * AudioFormats are to be based + * @return an array of alternative AudioFormats based on + * format with which an attempt is to be made to initialize a new + * IAudioClient instance + */ + public static AudioFormat[] getFormatsToInitializeIAudioClient( + AudioFormat format) + { + // We are able to convert between mono and stereo. + int channels; + + switch (format.getChannels()) + { + case 1: + channels = 2; + break; + case 2: + channels = 1; + break; + default: + return new AudioFormat[] { format }; + } + return + new AudioFormat[] + { + /* + * Regardless of the differences in the states of the + * support of mono and stereo in the library at the time + * of this writing, try to initialize a new IAudioClient + * instance with a format which will not require + * conversion between mono and stereo. 
+ */ + format, + new AudioFormat( + format.getEncoding(), + format.getSampleRate(), + format.getSampleSizeInBits(), + channels, + AudioFormat.LITTLE_ENDIAN, + AudioFormat.SIGNED, + Format.NOT_SPECIFIED /* frameSizeInBits */, + Format.NOT_SPECIFIED /* frameRate */, + format.getDataType()) + }; + } + + /** + * Gets the size in bytes of an audio sample of a specific + * AudioFormat. + * + * @param format the AudioFormat to get the size in bytes of an + * audio sample of + * @return the size in bytes of an audio sample of the specified + * format + */ + public static int getSampleSizeInBytes(AudioFormat format) + { + int sampleSizeInBits = format.getSampleSizeInBits(); + + switch (sampleSizeInBits) + { + case 8: + return 1; + case 16: + return 2; + default: + return sampleSizeInBits / 8; + } + } + + /** + * Sets the fields of a specific WAVEFORMATEX instance from a + * specific AudioFormat instance so that the two of them are + * equivalent in terms of the formats of audio data that they describe. 
+ * + * @param waveformatex the WAVEFORMATEX instance to set the fields + * of from the specified audioFormat + * @param audioFormat the AudioFormat instance to set the fields of + * the specified waveformatex from + */ + public static void WAVEFORMATEX_fill( + long waveformatex, + AudioFormat audioFormat) + { + if (!AudioFormat.LINEAR.equals(audioFormat.getEncoding())) + throw new IllegalArgumentException("audioFormat.encoding"); + + int channels = audioFormat.getChannels(); + + if (channels == Format.NOT_SPECIFIED) + throw new IllegalArgumentException("audioFormat.channels"); + + int sampleRate = (int) audioFormat.getSampleRate(); + + if (sampleRate == Format.NOT_SPECIFIED) + throw new IllegalArgumentException("audioFormat.sampleRate"); + + int sampleSizeInBits = audioFormat.getSampleSizeInBits(); + + if (sampleSizeInBits == Format.NOT_SPECIFIED) + throw new IllegalArgumentException("audioFormat.sampleSizeInBits"); + + char nBlockAlign = (char) ((channels * sampleSizeInBits) / 8); + + WASAPI.WAVEFORMATEX_fill( + waveformatex, + WAVE_FORMAT_PCM, + (char) channels, + sampleRate, + sampleRate * nBlockAlign, + nBlockAlign, + (char) sampleSizeInBits, + /* cbSize */ (char) 0); + } + + /** + * The pointer to the native IMediaObject interface instance of the + * voice capture DMO that supports/implements the acoustic echo cancellation + * (AEC) feature. + */ + private long aecIMediaObject; + + /** + * The List of AudioFormats supported by the voice capture + * DMO that supports/implements the acoustic echo cancellation (AEC) feature + * i.e. {@link #aecIMediaObject}. + */ + private List aecSupportedFormats; + + /** + * The pointer to the native IMMDeviceEnumerator interface instance + * which this WASAPISystem uses to enumerate the audio endpoint + * devices. 
+ */ + private long iMMDeviceEnumerator; + + /** + * The IMMNotificationClient which is to notify this + * WASAPISystem when an audio endpoint device is added or removed, + * when the state or properties of an endpoint device change, or when there + * is a change in the default role assigned to an endpoint device. + */ + private IMMNotificationClient pNotify; + + /** + * A WAVEFORMATEX instance allocated in {@link #preInitialize()}, + * freed in {@link #postInitialize()} and made available during the + * execution of {@link #doInitialize()} in order to minimize memory + * fragmentation. + */ + private long waveformatex; + + /** + * Initializes a new WASAPISystem instance. + * + * @throws Exception if anything goes wrong while initializing the new + * WASAPISystem instance + */ + WASAPISystem() + throws Exception + { + super( + LOCATOR_PROTOCOL, + FEATURE_AGC + | FEATURE_DENOISE + | FEATURE_ECHO_CANCELLATION + | FEATURE_NOTIFY_AND_PLAYBACK_DEVICES + | FEATURE_REINITIALIZE); + } + + /** + * Invoked after determining the AudioFormats supported by an + * IAudioClient with a specific dataFlow and before + * registering a respective CaptureDeviceInfo2 to represent that + * IAudioClient. Allows this instance to add and/or remove + * AudioFormats that it will and/or will not support in addition to + * the support of the very IAudioClient. + * + * @param dataFlow the flow of the media supported by the associated + * IAudioClient + * @param formats the List of AudioFormats supported by + * the associated IAudioClient + */ + private void configureSupportedFormats( + int dataFlow, + List formats) + { + switch (dataFlow) + { + case eCapture: + /* + * If acoustic echo cancellation (AEC) is used later on, the + * CaptureDevice/DataSource implementation will support its + * formats. 
+ */ + List aecSupportedFormats + = getAECSupportedFormats(); + + if (!aecSupportedFormats.isEmpty()) + { + for (AudioFormat format : aecSupportedFormats) + if (!formats.contains(format)) + formats.add(format); + } + break; + + case eRender: + /* + * WASAPIRenderer has to be able to change its render endpoint + * device on the fly. Since the new render endpoint device may not + * support the inputFormat of the WASAPIRenderer which has been + * negotiated based on the old render endpoint device, + * WASAPIRenderer has to be able to resample. Expand the list of + * supported formats with the supported input formats of + * appropriate resamplers. + */ + for (int i = 0, count = formats.size(); i < count; i++) + { + AudioFormat outFormat = formats.get(i); + /* + * The resamplers are not expected to convert between mono and + * stereo. + */ + AudioFormat inFormat + = new AudioFormat( + AudioFormat.LINEAR, + /* sampleRate */ Format.NOT_SPECIFIED, + /* sampleSizeInBits */ Format.NOT_SPECIFIED, + outFormat.getChannels(), + AbstractAudioRenderer.NATIVE_AUDIO_FORMAT_ENDIAN, + AudioFormat.SIGNED, + /* frameSizeInBits */ Format.NOT_SPECIFIED, + /* frameRate */ Format.NOT_SPECIFIED, + Format.byteArray); + @SuppressWarnings("unchecked") + List classNames + = PlugInManager.getPlugInList( + inFormat, + outFormat, + PlugInManager.CODEC); + + if ((classNames != null) && !classNames.isEmpty()) + { + for (String className : classNames) + { + try + { + Codec codec + = (Codec) + Class.forName(className).newInstance(); + Format[] inFormats + = codec.getSupportedInputFormats(); + + if (inFormats != null) + { + for (Format aInFormat : inFormats) + { + if (!(aInFormat instanceof AudioFormat) + || !inFormat.matches(aInFormat)) + continue; + + Format[] outFormats + = codec.getSupportedOutputFormats( + aInFormat); + boolean add = false; + + if (outFormats != null) + { + for (Format aOutFormat : outFormats) + { + if (outFormat.matches(aOutFormat)) + { + add = true; + break; + } + } + } + if (add 
&& !formats.contains(aInFormat)) + formats.add((AudioFormat) aInFormat); + } + } + } + catch (Throwable t) + { + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + /* + * The failings of a resampler are of no concern + * here. + */ + } + } + } + } + break; + + default: + throw new IllegalArgumentException("dataFlow"); + } + } + + /** + * {@inheritDoc} + */ + @Override + protected void doInitialize() + throws Exception + { + List captureDevices; + List playbackDevices; + + /* + * We want to protect iMMDeviceEnumerator because it may be accessed by + * multiple threads. Which the method doInitialize will not be invoked + * more than once at a time, it may be concurrently invoked along with + * other methods. We do not want the methods setCaptureDevices and + * setPlaybackDevices in the synchronized block because they may fire + * events which may in turn lead to deadlocks. + */ + synchronized (this) + { + + /* + * XXX Multiple threads may invoke the initialization of a DeviceSystem + * so we cannot be sure that the COM library has been initialized for + * the current thread. + */ + WASAPISystem.CoInitializeEx(); + + if (iMMDeviceEnumerator == 0) + { + iMMDeviceEnumerator + = CoCreateInstance( + CLSID_MMDeviceEnumerator, + 0, + CLSCTX_ALL, + IID_IMMDeviceEnumerator); + if (iMMDeviceEnumerator == 0) + throw new IllegalStateException("iMMDeviceEnumerator"); + + /* + * Register this DeviceSystem to be notified when an audio endpoint + * device is added or removed, when the state or properties of an + * endpoint device change, or when there is a change in the default + * role assigned to an endpoint device. 
+ */ + MMNotificationClient.RegisterEndpointNotificationCallback(pNotify); + } + + long iMMDeviceCollection + = IMMDeviceEnumerator_EnumAudioEndpoints( + iMMDeviceEnumerator, + eAll, + DEVICE_STATE_ACTIVE); + + if (iMMDeviceCollection == 0) + { + throw new RuntimeException( + "IMMDeviceEnumerator_EnumAudioEndpoints"); + } + try + { + int count = IMMDeviceCollection_GetCount(iMMDeviceCollection); + + captureDevices = new ArrayList(count); + playbackDevices = new ArrayList(count); + + if (count > 0) + { + // The acoustic echo cancellation (AEC) feature is optional. + maybeInitializeAEC(); + try + { + for (int i = 0; i < count; i++) + { + long iMMDevice + = IMMDeviceCollection_Item(iMMDeviceCollection, i); + + if (iMMDevice == 0) + { + throw new RuntimeException( + "IMMDeviceCollection_Item"); + } + try + { + doInitializeIMMDevice( + iMMDevice, + captureDevices, playbackDevices); + } + catch (Throwable t) + { + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + /* + * We do not want the initialization of one + * IMMDevice to prevent the initialization of other + * IMMDevices. + */ + logger.error( + "Failed to doInitialize for IMMDevice" + + " at index " + i, + t); + } + finally + { + IMMDevice_Release(iMMDevice); + } + } + } + finally + { + maybeUninitializeAEC(); + } + } + } + finally + { + IMMDeviceCollection_Release(iMMDeviceCollection); + } + + } // synchronized (this) + + setCaptureDevices(captureDevices); + setPlaybackDevices(playbackDevices); + } + + /** + * Implements the part of {@link #doInitialize()} related to a specific + * IMMDevice. 
+ * + * @param iMMDevice the IMMDevice to initialize as part of the + * invocation of doInitialize() on this instance + * @throws HResultException if an error occurs while initializing the + * specified iMMDevice in a native WASAPI function which returns an + * HRESULT value + * @param captureDevices the state of the execution of + * doInitialize() which stores the CaptureDeviceInfo2s of + * the capture devices discovered by this WASAPISystem + * @param playbackDevices the state of the execution of + * doInitialize() which stores the CaptureDeviceInfo2s of + * the playback devices discovered by this WASAPISystem + */ + private void doInitializeIMMDevice( + long iMMDevice, + List captureDevices, + List playbackDevices) + throws HResultException + { + String id = IMMDevice_GetId(iMMDevice); + + /* + * The ID of the IMMDevice is required because it will be used within + * the MediaLocator of its representative CaptureDeviceInfo. + */ + if (id == null) + throw new RuntimeException("IMMDevice_GetId"); + + long iAudioClient + = IMMDevice_Activate(iMMDevice, IID_IAudioClient, CLSCTX_ALL, 0); + List formats; + + if (iAudioClient == 0) + throw new RuntimeException("IMMDevice_Activate"); + try + { + formats = getIAudioClientSupportedFormats(iAudioClient); + } + finally + { + IAudioClient_Release(iAudioClient); + } + if ((formats != null) && !formats.isEmpty()) + { + String name = null; + + try + { + name = getIMMDeviceFriendlyName(iMMDevice); + } + catch (Throwable t) + { + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + logger.warn( + "Failed to retrieve the PKEY_Device_FriendlyName" + + " of IMMDevice " + id, + t); + } + if ((name == null) || (name.length() == 0)) + name = id; + + int dataFlow = getIMMDeviceDataFlow(iMMDevice); + List devices; + + switch (dataFlow) + { + case eCapture: + devices = captureDevices; + break; + case eRender: + devices = playbackDevices; + break; + default: + devices = null; + logger.error( + "Failed to retrieve dataFlow from 
IMMEndpoint " + id); + break; + } + if (devices != null) + { + configureSupportedFormats(dataFlow, formats); + if (!formats.isEmpty()) + { + CaptureDeviceInfo2 cdi2 + = new CaptureDeviceInfo2( + name, + new MediaLocator(LOCATOR_PROTOCOL + ":" + id), + formats.toArray(new Format[formats.size()]), + id, + /* transportType */ null, + /* modelIdentifier */ null); + + devices.add(cdi2); + } + } + } + } + + /** + * {@inheritDoc} + */ + @Override + protected void finalize() + throws Throwable + { + try + { + synchronized (this) + { + if (iMMDeviceEnumerator != 0) + { + IMMDeviceEnumerator_Release(iMMDeviceEnumerator); + iMMDeviceEnumerator = 0; + } + } + } + finally + { + super.finalize(); + } + } + + /** + * Gets the List of AudioFormats supported by the voice + * capture DMO that supports/implements the acoustic echo cancellation (AEC) + * feature. + *

+ * If an AudioFormat instance contained in the returned + * List is one of the formats of a + * CaptureDeviceInfo2 or the supportedFormats of a + * FormatControl associated with a WASAPI + * CaptureDevice/DataSource or SourceStream, it + * signals that the AudioFormat in question has been included in + * that formats or supportedFormats only because it is + * supported by the voice capture DMO supporting/implementing the acoustic + * echo cancellation (AEC) feature. + *

+ * + * @return the List of AudioFormats supported by the voice + * capture DMO that supports/implements the acoustic echo cancellation (AEC) + * feature + */ + public List getAECSupportedFormats() + { + List aecSupportedFormats = this.aecSupportedFormats; + + if (aecSupportedFormats == null) + aecSupportedFormats = Collections.emptyList(); + return aecSupportedFormats; + } + + /** + * Gets a List of the AudioFormats supported by a specific + * IAudioClient. + * + * @param iAudioClient the IAudioClient to get the List of + * supported AudioFormats of + * @return a List of the AudioFormats supported by the + * specified iAudioClient + * @throws HResultException if an error occurs while retrieving the + * List of AudioFormats supported by the specified + * iAudioClient in a native WASAPI function which returns an + * HRESULT value + */ + private List getIAudioClientSupportedFormats(long iAudioClient) + throws HResultException + { + char cbSize = 0; + List supportedFormats = new ArrayList(); + + for (char nChannels = 1; nChannels <= 2; nChannels++) + { + for (int i = 0; i < Constants.AUDIO_SAMPLE_RATES.length; i++) + { + int nSamplesPerSec = (int) Constants.AUDIO_SAMPLE_RATES[i]; + + for (char wBitsPerSample = 16; + wBitsPerSample > 0; + wBitsPerSample -= 8) + { + char nBlockAlign + = (char) ((nChannels * wBitsPerSample) / 8); + + WASAPI.WAVEFORMATEX_fill( + waveformatex, + WAVE_FORMAT_PCM, + nChannels, + nSamplesPerSec, + nSamplesPerSec * nBlockAlign, + nBlockAlign, + wBitsPerSample, + cbSize); + + long pClosestMatch + = IAudioClient_IsFormatSupported( + iAudioClient, + AUDCLNT_SHAREMODE_SHARED, + waveformatex); + + if (pClosestMatch == 0) // not supported + continue; + try + { + /* + * Succeeded with a closest match to the specified + * format? + */ + if (pClosestMatch != waveformatex) + { + // We support AutioFormat.LINEAR only. 
+ if (WAVEFORMATEX_getWFormatTag(pClosestMatch) + != WAVE_FORMAT_PCM) + continue; + + nChannels + = WAVEFORMATEX_getNChannels(pClosestMatch); + nSamplesPerSec + = WAVEFORMATEX_getNSamplesPerSec( + pClosestMatch); + wBitsPerSample + = WAVEFORMATEX_getWBitsPerSample( + pClosestMatch); + } + + AudioFormat supportedFormat; + + /* + * We are able to convert between mono and stereo. + * Additionally, the stereo support within the library + * is not as advanced as the mono support at the time of + * this writing. + */ + if (nChannels == 2) + { + supportedFormat + = new NativelySupportedAudioFormat( + AudioFormat.LINEAR, + nSamplesPerSec, + wBitsPerSample, + /* channels */ 1, + AbstractAudioRenderer + .NATIVE_AUDIO_FORMAT_ENDIAN, + AudioFormat.SIGNED, + /* frameSizeInBits */ + Format.NOT_SPECIFIED, + /* frameRate */ Format.NOT_SPECIFIED, + Format.byteArray); + if (!supportedFormats.contains(supportedFormat)) + supportedFormats.add(supportedFormat); + } + supportedFormat + = new NativelySupportedAudioFormat( + AudioFormat.LINEAR, + nSamplesPerSec, + wBitsPerSample, + nChannels, + AbstractAudioRenderer + .NATIVE_AUDIO_FORMAT_ENDIAN, + AudioFormat.SIGNED, + /* frameSizeInBits */ Format.NOT_SPECIFIED, + /* frameRate */ Format.NOT_SPECIFIED, + Format.byteArray); + if (!supportedFormats.contains(supportedFormat)) + supportedFormats.add(supportedFormat); + } + finally + { + if (pClosestMatch != waveformatex) + CoTaskMemFree(pClosestMatch); + } + } + } + } + return supportedFormats; + } + + /** + * Gets a List of the AudioFormats supported by a specific + * IMediaObject. 
+ * + * @param iMediaObject the IMediaObject to get the List of + * supported AudioFormats of + * @return a List of the AudioFormats supported by the + * specified iMediaObject + * @throws HResultException if an error occurs while retrieving the + * List of AudioFormats supported by the specified + * iMediaObject in a native WASAPI function which returns an + * HRESULT value + */ + private List getIMediaObjectSupportedFormats(long iMediaObject) + throws HResultException + { + List supportedFormats = new ArrayList(); + long pmt = MoCreateMediaType(/* cbFormat */ 0); + + if (pmt == 0) + throw new OutOfMemoryError("MoCreateMediaType"); + try + { + char cbSize = 0; + int cbFormat = WAVEFORMATEX_sizeof() + cbSize; + int hresult + = DMO_MEDIA_TYPE_fill( + pmt, + /* majortype */ MEDIATYPE_Audio, + /* subtype */ MEDIASUBTYPE_PCM, + /* bFixedSizeSamples */ true, + /* bTemporalCompression */ false, + /* lSampleSize */ 0, + /* formattype */ FORMAT_WaveFormatEx, + /* pUnk */ 0, + cbFormat, + waveformatex); + + if (FAILED(hresult)) + throw new HResultException(hresult, "DMO_MEDIA_TYPE_fill"); + + for (char nChannels = 1; nChannels <= 2; nChannels++) + { + for (int i = 0; i < Constants.AUDIO_SAMPLE_RATES.length; i++) + { + int nSamplesPerSec = (int) Constants.AUDIO_SAMPLE_RATES[i]; + + for (char wBitsPerSample = 16; + wBitsPerSample > 0; + wBitsPerSample -= 8) + { + char nBlockAlign + = (char) ((nChannels * wBitsPerSample) / 8); + + WASAPI.WAVEFORMATEX_fill( + waveformatex, + WAVE_FORMAT_PCM, + nChannels, + nSamplesPerSec, + nSamplesPerSec * nBlockAlign, + nBlockAlign, + wBitsPerSample, + cbSize); + DMO_MEDIA_TYPE_setLSampleSize(pmt, wBitsPerSample / 8); + + try + { + hresult + = IMediaObject_SetOutputType( + iMediaObject, + /* dwOutputStreamIndex */ 0, + pmt, + /* dwFlags */ DMO_SET_TYPEF_TEST_ONLY); + } + catch (HResultException hre) + { + /* + * If the specified media type is not acceptable, + * IMediaObject::SetOutputType should return + * S_FALSE. 
Anyway, continue testing the other media + * types. + */ + hresult = hre.getHResult(); + } + if (S_OK == hresult) + { + AudioFormat supportedFormat + = new AudioFormat( + AudioFormat.LINEAR, + nSamplesPerSec, + wBitsPerSample, + nChannels, + AbstractAudioRenderer + .NATIVE_AUDIO_FORMAT_ENDIAN, + AudioFormat.SIGNED, + /* frameSizeInBits */ + Format.NOT_SPECIFIED, + /* frameRate */ Format.NOT_SPECIFIED, + Format.byteArray); + + if (!supportedFormats.contains(supportedFormat)) + supportedFormats.add(supportedFormat); + } + } + } + } + } + finally + { + /* + * XXX MoDeleteMediaType is documented to internally call + * MoFreeMediaType to free the format block but the format block has + * not been internally allocated by MoInitMediaType. + */ + DMO_MEDIA_TYPE_setCbFormat(pmt, 0); + DMO_MEDIA_TYPE_setFormattype(pmt, FORMAT_None); + DMO_MEDIA_TYPE_setPbFormat(pmt, 0); + MoDeleteMediaType(pmt); + } + + return supportedFormats; + } + + /** + * Gets an audio endpoint device that is identified by a specific endpoint + * ID string. + * + * @param id the endpoint ID string which identifies the audio endpoint + * device to be retrieved + * @return an IMMDevice instance which represents the audio + * endpoint device that is identified by the specified endpoint ID string + * @throws HResultException if an error occurs while retrieving the audio + * endpoint device that is identified by the specified endpoint ID string in + * a native WASAPI function which returns an HRESULT value + */ + public synchronized long getIMMDevice(String id) + throws HResultException + { + long iMMDeviceEnumerator = this.iMMDeviceEnumerator; + + if (iMMDeviceEnumerator == 0) + throw new IllegalStateException("iMMDeviceEnumerator"); + else + return IMMDeviceEnumerator_GetDevice(iMMDeviceEnumerator, id); + } + + /** + * Gets the data flow of a specific IMMDevice in the form of an + * EDataFlow value. 
+ * + * @param iMMDevice the IMMDevice to get the data flow of + * @return an EDataFlow value which represents the data flow of the + * specified IMMDevice + * @throws HResultException if an error occurs while retrieving the data + * flow of the specified iMMDevice in a native WASAPI function + * which returns an HRESULT value + */ + public int getIMMDeviceDataFlow(long iMMDevice) + throws HResultException + { + long iMMEndpoint = IMMDevice_QueryInterface(iMMDevice, IID_IMMEndpoint); + int dataFlow; + + if (iMMEndpoint == 0) + throw new RuntimeException("IMMDevice_QueryInterface"); + try + { + dataFlow = IMMEndpoint_GetDataFlow(iMMEndpoint); + } + finally + { + IMMEndpoint_Release(iMMEndpoint); + } + switch (dataFlow) + { + case eAll: + case eCapture: + case eRender: + return dataFlow; + default: + throw new RuntimeException("IMMEndpoint_GetDataFlow"); + } + } + + /** + * Gets the PKEY_Device_FriendlyName of a specific + * IMMDevice which represents the human-readable name of the device + * (interface). 
+ * + * @param iMMDevice the IMMDevice to get the + * friendly/human-readable name of + * @return the friendly/human-readable name of the specified + * iMMDevice + * @throws HResultException if an error occurs while retrieving the friendly + * name of the specified iMMDevice in a native WASAPI function + * which returns an HRESULT value + */ + private String getIMMDeviceFriendlyName(long iMMDevice) + throws HResultException + { + long iPropertyStore = IMMDevice_OpenPropertyStore(iMMDevice, STGM_READ); + + if (iPropertyStore == 0) + throw new RuntimeException("IMMDevice_OpenPropertyStore"); + + String deviceFriendlyName; + + try + { + deviceFriendlyName + = IPropertyStore_GetString( + iPropertyStore, + PKEY_Device_FriendlyName); + } + finally + { + IPropertyStore_Release(iPropertyStore); + } + return deviceFriendlyName; + } + + /** + * Gets the zero-based index within the IMMDeviceCollection + * interface of an audio endpoint device specified by an endpoint ID string. + * + * @param id the endpoint ID string which specifies the audio endpoint + * device whose zero-based index within the IMMDeviceCollection + * interface is to be retrieved + * @return the zero-based index within the IMMDeviceCollection + * interface of an audio endpoint device identified by the specified + * endpoint ID string if the specified endpoint ID string identifies an + * actual audio endpoint device within the IMMDeviceCollection + * interface; otherwise, -1 + * @throws HResultException if an error occurs while determining the + * zero-based index within the IMMDeviceCollection interface of the + * audio endpoint device identified by the specified endpoint ID string in a + * native WASAPI function which returns an HRESULT value + */ + public synchronized int getIMMDeviceIndex(String id, int dataFlow) + throws HResultException + { + long iMMDeviceEnumerator = this.iMMDeviceEnumerator; + + if (iMMDeviceEnumerator == 0) + throw new IllegalStateException("iMMDeviceEnumerator"); + + long 
iMMDeviceCollection + = IMMDeviceEnumerator_EnumAudioEndpoints( + iMMDeviceEnumerator, + dataFlow, + DEVICE_STATE_ACTIVE); + + if (iMMDeviceCollection == 0) + { + throw new RuntimeException( + "IMMDeviceEnumerator_EnumAudioEndpoints"); + } + + int iMMDeviceIndex = -1; + + try + { + int count = IMMDeviceCollection_GetCount(iMMDeviceCollection); + + if (count > 0) + { + for (int i = 0; i < count; i++) + { + long iMMDevice + = IMMDeviceCollection_Item(iMMDeviceCollection, i); + + if (iMMDevice == 0) + { + throw new RuntimeException( + "IMMDeviceCollection_Item"); + } + + String iMMDeviceID; + + try + { + iMMDeviceID = IMMDevice_GetId(iMMDevice); + } + finally + { + IMMDevice_Release(iMMDevice); + } + /* + * The endpoint ID strings include GUIDs so case insensitive + * comparison should be appropriate. If we wanted to be more + * strict, we would've invoked IMMDeviceCollection_GetDevice + * in order to have Windows Audio Session API (WASAPI) make + * the comparison of the enpoint ID strings. + */ + if (id.equalsIgnoreCase(iMMDeviceID)) + { + iMMDeviceIndex = i; + break; + } + } + } + } + finally + { + IMMDeviceCollection_Release(iMMDeviceCollection); + } + return iMMDeviceIndex; + } + + /** + * {@inheritDoc} + */ + @Override + protected String getRendererClassName() + { + return WASAPIRenderer.class.getName(); + } + + /** + * Initializes a new IMediaObject instance which represents a Voice + * Capture DSP implementing acoustic echo cancellation (AEC). + * + * @return a new IMediaObject instance which represents a Voice + * Capture DSP implementing acoustic echo cancellation (AEC) + * @throws Exception if initializing the new instance fails + */ + public long initializeAEC() + throws Exception + { + long iMediaObject = 0; + long iPropertyStore = 0; + long aecIMediaObject = 0; + + /* + * XXX Multiple threads may invoke the initialization of a DeviceSystem + * so we cannot be sure that the COM library has been initialized for + * the current thread. 
+ */ + WASAPISystem.CoInitializeEx(); + + try + { + iMediaObject + = CoCreateInstance( + CLSID_CWMAudioAEC, + /* pUnkOuter */ 0, + CLSCTX_ALL, + IID_IMediaObject); + if (iMediaObject == 0) + throw new RuntimeException("CoCreateInstance"); + else + { + iPropertyStore + = IMediaObject_QueryInterface( + iMediaObject, + IID_IPropertyStore); + if (iPropertyStore == 0) + throw new RuntimeException("IMediaObject_QueryInterface"); + else + { + int hresult + = IPropertyStore_SetValue( + iPropertyStore, + MFPKEY_WMAAECMA_SYSTEM_MODE, + SINGLE_CHANNEL_AEC); + + if (FAILED(hresult)) + { + throw new HResultException( + hresult, + "IPropertyStore_SetValue" + + " MFPKEY_WMAAECMA_SYSTEM_MODE"); + } + else + { + aecIMediaObject = iMediaObject; + iMediaObject = 0; + } + } + } + } + finally + { + if (iPropertyStore != 0) + IPropertyStore_Release(iPropertyStore); + if (iMediaObject != 0) + IMediaObject_Release(iMediaObject); + } + return aecIMediaObject; + } + + /** + * Initializes a new IAudioClient instance for an audio endpoint + * device identified by a specific MediaLocator. The initialization + * is performed to an extent suitable for the operation of + * {@link WASAPIRenderer} and {@link WASAPIStream}. + * + * @param locator a MediaLocator which identifies the audio + * endpoint device to initialize a new IAudioClient instance for + * @param dataFlow the flow of media data to be supported by the audio + * endpoint device identified by the specified locator + * @param streamFlags + * @param eventHandle + * @param hnsBufferDuration the base of the duration in milliseconds of the + * buffer that the audio application will share with the audio engine. If + * {@link Format#NOT_SPECIFIED}, the method uses the default interval + * between periodic passes by the audio engine. + * @param formats an array of alternative AudioFormats with which + * initialization of a new IAudioClient instance is to be + * attempted. 
The first element of the formats array which is + * supported by the new IAudioClient instance is used to initialize + * it and any preceding elements are set to null to signify that + * they are not supported and to make it possible to retrieve the + * AudioFormat with which the new IAudioClient instance + * has been initialized. + * @return a new IAudioClient instance initialized for the audio + * endpoint device identified by the specified locator + * @throws HResultException if an error occurs while initializing a new + * IAudioClient for the audio endpoint device identified by the + * specified locator in a native WASAPI function which returns an + * HRESULT value + */ + public long initializeIAudioClient( + MediaLocator locator, + DataFlow dataFlow, + int streamFlags, + long eventHandle, + long hnsBufferDuration, + AudioFormat[] formats) + throws HResultException + { + + /* + * The Windows API function CoInitializeEx must be invoked on the + * current thread. Generally, the COM library must be initialized on a + * thread before calling any of the library functions (with a few + * exceptions) on that thread. Technically, that general requirement is + * not trivial to implement in the multi-threaded architecture of FMJ. + * Practically, we will perform the invocations where we have seen the + * return value CO_E_NOTINITIALIZED. + */ + WASAPISystem.CoInitializeEx(); + + String id = locator.getRemainder(); + long iMMDevice = getIMMDevice(id); + + if (iMMDevice == 0) + { + throw new RuntimeException( + "Failed to retrieve audio endpoint device " + + "with endpoint ID string " + id); + } + + long ret = 0; + + try + { + /* + * Assert that the audio endpoint device identified by the specified + * locator supports the specified dataFlow. 
+ */ + int iMMDeviceDataFlow = getIMMDeviceDataFlow(iMMDevice); + + switch (dataFlow) + { + case CAPTURE: + if ((iMMDeviceDataFlow != eAll) + && (iMMDeviceDataFlow != eCapture)) + throw new IllegalArgumentException("dataFlow"); + break; + case NOTIFY: + case PLAYBACK: + if ((iMMDeviceDataFlow != eAll) + && (iMMDeviceDataFlow != eRender)) + throw new IllegalArgumentException("dataFlow"); + break; + } + + long iAudioClient + = IMMDevice_Activate( + iMMDevice, + IID_IAudioClient, + CLSCTX_ALL, + 0); + + if (iAudioClient == 0) + throw new RuntimeException("IMMDevice_Activate"); + try + { + long waveformatex = WAVEFORMATEX_alloc(); + + if (waveformatex == 0) + throw new OutOfMemoryError("WAVEFORMATEX_alloc"); + try + { + int shareMode = AUDCLNT_SHAREMODE_SHARED; + int waveformatexIsInitialized = Format.NOT_SPECIFIED; + + for (int i = 0; i < formats.length; i++) + { + WAVEFORMATEX_fill(waveformatex, formats[i]); + + long pClosestMatch + = IAudioClient_IsFormatSupported( + iAudioClient, + shareMode, + waveformatex); + + if (pClosestMatch == 0) + { + // not supported + } + else + { + try + { + if (pClosestMatch == waveformatex) + { + waveformatexIsInitialized = i; + break; + } + else + { + /* + * Succeeded with a closest match to the + * specified format. 
+ */ + } + } + finally + { + if (pClosestMatch != waveformatex) + CoTaskMemFree(pClosestMatch); + } + } + } + if ((waveformatexIsInitialized < 0) + || (waveformatexIsInitialized >= formats.length)) + { + logUnsupportedFormats(dataFlow, locator, formats); + throw new IllegalArgumentException("formats"); + } + Arrays.fill(formats, 0, waveformatexIsInitialized, null); + + streamFlags |= AUDCLNT_STREAMFLAGS_NOPERSIST; + if (eventHandle != 0) + streamFlags |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK; + + if (hnsBufferDuration == Format.NOT_SPECIFIED) + { + hnsBufferDuration + = IAudioClient_GetDefaultDevicePeriod(iAudioClient) + / 10000; + if (hnsBufferDuration <= 1) + { + hnsBufferDuration + = WASAPISystem.DEFAULT_DEVICE_PERIOD; + } + } + + int hresult + = IAudioClient_Initialize( + iAudioClient, + shareMode, + streamFlags, + 3 * hnsBufferDuration * 10000, + /* hnsPeriodicity */ 0, + waveformatex, + audioSessionGuid); + + if (hresult != S_OK) + { + /* + * The execution is not expected to reach here. Anyway, + * be prepared to handle even such a case for the sake + * of completeness. + */ + throw new HResultException(hresult); + } + if (((streamFlags & AUDCLNT_STREAMFLAGS_EVENTCALLBACK) + == AUDCLNT_STREAMFLAGS_EVENTCALLBACK) + && (eventHandle != 0)) + { + IAudioClient_SetEventHandle(iAudioClient, eventHandle); + } + + ret = iAudioClient; + iAudioClient = 0; + } + finally + { + CoTaskMemFree(waveformatex); + } + } + finally + { + if (iAudioClient != 0) + IAudioClient_Release(iAudioClient); + } + } + finally + { + if (iMMDevice != 0) + IMMDevice_Release(iMMDevice); + } + return ret; + } + + /** + * Logs an error message describing that a device identified by a specific + * DataFlow and a specific MediaLocator does not support + * a specific list of Formats. 
+ * + * @param dataFlow the flow of the media supported by the device which does + * not support the specified Formats + * @param locator the MediaLocator identifying the device which + * does not support the specified Formats + * @param unsupportedFormats the list of Format which are not + * supported by the device identified by the specified dataFlow and + * locator + */ + private void logUnsupportedFormats( + DataFlow dataFlow, MediaLocator locator, + Format[] unsupportedFormats) + { + StringBuilder msg = new StringBuilder(); + + msg.append("Unsupported formats: "); + msg.append(Arrays.toString(unsupportedFormats)); + msg.append('.'); + + Format[] supportedFormats; + + try + { + supportedFormats = getDevice(dataFlow, locator).getFormats(); + } + catch (Throwable t) + { + /* + * The supported formats are less important than the unsupported + * formats. + */ + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + supportedFormats = null; + } + msg.append("Supported formats: "); + msg.append(Arrays.toString(supportedFormats)); + msg.append('.'); + + logger.error(msg); + } + + /** + * Initializes the acoustic echo cancellation (AEC) feature if possible and + * if it has not been initialized yet. The method swallows any exceptions + * because the feature in question is optional. 
+ */ + private void maybeInitializeAEC() + { + if ((aecIMediaObject != 0) || (aecSupportedFormats != null)) + return; + + try + { + long iMediaObject = initializeAEC(); + + try + { + List supportedFormats + = getIMediaObjectSupportedFormats(iMediaObject); + + if (!supportedFormats.isEmpty()) + { + aecIMediaObject = iMediaObject; + iMediaObject = 0; + aecSupportedFormats + = Collections.unmodifiableList( + supportedFormats); + } + } + finally + { + if (iMediaObject != 0) + IMediaObject_Release(iMediaObject); + } + } + catch (Throwable t) + { + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.error( + "Failed to initialize acoustic echo cancellation (AEC)", + t); + } + } + } + + /** + * Uninitializes the acoustic echo cancellation (AEC) feature if it has been + * initialized. The method swallows any exceptions because the feature in + * question is optional. + */ + private void maybeUninitializeAEC() + { + try + { + if (aecIMediaObject != 0) + { + IMediaObject_Release(aecIMediaObject); + aecIMediaObject = 0; + } + } + catch (Throwable t) + { + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.error( + "Failed to uninitialize acoustic echo cancellation (AEC)", + t); + } + } + } + + /** + * {@inheritDoc} + */ + @Override + protected void postInitialize() + throws Exception + { + try + { + super.postInitialize(); + } + finally + { + if (waveformatex != 0) + { + CoTaskMemFree(waveformatex); + waveformatex = 0; + } + } + } + + /** + * {@inheritDoc} + */ + @Override + protected void preInitialize() + throws Exception + { + super.preInitialize(); + + /* + * Make sure a WAVEFORMATEX instance is available during the execution + * of doInitialize(). The field has been introduced to minimize memory + * fragmentation. 
+ */ + if (waveformatex != 0) + { + CoTaskMemFree(waveformatex); + waveformatex = 0; + } + waveformatex = WAVEFORMATEX_alloc(); + if (waveformatex == 0) + throw new OutOfMemoryError("WAVEFORMATEX_alloc"); + + if (pNotify == null) + { + pNotify + = new IMMNotificationClient() + { + public void OnDefaultDeviceChanged( + int flow, + int role, + String pwstrDefaultDevice) + { + } + + public void OnDeviceAdded(String pwstrDeviceId) + { + reinitialize(pwstrDeviceId); + } + + public void OnDeviceRemoved(String pwstrDeviceId) + { + reinitialize(pwstrDeviceId); + } + + public void OnDeviceStateChanged( + String pwstrDeviceId, + int dwNewState) + { + reinitialize(pwstrDeviceId); + } + + public void OnPropertyValueChanged( + String pwstrDeviceId, + long key) + { + } + }; + } + + /* + * Generate a GUID to identify an audio session that steams to be + * initialized will belong to. + */ + if (audioSessionGuid == null) + { + try + { + audioSessionGuid = CoCreateGuid(); + } + catch (HResultException hre) + { + /* + * The application/library will work with the default audio + * session GUID. + */ + logger.warn("Failed to generate a new audio session GUID", hre); + } + } + } + + /** + * Reinitializes this WASAPISystem. The implementation assumes that + * the invocation is performed by the Multimedia Device (MMDevice) API and + * swallows any thrown Exception. + * + * @param deviceId the endpoint ID string that identifies the audio endpoint + * device which is related to the decision to reinitialize this + * WASAPISystem + */ + private void reinitialize(String deviceId) + { + try + { + /* + * XXX Invoke the initialize() method asynchronously in order to + * allow the Multimedia Device (MMDevice) callback to return + * immediately. Otherwise, the execution will freeze in the + * IAudioClient_Release function will freeze. Besides, the callback + * dispatches the notifications after the respective changes have + * been realized anyway. 
+ */ + invokeDeviceSystemInitialize(this, true); + } + catch (Exception e) + { + logger.error("Failed to reinitialize " + getClass().getName(), e); + } + } + + /** + * {@inheritDoc} + */ + @Override + public String toString() + { + return "Windows Audio Session API (WASAPI)"; + } +} diff --git a/src/org/jitsi/impl/neomedia/format/ParameterizedVideoFormat.java b/src/org/jitsi/impl/neomedia/format/ParameterizedVideoFormat.java index e4aaa4a81..e090a594b 100644 --- a/src/org/jitsi/impl/neomedia/format/ParameterizedVideoFormat.java +++ b/src/org/jitsi/impl/neomedia/format/ParameterizedVideoFormat.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,299 +13,299 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.format; - -import java.awt.*; -import java.util.*; - -import javax.media.*; -import javax.media.format.*; - -/** - * Implements a VideoFormat with format parameters (like - * {@link VideoMediaFormatImpl}) (some of) which (could) distinguish payload - * types. - * - * @author Lyubomir Marinov - */ -public class ParameterizedVideoFormat - extends VideoFormat -{ - /** - * Serial version UID. - */ - private static final long serialVersionUID = 0L; - - /** - * The format parameters of this ParameterizedVideoFormat instance. - */ - private Map fmtps; - - /** - * Constructs a new ParametizedVideoFormat. - * - * @param encoding encoding - * @param size video size - * @param maxDataLength maximum data length - * @param dataType data type - * @param frameRate frame rate - * @param fmtps format parameters - */ - public ParameterizedVideoFormat( - String encoding, - Dimension size, - int maxDataLength, - Class dataType, - float frameRate, - Map fmtps) - { - super(encoding, size, maxDataLength, dataType, frameRate); - - this.fmtps - = ((fmtps == null) || fmtps.isEmpty()) - ? 
MediaFormatImpl.EMPTY_FORMAT_PARAMETERS - : new HashMap(fmtps); - } - - /** - * Initializes a new ParameterizedVideoFormat with a specific - * encoding and a specific set of format parameters. - * - * @param encoding the encoding of the new instance - * @param fmtps the format parameters of the new instance - */ - public ParameterizedVideoFormat(String encoding, Map fmtps) - { - super(encoding); - - this.fmtps - = ((fmtps == null) || fmtps.isEmpty()) - ? MediaFormatImpl.EMPTY_FORMAT_PARAMETERS - : new HashMap(fmtps); - } - - /** - * Initializes a new ParameterizedVideoFormat with a specific - * encoding and a specific set of format parameters. - * - * @param encoding the encoding of the new instance - * @param fmtps the format parameters of the new instance in the form of an - * array of Strings in which the key and the value of an - * association are expressed as consecutive elements. - */ - public ParameterizedVideoFormat(String encoding, String... fmtps) - { - this(encoding, toMap(fmtps)); - } - - /** - * Initializes a new ParameterizedVideoFormat instance which has - * the same properties as this instance. - * - * @return a new ParameterizedVideoFormat instance which has the - * same properties as this instance - */ - @Override - public Object clone() - { - ParameterizedVideoFormat f - = new ParameterizedVideoFormat( - getEncoding(), - getSize(), - getMaxDataLength(), - getDataType(), - getFrameRate(), - /* - * The formatParameters will be copied by - * ParameterizedVideoFormat#copy(Format) bellow. - */ - null); - - f.copy(this); - return f; - } - - /** - * Copies the properties of the specified Format into this - * instance. 
- * - * @param f the Format the properties of which are to be copied - * into this instance - */ - @Override - protected void copy(Format f) - { - super.copy(f); - - if (f instanceof ParameterizedVideoFormat) - { - ParameterizedVideoFormat pvf = (ParameterizedVideoFormat) f; - Map pvfFmtps = pvf.getFormatParameters(); - - fmtps - = ((pvfFmtps == null) || pvfFmtps.isEmpty()) - ? MediaFormatImpl.EMPTY_FORMAT_PARAMETERS - : new HashMap(pvfFmtps); - } - } - - /** - * Determines whether a specific Object represents a value that is - * equal to the value represented by this instance. - * - * @param obj the Object to be determined whether it represents a - * value that is equal to the value represented by this instance - * @return true if the specified obj represents a value - * that is equal to the value represented by this instance; otherwise, - * false - */ - @Override - public boolean equals(Object obj) - { - if (!super.equals(obj)) - return false; - - Map objFmtps = null; - - if (obj instanceof ParameterizedVideoFormat) - objFmtps = ((ParameterizedVideoFormat) obj).getFormatParameters(); - return - VideoMediaFormatImpl.formatParametersAreEqual( - getEncoding(), - getFormatParameters(), objFmtps); - } - - /** - * Returns whether or not the format parameters match. - * - * @param format format to test - * @return true if the format parameters match. - */ - public boolean formatParametersMatch(Format format) - { - Map formatFmtps = null; - - if (format instanceof ParameterizedVideoFormat) - formatFmtps - = ((ParameterizedVideoFormat) format).getFormatParameters(); - return - VideoMediaFormatImpl.formatParametersMatch( - getEncoding(), - getFormatParameters(), formatFmtps); - } - - /** - * Returns the format parameters value for the specified name. 
- * - * @param name format parameters name - * @return value for the specified format parameters name - */ - public String getFormatParameter(String name) - { - return fmtps.get(name); - } - - /** - * Returns the format parameters Map. - * - * @return the format parameters Map. - */ - public Map getFormatParameters() - { - return new HashMap(fmtps); - } - - /** - * Finds the attributes shared by two matching Formats. If the - * specified Format does not match this one, the result is - * undefined. - * - * @param format the matching Format to intersect with this one - * @return a Format with its attributes set to the attributes - * common to this instance and the specified format - */ - @Override - public Format intersects(Format format) - { - Format intersection = super.intersects(format); - - if (intersection == null) - return null; - - ((ParameterizedVideoFormat) intersection).fmtps - = fmtps.isEmpty() - ? MediaFormatImpl.EMPTY_FORMAT_PARAMETERS - : getFormatParameters(); - return intersection; - } - - /** - * Determines whether a specific format matches this instance i.e. whether - * their attributes match according to the definition of "match" given by - * {@link Format#matches(Format)}. - * - * @param format the Format to compare to this instance - * @return true if the specified format matches this one; - * otherwise, false - */ - @Override - public boolean matches(Format format) - { - return super.matches(format) && formatParametersMatch(format); - } - - /** - * Initializes a new Map from an array in which the key and the - * value of an association are expressed as consecutive elements. - * - * @param the very type of the keys and the values to be associated in - * the new Map - * @param entries the associations to be created in the new Map - * where the key and value of an association are expressed as consecutive - * elements - * @return a new Map with the associations specified by - * entries - */ - public static Map toMap(T... 
entries) - { - Map map; - - if ((entries == null) || (entries.length == 0)) - map = null; - else - { - map = new HashMap(); - for (int i = 0; i < entries.length; i++) - map.put(entries[i++], entries[i]); - } - return map; - } - - @Override - public String toString() - { - StringBuilder s = new StringBuilder(); - - s.append(super.toString()); - - // fmtps - { - s.append(", fmtps={"); - for (Map.Entry fmtp : fmtps.entrySet()) - { - s.append(fmtp.getKey()); - s.append('='); - s.append(fmtp.getValue()); - s.append(','); - } - - int lastIndex = s.length() - 1; - - if (s.charAt(lastIndex) == ',') - s.setCharAt(lastIndex, '}'); - else - s.append('}'); - } - return s.toString(); - } -} +package org.jitsi.impl.neomedia.format; + +import java.awt.*; +import java.util.*; + +import javax.media.*; +import javax.media.format.*; + +/** + * Implements a VideoFormat with format parameters (like + * {@link VideoMediaFormatImpl}) (some of) which (could) distinguish payload + * types. + * + * @author Lyubomir Marinov + */ +public class ParameterizedVideoFormat + extends VideoFormat +{ + /** + * Serial version UID. + */ + private static final long serialVersionUID = 0L; + + /** + * The format parameters of this ParameterizedVideoFormat instance. + */ + private Map fmtps; + + /** + * Constructs a new ParametizedVideoFormat. + * + * @param encoding encoding + * @param size video size + * @param maxDataLength maximum data length + * @param dataType data type + * @param frameRate frame rate + * @param fmtps format parameters + */ + public ParameterizedVideoFormat( + String encoding, + Dimension size, + int maxDataLength, + Class dataType, + float frameRate, + Map fmtps) + { + super(encoding, size, maxDataLength, dataType, frameRate); + + this.fmtps + = ((fmtps == null) || fmtps.isEmpty()) + ? MediaFormatImpl.EMPTY_FORMAT_PARAMETERS + : new HashMap(fmtps); + } + + /** + * Initializes a new ParameterizedVideoFormat with a specific + * encoding and a specific set of format parameters. 
+ * + * @param encoding the encoding of the new instance + * @param fmtps the format parameters of the new instance + */ + public ParameterizedVideoFormat(String encoding, Map fmtps) + { + super(encoding); + + this.fmtps + = ((fmtps == null) || fmtps.isEmpty()) + ? MediaFormatImpl.EMPTY_FORMAT_PARAMETERS + : new HashMap(fmtps); + } + + /** + * Initializes a new ParameterizedVideoFormat with a specific + * encoding and a specific set of format parameters. + * + * @param encoding the encoding of the new instance + * @param fmtps the format parameters of the new instance in the form of an + * array of Strings in which the key and the value of an + * association are expressed as consecutive elements. + */ + public ParameterizedVideoFormat(String encoding, String... fmtps) + { + this(encoding, toMap(fmtps)); + } + + /** + * Initializes a new ParameterizedVideoFormat instance which has + * the same properties as this instance. + * + * @return a new ParameterizedVideoFormat instance which has the + * same properties as this instance + */ + @Override + public Object clone() + { + ParameterizedVideoFormat f + = new ParameterizedVideoFormat( + getEncoding(), + getSize(), + getMaxDataLength(), + getDataType(), + getFrameRate(), + /* + * The formatParameters will be copied by + * ParameterizedVideoFormat#copy(Format) bellow. + */ + null); + + f.copy(this); + return f; + } + + /** + * Copies the properties of the specified Format into this + * instance. + * + * @param f the Format the properties of which are to be copied + * into this instance + */ + @Override + protected void copy(Format f) + { + super.copy(f); + + if (f instanceof ParameterizedVideoFormat) + { + ParameterizedVideoFormat pvf = (ParameterizedVideoFormat) f; + Map pvfFmtps = pvf.getFormatParameters(); + + fmtps + = ((pvfFmtps == null) || pvfFmtps.isEmpty()) + ? 
MediaFormatImpl.EMPTY_FORMAT_PARAMETERS + : new HashMap(pvfFmtps); + } + } + + /** + * Determines whether a specific Object represents a value that is + * equal to the value represented by this instance. + * + * @param obj the Object to be determined whether it represents a + * value that is equal to the value represented by this instance + * @return true if the specified obj represents a value + * that is equal to the value represented by this instance; otherwise, + * false + */ + @Override + public boolean equals(Object obj) + { + if (!super.equals(obj)) + return false; + + Map objFmtps = null; + + if (obj instanceof ParameterizedVideoFormat) + objFmtps = ((ParameterizedVideoFormat) obj).getFormatParameters(); + return + VideoMediaFormatImpl.formatParametersAreEqual( + getEncoding(), + getFormatParameters(), objFmtps); + } + + /** + * Returns whether or not the format parameters match. + * + * @param format format to test + * @return true if the format parameters match. + */ + public boolean formatParametersMatch(Format format) + { + Map formatFmtps = null; + + if (format instanceof ParameterizedVideoFormat) + formatFmtps + = ((ParameterizedVideoFormat) format).getFormatParameters(); + return + VideoMediaFormatImpl.formatParametersMatch( + getEncoding(), + getFormatParameters(), formatFmtps); + } + + /** + * Returns the format parameters value for the specified name. + * + * @param name format parameters name + * @return value for the specified format parameters name + */ + public String getFormatParameter(String name) + { + return fmtps.get(name); + } + + /** + * Returns the format parameters Map. + * + * @return the format parameters Map. + */ + public Map getFormatParameters() + { + return new HashMap(fmtps); + } + + /** + * Finds the attributes shared by two matching Formats. If the + * specified Format does not match this one, the result is + * undefined. 
+ * + * @param format the matching Format to intersect with this one + * @return a Format with its attributes set to the attributes + * common to this instance and the specified format + */ + @Override + public Format intersects(Format format) + { + Format intersection = super.intersects(format); + + if (intersection == null) + return null; + + ((ParameterizedVideoFormat) intersection).fmtps + = fmtps.isEmpty() + ? MediaFormatImpl.EMPTY_FORMAT_PARAMETERS + : getFormatParameters(); + return intersection; + } + + /** + * Determines whether a specific format matches this instance i.e. whether + * their attributes match according to the definition of "match" given by + * {@link Format#matches(Format)}. + * + * @param format the Format to compare to this instance + * @return true if the specified format matches this one; + * otherwise, false + */ + @Override + public boolean matches(Format format) + { + return super.matches(format) && formatParametersMatch(format); + } + + /** + * Initializes a new Map from an array in which the key and the + * value of an association are expressed as consecutive elements. + * + * @param the very type of the keys and the values to be associated in + * the new Map + * @param entries the associations to be created in the new Map + * where the key and value of an association are expressed as consecutive + * elements + * @return a new Map with the associations specified by + * entries + */ + public static Map toMap(T... 
entries) + { + Map map; + + if ((entries == null) || (entries.length == 0)) + map = null; + else + { + map = new HashMap(); + for (int i = 0; i < entries.length; i++) + map.put(entries[i++], entries[i]); + } + return map; + } + + @Override + public String toString() + { + StringBuilder s = new StringBuilder(); + + s.append(super.toString()); + + // fmtps + { + s.append(", fmtps={"); + for (Map.Entry fmtp : fmtps.entrySet()) + { + s.append(fmtp.getKey()); + s.append('='); + s.append(fmtp.getValue()); + s.append(','); + } + + int lastIndex = s.length() - 1; + + if (s.charAt(lastIndex) == ',') + s.setCharAt(lastIndex, '}'); + else + s.append('}'); + } + return s.toString(); + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferCaptureDevice.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferCaptureDevice.java index 93f4ed4ec..2eb01de06 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferCaptureDevice.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferCaptureDevice.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,998 +13,998 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol; - -import java.io.*; -import java.lang.reflect.*; -import java.util.*; -import java.util.concurrent.locks.*; - -import javax.media.*; -import javax.media.Controls; -import javax.media.control.*; -import javax.media.protocol.*; - -import net.sf.fmj.media.util.*; - -import org.jitsi.impl.neomedia.control.*; -import org.jitsi.util.*; - -/** - * Facilitates the implementations of the CaptureDevice and - * DataSource interfaces provided by - * AbstractPullBufferCaptureDevice and - * AbstractPushBufferCaptureDevice. 
- * - * @param the type of AbstractBufferStream - * through which this AbstractBufferCaptureDevice is to give access to - * its media data - * - * @author Lyubomir Marinov - */ -public abstract class AbstractBufferCaptureDevice - > - implements CaptureDevice, - Controls -{ - /** - * The Logger used by the AbstractBufferCaptureDevice - * class and its instances for logging output. - */ - private static final Logger logger - = Logger.getLogger(AbstractBufferCaptureDevice.class); - - /** - * The value of the formatControls property of - * AbstractBufferCaptureDevice which represents an empty array of - * FormatControls. Explicitly defined in order to reduce - * unnecessary allocations. - */ - private static final FormatControl[] EMPTY_FORMAT_CONTROLS - = new FormatControl[0]; - - /** - * The indicator which determines whether a connection to the media source - * specified by the MediaLocator of this DataSource has - * been opened. - */ - private boolean connected = false; - - /** - * The Object to synchronize the access to the state related to the - * Controls interface implementation in order to avoid locking - * this if not necessary. - */ - private final Object controlsSyncRoot = new Object(); - - /** - * The array of FormatControl instances each one of which can be - * used before {@link #connect()} to get and set the capture Format - * of each one of the capture streams. - */ - private FormatControl[] formatControls; - - /** - * The FrameRateControls of this - * AbstractBufferCaptureDevice. - */ - private FrameRateControl[] frameRateControls; - - private final ReentrantLock lock = new ReentrantLock(); - - /** - * The RTPInfos of this AbstractBufferCaptureDevice. - */ - private RTPInfo[] rtpInfos; - - /** - * The indicator which determines whether the transfer of media data from - * this DataSource has been started. - */ - private boolean started = false; - - /** - * The PushBufferStreams through which this - * PushBufferDataSource gives access to its media data. 
- *

- * Warning: Caution is advised when directly using the field and access to - * it is to be synchronized with synchronization root this. - *

- */ - private AbstractBufferStream[] streams; - - private final Object streamSyncRoot = new Object(); - - /** - * Opens a connection to the media source of this - * AbstractBufferCaptureDevice. - * - * @throws IOException if anything goes wrong while opening the connection - * to the media source of this AbstractBufferCaptureDevice - */ - public void connect() - throws IOException - { - lock(); - try - { - if (!connected) - { - doConnect(); - connected = true; - } - } - finally - { - unlock(); - } - } - - /** - * Creates a new FormatControl instance which is to be associated - * with a PushBufferStream at a specific zero-based index in the - * list of streams of this PushBufferDataSource. As the - * FormatControls of a PushBufferDataSource can be - * requested before {@link #connect()}, its PushBufferStreams may - * not exist at the time of the request for the creation of the - * FormatControl. - * - * @param streamIndex the zero-based index of the PushBufferStream - * in the list of streams of this PushBufferDataSource which is to - * be associated with the new FormatControl instance - * @return a new FormatControl instance which is to be associated - * with a PushBufferStream at the specified streamIndex in - * the list of streams of this PushBufferDataSource - */ - protected FormatControl createFormatControl(final int streamIndex) - { - return - new AbstractFormatControl() - { - /** - * The Format of this FormatControl and, - * respectively, of the media data of its owner. - */ - private Format format; - - /** - * Gets the Format of the media data of the owner of - * this FormatControl. - * - * @return the Format of the media data of the owner of - * this FormatControl - */ - public Format getFormat() - { - format - = AbstractBufferCaptureDevice.this.internalGetFormat( - streamIndex, - format); - return format; - } - - /** - * Gets the Formats in which the owner of this - * FormatControl is capable of providing media data. 
- * - * @return an array of Formats in which the owner of - * this FormatControl is capable of providing media - * data - */ - public Format[] getSupportedFormats() - { - return - AbstractBufferCaptureDevice.this.getSupportedFormats( - streamIndex); - } - - /** - * Implements {@link FormatControl#setFormat(Format)}. Attempts - * to set the Format in which the owner of this - * FormatControl is to provide media data. - * - * @param format the Format to be set on this instance - * @return the currently set Format after the attempt - * to set it on this instance if format is supported by - * this instance and regardless of whether it was actually set; - * null if format is not supported by this - * instance - */ - @Override - public Format setFormat(Format format) - { - Format setFormat = super.setFormat(format); - - if (setFormat != null) - { - setFormat - = AbstractBufferCaptureDevice.this - .internalSetFormat( - streamIndex, - setFormat, - format); - if (setFormat != null) - this.format = setFormat; - } - return setFormat; - } - }; - } - - /** - * Creates the FormatControls of this CaptureDevice. - * - * @return an array of the FormatControls of this - * CaptureDevice - */ - protected FormatControl[] createFormatControls() - { - FormatControl formatControl = createFormatControl(0); - - return - (formatControl == null) - ? EMPTY_FORMAT_CONTROLS - : new FormatControl[] { formatControl }; - } - - /** - * Creates a new FrameRateControl instance which is to allow the - * getting and setting of the frame rate of this - * AbstractBufferCaptureDevice. - * - * @return a new FrameRateControl instance which is to allow the - * getting and setting of the frame rate of this - * AbstractBufferCaptureDevice - */ - protected FrameRateControl createFrameRateControl() - { - return null; - } - - /** - * Creates a new RTPInfo instance of this - * AbstractBufferCaptureDevice. 
- * - * @return a new RTPInfo instance of this - * AbstractBufferCaptureDevice - */ - protected RTPInfo createRTPInfo() - { - return - new RTPInfo() - { - public String getCNAME() - { - // TODO Auto-generated method stub - return null; - } - }; - } - - /** - * Create a new AbstractBufferStream which is to be at a specific - * zero-based index in the list of streams of this - * AbstractBufferCaptureDevice. The Format-related - * information of the new instance is to be abstracted by a specific - * FormatControl. - * - * @param streamIndex the zero-based index of the - * AbstractBufferStream in the list of streams of this - * AbstractBufferCaptureDevice - * @param formatControl the FormatControl which is to abstract the - * Format-related information of the new instance - * @return a new AbstractBufferStream which is to be at the - * specified streamIndex in the list of streams of this - * AbstractBufferCaptureDevice and which has its - * Format-related information abstracted by the specified - * formatControl - */ - protected abstract AbstractBufferStreamT createStream( - int streamIndex, - FormatControl formatControl); - - /** - * Provides the default implementation of - * AbstractBufferCaptureDevice for {@link #doStart()}. - * - * @throws IOException if anything goes wrong while starting the transfer of - * media data from this AbstractBufferCaptureDevice - * @see #doStart() - */ - final void defaultDoStart() - throws IOException - { - synchronized (getStreamSyncRoot()) - { - if (streams != null) - { - for (AbstractBufferStream stream : streams) - stream.start(); - } - } - } - - /** - * Provides the default implementation of - * AbstractBufferCaptureDevice for {@link #doStop()}. 
- * - * @throws IOException if anything goes wrong while stopping the transfer of - * media data from this AbstractBufferCaptureDevice - * @see #doStop() - */ - final void defaultDoStop() - throws IOException - { - synchronized (getStreamSyncRoot()) - { - if (streams != null) - { - for (AbstractBufferStream stream : streams) - stream.stop(); - } - } - } - - /** - * Provides the default implementation of - * AbstractBufferCaptureDevice for {@link #getControls()}. - * - * @return an array of Objects which represent the controls - * available for this instance - */ - final Object[] defaultGetControls() - { - FormatControl[] formatControls = internalGetFormatControls(); - int formatControlCount - = (formatControls == null) ? 0 : formatControls.length; - FrameRateControl[] frameRateControls = internalGetFrameRateControls(); - int frameRateControlCount - = (frameRateControls == null) ? 0 : frameRateControls.length; - RTPInfo[] rtpInfos = internalGetRTPInfos(); - int rtpInfoCount = (rtpInfos == null) ? 0 : rtpInfos.length; - - if ((formatControlCount == 0) - && (frameRateControlCount == 0) - && (rtpInfoCount == 0)) - return ControlsAdapter.EMPTY_CONTROLS; - else - { - Object[] controls - = new Object[ - formatControlCount - + frameRateControlCount - + rtpInfoCount]; - int offset = 0; - - if (formatControlCount != 0) - { - System.arraycopy( - formatControls, 0, - controls, offset, - formatControlCount); - offset += formatControlCount; - } - if (frameRateControlCount != 0) - { - System.arraycopy( - frameRateControls, 0, - controls, offset, - frameRateControlCount); - offset += frameRateControlCount; - } - if (rtpInfoCount != 0) - { - System.arraycopy(rtpInfos, 0, controls, offset, rtpInfoCount); - offset += rtpInfoCount; - } - return controls; - } - } - - /** - * Provides the default implementation of - * AbstractBufferCaptureDevice for {@link #getFormat(int, Format)}. 
- * - * @param streamIndex the zero-based index of the - * AbstractBufferStream the Format of which is to be - * retrieved - * @param oldValue the last-known Format for the - * AbstractBufferStream at the specified streamIndex - * @return the Format to be reported by the FormatControl - * of the AbstractBufferStream at the specified - * streamIndex in the list of streams of this - * AbstractBufferCaptureDevice - * @see #getFormat(int, Format) - */ - final Format defaultGetFormat(int streamIndex, Format oldValue) - { - if (oldValue != null) - return oldValue; - - Format[] supportedFormats = getSupportedFormats(streamIndex); - - return - ((supportedFormats == null) || (supportedFormats.length < 1)) - ? null - : supportedFormats[0]; - } - - /** - * Provides the default implementation of - * AbstractBufferCaptureDevice for - * {@link #getSupportedFormats(int)}. - * - * @param streamIndex the zero-based index of the - * AbstractBufferStream for which the specified - * FormatControl is to report the list of supported - * Formats - * @return an array of Formats to be reported by a - * FormatControl as the supported formats for the - * AbstractBufferStream at the specified streamIndex in - * the list of streams of this AbstractBufferCaptureDevice - */ - final Format[] defaultGetSupportedFormats(int streamIndex) - { - CaptureDeviceInfo captureDeviceInfo = getCaptureDeviceInfo(); - - return (captureDeviceInfo == null) - ? new Format[0] - : captureDeviceInfo.getFormats(); - } - - /** - * Closes the connection to the media source specified of this - * AbstractBufferCaptureDevice. If such a connection has not been - * opened, the call is ignored. 
- */ - public void disconnect() - { - lock(); - try - { - try - { - stop(); - } - catch (IOException ioex) - { - logger.error( - "Failed to stop " + getClass().getSimpleName(), - ioex); - } - - if (connected) - { - doDisconnect(); - connected = false; - } - } - finally - { - unlock(); - } - } - - /** - * Opens a connection to the media source of this - * AbstractBufferCaptureDevice. Allows extenders to override and be - * sure that there will be no request to open a connection if the connection - * has already been opened. - * - * @throws IOException if anything goes wrong while opening the connection - * to the media source of this AbstractBufferCaptureDevice - */ - protected abstract void doConnect() - throws IOException; - - /** - * Closes the connection to the media source of this - * AbstractBufferCaptureDevice. Allows extenders to override and be - * sure that there will be no request to close a connection if the - * connection has not been opened yet. - */ - protected abstract void doDisconnect(); - - /** - * Starts the transfer of media data from this - * AbstractBufferCaptureDevice. Allows extenders to override and be - * sure that there will be no request to start the transfer of media data if - * it has already been started. - * - * @throws IOException if anything goes wrong while starting the transfer of - * media data from this AbstractBufferCaptureDevice - */ - protected abstract void doStart() - throws IOException; - - /** - * Stops the transfer of media data from this - * AbstractBufferCaptureDevice. Allows extenders to override and be - * sure that there will be no request to stop the transfer of media data if - * it has not been started yet. - * - * @throws IOException if anything goes wrong while stopping the transfer of - * media data from this AbstractBufferCaptureDevice - */ - protected abstract void doStop() - throws IOException; - - /** - * Gets the CaptureDeviceInfo of this CaptureDevice which - * describes it. 
- * - * @return the CaptureDeviceInfo of this CaptureDevice - * which describes it - */ - public abstract CaptureDeviceInfo getCaptureDeviceInfo(); - - /** - * Gets the CaptureDeviceInfo of a specific CaptureDevice - * by locating its registration in JMF using its MediaLocator. - * - * @param captureDevice the CaptureDevice to gets the - * CaptureDeviceInfo of - * @return the CaptureDeviceInfo of the specified - * CaptureDevice as registered in JMF - */ - public static CaptureDeviceInfo getCaptureDeviceInfo( - DataSource captureDevice) - { - /* - * TODO The implemented search for the CaptureDeviceInfo of this - * CaptureDevice by looking for its MediaLocator is inefficient. - */ - @SuppressWarnings("unchecked") - Vector captureDeviceInfos - = CaptureDeviceManager.getDeviceList(null); - MediaLocator locator = captureDevice.getLocator(); - - for (CaptureDeviceInfo captureDeviceInfo : captureDeviceInfos) - if (captureDeviceInfo.getLocator().toString().equals( - locator.toString())) - return captureDeviceInfo; - return null; - } - - /** - * Gets the control of the specified type available for this instance. - * - * @param controlType the type of the control available for this instance to - * be retrieved - * @return an Object which represents the control of the specified - * type available for this instance if such a control is indeed available; - * otherwise, null - */ - public Object getControl(String controlType) - { - return AbstractControls.getControl(this, controlType); - } - - /** - * Implements {@link javax.media.Controls#getControls()}. Gets the controls - * available for this instance. - * - * @return an array of Objects which represent the controls - * available for this instance - */ - public Object[] getControls() - { - return defaultGetControls(); - } - - /** - * Gets the Format to be reported by the FormatControl of - * an AbstractBufferStream at a specific zero-based index in the - * list of streams of this AbstractBufferCaptureDevice. 
The - * AbstractBufferStream may not exist at the time of requesting its - * Format. Allows extenders to override the default behavior which - * is to report any last-known format or the first Format from the - * list of supported formats as defined in the JMF registration of this - * CaptureDevice. - * - * @param streamIndex the zero-based index of the - * AbstractBufferStream the Format of which is to be - * retrieved - * @param oldValue the last-known Format for the - * PushBufferStream at the specified streamIndex - * @return the Format to be reported by the FormatControl - * of the PushBufferStream at the specified streamIndex in - * the list of streams of this PushBufferDataSource. - */ - protected abstract Format getFormat(int streamIndex, Format oldValue); - - /** - * Gets an array of FormatControl instances each one of which can - * be used before {@link #connect()} to get and set the capture - * Format of each one of the capture streams. - * - * @return an array of FormatControl instances each one of which - * can be used before {@link #connect()} to get and set the capture - * Format of each one of the capture streams - */ - public FormatControl[] getFormatControls() - { - return AbstractFormatControl.getFormatControls(this); - } - - /** - * Gets the Object which is to synchronize the access to - * {@link #streams()} and its return value. - * - * @return the Object which is to synchronize the access to - * {@link #streams()} and its return value - */ - Object getStreamSyncRoot() - { - return streamSyncRoot; - } - - /** - * Gets the AbstractBufferStreams through which this - * AbstractBufferCaptureDevice gives access to its media data. 
- * - * @param the type of SourceStream which is to be - * the element type of the returned array - * @param clz the Class of SourceStream which is to be the - * element type of the returned array - * @return an array of the SourceStreams through which this - * AbstractBufferCaptureDevice gives access to its media data - */ - public - - SourceStreamT[] getStreams(Class clz) - { - synchronized (getStreamSyncRoot()) - { - return internalGetStreams(clz); - } - } - - /** - * Gets the Formats which are to be reported by a - * FormatControl as supported formats for a - * AbstractBufferStream at a specific zero-based index in the list - * of streams of this AbstractBufferCaptureDevice. - * - * @param streamIndex the zero-based index of the - * AbstractBufferStream for which the specified - * FormatControl is to report the list of supported - * Formats - * @return an array of Formats to be reported by a - * FormatControl as the supported formats for the - * AbstractBufferStream at the specified streamIndex in - * the list of streams of this AbstractBufferCaptureDevice - */ - protected abstract Format[] getSupportedFormats(int streamIndex); - - /** - * Gets the Format to be reported by the FormatControl of - * a PushBufferStream at a specific zero-based index in the list of - * streams of this PushBufferDataSource. The - * PushBufferStream may not exist at the time of requesting its - * Format. - * - * @param streamIndex the zero-based index of the PushBufferStream - * the Format of which is to be retrieved - * @param oldValue the last-known Format for the - * PushBufferStream at the specified streamIndex - * @return the Format to be reported by the FormatControl - * of the PushBufferStream at the specified streamIndex in - * the list of streams of this PushBufferDataSource. 
- */ - private Format internalGetFormat(int streamIndex, Format oldValue) - { - if (lock.tryLock()) - { - try - { - synchronized (getStreamSyncRoot()) - { - if (streams != null) - { - AbstractBufferStream stream = streams[streamIndex]; - - if (stream != null) - { - Format streamFormat = stream.internalGetFormat(); - - if (streamFormat != null) - return streamFormat; - } - } - } - } - finally - { - lock.unlock(); - } - } - else - { - /* - * XXX In order to prevent a deadlock, do not ask the streams about - * the format. - */ - } - return getFormat(streamIndex, oldValue); - } - - /** - * Gets an array of FormatControl instances each one of which can - * be used before {@link #connect()} to get and set the capture - * Format of each one of the capture streams. - * - * @return an array of FormatControl instances each one of which - * can be used before {@link #connect()} to get and set the capture - * Format of each one of the capture streams - */ - private FormatControl[] internalGetFormatControls() - { - synchronized (controlsSyncRoot) - { - if (formatControls == null) - formatControls = createFormatControls(); - return formatControls; - } - } - - /** - * Gets an array of FrameRateControl instances which can be used to - * get and/or set the output frame rate of this - * AbstractBufferCaptureDevice. - * - * @return an array of FrameRateControl instances which can be used - * to get and/or set the output frame rate of this - * AbstractBufferCaptureDevice. - */ - private FrameRateControl[] internalGetFrameRateControls() - { - synchronized (controlsSyncRoot) - { - if (frameRateControls == null) - { - FrameRateControl frameRateControl = createFrameRateControl(); - - // Don't try to create the FrameRateControl more than once. - frameRateControls - = (frameRateControl == null) - ? 
new FrameRateControl[0] - : new FrameRateControl[] { frameRateControl }; - } - return frameRateControls; - } - } - - /** - * Gets an array of RTPInfo instances of this - * AbstractBufferCaptureDevice. - * - * @return an array of RTPInfo instances of this - * AbstractBufferCaptureDevice. - */ - private RTPInfo[] internalGetRTPInfos() - { - synchronized (controlsSyncRoot) - { - if (rtpInfos == null) - { - RTPInfo rtpInfo = createRTPInfo(); - - // Don't try to create the RTPInfo more than once. - rtpInfos - = (rtpInfo == null) - ? new RTPInfo[0] - : new RTPInfo[] { rtpInfo }; - } - return rtpInfos; - } - } - - /** - * Gets the AbstractBufferStreams through which this - * AbstractBufferCaptureDevice gives access to its media data. - * - * @param the type of SourceStream which is to be - * the element type of the returned array - * @param clz the Class of SourceStream which is to be the - * element type of the returned array - * @return an array of the SourceStreams through which this - * AbstractBufferCaptureDevice gives access to its media data - */ - private - - SourceStreamT[] internalGetStreams(Class clz) - { - if (streams == null) - { - FormatControl[] formatControls = internalGetFormatControls(); - - if (formatControls != null) - { - int formatControlCount = formatControls.length; - - streams = new AbstractBufferStream[formatControlCount]; - for (int i = 0; i < formatControlCount; i++) - streams[i] = createStream(i, formatControls[i]); - - /* - * Start the streams if this DataSource has already been - * started. - */ - if (started) - { - for (AbstractBufferStream stream : streams) - { - try - { - stream.start(); - } - catch (IOException ioex) - { - throw new UndeclaredThrowableException(ioex); - } - } - } - } - } - - int streamCount = (streams == null) ? 
0 : streams.length; - @SuppressWarnings("unchecked") - SourceStreamT[] clone - = (SourceStreamT[]) Array.newInstance(clz, streamCount); - - if (streamCount != 0) - System.arraycopy(streams, 0, clone, 0, streamCount); - return clone; - } - - /** - * Attempts to set the Format to be reported by the - * FormatControl of a PushBufferStream at a specific - * zero-based index in the list of streams of this - * PushBufferDataSource. - * - * @param streamIndex the zero-based index of the PushBufferStream - * the Format of which is to be set - * @param oldValue the last-known Format for the - * PushBufferStream at the specified streamIndex - * @param newValue the Format which is to be set - * @return the Format to be reported by the FormatControl - * of the PushBufferStream at the specified streamIndex - * in the list of streams of this PushBufferStream or null - * if the attempt to set the Format did not success and any - * last-known Format is to be left in effect - */ - private Format internalSetFormat( - int streamIndex, - Format oldValue, Format newValue) - { - lock(); - try - { - synchronized (getStreamSyncRoot()) - { - if (streams != null) - { - AbstractBufferStream stream = streams[streamIndex]; - - if (stream != null) - return stream.internalSetFormat(newValue); - } - } - } - finally - { - unlock(); - } - return setFormat(streamIndex, oldValue, newValue); - } - - private void lock() - { - lock.lock(); - } - - /** - * Attempts to set the Format to be reported by the - * FormatControl of a AbstractBufferStream at a specific - * zero-based index in the list of streams of this - * AbstractBufferCaptureDevice. The AbstractBufferStream - * does not exist at the time of the attempt to set its Format. - * Allows extenders to override the default behavior which is to not attempt - * to set the specified Format so that they can enable setting the - * Format prior to creating the AbstractBufferStream. 
If - * setting the Format of an existing AbstractBufferStream - * is desired, AbstractBufferStream#doSetFormat(Format) should be - * overridden instead. - * - * @param streamIndex the zero-based index of the - * AbstractBufferStream the Format of which is to be set - * @param oldValue the last-known Format for the - * AbstractBufferStream at the specified streamIndex - * @param newValue the Format which is to be set - * @return the Format to be reported by the FormatControl - * of the AbstractBufferStream at the specified - * streamIndex in the list of streams of this - * AbstractBufferStream or null if the attempt to set the - * Format did not success and any last-known Format is to - * be left in effect - */ - protected abstract Format setFormat( - int streamIndex, - Format oldValue, Format newValue); - - /** - * Starts the transfer of media data from this - * AbstractBufferCaptureDevice. - * - * @throws IOException if anything goes wrong while starting the transfer of - * media data from this AbstractBufferCaptureDevice - */ - public void start() - throws IOException - { - lock(); - try - { - if (!started) - { - if (!connected) - { - throw new IOException( - getClass().getName() + " not connected"); - } - - doStart(); - started = true; - } - } - finally - { - unlock(); - } - } - - /** - * Stops the transfer of media data from this - * AbstractBufferCaptureDevice. - * - * @throws IOException if anything goes wrong while stopping the transfer of - * media data from this AbstractBufferCaptureDevice - */ - public void stop() - throws IOException - { - lock(); - try - { - if (started) - { - doStop(); - started = false; - } - } - finally - { - unlock(); - } - } - - /** - * Gets the internal array of AbstractBufferStreams through which - * this AbstractBufferCaptureDevice gives access to its media data. 
- * - * @return the internal array of AbstractBufferStreams through - * which this AbstractBufferCaptureDevice gives access to its media - * data - */ - AbstractBufferStream[] streams() - { - return streams; - } - - private void unlock() - { - lock.unlock(); - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol; + +import java.io.*; +import java.lang.reflect.*; +import java.util.*; +import java.util.concurrent.locks.*; + +import javax.media.*; +import javax.media.Controls; +import javax.media.control.*; +import javax.media.protocol.*; + +import net.sf.fmj.media.util.*; + +import org.jitsi.impl.neomedia.control.*; +import org.jitsi.util.*; + +/** + * Facilitates the implementations of the CaptureDevice and + * DataSource interfaces provided by + * AbstractPullBufferCaptureDevice and + * AbstractPushBufferCaptureDevice. + * + * @param the type of AbstractBufferStream + * through which this AbstractBufferCaptureDevice is to give access to + * its media data + * + * @author Lyubomir Marinov + */ +public abstract class AbstractBufferCaptureDevice + > + implements CaptureDevice, + Controls +{ + /** + * The Logger used by the AbstractBufferCaptureDevice + * class and its instances for logging output. + */ + private static final Logger logger + = Logger.getLogger(AbstractBufferCaptureDevice.class); + + /** + * The value of the formatControls property of + * AbstractBufferCaptureDevice which represents an empty array of + * FormatControls. Explicitly defined in order to reduce + * unnecessary allocations. + */ + private static final FormatControl[] EMPTY_FORMAT_CONTROLS + = new FormatControl[0]; + + /** + * The indicator which determines whether a connection to the media source + * specified by the MediaLocator of this DataSource has + * been opened. + */ + private boolean connected = false; + + /** + * The Object to synchronize the access to the state related to the + * Controls interface implementation in order to avoid locking + * this if not necessary. 
+ */ + private final Object controlsSyncRoot = new Object(); + + /** + * The array of FormatControl instances each one of which can be + * used before {@link #connect()} to get and set the capture Format + * of each one of the capture streams. + */ + private FormatControl[] formatControls; + + /** + * The FrameRateControls of this + * AbstractBufferCaptureDevice. + */ + private FrameRateControl[] frameRateControls; + + private final ReentrantLock lock = new ReentrantLock(); + + /** + * The RTPInfos of this AbstractBufferCaptureDevice. + */ + private RTPInfo[] rtpInfos; + + /** + * The indicator which determines whether the transfer of media data from + * this DataSource has been started. + */ + private boolean started = false; + + /** + * The PushBufferStreams through which this + * PushBufferDataSource gives access to its media data. + *

+ * Warning: Caution is advised when directly using the field and access to + * it is to be synchronized with synchronization root this. + *

+ */ + private AbstractBufferStream[] streams; + + private final Object streamSyncRoot = new Object(); + + /** + * Opens a connection to the media source of this + * AbstractBufferCaptureDevice. + * + * @throws IOException if anything goes wrong while opening the connection + * to the media source of this AbstractBufferCaptureDevice + */ + public void connect() + throws IOException + { + lock(); + try + { + if (!connected) + { + doConnect(); + connected = true; + } + } + finally + { + unlock(); + } + } + + /** + * Creates a new FormatControl instance which is to be associated + * with a PushBufferStream at a specific zero-based index in the + * list of streams of this PushBufferDataSource. As the + * FormatControls of a PushBufferDataSource can be + * requested before {@link #connect()}, its PushBufferStreams may + * not exist at the time of the request for the creation of the + * FormatControl. + * + * @param streamIndex the zero-based index of the PushBufferStream + * in the list of streams of this PushBufferDataSource which is to + * be associated with the new FormatControl instance + * @return a new FormatControl instance which is to be associated + * with a PushBufferStream at the specified streamIndex in + * the list of streams of this PushBufferDataSource + */ + protected FormatControl createFormatControl(final int streamIndex) + { + return + new AbstractFormatControl() + { + /** + * The Format of this FormatControl and, + * respectively, of the media data of its owner. + */ + private Format format; + + /** + * Gets the Format of the media data of the owner of + * this FormatControl. + * + * @return the Format of the media data of the owner of + * this FormatControl + */ + public Format getFormat() + { + format + = AbstractBufferCaptureDevice.this.internalGetFormat( + streamIndex, + format); + return format; + } + + /** + * Gets the Formats in which the owner of this + * FormatControl is capable of providing media data. 
+ * + * @return an array of Formats in which the owner of + * this FormatControl is capable of providing media + * data + */ + public Format[] getSupportedFormats() + { + return + AbstractBufferCaptureDevice.this.getSupportedFormats( + streamIndex); + } + + /** + * Implements {@link FormatControl#setFormat(Format)}. Attempts + * to set the Format in which the owner of this + * FormatControl is to provide media data. + * + * @param format the Format to be set on this instance + * @return the currently set Format after the attempt + * to set it on this instance if format is supported by + * this instance and regardless of whether it was actually set; + * null if format is not supported by this + * instance + */ + @Override + public Format setFormat(Format format) + { + Format setFormat = super.setFormat(format); + + if (setFormat != null) + { + setFormat + = AbstractBufferCaptureDevice.this + .internalSetFormat( + streamIndex, + setFormat, + format); + if (setFormat != null) + this.format = setFormat; + } + return setFormat; + } + }; + } + + /** + * Creates the FormatControls of this CaptureDevice. + * + * @return an array of the FormatControls of this + * CaptureDevice + */ + protected FormatControl[] createFormatControls() + { + FormatControl formatControl = createFormatControl(0); + + return + (formatControl == null) + ? EMPTY_FORMAT_CONTROLS + : new FormatControl[] { formatControl }; + } + + /** + * Creates a new FrameRateControl instance which is to allow the + * getting and setting of the frame rate of this + * AbstractBufferCaptureDevice. + * + * @return a new FrameRateControl instance which is to allow the + * getting and setting of the frame rate of this + * AbstractBufferCaptureDevice + */ + protected FrameRateControl createFrameRateControl() + { + return null; + } + + /** + * Creates a new RTPInfo instance of this + * AbstractBufferCaptureDevice. 
+ * + * @return a new RTPInfo instance of this + * AbstractBufferCaptureDevice + */ + protected RTPInfo createRTPInfo() + { + return + new RTPInfo() + { + public String getCNAME() + { + // TODO Auto-generated method stub + return null; + } + }; + } + + /** + * Create a new AbstractBufferStream which is to be at a specific + * zero-based index in the list of streams of this + * AbstractBufferCaptureDevice. The Format-related + * information of the new instance is to be abstracted by a specific + * FormatControl. + * + * @param streamIndex the zero-based index of the + * AbstractBufferStream in the list of streams of this + * AbstractBufferCaptureDevice + * @param formatControl the FormatControl which is to abstract the + * Format-related information of the new instance + * @return a new AbstractBufferStream which is to be at the + * specified streamIndex in the list of streams of this + * AbstractBufferCaptureDevice and which has its + * Format-related information abstracted by the specified + * formatControl + */ + protected abstract AbstractBufferStreamT createStream( + int streamIndex, + FormatControl formatControl); + + /** + * Provides the default implementation of + * AbstractBufferCaptureDevice for {@link #doStart()}. + * + * @throws IOException if anything goes wrong while starting the transfer of + * media data from this AbstractBufferCaptureDevice + * @see #doStart() + */ + final void defaultDoStart() + throws IOException + { + synchronized (getStreamSyncRoot()) + { + if (streams != null) + { + for (AbstractBufferStream stream : streams) + stream.start(); + } + } + } + + /** + * Provides the default implementation of + * AbstractBufferCaptureDevice for {@link #doStop()}. 
+ * + * @throws IOException if anything goes wrong while stopping the transfer of + * media data from this AbstractBufferCaptureDevice + * @see #doStop() + */ + final void defaultDoStop() + throws IOException + { + synchronized (getStreamSyncRoot()) + { + if (streams != null) + { + for (AbstractBufferStream stream : streams) + stream.stop(); + } + } + } + + /** + * Provides the default implementation of + * AbstractBufferCaptureDevice for {@link #getControls()}. + * + * @return an array of Objects which represent the controls + * available for this instance + */ + final Object[] defaultGetControls() + { + FormatControl[] formatControls = internalGetFormatControls(); + int formatControlCount + = (formatControls == null) ? 0 : formatControls.length; + FrameRateControl[] frameRateControls = internalGetFrameRateControls(); + int frameRateControlCount + = (frameRateControls == null) ? 0 : frameRateControls.length; + RTPInfo[] rtpInfos = internalGetRTPInfos(); + int rtpInfoCount = (rtpInfos == null) ? 0 : rtpInfos.length; + + if ((formatControlCount == 0) + && (frameRateControlCount == 0) + && (rtpInfoCount == 0)) + return ControlsAdapter.EMPTY_CONTROLS; + else + { + Object[] controls + = new Object[ + formatControlCount + + frameRateControlCount + + rtpInfoCount]; + int offset = 0; + + if (formatControlCount != 0) + { + System.arraycopy( + formatControls, 0, + controls, offset, + formatControlCount); + offset += formatControlCount; + } + if (frameRateControlCount != 0) + { + System.arraycopy( + frameRateControls, 0, + controls, offset, + frameRateControlCount); + offset += frameRateControlCount; + } + if (rtpInfoCount != 0) + { + System.arraycopy(rtpInfos, 0, controls, offset, rtpInfoCount); + offset += rtpInfoCount; + } + return controls; + } + } + + /** + * Provides the default implementation of + * AbstractBufferCaptureDevice for {@link #getFormat(int, Format)}. 
+ * + * @param streamIndex the zero-based index of the + * AbstractBufferStream the Format of which is to be + * retrieved + * @param oldValue the last-known Format for the + * AbstractBufferStream at the specified streamIndex + * @return the Format to be reported by the FormatControl + * of the AbstractBufferStream at the specified + * streamIndex in the list of streams of this + * AbstractBufferCaptureDevice + * @see #getFormat(int, Format) + */ + final Format defaultGetFormat(int streamIndex, Format oldValue) + { + if (oldValue != null) + return oldValue; + + Format[] supportedFormats = getSupportedFormats(streamIndex); + + return + ((supportedFormats == null) || (supportedFormats.length < 1)) + ? null + : supportedFormats[0]; + } + + /** + * Provides the default implementation of + * AbstractBufferCaptureDevice for + * {@link #getSupportedFormats(int)}. + * + * @param streamIndex the zero-based index of the + * AbstractBufferStream for which the specified + * FormatControl is to report the list of supported + * Formats + * @return an array of Formats to be reported by a + * FormatControl as the supported formats for the + * AbstractBufferStream at the specified streamIndex in + * the list of streams of this AbstractBufferCaptureDevice + */ + final Format[] defaultGetSupportedFormats(int streamIndex) + { + CaptureDeviceInfo captureDeviceInfo = getCaptureDeviceInfo(); + + return (captureDeviceInfo == null) + ? new Format[0] + : captureDeviceInfo.getFormats(); + } + + /** + * Closes the connection to the media source specified of this + * AbstractBufferCaptureDevice. If such a connection has not been + * opened, the call is ignored. 
+ */ + public void disconnect() + { + lock(); + try + { + try + { + stop(); + } + catch (IOException ioex) + { + logger.error( + "Failed to stop " + getClass().getSimpleName(), + ioex); + } + + if (connected) + { + doDisconnect(); + connected = false; + } + } + finally + { + unlock(); + } + } + + /** + * Opens a connection to the media source of this + * AbstractBufferCaptureDevice. Allows extenders to override and be + * sure that there will be no request to open a connection if the connection + * has already been opened. + * + * @throws IOException if anything goes wrong while opening the connection + * to the media source of this AbstractBufferCaptureDevice + */ + protected abstract void doConnect() + throws IOException; + + /** + * Closes the connection to the media source of this + * AbstractBufferCaptureDevice. Allows extenders to override and be + * sure that there will be no request to close a connection if the + * connection has not been opened yet. + */ + protected abstract void doDisconnect(); + + /** + * Starts the transfer of media data from this + * AbstractBufferCaptureDevice. Allows extenders to override and be + * sure that there will be no request to start the transfer of media data if + * it has already been started. + * + * @throws IOException if anything goes wrong while starting the transfer of + * media data from this AbstractBufferCaptureDevice + */ + protected abstract void doStart() + throws IOException; + + /** + * Stops the transfer of media data from this + * AbstractBufferCaptureDevice. Allows extenders to override and be + * sure that there will be no request to stop the transfer of media data if + * it has not been started yet. + * + * @throws IOException if anything goes wrong while stopping the transfer of + * media data from this AbstractBufferCaptureDevice + */ + protected abstract void doStop() + throws IOException; + + /** + * Gets the CaptureDeviceInfo of this CaptureDevice which + * describes it. 
+ * + * @return the CaptureDeviceInfo of this CaptureDevice + * which describes it + */ + public abstract CaptureDeviceInfo getCaptureDeviceInfo(); + + /** + * Gets the CaptureDeviceInfo of a specific CaptureDevice + * by locating its registration in JMF using its MediaLocator. + * + * @param captureDevice the CaptureDevice to gets the + * CaptureDeviceInfo of + * @return the CaptureDeviceInfo of the specified + * CaptureDevice as registered in JMF + */ + public static CaptureDeviceInfo getCaptureDeviceInfo( + DataSource captureDevice) + { + /* + * TODO The implemented search for the CaptureDeviceInfo of this + * CaptureDevice by looking for its MediaLocator is inefficient. + */ + @SuppressWarnings("unchecked") + Vector captureDeviceInfos + = CaptureDeviceManager.getDeviceList(null); + MediaLocator locator = captureDevice.getLocator(); + + for (CaptureDeviceInfo captureDeviceInfo : captureDeviceInfos) + if (captureDeviceInfo.getLocator().toString().equals( + locator.toString())) + return captureDeviceInfo; + return null; + } + + /** + * Gets the control of the specified type available for this instance. + * + * @param controlType the type of the control available for this instance to + * be retrieved + * @return an Object which represents the control of the specified + * type available for this instance if such a control is indeed available; + * otherwise, null + */ + public Object getControl(String controlType) + { + return AbstractControls.getControl(this, controlType); + } + + /** + * Implements {@link javax.media.Controls#getControls()}. Gets the controls + * available for this instance. + * + * @return an array of Objects which represent the controls + * available for this instance + */ + public Object[] getControls() + { + return defaultGetControls(); + } + + /** + * Gets the Format to be reported by the FormatControl of + * an AbstractBufferStream at a specific zero-based index in the + * list of streams of this AbstractBufferCaptureDevice. 
The + * AbstractBufferStream may not exist at the time of requesting its + * Format. Allows extenders to override the default behavior which + * is to report any last-known format or the first Format from the + * list of supported formats as defined in the JMF registration of this + * CaptureDevice. + * + * @param streamIndex the zero-based index of the + * AbstractBufferStream the Format of which is to be + * retrieved + * @param oldValue the last-known Format for the + * PushBufferStream at the specified streamIndex + * @return the Format to be reported by the FormatControl + * of the PushBufferStream at the specified streamIndex in + * the list of streams of this PushBufferDataSource. + */ + protected abstract Format getFormat(int streamIndex, Format oldValue); + + /** + * Gets an array of FormatControl instances each one of which can + * be used before {@link #connect()} to get and set the capture + * Format of each one of the capture streams. + * + * @return an array of FormatControl instances each one of which + * can be used before {@link #connect()} to get and set the capture + * Format of each one of the capture streams + */ + public FormatControl[] getFormatControls() + { + return AbstractFormatControl.getFormatControls(this); + } + + /** + * Gets the Object which is to synchronize the access to + * {@link #streams()} and its return value. + * + * @return the Object which is to synchronize the access to + * {@link #streams()} and its return value + */ + Object getStreamSyncRoot() + { + return streamSyncRoot; + } + + /** + * Gets the AbstractBufferStreams through which this + * AbstractBufferCaptureDevice gives access to its media data. 
+ * + * @param the type of SourceStream which is to be + * the element type of the returned array + * @param clz the Class of SourceStream which is to be the + * element type of the returned array + * @return an array of the SourceStreams through which this + * AbstractBufferCaptureDevice gives access to its media data + */ + public + + SourceStreamT[] getStreams(Class clz) + { + synchronized (getStreamSyncRoot()) + { + return internalGetStreams(clz); + } + } + + /** + * Gets the Formats which are to be reported by a + * FormatControl as supported formats for a + * AbstractBufferStream at a specific zero-based index in the list + * of streams of this AbstractBufferCaptureDevice. + * + * @param streamIndex the zero-based index of the + * AbstractBufferStream for which the specified + * FormatControl is to report the list of supported + * Formats + * @return an array of Formats to be reported by a + * FormatControl as the supported formats for the + * AbstractBufferStream at the specified streamIndex in + * the list of streams of this AbstractBufferCaptureDevice + */ + protected abstract Format[] getSupportedFormats(int streamIndex); + + /** + * Gets the Format to be reported by the FormatControl of + * a PushBufferStream at a specific zero-based index in the list of + * streams of this PushBufferDataSource. The + * PushBufferStream may not exist at the time of requesting its + * Format. + * + * @param streamIndex the zero-based index of the PushBufferStream + * the Format of which is to be retrieved + * @param oldValue the last-known Format for the + * PushBufferStream at the specified streamIndex + * @return the Format to be reported by the FormatControl + * of the PushBufferStream at the specified streamIndex in + * the list of streams of this PushBufferDataSource. 
+ */ + private Format internalGetFormat(int streamIndex, Format oldValue) + { + if (lock.tryLock()) + { + try + { + synchronized (getStreamSyncRoot()) + { + if (streams != null) + { + AbstractBufferStream stream = streams[streamIndex]; + + if (stream != null) + { + Format streamFormat = stream.internalGetFormat(); + + if (streamFormat != null) + return streamFormat; + } + } + } + } + finally + { + lock.unlock(); + } + } + else + { + /* + * XXX In order to prevent a deadlock, do not ask the streams about + * the format. + */ + } + return getFormat(streamIndex, oldValue); + } + + /** + * Gets an array of FormatControl instances each one of which can + * be used before {@link #connect()} to get and set the capture + * Format of each one of the capture streams. + * + * @return an array of FormatControl instances each one of which + * can be used before {@link #connect()} to get and set the capture + * Format of each one of the capture streams + */ + private FormatControl[] internalGetFormatControls() + { + synchronized (controlsSyncRoot) + { + if (formatControls == null) + formatControls = createFormatControls(); + return formatControls; + } + } + + /** + * Gets an array of FrameRateControl instances which can be used to + * get and/or set the output frame rate of this + * AbstractBufferCaptureDevice. + * + * @return an array of FrameRateControl instances which can be used + * to get and/or set the output frame rate of this + * AbstractBufferCaptureDevice. + */ + private FrameRateControl[] internalGetFrameRateControls() + { + synchronized (controlsSyncRoot) + { + if (frameRateControls == null) + { + FrameRateControl frameRateControl = createFrameRateControl(); + + // Don't try to create the FrameRateControl more than once. + frameRateControls + = (frameRateControl == null) + ? 
new FrameRateControl[0] + : new FrameRateControl[] { frameRateControl }; + } + return frameRateControls; + } + } + + /** + * Gets an array of RTPInfo instances of this + * AbstractBufferCaptureDevice. + * + * @return an array of RTPInfo instances of this + * AbstractBufferCaptureDevice. + */ + private RTPInfo[] internalGetRTPInfos() + { + synchronized (controlsSyncRoot) + { + if (rtpInfos == null) + { + RTPInfo rtpInfo = createRTPInfo(); + + // Don't try to create the RTPInfo more than once. + rtpInfos + = (rtpInfo == null) + ? new RTPInfo[0] + : new RTPInfo[] { rtpInfo }; + } + return rtpInfos; + } + } + + /** + * Gets the AbstractBufferStreams through which this + * AbstractBufferCaptureDevice gives access to its media data. + * + * @param the type of SourceStream which is to be + * the element type of the returned array + * @param clz the Class of SourceStream which is to be the + * element type of the returned array + * @return an array of the SourceStreams through which this + * AbstractBufferCaptureDevice gives access to its media data + */ + private + + SourceStreamT[] internalGetStreams(Class clz) + { + if (streams == null) + { + FormatControl[] formatControls = internalGetFormatControls(); + + if (formatControls != null) + { + int formatControlCount = formatControls.length; + + streams = new AbstractBufferStream[formatControlCount]; + for (int i = 0; i < formatControlCount; i++) + streams[i] = createStream(i, formatControls[i]); + + /* + * Start the streams if this DataSource has already been + * started. + */ + if (started) + { + for (AbstractBufferStream stream : streams) + { + try + { + stream.start(); + } + catch (IOException ioex) + { + throw new UndeclaredThrowableException(ioex); + } + } + } + } + } + + int streamCount = (streams == null) ? 
0 : streams.length; + @SuppressWarnings("unchecked") + SourceStreamT[] clone + = (SourceStreamT[]) Array.newInstance(clz, streamCount); + + if (streamCount != 0) + System.arraycopy(streams, 0, clone, 0, streamCount); + return clone; + } + + /** + * Attempts to set the Format to be reported by the + * FormatControl of a PushBufferStream at a specific + * zero-based index in the list of streams of this + * PushBufferDataSource. + * + * @param streamIndex the zero-based index of the PushBufferStream + * the Format of which is to be set + * @param oldValue the last-known Format for the + * PushBufferStream at the specified streamIndex + * @param newValue the Format which is to be set + * @return the Format to be reported by the FormatControl + * of the PushBufferStream at the specified streamIndex + * in the list of streams of this PushBufferStream or null + * if the attempt to set the Format did not success and any + * last-known Format is to be left in effect + */ + private Format internalSetFormat( + int streamIndex, + Format oldValue, Format newValue) + { + lock(); + try + { + synchronized (getStreamSyncRoot()) + { + if (streams != null) + { + AbstractBufferStream stream = streams[streamIndex]; + + if (stream != null) + return stream.internalSetFormat(newValue); + } + } + } + finally + { + unlock(); + } + return setFormat(streamIndex, oldValue, newValue); + } + + private void lock() + { + lock.lock(); + } + + /** + * Attempts to set the Format to be reported by the + * FormatControl of a AbstractBufferStream at a specific + * zero-based index in the list of streams of this + * AbstractBufferCaptureDevice. The AbstractBufferStream + * does not exist at the time of the attempt to set its Format. + * Allows extenders to override the default behavior which is to not attempt + * to set the specified Format so that they can enable setting the + * Format prior to creating the AbstractBufferStream. 
If + * setting the Format of an existing AbstractBufferStream + * is desired, AbstractBufferStream#doSetFormat(Format) should be + * overridden instead. + * + * @param streamIndex the zero-based index of the + * AbstractBufferStream the Format of which is to be set + * @param oldValue the last-known Format for the + * AbstractBufferStream at the specified streamIndex + * @param newValue the Format which is to be set + * @return the Format to be reported by the FormatControl + * of the AbstractBufferStream at the specified + * streamIndex in the list of streams of this + * AbstractBufferStream or null if the attempt to set the + * Format did not success and any last-known Format is to + * be left in effect + */ + protected abstract Format setFormat( + int streamIndex, + Format oldValue, Format newValue); + + /** + * Starts the transfer of media data from this + * AbstractBufferCaptureDevice. + * + * @throws IOException if anything goes wrong while starting the transfer of + * media data from this AbstractBufferCaptureDevice + */ + public void start() + throws IOException + { + lock(); + try + { + if (!started) + { + if (!connected) + { + throw new IOException( + getClass().getName() + " not connected"); + } + + doStart(); + started = true; + } + } + finally + { + unlock(); + } + } + + /** + * Stops the transfer of media data from this + * AbstractBufferCaptureDevice. + * + * @throws IOException if anything goes wrong while stopping the transfer of + * media data from this AbstractBufferCaptureDevice + */ + public void stop() + throws IOException + { + lock(); + try + { + if (started) + { + doStop(); + started = false; + } + } + finally + { + unlock(); + } + } + + /** + * Gets the internal array of AbstractBufferStreams through which + * this AbstractBufferCaptureDevice gives access to its media data. 
+ * + * @return the internal array of AbstractBufferStreams through + * which this AbstractBufferCaptureDevice gives access to its media + * data + */ + AbstractBufferStream[] streams() + { + return streams; + } + + private void unlock() + { + lock.unlock(); + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferStream.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferStream.java index 9bdc240ff..06ecdfa7a 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferStream.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractBufferStream.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,242 +13,242 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol; - -import java.io.*; - -import javax.media.*; -import javax.media.control.*; -import javax.media.protocol.*; - -import org.jitsi.impl.neomedia.control.*; -import org.jitsi.util.*; - -/** - * Provides a base implementation of SourceStream in order to - * facilitate implementers by taking care of boilerplate in the most common - * cases. - * - * @author Lyubomir Marinov - */ -abstract class AbstractBufferStream - extends AbstractControls - implements SourceStream -{ - /** - * The Logger used by the AbstractBufferStream class and - * its instances. - */ - private static final Logger logger - = Logger.getLogger(AbstractBufferStream.class); - - /** - * The (default) ContentDescriptor of the - * AbstractBufferStream instances. - */ - private static final ContentDescriptor CONTENT_DESCRIPTOR - = new ContentDescriptor(ContentDescriptor.RAW); - - /** - * The DataSource which has created this instance and which - * contains it as one of its streams. 
- */ - protected final T dataSource; - - /** - * The FormatControl which gives access to the Format of - * the media data provided by this SourceStream and which, - * optionally, allows setting it. - */ - protected final FormatControl formatControl; - - /** - * Initializes a new AbstractBufferStream instance which is to have - * its Format-related information abstracted by a specific - * FormatControl. - * - * @param dataSource the DataSource which is creating the new - * instance so that it becomes one of its streams - * @param formatControl the FormatControl which is to abstract the - * Format-related information of the new instance - */ - protected AbstractBufferStream(T dataSource, FormatControl formatControl) - { - this.dataSource = dataSource; - this.formatControl = formatControl; - } - - /** - * Releases the resources used by this instance throughout its existence and - * makes it available for garbage collection. This instance is considered - * unusable after closing. - *

- * Warning: The method is not invoked by the framework, extenders may - * choose to invoke it. - *

- */ - public void close() - { - try - { - stop(); - } - catch (IOException ioex) - { - logger.error("Failed to stop " + getClass().getSimpleName(), ioex); - } - } - - /** - * Gets the Format of this AbstractBufferStream as - * directly known by it. Allows extenders to override the Format - * known to the DataSource which created this instance and possibly - * provide more details on the currently set Format. - * - * @return the Format of this AbstractBufferStream as - * directly known by it or null if this - * AbstractBufferStream does not directly know its Format - * and it relies on the DataSource which created it to report its - * Format - */ - protected Format doGetFormat() - { - return null; - } - - /** - * Attempts to set the Format of this - * AbstractBufferStream. Allows extenders to enable setting the - * Format of an existing AbstractBufferStream (in contract - * to setting it before the AbstractBufferStream is created by the - * DataSource which will provide it). - * - * @param format the Format to be set as the format of this - * AbstractBufferStream - * @return the Format of this AbstractBufferStream or - * null if the attempt to set the Format did not succeed - * and any last-known Format is to be left in effect - */ - protected Format doSetFormat(Format format) - { - return null; - } - - /** - * Determines whether the end of this SourceStream has been - * reached. The AbstractBufferStream implementation always returns - * false. - * - * @return true if the end of this SourceStream has been - * reached; otherwise, false - */ - public boolean endOfStream() - { - return false; - } - - /** - * Gets a ContentDescriptor which describes the type of the content - * made available by this SourceStream. The - * AbstractBufferStream implementation always returns a - * ContentDescriptor with content type equal to - * ContentDescriptor#RAW. 
- * - * @return a ContentDescriptor which describes the type of the - * content made available by this SourceStream - */ - public ContentDescriptor getContentDescriptor() - { - return CONTENT_DESCRIPTOR; - } - - /** - * Gets the length in bytes of the content made available by this - * SourceStream. The AbstractBufferStream implementation - * always returns LENGTH_UNKNOWN. - * - * @return the length in bytes of the content made available by this - * SourceStream if it is known; otherwise, LENGTH_UKNOWN - */ - public long getContentLength() - { - return LENGTH_UNKNOWN; - } - - /** - * Implements {@link javax.media.protocol.Controls#getControls()}. Gets the - * controls available for this instance. - * - * @return an array of Objects which represent the controls - * available for this instance - */ - public Object[] getControls() - { - if (formatControl != null) - return new Object[] { formatControl }; - else - return ControlsAdapter.EMPTY_CONTROLS; - } - - /** - * Gets the Format of the media data made available by this - * AbstractBufferStream. - * - * @return the Format of the media data made available by this - * AbstractBufferStream - */ - public Format getFormat() - { - return (formatControl == null) ? null : formatControl.getFormat(); - } - - /** - * Gets the Format of this AbstractBufferStream as - * directly known by it. - * - * @return the Format of this AbstractBufferStream as - * directly known by it - */ - Format internalGetFormat() - { - return doGetFormat(); - } - - /** - * Attempts to set the Format of this - * AbstractBufferStream. - * - * @param format the Format to be set as the format of this - * AbstractBufferStream - * @return the Format of this AbstractBufferStream or - * null if the attempt to set the Format did not succeed - * and any last-known Format is to be left in effect - */ - Format internalSetFormat(Format format) - { - return doSetFormat(format); - } - - /** - * Starts the transfer of media data from this - * AbstractBufferStream. 
- * - * @throws IOException if anything goes wrong while starting the transfer of - * media data from this AbstractBufferStream - */ - public void start() - throws IOException - { - } - - /** - * Stops the transfer of media data from this AbstractBufferStream. - * - * @throws IOException if anything goes wrong while stopping the transfer of - * media data from this AbstractBufferStream - */ - public void stop() - throws IOException - { - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol; + +import java.io.*; + +import javax.media.*; +import javax.media.control.*; +import javax.media.protocol.*; + +import org.jitsi.impl.neomedia.control.*; +import org.jitsi.util.*; + +/** + * Provides a base implementation of SourceStream in order to + * facilitate implementers by taking care of boilerplate in the most common + * cases. + * + * @author Lyubomir Marinov + */ +abstract class AbstractBufferStream + extends AbstractControls + implements SourceStream +{ + /** + * The Logger used by the AbstractBufferStream class and + * its instances. + */ + private static final Logger logger + = Logger.getLogger(AbstractBufferStream.class); + + /** + * The (default) ContentDescriptor of the + * AbstractBufferStream instances. + */ + private static final ContentDescriptor CONTENT_DESCRIPTOR + = new ContentDescriptor(ContentDescriptor.RAW); + + /** + * The DataSource which has created this instance and which + * contains it as one of its streams. + */ + protected final T dataSource; + + /** + * The FormatControl which gives access to the Format of + * the media data provided by this SourceStream and which, + * optionally, allows setting it. + */ + protected final FormatControl formatControl; + + /** + * Initializes a new AbstractBufferStream instance which is to have + * its Format-related information abstracted by a specific + * FormatControl. 
+ * + * @param dataSource the DataSource which is creating the new + * instance so that it becomes one of its streams + * @param formatControl the FormatControl which is to abstract the + * Format-related information of the new instance + */ + protected AbstractBufferStream(T dataSource, FormatControl formatControl) + { + this.dataSource = dataSource; + this.formatControl = formatControl; + } + + /** + * Releases the resources used by this instance throughout its existence and + * makes it available for garbage collection. This instance is considered + * unusable after closing. + *

+ * Warning: The method is not invoked by the framework, extenders may + * choose to invoke it. + *

+ */ + public void close() + { + try + { + stop(); + } + catch (IOException ioex) + { + logger.error("Failed to stop " + getClass().getSimpleName(), ioex); + } + } + + /** + * Gets the Format of this AbstractBufferStream as + * directly known by it. Allows extenders to override the Format + * known to the DataSource which created this instance and possibly + * provide more details on the currently set Format. + * + * @return the Format of this AbstractBufferStream as + * directly known by it or null if this + * AbstractBufferStream does not directly know its Format + * and it relies on the DataSource which created it to report its + * Format + */ + protected Format doGetFormat() + { + return null; + } + + /** + * Attempts to set the Format of this + * AbstractBufferStream. Allows extenders to enable setting the + * Format of an existing AbstractBufferStream (in contract + * to setting it before the AbstractBufferStream is created by the + * DataSource which will provide it). + * + * @param format the Format to be set as the format of this + * AbstractBufferStream + * @return the Format of this AbstractBufferStream or + * null if the attempt to set the Format did not succeed + * and any last-known Format is to be left in effect + */ + protected Format doSetFormat(Format format) + { + return null; + } + + /** + * Determines whether the end of this SourceStream has been + * reached. The AbstractBufferStream implementation always returns + * false. + * + * @return true if the end of this SourceStream has been + * reached; otherwise, false + */ + public boolean endOfStream() + { + return false; + } + + /** + * Gets a ContentDescriptor which describes the type of the content + * made available by this SourceStream. The + * AbstractBufferStream implementation always returns a + * ContentDescriptor with content type equal to + * ContentDescriptor#RAW. 
+ * + * @return a ContentDescriptor which describes the type of the + * content made available by this SourceStream + */ + public ContentDescriptor getContentDescriptor() + { + return CONTENT_DESCRIPTOR; + } + + /** + * Gets the length in bytes of the content made available by this + * SourceStream. The AbstractBufferStream implementation + * always returns LENGTH_UNKNOWN. + * + * @return the length in bytes of the content made available by this + * SourceStream if it is known; otherwise, LENGTH_UKNOWN + */ + public long getContentLength() + { + return LENGTH_UNKNOWN; + } + + /** + * Implements {@link javax.media.protocol.Controls#getControls()}. Gets the + * controls available for this instance. + * + * @return an array of Objects which represent the controls + * available for this instance + */ + public Object[] getControls() + { + if (formatControl != null) + return new Object[] { formatControl }; + else + return ControlsAdapter.EMPTY_CONTROLS; + } + + /** + * Gets the Format of the media data made available by this + * AbstractBufferStream. + * + * @return the Format of the media data made available by this + * AbstractBufferStream + */ + public Format getFormat() + { + return (formatControl == null) ? null : formatControl.getFormat(); + } + + /** + * Gets the Format of this AbstractBufferStream as + * directly known by it. + * + * @return the Format of this AbstractBufferStream as + * directly known by it + */ + Format internalGetFormat() + { + return doGetFormat(); + } + + /** + * Attempts to set the Format of this + * AbstractBufferStream. + * + * @param format the Format to be set as the format of this + * AbstractBufferStream + * @return the Format of this AbstractBufferStream or + * null if the attempt to set the Format did not succeed + * and any last-known Format is to be left in effect + */ + Format internalSetFormat(Format format) + { + return doSetFormat(format); + } + + /** + * Starts the transfer of media data from this + * AbstractBufferStream. 
+ * + * @throws IOException if anything goes wrong while starting the transfer of + * media data from this AbstractBufferStream + */ + public void start() + throws IOException + { + } + + /** + * Stops the transfer of media data from this AbstractBufferStream. + * + * @throws IOException if anything goes wrong while stopping the transfer of + * media data from this AbstractBufferStream + */ + public void stop() + throws IOException + { + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferCaptureDevice.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferCaptureDevice.java index 56c43d58a..5b94c1f5a 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferCaptureDevice.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferCaptureDevice.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,76 +13,76 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol; - -import javax.media.*; -import javax.media.control.*; - -import org.jitsi.impl.neomedia.control.*; - -/** - * Provides a base implementation of PullBufferDataSource and - * CaptureDevice for the purposes of video in order to facilitate - * implementers by taking care of boilerplate in the most common cases. - * - * @author Lyubomir Marinov - */ -public abstract class AbstractVideoPullBufferCaptureDevice - extends AbstractPullBufferCaptureDevice -{ - - /** - * Initializes a new AbstractVideoPullBufferCaptureDevice instance. - */ - protected AbstractVideoPullBufferCaptureDevice() - { - } - - /** - * Initializes a new AbstractVideoPullBufferCaptureDevice instance - * from a specific MediaLocator. 
- * - * @param locator the MediaLocator to create the new instance from - */ - protected AbstractVideoPullBufferCaptureDevice(MediaLocator locator) - { - super(locator); - } - - /** - * Creates a new FrameRateControl instance which is to allow the - * getting and setting of the frame rate of this - * AbstractVideoPullBufferCaptureDevice. - * - * @return a new FrameRateControl instance which is to allow the - * getting and setting of the frame rate of this - * AbstractVideoPullBufferCaptureDevice - * @see AbstractPullBufferCaptureDevice#createFrameRateControl() - */ - @Override - protected FrameRateControl createFrameRateControl() - { - return - new FrameRateControlAdapter() - { - /** - * The output frame rate of this - * AbstractVideoPullBufferCaptureDevice. - */ - private float frameRate = -1; - - @Override - public float getFrameRate() - { - return frameRate; - } - - @Override - public float setFrameRate(float frameRate) - { - this.frameRate = frameRate; - return this.frameRate; - } - }; - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol; + +import javax.media.*; +import javax.media.control.*; + +import org.jitsi.impl.neomedia.control.*; + +/** + * Provides a base implementation of PullBufferDataSource and + * CaptureDevice for the purposes of video in order to facilitate + * implementers by taking care of boilerplate in the most common cases. + * + * @author Lyubomir Marinov + */ +public abstract class AbstractVideoPullBufferCaptureDevice + extends AbstractPullBufferCaptureDevice +{ + + /** + * Initializes a new AbstractVideoPullBufferCaptureDevice instance. + */ + protected AbstractVideoPullBufferCaptureDevice() + { + } + + /** + * Initializes a new AbstractVideoPullBufferCaptureDevice instance + * from a specific MediaLocator. 
+ * + * @param locator the MediaLocator to create the new instance from + */ + protected AbstractVideoPullBufferCaptureDevice(MediaLocator locator) + { + super(locator); + } + + /** + * Creates a new FrameRateControl instance which is to allow the + * getting and setting of the frame rate of this + * AbstractVideoPullBufferCaptureDevice. + * + * @return a new FrameRateControl instance which is to allow the + * getting and setting of the frame rate of this + * AbstractVideoPullBufferCaptureDevice + * @see AbstractPullBufferCaptureDevice#createFrameRateControl() + */ + @Override + protected FrameRateControl createFrameRateControl() + { + return + new FrameRateControlAdapter() + { + /** + * The output frame rate of this + * AbstractVideoPullBufferCaptureDevice. + */ + private float frameRate = -1; + + @Override + public float getFrameRate() + { + return frameRate; + } + + @Override + public float setFrameRate(float frameRate) + { + this.frameRate = frameRate; + return this.frameRate; + } + }; + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferStream.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferStream.java index dfe684a94..204febdc8 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferStream.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/AbstractVideoPullBufferStream.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,180 +13,180 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.impl.neomedia.jmfext.media.protocol; - -import java.io.*; - -import javax.media.*; -import javax.media.control.*; -import javax.media.protocol.*; - -/** - * Provides a base implementation of PullBufferStream for video in - * order to facilitate implementers by taking care of boilerplate in the most - * common cases. - * - * @author Lyubomir Marinov - */ -public abstract class AbstractVideoPullBufferStream - - extends AbstractPullBufferStream -{ - - /** - * The output frame rate of this AbstractVideoPullBufferStream - * which has been specified by {@link #frameRateControl} and depending on - * which {@link #minimumVideoFrameInterval} has been calculated. - */ - private float frameRate; - - /** - * The FrameRateControl which gets and sets the output frame rate - * of this AbstractVideoPullBufferStream. - */ - private FrameRateControl frameRateControl; - - /** - * The minimum interval in milliseconds between consecutive video frames - * i.e. the reverse of {@link #frameRate}. - */ - private long minimumVideoFrameInterval; - - /** - * Initializes a new AbstractVideoPullBufferStream instance which - * is to have its Format-related information abstracted by a - * specific FormatControl. - * - * @param dataSource the PullBufferDataSource which is creating the - * new instance so that it becomes one of its streams - * @param formatControl the FormatControl which is to abstract the - * Format-related information of the new instance - */ - protected AbstractVideoPullBufferStream( - T dataSource, - FormatControl formatControl) - { - super(dataSource, formatControl); - } - - /** - * Blocks and reads into a Buffer from this - * PullBufferStream. 
- * - * @param buffer the Buffer this PullBufferStream is to - * read into - * @throws IOException if an I/O error occurs while this - * PullBufferStream reads into the specified Buffer - */ - protected abstract void doRead(Buffer buffer) - throws IOException; - - /** - * Blocks and reads into a Buffer from this - * PullBufferStream. - * - * @param buffer the Buffer this PullBufferStream is to - * read into - * @throws IOException if an I/O error occurs while this - * PullBufferStream reads into the specified Buffer - */ - public void read(Buffer buffer) - throws IOException - { - FrameRateControl frameRateControl = this.frameRateControl; - - if (frameRateControl != null) - { - float frameRate = frameRateControl.getFrameRate(); - - if (frameRate > 0) - { - if (this.frameRate != frameRate) - { - minimumVideoFrameInterval = (long) (1000 / frameRate); - this.frameRate = frameRate; - } - if (minimumVideoFrameInterval > 0) - { - long startTime = System.currentTimeMillis(); - - doRead(buffer); - - if (!buffer.isDiscard()) - { - boolean interrupted = false; - - while (true) - { - // Sleep to respect the frame rate as much as possible. - long sleep - = minimumVideoFrameInterval - - (System.currentTimeMillis() - startTime); - - if (sleep > 0) - { - try - { - Thread.sleep(sleep); - } - catch (InterruptedException ie) - { - interrupted = true; - } - } - else - { - // Yield a little bit to not use all the whole CPU. - Thread.yield(); - break; - } - } - if (interrupted) - Thread.currentThread().interrupt(); - } - - // We've executed #doRead(Buffer). - return; - } - } - } - - // If there is no frame rate to be respected, just #doRead(Buffer). - doRead(buffer); - } - - /** - * Starts the transfer of media data from this - * AbstractBufferStream. 
- * - * @throws IOException if anything goes wrong while starting the transfer of - * media data from this AbstractBufferStream - * @see AbstractBufferStream#start() - */ - @Override - public void start() - throws IOException - { - super.start(); - - frameRateControl - = (FrameRateControl) - dataSource.getControl(FrameRateControl.class.getName()); - } - - /** - * Stops the transfer of media data from this AbstractBufferStream. - * - * @throws IOException if anything goes wrong while stopping the transfer of - * media data from this AbstractBufferStream - * @see AbstractBufferStream#stop() - */ - @Override - public void stop() - throws IOException - { - super.stop(); - - frameRateControl = null; - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol; + +import java.io.*; + +import javax.media.*; +import javax.media.control.*; +import javax.media.protocol.*; + +/** + * Provides a base implementation of PullBufferStream for video in + * order to facilitate implementers by taking care of boilerplate in the most + * common cases. + * + * @author Lyubomir Marinov + */ +public abstract class AbstractVideoPullBufferStream + + extends AbstractPullBufferStream +{ + + /** + * The output frame rate of this AbstractVideoPullBufferStream + * which has been specified by {@link #frameRateControl} and depending on + * which {@link #minimumVideoFrameInterval} has been calculated. + */ + private float frameRate; + + /** + * The FrameRateControl which gets and sets the output frame rate + * of this AbstractVideoPullBufferStream. + */ + private FrameRateControl frameRateControl; + + /** + * The minimum interval in milliseconds between consecutive video frames + * i.e. the reverse of {@link #frameRate}. + */ + private long minimumVideoFrameInterval; + + /** + * Initializes a new AbstractVideoPullBufferStream instance which + * is to have its Format-related information abstracted by a + * specific FormatControl. 
+ * + * @param dataSource the PullBufferDataSource which is creating the + * new instance so that it becomes one of its streams + * @param formatControl the FormatControl which is to abstract the + * Format-related information of the new instance + */ + protected AbstractVideoPullBufferStream( + T dataSource, + FormatControl formatControl) + { + super(dataSource, formatControl); + } + + /** + * Blocks and reads into a Buffer from this + * PullBufferStream. + * + * @param buffer the Buffer this PullBufferStream is to + * read into + * @throws IOException if an I/O error occurs while this + * PullBufferStream reads into the specified Buffer + */ + protected abstract void doRead(Buffer buffer) + throws IOException; + + /** + * Blocks and reads into a Buffer from this + * PullBufferStream. + * + * @param buffer the Buffer this PullBufferStream is to + * read into + * @throws IOException if an I/O error occurs while this + * PullBufferStream reads into the specified Buffer + */ + public void read(Buffer buffer) + throws IOException + { + FrameRateControl frameRateControl = this.frameRateControl; + + if (frameRateControl != null) + { + float frameRate = frameRateControl.getFrameRate(); + + if (frameRate > 0) + { + if (this.frameRate != frameRate) + { + minimumVideoFrameInterval = (long) (1000 / frameRate); + this.frameRate = frameRate; + } + if (minimumVideoFrameInterval > 0) + { + long startTime = System.currentTimeMillis(); + + doRead(buffer); + + if (!buffer.isDiscard()) + { + boolean interrupted = false; + + while (true) + { + // Sleep to respect the frame rate as much as possible. + long sleep + = minimumVideoFrameInterval + - (System.currentTimeMillis() - startTime); + + if (sleep > 0) + { + try + { + Thread.sleep(sleep); + } + catch (InterruptedException ie) + { + interrupted = true; + } + } + else + { + // Yield a little bit to not use all the whole CPU. 
+ Thread.yield(); + break; + } + } + if (interrupted) + Thread.currentThread().interrupt(); + } + + // We've executed #doRead(Buffer). + return; + } + } + } + + // If there is no frame rate to be respected, just #doRead(Buffer). + doRead(buffer); + } + + /** + * Starts the transfer of media data from this + * AbstractBufferStream. + * + * @throws IOException if anything goes wrong while starting the transfer of + * media data from this AbstractBufferStream + * @see AbstractBufferStream#start() + */ + @Override + public void start() + throws IOException + { + super.start(); + + frameRateControl + = (FrameRateControl) + dataSource.getControl(FrameRateControl.class.getName()); + } + + /** + * Stops the transfer of media data from this AbstractBufferStream. + * + * @throws IOException if anything goes wrong while stopping the transfer of + * media data from this AbstractBufferStream + * @see AbstractBufferStream#stop() + */ + @Override + public void stop() + throws IOException + { + super.stop(); + + frameRateControl = null; + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DataSource.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DataSource.java index 1b59b8b8d..2c7c5bbfc 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DataSource.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DataSource.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,429 +13,429 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.impl.neomedia.jmfext.media.protocol.directshow; - -import java.awt.*; -import java.io.*; -import java.util.*; -import java.util.List; - -import javax.media.*; -import javax.media.control.*; - -import org.jitsi.impl.neomedia.codec.*; -import org.jitsi.impl.neomedia.codec.video.*; -import org.jitsi.impl.neomedia.control.*; -import org.jitsi.impl.neomedia.device.*; -import org.jitsi.impl.neomedia.jmfext.media.protocol.*; -import org.jitsi.util.*; - -/** - * Implements a CaptureDevice and a DataSource using - * DirectShow. - * - * @author Lyubomir Marinov - * @author Sebastien Vincent - */ -public class DataSource - extends AbstractVideoPushBufferCaptureDevice -{ - /** - * The map of DirectShow pixel formats to FFmpeg pixel formats which allows - * converting between the two. - */ - private static final int[] DS_TO_FFMPEG_PIX_FMTS - = new int[] - { - DSFormat.RGB24, - FFmpeg.PIX_FMT_RGB24, - DSFormat.RGB32, - FFmpeg.PIX_FMT_RGB32, - DSFormat.ARGB32, - FFmpeg.PIX_FMT_ARGB, - DSFormat.YUY2, - FFmpeg.PIX_FMT_YUYV422, - DSFormat.MJPG, - FFmpeg.PIX_FMT_YUVJ422P, - DSFormat.UYVY, - FFmpeg.PIX_FMT_UYVY422, - DSFormat.Y411, - FFmpeg.PIX_FMT_UYYVYY411, - DSFormat.Y41P, - FFmpeg.PIX_FMT_YUV411P, - DSFormat.NV12, - FFmpeg.PIX_FMT_NV12, - DSFormat.I420, - FFmpeg.PIX_FMT_YUV420P - }; - - /** - * The Logger used by the DataSource class and its - * instances for logging output. - */ - private static final Logger logger = Logger.getLogger(DataSource.class); - - /** - * Gets the FFmpeg pixel format matching a specific DirectShow - * Specification pixel format. 
- * - * @param ffmpegPixFmt FFmpeg format - * @return the DirectShow pixel format matching the specified FFmpeg format - */ - public static int getDSPixFmt(int ffmpegPixFmt) - { - for (int i = 0; i < DS_TO_FFMPEG_PIX_FMTS.length; i += 2) - if (DS_TO_FFMPEG_PIX_FMTS[i + 1] == ffmpegPixFmt) - return DS_TO_FFMPEG_PIX_FMTS[i]; - return -1; - } - - /** - * Gets the DirectShow pixel format matching a specific FFmpeg pixel - * format. - * - * @param dsPixFmt the DirectShow pixel format to get the matching - * FFmpeg pixel format of - * @return the FFmpeg pixel format matching the specified DirectShow pixel - */ - public static int getFFmpegPixFmt(int dsPixFmt) - { - for (int i = 0; i < DS_TO_FFMPEG_PIX_FMTS.length; i += 2) - if (DS_TO_FFMPEG_PIX_FMTS[i] == dsPixFmt) - return DS_TO_FFMPEG_PIX_FMTS[i + 1]; - return FFmpeg.PIX_FMT_NONE; - } - - /** - * DirectShow capture device. - */ - private DSCaptureDevice device; - - /** - * DirectShow manager. - */ - private DSManager manager; - - /** - * Constructor. - */ - public DataSource() - { - this(null); - } - - /** - * Initializes a new DataSource instance from a specific - * MediaLocator. - * - * @param locator the MediaLocator to create the new instance from - */ - public DataSource(MediaLocator locator) - { - super(locator); - } - - /** - * Creates a new FrameRateControl instance which is to allow the - * getting and setting of the frame rate of this - * AbstractVideoPushBufferCaptureDevice. - * - * @return a new FrameRateControl instance which is to allow the - * getting and setting of the frame rate of this - * AbstractVideoPushBufferCaptureDevice - * @see AbstractPushBufferCaptureDevice#createFrameRateControl() - */ - @Override - protected FrameRateControl createFrameRateControl() - { - return - new FrameRateControlAdapter() - { - /** - * The output frame rate of this - * AbstractVideoPullBufferCaptureDevice. 
- */ - private float frameRate = -1; - - @Override - public float getFrameRate() - { - return frameRate; - } - - @Override - public float setFrameRate(float frameRate) - { - this.frameRate = frameRate; - return this.frameRate; - } - }; - } - - /** - * Create a new PushBufferStream which is to be at a specific - * zero-based index in the list of streams of this - * PushBufferDataSource. The Format-related information of - * the new instance is to be abstracted by a specific - * FormatControl. - * - * @param streamIndex the zero-based index of the PushBufferStream - * in the list of streams of this PushBufferDataSource - * @param formatControl the FormatControl which is to abstract the - * Format-related information of the new instance - * @return a new PushBufferStream which is to be at the specified - * streamIndex in the list of streams of this - * PushBufferDataSource and which has its Format-related - * information abstracted by the specified formatControl - * @see AbstractPushBufferCaptureDevice#createStream(int, FormatControl) - */ - @Override - protected DirectShowStream createStream( - int streamIndex, - FormatControl formatControl) - { - DirectShowStream stream = new DirectShowStream(this, formatControl); - - if (logger.isTraceEnabled()) - { - DSCaptureDevice device = this.device; - - if (device != null) - { - DSFormat supportedFormats[] = device.getSupportedFormats(); - - for (DSFormat supportedFormat : supportedFormats) - { - logger.trace( - "width= " + supportedFormat.getWidth() - + ", height= " + supportedFormat.getHeight() - + ", pixelFormat= " - + supportedFormat.getPixelFormat()); - } - } - } - - return stream; - } - - /** - * Opens a connection to the media source specified by the - * MediaLocator of this DataSource. 
- * - * @throws IOException if anything goes wrong while opening the connection - * to the media source specified by the MediaLocator of this - * DataSource - * @see AbstractPushBufferCaptureDevice#doConnect() - */ - @Override - protected void doConnect() - throws IOException - { - super.doConnect(); - - boolean connected = false; - - try - { - DSCaptureDevice device = getDevice(); - - device.connect(); - - synchronized (getStreamSyncRoot()) - { - for (Object stream : getStreams()) - ((DirectShowStream) stream).setDevice(device); - } - - connected = true; - } - finally - { - if (!connected) - { - /* - * The connect attempt has failed but it may have been - * successful up to the point of failure thus partially - * modifying the state. The disconnect procedure is prepared to - * deal with a partially modified state and will restore it to - * its pristine form. - */ - doDisconnect(); - } - } - } - - /** - * Closes the connection to the media source specified by the - * MediaLocator of this DataSource. 
- * - * @see AbstractPushBufferCaptureDevice#doDisconnect() - */ - @Override - protected void doDisconnect() - { - try - { - synchronized (getStreamSyncRoot()) - { - for (Object stream : getStreams()) - { - try - { - ((DirectShowStream) stream).setDevice(null); - } - catch (IOException ioe) - { - logger.error( - "Failed to disconnect " - + stream.getClass().getName(), - ioe); - } - } - } - } - finally - { - if (device != null) - { - device.disconnect(); - device = null; - } - if (manager != null) - { - manager.dispose(); - manager = null; - } - - super.doDisconnect(); - } - } - - private DSCaptureDevice getDevice() - { - DSCaptureDevice device = this.device; - - if (device == null) - { - MediaLocator locator = getLocator(); - - if (locator == null) - throw new IllegalStateException("locator"); - if (!locator.getProtocol().equalsIgnoreCase( - DeviceSystem.LOCATOR_PROTOCOL_DIRECTSHOW)) - throw new IllegalStateException("locator.protocol"); - - String remainder = locator.getRemainder(); - - if (remainder == null) - throw new IllegalStateException("locator.remainder"); - - if (manager == null) - manager = new DSManager(); - try - { - /* - * Find the device specified by the locator using matching by - * name. - */ - for (DSCaptureDevice d : manager.getCaptureDevices()) - { - if (remainder.equals(d.getName())) - { - device = d; - break; - } - } - - if (device != null) - this.device = device; - } - finally - { - if (this.device == null) - { - manager.dispose(); - manager = null; - } - } - } - - return device; - } - - /** - * Gets the Formats which are to be reported by a - * FormatControl as supported formats for a - * PushBufferStream at a specific zero-based index in the list of - * streams of this PushBufferDataSource. 
- * - * @param streamIndex the zero-based index of the PushBufferStream - * for which the specified FormatControl is to report the list of - * supported Formats - * @return an array of Formats to be reported by a - * FormatControl as the supported formats for the - * PushBufferStream at the specified streamIndex in the - * list of streams of this PushBufferDataSource - * @see AbstractPushBufferCaptureDevice#getSupportedFormats(int) - */ - @Override - protected Format[] getSupportedFormats(int streamIndex) - { - DSCaptureDevice device = this.device; - - if (device == null) - return super.getSupportedFormats(streamIndex); - - DSFormat[] deviceFmts = device.getSupportedFormats(); - List fmts = new ArrayList(deviceFmts.length); - - for (DSFormat deviceFmt : deviceFmts) - { - Dimension size - = new Dimension(deviceFmt.getWidth(), deviceFmt.getHeight()); - int devicePixFmt = deviceFmt.getPixelFormat(); - int pixFmt = getFFmpegPixFmt(devicePixFmt); - - if (pixFmt != FFmpeg.PIX_FMT_NONE) - { - fmts.add( - new AVFrameFormat( - size, - Format.NOT_SPECIFIED, - pixFmt, devicePixFmt)); - } - } - return fmts.toArray(new Format[fmts.size()]); - } - - /** - * Attempts to set the Format to be reported by the - * FormatControl of a PushBufferStream at a specific - * zero-based index in the list of streams of this - * PushBufferDataSource. The PushBufferStream does not - * exist at the time of the attempt to set its Format. 
- * - * @param streamIndex the zero-based index of the PushBufferStream - * the Format of which is to be set - * @param oldValue the last-known Format for the - * PushBufferStream at the specified streamIndex - * @param newValue the Format which is to be set - * @return the Format to be reported by the FormatControl - * of the PushBufferStream at the specified streamIndex - * in the list of streams of this PushBufferStream or null - * if the attempt to set the Format did not success and any - * last-known Format is to be left in effect - * @see AbstractPushBufferCaptureDevice#setFormat(int, Format, Format) - */ - @Override - protected Format setFormat( - int streamIndex, - Format oldValue, Format newValue) - { - // This DataSource supports setFormat. - return - DirectShowStream.isSupportedFormat(newValue) - ? newValue - : super.setFormat(streamIndex, oldValue, newValue); - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.directshow; + +import java.awt.*; +import java.io.*; +import java.util.*; +import java.util.List; + +import javax.media.*; +import javax.media.control.*; + +import org.jitsi.impl.neomedia.codec.*; +import org.jitsi.impl.neomedia.codec.video.*; +import org.jitsi.impl.neomedia.control.*; +import org.jitsi.impl.neomedia.device.*; +import org.jitsi.impl.neomedia.jmfext.media.protocol.*; +import org.jitsi.util.*; + +/** + * Implements a CaptureDevice and a DataSource using + * DirectShow. + * + * @author Lyubomir Marinov + * @author Sebastien Vincent + */ +public class DataSource + extends AbstractVideoPushBufferCaptureDevice +{ + /** + * The map of DirectShow pixel formats to FFmpeg pixel formats which allows + * converting between the two. 
+ */ + private static final int[] DS_TO_FFMPEG_PIX_FMTS + = new int[] + { + DSFormat.RGB24, + FFmpeg.PIX_FMT_RGB24, + DSFormat.RGB32, + FFmpeg.PIX_FMT_RGB32, + DSFormat.ARGB32, + FFmpeg.PIX_FMT_ARGB, + DSFormat.YUY2, + FFmpeg.PIX_FMT_YUYV422, + DSFormat.MJPG, + FFmpeg.PIX_FMT_YUVJ422P, + DSFormat.UYVY, + FFmpeg.PIX_FMT_UYVY422, + DSFormat.Y411, + FFmpeg.PIX_FMT_UYYVYY411, + DSFormat.Y41P, + FFmpeg.PIX_FMT_YUV411P, + DSFormat.NV12, + FFmpeg.PIX_FMT_NV12, + DSFormat.I420, + FFmpeg.PIX_FMT_YUV420P + }; + + /** + * The Logger used by the DataSource class and its + * instances for logging output. + */ + private static final Logger logger = Logger.getLogger(DataSource.class); + + /** + * Gets the FFmpeg pixel format matching a specific DirectShow + * Specification pixel format. + * + * @param ffmpegPixFmt FFmpeg format + * @return the DirectShow pixel format matching the specified FFmpeg format + */ + public static int getDSPixFmt(int ffmpegPixFmt) + { + for (int i = 0; i < DS_TO_FFMPEG_PIX_FMTS.length; i += 2) + if (DS_TO_FFMPEG_PIX_FMTS[i + 1] == ffmpegPixFmt) + return DS_TO_FFMPEG_PIX_FMTS[i]; + return -1; + } + + /** + * Gets the DirectShow pixel format matching a specific FFmpeg pixel + * format. + * + * @param dsPixFmt the DirectShow pixel format to get the matching + * FFmpeg pixel format of + * @return the FFmpeg pixel format matching the specified DirectShow pixel + */ + public static int getFFmpegPixFmt(int dsPixFmt) + { + for (int i = 0; i < DS_TO_FFMPEG_PIX_FMTS.length; i += 2) + if (DS_TO_FFMPEG_PIX_FMTS[i] == dsPixFmt) + return DS_TO_FFMPEG_PIX_FMTS[i + 1]; + return FFmpeg.PIX_FMT_NONE; + } + + /** + * DirectShow capture device. + */ + private DSCaptureDevice device; + + /** + * DirectShow manager. + */ + private DSManager manager; + + /** + * Constructor. + */ + public DataSource() + { + this(null); + } + + /** + * Initializes a new DataSource instance from a specific + * MediaLocator. 
+ * + * @param locator the MediaLocator to create the new instance from + */ + public DataSource(MediaLocator locator) + { + super(locator); + } + + /** + * Creates a new FrameRateControl instance which is to allow the + * getting and setting of the frame rate of this + * AbstractVideoPushBufferCaptureDevice. + * + * @return a new FrameRateControl instance which is to allow the + * getting and setting of the frame rate of this + * AbstractVideoPushBufferCaptureDevice + * @see AbstractPushBufferCaptureDevice#createFrameRateControl() + */ + @Override + protected FrameRateControl createFrameRateControl() + { + return + new FrameRateControlAdapter() + { + /** + * The output frame rate of this + * AbstractVideoPullBufferCaptureDevice. + */ + private float frameRate = -1; + + @Override + public float getFrameRate() + { + return frameRate; + } + + @Override + public float setFrameRate(float frameRate) + { + this.frameRate = frameRate; + return this.frameRate; + } + }; + } + + /** + * Create a new PushBufferStream which is to be at a specific + * zero-based index in the list of streams of this + * PushBufferDataSource. The Format-related information of + * the new instance is to be abstracted by a specific + * FormatControl. 
+ * + * @param streamIndex the zero-based index of the PushBufferStream + * in the list of streams of this PushBufferDataSource + * @param formatControl the FormatControl which is to abstract the + * Format-related information of the new instance + * @return a new PushBufferStream which is to be at the specified + * streamIndex in the list of streams of this + * PushBufferDataSource and which has its Format-related + * information abstracted by the specified formatControl + * @see AbstractPushBufferCaptureDevice#createStream(int, FormatControl) + */ + @Override + protected DirectShowStream createStream( + int streamIndex, + FormatControl formatControl) + { + DirectShowStream stream = new DirectShowStream(this, formatControl); + + if (logger.isTraceEnabled()) + { + DSCaptureDevice device = this.device; + + if (device != null) + { + DSFormat supportedFormats[] = device.getSupportedFormats(); + + for (DSFormat supportedFormat : supportedFormats) + { + logger.trace( + "width= " + supportedFormat.getWidth() + + ", height= " + supportedFormat.getHeight() + + ", pixelFormat= " + + supportedFormat.getPixelFormat()); + } + } + } + + return stream; + } + + /** + * Opens a connection to the media source specified by the + * MediaLocator of this DataSource. 
+ * + * @throws IOException if anything goes wrong while opening the connection + * to the media source specified by the MediaLocator of this + * DataSource + * @see AbstractPushBufferCaptureDevice#doConnect() + */ + @Override + protected void doConnect() + throws IOException + { + super.doConnect(); + + boolean connected = false; + + try + { + DSCaptureDevice device = getDevice(); + + device.connect(); + + synchronized (getStreamSyncRoot()) + { + for (Object stream : getStreams()) + ((DirectShowStream) stream).setDevice(device); + } + + connected = true; + } + finally + { + if (!connected) + { + /* + * The connect attempt has failed but it may have been + * successful up to the point of failure thus partially + * modifying the state. The disconnect procedure is prepared to + * deal with a partially modified state and will restore it to + * its pristine form. + */ + doDisconnect(); + } + } + } + + /** + * Closes the connection to the media source specified by the + * MediaLocator of this DataSource. 
+ * + * @see AbstractPushBufferCaptureDevice#doDisconnect() + */ + @Override + protected void doDisconnect() + { + try + { + synchronized (getStreamSyncRoot()) + { + for (Object stream : getStreams()) + { + try + { + ((DirectShowStream) stream).setDevice(null); + } + catch (IOException ioe) + { + logger.error( + "Failed to disconnect " + + stream.getClass().getName(), + ioe); + } + } + } + } + finally + { + if (device != null) + { + device.disconnect(); + device = null; + } + if (manager != null) + { + manager.dispose(); + manager = null; + } + + super.doDisconnect(); + } + } + + private DSCaptureDevice getDevice() + { + DSCaptureDevice device = this.device; + + if (device == null) + { + MediaLocator locator = getLocator(); + + if (locator == null) + throw new IllegalStateException("locator"); + if (!locator.getProtocol().equalsIgnoreCase( + DeviceSystem.LOCATOR_PROTOCOL_DIRECTSHOW)) + throw new IllegalStateException("locator.protocol"); + + String remainder = locator.getRemainder(); + + if (remainder == null) + throw new IllegalStateException("locator.remainder"); + + if (manager == null) + manager = new DSManager(); + try + { + /* + * Find the device specified by the locator using matching by + * name. + */ + for (DSCaptureDevice d : manager.getCaptureDevices()) + { + if (remainder.equals(d.getName())) + { + device = d; + break; + } + } + + if (device != null) + this.device = device; + } + finally + { + if (this.device == null) + { + manager.dispose(); + manager = null; + } + } + } + + return device; + } + + /** + * Gets the Formats which are to be reported by a + * FormatControl as supported formats for a + * PushBufferStream at a specific zero-based index in the list of + * streams of this PushBufferDataSource. 
+ * + * @param streamIndex the zero-based index of the PushBufferStream + * for which the specified FormatControl is to report the list of + * supported Formats + * @return an array of Formats to be reported by a + * FormatControl as the supported formats for the + * PushBufferStream at the specified streamIndex in the + * list of streams of this PushBufferDataSource + * @see AbstractPushBufferCaptureDevice#getSupportedFormats(int) + */ + @Override + protected Format[] getSupportedFormats(int streamIndex) + { + DSCaptureDevice device = this.device; + + if (device == null) + return super.getSupportedFormats(streamIndex); + + DSFormat[] deviceFmts = device.getSupportedFormats(); + List fmts = new ArrayList(deviceFmts.length); + + for (DSFormat deviceFmt : deviceFmts) + { + Dimension size + = new Dimension(deviceFmt.getWidth(), deviceFmt.getHeight()); + int devicePixFmt = deviceFmt.getPixelFormat(); + int pixFmt = getFFmpegPixFmt(devicePixFmt); + + if (pixFmt != FFmpeg.PIX_FMT_NONE) + { + fmts.add( + new AVFrameFormat( + size, + Format.NOT_SPECIFIED, + pixFmt, devicePixFmt)); + } + } + return fmts.toArray(new Format[fmts.size()]); + } + + /** + * Attempts to set the Format to be reported by the + * FormatControl of a PushBufferStream at a specific + * zero-based index in the list of streams of this + * PushBufferDataSource. The PushBufferStream does not + * exist at the time of the attempt to set its Format. 
+ * + * @param streamIndex the zero-based index of the PushBufferStream + * the Format of which is to be set + * @param oldValue the last-known Format for the + * PushBufferStream at the specified streamIndex + * @param newValue the Format which is to be set + * @return the Format to be reported by the FormatControl + * of the PushBufferStream at the specified streamIndex + * in the list of streams of this PushBufferStream or null + * if the attempt to set the Format did not success and any + * last-known Format is to be left in effect + * @see AbstractPushBufferCaptureDevice#setFormat(int, Format, Format) + */ + @Override + protected Format setFormat( + int streamIndex, + Format oldValue, Format newValue) + { + // This DataSource supports setFormat. + return + DirectShowStream.isSupportedFormat(newValue) + ? newValue + : super.setFormat(streamIndex, oldValue, newValue); + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DirectShowStream.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DirectShowStream.java index 85a19f73d..26522d93d 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DirectShowStream.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/directshow/DirectShowStream.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,772 +13,772 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol.directshow; - -import java.awt.*; -import java.io.*; - -import javax.media.*; -import javax.media.control.*; -import javax.media.protocol.*; - -import org.jitsi.impl.neomedia.codec.*; -import org.jitsi.impl.neomedia.codec.video.*; -import org.jitsi.impl.neomedia.jmfext.media.protocol.*; -import org.jitsi.util.*; - -/** - * Implements a PushBufferStream using DirectShow. 
- * - * @author Lyubomir Marinov - * @author Sebastien Vincent - */ -public class DirectShowStream - extends AbstractPushBufferStream -{ - /** - * The Logger used by the DirectShowStream class and its - * instances to print out debugging information. - */ - private static final Logger logger - = Logger.getLogger(DirectShowStream.class); - - /** - * Determines whether a specific Format appears to be suitable for - * attempts to be set on DirectShowStream instances. - *

- * Note: If the method returns true, an actual attempt to - * set the specified format on an specific - * DirectShowStream instance may still fail but that will be - * because the finer-grained properties of the format are not - * supported by that DirectShowStream instance. - *

- * - * @param format the Format to be checked whether it appears to be - * suitable for attempts to be set on DirectShowStream instances - * @return true if the specified format appears to be - * suitable for attempts to be set on DirectShowStream instance; - * otherwise, false - */ - static boolean isSupportedFormat(Format format) - { - if (format instanceof AVFrameFormat) - { - AVFrameFormat avFrameFormat = (AVFrameFormat) format; - long pixFmt = avFrameFormat.getDeviceSystemPixFmt(); - - if (pixFmt != -1) - { - Dimension size = avFrameFormat.getSize(); - - /* - * We will set the native format in doStart() because a - * connect-disconnect-connect sequence of the native capture - * device may reorder its formats in a different way. - * Consequently, in the absence of further calls to - * setFormat() by JMF, a crash may occur later (typically, - * during scaling) because of a wrong format. - */ - if (size != null) - return true; - } - } - return false; - } - - /** - * The indicator which determines whether {@link #delegate} - * automatically drops late frames. If false, we have to drop them - * ourselves because DirectShow will buffer them all and the video will - * be late. - */ - private final boolean automaticallyDropsLateVideoFrames = false; - - /** - * The pool of ByteBuffers this instances is using to transfer the - * media data captured by {@link #delegate} out of this instance - * through the Buffers specified in its {@link #read(Buffer)}. - */ - private final ByteBufferPool byteBufferPool = new ByteBufferPool(); - - /** - * The captured media data to be returned in {@link #read(Buffer)}. - */ - private ByteBuffer data; - - /** - * The Object which synchronizes the access to the - * {@link #data}-related fields of this instance. - */ - private final Object dataSyncRoot = new Object(); - - /** - * The time stamp in nanoseconds of {@link #data}. - */ - private long dataTimeStamp; - - /** - * Delegate class to handle video data. 
- */ - private final DSCaptureDevice.ISampleGrabberCB delegate - = new DSCaptureDevice.ISampleGrabberCB() - { - @Override - public void SampleCB(long source, long ptr, int length) - { - DirectShowStream.this.SampleCB(source, ptr, length); - } - }; - - /** - * The DSCaptureDevice which identifies the DirectShow video - * capture device this SourceStream is to capture data from. - */ - private DSCaptureDevice device; - - /** - * The last-known Format of the media data made available by this - * PushBufferStream. - */ - private Format format; - - /** - * The captured media data to become the value of {@link #data} as soon as - * the latter becomes is consumed. Thus prepares this - * DirectShowStream to provide the latest available frame and not - * wait for DirectShow to capture a new one. - */ - private ByteBuffer nextData; - - /** - * The time stamp in nanoseconds of {@link #nextData}. - */ - private long nextDataTimeStamp; - - /** - * The Thread which is to call - * {@link BufferTransferHandler#transferData(PushBufferStream)} for this - * DirectShowStream so that the call is not made in DirectShow - * and we can drop late frames when - * {@link #automaticallyDropsLateVideoFrames} is false. - */ - private Thread transferDataThread; - - /** - * Native Video pixel format. - */ - private int nativePixelFormat = 0; - - /** - * The AVCodecContext of the MJPEG decoder. - */ - private long avctx = 0; - - /** - * The AVFrame which represents the media data decoded by the MJPEG - * decoder/{@link #avctx}. - */ - private long avframe = 0; - - /** - * Initializes a new DirectShowStream instance which is to have its - * Format-related information abstracted by a specific - * FormatControl. 
- * - * @param dataSource the DataSource which is creating the new - * instance so that it becomes one of its streams - * @param formatControl the FormatControl which is to abstract the - * Format-related information of the new instance - */ - DirectShowStream(DataSource dataSource, FormatControl formatControl) - { - super(dataSource, formatControl); - } - - /** - * Connects this SourceStream to the DirectShow video capture - * device identified by {@link #device}. - * - * @throws IOException if anything goes wrong while this - * SourceStream connects to the DirectShow video capture device - * identified by device - */ - private void connect() - throws IOException - { - if (device == null) - throw new IOException("device == null"); - else - device.setDelegate(delegate); - } - - /** - * Disconnects this SourceStream from the DirectShow video capture - * device it has previously connected to during the execution of - * {@link #connect()}. - * - * @throws IOException if anything goes wrong while this - * SourceStream disconnects from the DirectShow video capture - * device it has previously connected to during the execution of - * connect() - */ - private void disconnect() - throws IOException - { - try - { - stop(); - } - finally - { - if (device != null) - device.setDelegate(null); - } - } - - /** - * Gets the Format of this PushBufferStream as directly - * known by it. - * - * @return the Format of this PushBufferStream as directly - * known by it or null if this PushBufferStream does not - * directly know its Format and it relies on the - * PushBufferDataSource which created it to report its - * Format - */ - @Override - protected Format doGetFormat() - { - return (format == null) ? super.doGetFormat() : format; - } - - /** - * {@inheritDoc} - * - * Overrides the super implementation to enable setting the Format - * of this DirectShowStream after the DataSource which - * provides it has been connected. 
- */ - @Override - protected Format doSetFormat(Format format) - { - if (isSupportedFormat(format)) - { - if (device == null) - return format; - else - { - try - { - setDeviceFormat(format); - } - catch (IOException ioe) - { - logger.error( - "Failed to set format on DirectShowStream: " - + format, - ioe); - /* - * Ignore the exception because the method is to report - * failures by returning null (which will be achieved - * outside the catch block). - */ - } - return format.matches(this.format) ? format : null; - } - } - else - return super.doSetFormat(format); - } - - /** - * Reads media data from this PushBufferStream into a specific - * Buffer without blocking. - * - * @param buffer the Buffer in which media data is to be read from - * this PushBufferStream - * @throws IOException if anything goes wrong while reading media data from - * this PushBufferStream into the specified buffer - */ - public void read(Buffer buffer) throws IOException - { - synchronized (dataSyncRoot) - { - if(data == null) - { - buffer.setLength(0); - return; - } - - Format bufferFormat = buffer.getFormat(); - - if(bufferFormat == null) - { - bufferFormat = getFormat(); - if(bufferFormat != null) - buffer.setFormat(bufferFormat); - } - if(bufferFormat instanceof AVFrameFormat) - { - if(nativePixelFormat == DSFormat.MJPG) - { - /* Initialize the FFmpeg MJPEG decoder if necessary. 
*/ - if(avctx == 0) - { - long avcodec - = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_MJPEG); - - avctx = FFmpeg.avcodec_alloc_context3(avcodec); - FFmpeg.avcodeccontext_set_workaround_bugs(avctx, - FFmpeg.FF_BUG_AUTODETECT); - - if (FFmpeg.avcodec_open2(avctx, avcodec) < 0) - { - throw new RuntimeException("" + - "Could not open codec CODEC_ID_MJPEG"); - } - - avframe = FFmpeg.avcodec_alloc_frame(); - } - - if(FFmpeg.avcodec_decode_video( - avctx, avframe, data.getPtr(), data.getLength()) != -1) - { - Object out = buffer.getData(); - - if (!(out instanceof AVFrame) - || (((AVFrame) out).getPtr() != avframe)) - { - buffer.setData(new AVFrame(avframe)); - } - } - - data.free(); - data = null; - } - else - { - if (AVFrame.read(buffer, bufferFormat, data) < 0) - data.free(); - /* - * XXX For the sake of safety, make sure that this instance does - * not reference the data instance as soon as it is set on the - * AVFrame. - */ - data = null; - } - } - else - { - Object o = buffer.getData(); - byte[] bytes; - int length = data.getLength(); - - if(o instanceof byte[]) - { - bytes = (byte[]) o; - if(bytes.length < length) - bytes = null; - } - else - bytes = null; - if(bytes == null) - { - bytes = new byte[length]; - buffer.setData(bytes); - } - - /* - * TODO Copy the media from the native memory into the Java - * heap. - */ - data.free(); - data = null; - - buffer.setLength(length); - buffer.setOffset(0); - } - - buffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME); - buffer.setTimeStamp(dataTimeStamp); - - if(!automaticallyDropsLateVideoFrames) - dataSyncRoot.notifyAll(); - } - } - - /** - * Calls {@link BufferTransferHandler#transferData(PushBufferStream)} from - * inside {@link #transferDataThread} so that the call is not made in - * DirectShow and we can drop late frames in the meantime. 
- */ - private void runInTransferDataThread() - { - boolean transferData = false; - FrameRateControl frameRateControl - = (FrameRateControl) - dataSource.getControl(FrameRateControl.class.getName()); - long transferDataTimeStamp = -1; - - while (Thread.currentThread().equals(transferDataThread)) - { - if (transferData) - { - BufferTransferHandler transferHandler = this.transferHandler; - - if (transferHandler != null) - { - /* - * Respect the frame rate specified through the - * FrameRateControl of the associated DataSource. - */ - if (frameRateControl != null) - { - float frameRate; - long newTransferDataTimeStamp - = System.currentTimeMillis(); - - if ((transferDataTimeStamp != -1) - && ((frameRate - = frameRateControl.getFrameRate()) - > 0)) - { - long minimumVideoFrameInterval - = (long) (1000 / frameRate); - - if (minimumVideoFrameInterval > 0) - { - long t - = newTransferDataTimeStamp - - transferDataTimeStamp; - - if ((t > 0) && (t < minimumVideoFrameInterval)) - { - boolean interrupted = false; - - try - { - Thread.sleep( - minimumVideoFrameInterval - t); - } - catch (InterruptedException ie) - { - interrupted = true; - } - if (interrupted) - Thread.currentThread().interrupt(); - continue; - } - } - } - - transferDataTimeStamp = newTransferDataTimeStamp; - } - - transferHandler.transferData(this); - } - - synchronized (dataSyncRoot) - { - if (data != null) - data.free(); - data = nextData; - dataTimeStamp = nextDataTimeStamp; - nextData = null; - } - } - - synchronized (dataSyncRoot) - { - if (data == null) - { - data = nextData; - dataTimeStamp = nextDataTimeStamp; - nextData = null; - } - if (data == null) - { - boolean interrupted = false; - - try - { - dataSyncRoot.wait(); - } - catch (InterruptedException iex) - { - interrupted = true; - } - if(interrupted) - Thread.currentThread().interrupt(); - - transferData = (data != null); - } - else - transferData = true; - } - } - } - - /** - * Process received frames from DirectShow capture device - * - * 
@param source pointer to the native DSCaptureDevice which is the - * source of the notification - * @param ptr native pointer to data - * @param length length of data - */ - private void SampleCB(long source, long ptr, int length) - { - boolean transferData = false; - - synchronized (dataSyncRoot) - { - if(!automaticallyDropsLateVideoFrames && (data != null)) - { - if (nextData != null) - { - nextData.free(); - nextData = null; - } - nextData = byteBufferPool.getBuffer(length); - if(nextData != null) - { - nextData.setLength( - DSCaptureDevice.samplecopy( - source, - ptr, nextData.getPtr(), length)); - nextDataTimeStamp = System.nanoTime(); - } - - return; - } - - if (data != null) - { - data.free(); - data = null; - } - data = byteBufferPool.getBuffer(length); - if(data != null) - { - data.setLength( - DSCaptureDevice.samplecopy( - source, - ptr, data.getPtr(), length)); - dataTimeStamp = System.nanoTime(); - } - - if (nextData != null) - { - nextData.free(); - nextData = null; - } - - if(automaticallyDropsLateVideoFrames) - transferData = (data != null); - else - { - transferData = false; - dataSyncRoot.notifyAll(); - } - } - - if(transferData) - { - BufferTransferHandler transferHandler = this.transferHandler; - - if(transferHandler != null) - transferHandler.transferData(this); - } - } - - /** - * Sets the DSCaptureDevice of this instance which identifies the - * DirectShow video capture device this SourceStream is to capture - * data from. 
- * - * @param device a DSCaptureDevice which identifies the DirectShow - * video capture device this SourceStream is to capture data from - * @throws IOException if anything goes wrong while setting the specified - * device on this instance - */ - void setDevice(DSCaptureDevice device) - throws IOException - { - if (this.device != device) - { - if (this.device != null) - disconnect(); - - this.device = device; - - if (this.device != null) - connect(); - } - } - - /** - * Sets a specific Format on the DSCaptureDevice of this - * instance. - * - * @param format the Format to set on the DSCaptureDevice - * of this instance - * @throws IOException if setting the specified format on the - * DSCaptureDevice of this instance fails - */ - private void setDeviceFormat(Format format) - throws IOException - { - if (format == null) - throw new IOException("format == null"); - else if (format instanceof AVFrameFormat) - { - AVFrameFormat avFrameFormat = (AVFrameFormat) format; - nativePixelFormat = avFrameFormat.getDeviceSystemPixFmt(); - Dimension size = avFrameFormat.getSize(); - - if (size == null) - throw new IOException("format.size == null"); - else - { - int hresult - = device.setFormat( - new DSFormat( - size.width, size.height, - avFrameFormat.getDeviceSystemPixFmt())); - - switch (hresult) - { - case DSCaptureDevice.S_FALSE: - case DSCaptureDevice.S_OK: - this.format = format; - if (logger.isDebugEnabled()) - { - logger.debug( - "Set format on DirectShowStream: " + format); - } - break; - default: - throwNewHResultException(hresult); - } - } - } - else - throw new IOException("!(format instanceof AVFrameFormat)"); - } - - /** - * Starts the transfer of media data from this PushBufferStream. 
- * - * @throws IOException if anything goes wrong while starting the transfer of - * media data from this PushBufferStream - */ - @Override - public void start() - throws IOException - { - super.start(); - - boolean started = false; - - try - { - setDeviceFormat(getFormat()); - - if(!automaticallyDropsLateVideoFrames) - { - if (transferDataThread == null) - { - transferDataThread - = new Thread(getClass().getSimpleName()) - { - @Override - public void run() - { - runInTransferDataThread(); - } - }; - transferDataThread.start(); - } - } - - device.start(); - - started = true; - } - finally - { - if (!started) - stop(); - } - } - - /** - * Stops the transfer of media data from this PushBufferStream. - * - * @throws IOException if anything goes wrong while stopping the transfer of - * media data from this PushBufferStream - */ - @Override - public void stop() - throws IOException - { - try - { - device.stop(); - - transferDataThread = null; - - synchronized (dataSyncRoot) - { - if (data != null) - { - data.free(); - data = null; - } - if (nextData != null) - { - nextData.free(); - nextData = null; - } - - if(!automaticallyDropsLateVideoFrames) - dataSyncRoot.notifyAll(); - } - } - finally - { - super.stop(); - - if(avctx != 0) - { - FFmpeg.avcodec_close(avctx); - FFmpeg.av_free(avctx); - avctx = 0; - } - - if(avframe != 0) - { - FFmpeg.avcodec_free_frame(avframe); - avframe = 0; - } - - byteBufferPool.drain(); - } - } - - /** - * Throws a new IOException the detail message of which describes - * a specific HRESULT value indicating a failure. 
- * - * @param hresult the HRESUlT to be described by the detail message - * of the new IOException to be thrown - * @throws IOException - */ - private void throwNewHResultException(int hresult) - throws IOException - { - throw new IOException( - "HRESULT 0x" + Long.toHexString(hresult & 0xffffffffL)); - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.directshow; + +import java.awt.*; +import java.io.*; + +import javax.media.*; +import javax.media.control.*; +import javax.media.protocol.*; + +import org.jitsi.impl.neomedia.codec.*; +import org.jitsi.impl.neomedia.codec.video.*; +import org.jitsi.impl.neomedia.jmfext.media.protocol.*; +import org.jitsi.util.*; + +/** + * Implements a PushBufferStream using DirectShow. + * + * @author Lyubomir Marinov + * @author Sebastien Vincent + */ +public class DirectShowStream + extends AbstractPushBufferStream +{ + /** + * The Logger used by the DirectShowStream class and its + * instances to print out debugging information. + */ + private static final Logger logger + = Logger.getLogger(DirectShowStream.class); + + /** + * Determines whether a specific Format appears to be suitable for + * attempts to be set on DirectShowStream instances. + *

+ * Note: If the method returns true, an actual attempt to + * set the specified format on an specific + * DirectShowStream instance may still fail but that will be + * because the finer-grained properties of the format are not + * supported by that DirectShowStream instance. + *

+ * + * @param format the Format to be checked whether it appears to be + * suitable for attempts to be set on DirectShowStream instances + * @return true if the specified format appears to be + * suitable for attempts to be set on DirectShowStream instance; + * otherwise, false + */ + static boolean isSupportedFormat(Format format) + { + if (format instanceof AVFrameFormat) + { + AVFrameFormat avFrameFormat = (AVFrameFormat) format; + long pixFmt = avFrameFormat.getDeviceSystemPixFmt(); + + if (pixFmt != -1) + { + Dimension size = avFrameFormat.getSize(); + + /* + * We will set the native format in doStart() because a + * connect-disconnect-connect sequence of the native capture + * device may reorder its formats in a different way. + * Consequently, in the absence of further calls to + * setFormat() by JMF, a crash may occur later (typically, + * during scaling) because of a wrong format. + */ + if (size != null) + return true; + } + } + return false; + } + + /** + * The indicator which determines whether {@link #delegate} + * automatically drops late frames. If false, we have to drop them + * ourselves because DirectShow will buffer them all and the video will + * be late. + */ + private final boolean automaticallyDropsLateVideoFrames = false; + + /** + * The pool of ByteBuffers this instances is using to transfer the + * media data captured by {@link #delegate} out of this instance + * through the Buffers specified in its {@link #read(Buffer)}. + */ + private final ByteBufferPool byteBufferPool = new ByteBufferPool(); + + /** + * The captured media data to be returned in {@link #read(Buffer)}. + */ + private ByteBuffer data; + + /** + * The Object which synchronizes the access to the + * {@link #data}-related fields of this instance. + */ + private final Object dataSyncRoot = new Object(); + + /** + * The time stamp in nanoseconds of {@link #data}. + */ + private long dataTimeStamp; + + /** + * Delegate class to handle video data. 
+ */ + private final DSCaptureDevice.ISampleGrabberCB delegate + = new DSCaptureDevice.ISampleGrabberCB() + { + @Override + public void SampleCB(long source, long ptr, int length) + { + DirectShowStream.this.SampleCB(source, ptr, length); + } + }; + + /** + * The DSCaptureDevice which identifies the DirectShow video + * capture device this SourceStream is to capture data from. + */ + private DSCaptureDevice device; + + /** + * The last-known Format of the media data made available by this + * PushBufferStream. + */ + private Format format; + + /** + * The captured media data to become the value of {@link #data} as soon as + * the latter becomes is consumed. Thus prepares this + * DirectShowStream to provide the latest available frame and not + * wait for DirectShow to capture a new one. + */ + private ByteBuffer nextData; + + /** + * The time stamp in nanoseconds of {@link #nextData}. + */ + private long nextDataTimeStamp; + + /** + * The Thread which is to call + * {@link BufferTransferHandler#transferData(PushBufferStream)} for this + * DirectShowStream so that the call is not made in DirectShow + * and we can drop late frames when + * {@link #automaticallyDropsLateVideoFrames} is false. + */ + private Thread transferDataThread; + + /** + * Native Video pixel format. + */ + private int nativePixelFormat = 0; + + /** + * The AVCodecContext of the MJPEG decoder. + */ + private long avctx = 0; + + /** + * The AVFrame which represents the media data decoded by the MJPEG + * decoder/{@link #avctx}. + */ + private long avframe = 0; + + /** + * Initializes a new DirectShowStream instance which is to have its + * Format-related information abstracted by a specific + * FormatControl. 
+ * + * @param dataSource the DataSource which is creating the new + * instance so that it becomes one of its streams + * @param formatControl the FormatControl which is to abstract the + * Format-related information of the new instance + */ + DirectShowStream(DataSource dataSource, FormatControl formatControl) + { + super(dataSource, formatControl); + } + + /** + * Connects this SourceStream to the DirectShow video capture + * device identified by {@link #device}. + * + * @throws IOException if anything goes wrong while this + * SourceStream connects to the DirectShow video capture device + * identified by device + */ + private void connect() + throws IOException + { + if (device == null) + throw new IOException("device == null"); + else + device.setDelegate(delegate); + } + + /** + * Disconnects this SourceStream from the DirectShow video capture + * device it has previously connected to during the execution of + * {@link #connect()}. + * + * @throws IOException if anything goes wrong while this + * SourceStream disconnects from the DirectShow video capture + * device it has previously connected to during the execution of + * connect() + */ + private void disconnect() + throws IOException + { + try + { + stop(); + } + finally + { + if (device != null) + device.setDelegate(null); + } + } + + /** + * Gets the Format of this PushBufferStream as directly + * known by it. + * + * @return the Format of this PushBufferStream as directly + * known by it or null if this PushBufferStream does not + * directly know its Format and it relies on the + * PushBufferDataSource which created it to report its + * Format + */ + @Override + protected Format doGetFormat() + { + return (format == null) ? super.doGetFormat() : format; + } + + /** + * {@inheritDoc} + * + * Overrides the super implementation to enable setting the Format + * of this DirectShowStream after the DataSource which + * provides it has been connected. 
+ */ + @Override + protected Format doSetFormat(Format format) + { + if (isSupportedFormat(format)) + { + if (device == null) + return format; + else + { + try + { + setDeviceFormat(format); + } + catch (IOException ioe) + { + logger.error( + "Failed to set format on DirectShowStream: " + + format, + ioe); + /* + * Ignore the exception because the method is to report + * failures by returning null (which will be achieved + * outside the catch block). + */ + } + return format.matches(this.format) ? format : null; + } + } + else + return super.doSetFormat(format); + } + + /** + * Reads media data from this PushBufferStream into a specific + * Buffer without blocking. + * + * @param buffer the Buffer in which media data is to be read from + * this PushBufferStream + * @throws IOException if anything goes wrong while reading media data from + * this PushBufferStream into the specified buffer + */ + public void read(Buffer buffer) throws IOException + { + synchronized (dataSyncRoot) + { + if(data == null) + { + buffer.setLength(0); + return; + } + + Format bufferFormat = buffer.getFormat(); + + if(bufferFormat == null) + { + bufferFormat = getFormat(); + if(bufferFormat != null) + buffer.setFormat(bufferFormat); + } + if(bufferFormat instanceof AVFrameFormat) + { + if(nativePixelFormat == DSFormat.MJPG) + { + /* Initialize the FFmpeg MJPEG decoder if necessary. 
*/ + if(avctx == 0) + { + long avcodec + = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_MJPEG); + + avctx = FFmpeg.avcodec_alloc_context3(avcodec); + FFmpeg.avcodeccontext_set_workaround_bugs(avctx, + FFmpeg.FF_BUG_AUTODETECT); + + if (FFmpeg.avcodec_open2(avctx, avcodec) < 0) + { + throw new RuntimeException("" + + "Could not open codec CODEC_ID_MJPEG"); + } + + avframe = FFmpeg.avcodec_alloc_frame(); + } + + if(FFmpeg.avcodec_decode_video( + avctx, avframe, data.getPtr(), data.getLength()) != -1) + { + Object out = buffer.getData(); + + if (!(out instanceof AVFrame) + || (((AVFrame) out).getPtr() != avframe)) + { + buffer.setData(new AVFrame(avframe)); + } + } + + data.free(); + data = null; + } + else + { + if (AVFrame.read(buffer, bufferFormat, data) < 0) + data.free(); + /* + * XXX For the sake of safety, make sure that this instance does + * not reference the data instance as soon as it is set on the + * AVFrame. + */ + data = null; + } + } + else + { + Object o = buffer.getData(); + byte[] bytes; + int length = data.getLength(); + + if(o instanceof byte[]) + { + bytes = (byte[]) o; + if(bytes.length < length) + bytes = null; + } + else + bytes = null; + if(bytes == null) + { + bytes = new byte[length]; + buffer.setData(bytes); + } + + /* + * TODO Copy the media from the native memory into the Java + * heap. + */ + data.free(); + data = null; + + buffer.setLength(length); + buffer.setOffset(0); + } + + buffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME); + buffer.setTimeStamp(dataTimeStamp); + + if(!automaticallyDropsLateVideoFrames) + dataSyncRoot.notifyAll(); + } + } + + /** + * Calls {@link BufferTransferHandler#transferData(PushBufferStream)} from + * inside {@link #transferDataThread} so that the call is not made in + * DirectShow and we can drop late frames in the meantime. 
+ */ + private void runInTransferDataThread() + { + boolean transferData = false; + FrameRateControl frameRateControl + = (FrameRateControl) + dataSource.getControl(FrameRateControl.class.getName()); + long transferDataTimeStamp = -1; + + while (Thread.currentThread().equals(transferDataThread)) + { + if (transferData) + { + BufferTransferHandler transferHandler = this.transferHandler; + + if (transferHandler != null) + { + /* + * Respect the frame rate specified through the + * FrameRateControl of the associated DataSource. + */ + if (frameRateControl != null) + { + float frameRate; + long newTransferDataTimeStamp + = System.currentTimeMillis(); + + if ((transferDataTimeStamp != -1) + && ((frameRate + = frameRateControl.getFrameRate()) + > 0)) + { + long minimumVideoFrameInterval + = (long) (1000 / frameRate); + + if (minimumVideoFrameInterval > 0) + { + long t + = newTransferDataTimeStamp + - transferDataTimeStamp; + + if ((t > 0) && (t < minimumVideoFrameInterval)) + { + boolean interrupted = false; + + try + { + Thread.sleep( + minimumVideoFrameInterval - t); + } + catch (InterruptedException ie) + { + interrupted = true; + } + if (interrupted) + Thread.currentThread().interrupt(); + continue; + } + } + } + + transferDataTimeStamp = newTransferDataTimeStamp; + } + + transferHandler.transferData(this); + } + + synchronized (dataSyncRoot) + { + if (data != null) + data.free(); + data = nextData; + dataTimeStamp = nextDataTimeStamp; + nextData = null; + } + } + + synchronized (dataSyncRoot) + { + if (data == null) + { + data = nextData; + dataTimeStamp = nextDataTimeStamp; + nextData = null; + } + if (data == null) + { + boolean interrupted = false; + + try + { + dataSyncRoot.wait(); + } + catch (InterruptedException iex) + { + interrupted = true; + } + if(interrupted) + Thread.currentThread().interrupt(); + + transferData = (data != null); + } + else + transferData = true; + } + } + } + + /** + * Process received frames from DirectShow capture device + * + * 
@param source pointer to the native DSCaptureDevice which is the + * source of the notification + * @param ptr native pointer to data + * @param length length of data + */ + private void SampleCB(long source, long ptr, int length) + { + boolean transferData = false; + + synchronized (dataSyncRoot) + { + if(!automaticallyDropsLateVideoFrames && (data != null)) + { + if (nextData != null) + { + nextData.free(); + nextData = null; + } + nextData = byteBufferPool.getBuffer(length); + if(nextData != null) + { + nextData.setLength( + DSCaptureDevice.samplecopy( + source, + ptr, nextData.getPtr(), length)); + nextDataTimeStamp = System.nanoTime(); + } + + return; + } + + if (data != null) + { + data.free(); + data = null; + } + data = byteBufferPool.getBuffer(length); + if(data != null) + { + data.setLength( + DSCaptureDevice.samplecopy( + source, + ptr, data.getPtr(), length)); + dataTimeStamp = System.nanoTime(); + } + + if (nextData != null) + { + nextData.free(); + nextData = null; + } + + if(automaticallyDropsLateVideoFrames) + transferData = (data != null); + else + { + transferData = false; + dataSyncRoot.notifyAll(); + } + } + + if(transferData) + { + BufferTransferHandler transferHandler = this.transferHandler; + + if(transferHandler != null) + transferHandler.transferData(this); + } + } + + /** + * Sets the DSCaptureDevice of this instance which identifies the + * DirectShow video capture device this SourceStream is to capture + * data from. 
+ * + * @param device a DSCaptureDevice which identifies the DirectShow + * video capture device this SourceStream is to capture data from + * @throws IOException if anything goes wrong while setting the specified + * device on this instance + */ + void setDevice(DSCaptureDevice device) + throws IOException + { + if (this.device != device) + { + if (this.device != null) + disconnect(); + + this.device = device; + + if (this.device != null) + connect(); + } + } + + /** + * Sets a specific Format on the DSCaptureDevice of this + * instance. + * + * @param format the Format to set on the DSCaptureDevice + * of this instance + * @throws IOException if setting the specified format on the + * DSCaptureDevice of this instance fails + */ + private void setDeviceFormat(Format format) + throws IOException + { + if (format == null) + throw new IOException("format == null"); + else if (format instanceof AVFrameFormat) + { + AVFrameFormat avFrameFormat = (AVFrameFormat) format; + nativePixelFormat = avFrameFormat.getDeviceSystemPixFmt(); + Dimension size = avFrameFormat.getSize(); + + if (size == null) + throw new IOException("format.size == null"); + else + { + int hresult + = device.setFormat( + new DSFormat( + size.width, size.height, + avFrameFormat.getDeviceSystemPixFmt())); + + switch (hresult) + { + case DSCaptureDevice.S_FALSE: + case DSCaptureDevice.S_OK: + this.format = format; + if (logger.isDebugEnabled()) + { + logger.debug( + "Set format on DirectShowStream: " + format); + } + break; + default: + throwNewHResultException(hresult); + } + } + } + else + throw new IOException("!(format instanceof AVFrameFormat)"); + } + + /** + * Starts the transfer of media data from this PushBufferStream. 
+ * + * @throws IOException if anything goes wrong while starting the transfer of + * media data from this PushBufferStream + */ + @Override + public void start() + throws IOException + { + super.start(); + + boolean started = false; + + try + { + setDeviceFormat(getFormat()); + + if(!automaticallyDropsLateVideoFrames) + { + if (transferDataThread == null) + { + transferDataThread + = new Thread(getClass().getSimpleName()) + { + @Override + public void run() + { + runInTransferDataThread(); + } + }; + transferDataThread.start(); + } + } + + device.start(); + + started = true; + } + finally + { + if (!started) + stop(); + } + } + + /** + * Stops the transfer of media data from this PushBufferStream. + * + * @throws IOException if anything goes wrong while stopping the transfer of + * media data from this PushBufferStream + */ + @Override + public void stop() + throws IOException + { + try + { + device.stop(); + + transferDataThread = null; + + synchronized (dataSyncRoot) + { + if (data != null) + { + data.free(); + data = null; + } + if (nextData != null) + { + nextData.free(); + nextData = null; + } + + if(!automaticallyDropsLateVideoFrames) + dataSyncRoot.notifyAll(); + } + } + finally + { + super.stop(); + + if(avctx != 0) + { + FFmpeg.avcodec_close(avctx); + FFmpeg.av_free(avctx); + avctx = 0; + } + + if(avframe != 0) + { + FFmpeg.avcodec_free_frame(avframe); + avframe = 0; + } + + byteBufferPool.drain(); + } + } + + /** + * Throws a new IOException the detail message of which describes + * a specific HRESULT value indicating a failure. 
+ * + * @param hresult the HRESUlT to be described by the detail message + * of the new IOException to be thrown + * @throws IOException + */ + private void throwNewHResultException(int hresult) + throws IOException + { + throw new IOException( + "HRESULT 0x" + Long.toHexString(hresult & 0xffffffffL)); + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java index bb885534c..4c8673da2 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,212 +13,212 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; - -import java.io.*; -import java.util.*; - -import javax.media.*; -import javax.media.control.*; -import javax.media.format.*; - -import org.jitsi.impl.neomedia.device.*; -import org.jitsi.impl.neomedia.jmfext.media.protocol.*; -import org.jitsi.util.*; - -/** - * Implements CaptureDevice and DataSource using Windows Audio - * Session API (WASAPI) and related Core Audio APIs such as Multimedia Device - * (MMDevice) API. - * - * @author Lyubomir Marinov - */ -public class DataSource - extends AbstractPushBufferCaptureDevice -{ - /** - * The Logger used by the DataSource class and its - * instances to log debugging information. - */ - private static final Logger logger = Logger.getLogger(DataSource.class); - - /** - * The indicator which determines whether the voice capture DMO is to be - * used to perform echo cancellation and/or noise reduction. - */ - final boolean aec; - - /** - * The WASAPISystem which has contributed this - * CaptureDevice/DataSource. 
- */ - final WASAPISystem audioSystem; - - /** - * Initializes a new DataSource instance. - */ - public DataSource() - { - this(null); - } - - /** - * Initializes a new DataSource instance with a specific - * MediaLocator. - * - * @param locator the MediaLocator to initialize the new instance - * with - */ - public DataSource(MediaLocator locator) - { - super(locator); - - audioSystem - = (WASAPISystem) - AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_WASAPI); - aec = audioSystem.isDenoise() || audioSystem.isEchoCancel(); - } - - /** - * {@inheritDoc} - */ - @Override - protected WASAPIStream createStream( - int streamIndex, - FormatControl formatControl) - { - return new WASAPIStream(this, formatControl); - } - - /** - * {@inheritDoc} - */ - @Override - protected void doConnect() - throws IOException - { - super.doConnect(); - - MediaLocator locator = getLocator(); - - synchronized (getStreamSyncRoot()) - { - for (Object stream : getStreams()) - ((WASAPIStream) stream).setLocator(locator); - } - } - - /** - * {@inheritDoc} - */ - @Override - protected void doDisconnect() - { - try - { - synchronized (getStreamSyncRoot()) - { - for (Object stream : getStreams()) - { - try - { - ((WASAPIStream) stream).setLocator(null); - } - catch (IOException ioe) - { - logger.error( - "Failed to disconnect " - + stream.getClass().getName(), - ioe); - } - } - } - } - finally - { - super.doDisconnect(); - } - } - - /** - * Gets the Formats of media data supported by the audio endpoint - * device associated with this instance. - * - * @return the Formats of media data supported by the audio - * endpoint device associated with this instance - */ - Format[] getIAudioClientSupportedFormats() - { - return getIAudioClientSupportedFormats(/* streamIndex */ 0); - } - - /** - * Gets the Formats of media data supported by the audio endpoint - * device associated with this instance. 
- * - * @param streamIndex the index of the SourceStream within the list - * of SourceStreams of this DataSource on behalf of which - * the query is being made - * @return the Formats of media data supported by the audio - * endpoint device associated with this instance - */ - private Format[] getIAudioClientSupportedFormats(int streamIndex) - { - Format[] superSupportedFormats = super.getSupportedFormats(streamIndex); - - /* - * If the capture endpoint device reports to support no Format, then - * acoustic echo cancellation (AEC) will surely not work. - */ - if ((superSupportedFormats == null) - || (superSupportedFormats.length == 0)) - return superSupportedFormats; - - // Return the NativelySupportedAudioFormat instances only. - List supportedFormats - = new ArrayList(superSupportedFormats.length); - - for (Format format : superSupportedFormats) - { - if ((format instanceof NativelySupportedAudioFormat) - && !supportedFormats.contains(format)) - { - supportedFormats.add(format); - } - } - - int supportedFormatCount = supportedFormats.size(); - - return - (supportedFormatCount == superSupportedFormats.length) - ? superSupportedFormats - : supportedFormats.toArray(new Format[supportedFormatCount]); - } - - /** - * {@inheritDoc} - * - * The Formats supported by this - * CaptureDevice/DataSource are either the ones supported - * by the capture endpoint device or the ones supported by the voice capture - * DMO that implements the acoustic echo cancellation (AEC) feature - * depending on whether the feature in question is disabled or enabled. 
- */ - @Override - protected Format[] getSupportedFormats(int streamIndex) - { - if (aec) - { - List aecSupportedFormats - = audioSystem.getAECSupportedFormats(); - - return - aecSupportedFormats.toArray( - new Format[aecSupportedFormats.size()]); - } - else - { - return getIAudioClientSupportedFormats(streamIndex); - } - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; + +import java.io.*; +import java.util.*; + +import javax.media.*; +import javax.media.control.*; +import javax.media.format.*; + +import org.jitsi.impl.neomedia.device.*; +import org.jitsi.impl.neomedia.jmfext.media.protocol.*; +import org.jitsi.util.*; + +/** + * Implements CaptureDevice and DataSource using Windows Audio + * Session API (WASAPI) and related Core Audio APIs such as Multimedia Device + * (MMDevice) API. + * + * @author Lyubomir Marinov + */ +public class DataSource + extends AbstractPushBufferCaptureDevice +{ + /** + * The Logger used by the DataSource class and its + * instances to log debugging information. + */ + private static final Logger logger = Logger.getLogger(DataSource.class); + + /** + * The indicator which determines whether the voice capture DMO is to be + * used to perform echo cancellation and/or noise reduction. + */ + final boolean aec; + + /** + * The WASAPISystem which has contributed this + * CaptureDevice/DataSource. + */ + final WASAPISystem audioSystem; + + /** + * Initializes a new DataSource instance. + */ + public DataSource() + { + this(null); + } + + /** + * Initializes a new DataSource instance with a specific + * MediaLocator. 
+ * + * @param locator the MediaLocator to initialize the new instance + * with + */ + public DataSource(MediaLocator locator) + { + super(locator); + + audioSystem + = (WASAPISystem) + AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_WASAPI); + aec = audioSystem.isDenoise() || audioSystem.isEchoCancel(); + } + + /** + * {@inheritDoc} + */ + @Override + protected WASAPIStream createStream( + int streamIndex, + FormatControl formatControl) + { + return new WASAPIStream(this, formatControl); + } + + /** + * {@inheritDoc} + */ + @Override + protected void doConnect() + throws IOException + { + super.doConnect(); + + MediaLocator locator = getLocator(); + + synchronized (getStreamSyncRoot()) + { + for (Object stream : getStreams()) + ((WASAPIStream) stream).setLocator(locator); + } + } + + /** + * {@inheritDoc} + */ + @Override + protected void doDisconnect() + { + try + { + synchronized (getStreamSyncRoot()) + { + for (Object stream : getStreams()) + { + try + { + ((WASAPIStream) stream).setLocator(null); + } + catch (IOException ioe) + { + logger.error( + "Failed to disconnect " + + stream.getClass().getName(), + ioe); + } + } + } + } + finally + { + super.doDisconnect(); + } + } + + /** + * Gets the Formats of media data supported by the audio endpoint + * device associated with this instance. + * + * @return the Formats of media data supported by the audio + * endpoint device associated with this instance + */ + Format[] getIAudioClientSupportedFormats() + { + return getIAudioClientSupportedFormats(/* streamIndex */ 0); + } + + /** + * Gets the Formats of media data supported by the audio endpoint + * device associated with this instance. 
+ * + * @param streamIndex the index of the SourceStream within the list + * of SourceStreams of this DataSource on behalf of which + * the query is being made + * @return the Formats of media data supported by the audio + * endpoint device associated with this instance + */ + private Format[] getIAudioClientSupportedFormats(int streamIndex) + { + Format[] superSupportedFormats = super.getSupportedFormats(streamIndex); + + /* + * If the capture endpoint device reports to support no Format, then + * acoustic echo cancellation (AEC) will surely not work. + */ + if ((superSupportedFormats == null) + || (superSupportedFormats.length == 0)) + return superSupportedFormats; + + // Return the NativelySupportedAudioFormat instances only. + List supportedFormats + = new ArrayList(superSupportedFormats.length); + + for (Format format : superSupportedFormats) + { + if ((format instanceof NativelySupportedAudioFormat) + && !supportedFormats.contains(format)) + { + supportedFormats.add(format); + } + } + + int supportedFormatCount = supportedFormats.size(); + + return + (supportedFormatCount == superSupportedFormats.length) + ? superSupportedFormats + : supportedFormats.toArray(new Format[supportedFormatCount]); + } + + /** + * {@inheritDoc} + * + * The Formats supported by this + * CaptureDevice/DataSource are either the ones supported + * by the capture endpoint device or the ones supported by the voice capture + * DMO that implements the acoustic echo cancellation (AEC) feature + * depending on whether the feature in question is disabled or enabled. 
+ */ + @Override + protected Format[] getSupportedFormats(int streamIndex) + { + if (aec) + { + List aecSupportedFormats + = audioSystem.getAECSupportedFormats(); + + return + aecSupportedFormats.toArray( + new Format[aecSupportedFormats.size()]); + } + else + { + return getIAudioClientSupportedFormats(streamIndex); + } + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java index 83b46cbde..1d93686bb 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,70 +13,70 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; - -/** - * Implements an Exception which represents an HRESULT value. - * - * @author Lyubomir Marinov - */ -public class HResultException - extends Exception -{ - /** - * The HRESULT value represented by this instance. - */ - private final int hresult; - - /** - * Initializes a new HResultException which is to represent a - * specific HRESULT value. The detail message of the new instance - * is derived from the the specified HRESULT value. - * - * @param hresult the HRESULT value to be represented by the new - * instance - */ - public HResultException(int hresult) - { - this(hresult, toString(hresult)); - } - - /** - * Initializes a new HResultException which is to represent a - * specific HRESULT value and have a specific detail message. 
- * - * @param hresult the HRESULT value to be represented by the new - * instance - * @param message the detail message to initialize the new instance with - */ - public HResultException(int hresult, String message) - { - super(message); - - this.hresult = hresult; - } - - /** - * Gets the HRESULT value represented by this instance. - * - * @return the HRESULT value represented by this instance - */ - public int getHResult() - { - return hresult; - } - - /** - * Returns a String representation of a specific - * HRESULT value. - * - * @param hresult the HRESULT value of which a String - * representation is to be returned - * @return a String representation of the specified - * hresult - */ - public static String toString(int hresult) - { - return "0x" + Long.toHexString(hresult & 0xffffffffL); - } -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; + +/** + * Implements an Exception which represents an HRESULT value. + * + * @author Lyubomir Marinov + */ +public class HResultException + extends Exception +{ + /** + * The HRESULT value represented by this instance. + */ + private final int hresult; + + /** + * Initializes a new HResultException which is to represent a + * specific HRESULT value. The detail message of the new instance + * is derived from the the specified HRESULT value. + * + * @param hresult the HRESULT value to be represented by the new + * instance + */ + public HResultException(int hresult) + { + this(hresult, toString(hresult)); + } + + /** + * Initializes a new HResultException which is to represent a + * specific HRESULT value and have a specific detail message. + * + * @param hresult the HRESULT value to be represented by the new + * instance + * @param message the detail message to initialize the new instance with + */ + public HResultException(int hresult, String message) + { + super(message); + + this.hresult = hresult; + } + + /** + * Gets the HRESULT value represented by this instance. 
+ * + * @return the HRESULT value represented by this instance + */ + public int getHResult() + { + return hresult; + } + + /** + * Returns a String representation of a specific + * HRESULT value. + * + * @param hresult the HRESULT value of which a String + * representation is to be returned + * @return a String representation of the specified + * hresult + */ + public static String toString(int hresult) + { + return "0x" + Long.toHexString(hresult & 0xffffffffL); + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java index 91393dd11..051b676e4 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,24 +13,24 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; - -/** - * Provides notifications when an audio endpoint device is added or removed, - * when the state or properties of an endpoint device change, or when there is a - * change in the default role assigned to an endpoint device. 
- * - * @author Lyubomir Marinov - */ -public interface IMMNotificationClient -{ - void OnDefaultDeviceChanged(int flow, int role, String pwstrDefaultDevice); - - void OnDeviceAdded(String pwstrDeviceId); - - void OnDeviceRemoved(String pwstrDeviceId); - - void OnDeviceStateChanged(String pwstrDeviceId, int dwNewState); - - void OnPropertyValueChanged(String pwstrDeviceId, long key); -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; + +/** + * Provides notifications when an audio endpoint device is added or removed, + * when the state or properties of an endpoint device change, or when there is a + * change in the default role assigned to an endpoint device. + * + * @author Lyubomir Marinov + */ +public interface IMMNotificationClient +{ + void OnDefaultDeviceChanged(int flow, int role, String pwstrDefaultDevice); + + void OnDeviceAdded(String pwstrDeviceId); + + void OnDeviceRemoved(String pwstrDeviceId); + + void OnDeviceStateChanged(String pwstrDeviceId, int dwNewState); + + void OnPropertyValueChanged(String pwstrDeviceId, long key); +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java index 63875a4ca..ffa95323c 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,247 +13,247 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; - -import java.util.*; - -import org.jitsi.util.*; - -/** - * Implements the Java counterpart of an IMMNotificationClient instance - * statically allocated by the native counterpart of {@link WASAPI} and - * automatically registered with all IMMDeviceEnumerator instances. - * Invocations of methods on the IMMNotificationClient instance by - * Windows Audio Session API (WASAPI) are forwarded by WASAPI to the - * respective static methods of the MMNotificationClient class. - * - * @author Lyubomir Marinov - */ -public class MMNotificationClient -{ - /** - * The Logger used by the MMNotificationClient class to - * log debug information. - */ - private static final Logger logger - = Logger.getLogger(MMNotificationClient.class); - - /** - * The set of IMMNotificationClients to be notified when an audio - * endpoint device is added or removed, when the state or properties of an - * endpoint device change, or when there is a change in the default role - * assigned to an endpoint device. - */ - private static Collection pNotifySet; - - public static void OnDefaultDeviceChanged( - int flow, - int role, - String pwstrDefaultDevice) - { - // TODO Auto-generated method stub - } - - public static void OnDeviceAdded(String pwstrDeviceId) - { - Iterable pNotifySet; - - synchronized (MMNotificationClient.class) - { - pNotifySet = MMNotificationClient.pNotifySet; - } - - if (pNotifySet != null) - { - for (IMMNotificationClient pNotify : pNotifySet) - { - try - { - pNotify.OnDeviceAdded(pwstrDeviceId); - } - catch (Throwable t) - { - /* - * XXX The native counterpart of MMNotificationClient which - * normally invokes the method will eventually call - * ExceptionClear anyway. 
- */ - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.error( - "An IMMNotificationClient failed to normally" - + " complete the handling of an" - + " OnDeviceAdded notification.", - t); - } - } - } - } - } - - public static void OnDeviceRemoved(String pwstrDeviceId) - { - Iterable pNotifySet; - - synchronized (MMNotificationClient.class) - { - pNotifySet = MMNotificationClient.pNotifySet; - } - - if (pNotifySet != null) - { - for (IMMNotificationClient pNotify : pNotifySet) - { - try - { - pNotify.OnDeviceRemoved(pwstrDeviceId); - } - catch (Throwable t) - { - /* - * XXX The native counterpart of MMNotificationClient which - * normally invokes the method will eventually call - * ExceptionClear anyway. - */ - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.error( - "An IMMNotificationClient failed to normally" - + " complete the handling of an" - + " OnDeviceRemoved notification.", - t); - } - } - } - } - } - - public static void OnDeviceStateChanged( - String pwstrDeviceId, - int dwNewState) - { - Iterable pNotifySet; - - synchronized (MMNotificationClient.class) - { - pNotifySet = MMNotificationClient.pNotifySet; - } - - if (pNotifySet != null) - { - for (IMMNotificationClient pNotify : pNotifySet) - { - try - { - pNotify.OnDeviceStateChanged(pwstrDeviceId, dwNewState); - } - catch (Throwable t) - { - /* - * XXX The native counterpart of MMNotificationClient which - * normally invokes the method will eventually call - * ExceptionClear anyway. 
- */ - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.error( - "An IMMNotificationClient failed to normally" - + " complete the handling of an" - + " OnDeviceStateChanged notification.", - t); - } - } - } - } - } - - public static void OnPropertyValueChanged(String pwstrDeviceId, long key) - { - // TODO Auto-generated method stub - } - - /** - * Registers a specific IMMNotificationClient to be notified when - * an audio endpoint device is added or removed, when the state or - * properties of an endpoint device change, or when there is a change in the - * default role assigned to an endpoint device. - * - * @param pNotify the IMMNotificationClient to register - */ - public static void RegisterEndpointNotificationCallback( - IMMNotificationClient pNotify) - { - if (pNotify == null) - throw new NullPointerException("pNotify"); - - synchronized (MMNotificationClient.class) - { - Collection newPNotifySet; - - if (pNotifySet == null) - newPNotifySet = new ArrayList(); - else if (pNotifySet.contains(pNotify)) - return; - else - { - newPNotifySet - = new ArrayList( - pNotifySet.size() + 1); - newPNotifySet.addAll(pNotifySet); - } - if (newPNotifySet.add(pNotify)) - pNotifySet = newPNotifySet; - } - } - - /** - * Deletes the registration of a specific IMMNotificationClient - * that the client registered in a previous call to - * {@link #RegisterEndpointNotificationCallback(IMMNotificationClient)}. - * - * @param pNotify the IMMNotificationClient to delete the - * registration of - */ - public static void UnregisterEndpointNotificationCallback( - IMMNotificationClient pNotify) - { - if (pNotify == null) - throw new NullPointerException("pNotify"); - - synchronized (MMNotificationClient.class) - { - /* - * XXX The implementation bellow is hardly optimal because it - * consecutively employs the contains and remove Collection methods - * each of which performs a linear search for one and the same - * element in effectively the same set of elements. 
Anyway, the - * unregistering of IMMNotificationClients will very occur much less - * often than notification deliveries. - */ - if ((pNotifySet != null) && pNotifySet.contains(pNotify)) - { - if (pNotifySet.size() == 1) - pNotifySet = null; - else - { - Collection newPNotifySet - = new ArrayList(pNotifySet); - - if (newPNotifySet.remove(pNotify)) - pNotifySet = newPNotifySet; - } - } - } - } - - /** - * Prevents the initialization of MMNotificationClient instances. - */ - private MMNotificationClient() {} -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; + +import java.util.*; + +import org.jitsi.util.*; + +/** + * Implements the Java counterpart of an IMMNotificationClient instance + * statically allocated by the native counterpart of {@link WASAPI} and + * automatically registered with all IMMDeviceEnumerator instances. + * Invocations of methods on the IMMNotificationClient instance by + * Windows Audio Session API (WASAPI) are forwarded by WASAPI to the + * respective static methods of the MMNotificationClient class. + * + * @author Lyubomir Marinov + */ +public class MMNotificationClient +{ + /** + * The Logger used by the MMNotificationClient class to + * log debug information. + */ + private static final Logger logger + = Logger.getLogger(MMNotificationClient.class); + + /** + * The set of IMMNotificationClients to be notified when an audio + * endpoint device is added or removed, when the state or properties of an + * endpoint device change, or when there is a change in the default role + * assigned to an endpoint device. 
+ */ + private static Collection pNotifySet; + + public static void OnDefaultDeviceChanged( + int flow, + int role, + String pwstrDefaultDevice) + { + // TODO Auto-generated method stub + } + + public static void OnDeviceAdded(String pwstrDeviceId) + { + Iterable pNotifySet; + + synchronized (MMNotificationClient.class) + { + pNotifySet = MMNotificationClient.pNotifySet; + } + + if (pNotifySet != null) + { + for (IMMNotificationClient pNotify : pNotifySet) + { + try + { + pNotify.OnDeviceAdded(pwstrDeviceId); + } + catch (Throwable t) + { + /* + * XXX The native counterpart of MMNotificationClient which + * normally invokes the method will eventually call + * ExceptionClear anyway. + */ + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.error( + "An IMMNotificationClient failed to normally" + + " complete the handling of an" + + " OnDeviceAdded notification.", + t); + } + } + } + } + } + + public static void OnDeviceRemoved(String pwstrDeviceId) + { + Iterable pNotifySet; + + synchronized (MMNotificationClient.class) + { + pNotifySet = MMNotificationClient.pNotifySet; + } + + if (pNotifySet != null) + { + for (IMMNotificationClient pNotify : pNotifySet) + { + try + { + pNotify.OnDeviceRemoved(pwstrDeviceId); + } + catch (Throwable t) + { + /* + * XXX The native counterpart of MMNotificationClient which + * normally invokes the method will eventually call + * ExceptionClear anyway. 
+ */ + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.error( + "An IMMNotificationClient failed to normally" + + " complete the handling of an" + + " OnDeviceRemoved notification.", + t); + } + } + } + } + } + + public static void OnDeviceStateChanged( + String pwstrDeviceId, + int dwNewState) + { + Iterable pNotifySet; + + synchronized (MMNotificationClient.class) + { + pNotifySet = MMNotificationClient.pNotifySet; + } + + if (pNotifySet != null) + { + for (IMMNotificationClient pNotify : pNotifySet) + { + try + { + pNotify.OnDeviceStateChanged(pwstrDeviceId, dwNewState); + } + catch (Throwable t) + { + /* + * XXX The native counterpart of MMNotificationClient which + * normally invokes the method will eventually call + * ExceptionClear anyway. + */ + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.error( + "An IMMNotificationClient failed to normally" + + " complete the handling of an" + + " OnDeviceStateChanged notification.", + t); + } + } + } + } + } + + public static void OnPropertyValueChanged(String pwstrDeviceId, long key) + { + // TODO Auto-generated method stub + } + + /** + * Registers a specific IMMNotificationClient to be notified when + * an audio endpoint device is added or removed, when the state or + * properties of an endpoint device change, or when there is a change in the + * default role assigned to an endpoint device. 
+ * + * @param pNotify the IMMNotificationClient to register + */ + public static void RegisterEndpointNotificationCallback( + IMMNotificationClient pNotify) + { + if (pNotify == null) + throw new NullPointerException("pNotify"); + + synchronized (MMNotificationClient.class) + { + Collection newPNotifySet; + + if (pNotifySet == null) + newPNotifySet = new ArrayList(); + else if (pNotifySet.contains(pNotify)) + return; + else + { + newPNotifySet + = new ArrayList( + pNotifySet.size() + 1); + newPNotifySet.addAll(pNotifySet); + } + if (newPNotifySet.add(pNotify)) + pNotifySet = newPNotifySet; + } + } + + /** + * Deletes the registration of a specific IMMNotificationClient + * that the client registered in a previous call to + * {@link #RegisterEndpointNotificationCallback(IMMNotificationClient)}. + * + * @param pNotify the IMMNotificationClient to delete the + * registration of + */ + public static void UnregisterEndpointNotificationCallback( + IMMNotificationClient pNotify) + { + if (pNotify == null) + throw new NullPointerException("pNotify"); + + synchronized (MMNotificationClient.class) + { + /* + * XXX The implementation bellow is hardly optimal because it + * consecutively employs the contains and remove Collection methods + * each of which performs a linear search for one and the same + * element in effectively the same set of elements. Anyway, the + * unregistering of IMMNotificationClients will very occur much less + * often than notification deliveries. + */ + if ((pNotifySet != null) && pNotifySet.contains(pNotify)) + { + if (pNotifySet.size() == 1) + pNotifySet = null; + else + { + Collection newPNotifySet + = new ArrayList(pNotifySet); + + if (newPNotifySet.remove(pNotify)) + pNotifySet = newPNotifySet; + } + } + } + } + + /** + * Prevents the initialization of MMNotificationClient instances. 
+ */ + private MMNotificationClient() {} +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java index be2851a66..3cbc6190e 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,430 +13,430 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; - -import org.jitsi.util.*; - -/** - * Defines the native interface to Windows Audio Session API (WASAPI) and - * related Core Audio APIs such as Multimedia Device (MMDevice) API as used by - * WASAPISystem and its associated CaptureDevice, - * DataSource and Renderer implementations. - * - * @author Lyubomir Marinov - */ -public class WASAPI -{ - public static final int AUDCLNT_E_NOT_STOPPED; - - public static final int AUDCLNT_SHAREMODE_SHARED = 0; - - public static final int AUDCLNT_STREAMFLAGS_EVENTCALLBACK = 0x00040000; - - public static final int AUDCLNT_STREAMFLAGS_LOOPBACK = 0x00020000; - - public static final int AUDCLNT_STREAMFLAGS_NOPERSIST = 0x00080000; - - public static final int CLSCTX_ALL - = /* CLSCTX_INPROC_SERVER */ 0x1 - | /* CLSCTX_INPROC_HANDLER */ 0x2 - | /* CLSCTX_LOCAL_SERVER */ 0x4 - | /* CLSCTX_REMOTE_SERVER */ 0x10; - - public static final String CLSID_MMDeviceEnumerator - = "{bcde0395-e52f-467c-8e3d-c4579291692e}"; - - public static final int COINIT_MULTITHREADED = 0x0; - - public static final int DEVICE_STATE_ACTIVE = 0x1; - - public static final int eAll = 2; - - public static final int eCapture = 1; - - public static final int eRender = 0; - - private static final int FACILIY_AUDCLNT = 0x889; - - public static final String IID_IAudioCaptureClient 
- = "{c8adbd64-e71e-48a0-a4de-185c395cd317}"; - - public static final String IID_IAudioClient - = "{1cb9ad4c-dbfa-4c32-b178-c2f568a703b2}"; - - public static final String IID_IAudioRenderClient - = "{f294acfc-3146-4483-a7bf-addca7c260e2}"; - - public static final String IID_IMMDeviceEnumerator - = "{a95664d2-9614-4f35-a746-de8db63617e6}"; - - public static final String IID_IMMEndpoint - = "{1be09788-6894-4089-8586-9a2a6c265ac5}"; - - public static final long PKEY_Device_FriendlyName; - - public static final int RPC_E_CHANGED_MODE = 0x80010106; - - public static final int S_FALSE = 1; - - public static final int S_OK = 0; - - private static final int SEVERITY_ERROR = 1; - - private static final int SEVERITY_SUCCESS = 0; - - public static final int STGM_READ = 0x0; - - /** - * The return value of {@link #WaitForSingleObject(long, long)} which - * indicates that the specified object is a mutex that was not released by - * the thread that owned the mutex before the owning thread terminated. - * Ownership of the mutex is granted to the calling thread and the mutex - * state is set to non-signaled. - */ - public static final int WAIT_ABANDONED = 0x00000080; - - /** - * The return value of {@link #WaitForSingleObject(long, long)} which - * indicates that the function has failed. Normally, the function will throw - * an {@link HResultException} in the case and - * {@link HResultException#getHResult()} will return WAIT_FAILED. - */ - public static final int WAIT_FAILED = 0xffffffff; - - /** - * The return value of {@link #WaitForSingleObject(long, long)} which - * indicates that the specified object is signaled. - */ - public static final int WAIT_OBJECT_0 = 0x00000000; - - /** - * The return value of {@link #WaitForSingleObject(long, long)} which - * indicates that the specified time-out interval has elapsed and the state - * of the specified object is non-signaled. 
- */ - public static final int WAIT_TIMEOUT = 0x00000102; - - public static final char WAVE_FORMAT_PCM = 1; - - static - { - JNIUtils.loadLibrary("jnwasapi", WASAPI.class.getClassLoader()); - - AUDCLNT_E_NOT_STOPPED - = MAKE_HRESULT(SEVERITY_ERROR, FACILIY_AUDCLNT, 5); - - /* - * XXX The pointer to native memory returned by PSPropertyKeyFromString - * is to be freed via CoTaskMemFree. - */ - String pszString = null; - - try - { - pszString = "{a45c254e-df1c-4efd-8020-67d146a850e0} 14"; - PKEY_Device_FriendlyName = PSPropertyKeyFromString(pszString); - if (PKEY_Device_FriendlyName == 0) - throw new IllegalStateException("PKEY_Device_FriendlyName"); - } - catch (HResultException hre) - { - Logger logger = Logger.getLogger(WASAPI.class); - - logger.error("PSPropertyKeyFromString(" + pszString + ")", hre); - throw new RuntimeException(hre); - } - } - - public static native void CloseHandle(long hObject) - throws HResultException; - - public static native String CoCreateGuid() - throws HResultException; - - public static native long CoCreateInstance( - String clsid, - long pUnkOuter, - int dwClsContext, - String iid) - throws HResultException; - - public static native int CoInitializeEx(long pvReserved, int dwCoInit) - throws HResultException; - - public static native void CoTaskMemFree(long pv); - - public static native void CoUninitialize(); - - public static native long CreateEvent( - long lpEventAttributes, - boolean bManualReset, - boolean bInitialState, - String lpName) - throws HResultException; - - /** - * Determines whether a specific HRESULT value indicates failure. 
- * - * @param hresult the HRESULT value to be checked whether it - * indicates failure - * @return true if the specified hresult indicates - * failure; otherwise, false - */ - public static boolean FAILED(int hresult) - { - return (hresult < 0); - } - - public static native int IAudioCaptureClient_GetNextPacketSize(long thiz) - throws HResultException; - - public static native int IAudioCaptureClient_Read( - long thiz, - byte[] data, int offset, int length, - int srcSampleSize, int srcChannels, - int dstSampleSize, int dstChannels) - throws HResultException; - - public static native void IAudioCaptureClient_Release(long thiz); - - public static native int IAudioClient_GetBufferSize(long thiz) - throws HResultException; - - public static native int IAudioClient_GetCurrentPadding(long thiz) - throws HResultException; - - public static native long IAudioClient_GetDefaultDevicePeriod(long thiz) - throws HResultException; - - public static native long IAudioClient_GetMinimumDevicePeriod(long thiz) - throws HResultException; - - public static native long IAudioClient_GetService(long thiz, String iid) - throws HResultException; - - public static native int IAudioClient_Initialize( - long thiz, - int shareMode, - int streamFlags, - long hnsBufferDuration, - long hnsPeriodicity, - long pFormat, - String audioSessionGuid) - throws HResultException; - - public static native long IAudioClient_IsFormatSupported( - long thiz, - int shareMode, - long pFormat) - throws HResultException; - - public static native void IAudioClient_Release(long thiz); - - public static native void IAudioClient_SetEventHandle( - long thiz, - long eventHandle) - throws HResultException; - - public static native int IAudioClient_Start(long thiz) - throws HResultException; - - public static native int IAudioClient_Stop(long thiz) - throws HResultException; - - public static native void IAudioRenderClient_Release(long thiz); - - /** - * Writes specific audio data into the rendering endpoint buffer of a - 
* specific IAudioRenderClient. If the sample sizes and/or the - * numbers of channels of the specified audio data and the - * specified rendering endpoint buffer differ, the method may be able to - * perform the necessary conversions. - * - * @param thiz the IAudioRenderClient which abstracts the rendering - * endpoint buffer into which the specified audio data is to be - * written - * @param data the bytes of the audio samples to be written into the - * specified rendering endpoint buffer - * @param offset the offset in bytes within data at which valid - * audio samples begin - * @param length the number of bytes of valid audio samples in data - * @param srcSampleSize the size in bytes of an audio sample in - * data - * @param srcChannels the number of channels of the audio signal provided - * in data - * @param dstSampleSize the size in bytes of an audio sample in the - * rendering endpoint buffer - * @param dstChannels the number of channels with which the rendering - * endpoint buffer has been initialized - * @return the number of bytes which have been read from data - * (beginning at offset, of course) and successfully written into - * the rendering endpoint buffer - * @throws HResultException if an HRESULT value indicating an error is - * returned by a function invoked by the method implementation or an I/O - * error is encountered during the execution of the method - */ - public static native int IAudioRenderClient_Write( - long thiz, - byte[] data, int offset, int length, - int srcSampleSize, int srcChannels, - int dstSampleSize, int dstChannels) - throws HResultException; - - public static native long IMMDevice_Activate( - long thiz, - String iid, - int dwClsCtx, - long pActivationParams) - throws HResultException; - - public static native String IMMDevice_GetId(long thiz) - throws HResultException; - - public static native int IMMDevice_GetState(long thiz) - throws HResultException; - - public static native long IMMDevice_OpenPropertyStore( - long thiz, - 
int stgmAccess) - throws HResultException; - - public static native long IMMDevice_QueryInterface(long thiz, String iid) - throws HResultException; - - public static native void IMMDevice_Release(long thiz); - - public static native int IMMDeviceCollection_GetCount(long thiz) - throws HResultException; - - public static native long IMMDeviceCollection_Item(long thiz, int nDevice) - throws HResultException; - - public static native void IMMDeviceCollection_Release(long thiz); - - public static native long IMMDeviceEnumerator_EnumAudioEndpoints( - long thiz, - int dataFlow, - int dwStateMask) - throws HResultException; - - public static native long IMMDeviceEnumerator_GetDevice( - long thiz, - String pwstrId) - throws HResultException; - - public static native void IMMDeviceEnumerator_Release(long thiz); - - public static native int IMMEndpoint_GetDataFlow(long thiz) - throws HResultException; - - public static native void IMMEndpoint_Release(long thiz); - - public static native String IPropertyStore_GetString(long thiz, long key) - throws HResultException; - - public static native void IPropertyStore_Release(long thiz); - - private static int MAKE_HRESULT(int sev, int fac, int code) - { - return ((sev & 0x1) << 31) | ((fac & 0x7fff) << 16) | (code & 0xffff); - } - - public static native long PSPropertyKeyFromString(String pszString) - throws HResultException; - - public static native void ResetEvent(long hEvent) - throws HResultException; - - /** - * Determines whether a specific HRESULT value indicates success. - * - * @param hresult the HRESULT value to be checked whether it - * indicates success - * @return true if the specified hresult indicates - * success; otherwise, false - */ - public static boolean SUCCEEDED(int hresult) - { - return (hresult >= 0); - } - - /** - * Waits until the specified object is in the signaled state or the - * specified time-out interval elapses. 
- * - * @param hHandle a HANDLE to the object to wait for - * @param dwMilliseconds the time-out interval in milliseconds to wait. If a - * nonzero value is specified, the function waits until the specified object - * is signaled or the specified time-out interval elapses. If - * dwMilliseconds is zero, the function does not enter a wait state - * if the specified object is not signaled; it always returns immediately. - * If dwMilliseconds is INFINITE, the function will return - * only when the specified object is signaled. - * @return one of the WAIT_XXX constant values defined by the - * WASAPI class to indicate the event that caused the function to - * return - * @throws HResultException if the return value is {@link #WAIT_FAILED} - */ - public static native int WaitForSingleObject( - long hHandle, - long dwMilliseconds) - throws HResultException; - - public static native long WAVEFORMATEX_alloc(); - - public static native void WAVEFORMATEX_fill( - long thiz, - char wFormatTag, - char nChannels, - int nSamplesPerSec, - int nAvgBytesPerSec, - char nBlockAlign, - char wBitsPerSample, - char cbSize); - - public static native char WAVEFORMATEX_getCbSize(long thiz); - - public static native int WAVEFORMATEX_getNAvgBytesPerSec(long thiz); - - public static native char WAVEFORMATEX_getNBlockAlign(long thiz); - - public static native char WAVEFORMATEX_getNChannels(long thiz); - - public static native int WAVEFORMATEX_getNSamplesPerSec(long thiz); - - public static native char WAVEFORMATEX_getWBitsPerSample(long thiz); - - public static native char WAVEFORMATEX_getWFormatTag(long thiz); - - public static native void WAVEFORMATEX_setCbSize(long thiz, char cbSize); - - public static native void WAVEFORMATEX_setNAvgBytesPerSec( - long thiz, - int nAvgBytesPerSec); - - public static native void WAVEFORMATEX_setNBlockAlign( - long thiz, - char nBlockAlign); - - public static native void WAVEFORMATEX_setNChannels( - long thiz, - char nChannels); - - public static native void 
WAVEFORMATEX_setNSamplesPerSec( - long thiz, - int nSamplesPerSec); - - public static native void WAVEFORMATEX_setWBitsPerSample( - long thiz, - char wBitsPerSample); - - public static native void WAVEFORMATEX_setWFormatTag( - long thiz, - char wFormatTag); - - public static native int WAVEFORMATEX_sizeof(); - - /** Prevents the initialization of WASAPI instances. */ - private WASAPI() {} -} +package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi; + +import org.jitsi.util.*; + +/** + * Defines the native interface to Windows Audio Session API (WASAPI) and + * related Core Audio APIs such as Multimedia Device (MMDevice) API as used by + * WASAPISystem and its associated CaptureDevice, + * DataSource and Renderer implementations. + * + * @author Lyubomir Marinov + */ +public class WASAPI +{ + public static final int AUDCLNT_E_NOT_STOPPED; + + public static final int AUDCLNT_SHAREMODE_SHARED = 0; + + public static final int AUDCLNT_STREAMFLAGS_EVENTCALLBACK = 0x00040000; + + public static final int AUDCLNT_STREAMFLAGS_LOOPBACK = 0x00020000; + + public static final int AUDCLNT_STREAMFLAGS_NOPERSIST = 0x00080000; + + public static final int CLSCTX_ALL + = /* CLSCTX_INPROC_SERVER */ 0x1 + | /* CLSCTX_INPROC_HANDLER */ 0x2 + | /* CLSCTX_LOCAL_SERVER */ 0x4 + | /* CLSCTX_REMOTE_SERVER */ 0x10; + + public static final String CLSID_MMDeviceEnumerator + = "{bcde0395-e52f-467c-8e3d-c4579291692e}"; + + public static final int COINIT_MULTITHREADED = 0x0; + + public static final int DEVICE_STATE_ACTIVE = 0x1; + + public static final int eAll = 2; + + public static final int eCapture = 1; + + public static final int eRender = 0; + + private static final int FACILIY_AUDCLNT = 0x889; + + public static final String IID_IAudioCaptureClient + = "{c8adbd64-e71e-48a0-a4de-185c395cd317}"; + + public static final String IID_IAudioClient + = "{1cb9ad4c-dbfa-4c32-b178-c2f568a703b2}"; + + public static final String IID_IAudioRenderClient + = "{f294acfc-3146-4483-a7bf-addca7c260e2}"; + + 
public static final String IID_IMMDeviceEnumerator + = "{a95664d2-9614-4f35-a746-de8db63617e6}"; + + public static final String IID_IMMEndpoint + = "{1be09788-6894-4089-8586-9a2a6c265ac5}"; + + public static final long PKEY_Device_FriendlyName; + + public static final int RPC_E_CHANGED_MODE = 0x80010106; + + public static final int S_FALSE = 1; + + public static final int S_OK = 0; + + private static final int SEVERITY_ERROR = 1; + + private static final int SEVERITY_SUCCESS = 0; + + public static final int STGM_READ = 0x0; + + /** + * The return value of {@link #WaitForSingleObject(long, long)} which + * indicates that the specified object is a mutex that was not released by + * the thread that owned the mutex before the owning thread terminated. + * Ownership of the mutex is granted to the calling thread and the mutex + * state is set to non-signaled. + */ + public static final int WAIT_ABANDONED = 0x00000080; + + /** + * The return value of {@link #WaitForSingleObject(long, long)} which + * indicates that the function has failed. Normally, the function will throw + * an {@link HResultException} in the case and + * {@link HResultException#getHResult()} will return WAIT_FAILED. + */ + public static final int WAIT_FAILED = 0xffffffff; + + /** + * The return value of {@link #WaitForSingleObject(long, long)} which + * indicates that the specified object is signaled. + */ + public static final int WAIT_OBJECT_0 = 0x00000000; + + /** + * The return value of {@link #WaitForSingleObject(long, long)} which + * indicates that the specified time-out interval has elapsed and the state + * of the specified object is non-signaled. 
+ */ + public static final int WAIT_TIMEOUT = 0x00000102; + + public static final char WAVE_FORMAT_PCM = 1; + + static + { + JNIUtils.loadLibrary("jnwasapi", WASAPI.class.getClassLoader()); + + AUDCLNT_E_NOT_STOPPED + = MAKE_HRESULT(SEVERITY_ERROR, FACILIY_AUDCLNT, 5); + + /* + * XXX The pointer to native memory returned by PSPropertyKeyFromString + * is to be freed via CoTaskMemFree. + */ + String pszString = null; + + try + { + pszString = "{a45c254e-df1c-4efd-8020-67d146a850e0} 14"; + PKEY_Device_FriendlyName = PSPropertyKeyFromString(pszString); + if (PKEY_Device_FriendlyName == 0) + throw new IllegalStateException("PKEY_Device_FriendlyName"); + } + catch (HResultException hre) + { + Logger logger = Logger.getLogger(WASAPI.class); + + logger.error("PSPropertyKeyFromString(" + pszString + ")", hre); + throw new RuntimeException(hre); + } + } + + public static native void CloseHandle(long hObject) + throws HResultException; + + public static native String CoCreateGuid() + throws HResultException; + + public static native long CoCreateInstance( + String clsid, + long pUnkOuter, + int dwClsContext, + String iid) + throws HResultException; + + public static native int CoInitializeEx(long pvReserved, int dwCoInit) + throws HResultException; + + public static native void CoTaskMemFree(long pv); + + public static native void CoUninitialize(); + + public static native long CreateEvent( + long lpEventAttributes, + boolean bManualReset, + boolean bInitialState, + String lpName) + throws HResultException; + + /** + * Determines whether a specific HRESULT value indicates failure. 
+ * + * @param hresult the HRESULT value to be checked whether it + * indicates failure + * @return true if the specified hresult indicates + * failure; otherwise, false + */ + public static boolean FAILED(int hresult) + { + return (hresult < 0); + } + + public static native int IAudioCaptureClient_GetNextPacketSize(long thiz) + throws HResultException; + + public static native int IAudioCaptureClient_Read( + long thiz, + byte[] data, int offset, int length, + int srcSampleSize, int srcChannels, + int dstSampleSize, int dstChannels) + throws HResultException; + + public static native void IAudioCaptureClient_Release(long thiz); + + public static native int IAudioClient_GetBufferSize(long thiz) + throws HResultException; + + public static native int IAudioClient_GetCurrentPadding(long thiz) + throws HResultException; + + public static native long IAudioClient_GetDefaultDevicePeriod(long thiz) + throws HResultException; + + public static native long IAudioClient_GetMinimumDevicePeriod(long thiz) + throws HResultException; + + public static native long IAudioClient_GetService(long thiz, String iid) + throws HResultException; + + public static native int IAudioClient_Initialize( + long thiz, + int shareMode, + int streamFlags, + long hnsBufferDuration, + long hnsPeriodicity, + long pFormat, + String audioSessionGuid) + throws HResultException; + + public static native long IAudioClient_IsFormatSupported( + long thiz, + int shareMode, + long pFormat) + throws HResultException; + + public static native void IAudioClient_Release(long thiz); + + public static native void IAudioClient_SetEventHandle( + long thiz, + long eventHandle) + throws HResultException; + + public static native int IAudioClient_Start(long thiz) + throws HResultException; + + public static native int IAudioClient_Stop(long thiz) + throws HResultException; + + public static native void IAudioRenderClient_Release(long thiz); + + /** + * Writes specific audio data into the rendering endpoint buffer of a + 
* specific IAudioRenderClient. If the sample sizes and/or the + * numbers of channels of the specified audio data and the + * specified rendering endpoint buffer differ, the method may be able to + * perform the necessary conversions. + * + * @param thiz the IAudioRenderClient which abstracts the rendering + * endpoint buffer into which the specified audio data is to be + * written + * @param data the bytes of the audio samples to be written into the + * specified rendering endpoint buffer + * @param offset the offset in bytes within data at which valid + * audio samples begin + * @param length the number of bytes of valid audio samples in data + * @param srcSampleSize the size in bytes of an audio sample in + * data + * @param srcChannels the number of channels of the audio signal provided + * in data + * @param dstSampleSize the size in bytes of an audio sample in the + * rendering endpoint buffer + * @param dstChannels the number of channels with which the rendering + * endpoint buffer has been initialized + * @return the number of bytes which have been read from data + * (beginning at offset, of course) and successfully written into + * the rendering endpoint buffer + * @throws HResultException if an HRESULT value indicating an error is + * returned by a function invoked by the method implementation or an I/O + * error is encountered during the execution of the method + */ + public static native int IAudioRenderClient_Write( + long thiz, + byte[] data, int offset, int length, + int srcSampleSize, int srcChannels, + int dstSampleSize, int dstChannels) + throws HResultException; + + public static native long IMMDevice_Activate( + long thiz, + String iid, + int dwClsCtx, + long pActivationParams) + throws HResultException; + + public static native String IMMDevice_GetId(long thiz) + throws HResultException; + + public static native int IMMDevice_GetState(long thiz) + throws HResultException; + + public static native long IMMDevice_OpenPropertyStore( + long thiz, + 
int stgmAccess) + throws HResultException; + + public static native long IMMDevice_QueryInterface(long thiz, String iid) + throws HResultException; + + public static native void IMMDevice_Release(long thiz); + + public static native int IMMDeviceCollection_GetCount(long thiz) + throws HResultException; + + public static native long IMMDeviceCollection_Item(long thiz, int nDevice) + throws HResultException; + + public static native void IMMDeviceCollection_Release(long thiz); + + public static native long IMMDeviceEnumerator_EnumAudioEndpoints( + long thiz, + int dataFlow, + int dwStateMask) + throws HResultException; + + public static native long IMMDeviceEnumerator_GetDevice( + long thiz, + String pwstrId) + throws HResultException; + + public static native void IMMDeviceEnumerator_Release(long thiz); + + public static native int IMMEndpoint_GetDataFlow(long thiz) + throws HResultException; + + public static native void IMMEndpoint_Release(long thiz); + + public static native String IPropertyStore_GetString(long thiz, long key) + throws HResultException; + + public static native void IPropertyStore_Release(long thiz); + + private static int MAKE_HRESULT(int sev, int fac, int code) + { + return ((sev & 0x1) << 31) | ((fac & 0x7fff) << 16) | (code & 0xffff); + } + + public static native long PSPropertyKeyFromString(String pszString) + throws HResultException; + + public static native void ResetEvent(long hEvent) + throws HResultException; + + /** + * Determines whether a specific HRESULT value indicates success. + * + * @param hresult the HRESULT value to be checked whether it + * indicates success + * @return true if the specified hresult indicates + * success; otherwise, false + */ + public static boolean SUCCEEDED(int hresult) + { + return (hresult >= 0); + } + + /** + * Waits until the specified object is in the signaled state or the + * specified time-out interval elapses. 
+ * + * @param hHandle a HANDLE to the object to wait for + * @param dwMilliseconds the time-out interval in milliseconds to wait. If a + * nonzero value is specified, the function waits until the specified object + * is signaled or the specified time-out interval elapses. If + * dwMilliseconds is zero, the function does not enter a wait state + * if the specified object is not signaled; it always returns immediately. + * If dwMilliseconds is INFINITE, the function will return + * only when the specified object is signaled. + * @return one of the WAIT_XXX constant values defined by the + * WASAPI class to indicate the event that caused the function to + * return + * @throws HResultException if the return value is {@link #WAIT_FAILED} + */ + public static native int WaitForSingleObject( + long hHandle, + long dwMilliseconds) + throws HResultException; + + public static native long WAVEFORMATEX_alloc(); + + public static native void WAVEFORMATEX_fill( + long thiz, + char wFormatTag, + char nChannels, + int nSamplesPerSec, + int nAvgBytesPerSec, + char nBlockAlign, + char wBitsPerSample, + char cbSize); + + public static native char WAVEFORMATEX_getCbSize(long thiz); + + public static native int WAVEFORMATEX_getNAvgBytesPerSec(long thiz); + + public static native char WAVEFORMATEX_getNBlockAlign(long thiz); + + public static native char WAVEFORMATEX_getNChannels(long thiz); + + public static native int WAVEFORMATEX_getNSamplesPerSec(long thiz); + + public static native char WAVEFORMATEX_getWBitsPerSample(long thiz); + + public static native char WAVEFORMATEX_getWFormatTag(long thiz); + + public static native void WAVEFORMATEX_setCbSize(long thiz, char cbSize); + + public static native void WAVEFORMATEX_setNAvgBytesPerSec( + long thiz, + int nAvgBytesPerSec); + + public static native void WAVEFORMATEX_setNBlockAlign( + long thiz, + char nBlockAlign); + + public static native void WAVEFORMATEX_setNChannels( + long thiz, + char nChannels); + + public static native void 
WAVEFORMATEX_setNSamplesPerSec( + long thiz, + int nSamplesPerSec); + + public static native void WAVEFORMATEX_setWBitsPerSample( + long thiz, + char wBitsPerSample); + + public static native void WAVEFORMATEX_setWFormatTag( + long thiz, + char wFormatTag); + + public static native int WAVEFORMATEX_sizeof(); + + /** Prevents the initialization of WASAPI instances. */ + private WASAPI() {} +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java index e49b80aac..940eb5553 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,1942 +13,1942 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.renderer.audio; - -import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*; - -import java.beans.*; -import java.lang.reflect.*; -import java.util.*; -import java.util.concurrent.*; - -import javax.media.*; -import javax.media.format.*; - -import org.jitsi.impl.neomedia.*; -import org.jitsi.impl.neomedia.control.*; -import org.jitsi.impl.neomedia.device.*; -import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*; -import org.jitsi.service.neomedia.*; -import org.jitsi.service.neomedia.codec.*; -import org.jitsi.util.*; - -/** - * Implements an audio Renderer using Windows Audio Session API - * (WASAPI) and related Core Audio APIs such as Multimedia Device (MMDevice) - * API. - * - * @author Lyubomir Marinov - */ -public class WASAPIRenderer - extends AbstractAudioRenderer -{ - /** - * The Logger used by the WASAPIRenderer class and its - * instances to log debug information. 
- */ - private static final Logger logger = Logger.getLogger(WASAPIRenderer.class); - - /** - * The human-readable name of the WASAPIRenderer PlugIn - * implementation instances. - */ - private static final String PLUGIN_NAME - = "Windows Audio Session API (WASAPI) Renderer"; - - /** - * Finds the first non-null element in a specific array of - * AudioFormats. - * - * @param formats the array of AudioFormats in which the first - * non-null element is to be found - * @return the first non-null element in formats if any; - * otherwise, null - */ - private static AudioFormat findFirst(AudioFormat[] formats) - { - AudioFormat format = null; - - for (AudioFormat aFormat : formats) - { - if (aFormat != null) - { - format = aFormat; - break; - } - } - return format; - } - - /** - * Attempts to initialize and open a new Codec to resample media - * data from a specific input AudioFormat into a specific output - * AudioFormat. If no suitable resampler is found, returns - * null. If a suitable resampler is found but its initialization or - * opening fails, logs and swallows any Throwable and returns - * null. 
- * - * @param inFormat the AudioFormat in which the new instance is to - * input media data - * @param outFormat the AudioFormat in which the new instance is to - * output media data - * @return a new Codec which is able to resample media data from - * the specified inFormat into the specified outFormat if - * such a resampler could be found, initialized and opened; otherwise, - * null - */ - public static Codec maybeOpenResampler( - AudioFormat inFormat, - AudioFormat outFormat) - { - @SuppressWarnings("unchecked") - List classNames - = PlugInManager.getPlugInList( - inFormat, - outFormat, - PlugInManager.CODEC); - Codec resampler = null; - - if (classNames != null) - { - for (String className : classNames) - { - try - { - Codec codec - = (Codec) Class.forName(className).newInstance(); - Format setInput = codec.setInputFormat(inFormat); - - if ((setInput != null) && inFormat.matches(setInput)) - { - Format setOutput = codec.setOutputFormat(outFormat); - - if ((setOutput != null) && outFormat.matches(setOutput)) - { - codec.open(); - resampler = codec; - break; - } - } - } - catch (Throwable t) - { - if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.warn( - "Failed to open resampler " + className, - t); - } - } - } - } - return resampler; - } - - /** - * Pops a specific number of bytes from (the head of) a specific array of - * bytes. 
- * - * @param array the array of byte from which the specified number - * of bytes are to be popped - * @param arrayLength the number of elements in array which contain - * valid data - * @param length the number of bytes to be popped from array - * @return the number of elements in array which contain valid data - * after the specified number of bytes have been popped from it - */ - public static int pop(byte[] array, int arrayLength, int length) - { - if (length < 0) - throw new IllegalArgumentException("length"); - if (length == 0) - return arrayLength; - - int newArrayLength = arrayLength - length; - - if (newArrayLength > 0) - { - for (int i = 0, j = length; i < newArrayLength; i++, j++) - array[i] = array[j]; - } - else - newArrayLength = 0; - return newArrayLength; - } - - /** - * The duration in milliseconds of the endpoint buffer. - */ - private long bufferDuration; - - /** - * The indicator which determines whether the audio stream represented by - * this instance, {@link #iAudioClient} and {@link #iAudioRenderClient} is - * busy and, consequently, its state should not be modified. For example, - * the audio stream is busy during the execution of - * {@link #process(Buffer)}. - */ - private boolean busy; - - /** - * The length in milliseconds of the interval between successive, periodic - * processing passes by the audio engine on the data in the endpoint buffer. - */ - private long devicePeriod = WASAPISystem.DEFAULT_DEVICE_PERIOD; - - /** - * The value of {@link #devicePeriod} expressed in terms of numbers of - * frames (i.e. takes the sample rate into account). - */ - private int devicePeriodInFrames; - - /** - * The number of channels with which {@link #iAudioClient} has been - * initialized. - */ - private int dstChannels; - - /** - * The AudioFormat with which {@link #iAudioClient} has been - * initialized. - */ - private AudioFormat dstFormat; - - /** - * The sample size in bytes with which {@link #iAudioClient} has been - * initialized. 
- */ - private int dstSampleSize; - - /** - * The event handle that the system signals when an audio buffer is ready to - * be processed by the client. - */ - private long eventHandle; - - /** - * The Runnable which is scheduled by this WASAPIRenderer - * and executed by {@link #eventHandleExecutor} and waits for - * {@link #eventHandle} to be signaled. - */ - private Runnable eventHandleCmd; - - /** - * The Executor implementation which is to execute - * {@link #eventHandleCmd}. - */ - private Executor eventHandleExecutor; - - /** - * The WASAPI IAudioClient instance which enables this - * Renderer to create and initialize an audio stream between this - * Renderer and the audio engine of the associated audio endpoint - * device. - */ - private long iAudioClient; - - /** - * The WASAPI IAudioRenderClient obtained from - * {@link #iAudioClient} which enables this Renderer to write - * output data to the rendering endpoint buffer. - */ - private long iAudioRenderClient; - - /** - * The indicator which determines whether the value of the locator - * property of this instance was equal to null when this Renderer - * was opened. Indicates that this Renderer should successfully - * process media data without actually rendering to any render endpoint - * device. - */ - private boolean locatorIsNull; - - /** - * The maximum capacity in frames of the endpoint buffer. - */ - private int numBufferFrames; - - /** - * The Codec which resamples the media provided to this - * Renderer via {@link #process(Buffer)} into {@link #dstFormat} - * if necessary. - */ - private Codec resampler; - - /** - * The number of channels of the audio signal output by {@link #resampler}. - * It may differ from {@link #dstChannels}. 
- */ - private int resamplerChannels; - - /** - * The data which has remained unwritten during earlier invocations of - * {@link #runInEventHandleCmd(Runnable)} because it represents frames which - * are few enough to be accepted on their own for writing by - * {@link #iAudioRenderClient}. - */ - private byte[] resamplerData; - - /** - * The size in bytes of an audio frame produced by {@link #resampler}. Based - * on {@link #resamplerChannels} and {@link #resamplerSampleSize} and cached - * in order to reduce calculations. - */ - private int resamplerFrameSize; - - /** - * The Buffer which provides the input to {@link #resampler}. - * Represents a unit of {@link #srcBuffer} to be processed in a single call - * to resampler. - */ - private Buffer resamplerInBuffer; - - /** - * The Buffer which receives the output of {@link #resampler}. - */ - private Buffer resamplerOutBuffer; - - /** - * The size in bytes of an audio sample produced by {@link #resampler}. - */ - private int resamplerSampleSize; - - /** - * The data which has remained unwritten during earlier invocations of - * {@link #process(Buffer)} because it represents frames which are few - * enough to be accepted on their own for writing by - * {@link #iAudioRenderClient}. - */ - private byte[] srcBuffer; - - /** - * The number of bytes in {@link #srcBuffer} which represent valid audio - * data to be written by {@link #iAudioRenderClient}. - */ - private int srcBufferLength; - - /** - * The number of channels which which this Renderer has been - * opened. - */ - private int srcChannels; - - /** - * The AudioFormat with which this Renderer has been - * opened. - */ - private AudioFormat srcFormat; - - /** - * The frame size in bytes with which this Renderer has been - * opened. It is the product of {@link #srcSampleSize} and - * {@link #srcChannels}. - */ - private int srcFrameSize; - - /** - * The sample size in bytes with which this Renderer has been - * opened. 
- */ - private int srcSampleSize; - - /** - * The indicator which determines whether this Renderer is started - * i.e. there has been a successful invocation of {@link #start()} without - * an intervening invocation of {@link #stop()}. - */ - private boolean started; - - /** - * The time in milliseconds at which the writing to the render endpoint - * buffer has started malfunctioning. For example, {@link #srcBuffer} being - * full from the point of view of {@link #process(Buffer)} for an extended - * period of time may indicate abnormal functioning. - */ - private long writeIsMalfunctioningSince = DiagnosticsControl.NEVER; - - /** - * The maximum interval of time in milliseconds that the writing to the - * render endpoint buffer is allowed to be under suspicion that it is - * malfunctioning. If it remains under suspicion after the maximum interval - * of time has elapsed, the writing to the render endpoint buffer is to be - * considered malfunctioning for real. - */ - private long writeIsMalfunctioningTimeout; - - /** - * Initializes a new WASAPIRenderer instance which is to perform - * playback (as opposed to sound a notification). - */ - public WASAPIRenderer() - { - this(AudioSystem.DataFlow.PLAYBACK); - } - - /** - * Initializes a new WASAPIRenderer instance which is to either - * perform playback or sound a notification. - * - * @param dataFlow {@link AudioSystem.DataFlow#PLAYBACK} if the new instance - * is to perform playback or {@link AudioSystem.DataFlow#NOTIFY} if the new - * instance is to sound a notification - */ - public WASAPIRenderer(AudioSystem.DataFlow dataFlow) - { - super(AudioSystem.LOCATOR_PROTOCOL_WASAPI, dataFlow); - } - - /** - * Initializes a new WASAPIRenderer instance which is to either - * perform playback or sound a notification. - * - * @param playback true if the new instance is to perform playback - * or false if the new instance is to sound a notification - */ - public WASAPIRenderer(boolean playback) - { - this( - playback - ? 
AudioSystem.DataFlow.PLAYBACK - : AudioSystem.DataFlow.NOTIFY); - } - - /** - * {@inheritDoc} - */ - @Override - public synchronized void close() - { - try - { - stop(); - } - finally - { - if (iAudioRenderClient != 0) - { - IAudioRenderClient_Release(iAudioRenderClient); - iAudioRenderClient = 0; - } - if (iAudioClient != 0) - { - IAudioClient_Release(iAudioClient); - iAudioClient = 0; - } - if (eventHandle != 0) - { - try - { - CloseHandle(eventHandle); - } - catch (HResultException hre) - { - // The event HANDLE will be leaked. - logger.warn("Failed to close event HANDLE.", hre); - } - eventHandle = 0; - } - maybeCloseResampler(); - - dstFormat = null; - locatorIsNull = false; - srcBuffer = null; - srcBufferLength = 0; - srcFormat = null; - started = false; - - super.close(); - } - } - - /** - * Gets an array of alternative AudioFormats based on - * inputFormat with which an attempt is to be made to initialize a - * new IAudioClient instance. - * - * @return an array of alternative AudioFormats based on - * inputFormat with which an attempt is to be made to initialize a - * new IAudioClient instance - */ - private AudioFormat[] getFormatsToInitializeIAudioClient() - { - AudioFormat inputFormat = this.inputFormat; - - if (inputFormat == null) - throw new NullPointerException("No inputFormat set."); - else - { - /* - * Prefer to initialize the IAudioClient with an AudioFormat which - * matches the inputFormat as closely as possible. - */ - AudioFormat[] preferredFormats - = WASAPISystem.getFormatsToInitializeIAudioClient(inputFormat); - // Otherwise, any supported Format will do. 
- Format[] supportedFormats = getSupportedInputFormats(); - List formats - = new ArrayList( - preferredFormats.length + supportedFormats.length); - - for (AudioFormat format : preferredFormats) - { - if (!formats.contains(format)) - formats.add(format); - } - for (Format format : supportedFormats) - { - if (!formats.contains(format) - && (format instanceof AudioFormat)) - { - formats.add((AudioFormat) format); - } - } - - /* - * Resampling isn't very cool. Moreover, resampling between sample - * rates with a non-integer quotient may result in audio glitches. - * Try to minimize the risks of having to use any of these two when - * unnecessary. - */ - final int sampleRate = (int) inputFormat.getSampleRate(); - - if (sampleRate != Format.NOT_SPECIFIED) - { - Collections.sort( - formats, - new Comparator() - { - @Override - public int compare(AudioFormat af1, AudioFormat af2) - { - int d1 = computeSampleRateDistance(af1); - int d2 = computeSampleRateDistance(af2); - - return (d1 < d2) ? -1 : (d1 == d2) ? 0 : 1; - } - - private int computeSampleRateDistance( - AudioFormat af) - { - int sr = (int) af.getSampleRate(); - - if (sr == Format.NOT_SPECIFIED) - return Integer.MAX_VALUE; - else if (sr == sampleRate) - return 0; - - int min, max; - boolean downsample; - - if (sr < sampleRate) - { - min = sr; - max = sampleRate; - downsample = true; - } - else - { - min = sampleRate; - max = sr; - downsample = false; - } - if (min == 0) - return Integer.MAX_VALUE; - else - { - int h = max % min; - int l = max / min; - - /* - * Prefer AudioFormats which will cause - * upsampling to AudioFormats which will - * cause downsampling. 
- */ - if (downsample) - { - l = Short.MAX_VALUE - l; - if (h != 0) - h = Short.MAX_VALUE - h; - } - - return (h << 16) | l; - } - } - }); - } - - return formats.toArray(new AudioFormat[formats.size()]); - } - } - - /** - * {@inheritDoc} - */ - public String getName() - { - return PLUGIN_NAME; - } - - /** - * {@inheritDoc} - * - * Overrides the super implementation to handle the case in which the user - * has selected "none" for the playback/notify device. - */ - @Override - public Format[] getSupportedInputFormats() - { - if (getLocator() == null) - { - /* - * XXX We toyed with the idea of calculating a list of common - * Formats supported by all devices (of the dataFlow of this - * AbstractAudioRenderer, of course) but that turned out to be - * monstrous in code, inefficient at least in terms of garbage - * collection and with questionable suitability. The following - * approach will likely have a comparable suitability with better - * efficiency achieved code that is easier to understand. - */ - - /* - * The maximums supported by the WASAPI integration at the time of - * this writing. - */ - double sampleRate = MediaUtils.MAX_AUDIO_SAMPLE_RATE; - int sampleSizeInBits = 16; - int channels = 2; - - if ((sampleRate == Format.NOT_SPECIFIED) - && (Constants.AUDIO_SAMPLE_RATES.length != 0)) - sampleRate = Constants.AUDIO_SAMPLE_RATES[0]; - return - WASAPISystem.getFormatsToInitializeIAudioClient( - new AudioFormat( - AudioFormat.LINEAR, - sampleRate, - sampleSizeInBits, - channels, - AudioFormat.LITTLE_ENDIAN, - AudioFormat.SIGNED, - /* frameSizeInBits */ Format.NOT_SPECIFIED, - /* frameRate */ Format.NOT_SPECIFIED, - Format.byteArray)); - } - else - return super.getSupportedInputFormats(); - } - - /** - * Closes {@link #resampler} if it is non-null. 
- */ - private void maybeCloseResampler() - { - Codec resampler = this.resampler; - - if (resampler != null) - { - this.resampler = null; - resamplerData = null; - resamplerInBuffer = null; - resamplerOutBuffer = null; - - try - { - resampler.close(); - } - catch (Throwable t) - { - if (t instanceof InterruptedException) - Thread.currentThread().interrupt(); - else if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - logger.error("Failed to close resampler.", t); - } - } - } - - /** - * Invokes WASAPI.IAudioRenderClient_Write on - * {@link #iAudioRenderClient} and logs and swallows any - * HResultException. - * - * @param data the bytes of the audio samples to be written into the render - * endpoint buffer - * @param offset the offset in data at which the bytes of the audio - * samples to be written into the render endpoint buffer begin - * @param length the number of the bytes in data beginning at - * offset of the audio samples to be written into the render - * endpoint buffer - * @param srcSampleSize the size in bytes of an audio sample in - * data - * @param srcChannels the number of channels of the audio signal provided in - * data - * @return the number of bytes from data (starting at - * offset) which have been written into the render endpoint buffer - * or 0 upon HResultException - */ - private int maybeIAudioRenderClientWrite( - byte[] data, int offset, int length, - int srcSampleSize, int srcChannels) - { - int written; - - try - { - written - = IAudioRenderClient_Write( - iAudioRenderClient, - data, offset, length, - srcSampleSize, srcChannels, - dstSampleSize, dstChannels); - } - catch (HResultException hre) - { - written = 0; - logger.error("IAudioRenderClient_Write", hre); - } - return written; - } - - /** - * Initializes and opens a new instance of {@link #resampler} if the - * Format-related state of this instance deems its existence - * necessary. 
- */ - private void maybeOpenResampler() - { - AudioFormat inFormat = this.inputFormat; - AudioFormat outFormat = this.dstFormat; - - // We are able to translate between mono and stereo. - if ((inFormat.getSampleRate() == outFormat.getSampleRate()) - && (inFormat.getSampleSizeInBits() - == outFormat.getSampleSizeInBits())) - return; - - // The resamplers are not expected to convert between mono and stereo. - int channels = inFormat.getChannels(); - - if (outFormat.getChannels() != channels) - { - outFormat - = new AudioFormat( - outFormat.getEncoding(), - outFormat.getSampleRate(), - outFormat.getSampleSizeInBits(), - channels, - outFormat.getEndian(), - outFormat.getSigned(), - /* frameSizeInBits */ Format.NOT_SPECIFIED, - /* frameRate */ Format.NOT_SPECIFIED, - outFormat.getDataType()); - } - - Codec resampler = maybeOpenResampler(inFormat, outFormat); - - if (resampler == null) - { - throw new IllegalStateException( - "Failed to open a codec to resample [" + inFormat - + "] into [" + outFormat + "]."); - } - else - { - this.resampler = resampler; - - resamplerInBuffer = new Buffer(); - resamplerInBuffer.setFormat(inFormat); - - resamplerChannels = outFormat.getChannels(); - resamplerSampleSize = WASAPISystem.getSampleSizeInBytes(outFormat); - resamplerFrameSize = resamplerChannels * resamplerSampleSize; - - resamplerData = new byte[numBufferFrames * resamplerFrameSize]; - - resamplerOutBuffer = new Buffer(); - resamplerOutBuffer.setData(resamplerData); - resamplerOutBuffer.setLength(0); - resamplerOutBuffer.setOffset(0); - } - } - - /** - * Processes audio samples from {@link #srcBuffer} through - * {@link #resampler} i.e. resamples them in order to produce media data - * in {@link #resamplerData} to be written into the render endpoint buffer. 
- * - * @param numFramesRequested the number of audio frames in the units of - * {@link #dstFormat} requested by the rendering endpoint - */ - private void maybeResample(int numFramesRequested) - { - int outLength = resamplerOutBuffer.getLength(); - - /* - * Do not resample if there is enough resampled audio to satisfy the - * request of the rendering endpoint buffer. - */ - if (outLength < numFramesRequested * resamplerFrameSize) - { - // Sample rate conversions work on audio frames, not on bytes. - int outFrames - = (resamplerData.length - outLength) / resamplerFrameSize; - - if (outFrames > 0) - { - /* - * Predict how many bytes will be consumed from the input during - * the sample rate conversion. - */ - int srcSampleRate = (int) srcFormat.getSampleRate(); - int dstSampleRate = (int) dstFormat.getSampleRate(); - int inLength - = (outFrames * srcSampleRate / dstSampleRate) - * srcFrameSize; - - if (inLength > srcBuffer.length) - inLength = srcBuffer.length; - if (inLength > srcBufferLength) - inLength = srcBufferLength; - if (inLength > 0) - { - int resampled; - - resamplerOutBuffer.setLength(0); - resamplerOutBuffer.setOffset(outLength); - try - { - resamplerOutBuffer.setDiscard(false); - resamplerInBuffer.setLength(inLength); - resamplerInBuffer.setOffset(0); - - resampler.process( - resamplerInBuffer, - resamplerOutBuffer); - } - finally - { - resampled = resamplerOutBuffer.getLength(); - outLength = resamplerOutBuffer.getOffset() + resampled; - resamplerOutBuffer.setLength(outLength); - resamplerOutBuffer.setOffset(0); - } - - if (resampled > 0) - { - /* - * How many bytes have actually been consumed from the - * input during the sample rate conversion? 
- */ - resampled - = ((resampled / resamplerFrameSize) - * srcSampleRate - / dstSampleRate) - * srcFrameSize; - if (resampled > 0) - popFromSrcBuffer(resampled); - } - } - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public synchronized void open() - throws ResourceUnavailableException - { - if (this.iAudioClient != 0) - return; - - MediaLocator locator = null; - - try - { - locator = getLocator(); - if (locatorIsNull = (locator == null)) - { - /* - * We actually want to allow the user to switch the playback - * and/or notify device to none mid-stream in order to disable - * the playback. - */ - } - else - { - - /* - * The method getFormatsToInitializeIAudioClient will assert that - * inputFormat is set. - */ - AudioFormat[] formats = getFormatsToInitializeIAudioClient(); - long eventHandle = CreateEvent(0, false, false, null); - - try - { - long iAudioClient - = audioSystem.initializeIAudioClient( - locator, - dataFlow, - /* streamFlags */ 0, - eventHandle, - WASAPISystem.DEFAULT_BUFFER_DURATION, - formats); - - if (iAudioClient == 0) - { - throw new ResourceUnavailableException( - "Failed to initialize IAudioClient" - + " for MediaLocator " + locator - + " and AudioSystem.DataFlow " + dataFlow); - } - try - { - long iAudioRenderClient - = IAudioClient_GetService( - iAudioClient, - IID_IAudioRenderClient); - - if (iAudioRenderClient == 0) - { - throw new ResourceUnavailableException( - "IAudioClient_GetService" - + "(IID_IAudioRenderClient)"); - } - try - { - srcFormat = this.inputFormat; - dstFormat = findFirst(formats); - - /* - * The value hnsDefaultDevicePeriod is documented to - * specify the default scheduling period for a - * shared-mode stream. 
- */ - devicePeriod - = IAudioClient_GetDefaultDevicePeriod(iAudioClient) - / 10000L; - numBufferFrames - = IAudioClient_GetBufferSize(iAudioClient); - - int dstSampleRate = (int) dstFormat.getSampleRate(); - - bufferDuration - = numBufferFrames * 1000L / dstSampleRate; - /* - * We will very likely be inefficient if we fail to - * synchronize with the scheduling period of the audio - * engine but we have to make do with what we have. - */ - if (devicePeriod <= 1) - { - devicePeriod = bufferDuration / 2; - if ((devicePeriod - > WASAPISystem.DEFAULT_DEVICE_PERIOD) - || (devicePeriod <= 1)) - devicePeriod - = WASAPISystem.DEFAULT_DEVICE_PERIOD; - } - devicePeriodInFrames - = (int) (devicePeriod * dstSampleRate / 1000L); - - dstChannels = dstFormat.getChannels(); - dstSampleSize - = WASAPISystem.getSampleSizeInBytes(dstFormat); - - maybeOpenResampler(); - - srcChannels = srcFormat.getChannels(); - srcSampleSize - = WASAPISystem.getSampleSizeInBytes(srcFormat); - srcFrameSize = srcSampleSize * srcChannels; - - /* - * The remainder/residue in frames of - * IAudioRenderClient_Write cannot be more than the - * maximum capacity of the endpoint buffer. - */ - int srcBufferCapacityInFrames; - - if (resampler == null) - { - srcBufferCapacityInFrames = numBufferFrames; - } - else - { - /* - * The units of srcBuffer are based on srcFormat, - * the units of numBufferFrames are based on - * dstFormat. - */ - int srcSampleRate = (int) srcFormat.getSampleRate(); - - srcBufferCapacityInFrames - = numBufferFrames - * srcSampleRate - / dstSampleRate; - } - srcBuffer - = new byte[ - srcBufferCapacityInFrames * srcFrameSize]; - if (resamplerInBuffer != null) - resamplerInBuffer.setData(srcBuffer); - - /* - * Introduce latency in order to decrease the likelihood - * of underflow. 
- */ - srcBufferLength = srcBuffer.length; - - writeIsMalfunctioningSince = DiagnosticsControl.NEVER; - writeIsMalfunctioningTimeout - = 2 * Math.max(bufferDuration, devicePeriod); - - this.eventHandle = eventHandle; - eventHandle = 0; - this.iAudioClient = iAudioClient; - iAudioClient = 0; - this.iAudioRenderClient = iAudioRenderClient; - iAudioRenderClient = 0; - } - finally - { - if (iAudioRenderClient != 0) - IAudioRenderClient_Release(iAudioRenderClient); - } - } - finally - { - if (iAudioClient != 0) - { - IAudioClient_Release(iAudioClient); - maybeCloseResampler(); - } - } - } - finally - { - if (eventHandle != 0) - CloseHandle(eventHandle); - } - - } // The locator of this Renderer is not null. - } - catch (Throwable t) - { - if (t instanceof InterruptedException) - Thread.currentThread().interrupt(); - else if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - else - { - logger.error( - "Failed to open a WASAPIRenderer on audio endpoint" - + " device " + toString(locator), - t); - if (t instanceof ResourceUnavailableException) - throw (ResourceUnavailableException) t; - else - { - ResourceUnavailableException rue - = new ResourceUnavailableException(); - - rue.initCause(t); - throw rue; - } - } - } - - super.open(); - } - - /** - * {@inheritDoc} - */ - @Override - protected synchronized void playbackDevicePropertyChange( - PropertyChangeEvent ev) - { - /* - * Stop, close, re-open and re-start this Renderer (performing whichever - * of these in order to bring it into the same state) in order to - * reflect the change in the selection with respect to the playback or - * notify device. 
- */ - - waitWhileBusy(); - - boolean open - = ((iAudioClient != 0) && (iAudioRenderClient != 0)) - || locatorIsNull; - - if (open) - { - boolean start = started; - - close(); - - try - { - open(); - } - catch (ResourceUnavailableException rue) - { - throw new UndeclaredThrowableException(rue); - } - if (start) - start(); - } - } - - /** - * Pops a specific number of bytes from {@link #srcBuffer}. For example, - * because such a number of bytes have been read from srcBuffer and - * written into the rendering endpoint buffer. - * - * @param length the number of bytes to pop from srcBuffer - */ - private void popFromSrcBuffer(int length) - { - srcBufferLength = pop(srcBuffer, srcBufferLength, length); - } - - /** - * {@inheritDoc} - */ - public int process(Buffer buffer) - { - int length = buffer.getLength(); - - if (length < 1) - return BUFFER_PROCESSED_OK; - - byte[] data = (byte[]) buffer.getData(); - int offset = buffer.getOffset(); - - synchronized (this) - { - if ((iAudioClient == 0) || (iAudioRenderClient == 0)) - { - /* - * We actually want to allow the user to switch the playback - * and/or notify device to none mid-stream in order to disable - * the playback. - */ - return - locatorIsNull - ? BUFFER_PROCESSED_OK - : BUFFER_PROCESSED_FAILED; - } - else if (!started) - return BUFFER_PROCESSED_FAILED; - else - { - waitWhileBusy(); - busy = true; - } - } - - int ret = BUFFER_PROCESSED_OK; - long sleep = 0; - - try - { - int numPaddingFrames; - - if (eventHandle == 0) - { - try - { - numPaddingFrames - = IAudioClient_GetCurrentPadding(iAudioClient); - } - catch (HResultException hre) - { - numPaddingFrames = 0; - ret = BUFFER_PROCESSED_FAILED; - logger.error("IAudioClient_GetCurrentPadding", hre); - } - } - else - { - /* - * The process method will not write into the rendering endpoint - * buffer, the runInEventHandleCmd method will. 
- */ - numPaddingFrames = numBufferFrames; - } - if (ret != BUFFER_PROCESSED_FAILED) - { - int numFramesRequested = numBufferFrames - numPaddingFrames; - - if (numFramesRequested == 0) - { - if (eventHandle == 0) - { - /* - * There is NO available space in the rendering endpoint - * buffer into which this Renderer can write data. - */ - ret |= INPUT_BUFFER_NOT_CONSUMED; - sleep = devicePeriod; - /* - * The writing to the render endpoint buffer may or may - * not be malfunctioning, it depends on the interval of - * time that the state remains unchanged. - */ - if (writeIsMalfunctioningSince - == DiagnosticsControl.NEVER) - setWriteIsMalfunctioning(true); - } - else - { - /* - * The process method will write into srcBuffer, the - * runInEventHandleCmd will read from srcBuffer and - * write into the rendering endpoint buffer. - */ - int toCopy = srcBuffer.length - srcBufferLength; - - if (toCopy > 0) - { - if (toCopy > length) - toCopy = length; - System.arraycopy( - data, offset, - srcBuffer, srcBufferLength, - toCopy); - srcBufferLength += toCopy; - - if (length > toCopy) - { - buffer.setLength(length - toCopy); - buffer.setOffset(offset + toCopy); - ret |= INPUT_BUFFER_NOT_CONSUMED; - } - - /* - * Writing from the input Buffer into srcBuffer has - * occurred so it does not look like the writing to - * the render endpoint buffer is malfunctioning. - */ - if (writeIsMalfunctioningSince - != DiagnosticsControl.NEVER) - setWriteIsMalfunctioning(false); - } - else - { - ret |= INPUT_BUFFER_NOT_CONSUMED; - sleep = devicePeriod; - /* - * No writing from the input Buffer into srcBuffer - * has occurred so it is possible that the writing - * to the render endpoint buffer is malfunctioning. - */ - if (writeIsMalfunctioningSince - == DiagnosticsControl.NEVER) - setWriteIsMalfunctioning(true); - } - } - } - else - { - /* - * There is available space in the rendering endpoint - * buffer into which this Renderer can write data. 
- */ - int effectiveLength = srcBufferLength + length; - int toWrite - = Math.min( - effectiveLength, - numFramesRequested * srcFrameSize); - byte[] effectiveData; - int effectiveOffset; - - if (srcBufferLength > 0) - { - /* - * There is remainder/residue from earlier invocations - * of the method. This Renderer will feed - * iAudioRenderClient from srcBuffer. - */ - effectiveData = srcBuffer; - effectiveOffset = 0; - - int toCopy = toWrite - srcBufferLength; - - if (toCopy <= 0) - ret |= INPUT_BUFFER_NOT_CONSUMED; - else - { - if (toCopy > length) - toCopy = length; - System.arraycopy( - data, offset, - srcBuffer, srcBufferLength, - toCopy); - srcBufferLength += toCopy; - - if (toWrite > srcBufferLength) - toWrite = srcBufferLength; - - if (length > toCopy) - { - buffer.setLength(length - toCopy); - buffer.setOffset(offset + toCopy); - ret |= INPUT_BUFFER_NOT_CONSUMED; - } - } - } - else - { - /* - * There is no remainder/residue from earlier - * invocations of the method. This Renderer will feed - * iAudioRenderClient from data. - */ - effectiveData = data; - effectiveOffset = offset; - } - - int written; - - if ((toWrite / srcFrameSize) == 0) - written = 0; - else - { - /* - * Take into account the user's preferences with respect - * to the output volume. - */ - GainControl gainControl = getGainControl(); - - if (gainControl != null) - { - BasicVolumeControl.applyGain( - gainControl, - effectiveData, effectiveOffset, toWrite); - } - - try - { - written - = IAudioRenderClient_Write( - iAudioRenderClient, - effectiveData, effectiveOffset, toWrite, - srcSampleSize, srcChannels, - dstSampleSize, dstChannels); - } - catch (HResultException hre) - { - written = 0; - ret = BUFFER_PROCESSED_FAILED; - logger.error("IAudioRenderClient_Write", hre); - } - } - if (ret != BUFFER_PROCESSED_FAILED) - { - if (effectiveData == data) - { - // We have consumed frames from data. 
- if (written == 0) - { - /* - * The available number of frames appear to be - * too few for IAudioRenderClient to accept. - * They will have to be prepended to the next - * input Buffer. - */ - System.arraycopy( - data, offset, - srcBuffer, srcBufferLength, - toWrite); - srcBufferLength += toWrite; - written = toWrite; - } - if (length > written) - { - buffer.setLength(length - written); - buffer.setOffset(offset + written); - ret |= INPUT_BUFFER_NOT_CONSUMED; - } - } - else if (written > 0) - { - // We have consumed frames from srcBuffer. - popFromSrcBuffer(written); - } - - if (writeIsMalfunctioningSince - != DiagnosticsControl.NEVER) - setWriteIsMalfunctioning(false); - } - } - - /* - * If the writing to the render endpoint buffer is - * malfunctioning, fail the processing of the input Buffer in - * order to avoid blocking of the Codec chain. - */ - if (((ret & INPUT_BUFFER_NOT_CONSUMED) - == INPUT_BUFFER_NOT_CONSUMED) - && (writeIsMalfunctioningSince - != DiagnosticsControl.NEVER)) - { - long writeIsMalfunctioningDuration - = System.currentTimeMillis() - - writeIsMalfunctioningSince; - - if (writeIsMalfunctioningDuration - > writeIsMalfunctioningTimeout) - { - /* - * The writing to the render endpoint buffer has taken - * too long so whatever is in srcBuffer is surely - * out-of-date. - */ - srcBufferLength = 0; - ret = BUFFER_PROCESSED_FAILED; - logger.warn( - "Audio endpoint device appears to be" - + " malfunctioning: " - + getLocator()); - } - } - } - } - finally - { - synchronized (this) - { - busy = false; - notifyAll(); - } - } - /* - * If there was no available space in the rendering endpoint buffer, we - * will want to wait a bit for such space to be made available. - */ - if (((ret & INPUT_BUFFER_NOT_CONSUMED) == INPUT_BUFFER_NOT_CONSUMED) - && (sleep > 0)) - { - boolean interrupted = false; - - synchronized (this) - { - /* - * Spurious wake-ups should not be a big issue here. 
While this - * Renderer may check for available space in the rendering - * endpoint buffer more often than practically necessary (which - * may very well classify as a case of performance loss), the - * ability to unblock this Renderer is considered more - * important. - */ - try - { - wait(sleep); - } - catch (InterruptedException ie) - { - interrupted = true; - } - } - if (interrupted) - Thread.currentThread().interrupt(); - } - return ret; - } - - /** - * Runs/executes in the thread associated with a specific Runnable - * initialized to wait for {@link #eventHandle} to be signaled. - * - * @param eventHandleCmd the Runnable which has been initialized to - * wait for eventHandle to be signaled and in whose associated - * thread the method is invoked - */ - private void runInEventHandleCmd(Runnable eventHandleCmd) - { - try - { - useAudioThreadPriority(); - - do - { - long eventHandle; - - synchronized (this) - { - /* - * Does this WASAPIRender still want eventHandleCmd to - * execute? - */ - if (!eventHandleCmd.equals(this.eventHandleCmd)) - break; - // Is this WASAPIRenderer still opened and started? - if ((iAudioClient == 0) - || (iAudioRenderClient == 0) - || !started) - break; - - /* - * The value of eventHandle will remain valid while this - * WASAPIRenderer wants eventHandleCmd to execute. - */ - eventHandle = this.eventHandle; - if (eventHandle == 0) - throw new IllegalStateException("eventHandle"); - - waitWhileBusy(); - busy = true; - } - try - { - int numPaddingFrames; - - try - { - numPaddingFrames - = IAudioClient_GetCurrentPadding(iAudioClient); - } - catch (HResultException hre) - { - numPaddingFrames = numBufferFrames; - logger.error("IAudioClient_GetCurrentPadding", hre); - } - - int numFramesRequested = numBufferFrames - numPaddingFrames; - - /* - * If there is no available space in the rendering endpoint - * buffer, wait for the system to signal when an audio - * buffer is ready to be processed by the client. 
- */ - if (numFramesRequested > 0) - { - byte[] buf; - int bufChannels; - int bufFrameSize; - int bufLength; - int bufSampleSize; - - if (resampler == null) - { - buf = srcBuffer; - bufChannels = srcChannels; - bufFrameSize = srcFrameSize; - bufLength = srcBufferLength; - bufSampleSize = srcSampleSize; - } - else - { - /* - * The units of srcBuffer are based on srcFormat, - * the units of numFramesRequested are based on - * dstFormat. - */ - maybeResample(numFramesRequested); - - buf = resamplerData; - bufChannels = resamplerChannels; - bufFrameSize = resamplerFrameSize; - bufLength = resamplerOutBuffer.getLength(); - bufSampleSize = resamplerSampleSize; - } - - /* - * Write as much from buf as possible while minimizing - * the risk of audio glitches and the amount of - * artificial/induced silence. - */ - int bufFrames = bufLength / bufFrameSize; - - if ((numFramesRequested > bufFrames) - && (bufFrames >= devicePeriodInFrames)) - numFramesRequested = bufFrames; - - // Pad with silence in order to avoid underflows. - int toWrite = numFramesRequested * bufFrameSize; - - if (toWrite > buf.length) - toWrite = buf.length; - - int silence = toWrite - bufLength; - - if (silence > 0) - { - Arrays.fill(buf, bufLength, toWrite, (byte) 0); - bufLength = toWrite; - } - - /* - * Take into account the user's preferences with respect - * to the output volume. 
- */ - GainControl gainControl = getGainControl(); - - if ((gainControl != null) && (toWrite != 0)) - { - BasicVolumeControl.applyGain( - gainControl, - buf, 0, toWrite); - } - - int written - = maybeIAudioRenderClientWrite( - buf, 0, toWrite, - bufSampleSize, bufChannels); - - if (written != 0) - { - bufLength = pop(buf, bufLength, written); - if (buf == srcBuffer) - srcBufferLength = bufLength; - else - resamplerOutBuffer.setLength(bufLength); - - if (writeIsMalfunctioningSince - != DiagnosticsControl.NEVER) - setWriteIsMalfunctioning(false); - } - } - } - finally - { - synchronized (this) - { - busy = false; - notifyAll(); - } - } - - int wfso; - - try - { - wfso = WaitForSingleObject(eventHandle, devicePeriod); - } - catch (HResultException hre) - { - /* - * WaitForSingleObject will throw HResultException only in - * the case of WAIT_FAILED. Event if it didn't, it would - * still be a failure from our point of view. - */ - wfso = WAIT_FAILED; - logger.error("WaitForSingleObject", hre); - } - /* - * If the function WaitForSingleObject fails once, it will very - * likely fail forever. Bail out of a possible busy wait. - */ - if ((wfso == WAIT_FAILED) || (wfso == WAIT_ABANDONED)) - break; - } - while (true); - } - finally - { - synchronized (this) - { - if (eventHandleCmd.equals(this.eventHandleCmd)) - { - this.eventHandleCmd = null; - notifyAll(); - } - } - } - } - - /** - * {@inheritDoc} - * - * Disallows mid-stream changes of the inputFormat of this - * AbstractRenderer. - */ - @Override - public synchronized Format setInputFormat(Format format) - { - /* - * WASAPIRenderer does not support mid-stream changes of the - * inputFormat. - */ - if ((iAudioClient != 0) || (iAudioRenderClient != 0)) - return null; - else - return super.setInputFormat(format); - } - - /** - * Indicates whether the writing to the render endpoint buffer is - * malfunctioning. Keeps track of the time at which the malfunction has - * started. 
- * - * @param writeIsMalfunctioning true if the writing to the render - * endpoint buffer is (believed to be) malfunctioning; otherwise, - * false - */ - private void setWriteIsMalfunctioning(boolean writeIsMalfunctioning) - { - if (writeIsMalfunctioning) - { - if (writeIsMalfunctioningSince == DiagnosticsControl.NEVER) - writeIsMalfunctioningSince = System.currentTimeMillis(); - } - else - writeIsMalfunctioningSince = DiagnosticsControl.NEVER; - } - - /** - * {@inheritDoc} - */ - public synchronized void start() - { - if (iAudioClient == 0) - { - /* - * We actually want to allow the user to switch the playback and/or - * notify device to none mid-stream in order to disable the - * playback. - */ - if (locatorIsNull) - started = true; - } - else - { - waitWhileBusy(); - waitWhileEventHandleCmd(); - - /* - * Introduce latency in order to decrease the likelihood of - * underflow. - */ - if (srcBuffer != null) - { - if (srcBufferLength > 0) - { - /* - * Shift the valid audio data to the end of srcBuffer so - * that silence can be written at the beginning. - */ - for (int i = srcBuffer.length - 1, j = srcBufferLength - 1; - j >= 0; - i--, j--) - { - srcBuffer[i] = srcBuffer[j]; - } - } - else if (srcBufferLength < 0) - srcBufferLength = 0; - - /* - * If there is valid audio data in srcBuffer, it has been - * shifted to the end to make room for silence at the beginning. 
- */ - int silence = srcBuffer.length - srcBufferLength; - - if (silence > 0) - Arrays.fill(srcBuffer, 0, silence, (byte) 0); - srcBufferLength = srcBuffer.length; - } - - try - { - IAudioClient_Start(iAudioClient); - started = true; - - if ((eventHandle != 0) && (this.eventHandleCmd == null)) - { - Runnable eventHandleCmd - = new Runnable() - { - public void run() - { - runInEventHandleCmd(this); - } - }; - boolean submitted = false; - - try - { - if (eventHandleExecutor == null) - { - eventHandleExecutor - = Executors.newSingleThreadExecutor(); - } - - this.eventHandleCmd = eventHandleCmd; - eventHandleExecutor.execute(eventHandleCmd); - submitted = true; - } - finally - { - if (!submitted - && eventHandleCmd.equals(this.eventHandleCmd)) - this.eventHandleCmd = null; - } - } - } - catch (HResultException hre) - { - /* - * If IAudioClient_Start is invoked multiple times without - * intervening IAudioClient_Stop, it will likely return/throw - * AUDCLNT_E_NOT_STOPPED. - */ - if (hre.getHResult() != AUDCLNT_E_NOT_STOPPED) - logger.error("IAudioClient_Start", hre); - } - } - } - - /** - * {@inheritDoc} - */ - public synchronized void stop() - { - if (iAudioClient == 0) - { - /* - * We actually want to allow the user to switch the playback and/or - * notify device to none mid-stream in order to disable the - * playback. - */ - if (locatorIsNull) - started = false; - } - else - { - waitWhileBusy(); - - try - { - /* - * If IAudioClient_Stop is invoked multiple times without - * intervening IAudioClient_Start, it is documented to return - * S_FALSE. - */ - IAudioClient_Stop(iAudioClient); - started = false; - - waitWhileEventHandleCmd(); - - writeIsMalfunctioningSince = DiagnosticsControl.NEVER; - } - catch (HResultException hre) - { - logger.error("IAudioClient_Stop", hre); - } - } - } - - /** - * Gets a human-readable representation of a specific MediaLocator - * for the purposes of testing/debugging. 
- * - * @param locator the MediaLocator that is to be represented in a - * human-readable form for the purposes of testing/debugging - * @return a human-readable representation of the specified locator - * for the purposes of testing/debugging - */ - private String toString(MediaLocator locator) - { - String s; - - if (locator == null) - s = "null"; - else - { - s = null; - /* - * Try to not throw any exceptions because the purpose is to produce - * at least some identification of the specified MediaLocator even - * if not the most complete. - */ - try - { - String id = locator.getRemainder(); - - if (id != null) - { - CaptureDeviceInfo2 cdi2 - = audioSystem.getDevice(dataFlow, locator); - - if (cdi2 != null) - { - String name = cdi2.getName(); - - if ((name != null) && !id.equals(name)) - s = id + " with friendly name " + name; - } - if (s == null) - s = id; - } - } - catch (Throwable t) - { - if (t instanceof InterruptedException) - Thread.currentThread().interrupt(); - else if (t instanceof ThreadDeath) - throw (ThreadDeath) t; - } - if (s == null) - s = locator.toString(); - } - return s; - } - - /** - * Waits on this instance while the value of {@link #busy} is equal to - * true. - */ - private synchronized void waitWhileBusy() - { - boolean interrupted = false; - - while (busy) - { - try - { - wait(devicePeriod); - } - catch (InterruptedException ie) - { - interrupted = true; - } - } - if (interrupted) - Thread.currentThread().interrupt(); - } - - /** - * Waits on this instance while the value of {@link #eventHandleCmd} is - * non-null. 
- */ - private synchronized void waitWhileEventHandleCmd() - { - if (eventHandle == 0) - throw new IllegalStateException("eventHandle"); - - boolean interrupted = false; - - while (eventHandleCmd != null) - { - try - { - wait(devicePeriod); - } - catch (InterruptedException ie) - { - interrupted = true; - } - } - if (interrupted) - Thread.currentThread().interrupt(); - } -} +package org.jitsi.impl.neomedia.jmfext.media.renderer.audio; + +import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*; + +import java.beans.*; +import java.lang.reflect.*; +import java.util.*; +import java.util.concurrent.*; + +import javax.media.*; +import javax.media.format.*; + +import org.jitsi.impl.neomedia.*; +import org.jitsi.impl.neomedia.control.*; +import org.jitsi.impl.neomedia.device.*; +import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*; +import org.jitsi.service.neomedia.*; +import org.jitsi.service.neomedia.codec.*; +import org.jitsi.util.*; + +/** + * Implements an audio Renderer using Windows Audio Session API + * (WASAPI) and related Core Audio APIs such as Multimedia Device (MMDevice) + * API. + * + * @author Lyubomir Marinov + */ +public class WASAPIRenderer + extends AbstractAudioRenderer +{ + /** + * The Logger used by the WASAPIRenderer class and its + * instances to log debug information. + */ + private static final Logger logger = Logger.getLogger(WASAPIRenderer.class); + + /** + * The human-readable name of the WASAPIRenderer PlugIn + * implementation instances. + */ + private static final String PLUGIN_NAME + = "Windows Audio Session API (WASAPI) Renderer"; + + /** + * Finds the first non-null element in a specific array of + * AudioFormats. 
+ * + * @param formats the array of AudioFormats in which the first + * non-null element is to be found + * @return the first non-null element in formats if any; + * otherwise, null + */ + private static AudioFormat findFirst(AudioFormat[] formats) + { + AudioFormat format = null; + + for (AudioFormat aFormat : formats) + { + if (aFormat != null) + { + format = aFormat; + break; + } + } + return format; + } + + /** + * Attempts to initialize and open a new Codec to resample media + * data from a specific input AudioFormat into a specific output + * AudioFormat. If no suitable resampler is found, returns + * null. If a suitable resampler is found but its initialization or + * opening fails, logs and swallows any Throwable and returns + * null. + * + * @param inFormat the AudioFormat in which the new instance is to + * input media data + * @param outFormat the AudioFormat in which the new instance is to + * output media data + * @return a new Codec which is able to resample media data from + * the specified inFormat into the specified outFormat if + * such a resampler could be found, initialized and opened; otherwise, + * null + */ + public static Codec maybeOpenResampler( + AudioFormat inFormat, + AudioFormat outFormat) + { + @SuppressWarnings("unchecked") + List classNames + = PlugInManager.getPlugInList( + inFormat, + outFormat, + PlugInManager.CODEC); + Codec resampler = null; + + if (classNames != null) + { + for (String className : classNames) + { + try + { + Codec codec + = (Codec) Class.forName(className).newInstance(); + Format setInput = codec.setInputFormat(inFormat); + + if ((setInput != null) && inFormat.matches(setInput)) + { + Format setOutput = codec.setOutputFormat(outFormat); + + if ((setOutput != null) && outFormat.matches(setOutput)) + { + codec.open(); + resampler = codec; + break; + } + } + } + catch (Throwable t) + { + if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.warn( + "Failed to open resampler " + className, + 
t); + } + } + } + } + return resampler; + } + + /** + * Pops a specific number of bytes from (the head of) a specific array of + * bytes. + * + * @param array the array of byte from which the specified number + * of bytes are to be popped + * @param arrayLength the number of elements in array which contain + * valid data + * @param length the number of bytes to be popped from array + * @return the number of elements in array which contain valid data + * after the specified number of bytes have been popped from it + */ + public static int pop(byte[] array, int arrayLength, int length) + { + if (length < 0) + throw new IllegalArgumentException("length"); + if (length == 0) + return arrayLength; + + int newArrayLength = arrayLength - length; + + if (newArrayLength > 0) + { + for (int i = 0, j = length; i < newArrayLength; i++, j++) + array[i] = array[j]; + } + else + newArrayLength = 0; + return newArrayLength; + } + + /** + * The duration in milliseconds of the endpoint buffer. + */ + private long bufferDuration; + + /** + * The indicator which determines whether the audio stream represented by + * this instance, {@link #iAudioClient} and {@link #iAudioRenderClient} is + * busy and, consequently, its state should not be modified. For example, + * the audio stream is busy during the execution of + * {@link #process(Buffer)}. + */ + private boolean busy; + + /** + * The length in milliseconds of the interval between successive, periodic + * processing passes by the audio engine on the data in the endpoint buffer. + */ + private long devicePeriod = WASAPISystem.DEFAULT_DEVICE_PERIOD; + + /** + * The value of {@link #devicePeriod} expressed in terms of numbers of + * frames (i.e. takes the sample rate into account). + */ + private int devicePeriodInFrames; + + /** + * The number of channels with which {@link #iAudioClient} has been + * initialized. + */ + private int dstChannels; + + /** + * The AudioFormat with which {@link #iAudioClient} has been + * initialized. 
+ */ + private AudioFormat dstFormat; + + /** + * The sample size in bytes with which {@link #iAudioClient} has been + * initialized. + */ + private int dstSampleSize; + + /** + * The event handle that the system signals when an audio buffer is ready to + * be processed by the client. + */ + private long eventHandle; + + /** + * The Runnable which is scheduled by this WASAPIRenderer + * and executed by {@link #eventHandleExecutor} and waits for + * {@link #eventHandle} to be signaled. + */ + private Runnable eventHandleCmd; + + /** + * The Executor implementation which is to execute + * {@link #eventHandleCmd}. + */ + private Executor eventHandleExecutor; + + /** + * The WASAPI IAudioClient instance which enables this + * Renderer to create and initialize an audio stream between this + * Renderer and the audio engine of the associated audio endpoint + * device. + */ + private long iAudioClient; + + /** + * The WASAPI IAudioRenderClient obtained from + * {@link #iAudioClient} which enables this Renderer to write + * output data to the rendering endpoint buffer. + */ + private long iAudioRenderClient; + + /** + * The indicator which determines whether the value of the locator + * property of this instance was equal to null when this Renderer + * was opened. Indicates that this Renderer should successfully + * process media data without actually rendering to any render endpoint + * device. + */ + private boolean locatorIsNull; + + /** + * The maximum capacity in frames of the endpoint buffer. + */ + private int numBufferFrames; + + /** + * The Codec which resamples the media provided to this + * Renderer via {@link #process(Buffer)} into {@link #dstFormat} + * if necessary. + */ + private Codec resampler; + + /** + * The number of channels of the audio signal output by {@link #resampler}. + * It may differ from {@link #dstChannels}. 
+ */ + private int resamplerChannels; + + /** + * The data which has remained unwritten during earlier invocations of + * {@link #runInEventHandleCmd(Runnable)} because it represents frames which + * are few enough to be accepted on their own for writing by + * {@link #iAudioRenderClient}. + */ + private byte[] resamplerData; + + /** + * The size in bytes of an audio frame produced by {@link #resampler}. Based + * on {@link #resamplerChannels} and {@link #resamplerSampleSize} and cached + * in order to reduce calculations. + */ + private int resamplerFrameSize; + + /** + * The Buffer which provides the input to {@link #resampler}. + * Represents a unit of {@link #srcBuffer} to be processed in a single call + * to resampler. + */ + private Buffer resamplerInBuffer; + + /** + * The Buffer which receives the output of {@link #resampler}. + */ + private Buffer resamplerOutBuffer; + + /** + * The size in bytes of an audio sample produced by {@link #resampler}. + */ + private int resamplerSampleSize; + + /** + * The data which has remained unwritten during earlier invocations of + * {@link #process(Buffer)} because it represents frames which are few + * enough to be accepted on their own for writing by + * {@link #iAudioRenderClient}. + */ + private byte[] srcBuffer; + + /** + * The number of bytes in {@link #srcBuffer} which represent valid audio + * data to be written by {@link #iAudioRenderClient}. + */ + private int srcBufferLength; + + /** + * The number of channels which which this Renderer has been + * opened. + */ + private int srcChannels; + + /** + * The AudioFormat with which this Renderer has been + * opened. + */ + private AudioFormat srcFormat; + + /** + * The frame size in bytes with which this Renderer has been + * opened. It is the product of {@link #srcSampleSize} and + * {@link #srcChannels}. + */ + private int srcFrameSize; + + /** + * The sample size in bytes with which this Renderer has been + * opened. 
+ */ + private int srcSampleSize; + + /** + * The indicator which determines whether this Renderer is started + * i.e. there has been a successful invocation of {@link #start()} without + * an intervening invocation of {@link #stop()}. + */ + private boolean started; + + /** + * The time in milliseconds at which the writing to the render endpoint + * buffer has started malfunctioning. For example, {@link #srcBuffer} being + * full from the point of view of {@link #process(Buffer)} for an extended + * period of time may indicate abnormal functioning. + */ + private long writeIsMalfunctioningSince = DiagnosticsControl.NEVER; + + /** + * The maximum interval of time in milliseconds that the writing to the + * render endpoint buffer is allowed to be under suspicion that it is + * malfunctioning. If it remains under suspicion after the maximum interval + * of time has elapsed, the writing to the render endpoint buffer is to be + * considered malfunctioning for real. + */ + private long writeIsMalfunctioningTimeout; + + /** + * Initializes a new WASAPIRenderer instance which is to perform + * playback (as opposed to sound a notification). + */ + public WASAPIRenderer() + { + this(AudioSystem.DataFlow.PLAYBACK); + } + + /** + * Initializes a new WASAPIRenderer instance which is to either + * perform playback or sound a notification. + * + * @param dataFlow {@link AudioSystem.DataFlow#PLAYBACK} if the new instance + * is to perform playback or {@link AudioSystem.DataFlow#NOTIFY} if the new + * instance is to sound a notification + */ + public WASAPIRenderer(AudioSystem.DataFlow dataFlow) + { + super(AudioSystem.LOCATOR_PROTOCOL_WASAPI, dataFlow); + } + + /** + * Initializes a new WASAPIRenderer instance which is to either + * perform playback or sound a notification. + * + * @param playback true if the new instance is to perform playback + * or false if the new instance is to sound a notification + */ + public WASAPIRenderer(boolean playback) + { + this( + playback + ? 
AudioSystem.DataFlow.PLAYBACK + : AudioSystem.DataFlow.NOTIFY); + } + + /** + * {@inheritDoc} + */ + @Override + public synchronized void close() + { + try + { + stop(); + } + finally + { + if (iAudioRenderClient != 0) + { + IAudioRenderClient_Release(iAudioRenderClient); + iAudioRenderClient = 0; + } + if (iAudioClient != 0) + { + IAudioClient_Release(iAudioClient); + iAudioClient = 0; + } + if (eventHandle != 0) + { + try + { + CloseHandle(eventHandle); + } + catch (HResultException hre) + { + // The event HANDLE will be leaked. + logger.warn("Failed to close event HANDLE.", hre); + } + eventHandle = 0; + } + maybeCloseResampler(); + + dstFormat = null; + locatorIsNull = false; + srcBuffer = null; + srcBufferLength = 0; + srcFormat = null; + started = false; + + super.close(); + } + } + + /** + * Gets an array of alternative AudioFormats based on + * inputFormat with which an attempt is to be made to initialize a + * new IAudioClient instance. + * + * @return an array of alternative AudioFormats based on + * inputFormat with which an attempt is to be made to initialize a + * new IAudioClient instance + */ + private AudioFormat[] getFormatsToInitializeIAudioClient() + { + AudioFormat inputFormat = this.inputFormat; + + if (inputFormat == null) + throw new NullPointerException("No inputFormat set."); + else + { + /* + * Prefer to initialize the IAudioClient with an AudioFormat which + * matches the inputFormat as closely as possible. + */ + AudioFormat[] preferredFormats + = WASAPISystem.getFormatsToInitializeIAudioClient(inputFormat); + // Otherwise, any supported Format will do. 
+ Format[] supportedFormats = getSupportedInputFormats(); + List formats + = new ArrayList( + preferredFormats.length + supportedFormats.length); + + for (AudioFormat format : preferredFormats) + { + if (!formats.contains(format)) + formats.add(format); + } + for (Format format : supportedFormats) + { + if (!formats.contains(format) + && (format instanceof AudioFormat)) + { + formats.add((AudioFormat) format); + } + } + + /* + * Resampling isn't very cool. Moreover, resampling between sample + * rates with a non-integer quotient may result in audio glitches. + * Try to minimize the risks of having to use any of these two when + * unnecessary. + */ + final int sampleRate = (int) inputFormat.getSampleRate(); + + if (sampleRate != Format.NOT_SPECIFIED) + { + Collections.sort( + formats, + new Comparator() + { + @Override + public int compare(AudioFormat af1, AudioFormat af2) + { + int d1 = computeSampleRateDistance(af1); + int d2 = computeSampleRateDistance(af2); + + return (d1 < d2) ? -1 : (d1 == d2) ? 0 : 1; + } + + private int computeSampleRateDistance( + AudioFormat af) + { + int sr = (int) af.getSampleRate(); + + if (sr == Format.NOT_SPECIFIED) + return Integer.MAX_VALUE; + else if (sr == sampleRate) + return 0; + + int min, max; + boolean downsample; + + if (sr < sampleRate) + { + min = sr; + max = sampleRate; + downsample = true; + } + else + { + min = sampleRate; + max = sr; + downsample = false; + } + if (min == 0) + return Integer.MAX_VALUE; + else + { + int h = max % min; + int l = max / min; + + /* + * Prefer AudioFormats which will cause + * upsampling to AudioFormats which will + * cause downsampling. 
+ */ + if (downsample) + { + l = Short.MAX_VALUE - l; + if (h != 0) + h = Short.MAX_VALUE - h; + } + + return (h << 16) | l; + } + } + }); + } + + return formats.toArray(new AudioFormat[formats.size()]); + } + } + + /** + * {@inheritDoc} + */ + public String getName() + { + return PLUGIN_NAME; + } + + /** + * {@inheritDoc} + * + * Overrides the super implementation to handle the case in which the user + * has selected "none" for the playback/notify device. + */ + @Override + public Format[] getSupportedInputFormats() + { + if (getLocator() == null) + { + /* + * XXX We toyed with the idea of calculating a list of common + * Formats supported by all devices (of the dataFlow of this + * AbstractAudioRenderer, of course) but that turned out to be + * monstrous in code, inefficient at least in terms of garbage + * collection and with questionable suitability. The following + * approach will likely have a comparable suitability with better + * efficiency achieved code that is easier to understand. + */ + + /* + * The maximums supported by the WASAPI integration at the time of + * this writing. + */ + double sampleRate = MediaUtils.MAX_AUDIO_SAMPLE_RATE; + int sampleSizeInBits = 16; + int channels = 2; + + if ((sampleRate == Format.NOT_SPECIFIED) + && (Constants.AUDIO_SAMPLE_RATES.length != 0)) + sampleRate = Constants.AUDIO_SAMPLE_RATES[0]; + return + WASAPISystem.getFormatsToInitializeIAudioClient( + new AudioFormat( + AudioFormat.LINEAR, + sampleRate, + sampleSizeInBits, + channels, + AudioFormat.LITTLE_ENDIAN, + AudioFormat.SIGNED, + /* frameSizeInBits */ Format.NOT_SPECIFIED, + /* frameRate */ Format.NOT_SPECIFIED, + Format.byteArray)); + } + else + return super.getSupportedInputFormats(); + } + + /** + * Closes {@link #resampler} if it is non-null. 
+ */ + private void maybeCloseResampler() + { + Codec resampler = this.resampler; + + if (resampler != null) + { + this.resampler = null; + resamplerData = null; + resamplerInBuffer = null; + resamplerOutBuffer = null; + + try + { + resampler.close(); + } + catch (Throwable t) + { + if (t instanceof InterruptedException) + Thread.currentThread().interrupt(); + else if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + logger.error("Failed to close resampler.", t); + } + } + } + + /** + * Invokes WASAPI.IAudioRenderClient_Write on + * {@link #iAudioRenderClient} and logs and swallows any + * HResultException. + * + * @param data the bytes of the audio samples to be written into the render + * endpoint buffer + * @param offset the offset in data at which the bytes of the audio + * samples to be written into the render endpoint buffer begin + * @param length the number of the bytes in data beginning at + * offset of the audio samples to be written into the render + * endpoint buffer + * @param srcSampleSize the size in bytes of an audio sample in + * data + * @param srcChannels the number of channels of the audio signal provided in + * data + * @return the number of bytes from data (starting at + * offset) which have been written into the render endpoint buffer + * or 0 upon HResultException + */ + private int maybeIAudioRenderClientWrite( + byte[] data, int offset, int length, + int srcSampleSize, int srcChannels) + { + int written; + + try + { + written + = IAudioRenderClient_Write( + iAudioRenderClient, + data, offset, length, + srcSampleSize, srcChannels, + dstSampleSize, dstChannels); + } + catch (HResultException hre) + { + written = 0; + logger.error("IAudioRenderClient_Write", hre); + } + return written; + } + + /** + * Initializes and opens a new instance of {@link #resampler} if the + * Format-related state of this instance deems its existence + * necessary. 
+ */ + private void maybeOpenResampler() + { + AudioFormat inFormat = this.inputFormat; + AudioFormat outFormat = this.dstFormat; + + // We are able to translate between mono and stereo. + if ((inFormat.getSampleRate() == outFormat.getSampleRate()) + && (inFormat.getSampleSizeInBits() + == outFormat.getSampleSizeInBits())) + return; + + // The resamplers are not expected to convert between mono and stereo. + int channels = inFormat.getChannels(); + + if (outFormat.getChannels() != channels) + { + outFormat + = new AudioFormat( + outFormat.getEncoding(), + outFormat.getSampleRate(), + outFormat.getSampleSizeInBits(), + channels, + outFormat.getEndian(), + outFormat.getSigned(), + /* frameSizeInBits */ Format.NOT_SPECIFIED, + /* frameRate */ Format.NOT_SPECIFIED, + outFormat.getDataType()); + } + + Codec resampler = maybeOpenResampler(inFormat, outFormat); + + if (resampler == null) + { + throw new IllegalStateException( + "Failed to open a codec to resample [" + inFormat + + "] into [" + outFormat + "]."); + } + else + { + this.resampler = resampler; + + resamplerInBuffer = new Buffer(); + resamplerInBuffer.setFormat(inFormat); + + resamplerChannels = outFormat.getChannels(); + resamplerSampleSize = WASAPISystem.getSampleSizeInBytes(outFormat); + resamplerFrameSize = resamplerChannels * resamplerSampleSize; + + resamplerData = new byte[numBufferFrames * resamplerFrameSize]; + + resamplerOutBuffer = new Buffer(); + resamplerOutBuffer.setData(resamplerData); + resamplerOutBuffer.setLength(0); + resamplerOutBuffer.setOffset(0); + } + } + + /** + * Processes audio samples from {@link #srcBuffer} through + * {@link #resampler} i.e. resamples them in order to produce media data + * in {@link #resamplerData} to be written into the render endpoint buffer. 
+ * + * @param numFramesRequested the number of audio frames in the units of + * {@link #dstFormat} requested by the rendering endpoint + */ + private void maybeResample(int numFramesRequested) + { + int outLength = resamplerOutBuffer.getLength(); + + /* + * Do not resample if there is enough resampled audio to satisfy the + * request of the rendering endpoint buffer. + */ + if (outLength < numFramesRequested * resamplerFrameSize) + { + // Sample rate conversions work on audio frames, not on bytes. + int outFrames + = (resamplerData.length - outLength) / resamplerFrameSize; + + if (outFrames > 0) + { + /* + * Predict how many bytes will be consumed from the input during + * the sample rate conversion. + */ + int srcSampleRate = (int) srcFormat.getSampleRate(); + int dstSampleRate = (int) dstFormat.getSampleRate(); + int inLength + = (outFrames * srcSampleRate / dstSampleRate) + * srcFrameSize; + + if (inLength > srcBuffer.length) + inLength = srcBuffer.length; + if (inLength > srcBufferLength) + inLength = srcBufferLength; + if (inLength > 0) + { + int resampled; + + resamplerOutBuffer.setLength(0); + resamplerOutBuffer.setOffset(outLength); + try + { + resamplerOutBuffer.setDiscard(false); + resamplerInBuffer.setLength(inLength); + resamplerInBuffer.setOffset(0); + + resampler.process( + resamplerInBuffer, + resamplerOutBuffer); + } + finally + { + resampled = resamplerOutBuffer.getLength(); + outLength = resamplerOutBuffer.getOffset() + resampled; + resamplerOutBuffer.setLength(outLength); + resamplerOutBuffer.setOffset(0); + } + + if (resampled > 0) + { + /* + * How many bytes have actually been consumed from the + * input during the sample rate conversion? 
+ */ + resampled + = ((resampled / resamplerFrameSize) + * srcSampleRate + / dstSampleRate) + * srcFrameSize; + if (resampled > 0) + popFromSrcBuffer(resampled); + } + } + } + } + } + + /** + * {@inheritDoc} + */ + @Override + public synchronized void open() + throws ResourceUnavailableException + { + if (this.iAudioClient != 0) + return; + + MediaLocator locator = null; + + try + { + locator = getLocator(); + if (locatorIsNull = (locator == null)) + { + /* + * We actually want to allow the user to switch the playback + * and/or notify device to none mid-stream in order to disable + * the playback. + */ + } + else + { + + /* + * The method getFormatsToInitializeIAudioClient will assert that + * inputFormat is set. + */ + AudioFormat[] formats = getFormatsToInitializeIAudioClient(); + long eventHandle = CreateEvent(0, false, false, null); + + try + { + long iAudioClient + = audioSystem.initializeIAudioClient( + locator, + dataFlow, + /* streamFlags */ 0, + eventHandle, + WASAPISystem.DEFAULT_BUFFER_DURATION, + formats); + + if (iAudioClient == 0) + { + throw new ResourceUnavailableException( + "Failed to initialize IAudioClient" + + " for MediaLocator " + locator + + " and AudioSystem.DataFlow " + dataFlow); + } + try + { + long iAudioRenderClient + = IAudioClient_GetService( + iAudioClient, + IID_IAudioRenderClient); + + if (iAudioRenderClient == 0) + { + throw new ResourceUnavailableException( + "IAudioClient_GetService" + + "(IID_IAudioRenderClient)"); + } + try + { + srcFormat = this.inputFormat; + dstFormat = findFirst(formats); + + /* + * The value hnsDefaultDevicePeriod is documented to + * specify the default scheduling period for a + * shared-mode stream. 
+ */ + devicePeriod + = IAudioClient_GetDefaultDevicePeriod(iAudioClient) + / 10000L; + numBufferFrames + = IAudioClient_GetBufferSize(iAudioClient); + + int dstSampleRate = (int) dstFormat.getSampleRate(); + + bufferDuration + = numBufferFrames * 1000L / dstSampleRate; + /* + * We will very likely be inefficient if we fail to + * synchronize with the scheduling period of the audio + * engine but we have to make do with what we have. + */ + if (devicePeriod <= 1) + { + devicePeriod = bufferDuration / 2; + if ((devicePeriod + > WASAPISystem.DEFAULT_DEVICE_PERIOD) + || (devicePeriod <= 1)) + devicePeriod + = WASAPISystem.DEFAULT_DEVICE_PERIOD; + } + devicePeriodInFrames + = (int) (devicePeriod * dstSampleRate / 1000L); + + dstChannels = dstFormat.getChannels(); + dstSampleSize + = WASAPISystem.getSampleSizeInBytes(dstFormat); + + maybeOpenResampler(); + + srcChannels = srcFormat.getChannels(); + srcSampleSize + = WASAPISystem.getSampleSizeInBytes(srcFormat); + srcFrameSize = srcSampleSize * srcChannels; + + /* + * The remainder/residue in frames of + * IAudioRenderClient_Write cannot be more than the + * maximum capacity of the endpoint buffer. + */ + int srcBufferCapacityInFrames; + + if (resampler == null) + { + srcBufferCapacityInFrames = numBufferFrames; + } + else + { + /* + * The units of srcBuffer are based on srcFormat, + * the units of numBufferFrames are based on + * dstFormat. + */ + int srcSampleRate = (int) srcFormat.getSampleRate(); + + srcBufferCapacityInFrames + = numBufferFrames + * srcSampleRate + / dstSampleRate; + } + srcBuffer + = new byte[ + srcBufferCapacityInFrames * srcFrameSize]; + if (resamplerInBuffer != null) + resamplerInBuffer.setData(srcBuffer); + + /* + * Introduce latency in order to decrease the likelihood + * of underflow. 
+ */ + srcBufferLength = srcBuffer.length; + + writeIsMalfunctioningSince = DiagnosticsControl.NEVER; + writeIsMalfunctioningTimeout + = 2 * Math.max(bufferDuration, devicePeriod); + + this.eventHandle = eventHandle; + eventHandle = 0; + this.iAudioClient = iAudioClient; + iAudioClient = 0; + this.iAudioRenderClient = iAudioRenderClient; + iAudioRenderClient = 0; + } + finally + { + if (iAudioRenderClient != 0) + IAudioRenderClient_Release(iAudioRenderClient); + } + } + finally + { + if (iAudioClient != 0) + { + IAudioClient_Release(iAudioClient); + maybeCloseResampler(); + } + } + } + finally + { + if (eventHandle != 0) + CloseHandle(eventHandle); + } + + } // The locator of this Renderer is not null. + } + catch (Throwable t) + { + if (t instanceof InterruptedException) + Thread.currentThread().interrupt(); + else if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + else + { + logger.error( + "Failed to open a WASAPIRenderer on audio endpoint" + + " device " + toString(locator), + t); + if (t instanceof ResourceUnavailableException) + throw (ResourceUnavailableException) t; + else + { + ResourceUnavailableException rue + = new ResourceUnavailableException(); + + rue.initCause(t); + throw rue; + } + } + } + + super.open(); + } + + /** + * {@inheritDoc} + */ + @Override + protected synchronized void playbackDevicePropertyChange( + PropertyChangeEvent ev) + { + /* + * Stop, close, re-open and re-start this Renderer (performing whichever + * of these in order to bring it into the same state) in order to + * reflect the change in the selection with respect to the playback or + * notify device. 
+ */ + + waitWhileBusy(); + + boolean open + = ((iAudioClient != 0) && (iAudioRenderClient != 0)) + || locatorIsNull; + + if (open) + { + boolean start = started; + + close(); + + try + { + open(); + } + catch (ResourceUnavailableException rue) + { + throw new UndeclaredThrowableException(rue); + } + if (start) + start(); + } + } + + /** + * Pops a specific number of bytes from {@link #srcBuffer}. For example, + * because such a number of bytes have been read from srcBuffer and + * written into the rendering endpoint buffer. + * + * @param length the number of bytes to pop from srcBuffer + */ + private void popFromSrcBuffer(int length) + { + srcBufferLength = pop(srcBuffer, srcBufferLength, length); + } + + /** + * {@inheritDoc} + */ + public int process(Buffer buffer) + { + int length = buffer.getLength(); + + if (length < 1) + return BUFFER_PROCESSED_OK; + + byte[] data = (byte[]) buffer.getData(); + int offset = buffer.getOffset(); + + synchronized (this) + { + if ((iAudioClient == 0) || (iAudioRenderClient == 0)) + { + /* + * We actually want to allow the user to switch the playback + * and/or notify device to none mid-stream in order to disable + * the playback. + */ + return + locatorIsNull + ? BUFFER_PROCESSED_OK + : BUFFER_PROCESSED_FAILED; + } + else if (!started) + return BUFFER_PROCESSED_FAILED; + else + { + waitWhileBusy(); + busy = true; + } + } + + int ret = BUFFER_PROCESSED_OK; + long sleep = 0; + + try + { + int numPaddingFrames; + + if (eventHandle == 0) + { + try + { + numPaddingFrames + = IAudioClient_GetCurrentPadding(iAudioClient); + } + catch (HResultException hre) + { + numPaddingFrames = 0; + ret = BUFFER_PROCESSED_FAILED; + logger.error("IAudioClient_GetCurrentPadding", hre); + } + } + else + { + /* + * The process method will not write into the rendering endpoint + * buffer, the runInEventHandleCmd method will. 
+ */ + numPaddingFrames = numBufferFrames; + } + if (ret != BUFFER_PROCESSED_FAILED) + { + int numFramesRequested = numBufferFrames - numPaddingFrames; + + if (numFramesRequested == 0) + { + if (eventHandle == 0) + { + /* + * There is NO available space in the rendering endpoint + * buffer into which this Renderer can write data. + */ + ret |= INPUT_BUFFER_NOT_CONSUMED; + sleep = devicePeriod; + /* + * The writing to the render endpoint buffer may or may + * not be malfunctioning, it depends on the interval of + * time that the state remains unchanged. + */ + if (writeIsMalfunctioningSince + == DiagnosticsControl.NEVER) + setWriteIsMalfunctioning(true); + } + else + { + /* + * The process method will write into srcBuffer, the + * runInEventHandleCmd will read from srcBuffer and + * write into the rendering endpoint buffer. + */ + int toCopy = srcBuffer.length - srcBufferLength; + + if (toCopy > 0) + { + if (toCopy > length) + toCopy = length; + System.arraycopy( + data, offset, + srcBuffer, srcBufferLength, + toCopy); + srcBufferLength += toCopy; + + if (length > toCopy) + { + buffer.setLength(length - toCopy); + buffer.setOffset(offset + toCopy); + ret |= INPUT_BUFFER_NOT_CONSUMED; + } + + /* + * Writing from the input Buffer into srcBuffer has + * occurred so it does not look like the writing to + * the render endpoint buffer is malfunctioning. + */ + if (writeIsMalfunctioningSince + != DiagnosticsControl.NEVER) + setWriteIsMalfunctioning(false); + } + else + { + ret |= INPUT_BUFFER_NOT_CONSUMED; + sleep = devicePeriod; + /* + * No writing from the input Buffer into srcBuffer + * has occurred so it is possible that the writing + * to the render endpoint buffer is malfunctioning. + */ + if (writeIsMalfunctioningSince + == DiagnosticsControl.NEVER) + setWriteIsMalfunctioning(true); + } + } + } + else + { + /* + * There is available space in the rendering endpoint + * buffer into which this Renderer can write data. 
+ */ + int effectiveLength = srcBufferLength + length; + int toWrite + = Math.min( + effectiveLength, + numFramesRequested * srcFrameSize); + byte[] effectiveData; + int effectiveOffset; + + if (srcBufferLength > 0) + { + /* + * There is remainder/residue from earlier invocations + * of the method. This Renderer will feed + * iAudioRenderClient from srcBuffer. + */ + effectiveData = srcBuffer; + effectiveOffset = 0; + + int toCopy = toWrite - srcBufferLength; + + if (toCopy <= 0) + ret |= INPUT_BUFFER_NOT_CONSUMED; + else + { + if (toCopy > length) + toCopy = length; + System.arraycopy( + data, offset, + srcBuffer, srcBufferLength, + toCopy); + srcBufferLength += toCopy; + + if (toWrite > srcBufferLength) + toWrite = srcBufferLength; + + if (length > toCopy) + { + buffer.setLength(length - toCopy); + buffer.setOffset(offset + toCopy); + ret |= INPUT_BUFFER_NOT_CONSUMED; + } + } + } + else + { + /* + * There is no remainder/residue from earlier + * invocations of the method. This Renderer will feed + * iAudioRenderClient from data. + */ + effectiveData = data; + effectiveOffset = offset; + } + + int written; + + if ((toWrite / srcFrameSize) == 0) + written = 0; + else + { + /* + * Take into account the user's preferences with respect + * to the output volume. + */ + GainControl gainControl = getGainControl(); + + if (gainControl != null) + { + BasicVolumeControl.applyGain( + gainControl, + effectiveData, effectiveOffset, toWrite); + } + + try + { + written + = IAudioRenderClient_Write( + iAudioRenderClient, + effectiveData, effectiveOffset, toWrite, + srcSampleSize, srcChannels, + dstSampleSize, dstChannels); + } + catch (HResultException hre) + { + written = 0; + ret = BUFFER_PROCESSED_FAILED; + logger.error("IAudioRenderClient_Write", hre); + } + } + if (ret != BUFFER_PROCESSED_FAILED) + { + if (effectiveData == data) + { + // We have consumed frames from data. 
+ if (written == 0) + { + /* + * The available number of frames appear to be + * too few for IAudioRenderClient to accept. + * They will have to be prepended to the next + * input Buffer. + */ + System.arraycopy( + data, offset, + srcBuffer, srcBufferLength, + toWrite); + srcBufferLength += toWrite; + written = toWrite; + } + if (length > written) + { + buffer.setLength(length - written); + buffer.setOffset(offset + written); + ret |= INPUT_BUFFER_NOT_CONSUMED; + } + } + else if (written > 0) + { + // We have consumed frames from srcBuffer. + popFromSrcBuffer(written); + } + + if (writeIsMalfunctioningSince + != DiagnosticsControl.NEVER) + setWriteIsMalfunctioning(false); + } + } + + /* + * If the writing to the render endpoint buffer is + * malfunctioning, fail the processing of the input Buffer in + * order to avoid blocking of the Codec chain. + */ + if (((ret & INPUT_BUFFER_NOT_CONSUMED) + == INPUT_BUFFER_NOT_CONSUMED) + && (writeIsMalfunctioningSince + != DiagnosticsControl.NEVER)) + { + long writeIsMalfunctioningDuration + = System.currentTimeMillis() + - writeIsMalfunctioningSince; + + if (writeIsMalfunctioningDuration + > writeIsMalfunctioningTimeout) + { + /* + * The writing to the render endpoint buffer has taken + * too long so whatever is in srcBuffer is surely + * out-of-date. + */ + srcBufferLength = 0; + ret = BUFFER_PROCESSED_FAILED; + logger.warn( + "Audio endpoint device appears to be" + + " malfunctioning: " + + getLocator()); + } + } + } + } + finally + { + synchronized (this) + { + busy = false; + notifyAll(); + } + } + /* + * If there was no available space in the rendering endpoint buffer, we + * will want to wait a bit for such space to be made available. + */ + if (((ret & INPUT_BUFFER_NOT_CONSUMED) == INPUT_BUFFER_NOT_CONSUMED) + && (sleep > 0)) + { + boolean interrupted = false; + + synchronized (this) + { + /* + * Spurious wake-ups should not be a big issue here. 
While this + * Renderer may check for available space in the rendering + * endpoint buffer more often than practically necessary (which + * may very well classify as a case of performance loss), the + * ability to unblock this Renderer is considered more + * important. + */ + try + { + wait(sleep); + } + catch (InterruptedException ie) + { + interrupted = true; + } + } + if (interrupted) + Thread.currentThread().interrupt(); + } + return ret; + } + + /** + * Runs/executes in the thread associated with a specific Runnable + * initialized to wait for {@link #eventHandle} to be signaled. + * + * @param eventHandleCmd the Runnable which has been initialized to + * wait for eventHandle to be signaled and in whose associated + * thread the method is invoked + */ + private void runInEventHandleCmd(Runnable eventHandleCmd) + { + try + { + useAudioThreadPriority(); + + do + { + long eventHandle; + + synchronized (this) + { + /* + * Does this WASAPIRender still want eventHandleCmd to + * execute? + */ + if (!eventHandleCmd.equals(this.eventHandleCmd)) + break; + // Is this WASAPIRenderer still opened and started? + if ((iAudioClient == 0) + || (iAudioRenderClient == 0) + || !started) + break; + + /* + * The value of eventHandle will remain valid while this + * WASAPIRenderer wants eventHandleCmd to execute. + */ + eventHandle = this.eventHandle; + if (eventHandle == 0) + throw new IllegalStateException("eventHandle"); + + waitWhileBusy(); + busy = true; + } + try + { + int numPaddingFrames; + + try + { + numPaddingFrames + = IAudioClient_GetCurrentPadding(iAudioClient); + } + catch (HResultException hre) + { + numPaddingFrames = numBufferFrames; + logger.error("IAudioClient_GetCurrentPadding", hre); + } + + int numFramesRequested = numBufferFrames - numPaddingFrames; + + /* + * If there is no available space in the rendering endpoint + * buffer, wait for the system to signal when an audio + * buffer is ready to be processed by the client. 
+ */ + if (numFramesRequested > 0) + { + byte[] buf; + int bufChannels; + int bufFrameSize; + int bufLength; + int bufSampleSize; + + if (resampler == null) + { + buf = srcBuffer; + bufChannels = srcChannels; + bufFrameSize = srcFrameSize; + bufLength = srcBufferLength; + bufSampleSize = srcSampleSize; + } + else + { + /* + * The units of srcBuffer are based on srcFormat, + * the units of numFramesRequested are based on + * dstFormat. + */ + maybeResample(numFramesRequested); + + buf = resamplerData; + bufChannels = resamplerChannels; + bufFrameSize = resamplerFrameSize; + bufLength = resamplerOutBuffer.getLength(); + bufSampleSize = resamplerSampleSize; + } + + /* + * Write as much from buf as possible while minimizing + * the risk of audio glitches and the amount of + * artificial/induced silence. + */ + int bufFrames = bufLength / bufFrameSize; + + if ((numFramesRequested > bufFrames) + && (bufFrames >= devicePeriodInFrames)) + numFramesRequested = bufFrames; + + // Pad with silence in order to avoid underflows. + int toWrite = numFramesRequested * bufFrameSize; + + if (toWrite > buf.length) + toWrite = buf.length; + + int silence = toWrite - bufLength; + + if (silence > 0) + { + Arrays.fill(buf, bufLength, toWrite, (byte) 0); + bufLength = toWrite; + } + + /* + * Take into account the user's preferences with respect + * to the output volume. 
+ */ + GainControl gainControl = getGainControl(); + + if ((gainControl != null) && (toWrite != 0)) + { + BasicVolumeControl.applyGain( + gainControl, + buf, 0, toWrite); + } + + int written + = maybeIAudioRenderClientWrite( + buf, 0, toWrite, + bufSampleSize, bufChannels); + + if (written != 0) + { + bufLength = pop(buf, bufLength, written); + if (buf == srcBuffer) + srcBufferLength = bufLength; + else + resamplerOutBuffer.setLength(bufLength); + + if (writeIsMalfunctioningSince + != DiagnosticsControl.NEVER) + setWriteIsMalfunctioning(false); + } + } + } + finally + { + synchronized (this) + { + busy = false; + notifyAll(); + } + } + + int wfso; + + try + { + wfso = WaitForSingleObject(eventHandle, devicePeriod); + } + catch (HResultException hre) + { + /* + * WaitForSingleObject will throw HResultException only in + * the case of WAIT_FAILED. Event if it didn't, it would + * still be a failure from our point of view. + */ + wfso = WAIT_FAILED; + logger.error("WaitForSingleObject", hre); + } + /* + * If the function WaitForSingleObject fails once, it will very + * likely fail forever. Bail out of a possible busy wait. + */ + if ((wfso == WAIT_FAILED) || (wfso == WAIT_ABANDONED)) + break; + } + while (true); + } + finally + { + synchronized (this) + { + if (eventHandleCmd.equals(this.eventHandleCmd)) + { + this.eventHandleCmd = null; + notifyAll(); + } + } + } + } + + /** + * {@inheritDoc} + * + * Disallows mid-stream changes of the inputFormat of this + * AbstractRenderer. + */ + @Override + public synchronized Format setInputFormat(Format format) + { + /* + * WASAPIRenderer does not support mid-stream changes of the + * inputFormat. + */ + if ((iAudioClient != 0) || (iAudioRenderClient != 0)) + return null; + else + return super.setInputFormat(format); + } + + /** + * Indicates whether the writing to the render endpoint buffer is + * malfunctioning. Keeps track of the time at which the malfunction has + * started. 
+ * + * @param writeIsMalfunctioning true if the writing to the render + * endpoint buffer is (believed to be) malfunctioning; otherwise, + * false + */ + private void setWriteIsMalfunctioning(boolean writeIsMalfunctioning) + { + if (writeIsMalfunctioning) + { + if (writeIsMalfunctioningSince == DiagnosticsControl.NEVER) + writeIsMalfunctioningSince = System.currentTimeMillis(); + } + else + writeIsMalfunctioningSince = DiagnosticsControl.NEVER; + } + + /** + * {@inheritDoc} + */ + public synchronized void start() + { + if (iAudioClient == 0) + { + /* + * We actually want to allow the user to switch the playback and/or + * notify device to none mid-stream in order to disable the + * playback. + */ + if (locatorIsNull) + started = true; + } + else + { + waitWhileBusy(); + waitWhileEventHandleCmd(); + + /* + * Introduce latency in order to decrease the likelihood of + * underflow. + */ + if (srcBuffer != null) + { + if (srcBufferLength > 0) + { + /* + * Shift the valid audio data to the end of srcBuffer so + * that silence can be written at the beginning. + */ + for (int i = srcBuffer.length - 1, j = srcBufferLength - 1; + j >= 0; + i--, j--) + { + srcBuffer[i] = srcBuffer[j]; + } + } + else if (srcBufferLength < 0) + srcBufferLength = 0; + + /* + * If there is valid audio data in srcBuffer, it has been + * shifted to the end to make room for silence at the beginning. 
+ */ + int silence = srcBuffer.length - srcBufferLength; + + if (silence > 0) + Arrays.fill(srcBuffer, 0, silence, (byte) 0); + srcBufferLength = srcBuffer.length; + } + + try + { + IAudioClient_Start(iAudioClient); + started = true; + + if ((eventHandle != 0) && (this.eventHandleCmd == null)) + { + Runnable eventHandleCmd + = new Runnable() + { + public void run() + { + runInEventHandleCmd(this); + } + }; + boolean submitted = false; + + try + { + if (eventHandleExecutor == null) + { + eventHandleExecutor + = Executors.newSingleThreadExecutor(); + } + + this.eventHandleCmd = eventHandleCmd; + eventHandleExecutor.execute(eventHandleCmd); + submitted = true; + } + finally + { + if (!submitted + && eventHandleCmd.equals(this.eventHandleCmd)) + this.eventHandleCmd = null; + } + } + } + catch (HResultException hre) + { + /* + * If IAudioClient_Start is invoked multiple times without + * intervening IAudioClient_Stop, it will likely return/throw + * AUDCLNT_E_NOT_STOPPED. + */ + if (hre.getHResult() != AUDCLNT_E_NOT_STOPPED) + logger.error("IAudioClient_Start", hre); + } + } + } + + /** + * {@inheritDoc} + */ + public synchronized void stop() + { + if (iAudioClient == 0) + { + /* + * We actually want to allow the user to switch the playback and/or + * notify device to none mid-stream in order to disable the + * playback. + */ + if (locatorIsNull) + started = false; + } + else + { + waitWhileBusy(); + + try + { + /* + * If IAudioClient_Stop is invoked multiple times without + * intervening IAudioClient_Start, it is documented to return + * S_FALSE. + */ + IAudioClient_Stop(iAudioClient); + started = false; + + waitWhileEventHandleCmd(); + + writeIsMalfunctioningSince = DiagnosticsControl.NEVER; + } + catch (HResultException hre) + { + logger.error("IAudioClient_Stop", hre); + } + } + } + + /** + * Gets a human-readable representation of a specific MediaLocator + * for the purposes of testing/debugging. 
+ * + * @param locator the MediaLocator that is to be represented in a + * human-readable form for the purposes of testing/debugging + * @return a human-readable representation of the specified locator + * for the purposes of testing/debugging + */ + private String toString(MediaLocator locator) + { + String s; + + if (locator == null) + s = "null"; + else + { + s = null; + /* + * Try to not throw any exceptions because the purpose is to produce + * at least some identification of the specified MediaLocator even + * if not the most complete. + */ + try + { + String id = locator.getRemainder(); + + if (id != null) + { + CaptureDeviceInfo2 cdi2 + = audioSystem.getDevice(dataFlow, locator); + + if (cdi2 != null) + { + String name = cdi2.getName(); + + if ((name != null) && !id.equals(name)) + s = id + " with friendly name " + name; + } + if (s == null) + s = id; + } + } + catch (Throwable t) + { + if (t instanceof InterruptedException) + Thread.currentThread().interrupt(); + else if (t instanceof ThreadDeath) + throw (ThreadDeath) t; + } + if (s == null) + s = locator.toString(); + } + return s; + } + + /** + * Waits on this instance while the value of {@link #busy} is equal to + * true. + */ + private synchronized void waitWhileBusy() + { + boolean interrupted = false; + + while (busy) + { + try + { + wait(devicePeriod); + } + catch (InterruptedException ie) + { + interrupted = true; + } + } + if (interrupted) + Thread.currentThread().interrupt(); + } + + /** + * Waits on this instance while the value of {@link #eventHandleCmd} is + * non-null. 
+ */ + private synchronized void waitWhileEventHandleCmd() + { + if (eventHandle == 0) + throw new IllegalStateException("eventHandle"); + + boolean interrupted = false; + + while (eventHandleCmd != null) + { + try + { + wait(devicePeriod); + } + catch (InterruptedException ie) + { + interrupted = true; + } + } + if (interrupted) + Thread.currentThread().interrupt(); + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java index 4bfb74879..303c7d72b 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,343 +13,343 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.renderer.video; - -import java.awt.*; - -import javax.media.*; -import javax.media.format.*; -import javax.media.renderer.*; -import javax.swing.*; - -import org.jitsi.impl.neomedia.codec.video.*; -import org.jitsi.impl.neomedia.jmfext.media.renderer.*; -import org.jitsi.util.*; -import org.jitsi.util.swing.*; - -/** - * Video renderer using pure Java2D. - * - * @author Ingo Bauersachs - */ -public class Java2DRenderer - extends AbstractRenderer - implements VideoRenderer -{ - /** - * The default, initial height and width to set on the Components - * of JAWTRenderers before video frames with actual sizes are - * processed. Introduced to mitigate multiple failures to realize the actual - * video frame size and/or to properly scale the visual/video - * Components. - */ - private static final int DEFAULT_COMPONENT_HEIGHT_OR_WIDTH = 16; - - /** - * The array of supported input formats. 
- */ - private static final Format[] SUPPORTED_INPUT_FORMATS - = new Format[] - { - OSUtils.IS_LINUX - ? new YUVFormat( - null /* size */, - Format.NOT_SPECIFIED /* maxDataLength */, - Format.intArray, - Format.NOT_SPECIFIED /* frameRate */, - YUVFormat.YUV_420, - Format.NOT_SPECIFIED /* strideY */, - Format.NOT_SPECIFIED /* strideUV */, - Format.NOT_SPECIFIED /* offsetY */, - Format.NOT_SPECIFIED /* offsetU */, - Format.NOT_SPECIFIED /* offsetV */) - : OSUtils.IS_ANDROID - ? new RGBFormat( - null, - Format.NOT_SPECIFIED, - Format.intArray, - Format.NOT_SPECIFIED, - 32, - 0x000000ff, 0x0000ff00, 0x00ff0000) - : new RGBFormat( - null, - Format.NOT_SPECIFIED, - Format.intArray, - Format.NOT_SPECIFIED, - 32, - 0x00ff0000, 0x0000ff00, 0x000000ff) - }; - - private Java2DRendererVideoComponent component; - - /** - * The last known height of the input processed by this - * JAWTRenderer. - */ - private int height = 0; - - /** - * The last known width of the input processed by this - * JAWTRenderer. - */ - private int width = 0; - - /** - * The Runnable which is executed to bring the invocations of - * {@link #reflectInputFormatOnComponent()} into the AWT event dispatching - * thread. - */ - private final Runnable reflectInputFormatOnComponentInEventDispatchThread - = new Runnable() - { - @Override - public void run() - { - reflectInputFormatOnComponentInEventDispatchThread(); - } - }; - - @Override - public Format[] getSupportedInputFormats() - { - return SUPPORTED_INPUT_FORMATS.clone(); - } - - /** - * Processes the data provided in a specific Buffer and renders it - * to the output device represented by this Renderer. 
- * - * @param buffer a Buffer containing the data to be processed and - * rendered - * @return BUFFER_PROCESSED_OK if the processing is successful; - * otherwise, the other possible return codes defined in the PlugIn - * interface - */ - @Override - public synchronized int process(Buffer buffer) - { - if (buffer.isDiscard()) - { - return BUFFER_PROCESSED_OK; - } - - int bufferLength = buffer.getLength(); - if (bufferLength == 0) - { - return BUFFER_PROCESSED_OK; - } - - Format format = buffer.getFormat(); - if (format != null - && format != this.inputFormat - && !format.equals(this.inputFormat) - && setInputFormat(format) == null) - { - return BUFFER_PROCESSED_FAILED; - } - - Dimension size = null; - if (format != null) - { - size = ((VideoFormat) format).getSize(); - } - - if (size == null) - { - size = this.inputFormat.getSize(); - if (size == null) - { - return BUFFER_PROCESSED_FAILED; - } - } - - // XXX If the size of the video frame to be displayed is tiny enough - // to crash sws_scale, then it may cause issues with other - // functionality as well. Stay on the safe side. - if (size.width >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH - && size.height >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH) - { - getComponent().process(buffer, size); - } - - return BUFFER_PROCESSED_OK; - } - - @Override - public void start() - { - } - - @Override - public void stop() - { - } - - @Override - public void close() - { - } - - @Override - public String getName() - { - return "Pure Java Video Renderer"; - } - - @Override - public void open() throws ResourceUnavailableException - { - } - - @Override - public Rectangle getBounds() - { - return null; - } - - @Override - public Java2DRendererVideoComponent getComponent() - { - if (component == null) - { - component = new Java2DRendererVideoComponent(); - - // Make sure to have non-zero height and width because actual video - // frames may have not been processed yet. 
- component.setSize( - DEFAULT_COMPONENT_HEIGHT_OR_WIDTH, - DEFAULT_COMPONENT_HEIGHT_OR_WIDTH); - } - - return component; - } - - @Override - public void setBounds(Rectangle rect) - { - } - - @Override - public boolean setComponent(Component comp) - { - return false; - } - - /** - * Sets the Format of the input to be processed by this - * Renderer. - * - * @param format the Format to be set as the Format of the - * input to be processed by this Renderer - * @return the Format of the input to be processed by this - * Renderer if the specified format is supported or - * null if the specified format is not supported by this - * Renderer. Typically, it is the supported input Format - * which most closely matches the specified Format. - */ - @Override - public synchronized Format setInputFormat(Format format) - { - VideoFormat oldInputFormat = inputFormat; - Format newInputFormat = super.setInputFormat(format); - - // Short-circuit because we will be calculating a lot and we do not want - // to do that unless necessary. - if (oldInputFormat == inputFormat) - return newInputFormat; - - // Know the width and height of the input because we'll be depicting it - // and we may want, for example, to report them as the preferred size of - // our AWT Component. More importantly, know them because they determine - // certain arguments to be passed to the native counterpart of this - // JAWTRenderer i.e. handle. - Dimension size = inputFormat.getSize(); - - if (size == null) - { - width = height = 0; - } - else - { - width = size.width; - height = size.height; - } - - reflectInputFormatOnComponent(); - - return newInputFormat; - } - - /** - * Sets properties of the AWT Component of this Renderer - * which depend on the properties of the inputFormat of this - * Renderer. Makes sure that the procedure is executed on the AWT - * event dispatching thread because an AWT Component's properties - * (such as preferredSize) should be accessed in the AWT event - * dispatching thread. 
- */ - private void reflectInputFormatOnComponent() - { - if (SwingUtilities.isEventDispatchThread()) - { - reflectInputFormatOnComponentInEventDispatchThread(); - } - else - { - SwingUtilities.invokeLater( - reflectInputFormatOnComponentInEventDispatchThread); - } - } - - /** - * Sets properties of the AWT Component of this Renderer - * which depend on the properties of the inputFormat of this - * Renderer. The invocation is presumed to be performed on the AWT - * event dispatching thread. - */ - private void reflectInputFormatOnComponentInEventDispatchThread() - { - // Reflect the width and height of the input onto the prefSize of our - // AWT Component (if necessary). - if ((component != null) && (width > 0) && (height > 0)) - { - Dimension prefSize = component.getPreferredSize(); - - // Apart from the simplest of cases in which the component has no - // prefSize, it is also necessary to reflect the width and height of - // the input onto the prefSize when the ratio of the input is - // different than the ratio of the prefSize. It may also be argued - // that the component needs to know of the width and height of the - // input if its prefSize is with the same ratio but is smaller. - if ((prefSize == null) - || (prefSize.width < 1) || (prefSize.height < 1) - || !VideoLayout.areAspectRatiosEqual( - prefSize, - width, height) - || (prefSize.width < width) || (prefSize.height < height)) - { - component.setPreferredSize( - new Dimension(width, height)); - } - - // If the component does not have a size, it looks strange given - // that we know a prefSize for it. However, if the component has - // already been added into a Container, the Container will dictate - // the size as part of its layout logic. 
- if (component.isPreferredSizeSet() - && (component.getParent() == null)) - { - Dimension size = component.getSize(); - - prefSize = component.getPreferredSize(); - if ((size.width < 1) || (size.height < 1) - || !VideoLayout.areAspectRatiosEqual( - size, - prefSize.width, prefSize.height)) - { - component.setSize(prefSize.width, prefSize.height); - } - } - } - } -} +package org.jitsi.impl.neomedia.jmfext.media.renderer.video; + +import java.awt.*; + +import javax.media.*; +import javax.media.format.*; +import javax.media.renderer.*; +import javax.swing.*; + +import org.jitsi.impl.neomedia.codec.video.*; +import org.jitsi.impl.neomedia.jmfext.media.renderer.*; +import org.jitsi.util.*; +import org.jitsi.util.swing.*; + +/** + * Video renderer using pure Java2D. + * + * @author Ingo Bauersachs + */ +public class Java2DRenderer + extends AbstractRenderer + implements VideoRenderer +{ + /** + * The default, initial height and width to set on the Components + * of JAWTRenderers before video frames with actual sizes are + * processed. Introduced to mitigate multiple failures to realize the actual + * video frame size and/or to properly scale the visual/video + * Components. + */ + private static final int DEFAULT_COMPONENT_HEIGHT_OR_WIDTH = 16; + + /** + * The array of supported input formats. + */ + private static final Format[] SUPPORTED_INPUT_FORMATS + = new Format[] + { + OSUtils.IS_LINUX + ? new YUVFormat( + null /* size */, + Format.NOT_SPECIFIED /* maxDataLength */, + Format.intArray, + Format.NOT_SPECIFIED /* frameRate */, + YUVFormat.YUV_420, + Format.NOT_SPECIFIED /* strideY */, + Format.NOT_SPECIFIED /* strideUV */, + Format.NOT_SPECIFIED /* offsetY */, + Format.NOT_SPECIFIED /* offsetU */, + Format.NOT_SPECIFIED /* offsetV */) + : OSUtils.IS_ANDROID + ? 
new RGBFormat( + null, + Format.NOT_SPECIFIED, + Format.intArray, + Format.NOT_SPECIFIED, + 32, + 0x000000ff, 0x0000ff00, 0x00ff0000) + : new RGBFormat( + null, + Format.NOT_SPECIFIED, + Format.intArray, + Format.NOT_SPECIFIED, + 32, + 0x00ff0000, 0x0000ff00, 0x000000ff) + }; + + private Java2DRendererVideoComponent component; + + /** + * The last known height of the input processed by this + * JAWTRenderer. + */ + private int height = 0; + + /** + * The last known width of the input processed by this + * JAWTRenderer. + */ + private int width = 0; + + /** + * The Runnable which is executed to bring the invocations of + * {@link #reflectInputFormatOnComponent()} into the AWT event dispatching + * thread. + */ + private final Runnable reflectInputFormatOnComponentInEventDispatchThread + = new Runnable() + { + @Override + public void run() + { + reflectInputFormatOnComponentInEventDispatchThread(); + } + }; + + @Override + public Format[] getSupportedInputFormats() + { + return SUPPORTED_INPUT_FORMATS.clone(); + } + + /** + * Processes the data provided in a specific Buffer and renders it + * to the output device represented by this Renderer. 
+ * + * @param buffer a Buffer containing the data to be processed and + * rendered + * @return BUFFER_PROCESSED_OK if the processing is successful; + * otherwise, the other possible return codes defined in the PlugIn + * interface + */ + @Override + public synchronized int process(Buffer buffer) + { + if (buffer.isDiscard()) + { + return BUFFER_PROCESSED_OK; + } + + int bufferLength = buffer.getLength(); + if (bufferLength == 0) + { + return BUFFER_PROCESSED_OK; + } + + Format format = buffer.getFormat(); + if (format != null + && format != this.inputFormat + && !format.equals(this.inputFormat) + && setInputFormat(format) == null) + { + return BUFFER_PROCESSED_FAILED; + } + + Dimension size = null; + if (format != null) + { + size = ((VideoFormat) format).getSize(); + } + + if (size == null) + { + size = this.inputFormat.getSize(); + if (size == null) + { + return BUFFER_PROCESSED_FAILED; + } + } + + // XXX If the size of the video frame to be displayed is tiny enough + // to crash sws_scale, then it may cause issues with other + // functionality as well. Stay on the safe side. + if (size.width >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH + && size.height >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH) + { + getComponent().process(buffer, size); + } + + return BUFFER_PROCESSED_OK; + } + + @Override + public void start() + { + } + + @Override + public void stop() + { + } + + @Override + public void close() + { + } + + @Override + public String getName() + { + return "Pure Java Video Renderer"; + } + + @Override + public void open() throws ResourceUnavailableException + { + } + + @Override + public Rectangle getBounds() + { + return null; + } + + @Override + public Java2DRendererVideoComponent getComponent() + { + if (component == null) + { + component = new Java2DRendererVideoComponent(); + + // Make sure to have non-zero height and width because actual video + // frames may have not been processed yet. 
+ component.setSize( + DEFAULT_COMPONENT_HEIGHT_OR_WIDTH, + DEFAULT_COMPONENT_HEIGHT_OR_WIDTH); + } + + return component; + } + + @Override + public void setBounds(Rectangle rect) + { + } + + @Override + public boolean setComponent(Component comp) + { + return false; + } + + /** + * Sets the Format of the input to be processed by this + * Renderer. + * + * @param format the Format to be set as the Format of the + * input to be processed by this Renderer + * @return the Format of the input to be processed by this + * Renderer if the specified format is supported or + * null if the specified format is not supported by this + * Renderer. Typically, it is the supported input Format + * which most closely matches the specified Format. + */ + @Override + public synchronized Format setInputFormat(Format format) + { + VideoFormat oldInputFormat = inputFormat; + Format newInputFormat = super.setInputFormat(format); + + // Short-circuit because we will be calculating a lot and we do not want + // to do that unless necessary. + if (oldInputFormat == inputFormat) + return newInputFormat; + + // Know the width and height of the input because we'll be depicting it + // and we may want, for example, to report them as the preferred size of + // our AWT Component. More importantly, know them because they determine + // certain arguments to be passed to the native counterpart of this + // JAWTRenderer i.e. handle. + Dimension size = inputFormat.getSize(); + + if (size == null) + { + width = height = 0; + } + else + { + width = size.width; + height = size.height; + } + + reflectInputFormatOnComponent(); + + return newInputFormat; + } + + /** + * Sets properties of the AWT Component of this Renderer + * which depend on the properties of the inputFormat of this + * Renderer. Makes sure that the procedure is executed on the AWT + * event dispatching thread because an AWT Component's properties + * (such as preferredSize) should be accessed in the AWT event + * dispatching thread. 
+ */ + private void reflectInputFormatOnComponent() + { + if (SwingUtilities.isEventDispatchThread()) + { + reflectInputFormatOnComponentInEventDispatchThread(); + } + else + { + SwingUtilities.invokeLater( + reflectInputFormatOnComponentInEventDispatchThread); + } + } + + /** + * Sets properties of the AWT Component of this Renderer + * which depend on the properties of the inputFormat of this + * Renderer. The invocation is presumed to be performed on the AWT + * event dispatching thread. + */ + private void reflectInputFormatOnComponentInEventDispatchThread() + { + // Reflect the width and height of the input onto the prefSize of our + // AWT Component (if necessary). + if ((component != null) && (width > 0) && (height > 0)) + { + Dimension prefSize = component.getPreferredSize(); + + // Apart from the simplest of cases in which the component has no + // prefSize, it is also necessary to reflect the width and height of + // the input onto the prefSize when the ratio of the input is + // different than the ratio of the prefSize. It may also be argued + // that the component needs to know of the width and height of the + // input if its prefSize is with the same ratio but is smaller. + if ((prefSize == null) + || (prefSize.width < 1) || (prefSize.height < 1) + || !VideoLayout.areAspectRatiosEqual( + prefSize, + width, height) + || (prefSize.width < width) || (prefSize.height < height)) + { + component.setPreferredSize( + new Dimension(width, height)); + } + + // If the component does not have a size, it looks strange given + // that we know a prefSize for it. However, if the component has + // already been added into a Container, the Container will dictate + // the size as part of its layout logic. 
+ if (component.isPreferredSizeSet() + && (component.getParent() == null)) + { + Dimension size = component.getSize(); + + prefSize = component.getPreferredSize(); + if ((size.width < 1) || (size.height < 1) + || !VideoLayout.areAspectRatiosEqual( + size, + prefSize.width, prefSize.height)) + { + component.setSize(prefSize.width, prefSize.height); + } + } + } + } +} diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java index 292c1582e..a47f6b6b9 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,63 +13,63 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.jmfext.media.renderer.video; - -import java.awt.*; -import java.awt.image.*; - -import javax.media.Buffer; - -/** - * Canvas that draws the video buffer using Java's built-in Graphics. - * - * @author Ingo Bauersachs - */ -public class Java2DRendererVideoComponent - extends Canvas -{ - private static final long serialVersionUID = -3229966946026776451L; - private MemoryImageSource mis; - private Dimension size = new Dimension(0, 0); - - /** - * Draws the current image as prepared by the - * {@link #process(Buffer, Dimension)} - * - * @param g the graphics context to draw to. - */ - @Override - public void paint(Graphics g) - { - if (this.mis != null) - { - g.drawImage(this.createImage(mis), 0, 0, - getWidth(), getHeight(), null); - } - } - - /** - * Updates the image to be drawn on the graphics context. - * - * @param buffer the RAW image data. - * @param size the dimension of the image in the buffer. 
- */ - void process(Buffer buffer, Dimension size) - { - if (mis == null || !this.size.equals(size)) - { - this.size = size; - mis = - new MemoryImageSource(size.width, size.height, - (int[]) buffer.getData(), buffer.getOffset(), size.width); - } - else - { - mis.newPixels((int[]) buffer.getData(), ColorModel.getRGBdefault(), - buffer.getOffset(), size.width); - } - - this.repaint(); - } - -} +package org.jitsi.impl.neomedia.jmfext.media.renderer.video; + +import java.awt.*; +import java.awt.image.*; + +import javax.media.Buffer; + +/** + * Canvas that draws the video buffer using Java's built-in Graphics. + * + * @author Ingo Bauersachs + */ +public class Java2DRendererVideoComponent + extends Canvas +{ + private static final long serialVersionUID = -3229966946026776451L; + private MemoryImageSource mis; + private Dimension size = new Dimension(0, 0); + + /** + * Draws the current image as prepared by the + * {@link #process(Buffer, Dimension)} + * + * @param g the graphics context to draw to. + */ + @Override + public void paint(Graphics g) + { + if (this.mis != null) + { + g.drawImage(this.createImage(mis), 0, 0, + getWidth(), getHeight(), null); + } + } + + /** + * Updates the image to be drawn on the graphics context. + * + * @param buffer the RAW image data. + * @param size the dimension of the image in the buffer. 
+ */ + void process(Buffer buffer, Dimension size) + { + if (mis == null || !this.size.equals(size)) + { + this.size = size; + mis = + new MemoryImageSource(size.width, size.height, + (int[]) buffer.getData(), buffer.getOffset(), size.width); + } + else + { + mis.newPixels((int[]) buffer.getData(), ColorModel.getRGBdefault(), + buffer.getOffset(), size.width); + } + + this.repaint(); + } + +} diff --git a/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java b/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java index bb1850f92..55c4376a2 100644 --- a/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java +++ b/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,41 +13,41 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.transform; - -import org.jitsi.impl.neomedia.*; - -/** - * Encapsulate the concept of packet transformation. Given an array of packets, - * PacketTransformer can either "transform" each one of them, or - * "reverse transform" (e.g. restore) each one of them. - * - * @author Bing SU (nova.su@gmail.com) - * @author Boris Grozev - */ -public interface PacketTransformer -{ - /** - * Closes this PacketTransformer i.e. releases the resources - * allocated by it and prepares it for garbage collection. - */ - public void close(); - - /** - * Reverse-transforms each packet in an array of packets. Null values - * must be ignored. - * - * @param pkts the transformed packets to be restored. - * @return the restored packets. - */ - public RawPacket[] reverseTransform(RawPacket[] pkts); - - /** - * Transforms each packet in an array of packets. Null values must be - * ignored. 
- * - * @param pkts the packets to be transformed - * @return the transformed packets - */ - public RawPacket[] transform(RawPacket[] pkts); -} +package org.jitsi.impl.neomedia.transform; + +import org.jitsi.impl.neomedia.*; + +/** + * Encapsulate the concept of packet transformation. Given an array of packets, + * PacketTransformer can either "transform" each one of them, or + * "reverse transform" (e.g. restore) each one of them. + * + * @author Bing SU (nova.su@gmail.com) + * @author Boris Grozev + */ +public interface PacketTransformer +{ + /** + * Closes this PacketTransformer i.e. releases the resources + * allocated by it and prepares it for garbage collection. + */ + public void close(); + + /** + * Reverse-transforms each packet in an array of packets. Null values + * must be ignored. + * + * @param pkts the transformed packets to be restored. + * @return the restored packets. + */ + public RawPacket[] reverseTransform(RawPacket[] pkts); + + /** + * Transforms each packet in an array of packets. Null values must be + * ignored. + * + * @param pkts the packets to be transformed + * @return the transformed packets + */ + public RawPacket[] transform(RawPacket[] pkts); +} diff --git a/src/org/jitsi/impl/neomedia/transform/TransformEngine.java b/src/org/jitsi/impl/neomedia/transform/TransformEngine.java index a7ba33aef..f44a08ddf 100644 --- a/src/org/jitsi/impl/neomedia/transform/TransformEngine.java +++ b/src/org/jitsi/impl/neomedia/transform/TransformEngine.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,28 +13,28 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.transform; - -/** - * Defines how to get PacketTransformers for RTP and RTCP packets. A - * single PacketTransformer can be used for both RTP and RTCP packets - * or there can be two separate PacketTransformers. 
- * - * @author Bing SU (nova.su@gmail.com) - */ -public interface TransformEngine -{ - /** - * Gets the PacketTransformer for RTP packets. - * - * @return the PacketTransformer for RTP packets - */ - public PacketTransformer getRTPTransformer(); - - /** - * Gets the PacketTransformer for RTCP packets. - * - * @return the PacketTransformer for RTCP packets - */ - public PacketTransformer getRTCPTransformer(); -} +package org.jitsi.impl.neomedia.transform; + +/** + * Defines how to get PacketTransformers for RTP and RTCP packets. A + * single PacketTransformer can be used for both RTP and RTCP packets + * or there can be two separate PacketTransformers. + * + * @author Bing SU (nova.su@gmail.com) + */ +public interface TransformEngine +{ + /** + * Gets the PacketTransformer for RTP packets. + * + * @return the PacketTransformer for RTP packets + */ + public PacketTransformer getRTPTransformer(); + + /** + * Gets the PacketTransformer for RTCP packets. + * + * @return the PacketTransformer for RTCP packets + */ + public PacketTransformer getRTCPTransformer(); +} diff --git a/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java b/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java index b59d299c6..6ab86f7b8 100644 --- a/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java +++ b/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,573 +13,573 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.impl.neomedia.transform.dtls; - -import java.io.*; -import java.util.*; -import java.util.concurrent.*; - -import javax.media.rtp.*; - -import org.bouncycastle.crypto.tls.*; -import org.ice4j.ice.*; -import org.jitsi.impl.neomedia.*; -import org.jitsi.impl.neomedia.codec.video.h264.*; -import org.jitsi.util.*; - -/** - * Implements {@link DatagramTransport} in order to integrate the Bouncy Castle - * Crypto APIs in libjitsi for the purposes of implementing DTLS-SRTP. - * - * @author Lyubomir Marinov - */ -public class DatagramTransportImpl - implements DatagramTransport -{ - /** - * The Logger used by the DatagramTransportImpl class and - * its instances to print debug information. - */ - private static final Logger logger - = Logger.getLogger(DatagramTransportImpl.class); - - /** - * The ID of the component which this instance works for/is associated with. - */ - private final int componentID; - - /** - * The RTPConnector which represents and implements the actual - * DatagramSocket adapted by this instance. - */ - private AbstractRTPConnector connector; - - /** - * The pool of RawPackets instances to reduce their allocations - * and garbage collection. - */ - private final Queue rawPacketPool - = new LinkedBlockingQueue(); - - /** - * The queue of RawPackets which have been received from the - * network are awaiting to be received by the application through this - * DatagramTransport. - */ - private final ArrayBlockingQueue receiveQ; - - /** - * The capacity of {@link #receiveQ}. - */ - private final int receiveQCapacity; - - /** - * The byte buffer which represents a datagram to be sent. It may - * consist of multiple DTLS records which are simple encoded consecutively. - */ - private byte[] sendBuf; - - /** - * The length in bytes of {@link #sendBuf} i.e. the number of - * sendBuf elements which constitute actual DTLS records. 
- */ - private int sendBufLength; - - /** - * The Object that synchronizes the access to {@link #sendBuf}, - * {@link #sendBufLength}. - */ - private final Object sendBufSyncRoot = new Object(); - - /** - * Initializes a new DatagramTransportImpl. - * - * @param componentID {@link Component#RTP} if the new instance is to work - * on data/RTP packets or {@link Component#RTCP} if the new instance is to - * work on control/RTCP packets - */ - public DatagramTransportImpl(int componentID) - { - switch (componentID) - { - case Component.RTCP: - case Component.RTP: - this.componentID = componentID; - break; - default: - throw new IllegalArgumentException("componentID"); - } - - receiveQCapacity = RTPConnectorOutputStream.PACKET_QUEUE_CAPACITY; - receiveQ = new ArrayBlockingQueue<>(receiveQCapacity); - } - - private AbstractRTPConnector assertNotClosed( - boolean breakOutOfDTLSReliableHandshakeReceiveMessage) - throws IOException - { - AbstractRTPConnector connector = this.connector; - - if (connector == null) - { - IOException ioe - = new IOException(getClass().getName() + " is closed!"); - - if (breakOutOfDTLSReliableHandshakeReceiveMessage) - breakOutOfDTLSReliableHandshakeReceiveMessage(ioe); - throw ioe; - } - else - { - return connector; - } - } - - /** - * Works around a bug in the Bouncy Castle Crypto APIs which may cause - * org.bouncycastle.crypto.tls.DTLSReliableHandshake.receiveMessage() - * to enter an endless loop. 
- * - * @param cause the Throwable which would have been thrown if the - * bug did not exist - */ - private void breakOutOfDTLSReliableHandshakeReceiveMessage(Throwable cause) - { - for (StackTraceElement stackTraceElement : cause.getStackTrace()) - { - if ("org.bouncycastle.crypto.tls.DTLSReliableHandshake".equals( - stackTraceElement.getClassName()) - && "receiveMessage".equals( - stackTraceElement.getMethodName())) - { - throw new IllegalStateException(cause); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public void close() - throws IOException - { - setConnector(null); - } - - private void doSend(byte[] buf, int off, int len) - throws IOException - { - // Do preserve the sequence of sends. - flush(); - - AbstractRTPConnector connector = assertNotClosed(false); - RTPConnectorOutputStream outputStream; - - switch (componentID) - { - case Component.RTCP: - outputStream = connector.getControlOutputStream(); - break; - case Component.RTP: - outputStream = connector.getDataOutputStream(); - break; - default: - String msg = "componentID"; - IllegalStateException ise = new IllegalStateException(msg); - - logger.error(msg, ise); - throw ise; - } - - // Write synchronously in order to avoid our packet getting stuck in the - // write queue (in case it is blocked waiting for DTLS to finish, for - // example). - outputStream.syncWrite(buf, off, len); - } - - private void flush() - throws IOException - { - assertNotClosed(false); - - byte[] buf; - int len; - - synchronized (sendBufSyncRoot) - { - if ((sendBuf != null) && (sendBufLength != 0)) - { - buf = sendBuf; - sendBuf = null; - len = sendBufLength; - sendBufLength = 0; - } - else - { - buf = null; - len = 0; - } - } - if (buf != null) - { - doSend(buf, 0, len); - - // Attempt to reduce allocations and garbage collection. 
- synchronized (sendBufSyncRoot) - { - if (sendBuf == null) - sendBuf = buf; - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public int getReceiveLimit() - throws IOException - { - AbstractRTPConnector connector = this.connector; - int receiveLimit - = (connector == null) ? -1 : connector.getReceiveBufferSize(); - - if (receiveLimit <= 0) - receiveLimit = RTPConnectorInputStream.PACKET_RECEIVE_BUFFER_LENGTH; - return receiveLimit; - } - - /** - * {@inheritDoc} - */ - @Override - public int getSendLimit() - throws IOException - { - AbstractRTPConnector connector = this.connector; - int sendLimit - = (connector == null) ? -1 : connector.getSendBufferSize(); - - if (sendLimit <= 0) - { - /* - * XXX The estimation bellow is wildly inaccurate and hardly related - * but we have to start somewhere. - */ - sendLimit - = DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH - + Packetizer.MAX_PAYLOAD_SIZE; - } - return sendLimit; - } - - /** - * Queues a packet received from the network to be received by the - * application through this DatagramTransport. 
- * - * @param buf the array of bytes which contains the packet to be - * queued - * @param off the offset within buf at which the packet to be - * queued starts - * @param len the length within buf starting at off of the - * packet to be queued - */ - void queueReceive(byte[] buf, int off, int len) - { - if (len > 0) - { - synchronized (receiveQ) - { - try - { - assertNotClosed(false); - } - catch (IOException ioe) - { - throw new IllegalStateException(ioe); - } - - RawPacket pkt = rawPacketPool.poll(); - byte[] pktBuf; - - if ((pkt == null) || ((pktBuf = pkt.getBuffer()).length < len)) - { - pktBuf = new byte[len]; - pkt = new RawPacket(pktBuf, 0, len); - } - else - { - pktBuf = pkt.getBuffer(); - pkt.setLength(len); - pkt.setOffset(0); - } - System.arraycopy(buf, off, pktBuf, 0, len); - - if (receiveQ.size() == receiveQCapacity) - { - RawPacket oldPkt = receiveQ.remove(); - - rawPacketPool.offer(oldPkt); - } - receiveQ.add(pkt); - receiveQ.notifyAll(); - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public int receive(byte[] buf, int off, int len, int waitMillis) - throws IOException - { - long enterTime = System.currentTimeMillis(); - - /* - * If this DatagramTransportImpl is to be received from, then what - * is to be received may be a response to a request that was earlier - * scheduled for send. - */ - /* - * XXX However, it may unnecessarily break up a flight into multiple - * datagrams. Since we have implemented the recognition of the end of - * flights, it should be fairly safe to rely on it alone. - */ -// flush(); - - /* - * If no datagram is received at all and the specified waitMillis - * expires, a negative value is to be returned in order to have the - * outbound flight retransmitted. 
- */ - int received = -1; - boolean interrupted = false; - - while (received < len) - { - long timeout; - - if (waitMillis > 0) - { - timeout = waitMillis - System.currentTimeMillis() + enterTime; - if (timeout == 0 /* wait forever */) - timeout = -1 /* do not wait */; - } - else - { - timeout = waitMillis; - } - - synchronized (receiveQ) - { - assertNotClosed(true); - - RawPacket pkt = receiveQ.peek(); - - if (pkt != null) - { - /* - * If a datagram has been received and even if it carries - * no/zero bytes, a non-negative value is to be returned in - * order to distinguish the case with that of no received - * datagram. If the received bytes do not represent a DTLS - * record, the record layer may still not retransmit the - * outbound flight. But that should not be much of a concern - * because we queue DTLS records into DatagramTransportImpl. - */ - if (received < 0) - received = 0; - - int toReceive = len - received; - boolean toReceiveIsPositive = (toReceive > 0); - - if (toReceiveIsPositive) - { - int pktLength = pkt.getLength(); - int pktOffset = pkt.getOffset(); - - if (toReceive > pktLength) - { - toReceive = pktLength; - toReceiveIsPositive = (toReceive > 0); - } - if (toReceiveIsPositive) - { - System.arraycopy( - pkt.getBuffer(), pktOffset, - buf, off + received, - toReceive); - received += toReceive; - } - if (toReceive == pktLength) - { - receiveQ.remove(); - rawPacketPool.offer(pkt); - } - else - { - pkt.setLength(pktLength - toReceive); - pkt.setOffset(pktOffset + toReceive); - } - if (toReceiveIsPositive) - { - /* - * The specified buf has received toReceive bytes - * and we do not concatenate RawPackets. - */ - break; - } - } - else - { - // The specified buf has received at least len bytes. - break; - } - } - - if (receiveQ.isEmpty()) - { - if (timeout >= 0) - { - try - { - receiveQ.wait(timeout); - } - catch (InterruptedException ie) - { - interrupted = true; - } - } - else - { - // The specified waitMillis has been exceeded. 
- break; - } - } - } - } - if (interrupted) - Thread.currentThread().interrupt(); - - return received; - } - - /** - * {@inheritDoc} - */ - @Override - public void send(byte[] buf, int off, int len) - throws IOException - { - assertNotClosed(false); - - // If possible, construct a single datagram from multiple DTLS records. - if (len >= DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH) - { - short type = TlsUtils.readUint8(buf, off); - boolean endOfFlight = false; - - switch (type) - { - case ContentType.handshake: - short msg_type = TlsUtils.readUint8(buf, off + 11); - - switch (msg_type) - { - case HandshakeType.certificate: - case HandshakeType.certificate_request: - case HandshakeType.certificate_verify: - case HandshakeType.client_key_exchange: - case HandshakeType.server_hello: - case HandshakeType.server_key_exchange: - case HandshakeType.session_ticket: - case HandshakeType.supplemental_data: - endOfFlight = false; - break; - case HandshakeType.client_hello: - case HandshakeType.finished: - case HandshakeType.hello_request: - case HandshakeType.hello_verify_request: - case HandshakeType.server_hello_done: - default: - endOfFlight = true; - break; - } - // Do fall through! 
- case ContentType.change_cipher_spec: - synchronized (sendBufSyncRoot) - { - int newSendBufLength = sendBufLength + len; - int sendLimit = getSendLimit(); - - if (newSendBufLength <= sendLimit) - { - if (sendBuf == null) - { - sendBuf = new byte[sendLimit]; - sendBufLength = 0; - } - else if (sendBuf.length < sendLimit) - { - byte[] oldSendBuf = sendBuf; - - sendBuf = new byte[sendLimit]; - System.arraycopy( - oldSendBuf, 0, - sendBuf, 0, - Math.min(sendBufLength, sendBuf.length)); - } - - System.arraycopy(buf, off, sendBuf, sendBufLength, len); - sendBufLength = newSendBufLength; - - if (endOfFlight) - flush(); - } - else - { - if (endOfFlight) - { - doSend(buf, off, len); - } - else - { - flush(); - send(buf, off, len); - } - } - } - break; - - case ContentType.alert: - case ContentType.application_data: - default: - doSend(buf, off, len); - break; - } - } - else - { - doSend(buf, off, len); - } - } - - /** - * Sets the RTPConnector which represents and implements the actual - * DatagramSocket to be adapted by this instance. - * - * @param connector the RTPConnector which represents and - * implements the actual DatagramSocket to be adapted by this - * instance - */ - void setConnector(AbstractRTPConnector connector) - { - synchronized (receiveQ) - { - this.connector = connector; - receiveQ.notifyAll(); - } - } -} +package org.jitsi.impl.neomedia.transform.dtls; + +import java.io.*; +import java.util.*; +import java.util.concurrent.*; + +import javax.media.rtp.*; + +import org.bouncycastle.crypto.tls.*; +import org.ice4j.ice.*; +import org.jitsi.impl.neomedia.*; +import org.jitsi.impl.neomedia.codec.video.h264.*; +import org.jitsi.util.*; + +/** + * Implements {@link DatagramTransport} in order to integrate the Bouncy Castle + * Crypto APIs in libjitsi for the purposes of implementing DTLS-SRTP. 
+ * + * @author Lyubomir Marinov + */ +public class DatagramTransportImpl + implements DatagramTransport +{ + /** + * The Logger used by the DatagramTransportImpl class and + * its instances to print debug information. + */ + private static final Logger logger + = Logger.getLogger(DatagramTransportImpl.class); + + /** + * The ID of the component which this instance works for/is associated with. + */ + private final int componentID; + + /** + * The RTPConnector which represents and implements the actual + * DatagramSocket adapted by this instance. + */ + private AbstractRTPConnector connector; + + /** + * The pool of RawPackets instances to reduce their allocations + * and garbage collection. + */ + private final Queue rawPacketPool + = new LinkedBlockingQueue(); + + /** + * The queue of RawPackets which have been received from the + * network are awaiting to be received by the application through this + * DatagramTransport. + */ + private final ArrayBlockingQueue receiveQ; + + /** + * The capacity of {@link #receiveQ}. + */ + private final int receiveQCapacity; + + /** + * The byte buffer which represents a datagram to be sent. It may + * consist of multiple DTLS records which are simple encoded consecutively. + */ + private byte[] sendBuf; + + /** + * The length in bytes of {@link #sendBuf} i.e. the number of + * sendBuf elements which constitute actual DTLS records. + */ + private int sendBufLength; + + /** + * The Object that synchronizes the access to {@link #sendBuf}, + * {@link #sendBufLength}. + */ + private final Object sendBufSyncRoot = new Object(); + + /** + * Initializes a new DatagramTransportImpl. 
+ * + * @param componentID {@link Component#RTP} if the new instance is to work + * on data/RTP packets or {@link Component#RTCP} if the new instance is to + * work on control/RTCP packets + */ + public DatagramTransportImpl(int componentID) + { + switch (componentID) + { + case Component.RTCP: + case Component.RTP: + this.componentID = componentID; + break; + default: + throw new IllegalArgumentException("componentID"); + } + + receiveQCapacity = RTPConnectorOutputStream.PACKET_QUEUE_CAPACITY; + receiveQ = new ArrayBlockingQueue<>(receiveQCapacity); + } + + private AbstractRTPConnector assertNotClosed( + boolean breakOutOfDTLSReliableHandshakeReceiveMessage) + throws IOException + { + AbstractRTPConnector connector = this.connector; + + if (connector == null) + { + IOException ioe + = new IOException(getClass().getName() + " is closed!"); + + if (breakOutOfDTLSReliableHandshakeReceiveMessage) + breakOutOfDTLSReliableHandshakeReceiveMessage(ioe); + throw ioe; + } + else + { + return connector; + } + } + + /** + * Works around a bug in the Bouncy Castle Crypto APIs which may cause + * org.bouncycastle.crypto.tls.DTLSReliableHandshake.receiveMessage() + * to enter an endless loop. + * + * @param cause the Throwable which would have been thrown if the + * bug did not exist + */ + private void breakOutOfDTLSReliableHandshakeReceiveMessage(Throwable cause) + { + for (StackTraceElement stackTraceElement : cause.getStackTrace()) + { + if ("org.bouncycastle.crypto.tls.DTLSReliableHandshake".equals( + stackTraceElement.getClassName()) + && "receiveMessage".equals( + stackTraceElement.getMethodName())) + { + throw new IllegalStateException(cause); + } + } + } + + /** + * {@inheritDoc} + */ + @Override + public void close() + throws IOException + { + setConnector(null); + } + + private void doSend(byte[] buf, int off, int len) + throws IOException + { + // Do preserve the sequence of sends. 
+ flush(); + + AbstractRTPConnector connector = assertNotClosed(false); + RTPConnectorOutputStream outputStream; + + switch (componentID) + { + case Component.RTCP: + outputStream = connector.getControlOutputStream(); + break; + case Component.RTP: + outputStream = connector.getDataOutputStream(); + break; + default: + String msg = "componentID"; + IllegalStateException ise = new IllegalStateException(msg); + + logger.error(msg, ise); + throw ise; + } + + // Write synchronously in order to avoid our packet getting stuck in the + // write queue (in case it is blocked waiting for DTLS to finish, for + // example). + outputStream.syncWrite(buf, off, len); + } + + private void flush() + throws IOException + { + assertNotClosed(false); + + byte[] buf; + int len; + + synchronized (sendBufSyncRoot) + { + if ((sendBuf != null) && (sendBufLength != 0)) + { + buf = sendBuf; + sendBuf = null; + len = sendBufLength; + sendBufLength = 0; + } + else + { + buf = null; + len = 0; + } + } + if (buf != null) + { + doSend(buf, 0, len); + + // Attempt to reduce allocations and garbage collection. + synchronized (sendBufSyncRoot) + { + if (sendBuf == null) + sendBuf = buf; + } + } + } + + /** + * {@inheritDoc} + */ + @Override + public int getReceiveLimit() + throws IOException + { + AbstractRTPConnector connector = this.connector; + int receiveLimit + = (connector == null) ? -1 : connector.getReceiveBufferSize(); + + if (receiveLimit <= 0) + receiveLimit = RTPConnectorInputStream.PACKET_RECEIVE_BUFFER_LENGTH; + return receiveLimit; + } + + /** + * {@inheritDoc} + */ + @Override + public int getSendLimit() + throws IOException + { + AbstractRTPConnector connector = this.connector; + int sendLimit + = (connector == null) ? -1 : connector.getSendBufferSize(); + + if (sendLimit <= 0) + { + /* + * XXX The estimation bellow is wildly inaccurate and hardly related + * but we have to start somewhere. 
+ */ + sendLimit + = DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH + + Packetizer.MAX_PAYLOAD_SIZE; + } + return sendLimit; + } + + /** + * Queues a packet received from the network to be received by the + * application through this DatagramTransport. + * + * @param buf the array of bytes which contains the packet to be + * queued + * @param off the offset within buf at which the packet to be + * queued starts + * @param len the length within buf starting at off of the + * packet to be queued + */ + void queueReceive(byte[] buf, int off, int len) + { + if (len > 0) + { + synchronized (receiveQ) + { + try + { + assertNotClosed(false); + } + catch (IOException ioe) + { + throw new IllegalStateException(ioe); + } + + RawPacket pkt = rawPacketPool.poll(); + byte[] pktBuf; + + if ((pkt == null) || ((pktBuf = pkt.getBuffer()).length < len)) + { + pktBuf = new byte[len]; + pkt = new RawPacket(pktBuf, 0, len); + } + else + { + pktBuf = pkt.getBuffer(); + pkt.setLength(len); + pkt.setOffset(0); + } + System.arraycopy(buf, off, pktBuf, 0, len); + + if (receiveQ.size() == receiveQCapacity) + { + RawPacket oldPkt = receiveQ.remove(); + + rawPacketPool.offer(oldPkt); + } + receiveQ.add(pkt); + receiveQ.notifyAll(); + } + } + } + + /** + * {@inheritDoc} + */ + @Override + public int receive(byte[] buf, int off, int len, int waitMillis) + throws IOException + { + long enterTime = System.currentTimeMillis(); + + /* + * If this DatagramTransportImpl is to be received from, then what + * is to be received may be a response to a request that was earlier + * scheduled for send. + */ + /* + * XXX However, it may unnecessarily break up a flight into multiple + * datagrams. Since we have implemented the recognition of the end of + * flights, it should be fairly safe to rely on it alone. + */ +// flush(); + + /* + * If no datagram is received at all and the specified waitMillis + * expires, a negative value is to be returned in order to have the + * outbound flight retransmitted. 
+ */ + int received = -1; + boolean interrupted = false; + + while (received < len) + { + long timeout; + + if (waitMillis > 0) + { + timeout = waitMillis - System.currentTimeMillis() + enterTime; + if (timeout == 0 /* wait forever */) + timeout = -1 /* do not wait */; + } + else + { + timeout = waitMillis; + } + + synchronized (receiveQ) + { + assertNotClosed(true); + + RawPacket pkt = receiveQ.peek(); + + if (pkt != null) + { + /* + * If a datagram has been received and even if it carries + * no/zero bytes, a non-negative value is to be returned in + * order to distinguish the case with that of no received + * datagram. If the received bytes do not represent a DTLS + * record, the record layer may still not retransmit the + * outbound flight. But that should not be much of a concern + * because we queue DTLS records into DatagramTransportImpl. + */ + if (received < 0) + received = 0; + + int toReceive = len - received; + boolean toReceiveIsPositive = (toReceive > 0); + + if (toReceiveIsPositive) + { + int pktLength = pkt.getLength(); + int pktOffset = pkt.getOffset(); + + if (toReceive > pktLength) + { + toReceive = pktLength; + toReceiveIsPositive = (toReceive > 0); + } + if (toReceiveIsPositive) + { + System.arraycopy( + pkt.getBuffer(), pktOffset, + buf, off + received, + toReceive); + received += toReceive; + } + if (toReceive == pktLength) + { + receiveQ.remove(); + rawPacketPool.offer(pkt); + } + else + { + pkt.setLength(pktLength - toReceive); + pkt.setOffset(pktOffset + toReceive); + } + if (toReceiveIsPositive) + { + /* + * The specified buf has received toReceive bytes + * and we do not concatenate RawPackets. + */ + break; + } + } + else + { + // The specified buf has received at least len bytes. + break; + } + } + + if (receiveQ.isEmpty()) + { + if (timeout >= 0) + { + try + { + receiveQ.wait(timeout); + } + catch (InterruptedException ie) + { + interrupted = true; + } + } + else + { + // The specified waitMillis has been exceeded. 
+ break; + } + } + } + } + if (interrupted) + Thread.currentThread().interrupt(); + + return received; + } + + /** + * {@inheritDoc} + */ + @Override + public void send(byte[] buf, int off, int len) + throws IOException + { + assertNotClosed(false); + + // If possible, construct a single datagram from multiple DTLS records. + if (len >= DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH) + { + short type = TlsUtils.readUint8(buf, off); + boolean endOfFlight = false; + + switch (type) + { + case ContentType.handshake: + short msg_type = TlsUtils.readUint8(buf, off + 11); + + switch (msg_type) + { + case HandshakeType.certificate: + case HandshakeType.certificate_request: + case HandshakeType.certificate_verify: + case HandshakeType.client_key_exchange: + case HandshakeType.server_hello: + case HandshakeType.server_key_exchange: + case HandshakeType.session_ticket: + case HandshakeType.supplemental_data: + endOfFlight = false; + break; + case HandshakeType.client_hello: + case HandshakeType.finished: + case HandshakeType.hello_request: + case HandshakeType.hello_verify_request: + case HandshakeType.server_hello_done: + default: + endOfFlight = true; + break; + } + // Do fall through! 
+ case ContentType.change_cipher_spec: + synchronized (sendBufSyncRoot) + { + int newSendBufLength = sendBufLength + len; + int sendLimit = getSendLimit(); + + if (newSendBufLength <= sendLimit) + { + if (sendBuf == null) + { + sendBuf = new byte[sendLimit]; + sendBufLength = 0; + } + else if (sendBuf.length < sendLimit) + { + byte[] oldSendBuf = sendBuf; + + sendBuf = new byte[sendLimit]; + System.arraycopy( + oldSendBuf, 0, + sendBuf, 0, + Math.min(sendBufLength, sendBuf.length)); + } + + System.arraycopy(buf, off, sendBuf, sendBufLength, len); + sendBufLength = newSendBufLength; + + if (endOfFlight) + flush(); + } + else + { + if (endOfFlight) + { + doSend(buf, off, len); + } + else + { + flush(); + send(buf, off, len); + } + } + } + break; + + case ContentType.alert: + case ContentType.application_data: + default: + doSend(buf, off, len); + break; + } + } + else + { + doSend(buf, off, len); + } + } + + /** + * Sets the RTPConnector which represents and implements the actual + * DatagramSocket to be adapted by this instance. + * + * @param connector the RTPConnector which represents and + * implements the actual DatagramSocket to be adapted by this + * instance + */ + void setConnector(AbstractRTPConnector connector) + { + synchronized (receiveQ) + { + this.connector = connector; + receiveQ.notifyAll(); + } + } +} diff --git a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java index 339f9accc..e1721890b 100644 --- a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java +++ b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,110 +13,110 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.impl.neomedia.transform.srtp; - -import org.bouncycastle.crypto.*; - -/** - * SRTPCipherCTR implements SRTP Counter Mode AES Encryption (AES-CM). - * Counter Mode AES Encryption algorithm is defined in RFC3711, section 4.1.1. - * - * Other than Null Cipher, RFC3711 defined two two encryption algorithms: - * Counter Mode AES Encryption and F8 Mode AES encryption. Both encryption - * algorithms are capable to encrypt / decrypt arbitrary length data, and the - * size of packet data is not required to be a multiple of the AES block - * size (128bit). So, no padding is needed. - * - * Please note: these two encryption algorithms are specially defined by SRTP. - * They are not common AES encryption modes, so you will not be able to find a - * replacement implementation in common cryptographic libraries. - * - * As defined by RFC3711: Counter Mode Encryption is mandatory.. - * - * mandatory to impl optional default - * ------------------------------------------------------------------------- - * encryption AES-CM, NULL AES-f8 AES-CM - * message integrity HMAC-SHA1 - HMAC-SHA1 - * key derivation (PRF) AES-CM - AES-CM - * - * We use AESCipher to handle basic AES encryption / decryption. - * - * @author Werner Dittmann (Werner.Dittmann@t-online.de) - * @author Bing SU (nova.su@gmail.com) - */ -public class SRTPCipherCTR -{ - private static final int BLKLEN = 16; - private static final int MAX_BUFFER_LENGTH = 10 * 1024; - - private final byte[] cipherInBlock = new byte[BLKLEN]; - private byte[] streamBuf = new byte[1024]; - private final byte[] tmpCipherBlock = new byte[BLKLEN]; - - public SRTPCipherCTR() - { - } - - /** - * Computes the cipher stream for AES CM mode. See section 4.1.1 in RFC3711 - * for detailed description. 
- * - * @param out byte array holding the output cipher stream - * @param length length of the cipher stream to produce, in bytes - * @param iv initialization vector used to generate this cipher stream - */ - public void getCipherStream( - BlockCipher aesCipher, - byte[] out, int length, - byte[] iv) - { - System.arraycopy(iv, 0, cipherInBlock, 0, 14); - - int ctr, ctrEnd; - - for (ctr = 0, ctrEnd = length / BLKLEN; ctr < ctrEnd; ctr++) - { - // compute the cipher stream - cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8); - cipherInBlock[15] = (byte) (ctr & 0x00FF); - - aesCipher.processBlock(cipherInBlock, 0, out, ctr * BLKLEN); - } - - // Treat the last bytes: - cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8); - cipherInBlock[15] = (byte) ((ctr & 0x00FF)); - - aesCipher.processBlock(cipherInBlock, 0, tmpCipherBlock, 0); - System.arraycopy(tmpCipherBlock, 0, out, ctr * BLKLEN, length % BLKLEN); - } - - public void process( - BlockCipher cipher, - byte[] data, int off, int len, - byte[] iv) - { - if (off + len > data.length) - return; - - // If data fits in inter buffer, use it. Otherwise, allocate bigger - // buffer and store it (up to a defined maximum size) to use it for - // later processing. - byte[] cipherStream; - - if (len > streamBuf.length) - { - cipherStream = new byte[len]; - if (cipherStream.length <= MAX_BUFFER_LENGTH) - streamBuf = cipherStream; - } - else - { - cipherStream = streamBuf; - } - - getCipherStream(cipher, cipherStream, len, iv); - for (int i = 0; i < len; i++) - data[i + off] ^= cipherStream[i]; - } -} +package org.jitsi.impl.neomedia.transform.srtp; + +import org.bouncycastle.crypto.*; + +/** + * SRTPCipherCTR implements SRTP Counter Mode AES Encryption (AES-CM). + * Counter Mode AES Encryption algorithm is defined in RFC3711, section 4.1.1. + * + * Other than Null Cipher, RFC3711 defined two two encryption algorithms: + * Counter Mode AES Encryption and F8 Mode AES encryption. 
Both encryption + * algorithms are capable to encrypt / decrypt arbitrary length data, and the + * size of packet data is not required to be a multiple of the AES block + * size (128bit). So, no padding is needed. + * + * Please note: these two encryption algorithms are specially defined by SRTP. + * They are not common AES encryption modes, so you will not be able to find a + * replacement implementation in common cryptographic libraries. + * + * As defined by RFC3711: Counter Mode Encryption is mandatory.. + * + * mandatory to impl optional default + * ------------------------------------------------------------------------- + * encryption AES-CM, NULL AES-f8 AES-CM + * message integrity HMAC-SHA1 - HMAC-SHA1 + * key derivation (PRF) AES-CM - AES-CM + * + * We use AESCipher to handle basic AES encryption / decryption. + * + * @author Werner Dittmann (Werner.Dittmann@t-online.de) + * @author Bing SU (nova.su@gmail.com) + */ +public class SRTPCipherCTR +{ + private static final int BLKLEN = 16; + private static final int MAX_BUFFER_LENGTH = 10 * 1024; + + private final byte[] cipherInBlock = new byte[BLKLEN]; + private byte[] streamBuf = new byte[1024]; + private final byte[] tmpCipherBlock = new byte[BLKLEN]; + + public SRTPCipherCTR() + { + } + + /** + * Computes the cipher stream for AES CM mode. See section 4.1.1 in RFC3711 + * for detailed description. 
+ * + * @param out byte array holding the output cipher stream + * @param length length of the cipher stream to produce, in bytes + * @param iv initialization vector used to generate this cipher stream + */ + public void getCipherStream( + BlockCipher aesCipher, + byte[] out, int length, + byte[] iv) + { + System.arraycopy(iv, 0, cipherInBlock, 0, 14); + + int ctr, ctrEnd; + + for (ctr = 0, ctrEnd = length / BLKLEN; ctr < ctrEnd; ctr++) + { + // compute the cipher stream + cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8); + cipherInBlock[15] = (byte) (ctr & 0x00FF); + + aesCipher.processBlock(cipherInBlock, 0, out, ctr * BLKLEN); + } + + // Treat the last bytes: + cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8); + cipherInBlock[15] = (byte) ((ctr & 0x00FF)); + + aesCipher.processBlock(cipherInBlock, 0, tmpCipherBlock, 0); + System.arraycopy(tmpCipherBlock, 0, out, ctr * BLKLEN, length % BLKLEN); + } + + public void process( + BlockCipher cipher, + byte[] data, int off, int len, + byte[] iv) + { + if (off + len > data.length) + return; + + // If data fits in inter buffer, use it. Otherwise, allocate bigger + // buffer and store it (up to a defined maximum size) to use it for + // later processing. 
+ byte[] cipherStream; + + if (len > streamBuf.length) + { + cipherStream = new byte[len]; + if (cipherStream.length <= MAX_BUFFER_LENGTH) + streamBuf = cipherStream; + } + else + { + cipherStream = streamBuf; + } + + getCipherStream(cipher, cipherStream, len, iv); + for (int i = 0; i < len; i++) + data[i + off] ^= cipherStream[i]; + } +} diff --git a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java index d32f4d93f..f632502bf 100644 --- a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java +++ b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,97 +13,97 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.impl.neomedia.transform.srtp; - -/** - * The SRTPContextFactory creates the initial crypto contexts for RTP - * and RTCP encryption using the supplied key material. - * - * @author Bing SU (nova.su@gmail.com) - */ -public class SRTPContextFactory -{ - /** - * The default SRTPCryptoContext, which will be used to derive other - * contexts. - */ - private SRTPCryptoContext defaultContext; - - /** - * The default SRTPCryptoContext, which will be used to derive other - * contexts. - */ - private SRTCPCryptoContext defaultContextControl; - - /** - * Construct a SRTPTransformEngine based on given master encryption key, - * master salt key and SRTP/SRTCP policy. 
- * - * @param sender true if the new instance is to be used by an SRTP - * sender; false if the new instance is to be used by an SRTP - * receiver - * @param masterKey the master encryption key - * @param masterSalt the master salt key - * @param srtpPolicy SRTP policy - * @param srtcpPolicy SRTCP policy - */ - public SRTPContextFactory( - boolean sender, - byte[] masterKey, - byte[] masterSalt, - SRTPPolicy srtpPolicy, - SRTPPolicy srtcpPolicy) - { - defaultContext - = new SRTPCryptoContext( - sender, - 0, - 0, - 0, - masterKey, - masterSalt, - srtpPolicy); - defaultContextControl - = new SRTCPCryptoContext(0, masterKey, masterSalt, srtcpPolicy); - } - - /** - * Close the transformer engine. - * - * The close functions closes all stored default crypto contexts. This - * deletes key data and forces a cleanup of the crypto contexts. - */ - public void close() - { - if (defaultContext != null) - { - defaultContext.close(); - defaultContext = null; - } - if (defaultContextControl != null) - { - defaultContextControl.close(); - defaultContextControl = null; - } - } - - /** - * Get the default SRTPCryptoContext - * - * @return the default SRTPCryptoContext - */ - public SRTPCryptoContext getDefaultContext() - { - return defaultContext; - } - - /** - * Get the default SRTPCryptoContext - * - * @return the default SRTPCryptoContext - */ - public SRTCPCryptoContext getDefaultContextControl() - { - return defaultContextControl; - } -} +package org.jitsi.impl.neomedia.transform.srtp; + +/** + * The SRTPContextFactory creates the initial crypto contexts for RTP + * and RTCP encryption using the supplied key material. + * + * @author Bing SU (nova.su@gmail.com) + */ +public class SRTPContextFactory +{ + /** + * The default SRTPCryptoContext, which will be used to derive other + * contexts. + */ + private SRTPCryptoContext defaultContext; + + /** + * The default SRTPCryptoContext, which will be used to derive other + * contexts. 
+ */ + private SRTCPCryptoContext defaultContextControl; + + /** + * Construct a SRTPTransformEngine based on given master encryption key, + * master salt key and SRTP/SRTCP policy. + * + * @param sender true if the new instance is to be used by an SRTP + * sender; false if the new instance is to be used by an SRTP + * receiver + * @param masterKey the master encryption key + * @param masterSalt the master salt key + * @param srtpPolicy SRTP policy + * @param srtcpPolicy SRTCP policy + */ + public SRTPContextFactory( + boolean sender, + byte[] masterKey, + byte[] masterSalt, + SRTPPolicy srtpPolicy, + SRTPPolicy srtcpPolicy) + { + defaultContext + = new SRTPCryptoContext( + sender, + 0, + 0, + 0, + masterKey, + masterSalt, + srtpPolicy); + defaultContextControl + = new SRTCPCryptoContext(0, masterKey, masterSalt, srtcpPolicy); + } + + /** + * Close the transformer engine. + * + * The close functions closes all stored default crypto contexts. This + * deletes key data and forces a cleanup of the crypto contexts. 
+ */ + public void close() + { + if (defaultContext != null) + { + defaultContext.close(); + defaultContext = null; + } + if (defaultContextControl != null) + { + defaultContextControl.close(); + defaultContextControl = null; + } + } + + /** + * Get the default SRTPCryptoContext + * + * @return the default SRTPCryptoContext + */ + public SRTPCryptoContext getDefaultContext() + { + return defaultContext; + } + + /** + * Get the default SRTPCryptoContext + * + * @return the default SRTPCryptoContext + */ + public SRTCPCryptoContext getDefaultContextControl() + { + return defaultContextControl; + } +} diff --git a/src/org/jitsi/service/libjitsi/LibJitsiActivator.java b/src/org/jitsi/service/libjitsi/LibJitsiActivator.java index 61044660c..a8615135b 100644 --- a/src/org/jitsi/service/libjitsi/LibJitsiActivator.java +++ b/src/org/jitsi/service/libjitsi/LibJitsiActivator.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,25 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.service.libjitsi; - -import org.osgi.framework.*; - -/** - * Activates libjitsi in an OSGi environment. - */ -public class LibJitsiActivator - implements BundleActivator -{ - public void start(BundleContext bundleContext) - throws Exception - { - LibJitsi.start(bundleContext); - } - - public void stop(BundleContext bundleContext) - throws Exception - { - LibJitsi.stop(); - } -} +package org.jitsi.service.libjitsi; + +import org.osgi.framework.*; + +/** + * Activates libjitsi in an OSGi environment. 
+ */ +public class LibJitsiActivator + implements BundleActivator +{ + public void start(BundleContext bundleContext) + throws Exception + { + LibJitsi.start(bundleContext); + } + + public void stop(BundleContext bundleContext) + throws Exception + { + LibJitsi.stop(); + } +} diff --git a/src/org/jitsi/service/neomedia/SDesControl.java b/src/org/jitsi/service/neomedia/SDesControl.java index 1615be02f..e39cf8ce1 100644 --- a/src/org/jitsi/service/neomedia/SDesControl.java +++ b/src/org/jitsi/service/neomedia/SDesControl.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,81 +13,81 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.service.neomedia; - -import ch.imvs.sdes4j.srtp.*; - -/** - * SDES based SRTP MediaStream encryption control. - * - * @author Ingo Bauersachs - */ -public interface SDesControl - extends SrtpControl -{ - /** - * Name of the config setting that supplies the default enabled cipher - * suites. Cipher suites are comma-separated. - */ - public static final String SDES_CIPHER_SUITES = - "net.java.sip.communicator.service.neomedia.SDES_CIPHER_SUITES"; - - /** - * Gets the crypto attribute of the incoming MediaStream. - * - * @return the crypto attribute of the incoming MediaStream. - */ - public SrtpCryptoAttribute getInAttribute(); - - /** - * Returns the crypto attributes enabled on this computer. - * - * @return The crypto attributes enabled on this computer. - */ - public SrtpCryptoAttribute[] getInitiatorCryptoAttributes(); - - /** - * Gets the crypto attribute of the outgoing MediaStream. - * - * @return the crypto attribute of the outgoing MediaStream. - */ - public SrtpCryptoAttribute getOutAttribute(); - - /** - * Gets all supported cipher suites. - * - * @return all supported cipher suites. 
- */ - public Iterable getSupportedCryptoSuites(); - - /** - * Selects the local crypto attribute from the initial offering - * ({@link #getInitiatorCryptoAttributes()}) based on the peer's first - * matching cipher suite. - * - * @param peerAttributes The peer's crypto offers. - * @return A SrtpCryptoAttribute when a matching cipher suite was found; - * null, otherwise. - */ - public SrtpCryptoAttribute initiatorSelectAttribute( - Iterable peerAttributes); - - /** - * Chooses a supported crypto attribute from the peer's list of supplied - * attributes and creates the local crypto attribute. Used when the control - * is running in the role as responder. - * - * @param peerAttributes The peer's crypto attribute offering. - * @return The local crypto attribute for the answer of the offer or - * null if no matching cipher suite could be found. - */ - public SrtpCryptoAttribute responderSelectAttribute( - Iterable peerAttributes); - - /** - * Sets the enabled SDES ciphers. - * - * @param ciphers The list of enabled ciphers. - */ - public void setEnabledCiphers(Iterable ciphers); -} +package org.jitsi.service.neomedia; + +import ch.imvs.sdes4j.srtp.*; + +/** + * SDES based SRTP MediaStream encryption control. + * + * @author Ingo Bauersachs + */ +public interface SDesControl + extends SrtpControl +{ + /** + * Name of the config setting that supplies the default enabled cipher + * suites. Cipher suites are comma-separated. + */ + public static final String SDES_CIPHER_SUITES = + "net.java.sip.communicator.service.neomedia.SDES_CIPHER_SUITES"; + + /** + * Gets the crypto attribute of the incoming MediaStream. + * + * @return the crypto attribute of the incoming MediaStream. + */ + public SrtpCryptoAttribute getInAttribute(); + + /** + * Returns the crypto attributes enabled on this computer. + * + * @return The crypto attributes enabled on this computer. 
+ */ + public SrtpCryptoAttribute[] getInitiatorCryptoAttributes(); + + /** + * Gets the crypto attribute of the outgoing MediaStream. + * + * @return the crypto attribute of the outgoing MediaStream. + */ + public SrtpCryptoAttribute getOutAttribute(); + + /** + * Gets all supported cipher suites. + * + * @return all supported cipher suites. + */ + public Iterable getSupportedCryptoSuites(); + + /** + * Selects the local crypto attribute from the initial offering + * ({@link #getInitiatorCryptoAttributes()}) based on the peer's first + * matching cipher suite. + * + * @param peerAttributes The peer's crypto offers. + * @return A SrtpCryptoAttribute when a matching cipher suite was found; + * null, otherwise. + */ + public SrtpCryptoAttribute initiatorSelectAttribute( + Iterable peerAttributes); + + /** + * Chooses a supported crypto attribute from the peer's list of supplied + * attributes and creates the local crypto attribute. Used when the control + * is running in the role as responder. + * + * @param peerAttributes The peer's crypto attribute offering. + * @return The local crypto attribute for the answer of the offer or + * null if no matching cipher suite could be found. + */ + public SrtpCryptoAttribute responderSelectAttribute( + Iterable peerAttributes); + + /** + * Sets the enabled SDES ciphers. + * + * @param ciphers The list of enabled ciphers. + */ + public void setEnabledCiphers(Iterable ciphers); +} diff --git a/src/org/jitsi/service/neomedia/SrtpControlType.java b/src/org/jitsi/service/neomedia/SrtpControlType.java index 4f535fb12..b6c1cd8c1 100644 --- a/src/org/jitsi/service/neomedia/SrtpControlType.java +++ b/src/org/jitsi/service/neomedia/SrtpControlType.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,77 +13,77 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.service.neomedia; - -/** - * The SrtpControlType enumeration contains all currently known - * SrtpControl implementations. - * - * @author Ingo Bauersachs - * @author Lyubomir Marinov - */ -public enum SrtpControlType -{ - /** - * Datagram Transport Layer Security (DTLS) Extension to Establish Keys for - * the Secure Real-time Transport Protocol (SRTP) - */ - DTLS_SRTP("DTLS-SRTP"), - - /** - * Multimedia Internet KEYing (RFC 3830) - */ - MIKEY("MIKEY"), - - /** - * Session Description Protocol (SDP) Security Descriptions for Media - * Streams (RFC 4568) - */ - SDES("SDES"), - - /** - * ZRTP: Media Path Key Agreement for Unicast Secure RTP (RFC 6189) - */ - ZRTP("ZRTP"); - - /** - * The human-readable non-localized name of the (S)RTP transport protocol - * represented by this SrtpControlType and its respective - * SrtpControl class. - */ - private final String protoName; - - /** - * Initializes a new SrtpControlType instance with a specific - * human-readable non-localized (S)RTP transport protocol name. - * - * @param protoName the human-readable non-localized name of the (S)RTP - * transport protocol represented by the new instance and its respective - * SrtpControl class - */ - private SrtpControlType(String protoName) - { - this.protoName = protoName; - } - - @Override - public String toString() - { - return protoName; - } - - /** - * @see SrtpControlType#valueOf(String) - */ - public static SrtpControlType fromString(String protoName) - { - if (protoName.equals(SrtpControlType.DTLS_SRTP.toString())) - { - return SrtpControlType.DTLS_SRTP; - } - else - { - return SrtpControlType.valueOf(protoName); - } - } -} +package org.jitsi.service.neomedia; + +/** + * The SrtpControlType enumeration contains all currently known + * SrtpControl implementations. 
+ * + * @author Ingo Bauersachs + * @author Lyubomir Marinov + */ +public enum SrtpControlType +{ + /** + * Datagram Transport Layer Security (DTLS) Extension to Establish Keys for + * the Secure Real-time Transport Protocol (SRTP) + */ + DTLS_SRTP("DTLS-SRTP"), + + /** + * Multimedia Internet KEYing (RFC 3830) + */ + MIKEY("MIKEY"), + + /** + * Session Description Protocol (SDP) Security Descriptions for Media + * Streams (RFC 4568) + */ + SDES("SDES"), + + /** + * ZRTP: Media Path Key Agreement for Unicast Secure RTP (RFC 6189) + */ + ZRTP("ZRTP"); + + /** + * The human-readable non-localized name of the (S)RTP transport protocol + * represented by this SrtpControlType and its respective + * SrtpControl class. + */ + private final String protoName; + + /** + * Initializes a new SrtpControlType instance with a specific + * human-readable non-localized (S)RTP transport protocol name. + * + * @param protoName the human-readable non-localized name of the (S)RTP + * transport protocol represented by the new instance and its respective + * SrtpControl class + */ + private SrtpControlType(String protoName) + { + this.protoName = protoName; + } + + @Override + public String toString() + { + return protoName; + } + + /** + * @see SrtpControlType#valueOf(String) + */ + public static SrtpControlType fromString(String protoName) + { + if (protoName.equals(SrtpControlType.DTLS_SRTP.toString())) + { + return SrtpControlType.DTLS_SRTP; + } + else + { + return SrtpControlType.valueOf(protoName); + } + } +} diff --git a/src/org/jitsi/service/neomedia/control/KeyFrameControl.java b/src/org/jitsi/service/neomedia/control/KeyFrameControl.java index cb44d563a..a544cd0d3 100644 --- a/src/org/jitsi/service/neomedia/control/KeyFrameControl.java +++ b/src/org/jitsi/service/neomedia/control/KeyFrameControl.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,179 +13,179 @@ * See the License for the 
specific language governing permissions and * limitations under the License. */ -package org.jitsi.service.neomedia.control; - -import java.util.*; - -/** - * Represents a control over the key frame-related logic of a - * VideoMediaStream. - * - * @author Lyubomir Marinov - */ -public interface KeyFrameControl -{ - /** - * Adds a KeyFrameRequestee to be made available through this - * KeyFrameControl. - * - * @param index the zero-based index at which keyFrameRequestee is - * to be added to the list of KeyFrameRequestees made available or - * -1 to have this KeyFrameControl choose at which index - * it is to be added in accord with its internal logic - * through this KeyFrameControl - * @param keyFrameRequestee the KeyFrameRequestee to be added to - * this KeyFrameControl so that it is made available through it - */ - public void addKeyFrameRequestee( - int index, - KeyFrameRequestee keyFrameRequestee); - - /** - * Adds a KeyFrameRequester to be made available through this - * KeyFrameControl. - * - * @param index the zero-based index at which keyFrameRequester is - * to be added to the list of KeyFrameRequesters made available or - * -1 to have this KeyFrameControl choose at which index - * it is to be added in accord with its internal logic - * through this KeyFrameControl - * @param keyFrameRequester the KeyFrameRequester to be added to - * this KeyFrameControl so that it is made available through it - */ - public void addKeyFrameRequester( - int index, - KeyFrameRequester keyFrameRequester); - - /** - * Gets the KeyFrameRequestees made available through this - * KeyFrameControl. - * - * @return an unmodifiable list of KeyFrameRequestees made - * available through this KeyFrameControl - */ - public List getKeyFrameRequestees(); - - /** - * Gets the KeyFrameRequesters made available through this - * KeyFrameControl. 
- * - * @return an unmodifiable list of KeyFrameRequesters made - * available through this KeyFrameControl - */ - public List getKeyFrameRequesters(); - - /** - * Notifies this KeyFrameControl that the remote peer of the - * associated VideoMediaStream has requested a key frame from the - * local peer. - * - * @return true if the local peer has honored the request from the - * remote peer for a key frame; otherwise, false - */ - public boolean keyFrameRequest(); - - /** - * Removes a KeyFrameRequestee to no longer be made available - * through this KeyFrameControl. - * - * @param keyFrameRequestee the KeyFrameRequestee to be removed - * from this KeyFrameControl so that it is no longer made available - * through it - * @return true if keyFrameRequestee was found in this - * KeyFrameControl; otherwise, false - */ - public boolean removeKeyFrameRequestee(KeyFrameRequestee keyFrameRequestee); - - /** - * Removes a KeyFrameRequester to no longer be made available - * through this KeyFrameControl. - * - * @param keyFrameRequester the KeyFrameRequester to be removed - * from this KeyFrameControl so that it is no longer made available - * through it - * @return true if keyFrameRequester was found in this - * KeyFrameControl; otherwise, false - */ - public boolean removeKeyFrameRequester(KeyFrameRequester keyFrameRequester); - - /** - * Requests a key frame from the remote peer of the associated - * VideoMediaStream. - * - * @param urgent true if the caller has determined that the need - * for a key frame is urgent and should not obey all constraints with - * respect to time between two subsequent requests for key frames - * @return true if a key frame was indeed requested from the remote - * peer of the associated VideoMediaStream in response to the call; - * otherwise, false - */ - public boolean requestKeyFrame(boolean urgent); - - /** - * Represents a way for the remote peer of a VideoMediaStream to - * request a key frame from its local peer. 
- * - * @author Lyubomir Marinov - */ - public interface KeyFrameRequestee - { - /** - * Notifies this KeyFrameRequestee that the remote peer of the - * associated VideoMediaStream requests a key frame from the - * local peer. - * - * @return true if this KeyFrameRequestee has honored - * the request for a key frame; otherwise, false - */ - public boolean keyFrameRequest(); - } - - /** - * Represents a way for a VideoMediaStream to request a key frame - * from its remote peer. - * - * @author Lyubomir Marinov - */ - public interface KeyFrameRequester - { - /** - * The name of the ConfigurationService property which - * specifies the preferred KeyFrameRequester to be used. - */ - public static final String PREFERRED_PNAME - = "net.java.sip.communicator.impl.neomedia.codec.video.h264." - + "preferredKeyFrameRequester"; - - /** - * The value of the {@link #PREFERRED_PNAME} - * ConfigurationService property which indicates that the - * RTCP KeyFrameRequester is preferred. - */ - public static final String RTCP = "rtcp"; - - /** - * The value of the {@link #PREFERRED_PNAME} - * ConfigurationService property which indicates that the - * signaling/protocol KeyFrameRequester is preferred. - */ - public static final String SIGNALING = "signaling"; - - /** - * The default value of the {@link #PREFERRED_PNAME} - * ConfigurationService property. - */ - public static final String DEFAULT_PREFERRED = RTCP; - - /** - * Requests a key frame from the remote peer of the associated - * VideoMediaStream. - * - * @return true if this KeyFrameRequester has - * indeed requested a key frame from the remote peer of the associated - * VideoMediaStream in response to the call; otherwise, - * false - */ - public boolean requestKeyFrame(); - } -} +package org.jitsi.service.neomedia.control; + +import java.util.*; + +/** + * Represents a control over the key frame-related logic of a + * VideoMediaStream. 
+ * + * @author Lyubomir Marinov + */ +public interface KeyFrameControl +{ + /** + * Adds a KeyFrameRequestee to be made available through this + * KeyFrameControl. + * + * @param index the zero-based index at which keyFrameRequestee is + * to be added to the list of KeyFrameRequestees made available or + * -1 to have this KeyFrameControl choose at which index + * it is to be added in accord with its internal logic + * through this KeyFrameControl + * @param keyFrameRequestee the KeyFrameRequestee to be added to + * this KeyFrameControl so that it is made available through it + */ + public void addKeyFrameRequestee( + int index, + KeyFrameRequestee keyFrameRequestee); + + /** + * Adds a KeyFrameRequester to be made available through this + * KeyFrameControl. + * + * @param index the zero-based index at which keyFrameRequester is + * to be added to the list of KeyFrameRequesters made available or + * -1 to have this KeyFrameControl choose at which index + * it is to be added in accord with its internal logic + * through this KeyFrameControl + * @param keyFrameRequester the KeyFrameRequester to be added to + * this KeyFrameControl so that it is made available through it + */ + public void addKeyFrameRequester( + int index, + KeyFrameRequester keyFrameRequester); + + /** + * Gets the KeyFrameRequestees made available through this + * KeyFrameControl. + * + * @return an unmodifiable list of KeyFrameRequestees made + * available through this KeyFrameControl + */ + public List getKeyFrameRequestees(); + + /** + * Gets the KeyFrameRequesters made available through this + * KeyFrameControl. + * + * @return an unmodifiable list of KeyFrameRequesters made + * available through this KeyFrameControl + */ + public List getKeyFrameRequesters(); + + /** + * Notifies this KeyFrameControl that the remote peer of the + * associated VideoMediaStream has requested a key frame from the + * local peer. 
+ * + * @return true if the local peer has honored the request from the + * remote peer for a key frame; otherwise, false + */ + public boolean keyFrameRequest(); + + /** + * Removes a KeyFrameRequestee to no longer be made available + * through this KeyFrameControl. + * + * @param keyFrameRequestee the KeyFrameRequestee to be removed + * from this KeyFrameControl so that it is no longer made available + * through it + * @return true if keyFrameRequestee was found in this + * KeyFrameControl; otherwise, false + */ + public boolean removeKeyFrameRequestee(KeyFrameRequestee keyFrameRequestee); + + /** + * Removes a KeyFrameRequester to no longer be made available + * through this KeyFrameControl. + * + * @param keyFrameRequester the KeyFrameRequester to be removed + * from this KeyFrameControl so that it is no longer made available + * through it + * @return true if keyFrameRequester was found in this + * KeyFrameControl; otherwise, false + */ + public boolean removeKeyFrameRequester(KeyFrameRequester keyFrameRequester); + + /** + * Requests a key frame from the remote peer of the associated + * VideoMediaStream. + * + * @param urgent true if the caller has determined that the need + * for a key frame is urgent and should not obey all constraints with + * respect to time between two subsequent requests for key frames + * @return true if a key frame was indeed requested from the remote + * peer of the associated VideoMediaStream in response to the call; + * otherwise, false + */ + public boolean requestKeyFrame(boolean urgent); + + /** + * Represents a way for the remote peer of a VideoMediaStream to + * request a key frame from its local peer. + * + * @author Lyubomir Marinov + */ + public interface KeyFrameRequestee + { + /** + * Notifies this KeyFrameRequestee that the remote peer of the + * associated VideoMediaStream requests a key frame from the + * local peer. 
+ * + * @return true if this KeyFrameRequestee has honored + * the request for a key frame; otherwise, false + */ + public boolean keyFrameRequest(); + } + + /** + * Represents a way for a VideoMediaStream to request a key frame + * from its remote peer. + * + * @author Lyubomir Marinov + */ + public interface KeyFrameRequester + { + /** + * The name of the ConfigurationService property which + * specifies the preferred KeyFrameRequester to be used. + */ + public static final String PREFERRED_PNAME + = "net.java.sip.communicator.impl.neomedia.codec.video.h264." + + "preferredKeyFrameRequester"; + + /** + * The value of the {@link #PREFERRED_PNAME} + * ConfigurationService property which indicates that the + * RTCP KeyFrameRequester is preferred. + */ + public static final String RTCP = "rtcp"; + + /** + * The value of the {@link #PREFERRED_PNAME} + * ConfigurationService property which indicates that the + * signaling/protocol KeyFrameRequester is preferred. + */ + public static final String SIGNALING = "signaling"; + + /** + * The default value of the {@link #PREFERRED_PNAME} + * ConfigurationService property. + */ + public static final String DEFAULT_PREFERRED = RTCP; + + /** + * Requests a key frame from the remote peer of the associated + * VideoMediaStream. 
+ * + * @return true if this KeyFrameRequester has + * indeed requested a key frame from the remote peer of the associated + * VideoMediaStream in response to the call; otherwise, + * false + */ + public boolean requestKeyFrame(); + } +} diff --git a/src/org/jitsi/util/event/PropertyChangeNotifier.java b/src/org/jitsi/util/event/PropertyChangeNotifier.java index 8a66764b9..fc0e5bf0c 100644 --- a/src/org/jitsi/util/event/PropertyChangeNotifier.java +++ b/src/org/jitsi/util/event/PropertyChangeNotifier.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,185 +13,185 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.util.event; - -import java.beans.*; -import java.util.*; - -import org.jitsi.util.*; - -/** - * Represents a source of PropertyChangeEvents which notifies - * PropertyChangeListeners about changes in the values of properties. - * - * @author Lyubomir Marinov - */ -public class PropertyChangeNotifier -{ - /** - * The Logger used by the PropertyChangeNotifier class and - * its instances for logging output. - */ - private static final Logger logger - = Logger.getLogger(PropertyChangeNotifier.class); - - /** - * The list of PropertyChangeListeners interested in and notified - * about changes in the values of the properties of this - * PropertyChangeNotifier. - */ - private final List listeners - = new ArrayList(); - - /** - * Initializes a new PropertyChangeNotifier instance. - */ - public PropertyChangeNotifier() - { - } - - /** - * Adds a specific PropertyChangeListener to the list of listeners - * interested in and notified about changes in the values of the properties - * of this PropertyChangeNotifier. - * - * @param listener a PropertyChangeListener to be notified about - * changes in the values of the properties of this - * PropertyChangeNotifier. 
If the specified listener is already in - * the list of interested listeners (i.e. it has been previously added), it - * is not added again. - */ - public void addPropertyChangeListener(PropertyChangeListener listener) - { - if (listener == null) - { - if (logger.isDebugEnabled()) - { - logger.debug( - "The specified argument listener is null" - + " and that does not make sense."); - } - } - else - { - synchronized (listeners) - { - if (!listeners.contains(listener)) - listeners.add(listener); - } - } - } - - /** - * Fires a new PropertyChangeEvent to the - * PropertyChangeListeners registered with this - * PropertyChangeNotifier in order to notify about a change in the - * value of a specific property which had its old value modified to a - * specific new value. PropertyChangeNotifier does not check - * whether the specified oldValue and newValue are indeed - * different. - * - * @param property the name of the property of this - * PropertyChangeNotifier which had its value changed - * @param oldValue the value of the property with the specified name before - * the change - * @param newValue the value of the property with the specified name after - * the change - */ - protected void firePropertyChange( - String property, - Object oldValue, Object newValue) - { - PropertyChangeListener[] ls; - - synchronized (listeners) - { - ls - = listeners.toArray( - new PropertyChangeListener[listeners.size()]); - } - - if (ls.length != 0) - { - PropertyChangeEvent ev - = new PropertyChangeEvent( - getPropertyChangeSource(property, oldValue, newValue), - property, - oldValue, newValue); - - for (PropertyChangeListener l : ls) - { - try - { - l.propertyChange(ev); - } - catch (Throwable t) - { - if (t instanceof InterruptedException) - { - Thread.currentThread().interrupt(); - } - else if (t instanceof ThreadDeath) - { - throw (ThreadDeath) t; - } - else - { - logger.warn( - "A PropertyChangeListener threw an exception" - + " while handling a PropertyChangeEvent.", - t); - } - } 
- } - } - } - - /** - * Gets the Object to be reported as the source of a new - * PropertyChangeEvent which is to notify the - * PropertyChangeListeners registered with this - * PropertyChangeNotifier about the change in the value of a - * property with a specific name from a specific old value to a specific new - * value. - * - * @param property the name of the property which had its value changed from - * the specified old value to the specified new value - * @param oldValue the value of the property with the specified name before - * the change - * @param newValue the value of the property with the specified name after - * the change - * @return the Object to be reported as the source of the new - * PropertyChangeEvent which is to notify the - * PropertyChangeListeners registered with this - * PropertyChangeNotifier about the change in the value of the - * property with the specified name from the specified old value to the - * specified new value - */ - protected Object getPropertyChangeSource( - String property, - Object oldValue, Object newValue) - { - return this; - } - - /** - * Removes a specific PropertyChangeListener from the list of - * listeners interested in and notified about changes in the values of the - * properties of this PropertyChangeNotifer. - * - * @param listener a PropertyChangeListener to no longer be - * notified about changes in the values of the properties of this - * PropertyChangeNotifier - */ - public void removePropertyChangeListener(PropertyChangeListener listener) - { - if (listener != null) - { - synchronized (listeners) - { - listeners.remove(listener); - } - } - } -} +package org.jitsi.util.event; + +import java.beans.*; +import java.util.*; + +import org.jitsi.util.*; + +/** + * Represents a source of PropertyChangeEvents which notifies + * PropertyChangeListeners about changes in the values of properties. 
+ * + * @author Lyubomir Marinov + */ +public class PropertyChangeNotifier +{ + /** + * The Logger used by the PropertyChangeNotifier class and + * its instances for logging output. + */ + private static final Logger logger + = Logger.getLogger(PropertyChangeNotifier.class); + + /** + * The list of PropertyChangeListeners interested in and notified + * about changes in the values of the properties of this + * PropertyChangeNotifier. + */ + private final List listeners + = new ArrayList(); + + /** + * Initializes a new PropertyChangeNotifier instance. + */ + public PropertyChangeNotifier() + { + } + + /** + * Adds a specific PropertyChangeListener to the list of listeners + * interested in and notified about changes in the values of the properties + * of this PropertyChangeNotifier. + * + * @param listener a PropertyChangeListener to be notified about + * changes in the values of the properties of this + * PropertyChangeNotifier. If the specified listener is already in + * the list of interested listeners (i.e. it has been previously added), it + * is not added again. + */ + public void addPropertyChangeListener(PropertyChangeListener listener) + { + if (listener == null) + { + if (logger.isDebugEnabled()) + { + logger.debug( + "The specified argument listener is null" + + " and that does not make sense."); + } + } + else + { + synchronized (listeners) + { + if (!listeners.contains(listener)) + listeners.add(listener); + } + } + } + + /** + * Fires a new PropertyChangeEvent to the + * PropertyChangeListeners registered with this + * PropertyChangeNotifier in order to notify about a change in the + * value of a specific property which had its old value modified to a + * specific new value. PropertyChangeNotifier does not check + * whether the specified oldValue and newValue are indeed + * different. 
+ * + * @param property the name of the property of this + * PropertyChangeNotifier which had its value changed + * @param oldValue the value of the property with the specified name before + * the change + * @param newValue the value of the property with the specified name after + * the change + */ + protected void firePropertyChange( + String property, + Object oldValue, Object newValue) + { + PropertyChangeListener[] ls; + + synchronized (listeners) + { + ls + = listeners.toArray( + new PropertyChangeListener[listeners.size()]); + } + + if (ls.length != 0) + { + PropertyChangeEvent ev + = new PropertyChangeEvent( + getPropertyChangeSource(property, oldValue, newValue), + property, + oldValue, newValue); + + for (PropertyChangeListener l : ls) + { + try + { + l.propertyChange(ev); + } + catch (Throwable t) + { + if (t instanceof InterruptedException) + { + Thread.currentThread().interrupt(); + } + else if (t instanceof ThreadDeath) + { + throw (ThreadDeath) t; + } + else + { + logger.warn( + "A PropertyChangeListener threw an exception" + + " while handling a PropertyChangeEvent.", + t); + } + } + } + } + } + + /** + * Gets the Object to be reported as the source of a new + * PropertyChangeEvent which is to notify the + * PropertyChangeListeners registered with this + * PropertyChangeNotifier about the change in the value of a + * property with a specific name from a specific old value to a specific new + * value. 
+ * + * @param property the name of the property which had its value changed from + * the specified old value to the specified new value + * @param oldValue the value of the property with the specified name before + * the change + * @param newValue the value of the property with the specified name after + * the change + * @return the Object to be reported as the source of the new + * PropertyChangeEvent which is to notify the + * PropertyChangeListeners registered with this + * PropertyChangeNotifier about the change in the value of the + * property with the specified name from the specified old value to the + * specified new value + */ + protected Object getPropertyChangeSource( + String property, + Object oldValue, Object newValue) + { + return this; + } + + /** + * Removes a specific PropertyChangeListener from the list of + * listeners interested in and notified about changes in the values of the + * properties of this PropertyChangeNotifer. + * + * @param listener a PropertyChangeListener to no longer be + * notified about changes in the values of the properties of this + * PropertyChangeNotifier + */ + public void removePropertyChangeListener(PropertyChangeListener listener) + { + if (listener != null) + { + synchronized (listeners) + { + listeners.remove(listener); + } + } + } +} diff --git a/src/org/jitsi/util/swing/FitLayout.java b/src/org/jitsi/util/swing/FitLayout.java index af7361eb7..7fd607566 100644 --- a/src/org/jitsi/util/swing/FitLayout.java +++ b/src/org/jitsi/util/swing/FitLayout.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,208 +13,208 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.util.swing; - -import java.awt.*; - -import javax.swing.*; - -/** - * Represents a LayoutManager which centers the first - * Component within its Container and, if the preferred size - * of the Component is larger than the size of the Container, - * scales the former within the bounds of the latter while preserving the aspect - * ratio. FitLayout is appropriate for Containers which - * display a single image or video Component in its entirety for which - * preserving the aspect ratio is important. - * - * @author Lyubomir Marinov - */ -public class FitLayout - implements LayoutManager -{ - /** - * The default height and width to be used by FitLayout and its - * extenders in order to avoid falling back to zero height and/or width. - * Introduced to mitigate issues arising from the fact that a - * Component zero height and/or width. - */ - protected static final int DEFAULT_HEIGHT_OR_WIDTH = 16; - - /** - * {@inheritDoc} - * - * Does nothing because this LayoutManager lays out only the first - * Component of the parent Container and thus doesn't need - * any String associations. - */ - public void addLayoutComponent(String name, Component comp) {} - - /** - * Gets the first Component of a specific Container if - * there is such a Component. - * - * @param parent the Container to retrieve the first - * Component of - * @return the first Component of a specific Container if - * there is such a Component; otherwise, null - */ - protected Component getComponent(Container parent) - { - Component[] components = parent.getComponents(); - - return (components.length > 0) ? 
components[0] : null; - } - - protected void layoutComponent( - Component component, - Rectangle bounds, - float alignmentX, float alignmentY) - { - Dimension size; - - /* - * XXX The following (mostly) represents a quick and dirty hack for the - * purposes of video conferencing which adds transparent JPanels to - * VideoContainer and does not want them fitted because they contain - * VideoContainers themselves and the videos get fitted in them. - */ - if (((component instanceof JPanel) - && !component.isOpaque() - && (((Container) component).getComponentCount() > 1)) - || (component instanceof VideoContainer) - /* - * If the specified component does not have a preferredSize, we - * cannot know its aspect ratio and we are left with no choice - * but to stretch it within the complete bounds. - */ - || ((size = component.getPreferredSize()) == null)) - { - size = bounds.getSize(); - } - else - { - boolean scale = false; - double widthRatio; - double heightRatio; - - if ((size.width != bounds.width) && (size.width > 0)) - { - scale = true; - widthRatio = bounds.width / (double) size.width; - } - else - widthRatio = 1; - if ((size.height != bounds.height) && (size.height > 0)) - { - scale = true; - heightRatio = bounds.height / (double) size.height; - } - else - heightRatio = 1; - if (scale) - { - double ratio = Math.min(widthRatio, heightRatio); - - size.width = (int) (size.width * ratio); - size.height = (int) (size.height * ratio); - } - } - - // Respect the maximumSize of the component. - if (component.isMaximumSizeSet()) - { - Dimension maxSize = component.getMaximumSize(); - - if (size.width > maxSize.width) - size.width = maxSize.width; - if (size.height > maxSize.height) - size.height = maxSize.height; - } - - /* - * Why would one fit a Component into a rectangle with zero width and - * height? 
- */ - if (size.height < 1) - size.height = 1; - if (size.width < 1) - size.width = 1; - - component.setBounds( - bounds.x + Math.round((bounds.width - size.width) * alignmentX), - bounds.y - + Math.round((bounds.height - size.height) * alignmentY), - size.width, - size.height); - } - - /* - * Scales the first Component if its preferred size is larger than the size - * of its parent Container in order to display the Component in its entirety - * and then centers it within the display area of the parent. - */ - public void layoutContainer(Container parent) - { - layoutContainer(parent, Component.CENTER_ALIGNMENT); - } - - protected void layoutContainer(Container parent, float componentAlignmentX) - { - Component component = getComponent(parent); - - if (component != null) - { - layoutComponent( - component, - new Rectangle(parent.getSize()), - componentAlignmentX, Component.CENTER_ALIGNMENT); - } - } - - /* - * Since this LayoutManager lays out only the first Component of the - * specified parent Container, the minimum size of the Container is the - * minimum size of the mentioned Component. - */ - public Dimension minimumLayoutSize(Container parent) - { - Component component = getComponent(parent); - - return - (component != null) - ? component.getMinimumSize() - : new Dimension( - DEFAULT_HEIGHT_OR_WIDTH, - DEFAULT_HEIGHT_OR_WIDTH); - } - - /** - * {@inheritDoc} - * - * Since this LayoutManager lays out only the first - * Component of the specified parent Container, the - * preferred size of the Container is the preferred size of the - * mentioned Component. - */ - public Dimension preferredLayoutSize(Container parent) - { - Component component = getComponent(parent); - - return - (component != null) - ? 
component.getPreferredSize() - : new Dimension( - DEFAULT_HEIGHT_OR_WIDTH, - DEFAULT_HEIGHT_OR_WIDTH); - } - - /** - * {@inheritDoc} - * - * Does nothing because this LayoutManager lays out only the first - * Component of the parent Container and thus doesn't need - * any String associations. - */ - public void removeLayoutComponent(Component comp) {} -} +package org.jitsi.util.swing; + +import java.awt.*; + +import javax.swing.*; + +/** + * Represents a LayoutManager which centers the first + * Component within its Container and, if the preferred size + * of the Component is larger than the size of the Container, + * scales the former within the bounds of the latter while preserving the aspect + * ratio. FitLayout is appropriate for Containers which + * display a single image or video Component in its entirety for which + * preserving the aspect ratio is important. + * + * @author Lyubomir Marinov + */ +public class FitLayout + implements LayoutManager +{ + /** + * The default height and width to be used by FitLayout and its + * extenders in order to avoid falling back to zero height and/or width. + * Introduced to mitigate issues arising from the fact that a + * Component zero height and/or width. + */ + protected static final int DEFAULT_HEIGHT_OR_WIDTH = 16; + + /** + * {@inheritDoc} + * + * Does nothing because this LayoutManager lays out only the first + * Component of the parent Container and thus doesn't need + * any String associations. + */ + public void addLayoutComponent(String name, Component comp) {} + + /** + * Gets the first Component of a specific Container if + * there is such a Component. + * + * @param parent the Container to retrieve the first + * Component of + * @return the first Component of a specific Container if + * there is such a Component; otherwise, null + */ + protected Component getComponent(Container parent) + { + Component[] components = parent.getComponents(); + + return (components.length > 0) ? 
components[0] : null; + } + + protected void layoutComponent( + Component component, + Rectangle bounds, + float alignmentX, float alignmentY) + { + Dimension size; + + /* + * XXX The following (mostly) represents a quick and dirty hack for the + * purposes of video conferencing which adds transparent JPanels to + * VideoContainer and does not want them fitted because they contain + * VideoContainers themselves and the videos get fitted in them. + */ + if (((component instanceof JPanel) + && !component.isOpaque() + && (((Container) component).getComponentCount() > 1)) + || (component instanceof VideoContainer) + /* + * If the specified component does not have a preferredSize, we + * cannot know its aspect ratio and we are left with no choice + * but to stretch it within the complete bounds. + */ + || ((size = component.getPreferredSize()) == null)) + { + size = bounds.getSize(); + } + else + { + boolean scale = false; + double widthRatio; + double heightRatio; + + if ((size.width != bounds.width) && (size.width > 0)) + { + scale = true; + widthRatio = bounds.width / (double) size.width; + } + else + widthRatio = 1; + if ((size.height != bounds.height) && (size.height > 0)) + { + scale = true; + heightRatio = bounds.height / (double) size.height; + } + else + heightRatio = 1; + if (scale) + { + double ratio = Math.min(widthRatio, heightRatio); + + size.width = (int) (size.width * ratio); + size.height = (int) (size.height * ratio); + } + } + + // Respect the maximumSize of the component. + if (component.isMaximumSizeSet()) + { + Dimension maxSize = component.getMaximumSize(); + + if (size.width > maxSize.width) + size.width = maxSize.width; + if (size.height > maxSize.height) + size.height = maxSize.height; + } + + /* + * Why would one fit a Component into a rectangle with zero width and + * height? 
+ */ + if (size.height < 1) + size.height = 1; + if (size.width < 1) + size.width = 1; + + component.setBounds( + bounds.x + Math.round((bounds.width - size.width) * alignmentX), + bounds.y + + Math.round((bounds.height - size.height) * alignmentY), + size.width, + size.height); + } + + /* + * Scales the first Component if its preferred size is larger than the size + * of its parent Container in order to display the Component in its entirety + * and then centers it within the display area of the parent. + */ + public void layoutContainer(Container parent) + { + layoutContainer(parent, Component.CENTER_ALIGNMENT); + } + + protected void layoutContainer(Container parent, float componentAlignmentX) + { + Component component = getComponent(parent); + + if (component != null) + { + layoutComponent( + component, + new Rectangle(parent.getSize()), + componentAlignmentX, Component.CENTER_ALIGNMENT); + } + } + + /* + * Since this LayoutManager lays out only the first Component of the + * specified parent Container, the minimum size of the Container is the + * minimum size of the mentioned Component. + */ + public Dimension minimumLayoutSize(Container parent) + { + Component component = getComponent(parent); + + return + (component != null) + ? component.getMinimumSize() + : new Dimension( + DEFAULT_HEIGHT_OR_WIDTH, + DEFAULT_HEIGHT_OR_WIDTH); + } + + /** + * {@inheritDoc} + * + * Since this LayoutManager lays out only the first + * Component of the specified parent Container, the + * preferred size of the Container is the preferred size of the + * mentioned Component. + */ + public Dimension preferredLayoutSize(Container parent) + { + Component component = getComponent(parent); + + return + (component != null) + ? 
component.getPreferredSize() + : new Dimension( + DEFAULT_HEIGHT_OR_WIDTH, + DEFAULT_HEIGHT_OR_WIDTH); + } + + /** + * {@inheritDoc} + * + * Does nothing because this LayoutManager lays out only the first + * Component of the parent Container and thus doesn't need + * any String associations. + */ + public void removeLayoutComponent(Component comp) {} +} diff --git a/src/org/jitsi/util/swing/VideoContainer.java b/src/org/jitsi/util/swing/VideoContainer.java index c7b4a3a3e..22f044b27 100644 --- a/src/org/jitsi/util/swing/VideoContainer.java +++ b/src/org/jitsi/util/swing/VideoContainer.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,361 +13,361 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jitsi.util.swing; - -import java.awt.*; -import java.awt.event.*; -import java.beans.*; - -import javax.swing.*; - -/** - * Implements a Container for video/visual Components. - * VideoContainer uses {@link VideoLayout} to layout the video/visual - * Components it contains. A specific Component can be - * displayed by default at {@link VideoLayout#CENTER_REMOTE}. - * - * @author Lyubomir Marinov - * @author Yana Stamcheva - */ -public class VideoContainer - extends TransparentPanel -{ - /** - * Serial version UID. - */ - private static final long serialVersionUID = 0L; - - /** - * The default background color of VideoContainer when it contains - * Component instances other than {@link #noVideoComponent}. - */ - public static final Color DEFAULT_BACKGROUND_COLOR = Color.BLACK; - - private static final String PREFERRED_SIZE_PROPERTY_NAME = "preferredSize"; - - /** - * The number of times that add or remove methods are - * currently being executed on this instance. Decreases the number of - * unnecessary invocations to {@link #doLayout()}, {@link #repaint()} and - * {@link #validate()}. 
- */ - private int inAddOrRemove; - - /** - * The Component to be displayed by this VideoContainer - * at {@link VideoLayout#CENTER_REMOTE} when no other Component has - * been added to it to be displayed there. For example, the avatar of the - * remote peer may be displayed in place of the remote video when the remote - * video is not available. - */ - private final Component noVideoComponent; - - private final PropertyChangeListener propertyChangeListener - = new PropertyChangeListener() - { - public void propertyChange(PropertyChangeEvent ev) - { - VideoContainer.this.propertyChange(ev); - } - }; - - private final Object syncRoot = new Object(); - - /** - * The indicator which determines whether this instance is aware that - * {@link #doLayout()}, {@link #repaint()} and/or {@link #validate()} are to - * be invoked (as soon as {@link #inAddOrRemove} decreases from a positive - * number to zero). - */ - private boolean validateAndRepaint; - - /** - * Initializes a new VideoContainer with a specific - * Component to be displayed when no remote video is available. 
- * - * @param noVideoComponent the component to be displayed when no remote - * video is available - * @param conference true to dedicate the new instance to a - * telephony conferencing user interface; otherwise, false - */ - public VideoContainer(Component noVideoComponent, boolean conference) - { - setLayout(new VideoLayout(conference)); - - this.noVideoComponent = noVideoComponent; - - if (DEFAULT_BACKGROUND_COLOR != null) - setBackground(DEFAULT_BACKGROUND_COLOR); - - addContainerListener( - new ContainerListener() - { - public void componentAdded(ContainerEvent ev) - { - VideoContainer.this.onContainerEvent(ev); - } - - public void componentRemoved(ContainerEvent ev) - { - VideoContainer.this.onContainerEvent(ev); - } - }); - - if (this.noVideoComponent != null) - add(this.noVideoComponent, VideoLayout.CENTER_REMOTE, -1); - } - - /** - * Adds the given component at the {@link VideoLayout#CENTER_REMOTE} - * position in the default video layout. - * - * @param comp the component to add - * @return the added component - */ - @Override - public Component add(Component comp) - { - add(comp, VideoLayout.CENTER_REMOTE); - return comp; - } - - @Override - public Component add(Component comp, int index) - { - add(comp, null, index); - return comp; - } - - @Override - public void add(Component comp, Object constraints) - { - add(comp, constraints, -1); - } - - /** - * Overrides the default behavior of add in order to be sure to remove the - * default "no video" component when a remote video component is added. 
- * - * @param comp the component to add - * @param constraints - * @param index - */ - @Override - public void add(Component comp, Object constraints, int index) - { - enterAddOrRemove(); - try - { - if (VideoLayout.CENTER_REMOTE.equals(constraints) - && (noVideoComponent != null) - && !noVideoComponent.equals(comp) - || (comp.equals(noVideoComponent) - && noVideoComponent.getParent() != null)) - { - remove(noVideoComponent); - } - - super.add(comp, constraints, index); - } - finally - { - exitAddOrRemove(); - } - } - - private void enterAddOrRemove() - { - synchronized (syncRoot) - { - if (inAddOrRemove == 0) - validateAndRepaint = false; - inAddOrRemove++; - } - } - - private void exitAddOrRemove() - { - synchronized (syncRoot) - { - inAddOrRemove--; - if (inAddOrRemove < 1) - { - inAddOrRemove = 0; - if (validateAndRepaint) - { - validateAndRepaint = false; - - if (isDisplayable()) - { - if (isValid()) - doLayout(); - else - validate(); - repaint(); - } - else - doLayout(); - } - } - } - } - - /** - * Notifies this instance that a specific Component has been added - * to or removed from this Container. - * - * @param ev a ContainerEvent which details the specifics of the - * notification such as the Component that has been added or - * removed - */ - private void onContainerEvent(ContainerEvent ev) - { - try - { - Component component = ev.getChild(); - - switch (ev.getID()) - { - case ContainerEvent.COMPONENT_ADDED: - component.addPropertyChangeListener( - PREFERRED_SIZE_PROPERTY_NAME, - propertyChangeListener); - break; - case ContainerEvent.COMPONENT_REMOVED: - component.removePropertyChangeListener( - PREFERRED_SIZE_PROPERTY_NAME, - propertyChangeListener); - break; - } - - /* - * If an explicit background color is to be displayed by this - * Component, make sure that its opaque property i.e. transparency - * does not interfere with that display. 
- */ - if (DEFAULT_BACKGROUND_COLOR != null) - { - int componentCount = getComponentCount(); - - if ((componentCount == 1) - && (getComponent(0) - == VideoContainer.this.noVideoComponent)) - { - componentCount = 0; - } - - setOpaque(componentCount > 0); - } - } - finally - { - synchronized (syncRoot) - { - if (inAddOrRemove != 0) - validateAndRepaint = true; - } - } - } - - /** - * Notifies this instance about a change in the value of a property of a - * Component contained by this Container. Since the - * VideoLayout of this Container sizes the contained - * Components based on their preferredSizes, this - * Container invokes {@link #doLayout()}, {@link #repaint()} and/or - * {@link #validate()} upon changes in the values of the property in - * question. - * - * @param ev a PropertyChangeEvent which details the specifics of - * the notification such as the name of the property whose value changed and - * the Component which fired the notification - */ - private void propertyChange(PropertyChangeEvent ev) - { - if (PREFERRED_SIZE_PROPERTY_NAME.equals(ev.getPropertyName()) - && SwingUtilities.isEventDispatchThread()) - { - /* - * The goal is to invoke doLayout, repaint and/or validate. These - * methods and the specifics with respect to avoiding unnecessary - * calls to them are already dealt with by enterAddOrRemove, - * exitAddOrRemove and validateAndRepaint. - */ - synchronized (syncRoot) - { - enterAddOrRemove(); - validateAndRepaint = true; - exitAddOrRemove(); - } - } - } - - /** - * Overrides the default remove behavior in order to add the default no - * video component when the remote video is removed. 
- * - * @param comp the component to remove - */ - @Override - public void remove(Component comp) - { - enterAddOrRemove(); - try - { - super.remove(comp); - - Component[] components = getComponents(); - VideoLayout videoLayout = (VideoLayout) getLayout(); - boolean hasComponentsAtCenterRemote = false; - - for (Component c : components) - { - if (!c.equals(noVideoComponent) - && VideoLayout.CENTER_REMOTE.equals( - videoLayout.getComponentConstraints(c))) - { - hasComponentsAtCenterRemote = true; - break; - } - } - - if (!hasComponentsAtCenterRemote - && (noVideoComponent != null) - && !noVideoComponent.equals(comp)) - { - add(noVideoComponent, VideoLayout.CENTER_REMOTE); - } - } - finally - { - exitAddOrRemove(); - } - } - - /** - * Ensures noVideoComponent is displayed even when the clients of the - * videoContainer invoke its #removeAll() to remove their previous visual - * Components representing video. Just adding noVideoComponent upon - * ContainerEvent#COMPONENT_REMOVED when there is no other Component left in - * the Container will cause an infinite loop because Container#removeAll() - * will detect that a new Component has been added while dispatching the - * event and will then try to remove the new Component. - */ - @Override - public void removeAll() - { - enterAddOrRemove(); - try - { - super.removeAll(); - - if (noVideoComponent != null) - add(noVideoComponent, VideoLayout.CENTER_REMOTE); - } - finally - { - exitAddOrRemove(); - } - } -} +package org.jitsi.util.swing; + +import java.awt.*; +import java.awt.event.*; +import java.beans.*; + +import javax.swing.*; + +/** + * Implements a Container for video/visual Components. + * VideoContainer uses {@link VideoLayout} to layout the video/visual + * Components it contains. A specific Component can be + * displayed by default at {@link VideoLayout#CENTER_REMOTE}. 
+ * + * @author Lyubomir Marinov + * @author Yana Stamcheva + */ +public class VideoContainer + extends TransparentPanel +{ + /** + * Serial version UID. + */ + private static final long serialVersionUID = 0L; + + /** + * The default background color of VideoContainer when it contains + * Component instances other than {@link #noVideoComponent}. + */ + public static final Color DEFAULT_BACKGROUND_COLOR = Color.BLACK; + + private static final String PREFERRED_SIZE_PROPERTY_NAME = "preferredSize"; + + /** + * The number of times that add or remove methods are + * currently being executed on this instance. Decreases the number of + * unnecessary invocations to {@link #doLayout()}, {@link #repaint()} and + * {@link #validate()}. + */ + private int inAddOrRemove; + + /** + * The Component to be displayed by this VideoContainer + * at {@link VideoLayout#CENTER_REMOTE} when no other Component has + * been added to it to be displayed there. For example, the avatar of the + * remote peer may be displayed in place of the remote video when the remote + * video is not available. + */ + private final Component noVideoComponent; + + private final PropertyChangeListener propertyChangeListener + = new PropertyChangeListener() + { + public void propertyChange(PropertyChangeEvent ev) + { + VideoContainer.this.propertyChange(ev); + } + }; + + private final Object syncRoot = new Object(); + + /** + * The indicator which determines whether this instance is aware that + * {@link #doLayout()}, {@link #repaint()} and/or {@link #validate()} are to + * be invoked (as soon as {@link #inAddOrRemove} decreases from a positive + * number to zero). + */ + private boolean validateAndRepaint; + + /** + * Initializes a new VideoContainer with a specific + * Component to be displayed when no remote video is available. 
+ * + * @param noVideoComponent the component to be displayed when no remote + * video is available + * @param conference true to dedicate the new instance to a + * telephony conferencing user interface; otherwise, false + */ + public VideoContainer(Component noVideoComponent, boolean conference) + { + setLayout(new VideoLayout(conference)); + + this.noVideoComponent = noVideoComponent; + + if (DEFAULT_BACKGROUND_COLOR != null) + setBackground(DEFAULT_BACKGROUND_COLOR); + + addContainerListener( + new ContainerListener() + { + public void componentAdded(ContainerEvent ev) + { + VideoContainer.this.onContainerEvent(ev); + } + + public void componentRemoved(ContainerEvent ev) + { + VideoContainer.this.onContainerEvent(ev); + } + }); + + if (this.noVideoComponent != null) + add(this.noVideoComponent, VideoLayout.CENTER_REMOTE, -1); + } + + /** + * Adds the given component at the {@link VideoLayout#CENTER_REMOTE} + * position in the default video layout. + * + * @param comp the component to add + * @return the added component + */ + @Override + public Component add(Component comp) + { + add(comp, VideoLayout.CENTER_REMOTE); + return comp; + } + + @Override + public Component add(Component comp, int index) + { + add(comp, null, index); + return comp; + } + + @Override + public void add(Component comp, Object constraints) + { + add(comp, constraints, -1); + } + + /** + * Overrides the default behavior of add in order to be sure to remove the + * default "no video" component when a remote video component is added. 
+ * + * @param comp the component to add + * @param constraints + * @param index + */ + @Override + public void add(Component comp, Object constraints, int index) + { + enterAddOrRemove(); + try + { + if (VideoLayout.CENTER_REMOTE.equals(constraints) + && (noVideoComponent != null) + && !noVideoComponent.equals(comp) + || (comp.equals(noVideoComponent) + && noVideoComponent.getParent() != null)) + { + remove(noVideoComponent); + } + + super.add(comp, constraints, index); + } + finally + { + exitAddOrRemove(); + } + } + + private void enterAddOrRemove() + { + synchronized (syncRoot) + { + if (inAddOrRemove == 0) + validateAndRepaint = false; + inAddOrRemove++; + } + } + + private void exitAddOrRemove() + { + synchronized (syncRoot) + { + inAddOrRemove--; + if (inAddOrRemove < 1) + { + inAddOrRemove = 0; + if (validateAndRepaint) + { + validateAndRepaint = false; + + if (isDisplayable()) + { + if (isValid()) + doLayout(); + else + validate(); + repaint(); + } + else + doLayout(); + } + } + } + } + + /** + * Notifies this instance that a specific Component has been added + * to or removed from this Container. + * + * @param ev a ContainerEvent which details the specifics of the + * notification such as the Component that has been added or + * removed + */ + private void onContainerEvent(ContainerEvent ev) + { + try + { + Component component = ev.getChild(); + + switch (ev.getID()) + { + case ContainerEvent.COMPONENT_ADDED: + component.addPropertyChangeListener( + PREFERRED_SIZE_PROPERTY_NAME, + propertyChangeListener); + break; + case ContainerEvent.COMPONENT_REMOVED: + component.removePropertyChangeListener( + PREFERRED_SIZE_PROPERTY_NAME, + propertyChangeListener); + break; + } + + /* + * If an explicit background color is to be displayed by this + * Component, make sure that its opaque property i.e. transparency + * does not interfere with that display. 
+ */ + if (DEFAULT_BACKGROUND_COLOR != null) + { + int componentCount = getComponentCount(); + + if ((componentCount == 1) + && (getComponent(0) + == VideoContainer.this.noVideoComponent)) + { + componentCount = 0; + } + + setOpaque(componentCount > 0); + } + } + finally + { + synchronized (syncRoot) + { + if (inAddOrRemove != 0) + validateAndRepaint = true; + } + } + } + + /** + * Notifies this instance about a change in the value of a property of a + * Component contained by this Container. Since the + * VideoLayout of this Container sizes the contained + * Components based on their preferredSizes, this + * Container invokes {@link #doLayout()}, {@link #repaint()} and/or + * {@link #validate()} upon changes in the values of the property in + * question. + * + * @param ev a PropertyChangeEvent which details the specifics of + * the notification such as the name of the property whose value changed and + * the Component which fired the notification + */ + private void propertyChange(PropertyChangeEvent ev) + { + if (PREFERRED_SIZE_PROPERTY_NAME.equals(ev.getPropertyName()) + && SwingUtilities.isEventDispatchThread()) + { + /* + * The goal is to invoke doLayout, repaint and/or validate. These + * methods and the specifics with respect to avoiding unnecessary + * calls to them are already dealt with by enterAddOrRemove, + * exitAddOrRemove and validateAndRepaint. + */ + synchronized (syncRoot) + { + enterAddOrRemove(); + validateAndRepaint = true; + exitAddOrRemove(); + } + } + } + + /** + * Overrides the default remove behavior in order to add the default no + * video component when the remote video is removed. 
+ * + * @param comp the component to remove + */ + @Override + public void remove(Component comp) + { + enterAddOrRemove(); + try + { + super.remove(comp); + + Component[] components = getComponents(); + VideoLayout videoLayout = (VideoLayout) getLayout(); + boolean hasComponentsAtCenterRemote = false; + + for (Component c : components) + { + if (!c.equals(noVideoComponent) + && VideoLayout.CENTER_REMOTE.equals( + videoLayout.getComponentConstraints(c))) + { + hasComponentsAtCenterRemote = true; + break; + } + } + + if (!hasComponentsAtCenterRemote + && (noVideoComponent != null) + && !noVideoComponent.equals(comp)) + { + add(noVideoComponent, VideoLayout.CENTER_REMOTE); + } + } + finally + { + exitAddOrRemove(); + } + } + + /** + * Ensures noVideoComponent is displayed even when the clients of the + * videoContainer invoke its #removeAll() to remove their previous visual + * Components representing video. Just adding noVideoComponent upon + * ContainerEvent#COMPONENT_REMOVED when there is no other Component left in + * the Container will cause an infinite loop because Container#removeAll() + * will detect that a new Component has been added while dispatching the + * event and will then try to remove the new Component. + */ + @Override + public void removeAll() + { + enterAddOrRemove(); + try + { + super.removeAll(); + + if (noVideoComponent != null) + add(noVideoComponent, VideoLayout.CENTER_REMOTE); + } + finally + { + exitAddOrRemove(); + } + } +} diff --git a/src/org/jitsi/util/swing/VideoLayout.java b/src/org/jitsi/util/swing/VideoLayout.java index 31dd770f1..8794d664f 100644 --- a/src/org/jitsi/util/swing/VideoLayout.java +++ b/src/org/jitsi/util/swing/VideoLayout.java @@ -1,4 +1,4 @@ -/* +/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,633 +13,633 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jitsi.util.swing; - -import java.awt.*; -import java.util.*; -import java.util.List; - -import javax.swing.*; - -/** - * Implements the LayoutManager which lays out the local and remote - * videos in a video Call. - * - * @author Lyubomir Marinov - * @author Yana Stamcheva - */ -public class VideoLayout - extends FitLayout -{ - /** - * The video canvas constraint. - */ - public static final String CANVAS = "CANVAS"; - - /** - * The center remote video constraint. - */ - public static final String CENTER_REMOTE = "CENTER_REMOTE"; - - /** - * The close local video constraint. - */ - public static final String CLOSE_LOCAL_BUTTON = "CLOSE_LOCAL_BUTTON"; - - /** - * The east remote video constraint. - */ - public static final String EAST_REMOTE = "EAST_REMOTE"; - - /** - * The horizontal gap between the Component being laid out by - * VideoLayout. - */ - private static final int HGAP = 10; - - /** - * The local video constraint. - */ - public static final String LOCAL = "LOCAL"; - - /** - * The ration between the local and the remote video. - */ - private static final float LOCAL_TO_REMOTE_RATIO = 0.30f; - - /** - * The video canvas. - */ - private Component canvas; - - /** - * The close local video button component. - */ - private Component closeButton; - - /** - * The indicator which determines whether this instance is dedicated to a - * conference. - */ - private final boolean conference; - - /** - * The map of component constraints. - */ - private final Map constraints - = new HashMap(); - - /** - * The component containing the local video. - */ - private Component local; - - /** - * The x coordinate alignment of the remote video. - */ - private float remoteAlignmentX = Component.CENTER_ALIGNMENT; - - /** - * The list of Components depicting remote videos. - */ - private final List remotes = new LinkedList(); - - /** - * Creates an instance of VideoLayout by also indicating if this - * video layout is dedicated to a conference interface. 
- * - * @param conference true if the new instance will be dedicated to - * a conference; otherwise, false - */ - public VideoLayout(boolean conference) - { - this.conference = conference; - } - - /** - * Adds the given component in this layout on the specified by name - * position. - * - * @param name the constraint giving the position of the component in this - * layout - * @param comp the component to add - */ - @Override - public void addLayoutComponent(String name, Component comp) - { - super.addLayoutComponent(name, comp); - - synchronized (constraints) - { - constraints.put(comp, name); - } - - if ((name == null) || name.equals(CENTER_REMOTE)) - { - if (!remotes.contains(comp)) - remotes.add(comp); - remoteAlignmentX = Component.CENTER_ALIGNMENT; - } - else if (name.equals(EAST_REMOTE)) - { - if (!remotes.contains(comp)) - remotes.add(comp); - remoteAlignmentX = Component.RIGHT_ALIGNMENT; - } - else if (name.equals(LOCAL)) - local = comp; - else if (name.equals(CLOSE_LOCAL_BUTTON)) - closeButton = comp; - else if (name.equals(CANVAS)) - canvas = comp; - } - - /** - * Determines whether the aspect ratio of a specific Dimension is - * to be considered equal to the aspect ratio of specific width and - * height. 
- * - * @param size the Dimension whose aspect ratio is to be compared - * to the aspect ratio of width and height - * @param width the width which defines in combination with height - * the aspect ratio to be compared to the aspect ratio of size - * @param height the height which defines in combination with width - * the aspect ratio to be compared to the aspect ratio of size - * @return true if the aspect ratio of size is to be - * considered equal to the aspect ratio of width and - * height; otherwise, false - */ - public static boolean areAspectRatiosEqual( - Dimension size, - int width, int height) - { - if ((size.height == 0) || (height == 0)) - return false; - else - { - double a = size.width / (double) size.height; - double b = width / (double) height; - double diff = a - b; - - return (-0.01 < diff) && (diff < 0.01); - } - } - - /** - * Determines how may columns to use for the grid display of specific remote - * visual/video Components. - * - * @param remotes the remote visual/video Components to be - * displayed in a grid - * @return the number of columns to use for the grid display of the - * specified remote visual/video Components - */ - private int calculateColumnCount(List remotes) - { - int remoteCount = remotes.size(); - - if (remoteCount == 1) - return 1; - else if ((remoteCount == 2) || (remoteCount == 4)) - return 2; - else - return 3; - } - - /** - * Returns the remote video component. - * - * @return the remote video component - */ - @Override - protected Component getComponent(Container parent) - { - return (remotes.size() == 1) ? remotes.get(0) : null; - } - - /** - * Returns the constraints for the given component. - * - * @param c the component for which constraints we're looking for - * @return the constraints for the given component - */ - public Object getComponentConstraints(Component c) - { - synchronized (constraints) - { - return constraints.get(c); - } - } - - /** - * Returns the local video component. 
- * - * @return the local video component - */ - public Component getLocal() - { - return local; - } - - /** - * Returns the local video close button. - * - * @return the local video close button - */ - public Component getLocalCloseButton() - { - return closeButton; - } - - /** - * Lays out the specified Container (i.e. the Components - * it contains) in accord with the logic implemented by this - * LayoutManager. - * - * @param parent the Container to lay out - */ - @Override - public void layoutContainer(Container parent) - { - /* - * XXX The methods layoutContainer and preferredLayoutSize must be kept - * in sync. - */ - - List visibleRemotes = new ArrayList(); - List remotes; - Component local = getLocal(); - - for (int i = 0; i < this.remotes.size(); i++) - { - if (this.remotes.get(i).isVisible()) - visibleRemotes.add(this.remotes.get(i)); - } - - /* - * When there are multiple remote visual/video Components, the local one - * will be displayed as if it is a remote one i.e. in the same grid, not - * on top of a remote one. The same layout will be used when this - * instance is dedicated to a telephony conference. - */ - if (conference || ((visibleRemotes.size() > 1) && (local != null))) - { - remotes = new ArrayList(); - remotes.addAll(visibleRemotes); - if (local != null) - remotes.add(local); - } - else - remotes = visibleRemotes; - - int remoteCount = remotes.size(); - Dimension parentSize = parent.getSize(); - - if (!conference && (remoteCount == 1)) - { - /* - * If the videos are to be laid out as in a one-to-one call, the - * remote video has to fill the parent and the local video will be - * placed on top of the remote video. The remote video will be laid - * out now and the local video will be laid out later/further - * bellow. - */ - super.layoutContainer( - parent, - (local == null) - ? 
package org.jitsi.util.swing;

import java.awt.*;
import java.util.*;
import java.util.List;

import javax.swing.*;

/**
 * Implements the LayoutManager which lays out the local and remote
 * videos in a video Call.
 *
 * @author Lyubomir Marinov
 * @author Yana Stamcheva
 */
public class VideoLayout
    extends FitLayout
{
    /**
     * The video canvas constraint.
     */
    public static final String CANVAS = "CANVAS";

    /**
     * The center remote video constraint.
     */
    public static final String CENTER_REMOTE = "CENTER_REMOTE";

    /**
     * The close local video constraint.
     */
    public static final String CLOSE_LOCAL_BUTTON = "CLOSE_LOCAL_BUTTON";

    /**
     * The east remote video constraint.
     */
    public static final String EAST_REMOTE = "EAST_REMOTE";

    /**
     * The horizontal gap between the Components being laid out by
     * VideoLayout.
     */
    private static final int HGAP = 10;

    /**
     * The local video constraint.
     */
    public static final String LOCAL = "LOCAL";

    /**
     * The ratio between the local and the remote video.
     */
    private static final float LOCAL_TO_REMOTE_RATIO = 0.30f;

    /**
     * The video canvas.
     */
    private Component canvas;

    /**
     * The close local video button component.
     */
    private Component closeButton;

    /**
     * The indicator which determines whether this instance is dedicated to a
     * conference.
     */
    private final boolean conference;

    /**
     * The map of component constraints. Guarded by synchronizing on the map
     * itself.
     */
    private final Map<Component, String> constraints
        = new HashMap<Component, String>();

    /**
     * The component containing the local video.
     */
    private Component local;

    /**
     * The x coordinate alignment of the remote video.
     */
    private float remoteAlignmentX = Component.CENTER_ALIGNMENT;

    /**
     * The list of Components depicting remote videos.
     */
    private final List<Component> remotes = new LinkedList<Component>();

    /**
     * Creates an instance of VideoLayout by also indicating if this
     * video layout is dedicated to a conference interface.
     *
     * @param conference true if the new instance will be dedicated to
     * a conference; otherwise, false
     */
    public VideoLayout(boolean conference)
    {
        this.conference = conference;
    }

    /**
     * Adds the given component in this layout on the specified by name
     * position.
     *
     * @param name the constraint giving the position of the component in this
     * layout
     * @param comp the component to add
     */
    @Override
    public void addLayoutComponent(String name, Component comp)
    {
        super.addLayoutComponent(name, comp);

        synchronized (constraints)
        {
            constraints.put(comp, name);
        }

        if ((name == null) || name.equals(CENTER_REMOTE))
        {
            if (!remotes.contains(comp))
                remotes.add(comp);
            remoteAlignmentX = Component.CENTER_ALIGNMENT;
        }
        else if (name.equals(EAST_REMOTE))
        {
            if (!remotes.contains(comp))
                remotes.add(comp);
            remoteAlignmentX = Component.RIGHT_ALIGNMENT;
        }
        else if (name.equals(LOCAL))
            local = comp;
        else if (name.equals(CLOSE_LOCAL_BUTTON))
            closeButton = comp;
        else if (name.equals(CANVAS))
            canvas = comp;
    }

    /**
     * Determines whether the aspect ratio of a specific Dimension is
     * to be considered equal to the aspect ratio of specific width and
     * height.
     *
     * @param size the Dimension whose aspect ratio is to be compared
     * to the aspect ratio of width and height
     * @param width the width which defines in combination with height
     * the aspect ratio to be compared to the aspect ratio of size
     * @param height the height which defines in combination with width
     * the aspect ratio to be compared to the aspect ratio of size
     * @return true if the aspect ratio of size is to be
     * considered equal to the aspect ratio of width and
     * height; otherwise, false
     */
    public static boolean areAspectRatiosEqual(
            Dimension size,
            int width, int height)
    {
        if ((size.height == 0) || (height == 0))
            return false;
        else
        {
            double a = size.width / (double) size.height;
            double b = width / (double) height;
            double diff = a - b;

            /* Consider the aspect ratios equal within a 1% tolerance. */
            return (-0.01 < diff) && (diff < 0.01);
        }
    }

    /**
     * Determines how many columns to use for the grid display of specific
     * remote visual/video Components.
     *
     * @param remotes the remote visual/video Components to be
     * displayed in a grid
     * @return the number of columns to use for the grid display of the
     * specified remote visual/video Components
     */
    private int calculateColumnCount(List<Component> remotes)
    {
        int remoteCount = remotes.size();

        if (remoteCount == 1)
            return 1;
        else if ((remoteCount == 2) || (remoteCount == 4))
            return 2;
        else
            return 3;
    }

    /**
     * Returns the remote video component.
     *
     * @return the remote video component
     */
    @Override
    protected Component getComponent(Container parent)
    {
        return (remotes.size() == 1) ? remotes.get(0) : null;
    }

    /**
     * Returns the constraints for the given component.
     *
     * @param c the component for which constraints we're looking for
     * @return the constraints for the given component
     */
    public Object getComponentConstraints(Component c)
    {
        synchronized (constraints)
        {
            return constraints.get(c);
        }
    }

    /**
     * Returns the local video component.
     *
     * @return the local video component
     */
    public Component getLocal()
    {
        return local;
    }

    /**
     * Returns the local video close button.
     *
     * @return the local video close button
     */
    public Component getLocalCloseButton()
    {
        return closeButton;
    }

    /**
     * Lays out the specified Container (i.e. the Components
     * it contains) in accord with the logic implemented by this
     * LayoutManager.
     *
     * @param parent the Container to lay out
     */
    @Override
    public void layoutContainer(Container parent)
    {
        /*
         * XXX The methods layoutContainer and preferredLayoutSize must be kept
         * in sync.
         */

        List<Component> visibleRemotes = new ArrayList<Component>();
        List<Component> remotes;
        Component local = getLocal();

        for (Component remote : this.remotes)
        {
            if (remote.isVisible())
                visibleRemotes.add(remote);
        }

        /*
         * When there are multiple remote visual/video Components, the local one
         * will be displayed as if it is a remote one i.e. in the same grid, not
         * on top of a remote one. The same layout will be used when this
         * instance is dedicated to a telephony conference.
         */
        if (conference || ((visibleRemotes.size() > 1) && (local != null)))
        {
            remotes = new ArrayList<Component>();
            remotes.addAll(visibleRemotes);
            if (local != null)
                remotes.add(local);
        }
        else
            remotes = visibleRemotes;

        int remoteCount = remotes.size();
        Dimension parentSize = parent.getSize();

        if (!conference && (remoteCount == 1))
        {
            /*
             * If the videos are to be laid out as in a one-to-one call, the
             * remote video has to fill the parent and the local video will be
             * placed on top of the remote video. The remote video will be laid
             * out now and the local video will be laid out later/further
             * below.
             */
            super.layoutContainer(
                    parent,
                    (local == null)
                        ? Component.CENTER_ALIGNMENT
                        : remoteAlignmentX);
        }
        else if (remoteCount > 0)
        {
            int columns = calculateColumnCount(remotes);
            int columnsMinus1 = columns - 1;
            int rows = (remoteCount + columnsMinus1) / columns;
            int rowsMinus1 = rows - 1;
            Rectangle bounds
                = new Rectangle(
                        0,
                        0,
                        /*
                         * HGAP is the horizontal gap between the Components
                         * being laid out by this VideoLayout so the number of
                         * HGAPs will be one less than the number of columns
                         * and that horizontal space cannot be allocated to the
                         * bounds of the Components.
                         */
                        (parentSize.width - (columnsMinus1 * HGAP)) / columns,
                        parentSize.height / rows);

            for (int i = 0; i < remoteCount; i++)
            {
                int column = i % columns;
                int row = i / columns;

                /*
                 * On the x axis, the first column starts at zero and each
                 * subsequent column starts relative to the end of its preceding
                 * column.
                 */
                if (column == 0)
                {
                    bounds.x = 0;
                    /*
                     * Eventually, there may be empty cells in the last row.
                     * Center the non-empty cells horizontally.
                     */
                    if (row == rowsMinus1)
                    {
                        int available = remoteCount - i;

                        if (available < columns)
                        {
                            bounds.x
                                = (parentSize.width
                                        - available * bounds.width
                                        - (available - 1) * HGAP)
                                    / 2;
                        }
                    }
                }
                else
                    bounds.x += (bounds.width + HGAP);
                bounds.y = row * bounds.height;

                super.layoutComponent(
                        remotes.get(i),
                        bounds,
                        Component.CENTER_ALIGNMENT,
                        Component.CENTER_ALIGNMENT);
            }
        }

        if (local == null)
        {
            /*
             * It is plain wrong to display a close button for the local video
             * if there is no local video.
             */
            if (closeButton != null)
                closeButton.setVisible(false);
        }
        else
        {
            /*
             * If the local visual/video Component is not displayed as if it is
             * a remote one, it will be placed on top of a remote one.
             */
            if (!remotes.contains(local))
            {
                Component remote0 = remotes.isEmpty() ? null : remotes.get(0);
                int localX;
                int localY;
                int height
                    = Math.round(parentSize.height * LOCAL_TO_REMOTE_RATIO);
                int width
                    = Math.round(parentSize.width * LOCAL_TO_REMOTE_RATIO);
                float alignmentX;

                /*
                 * XXX The remote Component being a JLabel is meant to signal
                 * that there is no remote video and the remote is the
                 * photoLabel.
                 */
                if ((remoteCount == 1) && (remote0 instanceof JLabel))
                {
                    localX = (parentSize.width - width) / 2;
                    localY = parentSize.height - height;
                    alignmentX = Component.CENTER_ALIGNMENT;
                }
                else
                {
                    localX = ((remote0 == null) ? 0 : remote0.getX()) + 5;
                    localY = parentSize.height - height - 5;
                    alignmentX = Component.LEFT_ALIGNMENT;
                }
                super.layoutComponent(
                        local,
                        new Rectangle(localX, localY, width, height),
                        alignmentX,
                        Component.BOTTOM_ALIGNMENT);
            }

            /* The closeButton has to be on top of the local video. */
            if (closeButton != null)
            {
                /*
                 * XXX We may be overwriting the visible property set by our
                 * client (who has initialized the close button) but it is wrong
                 * to display a close button for the local video if the local
                 * video is not visible.
                 */
                closeButton.setVisible(local.isVisible());

                super.layoutComponent(
                        closeButton,
                        new Rectangle(
                                local.getX()
                                    + local.getWidth()
                                    - closeButton.getWidth(),
                                local.getY(),
                                closeButton.getWidth(),
                                closeButton.getHeight()),
                        Component.CENTER_ALIGNMENT,
                        Component.CENTER_ALIGNMENT);
            }
        }

        /*
         * The video canvas will get the locations of the other components to
         * paint so it has to cover the parent completely.
         */
        if (canvas != null)
            canvas.setBounds(0, 0, parentSize.width, parentSize.height);
    }

    /**
     * Returns the preferred layout size for the given container.
     *
     * @param parent the container which preferred layout size we're looking for
     * @return a Dimension containing the preferred layout size for the given
     * container
     */
    @Override
    public Dimension preferredLayoutSize(Container parent)
    {
        List<Component> visibleRemotes = new ArrayList<Component>();
        List<Component> remotes;
        Component local = getLocal();

        for (Component remote : this.remotes)
        {
            if (remote.isVisible())
                visibleRemotes.add(remote);
        }

        /*
         * When there are multiple remote visual/video Components, the local one
         * will be displayed as if it is a remote one i.e. in the same grid, not
         * on top of a remote one. The same layout will be used when this
         * instance is dedicated to a telephony conference.
         */
        if (conference || ((visibleRemotes.size() > 1) && (local != null)))
        {
            remotes = new ArrayList<Component>();
            remotes.addAll(visibleRemotes);
            if (local != null)
                remotes.add(local);
        }
        else
            remotes = visibleRemotes;

        int remoteCount = remotes.size();
        Dimension prefLayoutSize;

        if (!conference && (remoteCount == 1))
        {
            /*
             * If the videos are to be laid out as in a one-to-one call, the
             * remote video has to fill the parent and the local video will be
             * placed on top of the remote video. The remote video will be laid
             * out now and the local video will be laid out later/further
             * below.
             */
            prefLayoutSize = super.preferredLayoutSize(parent);
        }
        else if (remoteCount > 0)
        {
            int columns = calculateColumnCount(remotes);
            int columnsMinus1 = columns - 1;
            int rows = (remoteCount + columnsMinus1) / columns;
            int i = 0;
            Dimension[] prefSizes = new Dimension[columns * rows];

            for (Component remote : remotes)
            {
                int column = columnsMinus1 - (i % columns);
                int row = i / columns;

                prefSizes[column + row * columns] = remote.getPreferredSize();

                i++;
                if (i >= remoteCount)
                    break;
            }

            /* Average the preferred column widths over the rows. */
            int prefLayoutWidth = 0;

            for (int column = 0; column < columns; column++)
            {
                int prefColumnWidth = 0;

                for (int row = 0; row < rows; row++)
                {
                    Dimension prefSize = prefSizes[column + row * columns];

                    if (prefSize != null)
                        prefColumnWidth += prefSize.width;
                }
                prefColumnWidth /= rows;

                prefLayoutWidth += prefColumnWidth;
            }

            /* Average the preferred row heights over the columns. */
            int prefLayoutHeight = 0;

            for (int row = 0; row < rows; row++)
            {
                int prefRowHeight = 0;

                for (int column = 0; column < columns; column++)
                {
                    Dimension prefSize = prefSizes[column + row * columns];

                    /*
                     * Accumulate (do not overwrite) so that the division by
                     * columns below yields the average height of the row,
                     * mirroring the width calculation above.
                     */
                    if (prefSize != null)
                        prefRowHeight += prefSize.height;
                }
                prefRowHeight /= columns;

                prefLayoutHeight += prefRowHeight;
            }

            prefLayoutSize
                = new Dimension(
                        prefLayoutWidth + columnsMinus1 * HGAP,
                        prefLayoutHeight);
        }
        else
            prefLayoutSize = null;

        if (local != null)
        {
            /*
             * If the local visual/video Component is not displayed as if it is
             * a remote one, it will be placed on top of a remote one. Then for
             * the purposes of the preferredLayoutSize method it needs to be
             * considered only if there is no remote video whatsoever.
             */
            if (!remotes.contains(local) && (prefLayoutSize == null))
            {
                Dimension prefSize = local.getPreferredSize();

                if (prefSize != null)
                {
                    int prefHeight
                        = Math.round(prefSize.height * LOCAL_TO_REMOTE_RATIO);
                    int prefWidth
                        = Math.round(prefSize.width * LOCAL_TO_REMOTE_RATIO);

                    prefLayoutSize = new Dimension(prefWidth, prefHeight);
                }
            }

            /*
             * The closeButton has to be on top of the local video.
             * Consequently, the preferredLayoutSize method does not have to
             * consider it. Well, maybe it does if the local video is smaller
             * than the closeButton... but that's just not cool anyway.
             */
        }

        /*
         * The video canvas will get the locations of the other components to
         * paint so it has to cover the parent completely. In other words, the
         * preferredLayoutSize method does not have to consider it.
         */

        if (prefLayoutSize == null)
            prefLayoutSize = super.preferredLayoutSize(parent);
        else if ((prefLayoutSize.height < 1) || (prefLayoutSize.width < 1))
        {
            prefLayoutSize.height = DEFAULT_HEIGHT_OR_WIDTH;
            prefLayoutSize.width = DEFAULT_HEIGHT_OR_WIDTH;
        }

        return prefLayoutSize;
    }

    /**
     * Removes the given component from this layout.
     *
     * @param comp the component to remove from the layout
     */
    @Override
    public void removeLayoutComponent(Component comp)
    {
        super.removeLayoutComponent(comp);

        synchronized (constraints)
        {
            constraints.remove(comp);
        }

        if (local == comp)
            local = null;
        else if (closeButton == comp)
            closeButton = null;
        else if (canvas == comp)
            canvas = null;
        else
            remotes.remove(comp);
    }
}