-{
- /**
- * The Logger used by the DirectShowStream class and its
- * instances to print out debugging information.
- */
- private static final Logger logger
- = Logger.getLogger(DirectShowStream.class);
-
- /**
- * Determines whether a specific Format appears to be suitable for
- * attempts to be set on DirectShowStream instances.
- *
- * Note: If the method returns true, an actual attempt to
- * set the specified format on an specific
- * DirectShowStream instance may still fail but that will be
- * because the finer-grained properties of the format are not
- * supported by that DirectShowStream instance.
- *
- *
- * @param format the Format to be checked whether it appears to be
- * suitable for attempts to be set on DirectShowStream instances
- * @return true if the specified format appears to be
- * suitable for attempts to be set on DirectShowStream instance;
- * otherwise, false
- */
- static boolean isSupportedFormat(Format format)
- {
- if (format instanceof AVFrameFormat)
- {
- AVFrameFormat avFrameFormat = (AVFrameFormat) format;
- long pixFmt = avFrameFormat.getDeviceSystemPixFmt();
-
- if (pixFmt != -1)
- {
- Dimension size = avFrameFormat.getSize();
-
- /*
- * We will set the native format in doStart() because a
- * connect-disconnect-connect sequence of the native capture
- * device may reorder its formats in a different way.
- * Consequently, in the absence of further calls to
- * setFormat() by JMF, a crash may occur later (typically,
- * during scaling) because of a wrong format.
- */
- if (size != null)
- return true;
- }
- }
- return false;
- }
-
- /**
- * The indicator which determines whether {@link #delegate}
- * automatically drops late frames. If false, we have to drop them
- * ourselves because DirectShow will buffer them all and the video will
- * be late.
- */
- private final boolean automaticallyDropsLateVideoFrames = false;
-
- /**
- * The pool of ByteBuffers this instances is using to transfer the
- * media data captured by {@link #delegate} out of this instance
- * through the Buffers specified in its {@link #read(Buffer)}.
- */
- private final ByteBufferPool byteBufferPool = new ByteBufferPool();
-
- /**
- * The captured media data to be returned in {@link #read(Buffer)}.
- */
- private ByteBuffer data;
-
- /**
- * The Object which synchronizes the access to the
- * {@link #data}-related fields of this instance.
- */
- private final Object dataSyncRoot = new Object();
-
- /**
- * The time stamp in nanoseconds of {@link #data}.
- */
- private long dataTimeStamp;
-
- /**
- * Delegate class to handle video data.
- */
- private final DSCaptureDevice.ISampleGrabberCB delegate
- = new DSCaptureDevice.ISampleGrabberCB()
- {
- @Override
- public void SampleCB(long source, long ptr, int length)
- {
- DirectShowStream.this.SampleCB(source, ptr, length);
- }
- };
-
- /**
- * The DSCaptureDevice which identifies the DirectShow video
- * capture device this SourceStream is to capture data from.
- */
- private DSCaptureDevice device;
-
- /**
- * The last-known Format of the media data made available by this
- * PushBufferStream.
- */
- private Format format;
-
- /**
- * The captured media data to become the value of {@link #data} as soon as
- * the latter becomes is consumed. Thus prepares this
- * DirectShowStream to provide the latest available frame and not
- * wait for DirectShow to capture a new one.
- */
- private ByteBuffer nextData;
-
- /**
- * The time stamp in nanoseconds of {@link #nextData}.
- */
- private long nextDataTimeStamp;
-
- /**
- * The Thread which is to call
- * {@link BufferTransferHandler#transferData(PushBufferStream)} for this
- * DirectShowStream so that the call is not made in DirectShow
- * and we can drop late frames when
- * {@link #automaticallyDropsLateVideoFrames} is false.
- */
- private Thread transferDataThread;
-
- /**
- * Native Video pixel format.
- */
- private int nativePixelFormat = 0;
-
- /**
- * The AVCodecContext of the MJPEG decoder.
- */
- private long avctx = 0;
-
- /**
- * The AVFrame which represents the media data decoded by the MJPEG
- * decoder/{@link #avctx}.
- */
- private long avframe = 0;
-
- /**
- * Initializes a new DirectShowStream instance which is to have its
- * Format-related information abstracted by a specific
- * FormatControl.
- *
- * @param dataSource the DataSource which is creating the new
- * instance so that it becomes one of its streams
- * @param formatControl the FormatControl which is to abstract the
- * Format-related information of the new instance
- */
- DirectShowStream(DataSource dataSource, FormatControl formatControl)
- {
- super(dataSource, formatControl);
- }
-
- /**
- * Connects this SourceStream to the DirectShow video capture
- * device identified by {@link #device}.
- *
- * @throws IOException if anything goes wrong while this
- * SourceStream connects to the DirectShow video capture device
- * identified by device
- */
- private void connect()
- throws IOException
- {
- if (device == null)
- throw new IOException("device == null");
- else
- device.setDelegate(delegate);
- }
-
- /**
- * Disconnects this SourceStream from the DirectShow video capture
- * device it has previously connected to during the execution of
- * {@link #connect()}.
- *
- * @throws IOException if anything goes wrong while this
- * SourceStream disconnects from the DirectShow video capture
- * device it has previously connected to during the execution of
- * connect()
- */
- private void disconnect()
- throws IOException
- {
- try
- {
- stop();
- }
- finally
- {
- if (device != null)
- device.setDelegate(null);
- }
- }
-
- /**
- * Gets the Format of this PushBufferStream as directly
- * known by it.
- *
- * @return the Format of this PushBufferStream as directly
- * known by it or null if this PushBufferStream does not
- * directly know its Format and it relies on the
- * PushBufferDataSource which created it to report its
- * Format
- */
- @Override
- protected Format doGetFormat()
- {
- return (format == null) ? super.doGetFormat() : format;
- }
-
- /**
- * {@inheritDoc}
- *
- * Overrides the super implementation to enable setting the Format
- * of this DirectShowStream after the DataSource which
- * provides it has been connected.
- */
- @Override
- protected Format doSetFormat(Format format)
- {
- if (isSupportedFormat(format))
- {
- if (device == null)
- return format;
- else
- {
- try
- {
- setDeviceFormat(format);
- }
- catch (IOException ioe)
- {
- logger.error(
- "Failed to set format on DirectShowStream: "
- + format,
- ioe);
- /*
- * Ignore the exception because the method is to report
- * failures by returning null (which will be achieved
- * outside the catch block).
- */
- }
- return format.matches(this.format) ? format : null;
- }
- }
- else
- return super.doSetFormat(format);
- }
-
- /**
- * Reads media data from this PushBufferStream into a specific
- * Buffer without blocking.
- *
- * @param buffer the Buffer in which media data is to be read from
- * this PushBufferStream
- * @throws IOException if anything goes wrong while reading media data from
- * this PushBufferStream into the specified buffer
- */
- public void read(Buffer buffer) throws IOException
- {
- synchronized (dataSyncRoot)
- {
- if(data == null)
- {
- buffer.setLength(0);
- return;
- }
-
- Format bufferFormat = buffer.getFormat();
-
- if(bufferFormat == null)
- {
- bufferFormat = getFormat();
- if(bufferFormat != null)
- buffer.setFormat(bufferFormat);
- }
- if(bufferFormat instanceof AVFrameFormat)
- {
- if(nativePixelFormat == DSFormat.MJPG)
- {
- /* Initialize the FFmpeg MJPEG decoder if necessary. */
- if(avctx == 0)
- {
- long avcodec
- = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_MJPEG);
-
- avctx = FFmpeg.avcodec_alloc_context3(avcodec);
- FFmpeg.avcodeccontext_set_workaround_bugs(avctx,
- FFmpeg.FF_BUG_AUTODETECT);
-
- if (FFmpeg.avcodec_open2(avctx, avcodec) < 0)
- {
- throw new RuntimeException("" +
- "Could not open codec CODEC_ID_MJPEG");
- }
-
- avframe = FFmpeg.avcodec_alloc_frame();
- }
-
- if(FFmpeg.avcodec_decode_video(
- avctx, avframe, data.getPtr(), data.getLength()) != -1)
- {
- Object out = buffer.getData();
-
- if (!(out instanceof AVFrame)
- || (((AVFrame) out).getPtr() != avframe))
- {
- buffer.setData(new AVFrame(avframe));
- }
- }
-
- data.free();
- data = null;
- }
- else
- {
- if (AVFrame.read(buffer, bufferFormat, data) < 0)
- data.free();
- /*
- * XXX For the sake of safety, make sure that this instance does
- * not reference the data instance as soon as it is set on the
- * AVFrame.
- */
- data = null;
- }
- }
- else
- {
- Object o = buffer.getData();
- byte[] bytes;
- int length = data.getLength();
-
- if(o instanceof byte[])
- {
- bytes = (byte[]) o;
- if(bytes.length < length)
- bytes = null;
- }
- else
- bytes = null;
- if(bytes == null)
- {
- bytes = new byte[length];
- buffer.setData(bytes);
- }
-
- /*
- * TODO Copy the media from the native memory into the Java
- * heap.
- */
- data.free();
- data = null;
-
- buffer.setLength(length);
- buffer.setOffset(0);
- }
-
- buffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME);
- buffer.setTimeStamp(dataTimeStamp);
-
- if(!automaticallyDropsLateVideoFrames)
- dataSyncRoot.notifyAll();
- }
- }
-
- /**
- * Calls {@link BufferTransferHandler#transferData(PushBufferStream)} from
- * inside {@link #transferDataThread} so that the call is not made in
- * DirectShow and we can drop late frames in the meantime.
- */
- private void runInTransferDataThread()
- {
- boolean transferData = false;
- FrameRateControl frameRateControl
- = (FrameRateControl)
- dataSource.getControl(FrameRateControl.class.getName());
- long transferDataTimeStamp = -1;
-
- while (Thread.currentThread().equals(transferDataThread))
- {
- if (transferData)
- {
- BufferTransferHandler transferHandler = this.transferHandler;
-
- if (transferHandler != null)
- {
- /*
- * Respect the frame rate specified through the
- * FrameRateControl of the associated DataSource.
- */
- if (frameRateControl != null)
- {
- float frameRate;
- long newTransferDataTimeStamp
- = System.currentTimeMillis();
-
- if ((transferDataTimeStamp != -1)
- && ((frameRate
- = frameRateControl.getFrameRate())
- > 0))
- {
- long minimumVideoFrameInterval
- = (long) (1000 / frameRate);
-
- if (minimumVideoFrameInterval > 0)
- {
- long t
- = newTransferDataTimeStamp
- - transferDataTimeStamp;
-
- if ((t > 0) && (t < minimumVideoFrameInterval))
- {
- boolean interrupted = false;
-
- try
- {
- Thread.sleep(
- minimumVideoFrameInterval - t);
- }
- catch (InterruptedException ie)
- {
- interrupted = true;
- }
- if (interrupted)
- Thread.currentThread().interrupt();
- continue;
- }
- }
- }
-
- transferDataTimeStamp = newTransferDataTimeStamp;
- }
-
- transferHandler.transferData(this);
- }
-
- synchronized (dataSyncRoot)
- {
- if (data != null)
- data.free();
- data = nextData;
- dataTimeStamp = nextDataTimeStamp;
- nextData = null;
- }
- }
-
- synchronized (dataSyncRoot)
- {
- if (data == null)
- {
- data = nextData;
- dataTimeStamp = nextDataTimeStamp;
- nextData = null;
- }
- if (data == null)
- {
- boolean interrupted = false;
-
- try
- {
- dataSyncRoot.wait();
- }
- catch (InterruptedException iex)
- {
- interrupted = true;
- }
- if(interrupted)
- Thread.currentThread().interrupt();
-
- transferData = (data != null);
- }
- else
- transferData = true;
- }
- }
- }
-
- /**
- * Process received frames from DirectShow capture device
- *
- * @param source pointer to the native DSCaptureDevice which is the
- * source of the notification
- * @param ptr native pointer to data
- * @param length length of data
- */
- private void SampleCB(long source, long ptr, int length)
- {
- boolean transferData = false;
-
- synchronized (dataSyncRoot)
- {
- if(!automaticallyDropsLateVideoFrames && (data != null))
- {
- if (nextData != null)
- {
- nextData.free();
- nextData = null;
- }
- nextData = byteBufferPool.getBuffer(length);
- if(nextData != null)
- {
- nextData.setLength(
- DSCaptureDevice.samplecopy(
- source,
- ptr, nextData.getPtr(), length));
- nextDataTimeStamp = System.nanoTime();
- }
-
- return;
- }
-
- if (data != null)
- {
- data.free();
- data = null;
- }
- data = byteBufferPool.getBuffer(length);
- if(data != null)
- {
- data.setLength(
- DSCaptureDevice.samplecopy(
- source,
- ptr, data.getPtr(), length));
- dataTimeStamp = System.nanoTime();
- }
-
- if (nextData != null)
- {
- nextData.free();
- nextData = null;
- }
-
- if(automaticallyDropsLateVideoFrames)
- transferData = (data != null);
- else
- {
- transferData = false;
- dataSyncRoot.notifyAll();
- }
- }
-
- if(transferData)
- {
- BufferTransferHandler transferHandler = this.transferHandler;
-
- if(transferHandler != null)
- transferHandler.transferData(this);
- }
- }
-
- /**
- * Sets the DSCaptureDevice of this instance which identifies the
- * DirectShow video capture device this SourceStream is to capture
- * data from.
- *
- * @param device a DSCaptureDevice which identifies the DirectShow
- * video capture device this SourceStream is to capture data from
- * @throws IOException if anything goes wrong while setting the specified
- * device on this instance
- */
- void setDevice(DSCaptureDevice device)
- throws IOException
- {
- if (this.device != device)
- {
- if (this.device != null)
- disconnect();
-
- this.device = device;
-
- if (this.device != null)
- connect();
- }
- }
-
- /**
- * Sets a specific Format on the DSCaptureDevice of this
- * instance.
- *
- * @param format the Format to set on the DSCaptureDevice
- * of this instance
- * @throws IOException if setting the specified format on the
- * DSCaptureDevice of this instance fails
- */
- private void setDeviceFormat(Format format)
- throws IOException
- {
- if (format == null)
- throw new IOException("format == null");
- else if (format instanceof AVFrameFormat)
- {
- AVFrameFormat avFrameFormat = (AVFrameFormat) format;
- nativePixelFormat = avFrameFormat.getDeviceSystemPixFmt();
- Dimension size = avFrameFormat.getSize();
-
- if (size == null)
- throw new IOException("format.size == null");
- else
- {
- int hresult
- = device.setFormat(
- new DSFormat(
- size.width, size.height,
- avFrameFormat.getDeviceSystemPixFmt()));
-
- switch (hresult)
- {
- case DSCaptureDevice.S_FALSE:
- case DSCaptureDevice.S_OK:
- this.format = format;
- if (logger.isDebugEnabled())
- {
- logger.debug(
- "Set format on DirectShowStream: " + format);
- }
- break;
- default:
- throwNewHResultException(hresult);
- }
- }
- }
- else
- throw new IOException("!(format instanceof AVFrameFormat)");
- }
-
- /**
- * Starts the transfer of media data from this PushBufferStream.
- *
- * @throws IOException if anything goes wrong while starting the transfer of
- * media data from this PushBufferStream
- */
- @Override
- public void start()
- throws IOException
- {
- super.start();
-
- boolean started = false;
-
- try
- {
- setDeviceFormat(getFormat());
-
- if(!automaticallyDropsLateVideoFrames)
- {
- if (transferDataThread == null)
- {
- transferDataThread
- = new Thread(getClass().getSimpleName())
- {
- @Override
- public void run()
- {
- runInTransferDataThread();
- }
- };
- transferDataThread.start();
- }
- }
-
- device.start();
-
- started = true;
- }
- finally
- {
- if (!started)
- stop();
- }
- }
-
- /**
- * Stops the transfer of media data from this PushBufferStream.
- *
- * @throws IOException if anything goes wrong while stopping the transfer of
- * media data from this PushBufferStream
- */
- @Override
- public void stop()
- throws IOException
- {
- try
- {
- device.stop();
-
- transferDataThread = null;
-
- synchronized (dataSyncRoot)
- {
- if (data != null)
- {
- data.free();
- data = null;
- }
- if (nextData != null)
- {
- nextData.free();
- nextData = null;
- }
-
- if(!automaticallyDropsLateVideoFrames)
- dataSyncRoot.notifyAll();
- }
- }
- finally
- {
- super.stop();
-
- if(avctx != 0)
- {
- FFmpeg.avcodec_close(avctx);
- FFmpeg.av_free(avctx);
- avctx = 0;
- }
-
- if(avframe != 0)
- {
- FFmpeg.avcodec_free_frame(avframe);
- avframe = 0;
- }
-
- byteBufferPool.drain();
- }
- }
-
- /**
- * Throws a new IOException the detail message of which describes
- * a specific HRESULT value indicating a failure.
- *
- * @param hresult the HRESUlT to be described by the detail message
- * of the new IOException to be thrown
- * @throws IOException
- */
- private void throwNewHResultException(int hresult)
- throws IOException
- {
- throw new IOException(
- "HRESULT 0x" + Long.toHexString(hresult & 0xffffffffL));
- }
-}
+package org.jitsi.impl.neomedia.jmfext.media.protocol.directshow;
+
+import java.awt.*;
+import java.io.*;
+
+import javax.media.*;
+import javax.media.control.*;
+import javax.media.protocol.*;
+
+import org.jitsi.impl.neomedia.codec.*;
+import org.jitsi.impl.neomedia.codec.video.*;
+import org.jitsi.impl.neomedia.jmfext.media.protocol.*;
+import org.jitsi.util.*;
+
+/**
+ * Implements a PushBufferStream using DirectShow.
+ *
+ * @author Lyubomir Marinov
+ * @author Sebastien Vincent
+ */
+public class DirectShowStream
+ extends AbstractPushBufferStream
+{
+ /**
+ * The Logger used by the DirectShowStream class and its
+ * instances to print out debugging information.
+ */
+ private static final Logger logger
+ = Logger.getLogger(DirectShowStream.class);
+
+ /**
+ * Determines whether a specific Format appears to be suitable for
+ * attempts to be set on DirectShowStream instances.
+ *
+ * Note: If the method returns true, an actual attempt to
+ * set the specified format on a specific
+ * DirectShowStream instance may still fail but that will be
+ * because the finer-grained properties of the format are not
+ * supported by that DirectShowStream instance.
+ *
+ *
+ * @param format the Format to be checked whether it appears to be
+ * suitable for attempts to be set on DirectShowStream instances
+ * @return true if the specified format appears to be
+ * suitable for attempts to be set on DirectShowStream instances;
+ * otherwise, false
+ */
+ static boolean isSupportedFormat(Format format)
+ {
+ if (format instanceof AVFrameFormat)
+ {
+ AVFrameFormat avFrameFormat = (AVFrameFormat) format;
+ long pixFmt = avFrameFormat.getDeviceSystemPixFmt();
+
+ if (pixFmt != -1)
+ {
+ Dimension size = avFrameFormat.getSize();
+
+ /*
+ * We will set the native format in doStart() because a
+ * connect-disconnect-connect sequence of the native capture
+ * device may reorder its formats in a different way.
+ * Consequently, in the absence of further calls to
+ * setFormat() by JMF, a crash may occur later (typically,
+ * during scaling) because of a wrong format.
+ */
+ if (size != null)
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * The indicator which determines whether {@link #delegate}
+ * automatically drops late frames. If false, we have to drop them
+ * ourselves because DirectShow will buffer them all and the video will
+ * be late.
+ */
+ private final boolean automaticallyDropsLateVideoFrames = false;
+
+ /**
+ * The pool of ByteBuffers this instance is using to transfer the
+ * media data captured by {@link #delegate} out of this instance
+ * through the Buffers specified in its {@link #read(Buffer)}.
+ */
+ private final ByteBufferPool byteBufferPool = new ByteBufferPool();
+
+ /**
+ * The captured media data to be returned in {@link #read(Buffer)}.
+ */
+ private ByteBuffer data;
+
+ /**
+ * The Object which synchronizes the access to the
+ * {@link #data}-related fields of this instance.
+ */
+ private final Object dataSyncRoot = new Object();
+
+ /**
+ * The time stamp in nanoseconds of {@link #data}.
+ */
+ private long dataTimeStamp;
+
+ /**
+ * Delegate class to handle video data.
+ */
+ private final DSCaptureDevice.ISampleGrabberCB delegate
+ = new DSCaptureDevice.ISampleGrabberCB()
+ {
+ @Override
+ public void SampleCB(long source, long ptr, int length)
+ {
+ DirectShowStream.this.SampleCB(source, ptr, length);
+ }
+ };
+
+ /**
+ * The DSCaptureDevice which identifies the DirectShow video
+ * capture device this SourceStream is to capture data from.
+ */
+ private DSCaptureDevice device;
+
+ /**
+ * The last-known Format of the media data made available by this
+ * PushBufferStream.
+ */
+ private Format format;
+
+ /**
+ * The captured media data to become the value of {@link #data} as soon as
+ * the latter is consumed. Thus it prepares this
+ * DirectShowStream to provide the latest available frame and not
+ * wait for DirectShow to capture a new one.
+ */
+ private ByteBuffer nextData;
+
+ /**
+ * The time stamp in nanoseconds of {@link #nextData}.
+ */
+ private long nextDataTimeStamp;
+
+ /**
+ * The Thread which is to call
+ * {@link BufferTransferHandler#transferData(PushBufferStream)} for this
+ * DirectShowStream so that the call is not made in DirectShow
+ * and we can drop late frames when
+ * {@link #automaticallyDropsLateVideoFrames} is false.
+ */
+ private Thread transferDataThread;
+
+ /**
+ * Native Video pixel format.
+ */
+ private int nativePixelFormat = 0;
+
+ /**
+ * The AVCodecContext of the MJPEG decoder.
+ */
+ private long avctx = 0;
+
+ /**
+ * The AVFrame which represents the media data decoded by the MJPEG
+ * decoder/{@link #avctx}.
+ */
+ private long avframe = 0;
+
+ /**
+ * Initializes a new DirectShowStream instance which is to have its
+ * Format-related information abstracted by a specific
+ * FormatControl.
+ *
+ * @param dataSource the DataSource which is creating the new
+ * instance so that it becomes one of its streams
+ * @param formatControl the FormatControl which is to abstract the
+ * Format-related information of the new instance
+ */
+ DirectShowStream(DataSource dataSource, FormatControl formatControl)
+ {
+ super(dataSource, formatControl);
+ }
+
+ /**
+ * Connects this SourceStream to the DirectShow video capture
+ * device identified by {@link #device}.
+ *
+ * @throws IOException if anything goes wrong while this
+ * SourceStream connects to the DirectShow video capture device
+ * identified by device
+ */
+ private void connect()
+ throws IOException
+ {
+ if (device == null)
+ throw new IOException("device == null");
+ else
+ device.setDelegate(delegate);
+ }
+
+ /**
+ * Disconnects this SourceStream from the DirectShow video capture
+ * device it has previously connected to during the execution of
+ * {@link #connect()}.
+ *
+ * @throws IOException if anything goes wrong while this
+ * SourceStream disconnects from the DirectShow video capture
+ * device it has previously connected to during the execution of
+ * connect()
+ */
+ private void disconnect()
+ throws IOException
+ {
+ try
+ {
+ stop();
+ }
+ finally
+ {
+ if (device != null)
+ device.setDelegate(null);
+ }
+ }
+
+ /**
+ * Gets the Format of this PushBufferStream as directly
+ * known by it.
+ *
+ * @return the Format of this PushBufferStream as directly
+ * known by it or null if this PushBufferStream does not
+ * directly know its Format and it relies on the
+ * PushBufferDataSource which created it to report its
+ * Format
+ */
+ @Override
+ protected Format doGetFormat()
+ {
+ return (format == null) ? super.doGetFormat() : format;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Overrides the super implementation to enable setting the Format
+ * of this DirectShowStream after the DataSource which
+ * provides it has been connected.
+ */
+ @Override
+ protected Format doSetFormat(Format format)
+ {
+ if (isSupportedFormat(format))
+ {
+ if (device == null)
+ return format;
+ else
+ {
+ try
+ {
+ setDeviceFormat(format);
+ }
+ catch (IOException ioe)
+ {
+ logger.error(
+ "Failed to set format on DirectShowStream: "
+ + format,
+ ioe);
+ /*
+ * Ignore the exception because the method is to report
+ * failures by returning null (which will be achieved
+ * outside the catch block).
+ */
+ }
+ return format.matches(this.format) ? format : null;
+ }
+ }
+ else
+ return super.doSetFormat(format);
+ }
+
+ /**
+ * Reads media data from this PushBufferStream into a specific
+ * Buffer without blocking.
+ *
+ * @param buffer the Buffer in which media data is to be read from
+ * this PushBufferStream
+ * @throws IOException if anything goes wrong while reading media data from
+ * this PushBufferStream into the specified buffer
+ */
+ public void read(Buffer buffer) throws IOException
+ {
+ synchronized (dataSyncRoot)
+ {
+ if(data == null)
+ {
+ buffer.setLength(0);
+ return;
+ }
+
+ Format bufferFormat = buffer.getFormat();
+
+ if(bufferFormat == null)
+ {
+ bufferFormat = getFormat();
+ if(bufferFormat != null)
+ buffer.setFormat(bufferFormat);
+ }
+ if(bufferFormat instanceof AVFrameFormat)
+ {
+ if(nativePixelFormat == DSFormat.MJPG)
+ {
+ /* Initialize the FFmpeg MJPEG decoder if necessary. */
+ if(avctx == 0)
+ {
+ long avcodec
+ = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_MJPEG);
+
+ avctx = FFmpeg.avcodec_alloc_context3(avcodec);
+ FFmpeg.avcodeccontext_set_workaround_bugs(avctx,
+ FFmpeg.FF_BUG_AUTODETECT);
+
+ if (FFmpeg.avcodec_open2(avctx, avcodec) < 0)
+ {
+ throw new RuntimeException("" +
+ "Could not open codec CODEC_ID_MJPEG");
+ }
+
+ avframe = FFmpeg.avcodec_alloc_frame();
+ }
+
+ if(FFmpeg.avcodec_decode_video(
+ avctx, avframe, data.getPtr(), data.getLength()) != -1)
+ {
+ Object out = buffer.getData();
+
+ if (!(out instanceof AVFrame)
+ || (((AVFrame) out).getPtr() != avframe))
+ {
+ buffer.setData(new AVFrame(avframe));
+ }
+ }
+
+ data.free();
+ data = null;
+ }
+ else
+ {
+ if (AVFrame.read(buffer, bufferFormat, data) < 0)
+ data.free();
+ /*
+ * XXX For the sake of safety, make sure that this instance does
+ * not reference the data instance as soon as it is set on the
+ * AVFrame.
+ */
+ data = null;
+ }
+ }
+ else
+ {
+ Object o = buffer.getData();
+ byte[] bytes;
+ int length = data.getLength();
+
+ if(o instanceof byte[])
+ {
+ bytes = (byte[]) o;
+ if(bytes.length < length)
+ bytes = null;
+ }
+ else
+ bytes = null;
+ if(bytes == null)
+ {
+ bytes = new byte[length];
+ buffer.setData(bytes);
+ }
+
+ /*
+ * TODO Copy the media from the native memory into the Java
+ * heap.
+ */
+ data.free();
+ data = null;
+
+ buffer.setLength(length);
+ buffer.setOffset(0);
+ }
+
+ buffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME);
+ buffer.setTimeStamp(dataTimeStamp);
+
+ if(!automaticallyDropsLateVideoFrames)
+ dataSyncRoot.notifyAll();
+ }
+ }
+
+ /**
+ * Calls {@link BufferTransferHandler#transferData(PushBufferStream)} from
+ * inside {@link #transferDataThread} so that the call is not made in
+ * DirectShow and we can drop late frames in the meantime.
+ */
+ private void runInTransferDataThread()
+ {
+ boolean transferData = false;
+ FrameRateControl frameRateControl
+ = (FrameRateControl)
+ dataSource.getControl(FrameRateControl.class.getName());
+ long transferDataTimeStamp = -1;
+
+ while (Thread.currentThread().equals(transferDataThread))
+ {
+ if (transferData)
+ {
+ BufferTransferHandler transferHandler = this.transferHandler;
+
+ if (transferHandler != null)
+ {
+ /*
+ * Respect the frame rate specified through the
+ * FrameRateControl of the associated DataSource.
+ */
+ if (frameRateControl != null)
+ {
+ float frameRate;
+ long newTransferDataTimeStamp
+ = System.currentTimeMillis();
+
+ if ((transferDataTimeStamp != -1)
+ && ((frameRate
+ = frameRateControl.getFrameRate())
+ > 0))
+ {
+ long minimumVideoFrameInterval
+ = (long) (1000 / frameRate);
+
+ if (minimumVideoFrameInterval > 0)
+ {
+ long t
+ = newTransferDataTimeStamp
+ - transferDataTimeStamp;
+
+ if ((t > 0) && (t < minimumVideoFrameInterval))
+ {
+ boolean interrupted = false;
+
+ try
+ {
+ Thread.sleep(
+ minimumVideoFrameInterval - t);
+ }
+ catch (InterruptedException ie)
+ {
+ interrupted = true;
+ }
+ if (interrupted)
+ Thread.currentThread().interrupt();
+ continue;
+ }
+ }
+ }
+
+ transferDataTimeStamp = newTransferDataTimeStamp;
+ }
+
+ transferHandler.transferData(this);
+ }
+
+ synchronized (dataSyncRoot)
+ {
+ if (data != null)
+ data.free();
+ data = nextData;
+ dataTimeStamp = nextDataTimeStamp;
+ nextData = null;
+ }
+ }
+
+ synchronized (dataSyncRoot)
+ {
+ if (data == null)
+ {
+ data = nextData;
+ dataTimeStamp = nextDataTimeStamp;
+ nextData = null;
+ }
+ if (data == null)
+ {
+ boolean interrupted = false;
+
+ try
+ {
+ dataSyncRoot.wait();
+ }
+ catch (InterruptedException iex)
+ {
+ interrupted = true;
+ }
+ if(interrupted)
+ Thread.currentThread().interrupt();
+
+ transferData = (data != null);
+ }
+ else
+ transferData = true;
+ }
+ }
+ }
+
+ /**
+ * Process received frames from DirectShow capture device
+ *
+ * @param source pointer to the native DSCaptureDevice which is the
+ * source of the notification
+ * @param ptr native pointer to data
+ * @param length length of data
+ */
+ private void SampleCB(long source, long ptr, int length)
+ {
+ boolean transferData = false;
+
+ synchronized (dataSyncRoot)
+ {
+ if(!automaticallyDropsLateVideoFrames && (data != null))
+ {
+ if (nextData != null)
+ {
+ nextData.free();
+ nextData = null;
+ }
+ nextData = byteBufferPool.getBuffer(length);
+ if(nextData != null)
+ {
+ nextData.setLength(
+ DSCaptureDevice.samplecopy(
+ source,
+ ptr, nextData.getPtr(), length));
+ nextDataTimeStamp = System.nanoTime();
+ }
+
+ return;
+ }
+
+ if (data != null)
+ {
+ data.free();
+ data = null;
+ }
+ data = byteBufferPool.getBuffer(length);
+ if(data != null)
+ {
+ data.setLength(
+ DSCaptureDevice.samplecopy(
+ source,
+ ptr, data.getPtr(), length));
+ dataTimeStamp = System.nanoTime();
+ }
+
+ if (nextData != null)
+ {
+ nextData.free();
+ nextData = null;
+ }
+
+ if(automaticallyDropsLateVideoFrames)
+ transferData = (data != null);
+ else
+ {
+ transferData = false;
+ dataSyncRoot.notifyAll();
+ }
+ }
+
+ if(transferData)
+ {
+ BufferTransferHandler transferHandler = this.transferHandler;
+
+ if(transferHandler != null)
+ transferHandler.transferData(this);
+ }
+ }
+
+ /**
+ * Sets the DSCaptureDevice of this instance which identifies the
+ * DirectShow video capture device this SourceStream is to capture
+ * data from.
+ *
+ * @param device a DSCaptureDevice which identifies the DirectShow
+ * video capture device this SourceStream is to capture data from
+ * @throws IOException if anything goes wrong while setting the specified
+ * device on this instance
+ */
+ void setDevice(DSCaptureDevice device)
+ throws IOException
+ {
+ if (this.device != device)
+ {
+ if (this.device != null)
+ disconnect();
+
+ this.device = device;
+
+ if (this.device != null)
+ connect();
+ }
+ }
+
+ /**
+ * Sets a specific Format on the DSCaptureDevice of this
+ * instance.
+ *
+ * @param format the Format to set on the DSCaptureDevice
+ * of this instance
+ * @throws IOException if setting the specified format on the
+ * DSCaptureDevice of this instance fails
+ */
+ private void setDeviceFormat(Format format)
+ throws IOException
+ {
+ if (format == null)
+ throw new IOException("format == null");
+ else if (format instanceof AVFrameFormat)
+ {
+ AVFrameFormat avFrameFormat = (AVFrameFormat) format;
+ nativePixelFormat = avFrameFormat.getDeviceSystemPixFmt();
+ Dimension size = avFrameFormat.getSize();
+
+ if (size == null)
+ throw new IOException("format.size == null");
+ else
+ {
+ int hresult
+ = device.setFormat(
+ new DSFormat(
+ size.width, size.height,
+ avFrameFormat.getDeviceSystemPixFmt()));
+
+ switch (hresult)
+ {
+ case DSCaptureDevice.S_FALSE:
+ case DSCaptureDevice.S_OK:
+ this.format = format;
+ if (logger.isDebugEnabled())
+ {
+ logger.debug(
+ "Set format on DirectShowStream: " + format);
+ }
+ break;
+ default:
+ throwNewHResultException(hresult);
+ }
+ }
+ }
+ else
+ throw new IOException("!(format instanceof AVFrameFormat)");
+ }
+
+ /**
+ * Starts the transfer of media data from this PushBufferStream.
+ *
+ * @throws IOException if anything goes wrong while starting the transfer of
+ * media data from this PushBufferStream
+ */
+ @Override
+ public void start()
+ throws IOException
+ {
+ super.start();
+
+ boolean started = false;
+
+ try
+ {
+ setDeviceFormat(getFormat());
+
+ if(!automaticallyDropsLateVideoFrames)
+ {
+ if (transferDataThread == null)
+ {
+ transferDataThread
+ = new Thread(getClass().getSimpleName())
+ {
+ @Override
+ public void run()
+ {
+ runInTransferDataThread();
+ }
+ };
+ transferDataThread.start();
+ }
+ }
+
+ device.start();
+
+ started = true;
+ }
+ finally
+ {
+ if (!started)
+ stop();
+ }
+ }
+
+ /**
+ * Stops the transfer of media data from this PushBufferStream.
+ *
+ * @throws IOException if anything goes wrong while stopping the transfer of
+ * media data from this PushBufferStream
+ */
+ @Override
+ public void stop()
+ throws IOException
+ {
+ try
+ {
+ device.stop();
+
+ transferDataThread = null;
+
+ synchronized (dataSyncRoot)
+ {
+ if (data != null)
+ {
+ data.free();
+ data = null;
+ }
+ if (nextData != null)
+ {
+ nextData.free();
+ nextData = null;
+ }
+
+ if(!automaticallyDropsLateVideoFrames)
+ dataSyncRoot.notifyAll();
+ }
+ }
+ finally
+ {
+ super.stop();
+
+ if(avctx != 0)
+ {
+ FFmpeg.avcodec_close(avctx);
+ FFmpeg.av_free(avctx);
+ avctx = 0;
+ }
+
+ if(avframe != 0)
+ {
+ FFmpeg.avcodec_free_frame(avframe);
+ avframe = 0;
+ }
+
+ byteBufferPool.drain();
+ }
+ }
+
+ /**
+ * Throws a new IOException the detail message of which describes
+ * a specific HRESULT value indicating a failure.
+ *
+ * @param hresult the HRESULT to be described by the detail message
+ * of the new IOException to be thrown
+ * @throws IOException
+ */
+ private void throwNewHResultException(int hresult)
+ throws IOException
+ {
+ throw new IOException(
+ "HRESULT 0x" + Long.toHexString(hresult & 0xffffffffL));
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
index bb885534c..4c8673da2 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,212 +13,212 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
-
-import java.io.*;
-import java.util.*;
-
-import javax.media.*;
-import javax.media.control.*;
-import javax.media.format.*;
-
-import org.jitsi.impl.neomedia.device.*;
-import org.jitsi.impl.neomedia.jmfext.media.protocol.*;
-import org.jitsi.util.*;
-
-/**
- * Implements CaptureDevice and DataSource using Windows Audio
- * Session API (WASAPI) and related Core Audio APIs such as Multimedia Device
- * (MMDevice) API.
- *
- * @author Lyubomir Marinov
- */
-public class DataSource
- extends AbstractPushBufferCaptureDevice
-{
- /**
- * The Logger used by the DataSource class and its
- * instances to log debugging information.
- */
- private static final Logger logger = Logger.getLogger(DataSource.class);
-
- /**
- * The indicator which determines whether the voice capture DMO is to be
- * used to perform echo cancellation and/or noise reduction.
- */
- final boolean aec;
-
- /**
- * The WASAPISystem which has contributed this
- * CaptureDevice/DataSource.
- */
- final WASAPISystem audioSystem;
-
- /**
- * Initializes a new DataSource instance.
- */
- public DataSource()
- {
- this(null);
- }
-
- /**
- * Initializes a new DataSource instance with a specific
- * MediaLocator.
- *
- * @param locator the MediaLocator to initialize the new instance
- * with
- */
- public DataSource(MediaLocator locator)
- {
- super(locator);
-
- audioSystem
- = (WASAPISystem)
- AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_WASAPI);
- aec = audioSystem.isDenoise() || audioSystem.isEchoCancel();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected WASAPIStream createStream(
- int streamIndex,
- FormatControl formatControl)
- {
- return new WASAPIStream(this, formatControl);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void doConnect()
- throws IOException
- {
- super.doConnect();
-
- MediaLocator locator = getLocator();
-
- synchronized (getStreamSyncRoot())
- {
- for (Object stream : getStreams())
- ((WASAPIStream) stream).setLocator(locator);
- }
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void doDisconnect()
- {
- try
- {
- synchronized (getStreamSyncRoot())
- {
- for (Object stream : getStreams())
- {
- try
- {
- ((WASAPIStream) stream).setLocator(null);
- }
- catch (IOException ioe)
- {
- logger.error(
- "Failed to disconnect "
- + stream.getClass().getName(),
- ioe);
- }
- }
- }
- }
- finally
- {
- super.doDisconnect();
- }
- }
-
- /**
- * Gets the Formats of media data supported by the audio endpoint
- * device associated with this instance.
- *
- * @return the Formats of media data supported by the audio
- * endpoint device associated with this instance
- */
- Format[] getIAudioClientSupportedFormats()
- {
- return getIAudioClientSupportedFormats(/* streamIndex */ 0);
- }
-
- /**
- * Gets the Formats of media data supported by the audio endpoint
- * device associated with this instance.
- *
- * @param streamIndex the index of the SourceStream within the list
- * of SourceStreams of this DataSource on behalf of which
- * the query is being made
- * @return the Formats of media data supported by the audio
- * endpoint device associated with this instance
- */
- private Format[] getIAudioClientSupportedFormats(int streamIndex)
- {
- Format[] superSupportedFormats = super.getSupportedFormats(streamIndex);
-
- /*
- * If the capture endpoint device reports to support no Format, then
- * acoustic echo cancellation (AEC) will surely not work.
- */
- if ((superSupportedFormats == null)
- || (superSupportedFormats.length == 0))
- return superSupportedFormats;
-
- // Return the NativelySupportedAudioFormat instances only.
- List supportedFormats
- = new ArrayList(superSupportedFormats.length);
-
- for (Format format : superSupportedFormats)
- {
- if ((format instanceof NativelySupportedAudioFormat)
- && !supportedFormats.contains(format))
- {
- supportedFormats.add(format);
- }
- }
-
- int supportedFormatCount = supportedFormats.size();
-
- return
- (supportedFormatCount == superSupportedFormats.length)
- ? superSupportedFormats
- : supportedFormats.toArray(new Format[supportedFormatCount]);
- }
-
- /**
- * {@inheritDoc}
- *
- * The Formats supported by this
- * CaptureDevice/DataSource are either the ones supported
- * by the capture endpoint device or the ones supported by the voice capture
- * DMO that implements the acoustic echo cancellation (AEC) feature
- * depending on whether the feature in question is disabled or enabled.
- */
- @Override
- protected Format[] getSupportedFormats(int streamIndex)
- {
- if (aec)
- {
- List aecSupportedFormats
- = audioSystem.getAECSupportedFormats();
-
- return
- aecSupportedFormats.toArray(
- new Format[aecSupportedFormats.size()]);
- }
- else
- {
- return getIAudioClientSupportedFormats(streamIndex);
- }
- }
-}
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+import java.io.*;
+import java.util.*;
+
+import javax.media.*;
+import javax.media.control.*;
+import javax.media.format.*;
+
+import org.jitsi.impl.neomedia.device.*;
+import org.jitsi.impl.neomedia.jmfext.media.protocol.*;
+import org.jitsi.util.*;
+
+/**
+ * Implements CaptureDevice and DataSource using Windows Audio
+ * Session API (WASAPI) and related Core Audio APIs such as Multimedia Device
+ * (MMDevice) API.
+ *
+ * @author Lyubomir Marinov
+ */
+public class DataSource
+ extends AbstractPushBufferCaptureDevice
+{
+ /**
+ * The Logger used by the DataSource class and its
+ * instances to log debugging information.
+ */
+ private static final Logger logger = Logger.getLogger(DataSource.class);
+
+ /**
+ * The indicator which determines whether the voice capture DMO is to be
+ * used to perform echo cancellation and/or noise reduction.
+ */
+ final boolean aec;
+
+ /**
+ * The WASAPISystem which has contributed this
+ * CaptureDevice/DataSource.
+ */
+ final WASAPISystem audioSystem;
+
+ /**
+ * Initializes a new DataSource instance.
+ */
+ public DataSource()
+ {
+ this(null);
+ }
+
+ /**
+ * Initializes a new DataSource instance with a specific
+ * MediaLocator.
+ *
+ * @param locator the MediaLocator to initialize the new instance
+ * with
+ */
+ public DataSource(MediaLocator locator)
+ {
+ super(locator);
+
+ audioSystem
+ = (WASAPISystem)
+ AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_WASAPI);
+ aec = audioSystem.isDenoise() || audioSystem.isEchoCancel();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected WASAPIStream createStream(
+ int streamIndex,
+ FormatControl formatControl)
+ {
+ return new WASAPIStream(this, formatControl);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void doConnect()
+ throws IOException
+ {
+ super.doConnect();
+
+ MediaLocator locator = getLocator();
+
+ synchronized (getStreamSyncRoot())
+ {
+ for (Object stream : getStreams())
+ ((WASAPIStream) stream).setLocator(locator);
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void doDisconnect()
+ {
+ try
+ {
+ synchronized (getStreamSyncRoot())
+ {
+ for (Object stream : getStreams())
+ {
+ try
+ {
+ ((WASAPIStream) stream).setLocator(null);
+ }
+ catch (IOException ioe)
+ {
+ logger.error(
+ "Failed to disconnect "
+ + stream.getClass().getName(),
+ ioe);
+ }
+ }
+ }
+ }
+ finally
+ {
+ super.doDisconnect();
+ }
+ }
+
+ /**
+ * Gets the Formats of media data supported by the audio endpoint
+ * device associated with this instance.
+ *
+ * @return the Formats of media data supported by the audio
+ * endpoint device associated with this instance
+ */
+ Format[] getIAudioClientSupportedFormats()
+ {
+ return getIAudioClientSupportedFormats(/* streamIndex */ 0);
+ }
+
+ /**
+ * Gets the Formats of media data supported by the audio endpoint
+ * device associated with this instance.
+ *
+ * @param streamIndex the index of the SourceStream within the list
+ * of SourceStreams of this DataSource on behalf of which
+ * the query is being made
+ * @return the Formats of media data supported by the audio
+ * endpoint device associated with this instance
+ */
+ private Format[] getIAudioClientSupportedFormats(int streamIndex)
+ {
+ Format[] superSupportedFormats = super.getSupportedFormats(streamIndex);
+
+ /*
+ * If the capture endpoint device reports to support no Format, then
+ * acoustic echo cancellation (AEC) will surely not work.
+ */
+ if ((superSupportedFormats == null)
+ || (superSupportedFormats.length == 0))
+ return superSupportedFormats;
+
+ // Return the NativelySupportedAudioFormat instances only.
+ List supportedFormats
+ = new ArrayList(superSupportedFormats.length);
+
+ for (Format format : superSupportedFormats)
+ {
+ if ((format instanceof NativelySupportedAudioFormat)
+ && !supportedFormats.contains(format))
+ {
+ supportedFormats.add(format);
+ }
+ }
+
+ int supportedFormatCount = supportedFormats.size();
+
+ return
+ (supportedFormatCount == superSupportedFormats.length)
+ ? superSupportedFormats
+ : supportedFormats.toArray(new Format[supportedFormatCount]);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * The Formats supported by this
+ * CaptureDevice/DataSource are either the ones supported
+ * by the capture endpoint device or the ones supported by the voice capture
+ * DMO that implements the acoustic echo cancellation (AEC) feature
+ * depending on whether the feature in question is disabled or enabled.
+ */
+ @Override
+ protected Format[] getSupportedFormats(int streamIndex)
+ {
+ if (aec)
+ {
+ List aecSupportedFormats
+ = audioSystem.getAECSupportedFormats();
+
+ return
+ aecSupportedFormats.toArray(
+ new Format[aecSupportedFormats.size()]);
+ }
+ else
+ {
+ return getIAudioClientSupportedFormats(streamIndex);
+ }
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java
index 83b46cbde..1d93686bb 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,70 +13,70 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
-
-/**
- * Implements an Exception which represents an HRESULT value.
- *
- * @author Lyubomir Marinov
- */
-public class HResultException
- extends Exception
-{
- /**
- * The HRESULT value represented by this instance.
- */
- private final int hresult;
-
- /**
- * Initializes a new HResultException which is to represent a
- * specific HRESULT value. The detail message of the new instance
- * is derived from the the specified HRESULT value.
- *
- * @param hresult the HRESULT value to be represented by the new
- * instance
- */
- public HResultException(int hresult)
- {
- this(hresult, toString(hresult));
- }
-
- /**
- * Initializes a new HResultException which is to represent a
- * specific HRESULT value and have a specific detail message.
- *
- * @param hresult the HRESULT value to be represented by the new
- * instance
- * @param message the detail message to initialize the new instance with
- */
- public HResultException(int hresult, String message)
- {
- super(message);
-
- this.hresult = hresult;
- }
-
- /**
- * Gets the HRESULT value represented by this instance.
- *
- * @return the HRESULT value represented by this instance
- */
- public int getHResult()
- {
- return hresult;
- }
-
- /**
- * Returns a String representation of a specific
- * HRESULT value.
- *
- * @param hresult the HRESULT value of which a String
- * representation is to be returned
- * @return a String representation of the specified
- * hresult
- */
- public static String toString(int hresult)
- {
- return "0x" + Long.toHexString(hresult & 0xffffffffL);
- }
-}
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+/**
+ * Implements an Exception which represents an HRESULT value.
+ *
+ * @author Lyubomir Marinov
+ */
+public class HResultException
+ extends Exception
+{
+ /**
+ * The HRESULT value represented by this instance.
+ */
+ private final int hresult;
+
+ /**
+ * Initializes a new HResultException which is to represent a
+ * specific HRESULT value. The detail message of the new instance
+ * is derived from the specified HRESULT value.
+ *
+ * @param hresult the HRESULT value to be represented by the new
+ * instance
+ */
+ public HResultException(int hresult)
+ {
+ this(hresult, toString(hresult));
+ }
+
+ /**
+ * Initializes a new HResultException which is to represent a
+ * specific HRESULT value and have a specific detail message.
+ *
+ * @param hresult the HRESULT value to be represented by the new
+ * instance
+ * @param message the detail message to initialize the new instance with
+ */
+ public HResultException(int hresult, String message)
+ {
+ super(message);
+
+ this.hresult = hresult;
+ }
+
+ /**
+ * Gets the HRESULT value represented by this instance.
+ *
+ * @return the HRESULT value represented by this instance
+ */
+ public int getHResult()
+ {
+ return hresult;
+ }
+
+ /**
+ * Returns a String representation of a specific
+ * HRESULT value.
+ *
+ * @param hresult the HRESULT value of which a String
+ * representation is to be returned
+ * @return a String representation of the specified
+ * hresult
+ */
+ public static String toString(int hresult)
+ {
+ return "0x" + Long.toHexString(hresult & 0xffffffffL);
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java
index 91393dd11..051b676e4 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/IMMNotificationClient.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,24 +13,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
-
-/**
- * Provides notifications when an audio endpoint device is added or removed,
- * when the state or properties of an endpoint device change, or when there is a
- * change in the default role assigned to an endpoint device.
- *
- * @author Lyubomir Marinov
- */
-public interface IMMNotificationClient
-{
- void OnDefaultDeviceChanged(int flow, int role, String pwstrDefaultDevice);
-
- void OnDeviceAdded(String pwstrDeviceId);
-
- void OnDeviceRemoved(String pwstrDeviceId);
-
- void OnDeviceStateChanged(String pwstrDeviceId, int dwNewState);
-
- void OnPropertyValueChanged(String pwstrDeviceId, long key);
-}
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+/**
+ * Provides notifications when an audio endpoint device is added or removed,
+ * when the state or properties of an endpoint device change, or when there is a
+ * change in the default role assigned to an endpoint device.
+ *
+ * @author Lyubomir Marinov
+ */
+public interface IMMNotificationClient
+{
+ void OnDefaultDeviceChanged(int flow, int role, String pwstrDefaultDevice);
+
+ void OnDeviceAdded(String pwstrDeviceId);
+
+ void OnDeviceRemoved(String pwstrDeviceId);
+
+ void OnDeviceStateChanged(String pwstrDeviceId, int dwNewState);
+
+ void OnPropertyValueChanged(String pwstrDeviceId, long key);
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java
index 63875a4ca..ffa95323c 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/MMNotificationClient.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,247 +13,247 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
-
-import java.util.*;
-
-import org.jitsi.util.*;
-
-/**
- * Implements the Java counterpart of an IMMNotificationClient instance
- * statically allocated by the native counterpart of {@link WASAPI} and
- * automatically registered with all IMMDeviceEnumerator instances.
- * Invocations of methods on the IMMNotificationClient instance by
- * Windows Audio Session API (WASAPI) are forwarded by WASAPI to the
- * respective static methods of the MMNotificationClient class.
- *
- * @author Lyubomir Marinov
- */
-public class MMNotificationClient
-{
- /**
- * The Logger used by the MMNotificationClient class to
- * log debug information.
- */
- private static final Logger logger
- = Logger.getLogger(MMNotificationClient.class);
-
- /**
- * The set of IMMNotificationClients to be notified when an audio
- * endpoint device is added or removed, when the state or properties of an
- * endpoint device change, or when there is a change in the default role
- * assigned to an endpoint device.
- */
- private static Collection pNotifySet;
-
- public static void OnDefaultDeviceChanged(
- int flow,
- int role,
- String pwstrDefaultDevice)
- {
- // TODO Auto-generated method stub
- }
-
- public static void OnDeviceAdded(String pwstrDeviceId)
- {
- Iterable pNotifySet;
-
- synchronized (MMNotificationClient.class)
- {
- pNotifySet = MMNotificationClient.pNotifySet;
- }
-
- if (pNotifySet != null)
- {
- for (IMMNotificationClient pNotify : pNotifySet)
- {
- try
- {
- pNotify.OnDeviceAdded(pwstrDeviceId);
- }
- catch (Throwable t)
- {
- /*
- * XXX The native counterpart of MMNotificationClient which
- * normally invokes the method will eventually call
- * ExceptionClear anyway.
- */
- if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- else
- {
- logger.error(
- "An IMMNotificationClient failed to normally"
- + " complete the handling of an"
- + " OnDeviceAdded notification.",
- t);
- }
- }
- }
- }
- }
-
- public static void OnDeviceRemoved(String pwstrDeviceId)
- {
- Iterable pNotifySet;
-
- synchronized (MMNotificationClient.class)
- {
- pNotifySet = MMNotificationClient.pNotifySet;
- }
-
- if (pNotifySet != null)
- {
- for (IMMNotificationClient pNotify : pNotifySet)
- {
- try
- {
- pNotify.OnDeviceRemoved(pwstrDeviceId);
- }
- catch (Throwable t)
- {
- /*
- * XXX The native counterpart of MMNotificationClient which
- * normally invokes the method will eventually call
- * ExceptionClear anyway.
- */
- if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- else
- {
- logger.error(
- "An IMMNotificationClient failed to normally"
- + " complete the handling of an"
- + " OnDeviceRemoved notification.",
- t);
- }
- }
- }
- }
- }
-
- public static void OnDeviceStateChanged(
- String pwstrDeviceId,
- int dwNewState)
- {
- Iterable pNotifySet;
-
- synchronized (MMNotificationClient.class)
- {
- pNotifySet = MMNotificationClient.pNotifySet;
- }
-
- if (pNotifySet != null)
- {
- for (IMMNotificationClient pNotify : pNotifySet)
- {
- try
- {
- pNotify.OnDeviceStateChanged(pwstrDeviceId, dwNewState);
- }
- catch (Throwable t)
- {
- /*
- * XXX The native counterpart of MMNotificationClient which
- * normally invokes the method will eventually call
- * ExceptionClear anyway.
- */
- if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- else
- {
- logger.error(
- "An IMMNotificationClient failed to normally"
- + " complete the handling of an"
- + " OnDeviceStateChanged notification.",
- t);
- }
- }
- }
- }
- }
-
- public static void OnPropertyValueChanged(String pwstrDeviceId, long key)
- {
- // TODO Auto-generated method stub
- }
-
- /**
- * Registers a specific IMMNotificationClient to be notified when
- * an audio endpoint device is added or removed, when the state or
- * properties of an endpoint device change, or when there is a change in the
- * default role assigned to an endpoint device.
- *
- * @param pNotify the IMMNotificationClient to register
- */
- public static void RegisterEndpointNotificationCallback(
- IMMNotificationClient pNotify)
- {
- if (pNotify == null)
- throw new NullPointerException("pNotify");
-
- synchronized (MMNotificationClient.class)
- {
- Collection newPNotifySet;
-
- if (pNotifySet == null)
- newPNotifySet = new ArrayList();
- else if (pNotifySet.contains(pNotify))
- return;
- else
- {
- newPNotifySet
- = new ArrayList(
- pNotifySet.size() + 1);
- newPNotifySet.addAll(pNotifySet);
- }
- if (newPNotifySet.add(pNotify))
- pNotifySet = newPNotifySet;
- }
- }
-
- /**
- * Deletes the registration of a specific IMMNotificationClient
- * that the client registered in a previous call to
- * {@link #RegisterEndpointNotificationCallback(IMMNotificationClient)}.
- *
- * @param pNotify the IMMNotificationClient to delete the
- * registration of
- */
- public static void UnregisterEndpointNotificationCallback(
- IMMNotificationClient pNotify)
- {
- if (pNotify == null)
- throw new NullPointerException("pNotify");
-
- synchronized (MMNotificationClient.class)
- {
- /*
- * XXX The implementation bellow is hardly optimal because it
- * consecutively employs the contains and remove Collection methods
- * each of which performs a linear search for one and the same
- * element in effectively the same set of elements. Anyway, the
- * unregistering of IMMNotificationClients will very occur much less
- * often than notification deliveries.
- */
- if ((pNotifySet != null) && pNotifySet.contains(pNotify))
- {
- if (pNotifySet.size() == 1)
- pNotifySet = null;
- else
- {
- Collection newPNotifySet
- = new ArrayList(pNotifySet);
-
- if (newPNotifySet.remove(pNotify))
- pNotifySet = newPNotifySet;
- }
- }
- }
- }
-
- /**
- * Prevents the initialization of MMNotificationClient instances.
- */
- private MMNotificationClient() {}
-}
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+import java.util.*;
+
+import org.jitsi.util.*;
+
+/**
+ * Implements the Java counterpart of an IMMNotificationClient instance
+ * statically allocated by the native counterpart of {@link WASAPI} and
+ * automatically registered with all IMMDeviceEnumerator instances.
+ * Invocations of methods on the IMMNotificationClient instance by
+ * Windows Audio Session API (WASAPI) are forwarded by WASAPI to the
+ * respective static methods of the MMNotificationClient class.
+ *
+ * @author Lyubomir Marinov
+ */
+public class MMNotificationClient
+{
+ /**
+ * The Logger used by the MMNotificationClient class to
+ * log debug information.
+ */
+ private static final Logger logger
+ = Logger.getLogger(MMNotificationClient.class);
+
+ /**
+ * The set of IMMNotificationClients to be notified when an audio
+ * endpoint device is added or removed, when the state or properties of an
+ * endpoint device change, or when there is a change in the default role
+ * assigned to an endpoint device.
+ */
+ private static Collection pNotifySet;
+
+ public static void OnDefaultDeviceChanged(
+ int flow,
+ int role,
+ String pwstrDefaultDevice)
+ {
+ // TODO Auto-generated method stub
+ }
+
+ public static void OnDeviceAdded(String pwstrDeviceId)
+ {
+ Iterable pNotifySet;
+
+ synchronized (MMNotificationClient.class)
+ {
+ pNotifySet = MMNotificationClient.pNotifySet;
+ }
+
+ if (pNotifySet != null)
+ {
+ for (IMMNotificationClient pNotify : pNotifySet)
+ {
+ try
+ {
+ pNotify.OnDeviceAdded(pwstrDeviceId);
+ }
+ catch (Throwable t)
+ {
+ /*
+ * XXX The native counterpart of MMNotificationClient which
+ * normally invokes the method will eventually call
+ * ExceptionClear anyway.
+ */
+ if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ else
+ {
+ logger.error(
+ "An IMMNotificationClient failed to normally"
+ + " complete the handling of an"
+ + " OnDeviceAdded notification.",
+ t);
+ }
+ }
+ }
+ }
+ }
+
+ public static void OnDeviceRemoved(String pwstrDeviceId)
+ {
+ Iterable pNotifySet;
+
+ synchronized (MMNotificationClient.class)
+ {
+ pNotifySet = MMNotificationClient.pNotifySet;
+ }
+
+ if (pNotifySet != null)
+ {
+ for (IMMNotificationClient pNotify : pNotifySet)
+ {
+ try
+ {
+ pNotify.OnDeviceRemoved(pwstrDeviceId);
+ }
+ catch (Throwable t)
+ {
+ /*
+ * XXX The native counterpart of MMNotificationClient which
+ * normally invokes the method will eventually call
+ * ExceptionClear anyway.
+ */
+ if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ else
+ {
+ logger.error(
+ "An IMMNotificationClient failed to normally"
+ + " complete the handling of an"
+ + " OnDeviceRemoved notification.",
+ t);
+ }
+ }
+ }
+ }
+ }
+
+ public static void OnDeviceStateChanged(
+ String pwstrDeviceId,
+ int dwNewState)
+ {
+ Iterable pNotifySet;
+
+ synchronized (MMNotificationClient.class)
+ {
+ pNotifySet = MMNotificationClient.pNotifySet;
+ }
+
+ if (pNotifySet != null)
+ {
+ for (IMMNotificationClient pNotify : pNotifySet)
+ {
+ try
+ {
+ pNotify.OnDeviceStateChanged(pwstrDeviceId, dwNewState);
+ }
+ catch (Throwable t)
+ {
+ /*
+ * XXX The native counterpart of MMNotificationClient which
+ * normally invokes the method will eventually call
+ * ExceptionClear anyway.
+ */
+ if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ else
+ {
+ logger.error(
+ "An IMMNotificationClient failed to normally"
+ + " complete the handling of an"
+ + " OnDeviceStateChanged notification.",
+ t);
+ }
+ }
+ }
+ }
+ }
+
+ public static void OnPropertyValueChanged(String pwstrDeviceId, long key)
+ {
+ // TODO Auto-generated method stub
+ }
+
+ /**
+ * Registers a specific IMMNotificationClient to be notified when
+ * an audio endpoint device is added or removed, when the state or
+ * properties of an endpoint device change, or when there is a change in the
+ * default role assigned to an endpoint device.
+ *
+ * @param pNotify the IMMNotificationClient to register
+ */
+ public static void RegisterEndpointNotificationCallback(
+ IMMNotificationClient pNotify)
+ {
+ if (pNotify == null)
+ throw new NullPointerException("pNotify");
+
+ synchronized (MMNotificationClient.class)
+ {
+ Collection newPNotifySet;
+
+ if (pNotifySet == null)
+ newPNotifySet = new ArrayList();
+ else if (pNotifySet.contains(pNotify))
+ return;
+ else
+ {
+ newPNotifySet
+ = new ArrayList(
+ pNotifySet.size() + 1);
+ newPNotifySet.addAll(pNotifySet);
+ }
+ if (newPNotifySet.add(pNotify))
+ pNotifySet = newPNotifySet;
+ }
+ }
+
+ /**
+ * Deletes the registration of a specific IMMNotificationClient
+ * that the client registered in a previous call to
+ * {@link #RegisterEndpointNotificationCallback(IMMNotificationClient)}.
+ *
+ * @param pNotify the IMMNotificationClient to delete the
+ * registration of
+ */
+ public static void UnregisterEndpointNotificationCallback(
+ IMMNotificationClient pNotify)
+ {
+ if (pNotify == null)
+ throw new NullPointerException("pNotify");
+
+ synchronized (MMNotificationClient.class)
+ {
+ /*
+ * XXX The implementation below is hardly optimal because it
+ * consecutively employs the contains and remove Collection methods
+ * each of which performs a linear search for one and the same
+ * element in effectively the same set of elements. Anyway, the
+ * unregistering of IMMNotificationClients will occur much less
+ * often than notification deliveries.
+ */
+ if ((pNotifySet != null) && pNotifySet.contains(pNotify))
+ {
+ if (pNotifySet.size() == 1)
+ pNotifySet = null;
+ else
+ {
+ Collection newPNotifySet
+ = new ArrayList(pNotifySet);
+
+ if (newPNotifySet.remove(pNotify))
+ pNotifySet = newPNotifySet;
+ }
+ }
+ }
+ }
+
+ /**
+ * Prevents the initialization of MMNotificationClient instances.
+ */
+ private MMNotificationClient() {}
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java
index be2851a66..3cbc6190e 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,430 +13,430 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
-
-import org.jitsi.util.*;
-
-/**
- * Defines the native interface to Windows Audio Session API (WASAPI) and
- * related Core Audio APIs such as Multimedia Device (MMDevice) API as used by
- * WASAPISystem and its associated CaptureDevice,
- * DataSource and Renderer implementations.
- *
- * @author Lyubomir Marinov
- */
-public class WASAPI
-{
- public static final int AUDCLNT_E_NOT_STOPPED;
-
- public static final int AUDCLNT_SHAREMODE_SHARED = 0;
-
- public static final int AUDCLNT_STREAMFLAGS_EVENTCALLBACK = 0x00040000;
-
- public static final int AUDCLNT_STREAMFLAGS_LOOPBACK = 0x00020000;
-
- public static final int AUDCLNT_STREAMFLAGS_NOPERSIST = 0x00080000;
-
- public static final int CLSCTX_ALL
- = /* CLSCTX_INPROC_SERVER */ 0x1
- | /* CLSCTX_INPROC_HANDLER */ 0x2
- | /* CLSCTX_LOCAL_SERVER */ 0x4
- | /* CLSCTX_REMOTE_SERVER */ 0x10;
-
- public static final String CLSID_MMDeviceEnumerator
- = "{bcde0395-e52f-467c-8e3d-c4579291692e}";
-
- public static final int COINIT_MULTITHREADED = 0x0;
-
- public static final int DEVICE_STATE_ACTIVE = 0x1;
-
- public static final int eAll = 2;
-
- public static final int eCapture = 1;
-
- public static final int eRender = 0;
-
- private static final int FACILIY_AUDCLNT = 0x889;
-
- public static final String IID_IAudioCaptureClient
- = "{c8adbd64-e71e-48a0-a4de-185c395cd317}";
-
- public static final String IID_IAudioClient
- = "{1cb9ad4c-dbfa-4c32-b178-c2f568a703b2}";
-
- public static final String IID_IAudioRenderClient
- = "{f294acfc-3146-4483-a7bf-addca7c260e2}";
-
- public static final String IID_IMMDeviceEnumerator
- = "{a95664d2-9614-4f35-a746-de8db63617e6}";
-
- public static final String IID_IMMEndpoint
- = "{1be09788-6894-4089-8586-9a2a6c265ac5}";
-
- public static final long PKEY_Device_FriendlyName;
-
- public static final int RPC_E_CHANGED_MODE = 0x80010106;
-
- public static final int S_FALSE = 1;
-
- public static final int S_OK = 0;
-
- private static final int SEVERITY_ERROR = 1;
-
- private static final int SEVERITY_SUCCESS = 0;
-
- public static final int STGM_READ = 0x0;
-
- /**
- * The return value of {@link #WaitForSingleObject(long, long)} which
- * indicates that the specified object is a mutex that was not released by
- * the thread that owned the mutex before the owning thread terminated.
- * Ownership of the mutex is granted to the calling thread and the mutex
- * state is set to non-signaled.
- */
- public static final int WAIT_ABANDONED = 0x00000080;
-
- /**
- * The return value of {@link #WaitForSingleObject(long, long)} which
- * indicates that the function has failed. Normally, the function will throw
- * an {@link HResultException} in the case and
- * {@link HResultException#getHResult()} will return WAIT_FAILED.
- */
- public static final int WAIT_FAILED = 0xffffffff;
-
- /**
- * The return value of {@link #WaitForSingleObject(long, long)} which
- * indicates that the specified object is signaled.
- */
- public static final int WAIT_OBJECT_0 = 0x00000000;
-
- /**
- * The return value of {@link #WaitForSingleObject(long, long)} which
- * indicates that the specified time-out interval has elapsed and the state
- * of the specified object is non-signaled.
- */
- public static final int WAIT_TIMEOUT = 0x00000102;
-
- public static final char WAVE_FORMAT_PCM = 1;
-
- static
- {
- JNIUtils.loadLibrary("jnwasapi", WASAPI.class.getClassLoader());
-
- AUDCLNT_E_NOT_STOPPED
- = MAKE_HRESULT(SEVERITY_ERROR, FACILIY_AUDCLNT, 5);
-
- /*
- * XXX The pointer to native memory returned by PSPropertyKeyFromString
- * is to be freed via CoTaskMemFree.
- */
- String pszString = null;
-
- try
- {
- pszString = "{a45c254e-df1c-4efd-8020-67d146a850e0} 14";
- PKEY_Device_FriendlyName = PSPropertyKeyFromString(pszString);
- if (PKEY_Device_FriendlyName == 0)
- throw new IllegalStateException("PKEY_Device_FriendlyName");
- }
- catch (HResultException hre)
- {
- Logger logger = Logger.getLogger(WASAPI.class);
-
- logger.error("PSPropertyKeyFromString(" + pszString + ")", hre);
- throw new RuntimeException(hre);
- }
- }
-
- public static native void CloseHandle(long hObject)
- throws HResultException;
-
- public static native String CoCreateGuid()
- throws HResultException;
-
- public static native long CoCreateInstance(
- String clsid,
- long pUnkOuter,
- int dwClsContext,
- String iid)
- throws HResultException;
-
- public static native int CoInitializeEx(long pvReserved, int dwCoInit)
- throws HResultException;
-
- public static native void CoTaskMemFree(long pv);
-
- public static native void CoUninitialize();
-
- public static native long CreateEvent(
- long lpEventAttributes,
- boolean bManualReset,
- boolean bInitialState,
- String lpName)
- throws HResultException;
-
- /**
- * Determines whether a specific HRESULT value indicates failure.
- *
- * @param hresult the HRESULT value to be checked whether it
- * indicates failure
- * @return true if the specified hresult indicates
- * failure; otherwise, false
- */
- public static boolean FAILED(int hresult)
- {
- return (hresult < 0);
- }
-
- public static native int IAudioCaptureClient_GetNextPacketSize(long thiz)
- throws HResultException;
-
- public static native int IAudioCaptureClient_Read(
- long thiz,
- byte[] data, int offset, int length,
- int srcSampleSize, int srcChannels,
- int dstSampleSize, int dstChannels)
- throws HResultException;
-
- public static native void IAudioCaptureClient_Release(long thiz);
-
- public static native int IAudioClient_GetBufferSize(long thiz)
- throws HResultException;
-
- public static native int IAudioClient_GetCurrentPadding(long thiz)
- throws HResultException;
-
- public static native long IAudioClient_GetDefaultDevicePeriod(long thiz)
- throws HResultException;
-
- public static native long IAudioClient_GetMinimumDevicePeriod(long thiz)
- throws HResultException;
-
- public static native long IAudioClient_GetService(long thiz, String iid)
- throws HResultException;
-
- public static native int IAudioClient_Initialize(
- long thiz,
- int shareMode,
- int streamFlags,
- long hnsBufferDuration,
- long hnsPeriodicity,
- long pFormat,
- String audioSessionGuid)
- throws HResultException;
-
- public static native long IAudioClient_IsFormatSupported(
- long thiz,
- int shareMode,
- long pFormat)
- throws HResultException;
-
- public static native void IAudioClient_Release(long thiz);
-
- public static native void IAudioClient_SetEventHandle(
- long thiz,
- long eventHandle)
- throws HResultException;
-
- public static native int IAudioClient_Start(long thiz)
- throws HResultException;
-
- public static native int IAudioClient_Stop(long thiz)
- throws HResultException;
-
- public static native void IAudioRenderClient_Release(long thiz);
-
- /**
- * Writes specific audio data into the rendering endpoint buffer of a
- * specific IAudioRenderClient. If the sample sizes and/or the
- * numbers of channels of the specified audio data and the
- * specified rendering endpoint buffer differ, the method may be able to
- * perform the necessary conversions.
- *
- * @param thiz the IAudioRenderClient which abstracts the rendering
- * endpoint buffer into which the specified audio data is to be
- * written
- * @param data the bytes of the audio samples to be written into the
- * specified rendering endpoint buffer
- * @param offset the offset in bytes within data at which valid
- * audio samples begin
- * @param length the number of bytes of valid audio samples in data
- * @param srcSampleSize the size in bytes of an audio sample in
- * data
- * @param srcChannels the number of channels of the audio signal provided
- * in data
- * @param dstSampleSize the size in bytes of an audio sample in the
- * rendering endpoint buffer
- * @param dstChannels the number of channels with which the rendering
- * endpoint buffer has been initialized
- * @return the number of bytes which have been read from data
- * (beginning at offset, of course) and successfully written into
- * the rendering endpoint buffer
- * @throws HResultException if an HRESULT value indicating an error is
- * returned by a function invoked by the method implementation or an I/O
- * error is encountered during the execution of the method
- */
- public static native int IAudioRenderClient_Write(
- long thiz,
- byte[] data, int offset, int length,
- int srcSampleSize, int srcChannels,
- int dstSampleSize, int dstChannels)
- throws HResultException;
-
- public static native long IMMDevice_Activate(
- long thiz,
- String iid,
- int dwClsCtx,
- long pActivationParams)
- throws HResultException;
-
- public static native String IMMDevice_GetId(long thiz)
- throws HResultException;
-
- public static native int IMMDevice_GetState(long thiz)
- throws HResultException;
-
- public static native long IMMDevice_OpenPropertyStore(
- long thiz,
- int stgmAccess)
- throws HResultException;
-
- public static native long IMMDevice_QueryInterface(long thiz, String iid)
- throws HResultException;
-
- public static native void IMMDevice_Release(long thiz);
-
- public static native int IMMDeviceCollection_GetCount(long thiz)
- throws HResultException;
-
- public static native long IMMDeviceCollection_Item(long thiz, int nDevice)
- throws HResultException;
-
- public static native void IMMDeviceCollection_Release(long thiz);
-
- public static native long IMMDeviceEnumerator_EnumAudioEndpoints(
- long thiz,
- int dataFlow,
- int dwStateMask)
- throws HResultException;
-
- public static native long IMMDeviceEnumerator_GetDevice(
- long thiz,
- String pwstrId)
- throws HResultException;
-
- public static native void IMMDeviceEnumerator_Release(long thiz);
-
- public static native int IMMEndpoint_GetDataFlow(long thiz)
- throws HResultException;
-
- public static native void IMMEndpoint_Release(long thiz);
-
- public static native String IPropertyStore_GetString(long thiz, long key)
- throws HResultException;
-
- public static native void IPropertyStore_Release(long thiz);
-
- private static int MAKE_HRESULT(int sev, int fac, int code)
- {
- return ((sev & 0x1) << 31) | ((fac & 0x7fff) << 16) | (code & 0xffff);
- }
-
- public static native long PSPropertyKeyFromString(String pszString)
- throws HResultException;
-
- public static native void ResetEvent(long hEvent)
- throws HResultException;
-
- /**
- * Determines whether a specific HRESULT value indicates success.
- *
- * @param hresult the HRESULT value to be checked whether it
- * indicates success
- * @return true if the specified hresult indicates
- * success; otherwise, false
- */
- public static boolean SUCCEEDED(int hresult)
- {
- return (hresult >= 0);
- }
-
- /**
- * Waits until the specified object is in the signaled state or the
- * specified time-out interval elapses.
- *
- * @param hHandle a HANDLE to the object to wait for
- * @param dwMilliseconds the time-out interval in milliseconds to wait. If a
- * nonzero value is specified, the function waits until the specified object
- * is signaled or the specified time-out interval elapses. If
- * dwMilliseconds is zero, the function does not enter a wait state
- * if the specified object is not signaled; it always returns immediately.
- * If dwMilliseconds is INFINITE, the function will return
- * only when the specified object is signaled.
- * @return one of the WAIT_XXX constant values defined by the
- * WASAPI class to indicate the event that caused the function to
- * return
- * @throws HResultException if the return value is {@link #WAIT_FAILED}
- */
- public static native int WaitForSingleObject(
- long hHandle,
- long dwMilliseconds)
- throws HResultException;
-
- public static native long WAVEFORMATEX_alloc();
-
- public static native void WAVEFORMATEX_fill(
- long thiz,
- char wFormatTag,
- char nChannels,
- int nSamplesPerSec,
- int nAvgBytesPerSec,
- char nBlockAlign,
- char wBitsPerSample,
- char cbSize);
-
- public static native char WAVEFORMATEX_getCbSize(long thiz);
-
- public static native int WAVEFORMATEX_getNAvgBytesPerSec(long thiz);
-
- public static native char WAVEFORMATEX_getNBlockAlign(long thiz);
-
- public static native char WAVEFORMATEX_getNChannels(long thiz);
-
- public static native int WAVEFORMATEX_getNSamplesPerSec(long thiz);
-
- public static native char WAVEFORMATEX_getWBitsPerSample(long thiz);
-
- public static native char WAVEFORMATEX_getWFormatTag(long thiz);
-
- public static native void WAVEFORMATEX_setCbSize(long thiz, char cbSize);
-
- public static native void WAVEFORMATEX_setNAvgBytesPerSec(
- long thiz,
- int nAvgBytesPerSec);
-
- public static native void WAVEFORMATEX_setNBlockAlign(
- long thiz,
- char nBlockAlign);
-
- public static native void WAVEFORMATEX_setNChannels(
- long thiz,
- char nChannels);
-
- public static native void WAVEFORMATEX_setNSamplesPerSec(
- long thiz,
- int nSamplesPerSec);
-
- public static native void WAVEFORMATEX_setWBitsPerSample(
- long thiz,
- char wBitsPerSample);
-
- public static native void WAVEFORMATEX_setWFormatTag(
- long thiz,
- char wFormatTag);
-
- public static native int WAVEFORMATEX_sizeof();
-
- /** Prevents the initialization of WASAPI instances. */
- private WASAPI() {}
-}
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+import org.jitsi.util.*;
+
+/**
+ * Defines the native interface to Windows Audio Session API (WASAPI) and
+ * related Core Audio APIs such as Multimedia Device (MMDevice) API as used by
+ * WASAPISystem and its associated CaptureDevice,
+ * DataSource and Renderer implementations.
+ *
+ * @author Lyubomir Marinov
+ */
+public class WASAPI
+{
+ public static final int AUDCLNT_E_NOT_STOPPED;
+
+ public static final int AUDCLNT_SHAREMODE_SHARED = 0;
+
+ public static final int AUDCLNT_STREAMFLAGS_EVENTCALLBACK = 0x00040000;
+
+ public static final int AUDCLNT_STREAMFLAGS_LOOPBACK = 0x00020000;
+
+ public static final int AUDCLNT_STREAMFLAGS_NOPERSIST = 0x00080000;
+
+ public static final int CLSCTX_ALL
+ = /* CLSCTX_INPROC_SERVER */ 0x1
+ | /* CLSCTX_INPROC_HANDLER */ 0x2
+ | /* CLSCTX_LOCAL_SERVER */ 0x4
+ | /* CLSCTX_REMOTE_SERVER */ 0x10;
+
+ public static final String CLSID_MMDeviceEnumerator
+ = "{bcde0395-e52f-467c-8e3d-c4579291692e}";
+
+ public static final int COINIT_MULTITHREADED = 0x0;
+
+ public static final int DEVICE_STATE_ACTIVE = 0x1;
+
+ public static final int eAll = 2;
+
+ public static final int eCapture = 1;
+
+ public static final int eRender = 0;
+
+ private static final int FACILIY_AUDCLNT = 0x889;
+
+ public static final String IID_IAudioCaptureClient
+ = "{c8adbd64-e71e-48a0-a4de-185c395cd317}";
+
+ public static final String IID_IAudioClient
+ = "{1cb9ad4c-dbfa-4c32-b178-c2f568a703b2}";
+
+ public static final String IID_IAudioRenderClient
+ = "{f294acfc-3146-4483-a7bf-addca7c260e2}";
+
+ public static final String IID_IMMDeviceEnumerator
+ = "{a95664d2-9614-4f35-a746-de8db63617e6}";
+
+ public static final String IID_IMMEndpoint
+ = "{1be09788-6894-4089-8586-9a2a6c265ac5}";
+
+ public static final long PKEY_Device_FriendlyName;
+
+ public static final int RPC_E_CHANGED_MODE = 0x80010106;
+
+ public static final int S_FALSE = 1;
+
+ public static final int S_OK = 0;
+
+ private static final int SEVERITY_ERROR = 1;
+
+ private static final int SEVERITY_SUCCESS = 0;
+
+ public static final int STGM_READ = 0x0;
+
+ /**
+ * The return value of {@link #WaitForSingleObject(long, long)} which
+ * indicates that the specified object is a mutex that was not released by
+ * the thread that owned the mutex before the owning thread terminated.
+ * Ownership of the mutex is granted to the calling thread and the mutex
+ * state is set to non-signaled.
+ */
+ public static final int WAIT_ABANDONED = 0x00000080;
+
+ /**
+ * The return value of {@link #WaitForSingleObject(long, long)} which
+ * indicates that the function has failed. Normally, the function will throw
+ * an {@link HResultException} in the case and
+ * {@link HResultException#getHResult()} will return WAIT_FAILED.
+ */
+ public static final int WAIT_FAILED = 0xffffffff;
+
+ /**
+ * The return value of {@link #WaitForSingleObject(long, long)} which
+ * indicates that the specified object is signaled.
+ */
+ public static final int WAIT_OBJECT_0 = 0x00000000;
+
+ /**
+ * The return value of {@link #WaitForSingleObject(long, long)} which
+ * indicates that the specified time-out interval has elapsed and the state
+ * of the specified object is non-signaled.
+ */
+ public static final int WAIT_TIMEOUT = 0x00000102;
+
+ public static final char WAVE_FORMAT_PCM = 1;
+
+ static
+ {
+ JNIUtils.loadLibrary("jnwasapi", WASAPI.class.getClassLoader());
+
+ AUDCLNT_E_NOT_STOPPED
+ = MAKE_HRESULT(SEVERITY_ERROR, FACILIY_AUDCLNT, 5);
+
+ /*
+ * XXX The pointer to native memory returned by PSPropertyKeyFromString
+ * is to be freed via CoTaskMemFree.
+ */
+ String pszString = null;
+
+ try
+ {
+ pszString = "{a45c254e-df1c-4efd-8020-67d146a850e0} 14";
+ PKEY_Device_FriendlyName = PSPropertyKeyFromString(pszString);
+ if (PKEY_Device_FriendlyName == 0)
+ throw new IllegalStateException("PKEY_Device_FriendlyName");
+ }
+ catch (HResultException hre)
+ {
+ Logger logger = Logger.getLogger(WASAPI.class);
+
+ logger.error("PSPropertyKeyFromString(" + pszString + ")", hre);
+ throw new RuntimeException(hre);
+ }
+ }
+
+ public static native void CloseHandle(long hObject)
+ throws HResultException;
+
+ public static native String CoCreateGuid()
+ throws HResultException;
+
+ public static native long CoCreateInstance(
+ String clsid,
+ long pUnkOuter,
+ int dwClsContext,
+ String iid)
+ throws HResultException;
+
+ public static native int CoInitializeEx(long pvReserved, int dwCoInit)
+ throws HResultException;
+
+ public static native void CoTaskMemFree(long pv);
+
+ public static native void CoUninitialize();
+
+ public static native long CreateEvent(
+ long lpEventAttributes,
+ boolean bManualReset,
+ boolean bInitialState,
+ String lpName)
+ throws HResultException;
+
+ /**
+ * Determines whether a specific HRESULT value indicates failure.
+ *
+ * @param hresult the HRESULT value to be checked whether it
+ * indicates failure
+ * @return true if the specified hresult indicates
+ * failure; otherwise, false
+ */
+ public static boolean FAILED(int hresult)
+ {
+ return (hresult < 0);
+ }
+
+ public static native int IAudioCaptureClient_GetNextPacketSize(long thiz)
+ throws HResultException;
+
+ public static native int IAudioCaptureClient_Read(
+ long thiz,
+ byte[] data, int offset, int length,
+ int srcSampleSize, int srcChannels,
+ int dstSampleSize, int dstChannels)
+ throws HResultException;
+
+ public static native void IAudioCaptureClient_Release(long thiz);
+
+ public static native int IAudioClient_GetBufferSize(long thiz)
+ throws HResultException;
+
+ public static native int IAudioClient_GetCurrentPadding(long thiz)
+ throws HResultException;
+
+ public static native long IAudioClient_GetDefaultDevicePeriod(long thiz)
+ throws HResultException;
+
+ public static native long IAudioClient_GetMinimumDevicePeriod(long thiz)
+ throws HResultException;
+
+ public static native long IAudioClient_GetService(long thiz, String iid)
+ throws HResultException;
+
+ public static native int IAudioClient_Initialize(
+ long thiz,
+ int shareMode,
+ int streamFlags,
+ long hnsBufferDuration,
+ long hnsPeriodicity,
+ long pFormat,
+ String audioSessionGuid)
+ throws HResultException;
+
+ public static native long IAudioClient_IsFormatSupported(
+ long thiz,
+ int shareMode,
+ long pFormat)
+ throws HResultException;
+
+ public static native void IAudioClient_Release(long thiz);
+
+ public static native void IAudioClient_SetEventHandle(
+ long thiz,
+ long eventHandle)
+ throws HResultException;
+
+ public static native int IAudioClient_Start(long thiz)
+ throws HResultException;
+
+ public static native int IAudioClient_Stop(long thiz)
+ throws HResultException;
+
+ public static native void IAudioRenderClient_Release(long thiz);
+
+ /**
+ * Writes specific audio data into the rendering endpoint buffer of a
+ * specific IAudioRenderClient. If the sample sizes and/or the
+ * numbers of channels of the specified audio data and the
+ * specified rendering endpoint buffer differ, the method may be able to
+ * perform the necessary conversions.
+ *
+ * @param thiz the IAudioRenderClient which abstracts the rendering
+ * endpoint buffer into which the specified audio data is to be
+ * written
+ * @param data the bytes of the audio samples to be written into the
+ * specified rendering endpoint buffer
+ * @param offset the offset in bytes within data at which valid
+ * audio samples begin
+ * @param length the number of bytes of valid audio samples in data
+ * @param srcSampleSize the size in bytes of an audio sample in
+ * data
+ * @param srcChannels the number of channels of the audio signal provided
+ * in data
+ * @param dstSampleSize the size in bytes of an audio sample in the
+ * rendering endpoint buffer
+ * @param dstChannels the number of channels with which the rendering
+ * endpoint buffer has been initialized
+ * @return the number of bytes which have been read from data
+ * (beginning at offset, of course) and successfully written into
+ * the rendering endpoint buffer
+ * @throws HResultException if an HRESULT value indicating an error is
+ * returned by a function invoked by the method implementation or an I/O
+ * error is encountered during the execution of the method
+ */
+ public static native int IAudioRenderClient_Write(
+ long thiz,
+ byte[] data, int offset, int length,
+ int srcSampleSize, int srcChannels,
+ int dstSampleSize, int dstChannels)
+ throws HResultException;
+
+ public static native long IMMDevice_Activate(
+ long thiz,
+ String iid,
+ int dwClsCtx,
+ long pActivationParams)
+ throws HResultException;
+
+ public static native String IMMDevice_GetId(long thiz)
+ throws HResultException;
+
+ public static native int IMMDevice_GetState(long thiz)
+ throws HResultException;
+
+ public static native long IMMDevice_OpenPropertyStore(
+ long thiz,
+ int stgmAccess)
+ throws HResultException;
+
+ public static native long IMMDevice_QueryInterface(long thiz, String iid)
+ throws HResultException;
+
+ public static native void IMMDevice_Release(long thiz);
+
+ public static native int IMMDeviceCollection_GetCount(long thiz)
+ throws HResultException;
+
+ public static native long IMMDeviceCollection_Item(long thiz, int nDevice)
+ throws HResultException;
+
+ public static native void IMMDeviceCollection_Release(long thiz);
+
+ public static native long IMMDeviceEnumerator_EnumAudioEndpoints(
+ long thiz,
+ int dataFlow,
+ int dwStateMask)
+ throws HResultException;
+
+ public static native long IMMDeviceEnumerator_GetDevice(
+ long thiz,
+ String pwstrId)
+ throws HResultException;
+
+ public static native void IMMDeviceEnumerator_Release(long thiz);
+
+ public static native int IMMEndpoint_GetDataFlow(long thiz)
+ throws HResultException;
+
+ public static native void IMMEndpoint_Release(long thiz);
+
+ public static native String IPropertyStore_GetString(long thiz, long key)
+ throws HResultException;
+
+ public static native void IPropertyStore_Release(long thiz);
+
+ private static int MAKE_HRESULT(int sev, int fac, int code)
+ {
+ return ((sev & 0x1) << 31) | ((fac & 0x7fff) << 16) | (code & 0xffff);
+ }
+
+ public static native long PSPropertyKeyFromString(String pszString)
+ throws HResultException;
+
+ public static native void ResetEvent(long hEvent)
+ throws HResultException;
+
+ /**
+ * Determines whether a specific HRESULT value indicates success.
+ *
+ * @param hresult the HRESULT value to be checked whether it
+ * indicates success
+ * @return true if the specified hresult indicates
+ * success; otherwise, false
+ */
+ public static boolean SUCCEEDED(int hresult)
+ {
+ return (hresult >= 0);
+ }
+
+ /**
+ * Waits until the specified object is in the signaled state or the
+ * specified time-out interval elapses.
+ *
+ * @param hHandle a HANDLE to the object to wait for
+ * @param dwMilliseconds the time-out interval in milliseconds to wait. If a
+ * nonzero value is specified, the function waits until the specified object
+ * is signaled or the specified time-out interval elapses. If
+ * dwMilliseconds is zero, the function does not enter a wait state
+ * if the specified object is not signaled; it always returns immediately.
+ * If dwMilliseconds is INFINITE, the function will return
+ * only when the specified object is signaled.
+ * @return one of the WAIT_XXX constant values defined by the
+ * WASAPI class to indicate the event that caused the function to
+ * return
+ * @throws HResultException if the return value is {@link #WAIT_FAILED}
+ */
+ public static native int WaitForSingleObject(
+ long hHandle,
+ long dwMilliseconds)
+ throws HResultException;
+
+ public static native long WAVEFORMATEX_alloc();
+
+ public static native void WAVEFORMATEX_fill(
+ long thiz,
+ char wFormatTag,
+ char nChannels,
+ int nSamplesPerSec,
+ int nAvgBytesPerSec,
+ char nBlockAlign,
+ char wBitsPerSample,
+ char cbSize);
+
+ public static native char WAVEFORMATEX_getCbSize(long thiz);
+
+ public static native int WAVEFORMATEX_getNAvgBytesPerSec(long thiz);
+
+ public static native char WAVEFORMATEX_getNBlockAlign(long thiz);
+
+ public static native char WAVEFORMATEX_getNChannels(long thiz);
+
+ public static native int WAVEFORMATEX_getNSamplesPerSec(long thiz);
+
+ public static native char WAVEFORMATEX_getWBitsPerSample(long thiz);
+
+ public static native char WAVEFORMATEX_getWFormatTag(long thiz);
+
+ public static native void WAVEFORMATEX_setCbSize(long thiz, char cbSize);
+
+ public static native void WAVEFORMATEX_setNAvgBytesPerSec(
+ long thiz,
+ int nAvgBytesPerSec);
+
+ public static native void WAVEFORMATEX_setNBlockAlign(
+ long thiz,
+ char nBlockAlign);
+
+ public static native void WAVEFORMATEX_setNChannels(
+ long thiz,
+ char nChannels);
+
+ public static native void WAVEFORMATEX_setNSamplesPerSec(
+ long thiz,
+ int nSamplesPerSec);
+
+ public static native void WAVEFORMATEX_setWBitsPerSample(
+ long thiz,
+ char wBitsPerSample);
+
+ public static native void WAVEFORMATEX_setWFormatTag(
+ long thiz,
+ char wFormatTag);
+
+ public static native int WAVEFORMATEX_sizeof();
+
+ /** Prevents the initialization of WASAPI instances. */
+ private WASAPI() {}
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
index e49b80aac..940eb5553 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,1942 +13,1942 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.renderer.audio;
-
-import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
-
-import java.beans.*;
-import java.lang.reflect.*;
-import java.util.*;
-import java.util.concurrent.*;
-
-import javax.media.*;
-import javax.media.format.*;
-
-import org.jitsi.impl.neomedia.*;
-import org.jitsi.impl.neomedia.control.*;
-import org.jitsi.impl.neomedia.device.*;
-import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*;
-import org.jitsi.service.neomedia.*;
-import org.jitsi.service.neomedia.codec.*;
-import org.jitsi.util.*;
-
-/**
- * Implements an audio Renderer using Windows Audio Session API
- * (WASAPI) and related Core Audio APIs such as Multimedia Device (MMDevice)
- * API.
- *
- * @author Lyubomir Marinov
- */
-public class WASAPIRenderer
- extends AbstractAudioRenderer
-{
- /**
- * The Logger used by the WASAPIRenderer class and its
- * instances to log debug information.
- */
- private static final Logger logger = Logger.getLogger(WASAPIRenderer.class);
-
- /**
- * The human-readable name of the WASAPIRenderer PlugIn
- * implementation instances.
- */
- private static final String PLUGIN_NAME
- = "Windows Audio Session API (WASAPI) Renderer";
-
- /**
- * Finds the first non-null element in a specific array of
- * AudioFormats.
- *
- * @param formats the array of AudioFormats in which the first
- * non-null element is to be found
- * @return the first non-null element in formats if any;
- * otherwise, null
- */
- private static AudioFormat findFirst(AudioFormat[] formats)
- {
- AudioFormat format = null;
-
- for (AudioFormat aFormat : formats)
- {
- if (aFormat != null)
- {
- format = aFormat;
- break;
- }
- }
- return format;
- }
-
- /**
- * Attempts to initialize and open a new Codec to resample media
- * data from a specific input AudioFormat into a specific output
- * AudioFormat. If no suitable resampler is found, returns
- * null. If a suitable resampler is found but its initialization or
- * opening fails, logs and swallows any Throwable and returns
- * null.
- *
- * @param inFormat the AudioFormat in which the new instance is to
- * input media data
- * @param outFormat the AudioFormat in which the new instance is to
- * output media data
- * @return a new Codec which is able to resample media data from
- * the specified inFormat into the specified outFormat if
- * such a resampler could be found, initialized and opened; otherwise,
- * null
- */
- public static Codec maybeOpenResampler(
- AudioFormat inFormat,
- AudioFormat outFormat)
- {
- @SuppressWarnings("unchecked")
- List classNames
- = PlugInManager.getPlugInList(
- inFormat,
- outFormat,
- PlugInManager.CODEC);
- Codec resampler = null;
-
- if (classNames != null)
- {
- for (String className : classNames)
- {
- try
- {
- Codec codec
- = (Codec) Class.forName(className).newInstance();
- Format setInput = codec.setInputFormat(inFormat);
-
- if ((setInput != null) && inFormat.matches(setInput))
- {
- Format setOutput = codec.setOutputFormat(outFormat);
-
- if ((setOutput != null) && outFormat.matches(setOutput))
- {
- codec.open();
- resampler = codec;
- break;
- }
- }
- }
- catch (Throwable t)
- {
- if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- else
- {
- logger.warn(
- "Failed to open resampler " + className,
- t);
- }
- }
- }
- }
- return resampler;
- }
-
- /**
- * Pops a specific number of bytes from (the head of) a specific array of
- * bytes.
- *
- * @param array the array of byte from which the specified number
- * of bytes are to be popped
- * @param arrayLength the number of elements in array which contain
- * valid data
- * @param length the number of bytes to be popped from array
- * @return the number of elements in array which contain valid data
- * after the specified number of bytes have been popped from it
- */
- public static int pop(byte[] array, int arrayLength, int length)
- {
- if (length < 0)
- throw new IllegalArgumentException("length");
- if (length == 0)
- return arrayLength;
-
- int newArrayLength = arrayLength - length;
-
- if (newArrayLength > 0)
- {
- for (int i = 0, j = length; i < newArrayLength; i++, j++)
- array[i] = array[j];
- }
- else
- newArrayLength = 0;
- return newArrayLength;
- }
-
- /**
- * The duration in milliseconds of the endpoint buffer.
- */
- private long bufferDuration;
-
- /**
- * The indicator which determines whether the audio stream represented by
- * this instance, {@link #iAudioClient} and {@link #iAudioRenderClient} is
- * busy and, consequently, its state should not be modified. For example,
- * the audio stream is busy during the execution of
- * {@link #process(Buffer)}.
- */
- private boolean busy;
-
- /**
- * The length in milliseconds of the interval between successive, periodic
- * processing passes by the audio engine on the data in the endpoint buffer.
- */
- private long devicePeriod = WASAPISystem.DEFAULT_DEVICE_PERIOD;
-
- /**
- * The value of {@link #devicePeriod} expressed in terms of numbers of
- * frames (i.e. takes the sample rate into account).
- */
- private int devicePeriodInFrames;
-
- /**
- * The number of channels with which {@link #iAudioClient} has been
- * initialized.
- */
- private int dstChannels;
-
- /**
- * The AudioFormat with which {@link #iAudioClient} has been
- * initialized.
- */
- private AudioFormat dstFormat;
-
- /**
- * The sample size in bytes with which {@link #iAudioClient} has been
- * initialized.
- */
- private int dstSampleSize;
-
- /**
- * The event handle that the system signals when an audio buffer is ready to
- * be processed by the client.
- */
- private long eventHandle;
-
- /**
- * The Runnable which is scheduled by this WASAPIRenderer
- * and executed by {@link #eventHandleExecutor} and waits for
- * {@link #eventHandle} to be signaled.
- */
- private Runnable eventHandleCmd;
-
- /**
- * The Executor implementation which is to execute
- * {@link #eventHandleCmd}.
- */
- private Executor eventHandleExecutor;
-
- /**
- * The WASAPI IAudioClient instance which enables this
- * Renderer to create and initialize an audio stream between this
- * Renderer and the audio engine of the associated audio endpoint
- * device.
- */
- private long iAudioClient;
-
- /**
- * The WASAPI IAudioRenderClient obtained from
- * {@link #iAudioClient} which enables this Renderer to write
- * output data to the rendering endpoint buffer.
- */
- private long iAudioRenderClient;
-
- /**
- * The indicator which determines whether the value of the locator
- * property of this instance was equal to null when this Renderer
- * was opened. Indicates that this Renderer should successfully
- * process media data without actually rendering to any render endpoint
- * device.
- */
- private boolean locatorIsNull;
-
- /**
- * The maximum capacity in frames of the endpoint buffer.
- */
- private int numBufferFrames;
-
- /**
- * The Codec which resamples the media provided to this
- * Renderer via {@link #process(Buffer)} into {@link #dstFormat}
- * if necessary.
- */
- private Codec resampler;
-
- /**
- * The number of channels of the audio signal output by {@link #resampler}.
- * It may differ from {@link #dstChannels}.
- */
- private int resamplerChannels;
-
- /**
- * The data which has remained unwritten during earlier invocations of
- * {@link #runInEventHandleCmd(Runnable)} because it represents frames which
- * are few enough to be accepted on their own for writing by
- * {@link #iAudioRenderClient}.
- */
- private byte[] resamplerData;
-
- /**
- * The size in bytes of an audio frame produced by {@link #resampler}. Based
- * on {@link #resamplerChannels} and {@link #resamplerSampleSize} and cached
- * in order to reduce calculations.
- */
- private int resamplerFrameSize;
-
- /**
- * The Buffer which provides the input to {@link #resampler}.
- * Represents a unit of {@link #srcBuffer} to be processed in a single call
- * to resampler.
- */
- private Buffer resamplerInBuffer;
-
- /**
- * The Buffer which receives the output of {@link #resampler}.
- */
- private Buffer resamplerOutBuffer;
-
- /**
- * The size in bytes of an audio sample produced by {@link #resampler}.
- */
- private int resamplerSampleSize;
-
- /**
- * The data which has remained unwritten during earlier invocations of
- * {@link #process(Buffer)} because it represents frames which are few
- * enough to be accepted on their own for writing by
- * {@link #iAudioRenderClient}.
- */
- private byte[] srcBuffer;
-
- /**
- * The number of bytes in {@link #srcBuffer} which represent valid audio
- * data to be written by {@link #iAudioRenderClient}.
- */
- private int srcBufferLength;
-
- /**
- * The number of channels which which this Renderer has been
- * opened.
- */
- private int srcChannels;
-
- /**
- * The AudioFormat with which this Renderer has been
- * opened.
- */
- private AudioFormat srcFormat;
-
- /**
- * The frame size in bytes with which this Renderer has been
- * opened. It is the product of {@link #srcSampleSize} and
- * {@link #srcChannels}.
- */
- private int srcFrameSize;
-
- /**
- * The sample size in bytes with which this Renderer has been
- * opened.
- */
- private int srcSampleSize;
-
- /**
- * The indicator which determines whether this Renderer is started
- * i.e. there has been a successful invocation of {@link #start()} without
- * an intervening invocation of {@link #stop()}.
- */
- private boolean started;
-
- /**
- * The time in milliseconds at which the writing to the render endpoint
- * buffer has started malfunctioning. For example, {@link #srcBuffer} being
- * full from the point of view of {@link #process(Buffer)} for an extended
- * period of time may indicate abnormal functioning.
- */
- private long writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
-
- /**
- * The maximum interval of time in milliseconds that the writing to the
- * render endpoint buffer is allowed to be under suspicion that it is
- * malfunctioning. If it remains under suspicion after the maximum interval
- * of time has elapsed, the writing to the render endpoint buffer is to be
- * considered malfunctioning for real.
- */
- private long writeIsMalfunctioningTimeout;
-
- /**
- * Initializes a new WASAPIRenderer instance which is to perform
- * playback (as opposed to sound a notification).
- */
- public WASAPIRenderer()
- {
- this(AudioSystem.DataFlow.PLAYBACK);
- }
-
- /**
- * Initializes a new WASAPIRenderer instance which is to either
- * perform playback or sound a notification.
- *
- * @param dataFlow {@link AudioSystem.DataFlow#PLAYBACK} if the new instance
- * is to perform playback or {@link AudioSystem.DataFlow#NOTIFY} if the new
- * instance is to sound a notification
- */
- public WASAPIRenderer(AudioSystem.DataFlow dataFlow)
- {
- super(AudioSystem.LOCATOR_PROTOCOL_WASAPI, dataFlow);
- }
-
- /**
- * Initializes a new WASAPIRenderer instance which is to either
- * perform playback or sound a notification.
- *
- * @param playback true if the new instance is to perform playback
- * or false if the new instance is to sound a notification
- */
- public WASAPIRenderer(boolean playback)
- {
- this(
- playback
- ? AudioSystem.DataFlow.PLAYBACK
- : AudioSystem.DataFlow.NOTIFY);
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public synchronized void close()
- {
- try
- {
- stop();
- }
- finally
- {
- if (iAudioRenderClient != 0)
- {
- IAudioRenderClient_Release(iAudioRenderClient);
- iAudioRenderClient = 0;
- }
- if (iAudioClient != 0)
- {
- IAudioClient_Release(iAudioClient);
- iAudioClient = 0;
- }
- if (eventHandle != 0)
- {
- try
- {
- CloseHandle(eventHandle);
- }
- catch (HResultException hre)
- {
- // The event HANDLE will be leaked.
- logger.warn("Failed to close event HANDLE.", hre);
- }
- eventHandle = 0;
- }
- maybeCloseResampler();
-
- dstFormat = null;
- locatorIsNull = false;
- srcBuffer = null;
- srcBufferLength = 0;
- srcFormat = null;
- started = false;
-
- super.close();
- }
- }
-
- /**
- * Gets an array of alternative AudioFormats based on
- * inputFormat with which an attempt is to be made to initialize a
- * new IAudioClient instance.
- *
- * @return an array of alternative AudioFormats based on
- * inputFormat with which an attempt is to be made to initialize a
- * new IAudioClient instance
- */
- private AudioFormat[] getFormatsToInitializeIAudioClient()
- {
- AudioFormat inputFormat = this.inputFormat;
-
- if (inputFormat == null)
- throw new NullPointerException("No inputFormat set.");
- else
- {
- /*
- * Prefer to initialize the IAudioClient with an AudioFormat which
- * matches the inputFormat as closely as possible.
- */
- AudioFormat[] preferredFormats
- = WASAPISystem.getFormatsToInitializeIAudioClient(inputFormat);
- // Otherwise, any supported Format will do.
- Format[] supportedFormats = getSupportedInputFormats();
- List formats
- = new ArrayList(
- preferredFormats.length + supportedFormats.length);
-
- for (AudioFormat format : preferredFormats)
- {
- if (!formats.contains(format))
- formats.add(format);
- }
- for (Format format : supportedFormats)
- {
- if (!formats.contains(format)
- && (format instanceof AudioFormat))
- {
- formats.add((AudioFormat) format);
- }
- }
-
- /*
- * Resampling isn't very cool. Moreover, resampling between sample
- * rates with a non-integer quotient may result in audio glitches.
- * Try to minimize the risks of having to use any of these two when
- * unnecessary.
- */
- final int sampleRate = (int) inputFormat.getSampleRate();
-
- if (sampleRate != Format.NOT_SPECIFIED)
- {
- Collections.sort(
- formats,
- new Comparator()
- {
- @Override
- public int compare(AudioFormat af1, AudioFormat af2)
- {
- int d1 = computeSampleRateDistance(af1);
- int d2 = computeSampleRateDistance(af2);
-
- return (d1 < d2) ? -1 : (d1 == d2) ? 0 : 1;
- }
-
- private int computeSampleRateDistance(
- AudioFormat af)
- {
- int sr = (int) af.getSampleRate();
-
- if (sr == Format.NOT_SPECIFIED)
- return Integer.MAX_VALUE;
- else if (sr == sampleRate)
- return 0;
-
- int min, max;
- boolean downsample;
-
- if (sr < sampleRate)
- {
- min = sr;
- max = sampleRate;
- downsample = true;
- }
- else
- {
- min = sampleRate;
- max = sr;
- downsample = false;
- }
- if (min == 0)
- return Integer.MAX_VALUE;
- else
- {
- int h = max % min;
- int l = max / min;
-
- /*
- * Prefer AudioFormats which will cause
- * upsampling to AudioFormats which will
- * cause downsampling.
- */
- if (downsample)
- {
- l = Short.MAX_VALUE - l;
- if (h != 0)
- h = Short.MAX_VALUE - h;
- }
-
- return (h << 16) | l;
- }
- }
- });
- }
-
- return formats.toArray(new AudioFormat[formats.size()]);
- }
- }
-
- /**
- * {@inheritDoc}
- */
- public String getName()
- {
- return PLUGIN_NAME;
- }
-
- /**
- * {@inheritDoc}
- *
- * Overrides the super implementation to handle the case in which the user
- * has selected "none" for the playback/notify device.
- */
- @Override
- public Format[] getSupportedInputFormats()
- {
- if (getLocator() == null)
- {
- /*
- * XXX We toyed with the idea of calculating a list of common
- * Formats supported by all devices (of the dataFlow of this
- * AbstractAudioRenderer, of course) but that turned out to be
- * monstrous in code, inefficient at least in terms of garbage
- * collection and with questionable suitability. The following
- * approach will likely have a comparable suitability with better
- * efficiency achieved code that is easier to understand.
- */
-
- /*
- * The maximums supported by the WASAPI integration at the time of
- * this writing.
- */
- double sampleRate = MediaUtils.MAX_AUDIO_SAMPLE_RATE;
- int sampleSizeInBits = 16;
- int channels = 2;
-
- if ((sampleRate == Format.NOT_SPECIFIED)
- && (Constants.AUDIO_SAMPLE_RATES.length != 0))
- sampleRate = Constants.AUDIO_SAMPLE_RATES[0];
- return
- WASAPISystem.getFormatsToInitializeIAudioClient(
- new AudioFormat(
- AudioFormat.LINEAR,
- sampleRate,
- sampleSizeInBits,
- channels,
- AudioFormat.LITTLE_ENDIAN,
- AudioFormat.SIGNED,
- /* frameSizeInBits */ Format.NOT_SPECIFIED,
- /* frameRate */ Format.NOT_SPECIFIED,
- Format.byteArray));
- }
- else
- return super.getSupportedInputFormats();
- }
-
- /**
- * Closes {@link #resampler} if it is non-null.
- */
- private void maybeCloseResampler()
- {
- Codec resampler = this.resampler;
-
- if (resampler != null)
- {
- this.resampler = null;
- resamplerData = null;
- resamplerInBuffer = null;
- resamplerOutBuffer = null;
-
- try
- {
- resampler.close();
- }
- catch (Throwable t)
- {
- if (t instanceof InterruptedException)
- Thread.currentThread().interrupt();
- else if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- else
- logger.error("Failed to close resampler.", t);
- }
- }
- }
-
- /**
- * Invokes WASAPI.IAudioRenderClient_Write on
- * {@link #iAudioRenderClient} and logs and swallows any
- * HResultException.
- *
- * @param data the bytes of the audio samples to be written into the render
- * endpoint buffer
- * @param offset the offset in data at which the bytes of the audio
- * samples to be written into the render endpoint buffer begin
- * @param length the number of the bytes in data beginning at
- * offset of the audio samples to be written into the render
- * endpoint buffer
- * @param srcSampleSize the size in bytes of an audio sample in
- * data
- * @param srcChannels the number of channels of the audio signal provided in
- * data
- * @return the number of bytes from data (starting at
- * offset) which have been written into the render endpoint buffer
- * or 0 upon HResultException
- */
- private int maybeIAudioRenderClientWrite(
- byte[] data, int offset, int length,
- int srcSampleSize, int srcChannels)
- {
- int written;
-
- try
- {
- written
- = IAudioRenderClient_Write(
- iAudioRenderClient,
- data, offset, length,
- srcSampleSize, srcChannels,
- dstSampleSize, dstChannels);
- }
- catch (HResultException hre)
- {
- written = 0;
- logger.error("IAudioRenderClient_Write", hre);
- }
- return written;
- }
-
- /**
- * Initializes and opens a new instance of {@link #resampler} if the
- * Format-related state of this instance deems its existence
- * necessary.
- */
- private void maybeOpenResampler()
- {
- AudioFormat inFormat = this.inputFormat;
- AudioFormat outFormat = this.dstFormat;
-
- // We are able to translate between mono and stereo.
- if ((inFormat.getSampleRate() == outFormat.getSampleRate())
- && (inFormat.getSampleSizeInBits()
- == outFormat.getSampleSizeInBits()))
- return;
-
- // The resamplers are not expected to convert between mono and stereo.
- int channels = inFormat.getChannels();
-
- if (outFormat.getChannels() != channels)
- {
- outFormat
- = new AudioFormat(
- outFormat.getEncoding(),
- outFormat.getSampleRate(),
- outFormat.getSampleSizeInBits(),
- channels,
- outFormat.getEndian(),
- outFormat.getSigned(),
- /* frameSizeInBits */ Format.NOT_SPECIFIED,
- /* frameRate */ Format.NOT_SPECIFIED,
- outFormat.getDataType());
- }
-
- Codec resampler = maybeOpenResampler(inFormat, outFormat);
-
- if (resampler == null)
- {
- throw new IllegalStateException(
- "Failed to open a codec to resample [" + inFormat
- + "] into [" + outFormat + "].");
- }
- else
- {
- this.resampler = resampler;
-
- resamplerInBuffer = new Buffer();
- resamplerInBuffer.setFormat(inFormat);
-
- resamplerChannels = outFormat.getChannels();
- resamplerSampleSize = WASAPISystem.getSampleSizeInBytes(outFormat);
- resamplerFrameSize = resamplerChannels * resamplerSampleSize;
-
- resamplerData = new byte[numBufferFrames * resamplerFrameSize];
-
- resamplerOutBuffer = new Buffer();
- resamplerOutBuffer.setData(resamplerData);
- resamplerOutBuffer.setLength(0);
- resamplerOutBuffer.setOffset(0);
- }
- }
-
- /**
- * Processes audio samples from {@link #srcBuffer} through
- * {@link #resampler} i.e. resamples them in order to produce media data
- * in {@link #resamplerData} to be written into the render endpoint buffer.
- *
- * @param numFramesRequested the number of audio frames in the units of
- * {@link #dstFormat} requested by the rendering endpoint
- */
- private void maybeResample(int numFramesRequested)
- {
- int outLength = resamplerOutBuffer.getLength();
-
- /*
- * Do not resample if there is enough resampled audio to satisfy the
- * request of the rendering endpoint buffer.
- */
- if (outLength < numFramesRequested * resamplerFrameSize)
- {
- // Sample rate conversions work on audio frames, not on bytes.
- int outFrames
- = (resamplerData.length - outLength) / resamplerFrameSize;
-
- if (outFrames > 0)
- {
- /*
- * Predict how many bytes will be consumed from the input during
- * the sample rate conversion.
- */
- int srcSampleRate = (int) srcFormat.getSampleRate();
- int dstSampleRate = (int) dstFormat.getSampleRate();
- int inLength
- = (outFrames * srcSampleRate / dstSampleRate)
- * srcFrameSize;
-
- if (inLength > srcBuffer.length)
- inLength = srcBuffer.length;
- if (inLength > srcBufferLength)
- inLength = srcBufferLength;
- if (inLength > 0)
- {
- int resampled;
-
- resamplerOutBuffer.setLength(0);
- resamplerOutBuffer.setOffset(outLength);
- try
- {
- resamplerOutBuffer.setDiscard(false);
- resamplerInBuffer.setLength(inLength);
- resamplerInBuffer.setOffset(0);
-
- resampler.process(
- resamplerInBuffer,
- resamplerOutBuffer);
- }
- finally
- {
- resampled = resamplerOutBuffer.getLength();
- outLength = resamplerOutBuffer.getOffset() + resampled;
- resamplerOutBuffer.setLength(outLength);
- resamplerOutBuffer.setOffset(0);
- }
-
- if (resampled > 0)
- {
- /*
- * How many bytes have actually been consumed from the
- * input during the sample rate conversion?
- */
- resampled
- = ((resampled / resamplerFrameSize)
- * srcSampleRate
- / dstSampleRate)
- * srcFrameSize;
- if (resampled > 0)
- popFromSrcBuffer(resampled);
- }
- }
- }
- }
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public synchronized void open()
- throws ResourceUnavailableException
- {
- if (this.iAudioClient != 0)
- return;
-
- MediaLocator locator = null;
-
- try
- {
- locator = getLocator();
- if (locatorIsNull = (locator == null))
- {
- /*
- * We actually want to allow the user to switch the playback
- * and/or notify device to none mid-stream in order to disable
- * the playback.
- */
- }
- else
- {
-
- /*
- * The method getFormatsToInitializeIAudioClient will assert that
- * inputFormat is set.
- */
- AudioFormat[] formats = getFormatsToInitializeIAudioClient();
- long eventHandle = CreateEvent(0, false, false, null);
-
- try
- {
- long iAudioClient
- = audioSystem.initializeIAudioClient(
- locator,
- dataFlow,
- /* streamFlags */ 0,
- eventHandle,
- WASAPISystem.DEFAULT_BUFFER_DURATION,
- formats);
-
- if (iAudioClient == 0)
- {
- throw new ResourceUnavailableException(
- "Failed to initialize IAudioClient"
- + " for MediaLocator " + locator
- + " and AudioSystem.DataFlow " + dataFlow);
- }
- try
- {
- long iAudioRenderClient
- = IAudioClient_GetService(
- iAudioClient,
- IID_IAudioRenderClient);
-
- if (iAudioRenderClient == 0)
- {
- throw new ResourceUnavailableException(
- "IAudioClient_GetService"
- + "(IID_IAudioRenderClient)");
- }
- try
- {
- srcFormat = this.inputFormat;
- dstFormat = findFirst(formats);
-
- /*
- * The value hnsDefaultDevicePeriod is documented to
- * specify the default scheduling period for a
- * shared-mode stream.
- */
- devicePeriod
- = IAudioClient_GetDefaultDevicePeriod(iAudioClient)
- / 10000L;
- numBufferFrames
- = IAudioClient_GetBufferSize(iAudioClient);
-
- int dstSampleRate = (int) dstFormat.getSampleRate();
-
- bufferDuration
- = numBufferFrames * 1000L / dstSampleRate;
- /*
- * We will very likely be inefficient if we fail to
- * synchronize with the scheduling period of the audio
- * engine but we have to make do with what we have.
- */
- if (devicePeriod <= 1)
- {
- devicePeriod = bufferDuration / 2;
- if ((devicePeriod
- > WASAPISystem.DEFAULT_DEVICE_PERIOD)
- || (devicePeriod <= 1))
- devicePeriod
- = WASAPISystem.DEFAULT_DEVICE_PERIOD;
- }
- devicePeriodInFrames
- = (int) (devicePeriod * dstSampleRate / 1000L);
-
- dstChannels = dstFormat.getChannels();
- dstSampleSize
- = WASAPISystem.getSampleSizeInBytes(dstFormat);
-
- maybeOpenResampler();
-
- srcChannels = srcFormat.getChannels();
- srcSampleSize
- = WASAPISystem.getSampleSizeInBytes(srcFormat);
- srcFrameSize = srcSampleSize * srcChannels;
-
- /*
- * The remainder/residue in frames of
- * IAudioRenderClient_Write cannot be more than the
- * maximum capacity of the endpoint buffer.
- */
- int srcBufferCapacityInFrames;
-
- if (resampler == null)
- {
- srcBufferCapacityInFrames = numBufferFrames;
- }
- else
- {
- /*
- * The units of srcBuffer are based on srcFormat,
- * the units of numBufferFrames are based on
- * dstFormat.
- */
- int srcSampleRate = (int) srcFormat.getSampleRate();
-
- srcBufferCapacityInFrames
- = numBufferFrames
- * srcSampleRate
- / dstSampleRate;
- }
- srcBuffer
- = new byte[
- srcBufferCapacityInFrames * srcFrameSize];
- if (resamplerInBuffer != null)
- resamplerInBuffer.setData(srcBuffer);
-
- /*
- * Introduce latency in order to decrease the likelihood
- * of underflow.
- */
- srcBufferLength = srcBuffer.length;
-
- writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
- writeIsMalfunctioningTimeout
- = 2 * Math.max(bufferDuration, devicePeriod);
-
- this.eventHandle = eventHandle;
- eventHandle = 0;
- this.iAudioClient = iAudioClient;
- iAudioClient = 0;
- this.iAudioRenderClient = iAudioRenderClient;
- iAudioRenderClient = 0;
- }
- finally
- {
- if (iAudioRenderClient != 0)
- IAudioRenderClient_Release(iAudioRenderClient);
- }
- }
- finally
- {
- if (iAudioClient != 0)
- {
- IAudioClient_Release(iAudioClient);
- maybeCloseResampler();
- }
- }
- }
- finally
- {
- if (eventHandle != 0)
- CloseHandle(eventHandle);
- }
-
- } // The locator of this Renderer is not null.
- }
- catch (Throwable t)
- {
- if (t instanceof InterruptedException)
- Thread.currentThread().interrupt();
- else if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- else
- {
- logger.error(
- "Failed to open a WASAPIRenderer on audio endpoint"
- + " device " + toString(locator),
- t);
- if (t instanceof ResourceUnavailableException)
- throw (ResourceUnavailableException) t;
- else
- {
- ResourceUnavailableException rue
- = new ResourceUnavailableException();
-
- rue.initCause(t);
- throw rue;
- }
- }
- }
-
- super.open();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected synchronized void playbackDevicePropertyChange(
- PropertyChangeEvent ev)
- {
- /*
- * Stop, close, re-open and re-start this Renderer (performing whichever
- * of these in order to bring it into the same state) in order to
- * reflect the change in the selection with respect to the playback or
- * notify device.
- */
-
- waitWhileBusy();
-
- boolean open
- = ((iAudioClient != 0) && (iAudioRenderClient != 0))
- || locatorIsNull;
-
- if (open)
- {
- boolean start = started;
-
- close();
-
- try
- {
- open();
- }
- catch (ResourceUnavailableException rue)
- {
- throw new UndeclaredThrowableException(rue);
- }
- if (start)
- start();
- }
- }
-
- /**
- * Pops a specific number of bytes from {@link #srcBuffer}. For example,
- * because such a number of bytes have been read from srcBuffer and
- * written into the rendering endpoint buffer.
- *
- * @param length the number of bytes to pop from srcBuffer
- */
- private void popFromSrcBuffer(int length)
- {
- srcBufferLength = pop(srcBuffer, srcBufferLength, length);
- }
-
- /**
- * {@inheritDoc}
- */
- public int process(Buffer buffer)
- {
- int length = buffer.getLength();
-
- if (length < 1)
- return BUFFER_PROCESSED_OK;
-
- byte[] data = (byte[]) buffer.getData();
- int offset = buffer.getOffset();
-
- synchronized (this)
- {
- if ((iAudioClient == 0) || (iAudioRenderClient == 0))
- {
- /*
- * We actually want to allow the user to switch the playback
- * and/or notify device to none mid-stream in order to disable
- * the playback.
- */
- return
- locatorIsNull
- ? BUFFER_PROCESSED_OK
- : BUFFER_PROCESSED_FAILED;
- }
- else if (!started)
- return BUFFER_PROCESSED_FAILED;
- else
- {
- waitWhileBusy();
- busy = true;
- }
- }
-
- int ret = BUFFER_PROCESSED_OK;
- long sleep = 0;
-
- try
- {
- int numPaddingFrames;
-
- if (eventHandle == 0)
- {
- try
- {
- numPaddingFrames
- = IAudioClient_GetCurrentPadding(iAudioClient);
- }
- catch (HResultException hre)
- {
- numPaddingFrames = 0;
- ret = BUFFER_PROCESSED_FAILED;
- logger.error("IAudioClient_GetCurrentPadding", hre);
- }
- }
- else
- {
- /*
- * The process method will not write into the rendering endpoint
- * buffer, the runInEventHandleCmd method will.
- */
- numPaddingFrames = numBufferFrames;
- }
- if (ret != BUFFER_PROCESSED_FAILED)
- {
- int numFramesRequested = numBufferFrames - numPaddingFrames;
-
- if (numFramesRequested == 0)
- {
- if (eventHandle == 0)
- {
- /*
- * There is NO available space in the rendering endpoint
- * buffer into which this Renderer can write data.
- */
- ret |= INPUT_BUFFER_NOT_CONSUMED;
- sleep = devicePeriod;
- /*
- * The writing to the render endpoint buffer may or may
- * not be malfunctioning, it depends on the interval of
- * time that the state remains unchanged.
- */
- if (writeIsMalfunctioningSince
- == DiagnosticsControl.NEVER)
- setWriteIsMalfunctioning(true);
- }
- else
- {
- /*
- * The process method will write into srcBuffer, the
- * runInEventHandleCmd will read from srcBuffer and
- * write into the rendering endpoint buffer.
- */
- int toCopy = srcBuffer.length - srcBufferLength;
-
- if (toCopy > 0)
- {
- if (toCopy > length)
- toCopy = length;
- System.arraycopy(
- data, offset,
- srcBuffer, srcBufferLength,
- toCopy);
- srcBufferLength += toCopy;
-
- if (length > toCopy)
- {
- buffer.setLength(length - toCopy);
- buffer.setOffset(offset + toCopy);
- ret |= INPUT_BUFFER_NOT_CONSUMED;
- }
-
- /*
- * Writing from the input Buffer into srcBuffer has
- * occurred so it does not look like the writing to
- * the render endpoint buffer is malfunctioning.
- */
- if (writeIsMalfunctioningSince
- != DiagnosticsControl.NEVER)
- setWriteIsMalfunctioning(false);
- }
- else
- {
- ret |= INPUT_BUFFER_NOT_CONSUMED;
- sleep = devicePeriod;
- /*
- * No writing from the input Buffer into srcBuffer
- * has occurred so it is possible that the writing
- * to the render endpoint buffer is malfunctioning.
- */
- if (writeIsMalfunctioningSince
- == DiagnosticsControl.NEVER)
- setWriteIsMalfunctioning(true);
- }
- }
- }
- else
- {
- /*
- * There is available space in the rendering endpoint
- * buffer into which this Renderer can write data.
- */
- int effectiveLength = srcBufferLength + length;
- int toWrite
- = Math.min(
- effectiveLength,
- numFramesRequested * srcFrameSize);
- byte[] effectiveData;
- int effectiveOffset;
-
- if (srcBufferLength > 0)
- {
- /*
- * There is remainder/residue from earlier invocations
- * of the method. This Renderer will feed
- * iAudioRenderClient from srcBuffer.
- */
- effectiveData = srcBuffer;
- effectiveOffset = 0;
-
- int toCopy = toWrite - srcBufferLength;
-
- if (toCopy <= 0)
- ret |= INPUT_BUFFER_NOT_CONSUMED;
- else
- {
- if (toCopy > length)
- toCopy = length;
- System.arraycopy(
- data, offset,
- srcBuffer, srcBufferLength,
- toCopy);
- srcBufferLength += toCopy;
-
- if (toWrite > srcBufferLength)
- toWrite = srcBufferLength;
-
- if (length > toCopy)
- {
- buffer.setLength(length - toCopy);
- buffer.setOffset(offset + toCopy);
- ret |= INPUT_BUFFER_NOT_CONSUMED;
- }
- }
- }
- else
- {
- /*
- * There is no remainder/residue from earlier
- * invocations of the method. This Renderer will feed
- * iAudioRenderClient from data.
- */
- effectiveData = data;
- effectiveOffset = offset;
- }
-
- int written;
-
- if ((toWrite / srcFrameSize) == 0)
- written = 0;
- else
- {
- /*
- * Take into account the user's preferences with respect
- * to the output volume.
- */
- GainControl gainControl = getGainControl();
-
- if (gainControl != null)
- {
- BasicVolumeControl.applyGain(
- gainControl,
- effectiveData, effectiveOffset, toWrite);
- }
-
- try
- {
- written
- = IAudioRenderClient_Write(
- iAudioRenderClient,
- effectiveData, effectiveOffset, toWrite,
- srcSampleSize, srcChannels,
- dstSampleSize, dstChannels);
- }
- catch (HResultException hre)
- {
- written = 0;
- ret = BUFFER_PROCESSED_FAILED;
- logger.error("IAudioRenderClient_Write", hre);
- }
- }
- if (ret != BUFFER_PROCESSED_FAILED)
- {
- if (effectiveData == data)
- {
- // We have consumed frames from data.
- if (written == 0)
- {
- /*
- * The available number of frames appear to be
- * too few for IAudioRenderClient to accept.
- * They will have to be prepended to the next
- * input Buffer.
- */
- System.arraycopy(
- data, offset,
- srcBuffer, srcBufferLength,
- toWrite);
- srcBufferLength += toWrite;
- written = toWrite;
- }
- if (length > written)
- {
- buffer.setLength(length - written);
- buffer.setOffset(offset + written);
- ret |= INPUT_BUFFER_NOT_CONSUMED;
- }
- }
- else if (written > 0)
- {
- // We have consumed frames from srcBuffer.
- popFromSrcBuffer(written);
- }
-
- if (writeIsMalfunctioningSince
- != DiagnosticsControl.NEVER)
- setWriteIsMalfunctioning(false);
- }
- }
-
- /*
- * If the writing to the render endpoint buffer is
- * malfunctioning, fail the processing of the input Buffer in
- * order to avoid blocking of the Codec chain.
- */
- if (((ret & INPUT_BUFFER_NOT_CONSUMED)
- == INPUT_BUFFER_NOT_CONSUMED)
- && (writeIsMalfunctioningSince
- != DiagnosticsControl.NEVER))
- {
- long writeIsMalfunctioningDuration
- = System.currentTimeMillis()
- - writeIsMalfunctioningSince;
-
- if (writeIsMalfunctioningDuration
- > writeIsMalfunctioningTimeout)
- {
- /*
- * The writing to the render endpoint buffer has taken
- * too long so whatever is in srcBuffer is surely
- * out-of-date.
- */
- srcBufferLength = 0;
- ret = BUFFER_PROCESSED_FAILED;
- logger.warn(
- "Audio endpoint device appears to be"
- + " malfunctioning: "
- + getLocator());
- }
- }
- }
- }
- finally
- {
- synchronized (this)
- {
- busy = false;
- notifyAll();
- }
- }
- /*
- * If there was no available space in the rendering endpoint buffer, we
- * will want to wait a bit for such space to be made available.
- */
- if (((ret & INPUT_BUFFER_NOT_CONSUMED) == INPUT_BUFFER_NOT_CONSUMED)
- && (sleep > 0))
- {
- boolean interrupted = false;
-
- synchronized (this)
- {
- /*
- * Spurious wake-ups should not be a big issue here. While this
- * Renderer may check for available space in the rendering
- * endpoint buffer more often than practically necessary (which
- * may very well classify as a case of performance loss), the
- * ability to unblock this Renderer is considered more
- * important.
- */
- try
- {
- wait(sleep);
- }
- catch (InterruptedException ie)
- {
- interrupted = true;
- }
- }
- if (interrupted)
- Thread.currentThread().interrupt();
- }
- return ret;
- }
-
- /**
- * Runs/executes in the thread associated with a specific Runnable
- * initialized to wait for {@link #eventHandle} to be signaled.
- *
- * @param eventHandleCmd the Runnable which has been initialized to
- * wait for eventHandle to be signaled and in whose associated
- * thread the method is invoked
- */
- private void runInEventHandleCmd(Runnable eventHandleCmd)
- {
- try
- {
- useAudioThreadPriority();
-
- do
- {
- long eventHandle;
-
- synchronized (this)
- {
- /*
- * Does this WASAPIRender still want eventHandleCmd to
- * execute?
- */
- if (!eventHandleCmd.equals(this.eventHandleCmd))
- break;
- // Is this WASAPIRenderer still opened and started?
- if ((iAudioClient == 0)
- || (iAudioRenderClient == 0)
- || !started)
- break;
-
- /*
- * The value of eventHandle will remain valid while this
- * WASAPIRenderer wants eventHandleCmd to execute.
- */
- eventHandle = this.eventHandle;
- if (eventHandle == 0)
- throw new IllegalStateException("eventHandle");
-
- waitWhileBusy();
- busy = true;
- }
- try
- {
- int numPaddingFrames;
-
- try
- {
- numPaddingFrames
- = IAudioClient_GetCurrentPadding(iAudioClient);
- }
- catch (HResultException hre)
- {
- numPaddingFrames = numBufferFrames;
- logger.error("IAudioClient_GetCurrentPadding", hre);
- }
-
- int numFramesRequested = numBufferFrames - numPaddingFrames;
-
- /*
- * If there is no available space in the rendering endpoint
- * buffer, wait for the system to signal when an audio
- * buffer is ready to be processed by the client.
- */
- if (numFramesRequested > 0)
- {
- byte[] buf;
- int bufChannels;
- int bufFrameSize;
- int bufLength;
- int bufSampleSize;
-
- if (resampler == null)
- {
- buf = srcBuffer;
- bufChannels = srcChannels;
- bufFrameSize = srcFrameSize;
- bufLength = srcBufferLength;
- bufSampleSize = srcSampleSize;
- }
- else
- {
- /*
- * The units of srcBuffer are based on srcFormat,
- * the units of numFramesRequested are based on
- * dstFormat.
- */
- maybeResample(numFramesRequested);
-
- buf = resamplerData;
- bufChannels = resamplerChannels;
- bufFrameSize = resamplerFrameSize;
- bufLength = resamplerOutBuffer.getLength();
- bufSampleSize = resamplerSampleSize;
- }
-
- /*
- * Write as much from buf as possible while minimizing
- * the risk of audio glitches and the amount of
- * artificial/induced silence.
- */
- int bufFrames = bufLength / bufFrameSize;
-
- if ((numFramesRequested > bufFrames)
- && (bufFrames >= devicePeriodInFrames))
- numFramesRequested = bufFrames;
-
- // Pad with silence in order to avoid underflows.
- int toWrite = numFramesRequested * bufFrameSize;
-
- if (toWrite > buf.length)
- toWrite = buf.length;
-
- int silence = toWrite - bufLength;
-
- if (silence > 0)
- {
- Arrays.fill(buf, bufLength, toWrite, (byte) 0);
- bufLength = toWrite;
- }
-
- /*
- * Take into account the user's preferences with respect
- * to the output volume.
- */
- GainControl gainControl = getGainControl();
-
- if ((gainControl != null) && (toWrite != 0))
- {
- BasicVolumeControl.applyGain(
- gainControl,
- buf, 0, toWrite);
- }
-
- int written
- = maybeIAudioRenderClientWrite(
- buf, 0, toWrite,
- bufSampleSize, bufChannels);
-
- if (written != 0)
- {
- bufLength = pop(buf, bufLength, written);
- if (buf == srcBuffer)
- srcBufferLength = bufLength;
- else
- resamplerOutBuffer.setLength(bufLength);
-
- if (writeIsMalfunctioningSince
- != DiagnosticsControl.NEVER)
- setWriteIsMalfunctioning(false);
- }
- }
- }
- finally
- {
- synchronized (this)
- {
- busy = false;
- notifyAll();
- }
- }
-
- int wfso;
-
- try
- {
- wfso = WaitForSingleObject(eventHandle, devicePeriod);
- }
- catch (HResultException hre)
- {
- /*
- * WaitForSingleObject will throw HResultException only in
- * the case of WAIT_FAILED. Event if it didn't, it would
- * still be a failure from our point of view.
- */
- wfso = WAIT_FAILED;
- logger.error("WaitForSingleObject", hre);
- }
- /*
- * If the function WaitForSingleObject fails once, it will very
- * likely fail forever. Bail out of a possible busy wait.
- */
- if ((wfso == WAIT_FAILED) || (wfso == WAIT_ABANDONED))
- break;
- }
- while (true);
- }
- finally
- {
- synchronized (this)
- {
- if (eventHandleCmd.equals(this.eventHandleCmd))
- {
- this.eventHandleCmd = null;
- notifyAll();
- }
- }
- }
- }
-
- /**
- * {@inheritDoc}
- *
- * Disallows mid-stream changes of the inputFormat of this
- * AbstractRenderer.
- */
- @Override
- public synchronized Format setInputFormat(Format format)
- {
- /*
- * WASAPIRenderer does not support mid-stream changes of the
- * inputFormat.
- */
- if ((iAudioClient != 0) || (iAudioRenderClient != 0))
- return null;
- else
- return super.setInputFormat(format);
- }
-
- /**
- * Indicates whether the writing to the render endpoint buffer is
- * malfunctioning. Keeps track of the time at which the malfunction has
- * started.
- *
- * @param writeIsMalfunctioning true if the writing to the render
- * endpoint buffer is (believed to be) malfunctioning; otherwise,
- * false
- */
- private void setWriteIsMalfunctioning(boolean writeIsMalfunctioning)
- {
- if (writeIsMalfunctioning)
- {
- if (writeIsMalfunctioningSince == DiagnosticsControl.NEVER)
- writeIsMalfunctioningSince = System.currentTimeMillis();
- }
- else
- writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
- }
-
- /**
- * {@inheritDoc}
- */
- public synchronized void start()
- {
- if (iAudioClient == 0)
- {
- /*
- * We actually want to allow the user to switch the playback and/or
- * notify device to none mid-stream in order to disable the
- * playback.
- */
- if (locatorIsNull)
- started = true;
- }
- else
- {
- waitWhileBusy();
- waitWhileEventHandleCmd();
-
- /*
- * Introduce latency in order to decrease the likelihood of
- * underflow.
- */
- if (srcBuffer != null)
- {
- if (srcBufferLength > 0)
- {
- /*
- * Shift the valid audio data to the end of srcBuffer so
- * that silence can be written at the beginning.
- */
- for (int i = srcBuffer.length - 1, j = srcBufferLength - 1;
- j >= 0;
- i--, j--)
- {
- srcBuffer[i] = srcBuffer[j];
- }
- }
- else if (srcBufferLength < 0)
- srcBufferLength = 0;
-
- /*
- * If there is valid audio data in srcBuffer, it has been
- * shifted to the end to make room for silence at the beginning.
- */
- int silence = srcBuffer.length - srcBufferLength;
-
- if (silence > 0)
- Arrays.fill(srcBuffer, 0, silence, (byte) 0);
- srcBufferLength = srcBuffer.length;
- }
-
- try
- {
- IAudioClient_Start(iAudioClient);
- started = true;
-
- if ((eventHandle != 0) && (this.eventHandleCmd == null))
- {
- Runnable eventHandleCmd
- = new Runnable()
- {
- public void run()
- {
- runInEventHandleCmd(this);
- }
- };
- boolean submitted = false;
-
- try
- {
- if (eventHandleExecutor == null)
- {
- eventHandleExecutor
- = Executors.newSingleThreadExecutor();
- }
-
- this.eventHandleCmd = eventHandleCmd;
- eventHandleExecutor.execute(eventHandleCmd);
- submitted = true;
- }
- finally
- {
- if (!submitted
- && eventHandleCmd.equals(this.eventHandleCmd))
- this.eventHandleCmd = null;
- }
- }
- }
- catch (HResultException hre)
- {
- /*
- * If IAudioClient_Start is invoked multiple times without
- * intervening IAudioClient_Stop, it will likely return/throw
- * AUDCLNT_E_NOT_STOPPED.
- */
- if (hre.getHResult() != AUDCLNT_E_NOT_STOPPED)
- logger.error("IAudioClient_Start", hre);
- }
- }
- }
-
- /**
- * {@inheritDoc}
- */
- public synchronized void stop()
- {
- if (iAudioClient == 0)
- {
- /*
- * We actually want to allow the user to switch the playback and/or
- * notify device to none mid-stream in order to disable the
- * playback.
- */
- if (locatorIsNull)
- started = false;
- }
- else
- {
- waitWhileBusy();
-
- try
- {
- /*
- * If IAudioClient_Stop is invoked multiple times without
- * intervening IAudioClient_Start, it is documented to return
- * S_FALSE.
- */
- IAudioClient_Stop(iAudioClient);
- started = false;
-
- waitWhileEventHandleCmd();
-
- writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
- }
- catch (HResultException hre)
- {
- logger.error("IAudioClient_Stop", hre);
- }
- }
- }
-
- /**
- * Gets a human-readable representation of a specific MediaLocator
- * for the purposes of testing/debugging.
- *
- * @param locator the MediaLocator that is to be represented in a
- * human-readable form for the purposes of testing/debugging
- * @return a human-readable representation of the specified locator
- * for the purposes of testing/debugging
- */
- private String toString(MediaLocator locator)
- {
- String s;
-
- if (locator == null)
- s = "null";
- else
- {
- s = null;
- /*
- * Try to not throw any exceptions because the purpose is to produce
- * at least some identification of the specified MediaLocator even
- * if not the most complete.
- */
- try
- {
- String id = locator.getRemainder();
-
- if (id != null)
- {
- CaptureDeviceInfo2 cdi2
- = audioSystem.getDevice(dataFlow, locator);
-
- if (cdi2 != null)
- {
- String name = cdi2.getName();
-
- if ((name != null) && !id.equals(name))
- s = id + " with friendly name " + name;
- }
- if (s == null)
- s = id;
- }
- }
- catch (Throwable t)
- {
- if (t instanceof InterruptedException)
- Thread.currentThread().interrupt();
- else if (t instanceof ThreadDeath)
- throw (ThreadDeath) t;
- }
- if (s == null)
- s = locator.toString();
- }
- return s;
- }
-
- /**
- * Waits on this instance while the value of {@link #busy} is equal to
- * true.
- */
- private synchronized void waitWhileBusy()
- {
- boolean interrupted = false;
-
- while (busy)
- {
- try
- {
- wait(devicePeriod);
- }
- catch (InterruptedException ie)
- {
- interrupted = true;
- }
- }
- if (interrupted)
- Thread.currentThread().interrupt();
- }
-
- /**
- * Waits on this instance while the value of {@link #eventHandleCmd} is
- * non-null.
- */
- private synchronized void waitWhileEventHandleCmd()
- {
- if (eventHandle == 0)
- throw new IllegalStateException("eventHandle");
-
- boolean interrupted = false;
-
- while (eventHandleCmd != null)
- {
- try
- {
- wait(devicePeriod);
- }
- catch (InterruptedException ie)
- {
- interrupted = true;
- }
- }
- if (interrupted)
- Thread.currentThread().interrupt();
- }
-}
+package org.jitsi.impl.neomedia.jmfext.media.renderer.audio;
+
+import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
+
+import java.beans.*;
+import java.lang.reflect.*;
+import java.util.*;
+import java.util.concurrent.*;
+
+import javax.media.*;
+import javax.media.format.*;
+
+import org.jitsi.impl.neomedia.*;
+import org.jitsi.impl.neomedia.control.*;
+import org.jitsi.impl.neomedia.device.*;
+import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*;
+import org.jitsi.service.neomedia.*;
+import org.jitsi.service.neomedia.codec.*;
+import org.jitsi.util.*;
+
+/**
+ * Implements an audio Renderer using Windows Audio Session API
+ * (WASAPI) and related Core Audio APIs such as Multimedia Device (MMDevice)
+ * API.
+ *
+ * @author Lyubomir Marinov
+ */
+public class WASAPIRenderer
+ extends AbstractAudioRenderer
+{
+ /**
+ * The Logger used by the WASAPIRenderer class and its
+ * instances to log debug information.
+ */
+ private static final Logger logger = Logger.getLogger(WASAPIRenderer.class);
+
+ /**
+ * The human-readable name of the WASAPIRenderer PlugIn
+ * implementation instances.
+ */
+ private static final String PLUGIN_NAME
+ = "Windows Audio Session API (WASAPI) Renderer";
+
+ /**
+ * Finds the first non-null element in a specific array of
+ * AudioFormats.
+ *
+ * @param formats the array of AudioFormats in which the first
+ * non-null element is to be found
+ * @return the first non-null element in formats if any;
+ * otherwise, null
+ */
+ private static AudioFormat findFirst(AudioFormat[] formats)
+ {
+ AudioFormat format = null;
+
+ for (AudioFormat aFormat : formats)
+ {
+ if (aFormat != null)
+ {
+ format = aFormat;
+ break;
+ }
+ }
+ return format;
+ }
+
+ /**
+ * Attempts to initialize and open a new Codec to resample media
+ * data from a specific input AudioFormat into a specific output
+ * AudioFormat. If no suitable resampler is found, returns
+ * null. If a suitable resampler is found but its initialization or
+ * opening fails, logs and swallows any Throwable and returns
+ * null.
+ *
+ * @param inFormat the AudioFormat in which the new instance is to
+ * input media data
+ * @param outFormat the AudioFormat in which the new instance is to
+ * output media data
+ * @return a new Codec which is able to resample media data from
+ * the specified inFormat into the specified outFormat if
+ * such a resampler could be found, initialized and opened; otherwise,
+ * null
+ */
+ public static Codec maybeOpenResampler(
+ AudioFormat inFormat,
+ AudioFormat outFormat)
+ {
+ @SuppressWarnings("unchecked")
+ List classNames
+ = PlugInManager.getPlugInList(
+ inFormat,
+ outFormat,
+ PlugInManager.CODEC);
+ Codec resampler = null;
+
+ if (classNames != null)
+ {
+ for (String className : classNames)
+ {
+ try
+ {
+ Codec codec
+ = (Codec) Class.forName(className).newInstance();
+ Format setInput = codec.setInputFormat(inFormat);
+
+ if ((setInput != null) && inFormat.matches(setInput))
+ {
+ Format setOutput = codec.setOutputFormat(outFormat);
+
+ if ((setOutput != null) && outFormat.matches(setOutput))
+ {
+ codec.open();
+ resampler = codec;
+ break;
+ }
+ }
+ }
+ catch (Throwable t)
+ {
+ if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ else
+ {
+ logger.warn(
+ "Failed to open resampler " + className,
+ t);
+ }
+ }
+ }
+ }
+ return resampler;
+ }
+
+ /**
+ * Pops a specific number of bytes from (the head of) a specific array of
+ * bytes.
+ *
+ * @param array the array of byte from which the specified number
+ * of bytes are to be popped
+ * @param arrayLength the number of elements in array which contain
+ * valid data
+ * @param length the number of bytes to be popped from array
+ * @return the number of elements in array which contain valid data
+ * after the specified number of bytes have been popped from it
+ */
+ public static int pop(byte[] array, int arrayLength, int length)
+ {
+ if (length < 0)
+ throw new IllegalArgumentException("length");
+ if (length == 0)
+ return arrayLength;
+
+ int newArrayLength = arrayLength - length;
+
+ if (newArrayLength > 0)
+ {
+ for (int i = 0, j = length; i < newArrayLength; i++, j++)
+ array[i] = array[j];
+ }
+ else
+ newArrayLength = 0;
+ return newArrayLength;
+ }
+
+ /**
+ * The duration in milliseconds of the endpoint buffer.
+ */
+ private long bufferDuration;
+
+ /**
+ * The indicator which determines whether the audio stream represented by
+ * this instance, {@link #iAudioClient} and {@link #iAudioRenderClient} is
+ * busy and, consequently, its state should not be modified. For example,
+ * the audio stream is busy during the execution of
+ * {@link #process(Buffer)}.
+ */
+ private boolean busy;
+
+ /**
+ * The length in milliseconds of the interval between successive, periodic
+ * processing passes by the audio engine on the data in the endpoint buffer.
+ */
+ private long devicePeriod = WASAPISystem.DEFAULT_DEVICE_PERIOD;
+
+ /**
+ * The value of {@link #devicePeriod} expressed in terms of numbers of
+ * frames (i.e. takes the sample rate into account).
+ */
+ private int devicePeriodInFrames;
+
+ /**
+ * The number of channels with which {@link #iAudioClient} has been
+ * initialized.
+ */
+ private int dstChannels;
+
+ /**
+ * The AudioFormat with which {@link #iAudioClient} has been
+ * initialized.
+ */
+ private AudioFormat dstFormat;
+
+ /**
+ * The sample size in bytes with which {@link #iAudioClient} has been
+ * initialized.
+ */
+ private int dstSampleSize;
+
+ /**
+ * The event handle that the system signals when an audio buffer is ready to
+ * be processed by the client.
+ */
+ private long eventHandle;
+
+ /**
+ * The Runnable which is scheduled by this WASAPIRenderer
+ * and executed by {@link #eventHandleExecutor} and waits for
+ * {@link #eventHandle} to be signaled.
+ */
+ private Runnable eventHandleCmd;
+
+ /**
+ * The Executor implementation which is to execute
+ * {@link #eventHandleCmd}.
+ */
+ private Executor eventHandleExecutor;
+
+ /**
+ * The WASAPI IAudioClient instance which enables this
+ * Renderer to create and initialize an audio stream between this
+ * Renderer and the audio engine of the associated audio endpoint
+ * device.
+ */
+ private long iAudioClient;
+
+ /**
+ * The WASAPI IAudioRenderClient obtained from
+ * {@link #iAudioClient} which enables this Renderer to write
+ * output data to the rendering endpoint buffer.
+ */
+ private long iAudioRenderClient;
+
+ /**
+ * The indicator which determines whether the value of the locator
+ * property of this instance was equal to null when this Renderer
+ * was opened. Indicates that this Renderer should successfully
+ * process media data without actually rendering to any render endpoint
+ * device.
+ */
+ private boolean locatorIsNull;
+
+ /**
+ * The maximum capacity in frames of the endpoint buffer.
+ */
+ private int numBufferFrames;
+
+ /**
+ * The Codec which resamples the media provided to this
+ * Renderer via {@link #process(Buffer)} into {@link #dstFormat}
+ * if necessary.
+ */
+ private Codec resampler;
+
+ /**
+ * The number of channels of the audio signal output by {@link #resampler}.
+ * It may differ from {@link #dstChannels}.
+ */
+ private int resamplerChannels;
+
+ /**
+ * The data which has remained unwritten during earlier invocations of
+ * {@link #runInEventHandleCmd(Runnable)} because it represents frames which
+ * are few enough to be accepted on their own for writing by
+ * {@link #iAudioRenderClient}.
+ */
+ private byte[] resamplerData;
+
+ /**
+ * The size in bytes of an audio frame produced by {@link #resampler}. Based
+ * on {@link #resamplerChannels} and {@link #resamplerSampleSize} and cached
+ * in order to reduce calculations.
+ */
+ private int resamplerFrameSize;
+
+ /**
+ * The Buffer which provides the input to {@link #resampler}.
+ * Represents a unit of {@link #srcBuffer} to be processed in a single call
+ * to resampler.
+ */
+ private Buffer resamplerInBuffer;
+
+ /**
+ * The Buffer which receives the output of {@link #resampler}.
+ */
+ private Buffer resamplerOutBuffer;
+
+ /**
+ * The size in bytes of an audio sample produced by {@link #resampler}.
+ */
+ private int resamplerSampleSize;
+
+ /**
+ * The data which has remained unwritten during earlier invocations of
+ * {@link #process(Buffer)} because it represents frames which are few
+ * enough to be accepted on their own for writing by
+ * {@link #iAudioRenderClient}.
+ */
+ private byte[] srcBuffer;
+
+ /**
+ * The number of bytes in {@link #srcBuffer} which represent valid audio
+ * data to be written by {@link #iAudioRenderClient}.
+ */
+ private int srcBufferLength;
+
+ /**
+ * The number of channels with which this Renderer has been
+ * opened.
+ */
+ private int srcChannels;
+
+ /**
+ * The AudioFormat with which this Renderer has been
+ * opened.
+ */
+ private AudioFormat srcFormat;
+
+ /**
+ * The frame size in bytes with which this Renderer has been
+ * opened. It is the product of {@link #srcSampleSize} and
+ * {@link #srcChannels}.
+ */
+ private int srcFrameSize;
+
+ /**
+ * The sample size in bytes with which this Renderer has been
+ * opened.
+ */
+ private int srcSampleSize;
+
+ /**
+ * The indicator which determines whether this Renderer is started
+ * i.e. there has been a successful invocation of {@link #start()} without
+ * an intervening invocation of {@link #stop()}.
+ */
+ private boolean started;
+
+ /**
+ * The time in milliseconds at which the writing to the render endpoint
+ * buffer has started malfunctioning. For example, {@link #srcBuffer} being
+ * full from the point of view of {@link #process(Buffer)} for an extended
+ * period of time may indicate abnormal functioning.
+ */
+ private long writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
+
+ /**
+ * The maximum interval of time in milliseconds that the writing to the
+ * render endpoint buffer is allowed to be under suspicion that it is
+ * malfunctioning. If it remains under suspicion after the maximum interval
+ * of time has elapsed, the writing to the render endpoint buffer is to be
+ * considered malfunctioning for real.
+ */
+ private long writeIsMalfunctioningTimeout;
+
+ /**
+ * Initializes a new WASAPIRenderer instance which is to perform
+ * playback (as opposed to sound a notification).
+ */
+ public WASAPIRenderer()
+ {
+ this(AudioSystem.DataFlow.PLAYBACK);
+ }
+
+ /**
+ * Initializes a new WASAPIRenderer instance which is to either
+ * perform playback or sound a notification.
+ *
+ * @param dataFlow {@link AudioSystem.DataFlow#PLAYBACK} if the new instance
+ * is to perform playback or {@link AudioSystem.DataFlow#NOTIFY} if the new
+ * instance is to sound a notification
+ */
+ public WASAPIRenderer(AudioSystem.DataFlow dataFlow)
+ {
+ super(AudioSystem.LOCATOR_PROTOCOL_WASAPI, dataFlow);
+ }
+
+ /**
+ * Initializes a new WASAPIRenderer instance which is to either
+ * perform playback or sound a notification.
+ *
+ * @param playback true if the new instance is to perform playback
+ * or false if the new instance is to sound a notification
+ */
+ public WASAPIRenderer(boolean playback)
+ {
+ this(
+ playback
+ ? AudioSystem.DataFlow.PLAYBACK
+ : AudioSystem.DataFlow.NOTIFY);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public synchronized void close()
+ {
+ try
+ {
+ stop();
+ }
+ finally
+ {
+ if (iAudioRenderClient != 0)
+ {
+ IAudioRenderClient_Release(iAudioRenderClient);
+ iAudioRenderClient = 0;
+ }
+ if (iAudioClient != 0)
+ {
+ IAudioClient_Release(iAudioClient);
+ iAudioClient = 0;
+ }
+ if (eventHandle != 0)
+ {
+ try
+ {
+ CloseHandle(eventHandle);
+ }
+ catch (HResultException hre)
+ {
+ // The event HANDLE will be leaked.
+ logger.warn("Failed to close event HANDLE.", hre);
+ }
+ eventHandle = 0;
+ }
+ maybeCloseResampler();
+
+ dstFormat = null;
+ locatorIsNull = false;
+ srcBuffer = null;
+ srcBufferLength = 0;
+ srcFormat = null;
+ started = false;
+
+ super.close();
+ }
+ }
+
+ /**
+ * Gets an array of alternative AudioFormats based on
+ * inputFormat with which an attempt is to be made to initialize a
+ * new IAudioClient instance.
+ *
+ * @return an array of alternative AudioFormats based on
+ * inputFormat with which an attempt is to be made to initialize a
+ * new IAudioClient instance
+ */
+ private AudioFormat[] getFormatsToInitializeIAudioClient()
+ {
+ AudioFormat inputFormat = this.inputFormat;
+
+ if (inputFormat == null)
+ throw new NullPointerException("No inputFormat set.");
+ else
+ {
+ /*
+ * Prefer to initialize the IAudioClient with an AudioFormat which
+ * matches the inputFormat as closely as possible.
+ */
+ AudioFormat[] preferredFormats
+ = WASAPISystem.getFormatsToInitializeIAudioClient(inputFormat);
+ // Otherwise, any supported Format will do.
+ Format[] supportedFormats = getSupportedInputFormats();
+ List formats
+ = new ArrayList(
+ preferredFormats.length + supportedFormats.length);
+
+ for (AudioFormat format : preferredFormats)
+ {
+ if (!formats.contains(format))
+ formats.add(format);
+ }
+ for (Format format : supportedFormats)
+ {
+ if (!formats.contains(format)
+ && (format instanceof AudioFormat))
+ {
+ formats.add((AudioFormat) format);
+ }
+ }
+
+ /*
+ * Resampling isn't very cool. Moreover, resampling between sample
+ * rates with a non-integer quotient may result in audio glitches.
+ * Try to minimize the risks of having to use any of these two when
+ * unnecessary.
+ */
+ final int sampleRate = (int) inputFormat.getSampleRate();
+
+ if (sampleRate != Format.NOT_SPECIFIED)
+ {
+ Collections.sort(
+ formats,
+ new Comparator()
+ {
+ @Override
+ public int compare(AudioFormat af1, AudioFormat af2)
+ {
+ int d1 = computeSampleRateDistance(af1);
+ int d2 = computeSampleRateDistance(af2);
+
+ return (d1 < d2) ? -1 : (d1 == d2) ? 0 : 1;
+ }
+
+ private int computeSampleRateDistance(
+ AudioFormat af)
+ {
+ int sr = (int) af.getSampleRate();
+
+ if (sr == Format.NOT_SPECIFIED)
+ return Integer.MAX_VALUE;
+ else if (sr == sampleRate)
+ return 0;
+
+ int min, max;
+ boolean downsample;
+
+ if (sr < sampleRate)
+ {
+ min = sr;
+ max = sampleRate;
+ downsample = true;
+ }
+ else
+ {
+ min = sampleRate;
+ max = sr;
+ downsample = false;
+ }
+ if (min == 0)
+ return Integer.MAX_VALUE;
+ else
+ {
+ int h = max % min;
+ int l = max / min;
+
+ /*
+ * Prefer AudioFormats which will cause
+ * upsampling to AudioFormats which will
+ * cause downsampling.
+ */
+ if (downsample)
+ {
+ l = Short.MAX_VALUE - l;
+ if (h != 0)
+ h = Short.MAX_VALUE - h;
+ }
+
+ return (h << 16) | l;
+ }
+ }
+ });
+ }
+
+ return formats.toArray(new AudioFormat[formats.size()]);
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public String getName()
+ {
+ return PLUGIN_NAME;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Overrides the super implementation to handle the case in which the user
+ * has selected "none" for the playback/notify device.
+ */
+ @Override
+ public Format[] getSupportedInputFormats()
+ {
+ if (getLocator() == null)
+ {
+ /*
+ * XXX We toyed with the idea of calculating a list of common
+ * Formats supported by all devices (of the dataFlow of this
+ * AbstractAudioRenderer, of course) but that turned out to be
+ * monstrous in code, inefficient at least in terms of garbage
+ * collection and with questionable suitability. The following
+ * approach will likely have a comparable suitability with better
+ * efficiency achieved by code that is easier to understand.
+ */
+
+ /*
+ * The maximums supported by the WASAPI integration at the time of
+ * this writing.
+ */
+ double sampleRate = MediaUtils.MAX_AUDIO_SAMPLE_RATE;
+ int sampleSizeInBits = 16;
+ int channels = 2;
+
+ if ((sampleRate == Format.NOT_SPECIFIED)
+ && (Constants.AUDIO_SAMPLE_RATES.length != 0))
+ sampleRate = Constants.AUDIO_SAMPLE_RATES[0];
+ return
+ WASAPISystem.getFormatsToInitializeIAudioClient(
+ new AudioFormat(
+ AudioFormat.LINEAR,
+ sampleRate,
+ sampleSizeInBits,
+ channels,
+ AudioFormat.LITTLE_ENDIAN,
+ AudioFormat.SIGNED,
+ /* frameSizeInBits */ Format.NOT_SPECIFIED,
+ /* frameRate */ Format.NOT_SPECIFIED,
+ Format.byteArray));
+ }
+ else
+ return super.getSupportedInputFormats();
+ }
+
+ /**
+ * Closes {@link #resampler} if it is non-null.
+ */
+ private void maybeCloseResampler()
+ {
+ Codec resampler = this.resampler;
+
+ if (resampler != null)
+ {
+ this.resampler = null;
+ resamplerData = null;
+ resamplerInBuffer = null;
+ resamplerOutBuffer = null;
+
+ try
+ {
+ resampler.close();
+ }
+ catch (Throwable t)
+ {
+ if (t instanceof InterruptedException)
+ Thread.currentThread().interrupt();
+ else if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ else
+ logger.error("Failed to close resampler.", t);
+ }
+ }
+ }
+
+ /**
+ * Invokes WASAPI.IAudioRenderClient_Write on
+ * {@link #iAudioRenderClient} and logs and swallows any
+ * HResultException.
+ *
+ * @param data the bytes of the audio samples to be written into the render
+ * endpoint buffer
+ * @param offset the offset in data at which the bytes of the audio
+ * samples to be written into the render endpoint buffer begin
+ * @param length the number of the bytes in data beginning at
+ * offset of the audio samples to be written into the render
+ * endpoint buffer
+ * @param srcSampleSize the size in bytes of an audio sample in
+ * data
+ * @param srcChannels the number of channels of the audio signal provided in
+ * data
+ * @return the number of bytes from data (starting at
+ * offset) which have been written into the render endpoint buffer
+ * or 0 upon HResultException
+ */
+ private int maybeIAudioRenderClientWrite(
+ byte[] data, int offset, int length,
+ int srcSampleSize, int srcChannels)
+ {
+ int written;
+
+ try
+ {
+ written
+ = IAudioRenderClient_Write(
+ iAudioRenderClient,
+ data, offset, length,
+ srcSampleSize, srcChannels,
+ dstSampleSize, dstChannels);
+ }
+ catch (HResultException hre)
+ {
+ written = 0;
+ logger.error("IAudioRenderClient_Write", hre);
+ }
+ return written;
+ }
+
+ /**
+ * Initializes and opens a new instance of {@link #resampler} if the
+ * Format-related state of this instance deems its existence
+ * necessary.
+ */
+ private void maybeOpenResampler()
+ {
+ AudioFormat inFormat = this.inputFormat;
+ AudioFormat outFormat = this.dstFormat;
+
+ // We are able to translate between mono and stereo.
+ if ((inFormat.getSampleRate() == outFormat.getSampleRate())
+ && (inFormat.getSampleSizeInBits()
+ == outFormat.getSampleSizeInBits()))
+ return;
+
+ // The resamplers are not expected to convert between mono and stereo.
+ int channels = inFormat.getChannels();
+
+ if (outFormat.getChannels() != channels)
+ {
+ outFormat
+ = new AudioFormat(
+ outFormat.getEncoding(),
+ outFormat.getSampleRate(),
+ outFormat.getSampleSizeInBits(),
+ channels,
+ outFormat.getEndian(),
+ outFormat.getSigned(),
+ /* frameSizeInBits */ Format.NOT_SPECIFIED,
+ /* frameRate */ Format.NOT_SPECIFIED,
+ outFormat.getDataType());
+ }
+
+ Codec resampler = maybeOpenResampler(inFormat, outFormat);
+
+ if (resampler == null)
+ {
+ throw new IllegalStateException(
+ "Failed to open a codec to resample [" + inFormat
+ + "] into [" + outFormat + "].");
+ }
+ else
+ {
+ this.resampler = resampler;
+
+ resamplerInBuffer = new Buffer();
+ resamplerInBuffer.setFormat(inFormat);
+
+ resamplerChannels = outFormat.getChannels();
+ resamplerSampleSize = WASAPISystem.getSampleSizeInBytes(outFormat);
+ resamplerFrameSize = resamplerChannels * resamplerSampleSize;
+
+ resamplerData = new byte[numBufferFrames * resamplerFrameSize];
+
+ resamplerOutBuffer = new Buffer();
+ resamplerOutBuffer.setData(resamplerData);
+ resamplerOutBuffer.setLength(0);
+ resamplerOutBuffer.setOffset(0);
+ }
+ }
+
+ /**
+ * Processes audio samples from {@link #srcBuffer} through
+ * {@link #resampler} i.e. resamples them in order to produce media data
+ * in {@link #resamplerData} to be written into the render endpoint buffer.
+ *
+ * @param numFramesRequested the number of audio frames in the units of
+ * {@link #dstFormat} requested by the rendering endpoint
+ */
+ private void maybeResample(int numFramesRequested)
+ {
+ int outLength = resamplerOutBuffer.getLength();
+
+ /*
+ * Do not resample if there is enough resampled audio to satisfy the
+ * request of the rendering endpoint buffer.
+ */
+ if (outLength < numFramesRequested * resamplerFrameSize)
+ {
+ // Sample rate conversions work on audio frames, not on bytes.
+ int outFrames
+ = (resamplerData.length - outLength) / resamplerFrameSize;
+
+ if (outFrames > 0)
+ {
+ /*
+ * Predict how many bytes will be consumed from the input during
+ * the sample rate conversion.
+ */
+ int srcSampleRate = (int) srcFormat.getSampleRate();
+ int dstSampleRate = (int) dstFormat.getSampleRate();
+ int inLength
+ = (outFrames * srcSampleRate / dstSampleRate)
+ * srcFrameSize;
+
+ if (inLength > srcBuffer.length)
+ inLength = srcBuffer.length;
+ if (inLength > srcBufferLength)
+ inLength = srcBufferLength;
+ if (inLength > 0)
+ {
+ int resampled;
+
+ resamplerOutBuffer.setLength(0);
+ resamplerOutBuffer.setOffset(outLength);
+ try
+ {
+ resamplerOutBuffer.setDiscard(false);
+ resamplerInBuffer.setLength(inLength);
+ resamplerInBuffer.setOffset(0);
+
+ resampler.process(
+ resamplerInBuffer,
+ resamplerOutBuffer);
+ }
+ finally
+ {
+ resampled = resamplerOutBuffer.getLength();
+ outLength = resamplerOutBuffer.getOffset() + resampled;
+ resamplerOutBuffer.setLength(outLength);
+ resamplerOutBuffer.setOffset(0);
+ }
+
+ if (resampled > 0)
+ {
+ /*
+ * How many bytes have actually been consumed from the
+ * input during the sample rate conversion?
+ */
+ resampled
+ = ((resampled / resamplerFrameSize)
+ * srcSampleRate
+ / dstSampleRate)
+ * srcFrameSize;
+ if (resampled > 0)
+ popFromSrcBuffer(resampled);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public synchronized void open()
+ throws ResourceUnavailableException
+ {
+ if (this.iAudioClient != 0)
+ return;
+
+ MediaLocator locator = null;
+
+ try
+ {
+ locator = getLocator();
+ if (locatorIsNull = (locator == null))
+ {
+ /*
+ * We actually want to allow the user to switch the playback
+ * and/or notify device to none mid-stream in order to disable
+ * the playback.
+ */
+ }
+ else
+ {
+
+ /*
+ * The method getFormatsToInitializeIAudioClient will assert that
+ * inputFormat is set.
+ */
+ AudioFormat[] formats = getFormatsToInitializeIAudioClient();
+ long eventHandle = CreateEvent(0, false, false, null);
+
+ try
+ {
+ long iAudioClient
+ = audioSystem.initializeIAudioClient(
+ locator,
+ dataFlow,
+ /* streamFlags */ 0,
+ eventHandle,
+ WASAPISystem.DEFAULT_BUFFER_DURATION,
+ formats);
+
+ if (iAudioClient == 0)
+ {
+ throw new ResourceUnavailableException(
+ "Failed to initialize IAudioClient"
+ + " for MediaLocator " + locator
+ + " and AudioSystem.DataFlow " + dataFlow);
+ }
+ try
+ {
+ long iAudioRenderClient
+ = IAudioClient_GetService(
+ iAudioClient,
+ IID_IAudioRenderClient);
+
+ if (iAudioRenderClient == 0)
+ {
+ throw new ResourceUnavailableException(
+ "IAudioClient_GetService"
+ + "(IID_IAudioRenderClient)");
+ }
+ try
+ {
+ srcFormat = this.inputFormat;
+ dstFormat = findFirst(formats);
+
+ /*
+ * The value hnsDefaultDevicePeriod is documented to
+ * specify the default scheduling period for a
+ * shared-mode stream.
+ */
+ devicePeriod
+ = IAudioClient_GetDefaultDevicePeriod(iAudioClient)
+ / 10000L;
+ numBufferFrames
+ = IAudioClient_GetBufferSize(iAudioClient);
+
+ int dstSampleRate = (int) dstFormat.getSampleRate();
+
+ bufferDuration
+ = numBufferFrames * 1000L / dstSampleRate;
+ /*
+ * We will very likely be inefficient if we fail to
+ * synchronize with the scheduling period of the audio
+ * engine but we have to make do with what we have.
+ */
+ if (devicePeriod <= 1)
+ {
+ devicePeriod = bufferDuration / 2;
+ if ((devicePeriod
+ > WASAPISystem.DEFAULT_DEVICE_PERIOD)
+ || (devicePeriod <= 1))
+ devicePeriod
+ = WASAPISystem.DEFAULT_DEVICE_PERIOD;
+ }
+ devicePeriodInFrames
+ = (int) (devicePeriod * dstSampleRate / 1000L);
+
+ dstChannels = dstFormat.getChannels();
+ dstSampleSize
+ = WASAPISystem.getSampleSizeInBytes(dstFormat);
+
+ maybeOpenResampler();
+
+ srcChannels = srcFormat.getChannels();
+ srcSampleSize
+ = WASAPISystem.getSampleSizeInBytes(srcFormat);
+ srcFrameSize = srcSampleSize * srcChannels;
+
+ /*
+ * The remainder/residue in frames of
+ * IAudioRenderClient_Write cannot be more than the
+ * maximum capacity of the endpoint buffer.
+ */
+ int srcBufferCapacityInFrames;
+
+ if (resampler == null)
+ {
+ srcBufferCapacityInFrames = numBufferFrames;
+ }
+ else
+ {
+ /*
+ * The units of srcBuffer are based on srcFormat,
+ * the units of numBufferFrames are based on
+ * dstFormat.
+ */
+ int srcSampleRate = (int) srcFormat.getSampleRate();
+
+ srcBufferCapacityInFrames
+ = numBufferFrames
+ * srcSampleRate
+ / dstSampleRate;
+ }
+ srcBuffer
+ = new byte[
+ srcBufferCapacityInFrames * srcFrameSize];
+ if (resamplerInBuffer != null)
+ resamplerInBuffer.setData(srcBuffer);
+
+ /*
+ * Introduce latency in order to decrease the likelihood
+ * of underflow.
+ */
+ srcBufferLength = srcBuffer.length;
+
+ writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
+ writeIsMalfunctioningTimeout
+ = 2 * Math.max(bufferDuration, devicePeriod);
+
+ this.eventHandle = eventHandle;
+ eventHandle = 0;
+ this.iAudioClient = iAudioClient;
+ iAudioClient = 0;
+ this.iAudioRenderClient = iAudioRenderClient;
+ iAudioRenderClient = 0;
+ }
+ finally
+ {
+ if (iAudioRenderClient != 0)
+ IAudioRenderClient_Release(iAudioRenderClient);
+ }
+ }
+ finally
+ {
+ if (iAudioClient != 0)
+ {
+ IAudioClient_Release(iAudioClient);
+ maybeCloseResampler();
+ }
+ }
+ }
+ finally
+ {
+ if (eventHandle != 0)
+ CloseHandle(eventHandle);
+ }
+
+ } // The locator of this Renderer is not null.
+ }
+ catch (Throwable t)
+ {
+ if (t instanceof InterruptedException)
+ Thread.currentThread().interrupt();
+ else if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ else
+ {
+ logger.error(
+ "Failed to open a WASAPIRenderer on audio endpoint"
+ + " device " + toString(locator),
+ t);
+ if (t instanceof ResourceUnavailableException)
+ throw (ResourceUnavailableException) t;
+ else
+ {
+ ResourceUnavailableException rue
+ = new ResourceUnavailableException();
+
+ rue.initCause(t);
+ throw rue;
+ }
+ }
+ }
+
+ super.open();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected synchronized void playbackDevicePropertyChange(
+ PropertyChangeEvent ev)
+ {
+ /*
+ * Stop, close, re-open and re-start this Renderer (performing whichever
+ * of these in order to bring it into the same state) in order to
+ * reflect the change in the selection with respect to the playback or
+ * notify device.
+ */
+
+ waitWhileBusy();
+
+ boolean open
+ = ((iAudioClient != 0) && (iAudioRenderClient != 0))
+ || locatorIsNull;
+
+ if (open)
+ {
+ boolean start = started;
+
+ close();
+
+ try
+ {
+ open();
+ }
+ catch (ResourceUnavailableException rue)
+ {
+ throw new UndeclaredThrowableException(rue);
+ }
+ if (start)
+ start();
+ }
+ }
+
+ /**
+ * Pops a specific number of bytes from {@link #srcBuffer}. For example,
+ * because such a number of bytes have been read from srcBuffer and
+ * written into the rendering endpoint buffer.
+ *
+ * @param length the number of bytes to pop from srcBuffer
+ */
+ private void popFromSrcBuffer(int length)
+ {
+ srcBufferLength = pop(srcBuffer, srcBufferLength, length);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public int process(Buffer buffer)
+ {
+ int length = buffer.getLength();
+
+ if (length < 1)
+ return BUFFER_PROCESSED_OK;
+
+ byte[] data = (byte[]) buffer.getData();
+ int offset = buffer.getOffset();
+
+ synchronized (this)
+ {
+ if ((iAudioClient == 0) || (iAudioRenderClient == 0))
+ {
+ /*
+ * We actually want to allow the user to switch the playback
+ * and/or notify device to none mid-stream in order to disable
+ * the playback.
+ */
+ return
+ locatorIsNull
+ ? BUFFER_PROCESSED_OK
+ : BUFFER_PROCESSED_FAILED;
+ }
+ else if (!started)
+ return BUFFER_PROCESSED_FAILED;
+ else
+ {
+ waitWhileBusy();
+ busy = true;
+ }
+ }
+
+ int ret = BUFFER_PROCESSED_OK;
+ long sleep = 0;
+
+ try
+ {
+ int numPaddingFrames;
+
+ if (eventHandle == 0)
+ {
+ try
+ {
+ numPaddingFrames
+ = IAudioClient_GetCurrentPadding(iAudioClient);
+ }
+ catch (HResultException hre)
+ {
+ numPaddingFrames = 0;
+ ret = BUFFER_PROCESSED_FAILED;
+ logger.error("IAudioClient_GetCurrentPadding", hre);
+ }
+ }
+ else
+ {
+ /*
+ * The process method will not write into the rendering endpoint
+ * buffer, the runInEventHandleCmd method will.
+ */
+ numPaddingFrames = numBufferFrames;
+ }
+ if (ret != BUFFER_PROCESSED_FAILED)
+ {
+ int numFramesRequested = numBufferFrames - numPaddingFrames;
+
+ if (numFramesRequested == 0)
+ {
+ if (eventHandle == 0)
+ {
+ /*
+ * There is NO available space in the rendering endpoint
+ * buffer into which this Renderer can write data.
+ */
+ ret |= INPUT_BUFFER_NOT_CONSUMED;
+ sleep = devicePeriod;
+ /*
+ * The writing to the render endpoint buffer may or may
+ * not be malfunctioning, it depends on the interval of
+ * time that the state remains unchanged.
+ */
+ if (writeIsMalfunctioningSince
+ == DiagnosticsControl.NEVER)
+ setWriteIsMalfunctioning(true);
+ }
+ else
+ {
+ /*
+ * The process method will write into srcBuffer, the
+ * runInEventHandleCmd will read from srcBuffer and
+ * write into the rendering endpoint buffer.
+ */
+ int toCopy = srcBuffer.length - srcBufferLength;
+
+ if (toCopy > 0)
+ {
+ if (toCopy > length)
+ toCopy = length;
+ System.arraycopy(
+ data, offset,
+ srcBuffer, srcBufferLength,
+ toCopy);
+ srcBufferLength += toCopy;
+
+ if (length > toCopy)
+ {
+ buffer.setLength(length - toCopy);
+ buffer.setOffset(offset + toCopy);
+ ret |= INPUT_BUFFER_NOT_CONSUMED;
+ }
+
+ /*
+ * Writing from the input Buffer into srcBuffer has
+ * occurred so it does not look like the writing to
+ * the render endpoint buffer is malfunctioning.
+ */
+ if (writeIsMalfunctioningSince
+ != DiagnosticsControl.NEVER)
+ setWriteIsMalfunctioning(false);
+ }
+ else
+ {
+ ret |= INPUT_BUFFER_NOT_CONSUMED;
+ sleep = devicePeriod;
+ /*
+ * No writing from the input Buffer into srcBuffer
+ * has occurred so it is possible that the writing
+ * to the render endpoint buffer is malfunctioning.
+ */
+ if (writeIsMalfunctioningSince
+ == DiagnosticsControl.NEVER)
+ setWriteIsMalfunctioning(true);
+ }
+ }
+ }
+ else
+ {
+ /*
+ * There is available space in the rendering endpoint
+ * buffer into which this Renderer can write data.
+ */
+ int effectiveLength = srcBufferLength + length;
+ int toWrite
+ = Math.min(
+ effectiveLength,
+ numFramesRequested * srcFrameSize);
+ byte[] effectiveData;
+ int effectiveOffset;
+
+ if (srcBufferLength > 0)
+ {
+ /*
+ * There is remainder/residue from earlier invocations
+ * of the method. This Renderer will feed
+ * iAudioRenderClient from srcBuffer.
+ */
+ effectiveData = srcBuffer;
+ effectiveOffset = 0;
+
+ int toCopy = toWrite - srcBufferLength;
+
+ if (toCopy <= 0)
+ ret |= INPUT_BUFFER_NOT_CONSUMED;
+ else
+ {
+ if (toCopy > length)
+ toCopy = length;
+ System.arraycopy(
+ data, offset,
+ srcBuffer, srcBufferLength,
+ toCopy);
+ srcBufferLength += toCopy;
+
+ if (toWrite > srcBufferLength)
+ toWrite = srcBufferLength;
+
+ if (length > toCopy)
+ {
+ buffer.setLength(length - toCopy);
+ buffer.setOffset(offset + toCopy);
+ ret |= INPUT_BUFFER_NOT_CONSUMED;
+ }
+ }
+ }
+ else
+ {
+ /*
+ * There is no remainder/residue from earlier
+ * invocations of the method. This Renderer will feed
+ * iAudioRenderClient from data.
+ */
+ effectiveData = data;
+ effectiveOffset = offset;
+ }
+
+ int written;
+
+ if ((toWrite / srcFrameSize) == 0)
+ written = 0;
+ else
+ {
+ /*
+ * Take into account the user's preferences with respect
+ * to the output volume.
+ */
+ GainControl gainControl = getGainControl();
+
+ if (gainControl != null)
+ {
+ BasicVolumeControl.applyGain(
+ gainControl,
+ effectiveData, effectiveOffset, toWrite);
+ }
+
+ try
+ {
+ written
+ = IAudioRenderClient_Write(
+ iAudioRenderClient,
+ effectiveData, effectiveOffset, toWrite,
+ srcSampleSize, srcChannels,
+ dstSampleSize, dstChannels);
+ }
+ catch (HResultException hre)
+ {
+ written = 0;
+ ret = BUFFER_PROCESSED_FAILED;
+ logger.error("IAudioRenderClient_Write", hre);
+ }
+ }
+ if (ret != BUFFER_PROCESSED_FAILED)
+ {
+ if (effectiveData == data)
+ {
+ // We have consumed frames from data.
+ if (written == 0)
+ {
+ /*
+ * The available number of frames appear to be
+ * too few for IAudioRenderClient to accept.
+ * They will have to be prepended to the next
+ * input Buffer.
+ */
+ System.arraycopy(
+ data, offset,
+ srcBuffer, srcBufferLength,
+ toWrite);
+ srcBufferLength += toWrite;
+ written = toWrite;
+ }
+ if (length > written)
+ {
+ buffer.setLength(length - written);
+ buffer.setOffset(offset + written);
+ ret |= INPUT_BUFFER_NOT_CONSUMED;
+ }
+ }
+ else if (written > 0)
+ {
+ // We have consumed frames from srcBuffer.
+ popFromSrcBuffer(written);
+ }
+
+ if (writeIsMalfunctioningSince
+ != DiagnosticsControl.NEVER)
+ setWriteIsMalfunctioning(false);
+ }
+ }
+
+ /*
+ * If the writing to the render endpoint buffer is
+ * malfunctioning, fail the processing of the input Buffer in
+ * order to avoid blocking of the Codec chain.
+ */
+ if (((ret & INPUT_BUFFER_NOT_CONSUMED)
+ == INPUT_BUFFER_NOT_CONSUMED)
+ && (writeIsMalfunctioningSince
+ != DiagnosticsControl.NEVER))
+ {
+ long writeIsMalfunctioningDuration
+ = System.currentTimeMillis()
+ - writeIsMalfunctioningSince;
+
+ if (writeIsMalfunctioningDuration
+ > writeIsMalfunctioningTimeout)
+ {
+ /*
+ * The writing to the render endpoint buffer has taken
+ * too long so whatever is in srcBuffer is surely
+ * out-of-date.
+ */
+ srcBufferLength = 0;
+ ret = BUFFER_PROCESSED_FAILED;
+ logger.warn(
+ "Audio endpoint device appears to be"
+ + " malfunctioning: "
+ + getLocator());
+ }
+ }
+ }
+ }
+ finally
+ {
+ synchronized (this)
+ {
+ busy = false;
+ notifyAll();
+ }
+ }
+ /*
+ * If there was no available space in the rendering endpoint buffer, we
+ * will want to wait a bit for such space to be made available.
+ */
+ if (((ret & INPUT_BUFFER_NOT_CONSUMED) == INPUT_BUFFER_NOT_CONSUMED)
+ && (sleep > 0))
+ {
+ boolean interrupted = false;
+
+ synchronized (this)
+ {
+ /*
+ * Spurious wake-ups should not be a big issue here. While this
+ * Renderer may check for available space in the rendering
+ * endpoint buffer more often than practically necessary (which
+ * may very well classify as a case of performance loss), the
+ * ability to unblock this Renderer is considered more
+ * important.
+ */
+ try
+ {
+ wait(sleep);
+ }
+ catch (InterruptedException ie)
+ {
+ interrupted = true;
+ }
+ }
+ if (interrupted)
+ Thread.currentThread().interrupt();
+ }
+ return ret;
+ }
+
+ /**
+ * Runs/executes in the thread associated with a specific Runnable
+ * initialized to wait for {@link #eventHandle} to be signaled.
+ *
+ * @param eventHandleCmd the Runnable which has been initialized to
+ * wait for eventHandle to be signaled and in whose associated
+ * thread the method is invoked
+ */
+ private void runInEventHandleCmd(Runnable eventHandleCmd)
+ {
+ try
+ {
+ useAudioThreadPriority();
+
+ do
+ {
+ long eventHandle;
+
+ synchronized (this)
+ {
+ /*
+                 * Does this WASAPIRenderer still want eventHandleCmd to
+ * execute?
+ */
+ if (!eventHandleCmd.equals(this.eventHandleCmd))
+ break;
+ // Is this WASAPIRenderer still opened and started?
+ if ((iAudioClient == 0)
+ || (iAudioRenderClient == 0)
+ || !started)
+ break;
+
+ /*
+ * The value of eventHandle will remain valid while this
+ * WASAPIRenderer wants eventHandleCmd to execute.
+ */
+ eventHandle = this.eventHandle;
+ if (eventHandle == 0)
+ throw new IllegalStateException("eventHandle");
+
+ waitWhileBusy();
+ busy = true;
+ }
+ try
+ {
+ int numPaddingFrames;
+
+ try
+ {
+ numPaddingFrames
+ = IAudioClient_GetCurrentPadding(iAudioClient);
+ }
+ catch (HResultException hre)
+ {
+ numPaddingFrames = numBufferFrames;
+ logger.error("IAudioClient_GetCurrentPadding", hre);
+ }
+
+ int numFramesRequested = numBufferFrames - numPaddingFrames;
+
+ /*
+ * If there is no available space in the rendering endpoint
+ * buffer, wait for the system to signal when an audio
+ * buffer is ready to be processed by the client.
+ */
+ if (numFramesRequested > 0)
+ {
+ byte[] buf;
+ int bufChannels;
+ int bufFrameSize;
+ int bufLength;
+ int bufSampleSize;
+
+ if (resampler == null)
+ {
+ buf = srcBuffer;
+ bufChannels = srcChannels;
+ bufFrameSize = srcFrameSize;
+ bufLength = srcBufferLength;
+ bufSampleSize = srcSampleSize;
+ }
+ else
+ {
+ /*
+ * The units of srcBuffer are based on srcFormat,
+ * the units of numFramesRequested are based on
+ * dstFormat.
+ */
+ maybeResample(numFramesRequested);
+
+ buf = resamplerData;
+ bufChannels = resamplerChannels;
+ bufFrameSize = resamplerFrameSize;
+ bufLength = resamplerOutBuffer.getLength();
+ bufSampleSize = resamplerSampleSize;
+ }
+
+ /*
+ * Write as much from buf as possible while minimizing
+ * the risk of audio glitches and the amount of
+ * artificial/induced silence.
+ */
+ int bufFrames = bufLength / bufFrameSize;
+
+ if ((numFramesRequested > bufFrames)
+ && (bufFrames >= devicePeriodInFrames))
+ numFramesRequested = bufFrames;
+
+ // Pad with silence in order to avoid underflows.
+ int toWrite = numFramesRequested * bufFrameSize;
+
+ if (toWrite > buf.length)
+ toWrite = buf.length;
+
+ int silence = toWrite - bufLength;
+
+ if (silence > 0)
+ {
+ Arrays.fill(buf, bufLength, toWrite, (byte) 0);
+ bufLength = toWrite;
+ }
+
+ /*
+ * Take into account the user's preferences with respect
+ * to the output volume.
+ */
+ GainControl gainControl = getGainControl();
+
+ if ((gainControl != null) && (toWrite != 0))
+ {
+ BasicVolumeControl.applyGain(
+ gainControl,
+ buf, 0, toWrite);
+ }
+
+ int written
+ = maybeIAudioRenderClientWrite(
+ buf, 0, toWrite,
+ bufSampleSize, bufChannels);
+
+ if (written != 0)
+ {
+ bufLength = pop(buf, bufLength, written);
+ if (buf == srcBuffer)
+ srcBufferLength = bufLength;
+ else
+ resamplerOutBuffer.setLength(bufLength);
+
+ if (writeIsMalfunctioningSince
+ != DiagnosticsControl.NEVER)
+ setWriteIsMalfunctioning(false);
+ }
+ }
+ }
+ finally
+ {
+ synchronized (this)
+ {
+ busy = false;
+ notifyAll();
+ }
+ }
+
+ int wfso;
+
+ try
+ {
+ wfso = WaitForSingleObject(eventHandle, devicePeriod);
+ }
+ catch (HResultException hre)
+ {
+ /*
+ * WaitForSingleObject will throw HResultException only in
+                 * the case of WAIT_FAILED. Even if it didn't, it would
+ * still be a failure from our point of view.
+ */
+ wfso = WAIT_FAILED;
+ logger.error("WaitForSingleObject", hre);
+ }
+ /*
+ * If the function WaitForSingleObject fails once, it will very
+ * likely fail forever. Bail out of a possible busy wait.
+ */
+ if ((wfso == WAIT_FAILED) || (wfso == WAIT_ABANDONED))
+ break;
+ }
+ while (true);
+ }
+ finally
+ {
+ synchronized (this)
+ {
+ if (eventHandleCmd.equals(this.eventHandleCmd))
+ {
+ this.eventHandleCmd = null;
+ notifyAll();
+ }
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Disallows mid-stream changes of the inputFormat of this
+ * AbstractRenderer.
+ */
+ @Override
+ public synchronized Format setInputFormat(Format format)
+ {
+ /*
+ * WASAPIRenderer does not support mid-stream changes of the
+ * inputFormat.
+ */
+ if ((iAudioClient != 0) || (iAudioRenderClient != 0))
+ return null;
+ else
+ return super.setInputFormat(format);
+ }
+
+ /**
+ * Indicates whether the writing to the render endpoint buffer is
+ * malfunctioning. Keeps track of the time at which the malfunction has
+ * started.
+ *
+ * @param writeIsMalfunctioning true if the writing to the render
+ * endpoint buffer is (believed to be) malfunctioning; otherwise,
+ * false
+ */
+ private void setWriteIsMalfunctioning(boolean writeIsMalfunctioning)
+ {
+ if (writeIsMalfunctioning)
+ {
+ if (writeIsMalfunctioningSince == DiagnosticsControl.NEVER)
+ writeIsMalfunctioningSince = System.currentTimeMillis();
+ }
+ else
+ writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public synchronized void start()
+ {
+ if (iAudioClient == 0)
+ {
+ /*
+ * We actually want to allow the user to switch the playback and/or
+ * notify device to none mid-stream in order to disable the
+ * playback.
+ */
+ if (locatorIsNull)
+ started = true;
+ }
+ else
+ {
+ waitWhileBusy();
+ waitWhileEventHandleCmd();
+
+ /*
+ * Introduce latency in order to decrease the likelihood of
+ * underflow.
+ */
+ if (srcBuffer != null)
+ {
+ if (srcBufferLength > 0)
+ {
+ /*
+ * Shift the valid audio data to the end of srcBuffer so
+ * that silence can be written at the beginning.
+ */
+ for (int i = srcBuffer.length - 1, j = srcBufferLength - 1;
+ j >= 0;
+ i--, j--)
+ {
+ srcBuffer[i] = srcBuffer[j];
+ }
+ }
+ else if (srcBufferLength < 0)
+ srcBufferLength = 0;
+
+ /*
+ * If there is valid audio data in srcBuffer, it has been
+ * shifted to the end to make room for silence at the beginning.
+ */
+ int silence = srcBuffer.length - srcBufferLength;
+
+ if (silence > 0)
+ Arrays.fill(srcBuffer, 0, silence, (byte) 0);
+ srcBufferLength = srcBuffer.length;
+ }
+
+ try
+ {
+ IAudioClient_Start(iAudioClient);
+ started = true;
+
+ if ((eventHandle != 0) && (this.eventHandleCmd == null))
+ {
+ Runnable eventHandleCmd
+ = new Runnable()
+ {
+ public void run()
+ {
+ runInEventHandleCmd(this);
+ }
+ };
+ boolean submitted = false;
+
+ try
+ {
+ if (eventHandleExecutor == null)
+ {
+ eventHandleExecutor
+ = Executors.newSingleThreadExecutor();
+ }
+
+ this.eventHandleCmd = eventHandleCmd;
+ eventHandleExecutor.execute(eventHandleCmd);
+ submitted = true;
+ }
+ finally
+ {
+ if (!submitted
+ && eventHandleCmd.equals(this.eventHandleCmd))
+ this.eventHandleCmd = null;
+ }
+ }
+ }
+ catch (HResultException hre)
+ {
+ /*
+ * If IAudioClient_Start is invoked multiple times without
+ * intervening IAudioClient_Stop, it will likely return/throw
+ * AUDCLNT_E_NOT_STOPPED.
+ */
+ if (hre.getHResult() != AUDCLNT_E_NOT_STOPPED)
+ logger.error("IAudioClient_Start", hre);
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public synchronized void stop()
+ {
+ if (iAudioClient == 0)
+ {
+ /*
+ * We actually want to allow the user to switch the playback and/or
+ * notify device to none mid-stream in order to disable the
+ * playback.
+ */
+ if (locatorIsNull)
+ started = false;
+ }
+ else
+ {
+ waitWhileBusy();
+
+ try
+ {
+ /*
+ * If IAudioClient_Stop is invoked multiple times without
+ * intervening IAudioClient_Start, it is documented to return
+ * S_FALSE.
+ */
+ IAudioClient_Stop(iAudioClient);
+ started = false;
+
+ waitWhileEventHandleCmd();
+
+ writeIsMalfunctioningSince = DiagnosticsControl.NEVER;
+ }
+ catch (HResultException hre)
+ {
+ logger.error("IAudioClient_Stop", hre);
+ }
+ }
+ }
+
+ /**
+ * Gets a human-readable representation of a specific MediaLocator
+ * for the purposes of testing/debugging.
+ *
+ * @param locator the MediaLocator that is to be represented in a
+ * human-readable form for the purposes of testing/debugging
+ * @return a human-readable representation of the specified locator
+ * for the purposes of testing/debugging
+ */
+ private String toString(MediaLocator locator)
+ {
+ String s;
+
+ if (locator == null)
+ s = "null";
+ else
+ {
+ s = null;
+ /*
+             * Try not to throw any exceptions because the purpose is to produce
+ * at least some identification of the specified MediaLocator even
+ * if not the most complete.
+ */
+ try
+ {
+ String id = locator.getRemainder();
+
+ if (id != null)
+ {
+ CaptureDeviceInfo2 cdi2
+ = audioSystem.getDevice(dataFlow, locator);
+
+ if (cdi2 != null)
+ {
+ String name = cdi2.getName();
+
+ if ((name != null) && !id.equals(name))
+ s = id + " with friendly name " + name;
+ }
+ if (s == null)
+ s = id;
+ }
+ }
+ catch (Throwable t)
+ {
+ if (t instanceof InterruptedException)
+ Thread.currentThread().interrupt();
+ else if (t instanceof ThreadDeath)
+ throw (ThreadDeath) t;
+ }
+ if (s == null)
+ s = locator.toString();
+ }
+ return s;
+ }
+
+ /**
+ * Waits on this instance while the value of {@link #busy} is equal to
+ * true.
+ */
+ private synchronized void waitWhileBusy()
+ {
+ boolean interrupted = false;
+
+ while (busy)
+ {
+ try
+ {
+ wait(devicePeriod);
+ }
+ catch (InterruptedException ie)
+ {
+ interrupted = true;
+ }
+ }
+ if (interrupted)
+ Thread.currentThread().interrupt();
+ }
+
+ /**
+ * Waits on this instance while the value of {@link #eventHandleCmd} is
+ * non-null.
+ */
+ private synchronized void waitWhileEventHandleCmd()
+ {
+ if (eventHandle == 0)
+ throw new IllegalStateException("eventHandle");
+
+ boolean interrupted = false;
+
+ while (eventHandleCmd != null)
+ {
+ try
+ {
+ wait(devicePeriod);
+ }
+ catch (InterruptedException ie)
+ {
+ interrupted = true;
+ }
+ }
+ if (interrupted)
+ Thread.currentThread().interrupt();
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java
index 4bfb74879..303c7d72b 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRenderer.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,343 +13,343 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.renderer.video;
-
-import java.awt.*;
-
-import javax.media.*;
-import javax.media.format.*;
-import javax.media.renderer.*;
-import javax.swing.*;
-
-import org.jitsi.impl.neomedia.codec.video.*;
-import org.jitsi.impl.neomedia.jmfext.media.renderer.*;
-import org.jitsi.util.*;
-import org.jitsi.util.swing.*;
-
-/**
- * Video renderer using pure Java2D.
- *
- * @author Ingo Bauersachs
- */
-public class Java2DRenderer
- extends AbstractRenderer
- implements VideoRenderer
-{
- /**
- * The default, initial height and width to set on the Components
- * of JAWTRenderers before video frames with actual sizes are
- * processed. Introduced to mitigate multiple failures to realize the actual
- * video frame size and/or to properly scale the visual/video
- * Components.
- */
- private static final int DEFAULT_COMPONENT_HEIGHT_OR_WIDTH = 16;
-
- /**
- * The array of supported input formats.
- */
- private static final Format[] SUPPORTED_INPUT_FORMATS
- = new Format[]
- {
- OSUtils.IS_LINUX
- ? new YUVFormat(
- null /* size */,
- Format.NOT_SPECIFIED /* maxDataLength */,
- Format.intArray,
- Format.NOT_SPECIFIED /* frameRate */,
- YUVFormat.YUV_420,
- Format.NOT_SPECIFIED /* strideY */,
- Format.NOT_SPECIFIED /* strideUV */,
- Format.NOT_SPECIFIED /* offsetY */,
- Format.NOT_SPECIFIED /* offsetU */,
- Format.NOT_SPECIFIED /* offsetV */)
- : OSUtils.IS_ANDROID
- ? new RGBFormat(
- null,
- Format.NOT_SPECIFIED,
- Format.intArray,
- Format.NOT_SPECIFIED,
- 32,
- 0x000000ff, 0x0000ff00, 0x00ff0000)
- : new RGBFormat(
- null,
- Format.NOT_SPECIFIED,
- Format.intArray,
- Format.NOT_SPECIFIED,
- 32,
- 0x00ff0000, 0x0000ff00, 0x000000ff)
- };
-
- private Java2DRendererVideoComponent component;
-
- /**
- * The last known height of the input processed by this
- * JAWTRenderer.
- */
- private int height = 0;
-
- /**
- * The last known width of the input processed by this
- * JAWTRenderer.
- */
- private int width = 0;
-
- /**
- * The Runnable which is executed to bring the invocations of
- * {@link #reflectInputFormatOnComponent()} into the AWT event dispatching
- * thread.
- */
- private final Runnable reflectInputFormatOnComponentInEventDispatchThread
- = new Runnable()
- {
- @Override
- public void run()
- {
- reflectInputFormatOnComponentInEventDispatchThread();
- }
- };
-
- @Override
- public Format[] getSupportedInputFormats()
- {
- return SUPPORTED_INPUT_FORMATS.clone();
- }
-
- /**
- * Processes the data provided in a specific Buffer and renders it
- * to the output device represented by this Renderer.
- *
- * @param buffer a Buffer containing the data to be processed and
- * rendered
- * @return BUFFER_PROCESSED_OK if the processing is successful;
- * otherwise, the other possible return codes defined in the PlugIn
- * interface
- */
- @Override
- public synchronized int process(Buffer buffer)
- {
- if (buffer.isDiscard())
- {
- return BUFFER_PROCESSED_OK;
- }
-
- int bufferLength = buffer.getLength();
- if (bufferLength == 0)
- {
- return BUFFER_PROCESSED_OK;
- }
-
- Format format = buffer.getFormat();
- if (format != null
- && format != this.inputFormat
- && !format.equals(this.inputFormat)
- && setInputFormat(format) == null)
- {
- return BUFFER_PROCESSED_FAILED;
- }
-
- Dimension size = null;
- if (format != null)
- {
- size = ((VideoFormat) format).getSize();
- }
-
- if (size == null)
- {
- size = this.inputFormat.getSize();
- if (size == null)
- {
- return BUFFER_PROCESSED_FAILED;
- }
- }
-
- // XXX If the size of the video frame to be displayed is tiny enough
- // to crash sws_scale, then it may cause issues with other
- // functionality as well. Stay on the safe side.
- if (size.width >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH
- && size.height >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH)
- {
- getComponent().process(buffer, size);
- }
-
- return BUFFER_PROCESSED_OK;
- }
-
- @Override
- public void start()
- {
- }
-
- @Override
- public void stop()
- {
- }
-
- @Override
- public void close()
- {
- }
-
- @Override
- public String getName()
- {
- return "Pure Java Video Renderer";
- }
-
- @Override
- public void open() throws ResourceUnavailableException
- {
- }
-
- @Override
- public Rectangle getBounds()
- {
- return null;
- }
-
- @Override
- public Java2DRendererVideoComponent getComponent()
- {
- if (component == null)
- {
- component = new Java2DRendererVideoComponent();
-
- // Make sure to have non-zero height and width because actual video
- // frames may have not been processed yet.
- component.setSize(
- DEFAULT_COMPONENT_HEIGHT_OR_WIDTH,
- DEFAULT_COMPONENT_HEIGHT_OR_WIDTH);
- }
-
- return component;
- }
-
- @Override
- public void setBounds(Rectangle rect)
- {
- }
-
- @Override
- public boolean setComponent(Component comp)
- {
- return false;
- }
-
- /**
- * Sets the Format of the input to be processed by this
- * Renderer.
- *
- * @param format the Format to be set as the Format of the
- * input to be processed by this Renderer
- * @return the Format of the input to be processed by this
- * Renderer if the specified format is supported or
- * null if the specified format is not supported by this
- * Renderer. Typically, it is the supported input Format
- * which most closely matches the specified Format.
- */
- @Override
- public synchronized Format setInputFormat(Format format)
- {
- VideoFormat oldInputFormat = inputFormat;
- Format newInputFormat = super.setInputFormat(format);
-
- // Short-circuit because we will be calculating a lot and we do not want
- // to do that unless necessary.
- if (oldInputFormat == inputFormat)
- return newInputFormat;
-
- // Know the width and height of the input because we'll be depicting it
- // and we may want, for example, to report them as the preferred size of
- // our AWT Component. More importantly, know them because they determine
- // certain arguments to be passed to the native counterpart of this
- // JAWTRenderer i.e. handle.
- Dimension size = inputFormat.getSize();
-
- if (size == null)
- {
- width = height = 0;
- }
- else
- {
- width = size.width;
- height = size.height;
- }
-
- reflectInputFormatOnComponent();
-
- return newInputFormat;
- }
-
- /**
- * Sets properties of the AWT Component of this Renderer
- * which depend on the properties of the inputFormat of this
- * Renderer. Makes sure that the procedure is executed on the AWT
- * event dispatching thread because an AWT Component's properties
- * (such as preferredSize) should be accessed in the AWT event
- * dispatching thread.
- */
- private void reflectInputFormatOnComponent()
- {
- if (SwingUtilities.isEventDispatchThread())
- {
- reflectInputFormatOnComponentInEventDispatchThread();
- }
- else
- {
- SwingUtilities.invokeLater(
- reflectInputFormatOnComponentInEventDispatchThread);
- }
- }
-
- /**
- * Sets properties of the AWT Component of this Renderer
- * which depend on the properties of the inputFormat of this
- * Renderer. The invocation is presumed to be performed on the AWT
- * event dispatching thread.
- */
- private void reflectInputFormatOnComponentInEventDispatchThread()
- {
- // Reflect the width and height of the input onto the prefSize of our
- // AWT Component (if necessary).
- if ((component != null) && (width > 0) && (height > 0))
- {
- Dimension prefSize = component.getPreferredSize();
-
- // Apart from the simplest of cases in which the component has no
- // prefSize, it is also necessary to reflect the width and height of
- // the input onto the prefSize when the ratio of the input is
- // different than the ratio of the prefSize. It may also be argued
- // that the component needs to know of the width and height of the
- // input if its prefSize is with the same ratio but is smaller.
- if ((prefSize == null)
- || (prefSize.width < 1) || (prefSize.height < 1)
- || !VideoLayout.areAspectRatiosEqual(
- prefSize,
- width, height)
- || (prefSize.width < width) || (prefSize.height < height))
- {
- component.setPreferredSize(
- new Dimension(width, height));
- }
-
- // If the component does not have a size, it looks strange given
- // that we know a prefSize for it. However, if the component has
- // already been added into a Container, the Container will dictate
- // the size as part of its layout logic.
- if (component.isPreferredSizeSet()
- && (component.getParent() == null))
- {
- Dimension size = component.getSize();
-
- prefSize = component.getPreferredSize();
- if ((size.width < 1) || (size.height < 1)
- || !VideoLayout.areAspectRatiosEqual(
- size,
- prefSize.width, prefSize.height))
- {
- component.setSize(prefSize.width, prefSize.height);
- }
- }
- }
- }
-}
+package org.jitsi.impl.neomedia.jmfext.media.renderer.video;
+
+import java.awt.*;
+
+import javax.media.*;
+import javax.media.format.*;
+import javax.media.renderer.*;
+import javax.swing.*;
+
+import org.jitsi.impl.neomedia.codec.video.*;
+import org.jitsi.impl.neomedia.jmfext.media.renderer.*;
+import org.jitsi.util.*;
+import org.jitsi.util.swing.*;
+
+/**
+ * Video renderer using pure Java2D.
+ *
+ * @author Ingo Bauersachs
+ */
+public class Java2DRenderer
+ extends AbstractRenderer
+ implements VideoRenderer
+{
+ /**
+ * The default, initial height and width to set on the Components
+ * of JAWTRenderers before video frames with actual sizes are
+ * processed. Introduced to mitigate multiple failures to realize the actual
+ * video frame size and/or to properly scale the visual/video
+ * Components.
+ */
+ private static final int DEFAULT_COMPONENT_HEIGHT_OR_WIDTH = 16;
+
+ /**
+ * The array of supported input formats.
+ */
+ private static final Format[] SUPPORTED_INPUT_FORMATS
+ = new Format[]
+ {
+ OSUtils.IS_LINUX
+ ? new YUVFormat(
+ null /* size */,
+ Format.NOT_SPECIFIED /* maxDataLength */,
+ Format.intArray,
+ Format.NOT_SPECIFIED /* frameRate */,
+ YUVFormat.YUV_420,
+ Format.NOT_SPECIFIED /* strideY */,
+ Format.NOT_SPECIFIED /* strideUV */,
+ Format.NOT_SPECIFIED /* offsetY */,
+ Format.NOT_SPECIFIED /* offsetU */,
+ Format.NOT_SPECIFIED /* offsetV */)
+ : OSUtils.IS_ANDROID
+ ? new RGBFormat(
+ null,
+ Format.NOT_SPECIFIED,
+ Format.intArray,
+ Format.NOT_SPECIFIED,
+ 32,
+ 0x000000ff, 0x0000ff00, 0x00ff0000)
+ : new RGBFormat(
+ null,
+ Format.NOT_SPECIFIED,
+ Format.intArray,
+ Format.NOT_SPECIFIED,
+ 32,
+ 0x00ff0000, 0x0000ff00, 0x000000ff)
+ };
+
+ private Java2DRendererVideoComponent component;
+
+ /**
+ * The last known height of the input processed by this
+ * JAWTRenderer.
+ */
+ private int height = 0;
+
+ /**
+ * The last known width of the input processed by this
+ * JAWTRenderer.
+ */
+ private int width = 0;
+
+ /**
+ * The Runnable which is executed to bring the invocations of
+ * {@link #reflectInputFormatOnComponent()} into the AWT event dispatching
+ * thread.
+ */
+ private final Runnable reflectInputFormatOnComponentInEventDispatchThread
+ = new Runnable()
+ {
+ @Override
+ public void run()
+ {
+ reflectInputFormatOnComponentInEventDispatchThread();
+ }
+ };
+
+ @Override
+ public Format[] getSupportedInputFormats()
+ {
+ return SUPPORTED_INPUT_FORMATS.clone();
+ }
+
+ /**
+ * Processes the data provided in a specific Buffer and renders it
+ * to the output device represented by this Renderer.
+ *
+ * @param buffer a Buffer containing the data to be processed and
+ * rendered
+ * @return BUFFER_PROCESSED_OK if the processing is successful;
+ * otherwise, the other possible return codes defined in the PlugIn
+ * interface
+ */
+ @Override
+ public synchronized int process(Buffer buffer)
+ {
+ if (buffer.isDiscard())
+ {
+ return BUFFER_PROCESSED_OK;
+ }
+
+ int bufferLength = buffer.getLength();
+ if (bufferLength == 0)
+ {
+ return BUFFER_PROCESSED_OK;
+ }
+
+ Format format = buffer.getFormat();
+ if (format != null
+ && format != this.inputFormat
+ && !format.equals(this.inputFormat)
+ && setInputFormat(format) == null)
+ {
+ return BUFFER_PROCESSED_FAILED;
+ }
+
+ Dimension size = null;
+ if (format != null)
+ {
+ size = ((VideoFormat) format).getSize();
+ }
+
+ if (size == null)
+ {
+ size = this.inputFormat.getSize();
+ if (size == null)
+ {
+ return BUFFER_PROCESSED_FAILED;
+ }
+ }
+
+ // XXX If the size of the video frame to be displayed is tiny enough
+ // to crash sws_scale, then it may cause issues with other
+ // functionality as well. Stay on the safe side.
+ if (size.width >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH
+ && size.height >= SwScale.MIN_SWS_SCALE_HEIGHT_OR_WIDTH)
+ {
+ getComponent().process(buffer, size);
+ }
+
+ return BUFFER_PROCESSED_OK;
+ }
+
+ @Override
+ public void start()
+ {
+ }
+
+ @Override
+ public void stop()
+ {
+ }
+
+ @Override
+ public void close()
+ {
+ }
+
+ @Override
+ public String getName()
+ {
+ return "Pure Java Video Renderer";
+ }
+
+ @Override
+ public void open() throws ResourceUnavailableException
+ {
+ }
+
+ @Override
+ public Rectangle getBounds()
+ {
+ return null;
+ }
+
+ @Override
+ public Java2DRendererVideoComponent getComponent()
+ {
+ if (component == null)
+ {
+ component = new Java2DRendererVideoComponent();
+
+ // Make sure to have non-zero height and width because actual video
+ // frames may have not been processed yet.
+ component.setSize(
+ DEFAULT_COMPONENT_HEIGHT_OR_WIDTH,
+ DEFAULT_COMPONENT_HEIGHT_OR_WIDTH);
+ }
+
+ return component;
+ }
+
+ @Override
+ public void setBounds(Rectangle rect)
+ {
+ }
+
+ @Override
+ public boolean setComponent(Component comp)
+ {
+ return false;
+ }
+
+ /**
+ * Sets the Format of the input to be processed by this
+ * Renderer.
+ *
+ * @param format the Format to be set as the Format of the
+ * input to be processed by this Renderer
+ * @return the Format of the input to be processed by this
+ * Renderer if the specified format is supported or
+ * null if the specified format is not supported by this
+ * Renderer. Typically, it is the supported input Format
+ * which most closely matches the specified Format.
+ */
+ @Override
+ public synchronized Format setInputFormat(Format format)
+ {
+ VideoFormat oldInputFormat = inputFormat;
+ Format newInputFormat = super.setInputFormat(format);
+
+ // Short-circuit because we will be calculating a lot and we do not want
+ // to do that unless necessary.
+ if (oldInputFormat == inputFormat)
+ return newInputFormat;
+
+ // Know the width and height of the input because we'll be depicting it
+ // and we may want, for example, to report them as the preferred size of
+ // our AWT Component. More importantly, know them because they determine
+ // certain arguments to be passed to the native counterpart of this
+ // JAWTRenderer i.e. handle.
+ Dimension size = inputFormat.getSize();
+
+ if (size == null)
+ {
+ width = height = 0;
+ }
+ else
+ {
+ width = size.width;
+ height = size.height;
+ }
+
+ reflectInputFormatOnComponent();
+
+ return newInputFormat;
+ }
+
+ /**
+ * Sets properties of the AWT Component of this Renderer
+ * which depend on the properties of the inputFormat of this
+ * Renderer. Makes sure that the procedure is executed on the AWT
+ * event dispatching thread because an AWT Component's properties
+ * (such as preferredSize) should be accessed in the AWT event
+ * dispatching thread.
+ */
+ private void reflectInputFormatOnComponent()
+ {
+ if (SwingUtilities.isEventDispatchThread())
+ {
+ reflectInputFormatOnComponentInEventDispatchThread();
+ }
+ else
+ {
+ SwingUtilities.invokeLater(
+ reflectInputFormatOnComponentInEventDispatchThread);
+ }
+ }
+
+ /**
+ * Sets properties of the AWT Component of this Renderer
+ * which depend on the properties of the inputFormat of this
+ * Renderer. The invocation is presumed to be performed on the AWT
+ * event dispatching thread.
+ */
+ private void reflectInputFormatOnComponentInEventDispatchThread()
+ {
+ // Reflect the width and height of the input onto the prefSize of our
+ // AWT Component (if necessary).
+ if ((component != null) && (width > 0) && (height > 0))
+ {
+ Dimension prefSize = component.getPreferredSize();
+
+ // Apart from the simplest of cases in which the component has no
+ // prefSize, it is also necessary to reflect the width and height of
+ // the input onto the prefSize when the ratio of the input is
+ // different than the ratio of the prefSize. It may also be argued
+ // that the component needs to know of the width and height of the
+ // input if its prefSize is with the same ratio but is smaller.
+ if ((prefSize == null)
+ || (prefSize.width < 1) || (prefSize.height < 1)
+ || !VideoLayout.areAspectRatiosEqual(
+ prefSize,
+ width, height)
+ || (prefSize.width < width) || (prefSize.height < height))
+ {
+ component.setPreferredSize(
+ new Dimension(width, height));
+ }
+
+ // If the component does not have a size, it looks strange given
+ // that we know a prefSize for it. However, if the component has
+ // already been added into a Container, the Container will dictate
+ // the size as part of its layout logic.
+ if (component.isPreferredSizeSet()
+ && (component.getParent() == null))
+ {
+ Dimension size = component.getSize();
+
+ prefSize = component.getPreferredSize();
+ if ((size.width < 1) || (size.height < 1)
+ || !VideoLayout.areAspectRatiosEqual(
+ size,
+ prefSize.width, prefSize.height))
+ {
+ component.setSize(prefSize.width, prefSize.height);
+ }
+ }
+ }
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java
index 292c1582e..a47f6b6b9 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/video/Java2DRendererVideoComponent.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,63 +13,63 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.jmfext.media.renderer.video;
-
-import java.awt.*;
-import java.awt.image.*;
-
-import javax.media.Buffer;
-
-/**
- * Canvas that draws the video buffer using Java's built-in Graphics.
- *
- * @author Ingo Bauersachs
- */
-public class Java2DRendererVideoComponent
- extends Canvas
-{
- private static final long serialVersionUID = -3229966946026776451L;
- private MemoryImageSource mis;
- private Dimension size = new Dimension(0, 0);
-
- /**
- * Draws the current image as prepared by the
- * {@link #process(Buffer, Dimension)}
- *
- * @param g the graphics context to draw to.
- */
- @Override
- public void paint(Graphics g)
- {
- if (this.mis != null)
- {
- g.drawImage(this.createImage(mis), 0, 0,
- getWidth(), getHeight(), null);
- }
- }
-
- /**
- * Updates the image to be drawn on the graphics context.
- *
- * @param buffer the RAW image data.
- * @param size the dimension of the image in the buffer.
- */
- void process(Buffer buffer, Dimension size)
- {
- if (mis == null || !this.size.equals(size))
- {
- this.size = size;
- mis =
- new MemoryImageSource(size.width, size.height,
- (int[]) buffer.getData(), buffer.getOffset(), size.width);
- }
- else
- {
- mis.newPixels((int[]) buffer.getData(), ColorModel.getRGBdefault(),
- buffer.getOffset(), size.width);
- }
-
- this.repaint();
- }
-
-}
+package org.jitsi.impl.neomedia.jmfext.media.renderer.video;
+
+import java.awt.*;
+import java.awt.image.*;
+
+import javax.media.Buffer;
+
+/**
+ * Canvas that draws the video buffer using Java's built-in Graphics.
+ *
+ * @author Ingo Bauersachs
+ */
+public class Java2DRendererVideoComponent
+ extends Canvas
+{
+ private static final long serialVersionUID = -3229966946026776451L;
+ private MemoryImageSource mis;
+ private Dimension size = new Dimension(0, 0);
+
+ /**
+ * Draws the current image as prepared by the
+ * {@link #process(Buffer, Dimension)}
+ *
+ * @param g the graphics context to draw to.
+ */
+ @Override
+ public void paint(Graphics g)
+ {
+ if (this.mis != null)
+ {
+ g.drawImage(this.createImage(mis), 0, 0,
+ getWidth(), getHeight(), null);
+ }
+ }
+
+ /**
+ * Updates the image to be drawn on the graphics context.
+ *
+ * @param buffer the RAW image data.
+ * @param size the dimension of the image in the buffer.
+ */
+ void process(Buffer buffer, Dimension size)
+ {
+ if (mis == null || !this.size.equals(size))
+ {
+ this.size = size;
+ mis =
+ new MemoryImageSource(size.width, size.height,
+ (int[]) buffer.getData(), buffer.getOffset(), size.width);
+ }
+ else
+ {
+ mis.newPixels((int[]) buffer.getData(), ColorModel.getRGBdefault(),
+ buffer.getOffset(), size.width);
+ }
+
+ this.repaint();
+ }
+
+}
diff --git a/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java b/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java
index bb1850f92..55c4376a2 100644
--- a/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java
+++ b/src/org/jitsi/impl/neomedia/transform/PacketTransformer.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,41 +13,41 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.transform;
-
-import org.jitsi.impl.neomedia.*;
-
-/**
- * Encapsulate the concept of packet transformation. Given an array of packets,
- * PacketTransformer can either "transform" each one of them, or
- * "reverse transform" (e.g. restore) each one of them.
- *
- * @author Bing SU (nova.su@gmail.com)
- * @author Boris Grozev
- */
-public interface PacketTransformer
-{
- /**
- * Closes this PacketTransformer i.e. releases the resources
- * allocated by it and prepares it for garbage collection.
- */
- public void close();
-
- /**
- * Reverse-transforms each packet in an array of packets. Null values
- * must be ignored.
- *
- * @param pkts the transformed packets to be restored.
- * @return the restored packets.
- */
- public RawPacket[] reverseTransform(RawPacket[] pkts);
-
- /**
- * Transforms each packet in an array of packets. Null values must be
- * ignored.
- *
- * @param pkts the packets to be transformed
- * @return the transformed packets
- */
- public RawPacket[] transform(RawPacket[] pkts);
-}
+package org.jitsi.impl.neomedia.transform;
+
+import org.jitsi.impl.neomedia.*;
+
+/**
+ * Encapsulate the concept of packet transformation. Given an array of packets,
+ * PacketTransformer can either "transform" each one of them, or
+ * "reverse transform" (e.g. restore) each one of them.
+ *
+ * @author Bing SU (nova.su@gmail.com)
+ * @author Boris Grozev
+ */
+public interface PacketTransformer
+{
+ /**
+ * Closes this PacketTransformer i.e. releases the resources
+ * allocated by it and prepares it for garbage collection.
+ */
+ public void close();
+
+ /**
+ * Reverse-transforms each packet in an array of packets. Null values
+ * must be ignored.
+ *
+ * @param pkts the transformed packets to be restored.
+ * @return the restored packets.
+ */
+ public RawPacket[] reverseTransform(RawPacket[] pkts);
+
+ /**
+ * Transforms each packet in an array of packets. Null values must be
+ * ignored.
+ *
+ * @param pkts the packets to be transformed
+ * @return the transformed packets
+ */
+ public RawPacket[] transform(RawPacket[] pkts);
+}
diff --git a/src/org/jitsi/impl/neomedia/transform/TransformEngine.java b/src/org/jitsi/impl/neomedia/transform/TransformEngine.java
index a7ba33aef..f44a08ddf 100644
--- a/src/org/jitsi/impl/neomedia/transform/TransformEngine.java
+++ b/src/org/jitsi/impl/neomedia/transform/TransformEngine.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,28 +13,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.transform;
-
-/**
- * Defines how to get PacketTransformers for RTP and RTCP packets. A
- * single PacketTransformer can be used for both RTP and RTCP packets
- * or there can be two separate PacketTransformers.
- *
- * @author Bing SU (nova.su@gmail.com)
- */
-public interface TransformEngine
-{
- /**
- * Gets the PacketTransformer for RTP packets.
- *
- * @return the PacketTransformer for RTP packets
- */
- public PacketTransformer getRTPTransformer();
-
- /**
- * Gets the PacketTransformer for RTCP packets.
- *
- * @return the PacketTransformer for RTCP packets
- */
- public PacketTransformer getRTCPTransformer();
-}
+package org.jitsi.impl.neomedia.transform;
+
+/**
+ * Defines how to get PacketTransformers for RTP and RTCP packets. A
+ * single PacketTransformer can be used for both RTP and RTCP packets
+ * or there can be two separate PacketTransformers.
+ *
+ * @author Bing SU (nova.su@gmail.com)
+ */
+public interface TransformEngine
+{
+ /**
+ * Gets the PacketTransformer for RTP packets.
+ *
+ * @return the PacketTransformer for RTP packets
+ */
+ public PacketTransformer getRTPTransformer();
+
+ /**
+ * Gets the PacketTransformer for RTCP packets.
+ *
+ * @return the PacketTransformer for RTCP packets
+ */
+ public PacketTransformer getRTCPTransformer();
+}
diff --git a/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java b/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java
index b59d299c6..6ab86f7b8 100644
--- a/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java
+++ b/src/org/jitsi/impl/neomedia/transform/dtls/DatagramTransportImpl.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,573 +13,573 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.transform.dtls;
-
-import java.io.*;
-import java.util.*;
-import java.util.concurrent.*;
-
-import javax.media.rtp.*;
-
-import org.bouncycastle.crypto.tls.*;
-import org.ice4j.ice.*;
-import org.jitsi.impl.neomedia.*;
-import org.jitsi.impl.neomedia.codec.video.h264.*;
-import org.jitsi.util.*;
-
-/**
- * Implements {@link DatagramTransport} in order to integrate the Bouncy Castle
- * Crypto APIs in libjitsi for the purposes of implementing DTLS-SRTP.
- *
- * @author Lyubomir Marinov
- */
-public class DatagramTransportImpl
- implements DatagramTransport
-{
- /**
- * The Logger used by the DatagramTransportImpl class and
- * its instances to print debug information.
- */
- private static final Logger logger
- = Logger.getLogger(DatagramTransportImpl.class);
-
- /**
- * The ID of the component which this instance works for/is associated with.
- */
- private final int componentID;
-
- /**
- * The RTPConnector which represents and implements the actual
- * DatagramSocket adapted by this instance.
- */
- private AbstractRTPConnector connector;
-
- /**
- * The pool of RawPackets instances to reduce their allocations
- * and garbage collection.
- */
- private final Queue rawPacketPool
- = new LinkedBlockingQueue();
-
- /**
- * The queue of RawPackets which have been received from the
- * network are awaiting to be received by the application through this
- * DatagramTransport.
- */
- private final ArrayBlockingQueue receiveQ;
-
- /**
- * The capacity of {@link #receiveQ}.
- */
- private final int receiveQCapacity;
-
- /**
- * The byte buffer which represents a datagram to be sent. It may
- * consist of multiple DTLS records which are simple encoded consecutively.
- */
- private byte[] sendBuf;
-
- /**
- * The length in bytes of {@link #sendBuf} i.e. the number of
- * sendBuf elements which constitute actual DTLS records.
- */
- private int sendBufLength;
-
- /**
- * The Object that synchronizes the access to {@link #sendBuf},
- * {@link #sendBufLength}.
- */
- private final Object sendBufSyncRoot = new Object();
-
- /**
- * Initializes a new DatagramTransportImpl.
- *
- * @param componentID {@link Component#RTP} if the new instance is to work
- * on data/RTP packets or {@link Component#RTCP} if the new instance is to
- * work on control/RTCP packets
- */
- public DatagramTransportImpl(int componentID)
- {
- switch (componentID)
- {
- case Component.RTCP:
- case Component.RTP:
- this.componentID = componentID;
- break;
- default:
- throw new IllegalArgumentException("componentID");
- }
-
- receiveQCapacity = RTPConnectorOutputStream.PACKET_QUEUE_CAPACITY;
- receiveQ = new ArrayBlockingQueue<>(receiveQCapacity);
- }
-
- private AbstractRTPConnector assertNotClosed(
- boolean breakOutOfDTLSReliableHandshakeReceiveMessage)
- throws IOException
- {
- AbstractRTPConnector connector = this.connector;
-
- if (connector == null)
- {
- IOException ioe
- = new IOException(getClass().getName() + " is closed!");
-
- if (breakOutOfDTLSReliableHandshakeReceiveMessage)
- breakOutOfDTLSReliableHandshakeReceiveMessage(ioe);
- throw ioe;
- }
- else
- {
- return connector;
- }
- }
-
- /**
- * Works around a bug in the Bouncy Castle Crypto APIs which may cause
- * org.bouncycastle.crypto.tls.DTLSReliableHandshake.receiveMessage()
- * to enter an endless loop.
- *
- * @param cause the Throwable which would have been thrown if the
- * bug did not exist
- */
- private void breakOutOfDTLSReliableHandshakeReceiveMessage(Throwable cause)
- {
- for (StackTraceElement stackTraceElement : cause.getStackTrace())
- {
- if ("org.bouncycastle.crypto.tls.DTLSReliableHandshake".equals(
- stackTraceElement.getClassName())
- && "receiveMessage".equals(
- stackTraceElement.getMethodName()))
- {
- throw new IllegalStateException(cause);
- }
- }
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void close()
- throws IOException
- {
- setConnector(null);
- }
-
- private void doSend(byte[] buf, int off, int len)
- throws IOException
- {
- // Do preserve the sequence of sends.
- flush();
-
- AbstractRTPConnector connector = assertNotClosed(false);
- RTPConnectorOutputStream outputStream;
-
- switch (componentID)
- {
- case Component.RTCP:
- outputStream = connector.getControlOutputStream();
- break;
- case Component.RTP:
- outputStream = connector.getDataOutputStream();
- break;
- default:
- String msg = "componentID";
- IllegalStateException ise = new IllegalStateException(msg);
-
- logger.error(msg, ise);
- throw ise;
- }
-
- // Write synchronously in order to avoid our packet getting stuck in the
- // write queue (in case it is blocked waiting for DTLS to finish, for
- // example).
- outputStream.syncWrite(buf, off, len);
- }
-
- private void flush()
- throws IOException
- {
- assertNotClosed(false);
-
- byte[] buf;
- int len;
-
- synchronized (sendBufSyncRoot)
- {
- if ((sendBuf != null) && (sendBufLength != 0))
- {
- buf = sendBuf;
- sendBuf = null;
- len = sendBufLength;
- sendBufLength = 0;
- }
- else
- {
- buf = null;
- len = 0;
- }
- }
- if (buf != null)
- {
- doSend(buf, 0, len);
-
- // Attempt to reduce allocations and garbage collection.
- synchronized (sendBufSyncRoot)
- {
- if (sendBuf == null)
- sendBuf = buf;
- }
- }
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public int getReceiveLimit()
- throws IOException
- {
- AbstractRTPConnector connector = this.connector;
- int receiveLimit
- = (connector == null) ? -1 : connector.getReceiveBufferSize();
-
- if (receiveLimit <= 0)
- receiveLimit = RTPConnectorInputStream.PACKET_RECEIVE_BUFFER_LENGTH;
- return receiveLimit;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public int getSendLimit()
- throws IOException
- {
- AbstractRTPConnector connector = this.connector;
- int sendLimit
- = (connector == null) ? -1 : connector.getSendBufferSize();
-
- if (sendLimit <= 0)
- {
- /*
- * XXX The estimation bellow is wildly inaccurate and hardly related
- * but we have to start somewhere.
- */
- sendLimit
- = DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH
- + Packetizer.MAX_PAYLOAD_SIZE;
- }
- return sendLimit;
- }
-
- /**
- * Queues a packet received from the network to be received by the
- * application through this DatagramTransport.
- *
- * @param buf the array of bytes which contains the packet to be
- * queued
- * @param off the offset within buf at which the packet to be
- * queued starts
- * @param len the length within buf starting at off of the
- * packet to be queued
- */
- void queueReceive(byte[] buf, int off, int len)
- {
- if (len > 0)
- {
- synchronized (receiveQ)
- {
- try
- {
- assertNotClosed(false);
- }
- catch (IOException ioe)
- {
- throw new IllegalStateException(ioe);
- }
-
- RawPacket pkt = rawPacketPool.poll();
- byte[] pktBuf;
-
- if ((pkt == null) || ((pktBuf = pkt.getBuffer()).length < len))
- {
- pktBuf = new byte[len];
- pkt = new RawPacket(pktBuf, 0, len);
- }
- else
- {
- pktBuf = pkt.getBuffer();
- pkt.setLength(len);
- pkt.setOffset(0);
- }
- System.arraycopy(buf, off, pktBuf, 0, len);
-
- if (receiveQ.size() == receiveQCapacity)
- {
- RawPacket oldPkt = receiveQ.remove();
-
- rawPacketPool.offer(oldPkt);
- }
- receiveQ.add(pkt);
- receiveQ.notifyAll();
- }
- }
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public int receive(byte[] buf, int off, int len, int waitMillis)
- throws IOException
- {
- long enterTime = System.currentTimeMillis();
-
- /*
- * If this DatagramTransportImpl is to be received from, then what
- * is to be received may be a response to a request that was earlier
- * scheduled for send.
- */
- /*
- * XXX However, it may unnecessarily break up a flight into multiple
- * datagrams. Since we have implemented the recognition of the end of
- * flights, it should be fairly safe to rely on it alone.
- */
-// flush();
-
- /*
- * If no datagram is received at all and the specified waitMillis
- * expires, a negative value is to be returned in order to have the
- * outbound flight retransmitted.
- */
- int received = -1;
- boolean interrupted = false;
-
- while (received < len)
- {
- long timeout;
-
- if (waitMillis > 0)
- {
- timeout = waitMillis - System.currentTimeMillis() + enterTime;
- if (timeout == 0 /* wait forever */)
- timeout = -1 /* do not wait */;
- }
- else
- {
- timeout = waitMillis;
- }
-
- synchronized (receiveQ)
- {
- assertNotClosed(true);
-
- RawPacket pkt = receiveQ.peek();
-
- if (pkt != null)
- {
- /*
- * If a datagram has been received and even if it carries
- * no/zero bytes, a non-negative value is to be returned in
- * order to distinguish the case with that of no received
- * datagram. If the received bytes do not represent a DTLS
- * record, the record layer may still not retransmit the
- * outbound flight. But that should not be much of a concern
- * because we queue DTLS records into DatagramTransportImpl.
- */
- if (received < 0)
- received = 0;
-
- int toReceive = len - received;
- boolean toReceiveIsPositive = (toReceive > 0);
-
- if (toReceiveIsPositive)
- {
- int pktLength = pkt.getLength();
- int pktOffset = pkt.getOffset();
-
- if (toReceive > pktLength)
- {
- toReceive = pktLength;
- toReceiveIsPositive = (toReceive > 0);
- }
- if (toReceiveIsPositive)
- {
- System.arraycopy(
- pkt.getBuffer(), pktOffset,
- buf, off + received,
- toReceive);
- received += toReceive;
- }
- if (toReceive == pktLength)
- {
- receiveQ.remove();
- rawPacketPool.offer(pkt);
- }
- else
- {
- pkt.setLength(pktLength - toReceive);
- pkt.setOffset(pktOffset + toReceive);
- }
- if (toReceiveIsPositive)
- {
- /*
- * The specified buf has received toReceive bytes
- * and we do not concatenate RawPackets.
- */
- break;
- }
- }
- else
- {
- // The specified buf has received at least len bytes.
- break;
- }
- }
-
- if (receiveQ.isEmpty())
- {
- if (timeout >= 0)
- {
- try
- {
- receiveQ.wait(timeout);
- }
- catch (InterruptedException ie)
- {
- interrupted = true;
- }
- }
- else
- {
- // The specified waitMillis has been exceeded.
- break;
- }
- }
- }
- }
- if (interrupted)
- Thread.currentThread().interrupt();
-
- return received;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void send(byte[] buf, int off, int len)
- throws IOException
- {
- assertNotClosed(false);
-
- // If possible, construct a single datagram from multiple DTLS records.
- if (len >= DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH)
- {
- short type = TlsUtils.readUint8(buf, off);
- boolean endOfFlight = false;
-
- switch (type)
- {
- case ContentType.handshake:
- short msg_type = TlsUtils.readUint8(buf, off + 11);
-
- switch (msg_type)
- {
- case HandshakeType.certificate:
- case HandshakeType.certificate_request:
- case HandshakeType.certificate_verify:
- case HandshakeType.client_key_exchange:
- case HandshakeType.server_hello:
- case HandshakeType.server_key_exchange:
- case HandshakeType.session_ticket:
- case HandshakeType.supplemental_data:
- endOfFlight = false;
- break;
- case HandshakeType.client_hello:
- case HandshakeType.finished:
- case HandshakeType.hello_request:
- case HandshakeType.hello_verify_request:
- case HandshakeType.server_hello_done:
- default:
- endOfFlight = true;
- break;
- }
- // Do fall through!
- case ContentType.change_cipher_spec:
- synchronized (sendBufSyncRoot)
- {
- int newSendBufLength = sendBufLength + len;
- int sendLimit = getSendLimit();
-
- if (newSendBufLength <= sendLimit)
- {
- if (sendBuf == null)
- {
- sendBuf = new byte[sendLimit];
- sendBufLength = 0;
- }
- else if (sendBuf.length < sendLimit)
- {
- byte[] oldSendBuf = sendBuf;
-
- sendBuf = new byte[sendLimit];
- System.arraycopy(
- oldSendBuf, 0,
- sendBuf, 0,
- Math.min(sendBufLength, sendBuf.length));
- }
-
- System.arraycopy(buf, off, sendBuf, sendBufLength, len);
- sendBufLength = newSendBufLength;
-
- if (endOfFlight)
- flush();
- }
- else
- {
- if (endOfFlight)
- {
- doSend(buf, off, len);
- }
- else
- {
- flush();
- send(buf, off, len);
- }
- }
- }
- break;
-
- case ContentType.alert:
- case ContentType.application_data:
- default:
- doSend(buf, off, len);
- break;
- }
- }
- else
- {
- doSend(buf, off, len);
- }
- }
-
- /**
- * Sets the RTPConnector which represents and implements the actual
- * DatagramSocket to be adapted by this instance.
- *
- * @param connector the RTPConnector which represents and
- * implements the actual DatagramSocket to be adapted by this
- * instance
- */
- void setConnector(AbstractRTPConnector connector)
- {
- synchronized (receiveQ)
- {
- this.connector = connector;
- receiveQ.notifyAll();
- }
- }
-}
+package org.jitsi.impl.neomedia.transform.dtls;
+
+import java.io.*;
+import java.util.*;
+import java.util.concurrent.*;
+
+import javax.media.rtp.*;
+
+import org.bouncycastle.crypto.tls.*;
+import org.ice4j.ice.*;
+import org.jitsi.impl.neomedia.*;
+import org.jitsi.impl.neomedia.codec.video.h264.*;
+import org.jitsi.util.*;
+
+/**
+ * Implements {@link DatagramTransport} in order to integrate the Bouncy Castle
+ * Crypto APIs in libjitsi for the purposes of implementing DTLS-SRTP.
+ *
+ * @author Lyubomir Marinov
+ */
+public class DatagramTransportImpl
+ implements DatagramTransport
+{
+ /**
+ * The Logger used by the DatagramTransportImpl class and
+ * its instances to print debug information.
+ */
+ private static final Logger logger
+ = Logger.getLogger(DatagramTransportImpl.class);
+
+ /**
+ * The ID of the component which this instance works for/is associated with.
+ */
+ private final int componentID;
+
+ /**
+ * The RTPConnector which represents and implements the actual
+ * DatagramSocket adapted by this instance.
+ */
+ private AbstractRTPConnector connector;
+
+ /**
+ * The pool of RawPacket instances to reduce their allocations
+ * and garbage collection.
+ */
+ private final Queue rawPacketPool
+ = new LinkedBlockingQueue();
+
+ /**
+ * The queue of RawPackets which have been received from the
+ * network and are awaiting receipt by the application through this
+ * DatagramTransport.
+ */
+ private final ArrayBlockingQueue receiveQ;
+
+ /**
+ * The capacity of {@link #receiveQ}.
+ */
+ private final int receiveQCapacity;
+
+ /**
+ * The byte buffer which represents a datagram to be sent. It may
+ * consist of multiple DTLS records which are simply encoded consecutively.
+ */
+ private byte[] sendBuf;
+
+ /**
+ * The length in bytes of {@link #sendBuf} i.e. the number of
+ * sendBuf elements which constitute actual DTLS records.
+ */
+ private int sendBufLength;
+
+ /**
+ * The Object that synchronizes the access to {@link #sendBuf},
+ * {@link #sendBufLength}.
+ */
+ private final Object sendBufSyncRoot = new Object();
+
+ /**
+ * Initializes a new DatagramTransportImpl.
+ *
+ * @param componentID {@link Component#RTP} if the new instance is to work
+ * on data/RTP packets or {@link Component#RTCP} if the new instance is to
+ * work on control/RTCP packets
+ */
+ public DatagramTransportImpl(int componentID)
+ {
+ switch (componentID)
+ {
+ case Component.RTCP:
+ case Component.RTP:
+ this.componentID = componentID;
+ break;
+ default:
+ throw new IllegalArgumentException("componentID");
+ }
+
+ receiveQCapacity = RTPConnectorOutputStream.PACKET_QUEUE_CAPACITY;
+ receiveQ = new ArrayBlockingQueue<>(receiveQCapacity);
+ }
+
+ private AbstractRTPConnector assertNotClosed(
+ boolean breakOutOfDTLSReliableHandshakeReceiveMessage)
+ throws IOException
+ {
+ AbstractRTPConnector connector = this.connector;
+
+ if (connector == null)
+ {
+ IOException ioe
+ = new IOException(getClass().getName() + " is closed!");
+
+ if (breakOutOfDTLSReliableHandshakeReceiveMessage)
+ breakOutOfDTLSReliableHandshakeReceiveMessage(ioe);
+ throw ioe;
+ }
+ else
+ {
+ return connector;
+ }
+ }
+
+ /**
+ * Works around a bug in the Bouncy Castle Crypto APIs which may cause
+ * org.bouncycastle.crypto.tls.DTLSReliableHandshake.receiveMessage()
+ * to enter an endless loop.
+ *
+ * @param cause the Throwable which would have been thrown if the
+ * bug did not exist
+ */
+ private void breakOutOfDTLSReliableHandshakeReceiveMessage(Throwable cause)
+ {
+ for (StackTraceElement stackTraceElement : cause.getStackTrace())
+ {
+ if ("org.bouncycastle.crypto.tls.DTLSReliableHandshake".equals(
+ stackTraceElement.getClassName())
+ && "receiveMessage".equals(
+ stackTraceElement.getMethodName()))
+ {
+ throw new IllegalStateException(cause);
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void close()
+ throws IOException
+ {
+ setConnector(null);
+ }
+
+ private void doSend(byte[] buf, int off, int len)
+ throws IOException
+ {
+ // Do preserve the sequence of sends.
+ flush();
+
+ AbstractRTPConnector connector = assertNotClosed(false);
+ RTPConnectorOutputStream outputStream;
+
+ switch (componentID)
+ {
+ case Component.RTCP:
+ outputStream = connector.getControlOutputStream();
+ break;
+ case Component.RTP:
+ outputStream = connector.getDataOutputStream();
+ break;
+ default:
+ String msg = "componentID";
+ IllegalStateException ise = new IllegalStateException(msg);
+
+ logger.error(msg, ise);
+ throw ise;
+ }
+
+ // Write synchronously in order to avoid our packet getting stuck in the
+ // write queue (in case it is blocked waiting for DTLS to finish, for
+ // example).
+ outputStream.syncWrite(buf, off, len);
+ }
+
+ private void flush()
+ throws IOException
+ {
+ assertNotClosed(false);
+
+ byte[] buf;
+ int len;
+
+ synchronized (sendBufSyncRoot)
+ {
+ if ((sendBuf != null) && (sendBufLength != 0))
+ {
+ buf = sendBuf;
+ sendBuf = null;
+ len = sendBufLength;
+ sendBufLength = 0;
+ }
+ else
+ {
+ buf = null;
+ len = 0;
+ }
+ }
+ if (buf != null)
+ {
+ doSend(buf, 0, len);
+
+ // Attempt to reduce allocations and garbage collection.
+ synchronized (sendBufSyncRoot)
+ {
+ if (sendBuf == null)
+ sendBuf = buf;
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int getReceiveLimit()
+ throws IOException
+ {
+ AbstractRTPConnector connector = this.connector;
+ int receiveLimit
+ = (connector == null) ? -1 : connector.getReceiveBufferSize();
+
+ if (receiveLimit <= 0)
+ receiveLimit = RTPConnectorInputStream.PACKET_RECEIVE_BUFFER_LENGTH;
+ return receiveLimit;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int getSendLimit()
+ throws IOException
+ {
+ AbstractRTPConnector connector = this.connector;
+ int sendLimit
+ = (connector == null) ? -1 : connector.getSendBufferSize();
+
+ if (sendLimit <= 0)
+ {
+ /*
+ * XXX The estimation below is wildly inaccurate and hardly related
+ * but we have to start somewhere.
+ */
+ sendLimit
+ = DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH
+ + Packetizer.MAX_PAYLOAD_SIZE;
+ }
+ return sendLimit;
+ }
+
+ /**
+ * Queues a packet received from the network to be received by the
+ * application through this DatagramTransport.
+ *
+ * @param buf the array of bytes which contains the packet to be
+ * queued
+ * @param off the offset within buf at which the packet to be
+ * queued starts
+ * @param len the length within buf starting at off of the
+ * packet to be queued
+ */
+ void queueReceive(byte[] buf, int off, int len)
+ {
+ if (len > 0)
+ {
+ synchronized (receiveQ)
+ {
+ try
+ {
+ assertNotClosed(false);
+ }
+ catch (IOException ioe)
+ {
+ throw new IllegalStateException(ioe);
+ }
+
+ RawPacket pkt = rawPacketPool.poll();
+ byte[] pktBuf;
+
+ if ((pkt == null) || ((pktBuf = pkt.getBuffer()).length < len))
+ {
+ pktBuf = new byte[len];
+ pkt = new RawPacket(pktBuf, 0, len);
+ }
+ else
+ {
+ pktBuf = pkt.getBuffer();
+ pkt.setLength(len);
+ pkt.setOffset(0);
+ }
+ System.arraycopy(buf, off, pktBuf, 0, len);
+
+ if (receiveQ.size() == receiveQCapacity)
+ {
+ RawPacket oldPkt = receiveQ.remove();
+
+ rawPacketPool.offer(oldPkt);
+ }
+ receiveQ.add(pkt);
+ receiveQ.notifyAll();
+ }
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int receive(byte[] buf, int off, int len, int waitMillis)
+ throws IOException
+ {
+ long enterTime = System.currentTimeMillis();
+
+ /*
+ * If this DatagramTransportImpl is to be received from, then what
+ * is to be received may be a response to a request that was earlier
+ * scheduled for send.
+ */
+ /*
+ * XXX However, it may unnecessarily break up a flight into multiple
+ * datagrams. Since we have implemented the recognition of the end of
+ * flights, it should be fairly safe to rely on it alone.
+ */
+// flush();
+
+ /*
+ * If no datagram is received at all and the specified waitMillis
+ * expires, a negative value is to be returned in order to have the
+ * outbound flight retransmitted.
+ */
+ int received = -1;
+ boolean interrupted = false;
+
+ while (received < len)
+ {
+ long timeout;
+
+ if (waitMillis > 0)
+ {
+ timeout = waitMillis - System.currentTimeMillis() + enterTime;
+ if (timeout == 0 /* wait forever */)
+ timeout = -1 /* do not wait */;
+ }
+ else
+ {
+ timeout = waitMillis;
+ }
+
+ synchronized (receiveQ)
+ {
+ assertNotClosed(true);
+
+ RawPacket pkt = receiveQ.peek();
+
+ if (pkt != null)
+ {
+ /*
+ * If a datagram has been received and even if it carries
+ * no/zero bytes, a non-negative value is to be returned in
+ * order to distinguish the case with that of no received
+ * datagram. If the received bytes do not represent a DTLS
+ * record, the record layer may still not retransmit the
+ * outbound flight. But that should not be much of a concern
+ * because we queue DTLS records into DatagramTransportImpl.
+ */
+ if (received < 0)
+ received = 0;
+
+ int toReceive = len - received;
+ boolean toReceiveIsPositive = (toReceive > 0);
+
+ if (toReceiveIsPositive)
+ {
+ int pktLength = pkt.getLength();
+ int pktOffset = pkt.getOffset();
+
+ if (toReceive > pktLength)
+ {
+ toReceive = pktLength;
+ toReceiveIsPositive = (toReceive > 0);
+ }
+ if (toReceiveIsPositive)
+ {
+ System.arraycopy(
+ pkt.getBuffer(), pktOffset,
+ buf, off + received,
+ toReceive);
+ received += toReceive;
+ }
+ if (toReceive == pktLength)
+ {
+ receiveQ.remove();
+ rawPacketPool.offer(pkt);
+ }
+ else
+ {
+ pkt.setLength(pktLength - toReceive);
+ pkt.setOffset(pktOffset + toReceive);
+ }
+ if (toReceiveIsPositive)
+ {
+ /*
+ * The specified buf has received toReceive bytes
+ * and we do not concatenate RawPackets.
+ */
+ break;
+ }
+ }
+ else
+ {
+ // The specified buf has received at least len bytes.
+ break;
+ }
+ }
+
+ if (receiveQ.isEmpty())
+ {
+ if (timeout >= 0)
+ {
+ try
+ {
+ receiveQ.wait(timeout);
+ }
+ catch (InterruptedException ie)
+ {
+ interrupted = true;
+ }
+ }
+ else
+ {
+ // The specified waitMillis has been exceeded.
+ break;
+ }
+ }
+ }
+ }
+ if (interrupted)
+ Thread.currentThread().interrupt();
+
+ return received;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void send(byte[] buf, int off, int len)
+ throws IOException
+ {
+ assertNotClosed(false);
+
+ // If possible, construct a single datagram from multiple DTLS records.
+ if (len >= DtlsPacketTransformer.DTLS_RECORD_HEADER_LENGTH)
+ {
+ short type = TlsUtils.readUint8(buf, off);
+ boolean endOfFlight = false;
+
+ switch (type)
+ {
+ case ContentType.handshake:
+ short msg_type = TlsUtils.readUint8(buf, off + 11);
+
+ switch (msg_type)
+ {
+ case HandshakeType.certificate:
+ case HandshakeType.certificate_request:
+ case HandshakeType.certificate_verify:
+ case HandshakeType.client_key_exchange:
+ case HandshakeType.server_hello:
+ case HandshakeType.server_key_exchange:
+ case HandshakeType.session_ticket:
+ case HandshakeType.supplemental_data:
+ endOfFlight = false;
+ break;
+ case HandshakeType.client_hello:
+ case HandshakeType.finished:
+ case HandshakeType.hello_request:
+ case HandshakeType.hello_verify_request:
+ case HandshakeType.server_hello_done:
+ default:
+ endOfFlight = true;
+ break;
+ }
+ // Do fall through!
+ case ContentType.change_cipher_spec:
+ synchronized (sendBufSyncRoot)
+ {
+ int newSendBufLength = sendBufLength + len;
+ int sendLimit = getSendLimit();
+
+ if (newSendBufLength <= sendLimit)
+ {
+ if (sendBuf == null)
+ {
+ sendBuf = new byte[sendLimit];
+ sendBufLength = 0;
+ }
+ else if (sendBuf.length < sendLimit)
+ {
+ byte[] oldSendBuf = sendBuf;
+
+ sendBuf = new byte[sendLimit];
+ System.arraycopy(
+ oldSendBuf, 0,
+ sendBuf, 0,
+ Math.min(sendBufLength, sendBuf.length));
+ }
+
+ System.arraycopy(buf, off, sendBuf, sendBufLength, len);
+ sendBufLength = newSendBufLength;
+
+ if (endOfFlight)
+ flush();
+ }
+ else
+ {
+ if (endOfFlight)
+ {
+ doSend(buf, off, len);
+ }
+ else
+ {
+ flush();
+ send(buf, off, len);
+ }
+ }
+ }
+ break;
+
+ case ContentType.alert:
+ case ContentType.application_data:
+ default:
+ doSend(buf, off, len);
+ break;
+ }
+ }
+ else
+ {
+ doSend(buf, off, len);
+ }
+ }
+
+ /**
+ * Sets the RTPConnector which represents and implements the actual
+ * DatagramSocket to be adapted by this instance.
+ *
+ * @param connector the RTPConnector which represents and
+ * implements the actual DatagramSocket to be adapted by this
+ * instance
+ */
+ void setConnector(AbstractRTPConnector connector)
+ {
+ synchronized (receiveQ)
+ {
+ this.connector = connector;
+ receiveQ.notifyAll();
+ }
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java
index 339f9accc..e1721890b 100644
--- a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java
+++ b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPCipherCTR.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,110 +13,110 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.transform.srtp;
-
-import org.bouncycastle.crypto.*;
-
-/**
- * SRTPCipherCTR implements SRTP Counter Mode AES Encryption (AES-CM).
- * Counter Mode AES Encryption algorithm is defined in RFC3711, section 4.1.1.
- *
- * Other than Null Cipher, RFC3711 defined two two encryption algorithms:
- * Counter Mode AES Encryption and F8 Mode AES encryption. Both encryption
- * algorithms are capable to encrypt / decrypt arbitrary length data, and the
- * size of packet data is not required to be a multiple of the AES block
- * size (128bit). So, no padding is needed.
- *
- * Please note: these two encryption algorithms are specially defined by SRTP.
- * They are not common AES encryption modes, so you will not be able to find a
- * replacement implementation in common cryptographic libraries.
- *
- * As defined by RFC3711: Counter Mode Encryption is mandatory..
- *
- * mandatory to impl optional default
- * -------------------------------------------------------------------------
- * encryption AES-CM, NULL AES-f8 AES-CM
- * message integrity HMAC-SHA1 - HMAC-SHA1
- * key derivation (PRF) AES-CM - AES-CM
- *
- * We use AESCipher to handle basic AES encryption / decryption.
- *
- * @author Werner Dittmann (Werner.Dittmann@t-online.de)
- * @author Bing SU (nova.su@gmail.com)
- */
-public class SRTPCipherCTR
-{
- private static final int BLKLEN = 16;
- private static final int MAX_BUFFER_LENGTH = 10 * 1024;
-
- private final byte[] cipherInBlock = new byte[BLKLEN];
- private byte[] streamBuf = new byte[1024];
- private final byte[] tmpCipherBlock = new byte[BLKLEN];
-
- public SRTPCipherCTR()
- {
- }
-
- /**
- * Computes the cipher stream for AES CM mode. See section 4.1.1 in RFC3711
- * for detailed description.
- *
- * @param out byte array holding the output cipher stream
- * @param length length of the cipher stream to produce, in bytes
- * @param iv initialization vector used to generate this cipher stream
- */
- public void getCipherStream(
- BlockCipher aesCipher,
- byte[] out, int length,
- byte[] iv)
- {
- System.arraycopy(iv, 0, cipherInBlock, 0, 14);
-
- int ctr, ctrEnd;
-
- for (ctr = 0, ctrEnd = length / BLKLEN; ctr < ctrEnd; ctr++)
- {
- // compute the cipher stream
- cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8);
- cipherInBlock[15] = (byte) (ctr & 0x00FF);
-
- aesCipher.processBlock(cipherInBlock, 0, out, ctr * BLKLEN);
- }
-
- // Treat the last bytes:
- cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8);
- cipherInBlock[15] = (byte) ((ctr & 0x00FF));
-
- aesCipher.processBlock(cipherInBlock, 0, tmpCipherBlock, 0);
- System.arraycopy(tmpCipherBlock, 0, out, ctr * BLKLEN, length % BLKLEN);
- }
-
- public void process(
- BlockCipher cipher,
- byte[] data, int off, int len,
- byte[] iv)
- {
- if (off + len > data.length)
- return;
-
- // If data fits in inter buffer, use it. Otherwise, allocate bigger
- // buffer and store it (up to a defined maximum size) to use it for
- // later processing.
- byte[] cipherStream;
-
- if (len > streamBuf.length)
- {
- cipherStream = new byte[len];
- if (cipherStream.length <= MAX_BUFFER_LENGTH)
- streamBuf = cipherStream;
- }
- else
- {
- cipherStream = streamBuf;
- }
-
- getCipherStream(cipher, cipherStream, len, iv);
- for (int i = 0; i < len; i++)
- data[i + off] ^= cipherStream[i];
- }
-}
+package org.jitsi.impl.neomedia.transform.srtp;
+
+import org.bouncycastle.crypto.*;
+
+/**
+ * SRTPCipherCTR implements SRTP Counter Mode AES Encryption (AES-CM).
+ * Counter Mode AES Encryption algorithm is defined in RFC3711, section 4.1.1.
+ *
+ * Other than Null Cipher, RFC3711 defined two encryption algorithms:
+ * Counter Mode AES Encryption and F8 Mode AES encryption. Both encryption
+ * algorithms are capable of encrypting / decrypting arbitrary-length data, and the
+ * size of packet data is not required to be a multiple of the AES block
+ * size (128bit). So, no padding is needed.
+ *
+ * Please note: these two encryption algorithms are specially defined by SRTP.
+ * They are not common AES encryption modes, so you will not be able to find a
+ * replacement implementation in common cryptographic libraries.
+ *
+ * As defined by RFC3711: Counter Mode Encryption is mandatory.
+ *
+ * mandatory to impl optional default
+ * -------------------------------------------------------------------------
+ * encryption AES-CM, NULL AES-f8 AES-CM
+ * message integrity HMAC-SHA1 - HMAC-SHA1
+ * key derivation (PRF) AES-CM - AES-CM
+ *
+ * We use AESCipher to handle basic AES encryption / decryption.
+ *
+ * @author Werner Dittmann (Werner.Dittmann@t-online.de)
+ * @author Bing SU (nova.su@gmail.com)
+ */
+public class SRTPCipherCTR
+{
+ private static final int BLKLEN = 16;
+ private static final int MAX_BUFFER_LENGTH = 10 * 1024;
+
+ private final byte[] cipherInBlock = new byte[BLKLEN];
+ private byte[] streamBuf = new byte[1024];
+ private final byte[] tmpCipherBlock = new byte[BLKLEN];
+
+ public SRTPCipherCTR()
+ {
+ }
+
+ /**
+ * Computes the cipher stream for AES CM mode. See section 4.1.1 in RFC3711
+ * for detailed description.
+ *
+ * @param out byte array holding the output cipher stream
+ * @param length length of the cipher stream to produce, in bytes
+ * @param iv initialization vector used to generate this cipher stream
+ */
+ public void getCipherStream(
+ BlockCipher aesCipher,
+ byte[] out, int length,
+ byte[] iv)
+ {
+ System.arraycopy(iv, 0, cipherInBlock, 0, 14);
+
+ int ctr, ctrEnd;
+
+ for (ctr = 0, ctrEnd = length / BLKLEN; ctr < ctrEnd; ctr++)
+ {
+ // compute the cipher stream
+ cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8);
+ cipherInBlock[15] = (byte) (ctr & 0x00FF);
+
+ aesCipher.processBlock(cipherInBlock, 0, out, ctr * BLKLEN);
+ }
+
+ // Treat the last bytes:
+ cipherInBlock[14] = (byte) ((ctr & 0xFF00) >> 8);
+ cipherInBlock[15] = (byte) ((ctr & 0x00FF));
+
+ aesCipher.processBlock(cipherInBlock, 0, tmpCipherBlock, 0);
+ System.arraycopy(tmpCipherBlock, 0, out, ctr * BLKLEN, length % BLKLEN);
+ }
+
+ public void process(
+ BlockCipher cipher,
+ byte[] data, int off, int len,
+ byte[] iv)
+ {
+ if (off + len > data.length)
+ return;
+
+ // If data fits in the internal buffer, use it. Otherwise, allocate a bigger
+ // buffer and store it (up to a defined maximum size) to use it for
+ // later processing.
+ byte[] cipherStream;
+
+ if (len > streamBuf.length)
+ {
+ cipherStream = new byte[len];
+ if (cipherStream.length <= MAX_BUFFER_LENGTH)
+ streamBuf = cipherStream;
+ }
+ else
+ {
+ cipherStream = streamBuf;
+ }
+
+ getCipherStream(cipher, cipherStream, len, iv);
+ for (int i = 0; i < len; i++)
+ data[i + off] ^= cipherStream[i];
+ }
+}
diff --git a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java
index d32f4d93f..f632502bf 100644
--- a/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java
+++ b/src/org/jitsi/impl/neomedia/transform/srtp/SRTPContextFactory.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,97 +13,97 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.impl.neomedia.transform.srtp;
-
-/**
- * The SRTPContextFactory creates the initial crypto contexts for RTP
- * and RTCP encryption using the supplied key material.
- *
- * @author Bing SU (nova.su@gmail.com)
- */
-public class SRTPContextFactory
-{
- /**
- * The default SRTPCryptoContext, which will be used to derive other
- * contexts.
- */
- private SRTPCryptoContext defaultContext;
-
- /**
- * The default SRTPCryptoContext, which will be used to derive other
- * contexts.
- */
- private SRTCPCryptoContext defaultContextControl;
-
- /**
- * Construct a SRTPTransformEngine based on given master encryption key,
- * master salt key and SRTP/SRTCP policy.
- *
- * @param sender true if the new instance is to be used by an SRTP
- * sender; false if the new instance is to be used by an SRTP
- * receiver
- * @param masterKey the master encryption key
- * @param masterSalt the master salt key
- * @param srtpPolicy SRTP policy
- * @param srtcpPolicy SRTCP policy
- */
- public SRTPContextFactory(
- boolean sender,
- byte[] masterKey,
- byte[] masterSalt,
- SRTPPolicy srtpPolicy,
- SRTPPolicy srtcpPolicy)
- {
- defaultContext
- = new SRTPCryptoContext(
- sender,
- 0,
- 0,
- 0,
- masterKey,
- masterSalt,
- srtpPolicy);
- defaultContextControl
- = new SRTCPCryptoContext(0, masterKey, masterSalt, srtcpPolicy);
- }
-
- /**
- * Close the transformer engine.
- *
- * The close functions closes all stored default crypto contexts. This
- * deletes key data and forces a cleanup of the crypto contexts.
- */
- public void close()
- {
- if (defaultContext != null)
- {
- defaultContext.close();
- defaultContext = null;
- }
- if (defaultContextControl != null)
- {
- defaultContextControl.close();
- defaultContextControl = null;
- }
- }
-
- /**
- * Get the default SRTPCryptoContext
- *
- * @return the default SRTPCryptoContext
- */
- public SRTPCryptoContext getDefaultContext()
- {
- return defaultContext;
- }
-
- /**
- * Get the default SRTPCryptoContext
- *
- * @return the default SRTPCryptoContext
- */
- public SRTCPCryptoContext getDefaultContextControl()
- {
- return defaultContextControl;
- }
-}
+package org.jitsi.impl.neomedia.transform.srtp;
+
+/**
+ * The SRTPContextFactory creates the initial crypto contexts for RTP
+ * and RTCP encryption using the supplied key material.
+ *
+ * @author Bing SU (nova.su@gmail.com)
+ */
+public class SRTPContextFactory
+{
+ /**
+ * The default SRTPCryptoContext, which will be used to derive other
+ * contexts.
+ */
+ private SRTPCryptoContext defaultContext;
+
+ /**
+ * The default SRTPCryptoContext, which will be used to derive other
+ * contexts.
+ */
+ private SRTCPCryptoContext defaultContextControl;
+
+ /**
+ * Constructs an SRTPContextFactory based on the given master encryption key,
+ * master salt key and SRTP/SRTCP policy.
+ *
+ * @param sender true if the new instance is to be used by an SRTP
+ * sender; false if the new instance is to be used by an SRTP
+ * receiver
+ * @param masterKey the master encryption key
+ * @param masterSalt the master salt key
+ * @param srtpPolicy SRTP policy
+ * @param srtcpPolicy SRTCP policy
+ */
+ public SRTPContextFactory(
+ boolean sender,
+ byte[] masterKey,
+ byte[] masterSalt,
+ SRTPPolicy srtpPolicy,
+ SRTPPolicy srtcpPolicy)
+ {
+ defaultContext
+ = new SRTPCryptoContext(
+ sender,
+ 0,
+ 0,
+ 0,
+ masterKey,
+ masterSalt,
+ srtpPolicy);
+ defaultContextControl
+ = new SRTCPCryptoContext(0, masterKey, masterSalt, srtcpPolicy);
+ }
+
+ /**
+ * Close the transformer engine.
+ *
+ * The close function closes all stored default crypto contexts. This
+ * deletes key data and forces a cleanup of the crypto contexts.
+ */
+ public void close()
+ {
+ if (defaultContext != null)
+ {
+ defaultContext.close();
+ defaultContext = null;
+ }
+ if (defaultContextControl != null)
+ {
+ defaultContextControl.close();
+ defaultContextControl = null;
+ }
+ }
+
+ /**
+ * Get the default SRTPCryptoContext
+ *
+ * @return the default SRTPCryptoContext
+ */
+ public SRTPCryptoContext getDefaultContext()
+ {
+ return defaultContext;
+ }
+
+ /**
+ * Get the default SRTCPCryptoContext
+ *
+ * @return the default SRTCPCryptoContext
+ */
+ public SRTCPCryptoContext getDefaultContextControl()
+ {
+ return defaultContextControl;
+ }
+}
diff --git a/src/org/jitsi/service/libjitsi/LibJitsiActivator.java b/src/org/jitsi/service/libjitsi/LibJitsiActivator.java
index 61044660c..a8615135b 100644
--- a/src/org/jitsi/service/libjitsi/LibJitsiActivator.java
+++ b/src/org/jitsi/service/libjitsi/LibJitsiActivator.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,25 +13,25 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.service.libjitsi;
-
-import org.osgi.framework.*;
-
-/**
- * Activates libjitsi in an OSGi environment.
- */
-public class LibJitsiActivator
- implements BundleActivator
-{
- public void start(BundleContext bundleContext)
- throws Exception
- {
- LibJitsi.start(bundleContext);
- }
-
- public void stop(BundleContext bundleContext)
- throws Exception
- {
- LibJitsi.stop();
- }
-}
+package org.jitsi.service.libjitsi;
+
+import org.osgi.framework.*;
+
+/**
+ * Activates libjitsi in an OSGi environment.
+ */
+public class LibJitsiActivator
+ implements BundleActivator
+{
+ public void start(BundleContext bundleContext)
+ throws Exception
+ {
+ LibJitsi.start(bundleContext);
+ }
+
+ public void stop(BundleContext bundleContext)
+ throws Exception
+ {
+ LibJitsi.stop();
+ }
+}
diff --git a/src/org/jitsi/service/neomedia/SDesControl.java b/src/org/jitsi/service/neomedia/SDesControl.java
index 1615be02f..e39cf8ce1 100644
--- a/src/org/jitsi/service/neomedia/SDesControl.java
+++ b/src/org/jitsi/service/neomedia/SDesControl.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,81 +13,81 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.service.neomedia;
-
-import ch.imvs.sdes4j.srtp.*;
-
-/**
- * SDES based SRTP MediaStream encryption control.
- *
- * @author Ingo Bauersachs
- */
-public interface SDesControl
- extends SrtpControl
-{
- /**
- * Name of the config setting that supplies the default enabled cipher
- * suites. Cipher suites are comma-separated.
- */
- public static final String SDES_CIPHER_SUITES =
- "net.java.sip.communicator.service.neomedia.SDES_CIPHER_SUITES";
-
- /**
- * Gets the crypto attribute of the incoming MediaStream.
- *
- * @return the crypto attribute of the incoming MediaStream.
- */
- public SrtpCryptoAttribute getInAttribute();
-
- /**
- * Returns the crypto attributes enabled on this computer.
- *
- * @return The crypto attributes enabled on this computer.
- */
- public SrtpCryptoAttribute[] getInitiatorCryptoAttributes();
-
- /**
- * Gets the crypto attribute of the outgoing MediaStream.
- *
- * @return the crypto attribute of the outgoing MediaStream.
- */
- public SrtpCryptoAttribute getOutAttribute();
-
- /**
- * Gets all supported cipher suites.
- *
- * @return all supported cipher suites.
- */
- public Iterable getSupportedCryptoSuites();
-
- /**
- * Selects the local crypto attribute from the initial offering
- * ({@link #getInitiatorCryptoAttributes()}) based on the peer's first
- * matching cipher suite.
- *
- * @param peerAttributes The peer's crypto offers.
- * @return A SrtpCryptoAttribute when a matching cipher suite was found;
- * null, otherwise.
- */
- public SrtpCryptoAttribute initiatorSelectAttribute(
- Iterable peerAttributes);
-
- /**
- * Chooses a supported crypto attribute from the peer's list of supplied
- * attributes and creates the local crypto attribute. Used when the control
- * is running in the role as responder.
- *
- * @param peerAttributes The peer's crypto attribute offering.
- * @return The local crypto attribute for the answer of the offer or
- * null if no matching cipher suite could be found.
- */
- public SrtpCryptoAttribute responderSelectAttribute(
- Iterable peerAttributes);
-
- /**
- * Sets the enabled SDES ciphers.
- *
- * @param ciphers The list of enabled ciphers.
- */
- public void setEnabledCiphers(Iterable ciphers);
-}
+package org.jitsi.service.neomedia;
+
+import ch.imvs.sdes4j.srtp.*;
+
+/**
+ * SDES based SRTP MediaStream encryption control.
+ *
+ * @author Ingo Bauersachs
+ */
+public interface SDesControl
+ extends SrtpControl
+{
+ /**
+ * Name of the config setting that supplies the default enabled cipher
+ * suites. Cipher suites are comma-separated.
+ */
+ public static final String SDES_CIPHER_SUITES =
+ "net.java.sip.communicator.service.neomedia.SDES_CIPHER_SUITES";
+
+ /**
+ * Gets the crypto attribute of the incoming MediaStream.
+ *
+ * @return the crypto attribute of the incoming MediaStream.
+ */
+ public SrtpCryptoAttribute getInAttribute();
+
+ /**
+ * Returns the crypto attributes enabled on this computer.
+ *
+ * @return The crypto attributes enabled on this computer.
+ */
+ public SrtpCryptoAttribute[] getInitiatorCryptoAttributes();
+
+ /**
+ * Gets the crypto attribute of the outgoing MediaStream.
+ *
+ * @return the crypto attribute of the outgoing MediaStream.
+ */
+ public SrtpCryptoAttribute getOutAttribute();
+
+ /**
+ * Gets all supported cipher suites.
+ *
+ * @return all supported cipher suites.
+ */
+ public Iterable getSupportedCryptoSuites();
+
+ /**
+ * Selects the local crypto attribute from the initial offering
+ * ({@link #getInitiatorCryptoAttributes()}) based on the peer's first
+ * matching cipher suite.
+ *
+ * @param peerAttributes The peer's crypto offers.
+ * @return A SrtpCryptoAttribute when a matching cipher suite was found;
+ * null, otherwise.
+ */
+ public SrtpCryptoAttribute initiatorSelectAttribute(
+ Iterable peerAttributes);
+
+ /**
+ * Chooses a supported crypto attribute from the peer's list of supplied
+ * attributes and creates the local crypto attribute. Used when the control
+ * is running in the role as responder.
+ *
+ * @param peerAttributes The peer's crypto attribute offering.
+ * @return The local crypto attribute for the answer of the offer or
+ * null if no matching cipher suite could be found.
+ */
+ public SrtpCryptoAttribute responderSelectAttribute(
+ Iterable peerAttributes);
+
+ /**
+ * Sets the enabled SDES ciphers.
+ *
+ * @param ciphers The list of enabled ciphers.
+ */
+ public void setEnabledCiphers(Iterable ciphers);
+}
diff --git a/src/org/jitsi/service/neomedia/SrtpControlType.java b/src/org/jitsi/service/neomedia/SrtpControlType.java
index 4f535fb12..b6c1cd8c1 100644
--- a/src/org/jitsi/service/neomedia/SrtpControlType.java
+++ b/src/org/jitsi/service/neomedia/SrtpControlType.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,77 +13,77 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.service.neomedia;
-
-/**
- * The SrtpControlType enumeration contains all currently known
- * SrtpControl implementations.
- *
- * @author Ingo Bauersachs
- * @author Lyubomir Marinov
- */
-public enum SrtpControlType
-{
- /**
- * Datagram Transport Layer Security (DTLS) Extension to Establish Keys for
- * the Secure Real-time Transport Protocol (SRTP)
- */
- DTLS_SRTP("DTLS-SRTP"),
-
- /**
- * Multimedia Internet KEYing (RFC 3830)
- */
- MIKEY("MIKEY"),
-
- /**
- * Session Description Protocol (SDP) Security Descriptions for Media
- * Streams (RFC 4568)
- */
- SDES("SDES"),
-
- /**
- * ZRTP: Media Path Key Agreement for Unicast Secure RTP (RFC 6189)
- */
- ZRTP("ZRTP");
-
- /**
- * The human-readable non-localized name of the (S)RTP transport protocol
- * represented by this SrtpControlType and its respective
- * SrtpControl class.
- */
- private final String protoName;
-
- /**
- * Initializes a new SrtpControlType instance with a specific
- * human-readable non-localized (S)RTP transport protocol name.
- *
- * @param protoName the human-readable non-localized name of the (S)RTP
- * transport protocol represented by the new instance and its respective
- * SrtpControl class
- */
- private SrtpControlType(String protoName)
- {
- this.protoName = protoName;
- }
-
- @Override
- public String toString()
- {
- return protoName;
- }
-
- /**
- * @see SrtpControlType#valueOf(String)
- */
- public static SrtpControlType fromString(String protoName)
- {
- if (protoName.equals(SrtpControlType.DTLS_SRTP.toString()))
- {
- return SrtpControlType.DTLS_SRTP;
- }
- else
- {
- return SrtpControlType.valueOf(protoName);
- }
- }
-}
+package org.jitsi.service.neomedia;
+
+/**
+ * The SrtpControlType enumeration contains all currently known
+ * SrtpControl implementations.
+ *
+ * @author Ingo Bauersachs
+ * @author Lyubomir Marinov
+ */
+public enum SrtpControlType
+{
+ /**
+ * Datagram Transport Layer Security (DTLS) Extension to Establish Keys for
+ * the Secure Real-time Transport Protocol (SRTP)
+ */
+ DTLS_SRTP("DTLS-SRTP"),
+
+ /**
+ * Multimedia Internet KEYing (RFC 3830)
+ */
+ MIKEY("MIKEY"),
+
+ /**
+ * Session Description Protocol (SDP) Security Descriptions for Media
+ * Streams (RFC 4568)
+ */
+ SDES("SDES"),
+
+ /**
+ * ZRTP: Media Path Key Agreement for Unicast Secure RTP (RFC 6189)
+ */
+ ZRTP("ZRTP");
+
+ /**
+ * The human-readable non-localized name of the (S)RTP transport protocol
+ * represented by this SrtpControlType and its respective
+ * SrtpControl class.
+ */
+ private final String protoName;
+
+ /**
+ * Initializes a new SrtpControlType instance with a specific
+ * human-readable non-localized (S)RTP transport protocol name.
+ *
+ * @param protoName the human-readable non-localized name of the (S)RTP
+ * transport protocol represented by the new instance and its respective
+ * SrtpControl class
+ */
+ private SrtpControlType(String protoName)
+ {
+ this.protoName = protoName;
+ }
+
+ @Override
+ public String toString()
+ {
+ return protoName;
+ }
+
+ /**
+ * @see SrtpControlType#valueOf(String)
+ */
+ public static SrtpControlType fromString(String protoName)
+ {
+ if (protoName.equals(SrtpControlType.DTLS_SRTP.toString()))
+ {
+ return SrtpControlType.DTLS_SRTP;
+ }
+ else
+ {
+ return SrtpControlType.valueOf(protoName);
+ }
+ }
+}
diff --git a/src/org/jitsi/service/neomedia/control/KeyFrameControl.java b/src/org/jitsi/service/neomedia/control/KeyFrameControl.java
index cb44d563a..a544cd0d3 100644
--- a/src/org/jitsi/service/neomedia/control/KeyFrameControl.java
+++ b/src/org/jitsi/service/neomedia/control/KeyFrameControl.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,179 +13,179 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.service.neomedia.control;
-
-import java.util.*;
-
-/**
- * Represents a control over the key frame-related logic of a
- * VideoMediaStream.
- *
- * @author Lyubomir Marinov
- */
-public interface KeyFrameControl
-{
- /**
- * Adds a KeyFrameRequestee to be made available through this
- * KeyFrameControl.
- *
- * @param index the zero-based index at which keyFrameRequestee is
- * to be added to the list of KeyFrameRequestees made available or
- * -1 to have this KeyFrameControl choose at which index
- * it is to be added in accord with its internal logic
- * through this KeyFrameControl
- * @param keyFrameRequestee the KeyFrameRequestee to be added to
- * this KeyFrameControl so that it is made available through it
- */
- public void addKeyFrameRequestee(
- int index,
- KeyFrameRequestee keyFrameRequestee);
-
- /**
- * Adds a KeyFrameRequester to be made available through this
- * KeyFrameControl.
- *
- * @param index the zero-based index at which keyFrameRequester is
- * to be added to the list of KeyFrameRequesters made available or
- * -1 to have this KeyFrameControl choose at which index
- * it is to be added in accord with its internal logic
- * through this KeyFrameControl
- * @param keyFrameRequester the KeyFrameRequester to be added to
- * this KeyFrameControl so that it is made available through it
- */
- public void addKeyFrameRequester(
- int index,
- KeyFrameRequester keyFrameRequester);
-
- /**
- * Gets the KeyFrameRequestees made available through this
- * KeyFrameControl.
- *
- * @return an unmodifiable list of KeyFrameRequestees made
- * available through this KeyFrameControl
- */
- public List getKeyFrameRequestees();
-
- /**
- * Gets the KeyFrameRequesters made available through this
- * KeyFrameControl.
- *
- * @return an unmodifiable list of KeyFrameRequesters made
- * available through this KeyFrameControl
- */
- public List getKeyFrameRequesters();
-
- /**
- * Notifies this KeyFrameControl that the remote peer of the
- * associated VideoMediaStream has requested a key frame from the
- * local peer.
- *
- * @return true if the local peer has honored the request from the
- * remote peer for a key frame; otherwise, false
- */
- public boolean keyFrameRequest();
-
- /**
- * Removes a KeyFrameRequestee to no longer be made available
- * through this KeyFrameControl.
- *
- * @param keyFrameRequestee the KeyFrameRequestee to be removed
- * from this KeyFrameControl so that it is no longer made available
- * through it
- * @return true if keyFrameRequestee was found in this
- * KeyFrameControl; otherwise, false
- */
- public boolean removeKeyFrameRequestee(KeyFrameRequestee keyFrameRequestee);
-
- /**
- * Removes a KeyFrameRequester to no longer be made available
- * through this KeyFrameControl.
- *
- * @param keyFrameRequester the KeyFrameRequester to be removed
- * from this KeyFrameControl so that it is no longer made available
- * through it
- * @return true if keyFrameRequester was found in this
- * KeyFrameControl; otherwise, false
- */
- public boolean removeKeyFrameRequester(KeyFrameRequester keyFrameRequester);
-
- /**
- * Requests a key frame from the remote peer of the associated
- * VideoMediaStream.
- *
- * @param urgent true if the caller has determined that the need
- * for a key frame is urgent and should not obey all constraints with
- * respect to time between two subsequent requests for key frames
- * @return true if a key frame was indeed requested from the remote
- * peer of the associated VideoMediaStream in response to the call;
- * otherwise, false
- */
- public boolean requestKeyFrame(boolean urgent);
-
- /**
- * Represents a way for the remote peer of a VideoMediaStream to
- * request a key frame from its local peer.
- *
- * @author Lyubomir Marinov
- */
- public interface KeyFrameRequestee
- {
- /**
- * Notifies this KeyFrameRequestee that the remote peer of the
- * associated VideoMediaStream requests a key frame from the
- * local peer.
- *
- * @return true if this KeyFrameRequestee has honored
- * the request for a key frame; otherwise, false
- */
- public boolean keyFrameRequest();
- }
-
- /**
- * Represents a way for a VideoMediaStream to request a key frame
- * from its remote peer.
- *
- * @author Lyubomir Marinov
- */
- public interface KeyFrameRequester
- {
- /**
- * The name of the ConfigurationService property which
- * specifies the preferred KeyFrameRequester to be used.
- */
- public static final String PREFERRED_PNAME
- = "net.java.sip.communicator.impl.neomedia.codec.video.h264."
- + "preferredKeyFrameRequester";
-
- /**
- * The value of the {@link #PREFERRED_PNAME}
- * ConfigurationService property which indicates that the
- * RTCP KeyFrameRequester is preferred.
- */
- public static final String RTCP = "rtcp";
-
- /**
- * The value of the {@link #PREFERRED_PNAME}
- * ConfigurationService property which indicates that the
- * signaling/protocol KeyFrameRequester is preferred.
- */
- public static final String SIGNALING = "signaling";
-
- /**
- * The default value of the {@link #PREFERRED_PNAME}
- * ConfigurationService property.
- */
- public static final String DEFAULT_PREFERRED = RTCP;
-
- /**
- * Requests a key frame from the remote peer of the associated
- * VideoMediaStream.
- *
- * @return true if this KeyFrameRequester has
- * indeed requested a key frame from the remote peer of the associated
- * VideoMediaStream in response to the call; otherwise,
- * false
- */
- public boolean requestKeyFrame();
- }
-}
+package org.jitsi.service.neomedia.control;
+
+import java.util.*;
+
+/**
+ * Represents a control over the key frame-related logic of a
+ * VideoMediaStream.
+ *
+ * @author Lyubomir Marinov
+ */
+public interface KeyFrameControl
+{
+ /**
+ * Adds a KeyFrameRequestee to be made available through this
+ * KeyFrameControl.
+ *
+ * @param index the zero-based index at which keyFrameRequestee is
+ * to be added to the list of KeyFrameRequestees made available or
+ * -1 to have this KeyFrameControl choose at which index
+ * it is to be added in accord with its internal logic
+ * through this KeyFrameControl
+ * @param keyFrameRequestee the KeyFrameRequestee to be added to
+ * this KeyFrameControl so that it is made available through it
+ */
+ public void addKeyFrameRequestee(
+ int index,
+ KeyFrameRequestee keyFrameRequestee);
+
+ /**
+ * Adds a KeyFrameRequester to be made available through this
+ * KeyFrameControl.
+ *
+ * @param index the zero-based index at which keyFrameRequester is
+ * to be added to the list of KeyFrameRequesters made available or
+ * -1 to have this KeyFrameControl choose at which index
+ * it is to be added in accord with its internal logic
+ * through this KeyFrameControl
+ * @param keyFrameRequester the KeyFrameRequester to be added to
+ * this KeyFrameControl so that it is made available through it
+ */
+ public void addKeyFrameRequester(
+ int index,
+ KeyFrameRequester keyFrameRequester);
+
+ /**
+ * Gets the KeyFrameRequestees made available through this
+ * KeyFrameControl.
+ *
+ * @return an unmodifiable list of KeyFrameRequestees made
+ * available through this KeyFrameControl
+ */
+ public List getKeyFrameRequestees();
+
+ /**
+ * Gets the KeyFrameRequesters made available through this
+ * KeyFrameControl.
+ *
+ * @return an unmodifiable list of KeyFrameRequesters made
+ * available through this KeyFrameControl
+ */
+ public List getKeyFrameRequesters();
+
+ /**
+ * Notifies this KeyFrameControl that the remote peer of the
+ * associated VideoMediaStream has requested a key frame from the
+ * local peer.
+ *
+ * @return true if the local peer has honored the request from the
+ * remote peer for a key frame; otherwise, false
+ */
+ public boolean keyFrameRequest();
+
+ /**
+ * Removes a KeyFrameRequestee to no longer be made available
+ * through this KeyFrameControl.
+ *
+ * @param keyFrameRequestee the KeyFrameRequestee to be removed
+ * from this KeyFrameControl so that it is no longer made available
+ * through it
+ * @return true if keyFrameRequestee was found in this
+ * KeyFrameControl; otherwise, false
+ */
+ public boolean removeKeyFrameRequestee(KeyFrameRequestee keyFrameRequestee);
+
+ /**
+ * Removes a KeyFrameRequester to no longer be made available
+ * through this KeyFrameControl.
+ *
+ * @param keyFrameRequester the KeyFrameRequester to be removed
+ * from this KeyFrameControl so that it is no longer made available
+ * through it
+ * @return true if keyFrameRequester was found in this
+ * KeyFrameControl; otherwise, false
+ */
+ public boolean removeKeyFrameRequester(KeyFrameRequester keyFrameRequester);
+
+ /**
+ * Requests a key frame from the remote peer of the associated
+ * VideoMediaStream.
+ *
+ * @param urgent true if the caller has determined that the need
+ * for a key frame is urgent and should not obey all constraints with
+ * respect to time between two subsequent requests for key frames
+ * @return true if a key frame was indeed requested from the remote
+ * peer of the associated VideoMediaStream in response to the call;
+ * otherwise, false
+ */
+ public boolean requestKeyFrame(boolean urgent);
+
+ /**
+ * Represents a way for the remote peer of a VideoMediaStream to
+ * request a key frame from its local peer.
+ *
+ * @author Lyubomir Marinov
+ */
+ public interface KeyFrameRequestee
+ {
+ /**
+ * Notifies this KeyFrameRequestee that the remote peer of the
+ * associated VideoMediaStream requests a key frame from the
+ * local peer.
+ *
+ * @return true if this KeyFrameRequestee has honored
+ * the request for a key frame; otherwise, false
+ */
+ public boolean keyFrameRequest();
+ }
+
+ /**
+ * Represents a way for a VideoMediaStream to request a key frame
+ * from its remote peer.
+ *
+ * @author Lyubomir Marinov
+ */
+ public interface KeyFrameRequester
+ {
+ /**
+ * The name of the ConfigurationService property which
+ * specifies the preferred KeyFrameRequester to be used.
+ */
+ public static final String PREFERRED_PNAME
+ = "net.java.sip.communicator.impl.neomedia.codec.video.h264."
+ + "preferredKeyFrameRequester";
+
+ /**
+ * The value of the {@link #PREFERRED_PNAME}
+ * ConfigurationService property which indicates that the
+ * RTCP KeyFrameRequester is preferred.
+ */
+ public static final String RTCP = "rtcp";
+
+ /**
+ * The value of the {@link #PREFERRED_PNAME}
+ * ConfigurationService property which indicates that the
+ * signaling/protocol KeyFrameRequester is preferred.
+ */
+ public static final String SIGNALING = "signaling";
+
+ /**
+ * The default value of the {@link #PREFERRED_PNAME}
+ * ConfigurationService property.
+ */
+ public static final String DEFAULT_PREFERRED = RTCP;
+
+ /**
+ * Requests a key frame from the remote peer of the associated
+ * VideoMediaStream.
+ *
+ * @return true if this KeyFrameRequester has
+ * indeed requested a key frame from the remote peer of the associated
+ * VideoMediaStream in response to the call; otherwise,
+ * false
+ */
+ public boolean requestKeyFrame();
+ }
+}
diff --git a/src/org/jitsi/util/event/PropertyChangeNotifier.java b/src/org/jitsi/util/event/PropertyChangeNotifier.java
index 8a66764b9..fc0e5bf0c 100644
--- a/src/org/jitsi/util/event/PropertyChangeNotifier.java
+++ b/src/org/jitsi/util/event/PropertyChangeNotifier.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,185 +13,185 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.util.event;
-
-import java.beans.*;
-import java.util.*;
-
-import org.jitsi.util.*;
-
-/**
- * Represents a source of PropertyChangeEvents which notifies
- * PropertyChangeListeners about changes in the values of properties.
- *
- * @author Lyubomir Marinov
- */
-public class PropertyChangeNotifier
-{
- /**
- * The Logger used by the PropertyChangeNotifier class and
- * its instances for logging output.
- */
- private static final Logger logger
- = Logger.getLogger(PropertyChangeNotifier.class);
-
- /**
- * The list of PropertyChangeListeners interested in and notified
- * about changes in the values of the properties of this
- * PropertyChangeNotifier.
- */
- private final List listeners
- = new ArrayList();
-
- /**
- * Initializes a new PropertyChangeNotifier instance.
- */
- public PropertyChangeNotifier()
- {
- }
-
- /**
- * Adds a specific PropertyChangeListener to the list of listeners
- * interested in and notified about changes in the values of the properties
- * of this PropertyChangeNotifier.
- *
- * @param listener a PropertyChangeListener to be notified about
- * changes in the values of the properties of this
- * PropertyChangeNotifier. If the specified listener is already in
- * the list of interested listeners (i.e. it has been previously added), it
- * is not added again.
- */
- public void addPropertyChangeListener(PropertyChangeListener listener)
- {
- if (listener == null)
- {
- if (logger.isDebugEnabled())
- {
- logger.debug(
- "The specified argument listener is null"
- + " and that does not make sense.");
- }
- }
- else
- {
- synchronized (listeners)
- {
- if (!listeners.contains(listener))
- listeners.add(listener);
- }
- }
- }
-
- /**
- * Fires a new PropertyChangeEvent to the
- * PropertyChangeListeners registered with this
- * PropertyChangeNotifier in order to notify about a change in the
- * value of a specific property which had its old value modified to a
- * specific new value. PropertyChangeNotifier does not check
- * whether the specified oldValue and newValue are indeed
- * different.
- *
- * @param property the name of the property of this
- * PropertyChangeNotifier which had its value changed
- * @param oldValue the value of the property with the specified name before
- * the change
- * @param newValue the value of the property with the specified name after
- * the change
- */
- protected void firePropertyChange(
- String property,
- Object oldValue, Object newValue)
- {
- PropertyChangeListener[] ls;
-
- synchronized (listeners)
- {
- ls
- = listeners.toArray(
- new PropertyChangeListener[listeners.size()]);
- }
-
- if (ls.length != 0)
- {
- PropertyChangeEvent ev
- = new PropertyChangeEvent(
- getPropertyChangeSource(property, oldValue, newValue),
- property,
- oldValue, newValue);
-
- for (PropertyChangeListener l : ls)
- {
- try
- {
- l.propertyChange(ev);
- }
- catch (Throwable t)
- {
- if (t instanceof InterruptedException)
- {
- Thread.currentThread().interrupt();
- }
- else if (t instanceof ThreadDeath)
- {
- throw (ThreadDeath) t;
- }
- else
- {
- logger.warn(
- "A PropertyChangeListener threw an exception"
- + " while handling a PropertyChangeEvent.",
- t);
- }
- }
- }
- }
- }
-
- /**
- * Gets the Object to be reported as the source of a new
- * PropertyChangeEvent which is to notify the
- * PropertyChangeListeners registered with this
- * PropertyChangeNotifier about the change in the value of a
- * property with a specific name from a specific old value to a specific new
- * value.
- *
- * @param property the name of the property which had its value changed from
- * the specified old value to the specified new value
- * @param oldValue the value of the property with the specified name before
- * the change
- * @param newValue the value of the property with the specified name after
- * the change
- * @return the Object to be reported as the source of the new
- * PropertyChangeEvent which is to notify the
- * PropertyChangeListeners registered with this
- * PropertyChangeNotifier about the change in the value of the
- * property with the specified name from the specified old value to the
- * specified new value
- */
- protected Object getPropertyChangeSource(
- String property,
- Object oldValue, Object newValue)
- {
- return this;
- }
-
- /**
- * Removes a specific PropertyChangeListener from the list of
- * listeners interested in and notified about changes in the values of the
- * properties of this PropertyChangeNotifer.
- *
- * @param listener a PropertyChangeListener to no longer be
- * notified about changes in the values of the properties of this
- * PropertyChangeNotifier
- */
- public void removePropertyChangeListener(PropertyChangeListener listener)
- {
- if (listener != null)
- {
- synchronized (listeners)
- {
- listeners.remove(listener);
- }
- }
- }
-}
+package org.jitsi.util.event;
+
+import java.beans.*;
+import java.util.*;
+
+import org.jitsi.util.*;
+
+/**
+ * Represents a source of PropertyChangeEvents which notifies
+ * PropertyChangeListeners about changes in the values of properties.
+ *
+ * @author Lyubomir Marinov
+ */
+public class PropertyChangeNotifier
+{
+ /**
+ * The Logger used by the PropertyChangeNotifier class and
+ * its instances for logging output.
+ */
+ private static final Logger logger
+ = Logger.getLogger(PropertyChangeNotifier.class);
+
+ /**
+ * The list of PropertyChangeListeners interested in and notified
+ * about changes in the values of the properties of this
+ * PropertyChangeNotifier.
+ */
+ private final List listeners
+ = new ArrayList();
+
+ /**
+ * Initializes a new PropertyChangeNotifier instance.
+ */
+ public PropertyChangeNotifier()
+ {
+ }
+
+ /**
+ * Adds a specific PropertyChangeListener to the list of listeners
+ * interested in and notified about changes in the values of the properties
+ * of this PropertyChangeNotifier.
+ *
+ * @param listener a PropertyChangeListener to be notified about
+ * changes in the values of the properties of this
+ * PropertyChangeNotifier. If the specified listener is already in
+ * the list of interested listeners (i.e. it has been previously added), it
+ * is not added again.
+ */
+ public void addPropertyChangeListener(PropertyChangeListener listener)
+ {
+ if (listener == null)
+ {
+ if (logger.isDebugEnabled())
+ {
+ logger.debug(
+ "The specified argument listener is null"
+ + " and that does not make sense.");
+ }
+ }
+ else
+ {
+ synchronized (listeners)
+ {
+ if (!listeners.contains(listener))
+ listeners.add(listener);
+ }
+ }
+ }
+
+ /**
+ * Fires a new PropertyChangeEvent to the
+ * PropertyChangeListeners registered with this
+ * PropertyChangeNotifier in order to notify about a change in the
+ * value of a specific property which had its old value modified to a
+ * specific new value. PropertyChangeNotifier does not check
+ * whether the specified oldValue and newValue are indeed
+ * different.
+ *
+ * @param property the name of the property of this
+ * PropertyChangeNotifier which had its value changed
+ * @param oldValue the value of the property with the specified name before
+ * the change
+ * @param newValue the value of the property with the specified name after
+ * the change
+ */
+ protected void firePropertyChange(
+ String property,
+ Object oldValue, Object newValue)
+ {
+ PropertyChangeListener[] ls;
+
+ synchronized (listeners)
+ {
+ ls
+ = listeners.toArray(
+ new PropertyChangeListener[listeners.size()]);
+ }
+
+ if (ls.length != 0)
+ {
+ PropertyChangeEvent ev
+ = new PropertyChangeEvent(
+ getPropertyChangeSource(property, oldValue, newValue),
+ property,
+ oldValue, newValue);
+
+ for (PropertyChangeListener l : ls)
+ {
+ try
+ {
+ l.propertyChange(ev);
+ }
+ catch (Throwable t)
+ {
+ if (t instanceof InterruptedException)
+ {
+ Thread.currentThread().interrupt();
+ }
+ else if (t instanceof ThreadDeath)
+ {
+ throw (ThreadDeath) t;
+ }
+ else
+ {
+ logger.warn(
+ "A PropertyChangeListener threw an exception"
+ + " while handling a PropertyChangeEvent.",
+ t);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Gets the Object to be reported as the source of a new
+ * PropertyChangeEvent which is to notify the
+ * PropertyChangeListeners registered with this
+ * PropertyChangeNotifier about the change in the value of a
+ * property with a specific name from a specific old value to a specific new
+ * value.
+ *
+ * @param property the name of the property which had its value changed from
+ * the specified old value to the specified new value
+ * @param oldValue the value of the property with the specified name before
+ * the change
+ * @param newValue the value of the property with the specified name after
+ * the change
+ * @return the Object to be reported as the source of the new
+ * PropertyChangeEvent which is to notify the
+ * PropertyChangeListeners registered with this
+ * PropertyChangeNotifier about the change in the value of the
+ * property with the specified name from the specified old value to the
+ * specified new value
+ */
+ protected Object getPropertyChangeSource(
+ String property,
+ Object oldValue, Object newValue)
+ {
+ return this;
+ }
+
+ /**
+ * Removes a specific PropertyChangeListener from the list of
+ * listeners interested in and notified about changes in the values of the
+ * properties of this PropertyChangeNotifer.
+ *
+ * @param listener a PropertyChangeListener to no longer be
+ * notified about changes in the values of the properties of this
+ * PropertyChangeNotifier
+ */
+ public void removePropertyChangeListener(PropertyChangeListener listener)
+ {
+ if (listener != null)
+ {
+ synchronized (listeners)
+ {
+ listeners.remove(listener);
+ }
+ }
+ }
+}
diff --git a/src/org/jitsi/util/swing/FitLayout.java b/src/org/jitsi/util/swing/FitLayout.java
index af7361eb7..7fd607566 100644
--- a/src/org/jitsi/util/swing/FitLayout.java
+++ b/src/org/jitsi/util/swing/FitLayout.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,208 +13,208 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.util.swing;
-
-import java.awt.*;
-
-import javax.swing.*;
-
-/**
- * Represents a LayoutManager which centers the first
- * Component within its Container and, if the preferred size
- * of the Component is larger than the size of the Container,
- * scales the former within the bounds of the latter while preserving the aspect
- * ratio. FitLayout is appropriate for Containers which
- * display a single image or video Component in its entirety for which
- * preserving the aspect ratio is important.
- *
- * @author Lyubomir Marinov
- */
-public class FitLayout
- implements LayoutManager
-{
- /**
- * The default height and width to be used by FitLayout and its
- * extenders in order to avoid falling back to zero height and/or width.
- * Introduced to mitigate issues arising from the fact that a
- * Component zero height and/or width.
- */
- protected static final int DEFAULT_HEIGHT_OR_WIDTH = 16;
-
- /**
- * {@inheritDoc}
- *
- * Does nothing because this LayoutManager lays out only the first
- * Component of the parent Container and thus doesn't need
- * any String associations.
- */
- public void addLayoutComponent(String name, Component comp) {}
-
- /**
- * Gets the first Component of a specific Container if
- * there is such a Component.
- *
- * @param parent the Container to retrieve the first
- * Component of
- * @return the first Component of a specific Container if
- * there is such a Component; otherwise, null
- */
- protected Component getComponent(Container parent)
- {
- Component[] components = parent.getComponents();
-
- return (components.length > 0) ? components[0] : null;
- }
-
- protected void layoutComponent(
- Component component,
- Rectangle bounds,
- float alignmentX, float alignmentY)
- {
- Dimension size;
-
- /*
- * XXX The following (mostly) represents a quick and dirty hack for the
- * purposes of video conferencing which adds transparent JPanels to
- * VideoContainer and does not want them fitted because they contain
- * VideoContainers themselves and the videos get fitted in them.
- */
- if (((component instanceof JPanel)
- && !component.isOpaque()
- && (((Container) component).getComponentCount() > 1))
- || (component instanceof VideoContainer)
- /*
- * If the specified component does not have a preferredSize, we
- * cannot know its aspect ratio and we are left with no choice
- * but to stretch it within the complete bounds.
- */
- || ((size = component.getPreferredSize()) == null))
- {
- size = bounds.getSize();
- }
- else
- {
- boolean scale = false;
- double widthRatio;
- double heightRatio;
-
- if ((size.width != bounds.width) && (size.width > 0))
- {
- scale = true;
- widthRatio = bounds.width / (double) size.width;
- }
- else
- widthRatio = 1;
- if ((size.height != bounds.height) && (size.height > 0))
- {
- scale = true;
- heightRatio = bounds.height / (double) size.height;
- }
- else
- heightRatio = 1;
- if (scale)
- {
- double ratio = Math.min(widthRatio, heightRatio);
-
- size.width = (int) (size.width * ratio);
- size.height = (int) (size.height * ratio);
- }
- }
-
- // Respect the maximumSize of the component.
- if (component.isMaximumSizeSet())
- {
- Dimension maxSize = component.getMaximumSize();
-
- if (size.width > maxSize.width)
- size.width = maxSize.width;
- if (size.height > maxSize.height)
- size.height = maxSize.height;
- }
-
- /*
- * Why would one fit a Component into a rectangle with zero width and
- * height?
- */
- if (size.height < 1)
- size.height = 1;
- if (size.width < 1)
- size.width = 1;
-
- component.setBounds(
- bounds.x + Math.round((bounds.width - size.width) * alignmentX),
- bounds.y
- + Math.round((bounds.height - size.height) * alignmentY),
- size.width,
- size.height);
- }
-
- /*
- * Scales the first Component if its preferred size is larger than the size
- * of its parent Container in order to display the Component in its entirety
- * and then centers it within the display area of the parent.
- */
- public void layoutContainer(Container parent)
- {
- layoutContainer(parent, Component.CENTER_ALIGNMENT);
- }
-
- protected void layoutContainer(Container parent, float componentAlignmentX)
- {
- Component component = getComponent(parent);
-
- if (component != null)
- {
- layoutComponent(
- component,
- new Rectangle(parent.getSize()),
- componentAlignmentX, Component.CENTER_ALIGNMENT);
- }
- }
-
- /*
- * Since this LayoutManager lays out only the first Component of the
- * specified parent Container, the minimum size of the Container is the
- * minimum size of the mentioned Component.
- */
- public Dimension minimumLayoutSize(Container parent)
- {
- Component component = getComponent(parent);
-
- return
- (component != null)
- ? component.getMinimumSize()
- : new Dimension(
- DEFAULT_HEIGHT_OR_WIDTH,
- DEFAULT_HEIGHT_OR_WIDTH);
- }
-
- /**
- * {@inheritDoc}
- *
- * Since this LayoutManager lays out only the first
- * Component of the specified parent Container, the
- * preferred size of the Container is the preferred size of the
- * mentioned Component.
- */
- public Dimension preferredLayoutSize(Container parent)
- {
- Component component = getComponent(parent);
-
- return
- (component != null)
- ? component.getPreferredSize()
- : new Dimension(
- DEFAULT_HEIGHT_OR_WIDTH,
- DEFAULT_HEIGHT_OR_WIDTH);
- }
-
- /**
- * {@inheritDoc}
- *
- * Does nothing because this LayoutManager lays out only the first
- * Component of the parent Container and thus doesn't need
- * any String associations.
- */
- public void removeLayoutComponent(Component comp) {}
-}
+package org.jitsi.util.swing;
+
+import java.awt.*;
+
+import javax.swing.*;
+
+/**
+ * Represents a LayoutManager which centers the first
+ * Component within its Container and, if the preferred size
+ * of the Component is larger than the size of the Container,
+ * scales the former within the bounds of the latter while preserving the aspect
+ * ratio. FitLayout is appropriate for Containers which
+ * display a single image or video Component in its entirety for which
+ * preserving the aspect ratio is important.
+ *
+ * @author Lyubomir Marinov
+ */
+public class FitLayout
+ implements LayoutManager
+{
+ /**
+ * The default height and width to be used by FitLayout and its
+ * extenders in order to avoid falling back to zero height and/or width.
+ * Introduced to mitigate issues arising from the fact that a
+ * Component zero height and/or width.
+ */
+ protected static final int DEFAULT_HEIGHT_OR_WIDTH = 16;
+
+ /**
+ * {@inheritDoc}
+ *
+ * Does nothing because this LayoutManager lays out only the first
+ * Component of the parent Container and thus doesn't need
+ * any String associations.
+ */
+ public void addLayoutComponent(String name, Component comp) {}
+
+ /**
+ * Gets the first Component of a specific Container if
+ * there is such a Component.
+ *
+ * @param parent the Container to retrieve the first
+ * Component of
+ * @return the first Component of a specific Container if
+ * there is such a Component; otherwise, null
+ */
+ protected Component getComponent(Container parent)
+ {
+ Component[] components = parent.getComponents();
+
+ return (components.length > 0) ? components[0] : null;
+ }
+
+ protected void layoutComponent(
+ Component component,
+ Rectangle bounds,
+ float alignmentX, float alignmentY)
+ {
+ Dimension size;
+
+ /*
+ * XXX The following (mostly) represents a quick and dirty hack for the
+ * purposes of video conferencing which adds transparent JPanels to
+ * VideoContainer and does not want them fitted because they contain
+ * VideoContainers themselves and the videos get fitted in them.
+ */
+ if (((component instanceof JPanel)
+ && !component.isOpaque()
+ && (((Container) component).getComponentCount() > 1))
+ || (component instanceof VideoContainer)
+ /*
+ * If the specified component does not have a preferredSize, we
+ * cannot know its aspect ratio and we are left with no choice
+ * but to stretch it within the complete bounds.
+ */
+ || ((size = component.getPreferredSize()) == null))
+ {
+ size = bounds.getSize();
+ }
+ else
+ {
+ boolean scale = false;
+ double widthRatio;
+ double heightRatio;
+
+ if ((size.width != bounds.width) && (size.width > 0))
+ {
+ scale = true;
+ widthRatio = bounds.width / (double) size.width;
+ }
+ else
+ widthRatio = 1;
+ if ((size.height != bounds.height) && (size.height > 0))
+ {
+ scale = true;
+ heightRatio = bounds.height / (double) size.height;
+ }
+ else
+ heightRatio = 1;
+ if (scale)
+ {
+ double ratio = Math.min(widthRatio, heightRatio);
+
+ size.width = (int) (size.width * ratio);
+ size.height = (int) (size.height * ratio);
+ }
+ }
+
+ // Respect the maximumSize of the component.
+ if (component.isMaximumSizeSet())
+ {
+ Dimension maxSize = component.getMaximumSize();
+
+ if (size.width > maxSize.width)
+ size.width = maxSize.width;
+ if (size.height > maxSize.height)
+ size.height = maxSize.height;
+ }
+
+ /*
+ * Why would one fit a Component into a rectangle with zero width and
+ * height?
+ */
+ if (size.height < 1)
+ size.height = 1;
+ if (size.width < 1)
+ size.width = 1;
+
+ component.setBounds(
+ bounds.x + Math.round((bounds.width - size.width) * alignmentX),
+ bounds.y
+ + Math.round((bounds.height - size.height) * alignmentY),
+ size.width,
+ size.height);
+ }
+
+ /*
+ * Scales the first Component if its preferred size is larger than the size
+ * of its parent Container in order to display the Component in its entirety
+ * and then centers it within the display area of the parent.
+ */
+ public void layoutContainer(Container parent)
+ {
+ layoutContainer(parent, Component.CENTER_ALIGNMENT);
+ }
+
+ protected void layoutContainer(Container parent, float componentAlignmentX)
+ {
+ Component component = getComponent(parent);
+
+ if (component != null)
+ {
+ layoutComponent(
+ component,
+ new Rectangle(parent.getSize()),
+ componentAlignmentX, Component.CENTER_ALIGNMENT);
+ }
+ }
+
+ /*
+ * Since this LayoutManager lays out only the first Component of the
+ * specified parent Container, the minimum size of the Container is the
+ * minimum size of the mentioned Component.
+ */
+ public Dimension minimumLayoutSize(Container parent)
+ {
+ Component component = getComponent(parent);
+
+ return
+ (component != null)
+ ? component.getMinimumSize()
+ : new Dimension(
+ DEFAULT_HEIGHT_OR_WIDTH,
+ DEFAULT_HEIGHT_OR_WIDTH);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Since this LayoutManager lays out only the first
+ * Component of the specified parent Container, the
+ * preferred size of the Container is the preferred size of the
+ * mentioned Component.
+ */
+ public Dimension preferredLayoutSize(Container parent)
+ {
+ Component component = getComponent(parent);
+
+ return
+ (component != null)
+ ? component.getPreferredSize()
+ : new Dimension(
+ DEFAULT_HEIGHT_OR_WIDTH,
+ DEFAULT_HEIGHT_OR_WIDTH);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * Does nothing because this LayoutManager lays out only the first
+ * Component of the parent Container and thus doesn't need
+ * any String associations.
+ */
+ public void removeLayoutComponent(Component comp) {}
+}
diff --git a/src/org/jitsi/util/swing/VideoContainer.java b/src/org/jitsi/util/swing/VideoContainer.java
index c7b4a3a3e..22f044b27 100644
--- a/src/org/jitsi/util/swing/VideoContainer.java
+++ b/src/org/jitsi/util/swing/VideoContainer.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,361 +13,361 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.util.swing;
-
-import java.awt.*;
-import java.awt.event.*;
-import java.beans.*;
-
-import javax.swing.*;
-
-/**
- * Implements a Container for video/visual Components.
- * VideoContainer uses {@link VideoLayout} to layout the video/visual
- * Components it contains. A specific Component can be
- * displayed by default at {@link VideoLayout#CENTER_REMOTE}.
- *
- * @author Lyubomir Marinov
- * @author Yana Stamcheva
- */
-public class VideoContainer
- extends TransparentPanel
-{
- /**
- * Serial version UID.
- */
- private static final long serialVersionUID = 0L;
-
- /**
- * The default background color of VideoContainer when it contains
- * Component instances other than {@link #noVideoComponent}.
- */
- public static final Color DEFAULT_BACKGROUND_COLOR = Color.BLACK;
-
- private static final String PREFERRED_SIZE_PROPERTY_NAME = "preferredSize";
-
- /**
- * The number of times that add or remove methods are
- * currently being executed on this instance. Decreases the number of
- * unnecessary invocations to {@link #doLayout()}, {@link #repaint()} and
- * {@link #validate()}.
- */
- private int inAddOrRemove;
-
- /**
- * The Component to be displayed by this VideoContainer
- * at {@link VideoLayout#CENTER_REMOTE} when no other Component has
- * been added to it to be displayed there. For example, the avatar of the
- * remote peer may be displayed in place of the remote video when the remote
- * video is not available.
- */
- private final Component noVideoComponent;
-
- private final PropertyChangeListener propertyChangeListener
- = new PropertyChangeListener()
- {
- public void propertyChange(PropertyChangeEvent ev)
- {
- VideoContainer.this.propertyChange(ev);
- }
- };
-
- private final Object syncRoot = new Object();
-
- /**
- * The indicator which determines whether this instance is aware that
- * {@link #doLayout()}, {@link #repaint()} and/or {@link #validate()} are to
- * be invoked (as soon as {@link #inAddOrRemove} decreases from a positive
- * number to zero).
- */
- private boolean validateAndRepaint;
-
- /**
- * Initializes a new VideoContainer with a specific
- * Component to be displayed when no remote video is available.
- *
- * @param noVideoComponent the component to be displayed when no remote
- * video is available
- * @param conference true to dedicate the new instance to a
- * telephony conferencing user interface; otherwise, false
- */
- public VideoContainer(Component noVideoComponent, boolean conference)
- {
- setLayout(new VideoLayout(conference));
-
- this.noVideoComponent = noVideoComponent;
-
- if (DEFAULT_BACKGROUND_COLOR != null)
- setBackground(DEFAULT_BACKGROUND_COLOR);
-
- addContainerListener(
- new ContainerListener()
- {
- public void componentAdded(ContainerEvent ev)
- {
- VideoContainer.this.onContainerEvent(ev);
- }
-
- public void componentRemoved(ContainerEvent ev)
- {
- VideoContainer.this.onContainerEvent(ev);
- }
- });
-
- if (this.noVideoComponent != null)
- add(this.noVideoComponent, VideoLayout.CENTER_REMOTE, -1);
- }
-
- /**
- * Adds the given component at the {@link VideoLayout#CENTER_REMOTE}
- * position in the default video layout.
- *
- * @param comp the component to add
- * @return the added component
- */
- @Override
- public Component add(Component comp)
- {
- add(comp, VideoLayout.CENTER_REMOTE);
- return comp;
- }
-
- @Override
- public Component add(Component comp, int index)
- {
- add(comp, null, index);
- return comp;
- }
-
- @Override
- public void add(Component comp, Object constraints)
- {
- add(comp, constraints, -1);
- }
-
- /**
- * Overrides the default behavior of add in order to be sure to remove the
- * default "no video" component when a remote video component is added.
- *
- * @param comp the component to add
- * @param constraints
- * @param index
- */
- @Override
- public void add(Component comp, Object constraints, int index)
- {
- enterAddOrRemove();
- try
- {
- if (VideoLayout.CENTER_REMOTE.equals(constraints)
- && (noVideoComponent != null)
- && !noVideoComponent.equals(comp)
- || (comp.equals(noVideoComponent)
- && noVideoComponent.getParent() != null))
- {
- remove(noVideoComponent);
- }
-
- super.add(comp, constraints, index);
- }
- finally
- {
- exitAddOrRemove();
- }
- }
-
- private void enterAddOrRemove()
- {
- synchronized (syncRoot)
- {
- if (inAddOrRemove == 0)
- validateAndRepaint = false;
- inAddOrRemove++;
- }
- }
-
- private void exitAddOrRemove()
- {
- synchronized (syncRoot)
- {
- inAddOrRemove--;
- if (inAddOrRemove < 1)
- {
- inAddOrRemove = 0;
- if (validateAndRepaint)
- {
- validateAndRepaint = false;
-
- if (isDisplayable())
- {
- if (isValid())
- doLayout();
- else
- validate();
- repaint();
- }
- else
- doLayout();
- }
- }
- }
- }
-
- /**
- * Notifies this instance that a specific Component has been added
- * to or removed from this Container.
- *
- * @param ev a ContainerEvent which details the specifics of the
- * notification such as the Component that has been added or
- * removed
- */
- private void onContainerEvent(ContainerEvent ev)
- {
- try
- {
- Component component = ev.getChild();
-
- switch (ev.getID())
- {
- case ContainerEvent.COMPONENT_ADDED:
- component.addPropertyChangeListener(
- PREFERRED_SIZE_PROPERTY_NAME,
- propertyChangeListener);
- break;
- case ContainerEvent.COMPONENT_REMOVED:
- component.removePropertyChangeListener(
- PREFERRED_SIZE_PROPERTY_NAME,
- propertyChangeListener);
- break;
- }
-
- /*
- * If an explicit background color is to be displayed by this
- * Component, make sure that its opaque property i.e. transparency
- * does not interfere with that display.
- */
- if (DEFAULT_BACKGROUND_COLOR != null)
- {
- int componentCount = getComponentCount();
-
- if ((componentCount == 1)
- && (getComponent(0)
- == VideoContainer.this.noVideoComponent))
- {
- componentCount = 0;
- }
-
- setOpaque(componentCount > 0);
- }
- }
- finally
- {
- synchronized (syncRoot)
- {
- if (inAddOrRemove != 0)
- validateAndRepaint = true;
- }
- }
- }
-
- /**
- * Notifies this instance about a change in the value of a property of a
- * Component contained by this Container. Since the
- * VideoLayout of this Container sizes the contained
- * Components based on their preferredSizes, this
- * Container invokes {@link #doLayout()}, {@link #repaint()} and/or
- * {@link #validate()} upon changes in the values of the property in
- * question.
- *
- * @param ev a PropertyChangeEvent which details the specifics of
- * the notification such as the name of the property whose value changed and
- * the Component which fired the notification
- */
- private void propertyChange(PropertyChangeEvent ev)
- {
- if (PREFERRED_SIZE_PROPERTY_NAME.equals(ev.getPropertyName())
- && SwingUtilities.isEventDispatchThread())
- {
- /*
- * The goal is to invoke doLayout, repaint and/or validate. These
- * methods and the specifics with respect to avoiding unnecessary
- * calls to them are already dealt with by enterAddOrRemove,
- * exitAddOrRemove and validateAndRepaint.
- */
- synchronized (syncRoot)
- {
- enterAddOrRemove();
- validateAndRepaint = true;
- exitAddOrRemove();
- }
- }
- }
-
- /**
- * Overrides the default remove behavior in order to add the default no
- * video component when the remote video is removed.
- *
- * @param comp the component to remove
- */
- @Override
- public void remove(Component comp)
- {
- enterAddOrRemove();
- try
- {
- super.remove(comp);
-
- Component[] components = getComponents();
- VideoLayout videoLayout = (VideoLayout) getLayout();
- boolean hasComponentsAtCenterRemote = false;
-
- for (Component c : components)
- {
- if (!c.equals(noVideoComponent)
- && VideoLayout.CENTER_REMOTE.equals(
- videoLayout.getComponentConstraints(c)))
- {
- hasComponentsAtCenterRemote = true;
- break;
- }
- }
-
- if (!hasComponentsAtCenterRemote
- && (noVideoComponent != null)
- && !noVideoComponent.equals(comp))
- {
- add(noVideoComponent, VideoLayout.CENTER_REMOTE);
- }
- }
- finally
- {
- exitAddOrRemove();
- }
- }
-
- /**
- * Ensures noVideoComponent is displayed even when the clients of the
- * videoContainer invoke its #removeAll() to remove their previous visual
- * Components representing video. Just adding noVideoComponent upon
- * ContainerEvent#COMPONENT_REMOVED when there is no other Component left in
- * the Container will cause an infinite loop because Container#removeAll()
- * will detect that a new Component has been added while dispatching the
- * event and will then try to remove the new Component.
- */
- @Override
- public void removeAll()
- {
- enterAddOrRemove();
- try
- {
- super.removeAll();
-
- if (noVideoComponent != null)
- add(noVideoComponent, VideoLayout.CENTER_REMOTE);
- }
- finally
- {
- exitAddOrRemove();
- }
- }
-}
+package org.jitsi.util.swing;
+
+import java.awt.*;
+import java.awt.event.*;
+import java.beans.*;
+
+import javax.swing.*;
+
+/**
+ * Implements a Container for video/visual Components.
+ * VideoContainer uses {@link VideoLayout} to layout the video/visual
+ * Components it contains. A specific Component can be
+ * displayed by default at {@link VideoLayout#CENTER_REMOTE}.
+ *
+ * @author Lyubomir Marinov
+ * @author Yana Stamcheva
+ */
+public class VideoContainer
+ extends TransparentPanel
+{
+ /**
+ * Serial version UID.
+ */
+ private static final long serialVersionUID = 0L;
+
+ /**
+ * The default background color of VideoContainer when it contains
+ * Component instances other than {@link #noVideoComponent}.
+ */
+ public static final Color DEFAULT_BACKGROUND_COLOR = Color.BLACK;
+
+ private static final String PREFERRED_SIZE_PROPERTY_NAME = "preferredSize";
+
+ /**
+ * The number of times that add or remove methods are
+ * currently being executed on this instance. Decreases the number of
+ * unnecessary invocations to {@link #doLayout()}, {@link #repaint()} and
+ * {@link #validate()}.
+ */
+ private int inAddOrRemove;
+
+ /**
+ * The Component to be displayed by this VideoContainer
+ * at {@link VideoLayout#CENTER_REMOTE} when no other Component has
+ * been added to it to be displayed there. For example, the avatar of the
+ * remote peer may be displayed in place of the remote video when the remote
+ * video is not available.
+ */
+ private final Component noVideoComponent;
+
+ private final PropertyChangeListener propertyChangeListener
+ = new PropertyChangeListener()
+ {
+ public void propertyChange(PropertyChangeEvent ev)
+ {
+ VideoContainer.this.propertyChange(ev);
+ }
+ };
+
+ private final Object syncRoot = new Object();
+
+ /**
+ * The indicator which determines whether this instance is aware that
+ * {@link #doLayout()}, {@link #repaint()} and/or {@link #validate()} are to
+ * be invoked (as soon as {@link #inAddOrRemove} decreases from a positive
+ * number to zero).
+ */
+ private boolean validateAndRepaint;
+
+ /**
+ * Initializes a new VideoContainer with a specific
+ * Component to be displayed when no remote video is available.
+ *
+ * @param noVideoComponent the component to be displayed when no remote
+ * video is available
+ * @param conference true to dedicate the new instance to a
+ * telephony conferencing user interface; otherwise, false
+ */
+ public VideoContainer(Component noVideoComponent, boolean conference)
+ {
+ setLayout(new VideoLayout(conference));
+
+ this.noVideoComponent = noVideoComponent;
+
+ if (DEFAULT_BACKGROUND_COLOR != null)
+ setBackground(DEFAULT_BACKGROUND_COLOR);
+
+ addContainerListener(
+ new ContainerListener()
+ {
+ public void componentAdded(ContainerEvent ev)
+ {
+ VideoContainer.this.onContainerEvent(ev);
+ }
+
+ public void componentRemoved(ContainerEvent ev)
+ {
+ VideoContainer.this.onContainerEvent(ev);
+ }
+ });
+
+ if (this.noVideoComponent != null)
+ add(this.noVideoComponent, VideoLayout.CENTER_REMOTE, -1);
+ }
+
+ /**
+ * Adds the given component at the {@link VideoLayout#CENTER_REMOTE}
+ * position in the default video layout.
+ *
+ * @param comp the component to add
+ * @return the added component
+ */
+ @Override
+ public Component add(Component comp)
+ {
+ add(comp, VideoLayout.CENTER_REMOTE);
+ return comp;
+ }
+
+ @Override
+ public Component add(Component comp, int index)
+ {
+ add(comp, null, index);
+ return comp;
+ }
+
+ @Override
+ public void add(Component comp, Object constraints)
+ {
+ add(comp, constraints, -1);
+ }
+
+ /**
+ * Overrides the default behavior of add in order to be sure to remove the
+ * default "no video" component when a remote video component is added.
+ *
+ * @param comp the component to add
+ * @param constraints
+ * @param index
+ */
+ @Override
+ public void add(Component comp, Object constraints, int index)
+ {
+ enterAddOrRemove();
+ try
+ {
+ if (VideoLayout.CENTER_REMOTE.equals(constraints)
+ && (noVideoComponent != null)
+ && !noVideoComponent.equals(comp)
+ || (comp.equals(noVideoComponent)
+ && noVideoComponent.getParent() != null))
+ {
+ remove(noVideoComponent);
+ }
+
+ super.add(comp, constraints, index);
+ }
+ finally
+ {
+ exitAddOrRemove();
+ }
+ }
+
+ private void enterAddOrRemove()
+ {
+ synchronized (syncRoot)
+ {
+ if (inAddOrRemove == 0)
+ validateAndRepaint = false;
+ inAddOrRemove++;
+ }
+ }
+
+ private void exitAddOrRemove()
+ {
+ synchronized (syncRoot)
+ {
+ inAddOrRemove--;
+ if (inAddOrRemove < 1)
+ {
+ inAddOrRemove = 0;
+ if (validateAndRepaint)
+ {
+ validateAndRepaint = false;
+
+ if (isDisplayable())
+ {
+ if (isValid())
+ doLayout();
+ else
+ validate();
+ repaint();
+ }
+ else
+ doLayout();
+ }
+ }
+ }
+ }
+
+ /**
+ * Notifies this instance that a specific Component has been added
+ * to or removed from this Container.
+ *
+ * @param ev a ContainerEvent which details the specifics of the
+ * notification such as the Component that has been added or
+ * removed
+ */
+ private void onContainerEvent(ContainerEvent ev)
+ {
+ try
+ {
+ Component component = ev.getChild();
+
+ switch (ev.getID())
+ {
+ case ContainerEvent.COMPONENT_ADDED:
+ component.addPropertyChangeListener(
+ PREFERRED_SIZE_PROPERTY_NAME,
+ propertyChangeListener);
+ break;
+ case ContainerEvent.COMPONENT_REMOVED:
+ component.removePropertyChangeListener(
+ PREFERRED_SIZE_PROPERTY_NAME,
+ propertyChangeListener);
+ break;
+ }
+
+ /*
+ * If an explicit background color is to be displayed by this
+ * Component, make sure that its opaque property i.e. transparency
+ * does not interfere with that display.
+ */
+ if (DEFAULT_BACKGROUND_COLOR != null)
+ {
+ int componentCount = getComponentCount();
+
+ if ((componentCount == 1)
+ && (getComponent(0)
+ == VideoContainer.this.noVideoComponent))
+ {
+ componentCount = 0;
+ }
+
+ setOpaque(componentCount > 0);
+ }
+ }
+ finally
+ {
+ synchronized (syncRoot)
+ {
+ if (inAddOrRemove != 0)
+ validateAndRepaint = true;
+ }
+ }
+ }
+
+ /**
+ * Notifies this instance about a change in the value of a property of a
+ * Component contained by this Container. Since the
+ * VideoLayout of this Container sizes the contained
+ * Components based on their preferredSizes, this
+ * Container invokes {@link #doLayout()}, {@link #repaint()} and/or
+ * {@link #validate()} upon changes in the values of the property in
+ * question.
+ *
+ * @param ev a PropertyChangeEvent which details the specifics of
+ * the notification such as the name of the property whose value changed and
+ * the Component which fired the notification
+ */
+ private void propertyChange(PropertyChangeEvent ev)
+ {
+ if (PREFERRED_SIZE_PROPERTY_NAME.equals(ev.getPropertyName())
+ && SwingUtilities.isEventDispatchThread())
+ {
+ /*
+ * The goal is to invoke doLayout, repaint and/or validate. These
+ * methods and the specifics with respect to avoiding unnecessary
+ * calls to them are already dealt with by enterAddOrRemove,
+ * exitAddOrRemove and validateAndRepaint.
+ */
+ synchronized (syncRoot)
+ {
+ enterAddOrRemove();
+ validateAndRepaint = true;
+ exitAddOrRemove();
+ }
+ }
+ }
+
+ /**
+ * Overrides the default remove behavior in order to add the default no
+ * video component when the remote video is removed.
+ *
+ * @param comp the component to remove
+ */
+ @Override
+ public void remove(Component comp)
+ {
+ enterAddOrRemove();
+ try
+ {
+ super.remove(comp);
+
+ Component[] components = getComponents();
+ VideoLayout videoLayout = (VideoLayout) getLayout();
+ boolean hasComponentsAtCenterRemote = false;
+
+ for (Component c : components)
+ {
+ if (!c.equals(noVideoComponent)
+ && VideoLayout.CENTER_REMOTE.equals(
+ videoLayout.getComponentConstraints(c)))
+ {
+ hasComponentsAtCenterRemote = true;
+ break;
+ }
+ }
+
+ if (!hasComponentsAtCenterRemote
+ && (noVideoComponent != null)
+ && !noVideoComponent.equals(comp))
+ {
+ add(noVideoComponent, VideoLayout.CENTER_REMOTE);
+ }
+ }
+ finally
+ {
+ exitAddOrRemove();
+ }
+ }
+
+ /**
+ * Ensures noVideoComponent is displayed even when the clients of the
+ * videoContainer invoke its #removeAll() to remove their previous visual
+ * Components representing video. Just adding noVideoComponent upon
+ * ContainerEvent#COMPONENT_REMOVED when there is no other Component left in
+ * the Container will cause an infinite loop because Container#removeAll()
+ * will detect that a new Component has been added while dispatching the
+ * event and will then try to remove the new Component.
+ */
+ @Override
+ public void removeAll()
+ {
+ enterAddOrRemove();
+ try
+ {
+ super.removeAll();
+
+ if (noVideoComponent != null)
+ add(noVideoComponent, VideoLayout.CENTER_REMOTE);
+ }
+ finally
+ {
+ exitAddOrRemove();
+ }
+ }
+}
diff --git a/src/org/jitsi/util/swing/VideoLayout.java b/src/org/jitsi/util/swing/VideoLayout.java
index 31dd770f1..8794d664f 100644
--- a/src/org/jitsi/util/swing/VideoLayout.java
+++ b/src/org/jitsi/util/swing/VideoLayout.java
@@ -1,4 +1,4 @@
-/*
+/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,633 +13,633 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.jitsi.util.swing;
-
-import java.awt.*;
-import java.util.*;
-import java.util.List;
-
-import javax.swing.*;
-
-/**
- * Implements the LayoutManager which lays out the local and remote
- * videos in a video Call.
- *
- * @author Lyubomir Marinov
- * @author Yana Stamcheva
- */
-public class VideoLayout
- extends FitLayout
-{
- /**
- * The video canvas constraint.
- */
- public static final String CANVAS = "CANVAS";
-
- /**
- * The center remote video constraint.
- */
- public static final String CENTER_REMOTE = "CENTER_REMOTE";
-
- /**
- * The close local video constraint.
- */
- public static final String CLOSE_LOCAL_BUTTON = "CLOSE_LOCAL_BUTTON";
-
- /**
- * The east remote video constraint.
- */
- public static final String EAST_REMOTE = "EAST_REMOTE";
-
- /**
- * The horizontal gap between the Component being laid out by
- * VideoLayout.
- */
- private static final int HGAP = 10;
-
- /**
- * The local video constraint.
- */
- public static final String LOCAL = "LOCAL";
-
- /**
- * The ration between the local and the remote video.
- */
- private static final float LOCAL_TO_REMOTE_RATIO = 0.30f;
-
- /**
- * The video canvas.
- */
- private Component canvas;
-
- /**
- * The close local video button component.
- */
- private Component closeButton;
-
- /**
- * The indicator which determines whether this instance is dedicated to a
- * conference.
- */
- private final boolean conference;
-
- /**
- * The map of component constraints.
- */
- private final Map constraints
- = new HashMap();
-
- /**
- * The component containing the local video.
- */
- private Component local;
-
- /**
- * The x coordinate alignment of the remote video.
- */
- private float remoteAlignmentX = Component.CENTER_ALIGNMENT;
-
- /**
- * The list of Components depicting remote videos.
- */
- private final List remotes = new LinkedList();
-
- /**
- * Creates an instance of VideoLayout by also indicating if this
- * video layout is dedicated to a conference interface.
- *
- * @param conference true if the new instance will be dedicated to
- * a conference; otherwise, false
- */
- public VideoLayout(boolean conference)
- {
- this.conference = conference;
- }
-
- /**
- * Adds the given component in this layout on the specified by name
- * position.
- *
- * @param name the constraint giving the position of the component in this
- * layout
- * @param comp the component to add
- */
- @Override
- public void addLayoutComponent(String name, Component comp)
- {
- super.addLayoutComponent(name, comp);
-
- synchronized (constraints)
- {
- constraints.put(comp, name);
- }
-
- if ((name == null) || name.equals(CENTER_REMOTE))
- {
- if (!remotes.contains(comp))
- remotes.add(comp);
- remoteAlignmentX = Component.CENTER_ALIGNMENT;
- }
- else if (name.equals(EAST_REMOTE))
- {
- if (!remotes.contains(comp))
- remotes.add(comp);
- remoteAlignmentX = Component.RIGHT_ALIGNMENT;
- }
- else if (name.equals(LOCAL))
- local = comp;
- else if (name.equals(CLOSE_LOCAL_BUTTON))
- closeButton = comp;
- else if (name.equals(CANVAS))
- canvas = comp;
- }
-
- /**
- * Determines whether the aspect ratio of a specific Dimension is
- * to be considered equal to the aspect ratio of specific width and
- * height.
- *
- * @param size the Dimension whose aspect ratio is to be compared
- * to the aspect ratio of width and height
- * @param width the width which defines in combination with height
- * the aspect ratio to be compared to the aspect ratio of size
- * @param height the height which defines in combination with width
- * the aspect ratio to be compared to the aspect ratio of size
- * @return true if the aspect ratio of size is to be
- * considered equal to the aspect ratio of width and
- * height; otherwise, false
- */
- public static boolean areAspectRatiosEqual(
- Dimension size,
- int width, int height)
- {
- if ((size.height == 0) || (height == 0))
- return false;
- else
- {
- double a = size.width / (double) size.height;
- double b = width / (double) height;
- double diff = a - b;
-
- return (-0.01 < diff) && (diff < 0.01);
- }
- }
-
- /**
- * Determines how may columns to use for the grid display of specific remote
- * visual/video Components.
- *
- * @param remotes the remote visual/video Components to be
- * displayed in a grid
- * @return the number of columns to use for the grid display of the
- * specified remote visual/video Components
- */
- private int calculateColumnCount(List remotes)
- {
- int remoteCount = remotes.size();
-
- if (remoteCount == 1)
- return 1;
- else if ((remoteCount == 2) || (remoteCount == 4))
- return 2;
- else
- return 3;
- }
-
- /**
- * Returns the remote video component.
- *
- * @return the remote video component
- */
- @Override
- protected Component getComponent(Container parent)
- {
- return (remotes.size() == 1) ? remotes.get(0) : null;
- }
-
- /**
- * Returns the constraints for the given component.
- *
- * @param c the component for which constraints we're looking for
- * @return the constraints for the given component
- */
- public Object getComponentConstraints(Component c)
- {
- synchronized (constraints)
- {
- return constraints.get(c);
- }
- }
-
- /**
- * Returns the local video component.
- *
- * @return the local video component
- */
- public Component getLocal()
- {
- return local;
- }
-
- /**
- * Returns the local video close button.
- *
- * @return the local video close button
- */
- public Component getLocalCloseButton()
- {
- return closeButton;
- }
-
- /**
- * Lays out the specified Container (i.e. the Components
- * it contains) in accord with the logic implemented by this
- * LayoutManager.
- *
- * @param parent the Container to lay out
- */
- @Override
- public void layoutContainer(Container parent)
- {
- /*
- * XXX The methods layoutContainer and preferredLayoutSize must be kept
- * in sync.
- */
-
- List visibleRemotes = new ArrayList();
- List remotes;
- Component local = getLocal();
-
- for (int i = 0; i < this.remotes.size(); i++)
- {
- if (this.remotes.get(i).isVisible())
- visibleRemotes.add(this.remotes.get(i));
- }
-
- /*
- * When there are multiple remote visual/video Components, the local one
- * will be displayed as if it is a remote one i.e. in the same grid, not
- * on top of a remote one. The same layout will be used when this
- * instance is dedicated to a telephony conference.
- */
- if (conference || ((visibleRemotes.size() > 1) && (local != null)))
- {
- remotes = new ArrayList();
- remotes.addAll(visibleRemotes);
- if (local != null)
- remotes.add(local);
- }
- else
- remotes = visibleRemotes;
-
- int remoteCount = remotes.size();
- Dimension parentSize = parent.getSize();
-
- if (!conference && (remoteCount == 1))
- {
- /*
- * If the videos are to be laid out as in a one-to-one call, the
- * remote video has to fill the parent and the local video will be
- * placed on top of the remote video. The remote video will be laid
- * out now and the local video will be laid out later/further
- * bellow.
- */
- super.layoutContainer(
- parent,
- (local == null)
- ? Component.CENTER_ALIGNMENT
- : remoteAlignmentX);
- }
- else if (remoteCount > 0)
- {
- int columns = calculateColumnCount(remotes);
- int columnsMinus1 = columns - 1;
- int rows = (remoteCount + columnsMinus1) / columns;
- int rowsMinus1 = rows - 1;
- Rectangle bounds
- = new Rectangle(
- 0,
- 0,
- /*
- * HGAP is the horizontal gap between the Components
- * being laid out by this VideoLayout so the number of
- * HGAPs will be with one less than the number of
- * columns and that horizontal space cannot be allocated
- * to the bounds of the Components.
- */
- (parentSize.width - (columnsMinus1 * HGAP)) / columns,
- parentSize.height / rows);
-
- for (int i = 0; i < remoteCount; i++)
- {
- int column = i % columns;
- int row = i / columns;
-
- /*
- * On the x axis, the first column starts at zero and each
- * subsequent column starts relative to the end of its preceding
- * column.
- */
- if (column == 0)
- {
- bounds.x = 0;
- /*
- * Eventually, there may be empty cells in the last row.
- * Center the non-empty cells horizontally.
- */
- if (row == rowsMinus1)
- {
- int available = remoteCount - i;
-
- if (available < columns)
- {
- bounds.x
- = (parentSize.width
- - available * bounds.width
- - (available - 1) * HGAP)
- / 2;
- }
- }
- }
- else
- bounds.x += (bounds.width + HGAP);
- bounds.y = row * bounds.height;
-
- super.layoutComponent(
- remotes.get(i),
- bounds,
- Component.CENTER_ALIGNMENT,
- Component.CENTER_ALIGNMENT);
- }
- }
-
- if (local == null)
- {
- /*
- * It is plain wrong to display a close button for the local video
- * if there is no local video.
- */
- if (closeButton != null)
- closeButton.setVisible(false);
- }
- else
- {
- /*
- * If the local visual/video Component is not displayed as if it is
- * a remote one, it will be placed on top of a remote one.
- */
- if (!remotes.contains(local))
- {
- Component remote0 = remotes.isEmpty() ? null : remotes.get(0);
- int localX;
- int localY;
- int height
- = Math.round(parentSize.height * LOCAL_TO_REMOTE_RATIO);
- int width
- = Math.round(parentSize.width * LOCAL_TO_REMOTE_RATIO);
- float alignmentX;
-
- /*
- * XXX The remote Component being a JLabel is meant to signal
- * that there is no remote video and the remote is the
- * photoLabel.
- */
- if ((remoteCount == 1) && (remote0 instanceof JLabel))
- {
- localX = (parentSize.width - width) / 2;
- localY = parentSize.height - height;
- alignmentX = Component.CENTER_ALIGNMENT;
- }
- else
- {
- localX = ((remote0 == null) ? 0 : remote0.getX()) + 5;
- localY = parentSize.height - height - 5;
- alignmentX = Component.LEFT_ALIGNMENT;
- }
- super.layoutComponent(
- local,
- new Rectangle(localX, localY, width, height),
- alignmentX,
- Component.BOTTOM_ALIGNMENT);
- }
-
- /* The closeButton has to be on top of the local video. */
- if (closeButton != null)
- {
- /*
- * XXX We may be overwriting the visible property set by our
- * client (who has initialized the close button) but it is wrong
- * to display a close button for the local video if the local
- * video is not visible.
- */
- closeButton.setVisible(local.isVisible());
-
- super.layoutComponent(
- closeButton,
- new Rectangle(
- local.getX()
- + local.getWidth()
- - closeButton.getWidth(),
- local.getY(),
- closeButton.getWidth(),
- closeButton.getHeight()),
- Component.CENTER_ALIGNMENT,
- Component.CENTER_ALIGNMENT);
- }
- }
-
- /*
- * The video canvas will get the locations of the other components to
- * paint so it has to cover the parent completely.
- */
- if (canvas != null)
- canvas.setBounds(0, 0, parentSize.width, parentSize.height);
- }
-
- /**
- * Returns the preferred layout size for the given container.
- *
- * @param parent the container which preferred layout size we're looking for
- * @return a Dimension containing, the preferred layout size for the given
- * container
- */
- @Override
- public Dimension preferredLayoutSize(Container parent)
- {
- List visibleRemotes = new ArrayList();
- List remotes;
- Component local = getLocal();
-
- for (int i = 0; i < this.remotes.size(); i++)
- {
- if (this.remotes.get(i).isVisible())
- visibleRemotes.add(this.remotes.get(i));
- }
-
- /*
- * When there are multiple remote visual/video Components, the local one
- * will be displayed as if it is a remote one i.e. in the same grid, not
- * on top of a remote one. The same layout will be used when this
- * instance is dedicated to a telephony conference.
- */
- if (conference || ((visibleRemotes.size() > 1) && (local != null)))
- {
- remotes = new ArrayList();
- remotes.addAll(visibleRemotes);
- if (local != null)
- remotes.add(local);
- }
- else
- remotes = visibleRemotes;
-
- int remoteCount = remotes.size();
- Dimension prefLayoutSize;
-
- if (!conference && (remoteCount == 1))
- {
- /*
- * If the videos are to be laid out as in a one-to-one call, the
- * remote video has to fill the parent and the local video will be
- * placed on top of the remote video. The remote video will be laid
- * out now and the local video will be laid out later/further
- * bellow.
- */
- prefLayoutSize = super.preferredLayoutSize(parent);
- }
- else if (remoteCount > 0)
- {
- int columns = calculateColumnCount(remotes);
- int columnsMinus1 = columns - 1;
- int rows = (remoteCount + columnsMinus1) / columns;
- int i = 0;
- Dimension[] prefSizes = new Dimension[columns * rows];
-
- for (Component remote : remotes)
- {
- int column = columnsMinus1 - (i % columns);
- int row = i / columns;
-
- prefSizes[column + row * columns] = remote.getPreferredSize();
-
- i++;
- if (i >= remoteCount)
- break;
- }
-
- int prefLayoutWidth = 0;
-
- for (int column = 0; column < columns; column++)
- {
- int prefColumnWidth = 0;
-
- for (int row = 0; row < rows; row++)
- {
- Dimension prefSize = prefSizes[column + row * columns];
-
- if (prefSize != null)
- prefColumnWidth += prefSize.width;
- }
- prefColumnWidth /= rows;
-
- prefLayoutWidth += prefColumnWidth;
- }
-
- int prefLayoutHeight = 0;
-
- for (int row = 0; row < rows; row++)
- {
- int prefRowHeight = 0;
-
- for (int column = 0; column < columns; column++)
- {
- Dimension prefSize = prefSizes[column + row * columns];
-
- if (prefSize != null)
- prefRowHeight = prefSize.height;
- }
- prefRowHeight /= columns;
-
- prefLayoutHeight += prefRowHeight;
- }
-
- prefLayoutSize
- = new Dimension(
- prefLayoutWidth + columnsMinus1 * HGAP,
- prefLayoutHeight);
- }
- else
- prefLayoutSize = null;
-
- if (local != null)
- {
- /*
- * If the local visual/video Component is not displayed as if it is
- * a remote one, it will be placed on top of a remote one. Then for
- * the purposes of the preferredLayoutSize method it needs to be
- * considered only if there is no remote video whatsoever.
- */
- if (!remotes.contains(local) && (prefLayoutSize == null))
- {
- Dimension prefSize = local.getPreferredSize();
-
- if (prefSize != null)
- {
- int prefHeight
- = Math.round(prefSize.height * LOCAL_TO_REMOTE_RATIO);
- int prefWidth
- = Math.round(prefSize.width * LOCAL_TO_REMOTE_RATIO);
-
- prefLayoutSize = new Dimension(prefWidth, prefHeight);
- }
- }
-
- /*
- * The closeButton has to be on top of the local video.
- * Consequently, the preferredLayoutSize method does not have to
- * consider it. Well, maybe if does if the local video is smaller
- * than the closeButton... but that's just not cool anyway.
- */
- }
-
- /*
- * The video canvas will get the locations of the other components to
- * paint so it has to cover the parent completely. In other words, the
- * preferredLayoutSize method does not have to consider it.
- */
-
- if (prefLayoutSize == null)
- prefLayoutSize = super.preferredLayoutSize(parent);
- else if ((prefLayoutSize.height < 1) || (prefLayoutSize.width < 1))
- {
- prefLayoutSize.height = DEFAULT_HEIGHT_OR_WIDTH;
- prefLayoutSize.width = DEFAULT_HEIGHT_OR_WIDTH;
- }
-
- return prefLayoutSize;
- }
-
- /**
- * Removes the given component from this layout.
- *
- * @param comp the component to remove from the layout
- */
- @Override
- public void removeLayoutComponent(Component comp)
- {
- super.removeLayoutComponent(comp);
-
- synchronized (constraints)
- {
- constraints.remove(comp);
- }
-
- if (local == comp)
- local = null;
- else if (closeButton == comp)
- closeButton = null;
- else if (canvas == comp)
- canvas = null;
- else
- remotes.remove(comp);
- }
-}
+package org.jitsi.util.swing;
+
+import java.awt.*;
+import java.util.*;
+import java.util.List;
+
+import javax.swing.*;
+
+/**
+ * Implements the LayoutManager which lays out the local and remote
+ * videos in a video Call.
+ *
+ * @author Lyubomir Marinov
+ * @author Yana Stamcheva
+ */
+public class VideoLayout
+ extends FitLayout
+{
+ /**
+ * The video canvas constraint.
+ */
+ public static final String CANVAS = "CANVAS";
+
+ /**
+ * The center remote video constraint.
+ */
+ public static final String CENTER_REMOTE = "CENTER_REMOTE";
+
+ /**
+ * The close local video constraint.
+ */
+ public static final String CLOSE_LOCAL_BUTTON = "CLOSE_LOCAL_BUTTON";
+
+ /**
+ * The east remote video constraint.
+ */
+ public static final String EAST_REMOTE = "EAST_REMOTE";
+
+ /**
+ * The horizontal gap between the Components being laid out by
+ * VideoLayout.
+ */
+ private static final int HGAP = 10;
+
+ /**
+ * The local video constraint.
+ */
+ public static final String LOCAL = "LOCAL";
+
+ /**
+ * The ration between the local and the remote video.
+ */
+ private static final float LOCAL_TO_REMOTE_RATIO = 0.30f;
+
+ /**
+ * The video canvas.
+ */
+ private Component canvas;
+
+ /**
+ * The close local video button component.
+ */
+ private Component closeButton;
+
+ /**
+ * The indicator which determines whether this instance is dedicated to a
+ * conference.
+ */
+ private final boolean conference;
+
+ /**
+ * The map of component constraints.
+ */
+ private final Map constraints
+ = new HashMap();
+
+ /**
+ * The component containing the local video.
+ */
+ private Component local;
+
+ /**
+ * The x coordinate alignment of the remote video.
+ */
+ private float remoteAlignmentX = Component.CENTER_ALIGNMENT;
+
+ /**
+ * The list of Components depicting remote videos.
+ */
+ private final List remotes = new LinkedList();
+
+ /**
+ * Creates an instance of VideoLayout by also indicating if this
+ * video layout is dedicated to a conference interface.
+ *
+ * @param conference true if the new instance will be dedicated to
+ * a conference; otherwise, false
+ */
+ public VideoLayout(boolean conference)
+ {
+ this.conference = conference;
+ }
+
+ /**
+ * Adds the given component to this layout at the position specified by
+ * name.
+ *
+ * @param name the constraint giving the position of the component in this
+ * layout
+ * @param comp the component to add
+ */
+ @Override
+ public void addLayoutComponent(String name, Component comp)
+ {
+ super.addLayoutComponent(name, comp);
+
+ synchronized (constraints)
+ {
+ constraints.put(comp, name);
+ }
+
+ if ((name == null) || name.equals(CENTER_REMOTE))
+ {
+ if (!remotes.contains(comp))
+ remotes.add(comp);
+ remoteAlignmentX = Component.CENTER_ALIGNMENT;
+ }
+ else if (name.equals(EAST_REMOTE))
+ {
+ if (!remotes.contains(comp))
+ remotes.add(comp);
+ remoteAlignmentX = Component.RIGHT_ALIGNMENT;
+ }
+ else if (name.equals(LOCAL))
+ local = comp;
+ else if (name.equals(CLOSE_LOCAL_BUTTON))
+ closeButton = comp;
+ else if (name.equals(CANVAS))
+ canvas = comp;
+ }
+
+ /**
+ * Determines whether the aspect ratio of a specific Dimension is
+ * to be considered equal to the aspect ratio of specific width and
+ * height.
+ *
+ * @param size the Dimension whose aspect ratio is to be compared
+ * to the aspect ratio of width and height
+ * @param width the width which defines in combination with height
+ * the aspect ratio to be compared to the aspect ratio of size
+ * @param height the height which defines in combination with width
+ * the aspect ratio to be compared to the aspect ratio of size
+ * @return true if the aspect ratio of size is to be
+ * considered equal to the aspect ratio of width and
+ * height; otherwise, false
+ */
+ public static boolean areAspectRatiosEqual(
+ Dimension size,
+ int width, int height)
+ {
+ if ((size.height == 0) || (height == 0))
+ return false;
+ else
+ {
+ double a = size.width / (double) size.height;
+ double b = width / (double) height;
+ double diff = a - b;
+
+ return (-0.01 < diff) && (diff < 0.01);
+ }
+ }
+
+ /**
+ * Determines how many columns to use for the grid display of specific remote
+ * visual/video Components.
+ *
+ * @param remotes the remote visual/video Components to be
+ * displayed in a grid
+ * @return the number of columns to use for the grid display of the
+ * specified remote visual/video Components
+ */
+ private int calculateColumnCount(List remotes)
+ {
+ int remoteCount = remotes.size();
+
+ if (remoteCount == 1)
+ return 1;
+ else if ((remoteCount == 2) || (remoteCount == 4))
+ return 2;
+ else
+ return 3;
+ }
+
+ /**
+ * Returns the remote video component.
+ *
+ * @return the remote video component
+ */
+ @Override
+ protected Component getComponent(Container parent)
+ {
+ return (remotes.size() == 1) ? remotes.get(0) : null;
+ }
+
+ /**
+ * Returns the constraints for the given component.
+ *
+ * @param c the component for which constraints we're looking for
+ * @return the constraints for the given component
+ */
+ public Object getComponentConstraints(Component c)
+ {
+ synchronized (constraints)
+ {
+ return constraints.get(c);
+ }
+ }
+
+ /**
+ * Returns the local video component.
+ *
+ * @return the local video component
+ */
+ public Component getLocal()
+ {
+ return local;
+ }
+
+ /**
+ * Returns the local video close button.
+ *
+ * @return the local video close button
+ */
+ public Component getLocalCloseButton()
+ {
+ return closeButton;
+ }
+
+ /**
+ * Lays out the specified Container (i.e. the Components
+ * it contains) in accord with the logic implemented by this
+ * LayoutManager.
+ *
+ * @param parent the Container to lay out
+ */
+ @Override
+ public void layoutContainer(Container parent)
+ {
+ /*
+ * XXX The methods layoutContainer and preferredLayoutSize must be kept
+ * in sync.
+ */
+
+ List visibleRemotes = new ArrayList();
+ List remotes;
+ Component local = getLocal();
+
+ for (int i = 0; i < this.remotes.size(); i++)
+ {
+ if (this.remotes.get(i).isVisible())
+ visibleRemotes.add(this.remotes.get(i));
+ }
+
+ /*
+ * When there are multiple remote visual/video Components, the local one
+ * will be displayed as if it is a remote one i.e. in the same grid, not
+ * on top of a remote one. The same layout will be used when this
+ * instance is dedicated to a telephony conference.
+ */
+ if (conference || ((visibleRemotes.size() > 1) && (local != null)))
+ {
+ remotes = new ArrayList();
+ remotes.addAll(visibleRemotes);
+ if (local != null)
+ remotes.add(local);
+ }
+ else
+ remotes = visibleRemotes;
+
+ int remoteCount = remotes.size();
+ Dimension parentSize = parent.getSize();
+
+ if (!conference && (remoteCount == 1))
+ {
+ /*
+ * If the videos are to be laid out as in a one-to-one call, the
+ * remote video has to fill the parent and the local video will be
+ * placed on top of the remote video. The remote video will be laid
+ * out now and the local video will be laid out later/further
+ * below.
+ */
+ super.layoutContainer(
+ parent,
+ (local == null)
+ ? Component.CENTER_ALIGNMENT
+ : remoteAlignmentX);
+ }
+ else if (remoteCount > 0)
+ {
+ int columns = calculateColumnCount(remotes);
+ int columnsMinus1 = columns - 1;
+ int rows = (remoteCount + columnsMinus1) / columns;
+ int rowsMinus1 = rows - 1;
+ Rectangle bounds
+ = new Rectangle(
+ 0,
+ 0,
+ /*
+ * HGAP is the horizontal gap between the Components
+ * being laid out by this VideoLayout so the number of
+ * HGAPs will be with one less than the number of
+ * columns and that horizontal space cannot be allocated
+ * to the bounds of the Components.
+ */
+ (parentSize.width - (columnsMinus1 * HGAP)) / columns,
+ parentSize.height / rows);
+
+ for (int i = 0; i < remoteCount; i++)
+ {
+ int column = i % columns;
+ int row = i / columns;
+
+ /*
+ * On the x axis, the first column starts at zero and each
+ * subsequent column starts relative to the end of its preceding
+ * column.
+ */
+ if (column == 0)
+ {
+ bounds.x = 0;
+ /*
+ * Eventually, there may be empty cells in the last row.
+ * Center the non-empty cells horizontally.
+ */
+ if (row == rowsMinus1)
+ {
+ int available = remoteCount - i;
+
+ if (available < columns)
+ {
+ bounds.x
+ = (parentSize.width
+ - available * bounds.width
+ - (available - 1) * HGAP)
+ / 2;
+ }
+ }
+ }
+ else
+ bounds.x += (bounds.width + HGAP);
+ bounds.y = row * bounds.height;
+
+ super.layoutComponent(
+ remotes.get(i),
+ bounds,
+ Component.CENTER_ALIGNMENT,
+ Component.CENTER_ALIGNMENT);
+ }
+ }
+
+ if (local == null)
+ {
+ /*
+ * It is plain wrong to display a close button for the local video
+ * if there is no local video.
+ */
+ if (closeButton != null)
+ closeButton.setVisible(false);
+ }
+ else
+ {
+ /*
+ * If the local visual/video Component is not displayed as if it is
+ * a remote one, it will be placed on top of a remote one.
+ */
+ if (!remotes.contains(local))
+ {
+ Component remote0 = remotes.isEmpty() ? null : remotes.get(0);
+ int localX;
+ int localY;
+ int height
+ = Math.round(parentSize.height * LOCAL_TO_REMOTE_RATIO);
+ int width
+ = Math.round(parentSize.width * LOCAL_TO_REMOTE_RATIO);
+ float alignmentX;
+
+ /*
+ * XXX The remote Component being a JLabel is meant to signal
+ * that there is no remote video and the remote is the
+ * photoLabel.
+ */
+ if ((remoteCount == 1) && (remote0 instanceof JLabel))
+ {
+ localX = (parentSize.width - width) / 2;
+ localY = parentSize.height - height;
+ alignmentX = Component.CENTER_ALIGNMENT;
+ }
+ else
+ {
+ localX = ((remote0 == null) ? 0 : remote0.getX()) + 5;
+ localY = parentSize.height - height - 5;
+ alignmentX = Component.LEFT_ALIGNMENT;
+ }
+ super.layoutComponent(
+ local,
+ new Rectangle(localX, localY, width, height),
+ alignmentX,
+ Component.BOTTOM_ALIGNMENT);
+ }
+
+ /* The closeButton has to be on top of the local video. */
+ if (closeButton != null)
+ {
+ /*
+ * XXX We may be overwriting the visible property set by our
+ * client (who has initialized the close button) but it is wrong
+ * to display a close button for the local video if the local
+ * video is not visible.
+ */
+ closeButton.setVisible(local.isVisible());
+
+ super.layoutComponent(
+ closeButton,
+ new Rectangle(
+ local.getX()
+ + local.getWidth()
+ - closeButton.getWidth(),
+ local.getY(),
+ closeButton.getWidth(),
+ closeButton.getHeight()),
+ Component.CENTER_ALIGNMENT,
+ Component.CENTER_ALIGNMENT);
+ }
+ }
+
+ /*
+ * The video canvas will get the locations of the other components to
+ * paint so it has to cover the parent completely.
+ */
+ if (canvas != null)
+ canvas.setBounds(0, 0, parentSize.width, parentSize.height);
+ }
+
+ /**
+ * Returns the preferred layout size for the given container.
+ *
+ * @param parent the container which preferred layout size we're looking for
+ * @return a Dimension containing, the preferred layout size for the given
+ * container
+ */
+ @Override
+ public Dimension preferredLayoutSize(Container parent)
+ {
+ List visibleRemotes = new ArrayList();
+ List remotes;
+ Component local = getLocal();
+
+ for (int i = 0; i < this.remotes.size(); i++)
+ {
+ if (this.remotes.get(i).isVisible())
+ visibleRemotes.add(this.remotes.get(i));
+ }
+
+ /*
+ * When there are multiple remote visual/video Components, the local one
+ * will be displayed as if it is a remote one i.e. in the same grid, not
+ * on top of a remote one. The same layout will be used when this
+ * instance is dedicated to a telephony conference.
+ */
+ if (conference || ((visibleRemotes.size() > 1) && (local != null)))
+ {
+ remotes = new ArrayList();
+ remotes.addAll(visibleRemotes);
+ if (local != null)
+ remotes.add(local);
+ }
+ else
+ remotes = visibleRemotes;
+
+ int remoteCount = remotes.size();
+ Dimension prefLayoutSize;
+
+ if (!conference && (remoteCount == 1))
+ {
+ /*
+ * If the videos are to be laid out as in a one-to-one call, the
+ * remote video has to fill the parent and the local video will be
+ * placed on top of the remote video. The remote video will be laid
+ * out now and the local video will be laid out later/further
+ * below.
+ */
+ prefLayoutSize = super.preferredLayoutSize(parent);
+ }
+ else if (remoteCount > 0)
+ {
+ int columns = calculateColumnCount(remotes);
+ int columnsMinus1 = columns - 1;
+ int rows = (remoteCount + columnsMinus1) / columns;
+ int i = 0;
+ Dimension[] prefSizes = new Dimension[columns * rows];
+
+ for (Component remote : remotes)
+ {
+ int column = columnsMinus1 - (i % columns);
+ int row = i / columns;
+
+ prefSizes[column + row * columns] = remote.getPreferredSize();
+
+ i++;
+ if (i >= remoteCount)
+ break;
+ }
+
+ int prefLayoutWidth = 0;
+
+ for (int column = 0; column < columns; column++)
+ {
+ int prefColumnWidth = 0;
+
+ for (int row = 0; row < rows; row++)
+ {
+ Dimension prefSize = prefSizes[column + row * columns];
+
+ if (prefSize != null)
+ prefColumnWidth += prefSize.width;
+ }
+ prefColumnWidth /= rows;
+
+ prefLayoutWidth += prefColumnWidth;
+ }
+
+ int prefLayoutHeight = 0;
+
+ for (int row = 0; row < rows; row++)
+ {
+ int prefRowHeight = 0;
+
+ for (int column = 0; column < columns; column++)
+ {
+ Dimension prefSize = prefSizes[column + row * columns];
+
+ if (prefSize != null)
+ prefRowHeight = prefSize.height;
+ }
+ prefRowHeight /= columns;
+
+ prefLayoutHeight += prefRowHeight;
+ }
+
+ prefLayoutSize
+ = new Dimension(
+ prefLayoutWidth + columnsMinus1 * HGAP,
+ prefLayoutHeight);
+ }
+ else
+ prefLayoutSize = null;
+
+ if (local != null)
+ {
+ /*
+ * If the local visual/video Component is not displayed as if it is
+ * a remote one, it will be placed on top of a remote one. Then for
+ * the purposes of the preferredLayoutSize method it needs to be
+ * considered only if there is no remote video whatsoever.
+ */
+ if (!remotes.contains(local) && (prefLayoutSize == null))
+ {
+ Dimension prefSize = local.getPreferredSize();
+
+ if (prefSize != null)
+ {
+ int prefHeight
+ = Math.round(prefSize.height * LOCAL_TO_REMOTE_RATIO);
+ int prefWidth
+ = Math.round(prefSize.width * LOCAL_TO_REMOTE_RATIO);
+
+ prefLayoutSize = new Dimension(prefWidth, prefHeight);
+ }
+ }
+
+ /*
+ * The closeButton has to be on top of the local video.
+ * Consequently, the preferredLayoutSize method does not have to
+ * consider it. Well, maybe it does if the local video is smaller
+ * than the closeButton... but that's just not cool anyway.
+ */
+ }
+
+ /*
+ * The video canvas will get the locations of the other components to
+ * paint so it has to cover the parent completely. In other words, the
+ * preferredLayoutSize method does not have to consider it.
+ */
+
+ if (prefLayoutSize == null)
+ prefLayoutSize = super.preferredLayoutSize(parent);
+ else if ((prefLayoutSize.height < 1) || (prefLayoutSize.width < 1))
+ {
+ prefLayoutSize.height = DEFAULT_HEIGHT_OR_WIDTH;
+ prefLayoutSize.width = DEFAULT_HEIGHT_OR_WIDTH;
+ }
+
+ return prefLayoutSize;
+ }
+
+ /**
+ * Removes the given component from this layout.
+ *
+ * @param comp the component to remove from the layout
+ */
+ @Override
+ public void removeLayoutComponent(Component comp)
+ {
+ super.removeLayoutComponent(comp);
+
+ synchronized (constraints)
+ {
+ constraints.remove(comp);
+ }
+
+ if (local == comp)
+ local = null;
+ else if (closeButton == comp)
+ closeButton = null;
+ else if (canvas == comp)
+ canvas = null;
+ else
+ remotes.remove(comp);
+ }
+}