
CPU usage is high when recording #25

Open · unnivm opened this issue Nov 28, 2015 · 0 comments

unnivm commented Nov 28, 2015

Hello Clarke,

When recording starts, it takes almost 100% CPU. Is there a way to reduce it? I have optimized my recording loop, but still no luck. I have also added support for audio recording.

Please find the code below:

public class ScreenRecordingExample1 {

    private static final double FRAME_RATE = 24;

    private static final int SECONDS_TO_RUN_FOR = 10;

    private static final String outputFilename = "e:/mydesktop122112015.mov";

    private static Dimension screenBounds;

    private static IMediaWriter writer = null;

    private static int VIDEO_WIDTH  = 0;

    private static int VIDEO_HEIGHT = 0;

    private static YouTube youtube;

    private static final String SAMPLE_VIDEO_FILENAME = "a.mov";

    private static final String VIDEO_FILE_FORMAT = "video/*";

    private static boolean videoRecord = true;

    //http://forums.winamp.com/showthread.php?t=364617

    public static void main(String[] args) {

        screenBounds = Toolkit.getDefaultToolkit().getScreenSize();
        screenBounds.width  = 820;  // the AVI format works at this resolution
        screenBounds.height = 718;

        VIDEO_WIDTH  = screenBounds.width;
        VIDEO_HEIGHT = screenBounds.height;

        // let's make an IMediaWriter to write the file.
        writer = ToolFactory.makeWriter(outputFilename);

        MediaListener ml = new MediaListener(1, 0);
        writer.addListener(ml);

        // We tell it we're going to add one video stream, with id 0,
        // at position 0, and that it will have a fixed frame rate of FRAME_RATE.
        writer.open();
        writer.addVideoStream(0, 0, VIDEO_WIDTH, VIDEO_HEIGHT);
        //writer.addVideoStream(0, 0, screenBounds.width, screenBounds.height);
        writer.addAudioStream(1, 0, 2, 8000);

        AudioThread at = new AudioThread();
        at.start();

        long startTime = System.nanoTime();
        for (int index = 0; index < SECONDS_TO_RUN_FOR * FRAME_RATE; index++) {
        //while (videoRecord) {
            BufferedImage screen = getDesktopScreenshot();
            BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);

            writer.encodeVideo(0, bgrScreen, System.nanoTime() - startTime, TimeUnit.NANOSECONDS);

            // sleep for one frame interval, in milliseconds
            try {
                Thread.sleep((long) (1000 / FRAME_RATE));
            } catch (InterruptedException e) {
            }
        }

        at.stopRecord();
        try {
            at.join();
            at.interrupt();
        } catch (InterruptedException e) {
            e.printStackTrace();
            Thread.currentThread().interrupt();
        }
        at = null;

        writer.close();
    }

private static class AudioThread extends Thread {

        private boolean recording = true;
        AudioFormat audioFormat;
        TargetDataLine targetDataLine;
        int channelCount;
        int sampleRate;

        public AudioThread() {

            audioFormat = getAudioFormat();
            DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);

            try {
                targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            } catch (LineUnavailableException e) {
                e.printStackTrace();
            }

            channelCount = targetDataLine.getFormat().getChannels();
            sampleRate = (int) targetDataLine.getFormat().getSampleRate();
        }

        @Override
        public void run() {

            try {
                targetDataLine.open(audioFormat, targetDataLine.getBufferSize());
            } catch (LineUnavailableException e) {
                e.printStackTrace();
            }
            targetDataLine.start();

            while (recording) {
                byte[] tempBuffer = new byte[targetDataLine.getBufferSize() / 2];
                int n = targetDataLine.read(tempBuffer, 0, tempBuffer.length);
                short[] audioSamples = new short[tempBuffer.length / 2];

                if (audioFormat.isBigEndian()) {
                    ByteBuffer.wrap(tempBuffer).order(ByteOrder.BIG_ENDIAN).asShortBuffer().get(audioSamples);
                } else {
                    ByteBuffer.wrap(tempBuffer).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(audioSamples);
                }

                writer.setMaskLateStreamExceptions(true);
                writer.encodeAudio(1, audioSamples);

                try {
                    // note: 1000 / 8000 is integer division, so this evaluates to 0 ms
                    Thread.sleep((long) (1000 / 8000));
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    Thread.currentThread().interrupt();
                }
            }
            System.out.println("recording finished.....");
        }

        public void stopRecord() {
            recording = false;
            targetDataLine.stop();
            targetDataLine.close();
        }

        private AudioFormat getAudioFormat() {
            float sampleRate = 8000;
            // 8000,11025,16000,22050,44100
            int sampleSizeInBits = 16;
            // 8,16
            int channels = 1;
            // 1,2
            boolean signed = true;
            // true,false
            boolean bigEndian = false;
            // true,false
            return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed,
                    bigEndian);
        }

    }


    static class MediaListener extends MediaToolAdapter {

        // the current offset
        private long mOffset = 0;

        // the next video timestamp
        private long mNextVideo = 0;

        // the next audio timestamp
        private long mNextAudio = 0;

        // the index of the audio stream
        private final int mAudioStreamIndex;

        // the index of the video stream
        private final int mVideoStreamIndex;

        private IVideoResampler videoResampler = null;
        private IAudioResampler audioResampler = null;

        /**
         * Create a media listener.
         *
         * @param audioStreamIndex index of the audio stream
         * @param videoStreamIndex index of the video stream
         */
        public MediaListener(int audioStreamIndex, int videoStreamIndex)
        {
            mAudioStreamIndex = audioStreamIndex;
            mVideoStreamIndex = videoStreamIndex;
        }

        @Override
        public void onAudioSamples(IAudioSamplesEvent event)
        {
            IAudioSamples samples = event.getAudioSamples();
            long newTimeStamp = samples.getTimeStamp() + mOffset;
            mNextAudio = samples.getNextPts();

            if (audioResampler == null) {
                audioResampler = IAudioResampler.make(2, samples.getChannels(), 44100, samples.getSampleRate());
            }

            if (event.getAudioSamples().getNumSamples() > 0) {
                IAudioSamples out = IAudioSamples.make(samples.getNumSamples(), samples.getChannels());
                audioResampler.resample(out, samples, samples.getNumSamples());

                AudioSamplesEvent asc = new AudioSamplesEvent(event.getSource(), out, event.getStreamIndex() + 1);
                super.onAudioSamples(asc);
                out.delete();

                // explicitly request two full GC passes for every audio buffer
                Runtime.getRuntime().gc();
                Runtime.getRuntime().gc();
            }

            // System.out.println(" processing audio.....");
        }

        public void onVideoPicture(IVideoPictureEvent event)
        {
            IVideoPicture picture = event.getMediaData();
            long originalTimeStamp = picture.getTimeStamp();
            long newTimeStamp = originalTimeStamp + mOffset;
            mNextVideo = originalTimeStamp + 1;

            picture.setTimeStamp(newTimeStamp);

            IVideoPicture pic = event.getPicture();
            if (videoResampler == null) {
                videoResampler = IVideoResampler.make(VIDEO_WIDTH, VIDEO_HEIGHT, pic.getPixelType(), pic.getWidth(), pic.getHeight(), pic.getPixelType());
            }
            pic.setKeyFrame(true);
            pic.setQuality(10);

            IVideoPicture out = IVideoPicture.make(pic.getPixelType(), VIDEO_WIDTH, VIDEO_HEIGHT);
            videoResampler.resample(out, pic);

            IVideoPictureEvent asc = new VideoPictureEvent(event.getSource(), out, event.getStreamIndex());
            super.onVideoPicture(asc);
            out.delete();

            // explicitly request two full GC passes for every video frame
            Runtime.getRuntime().gc();
            Runtime.getRuntime().gc();
        }

        public void onClose(ICloseEvent event)
        {
            mOffset = Math.max(mNextVideo, mNextAudio);

            if (mNextAudio < mNextVideo)
            {
            }
        }

        @Override
        public void onAddStream(IAddStreamEvent event)
        {
            System.out.println("adding stream...");
            // configure the stream coders before the event is passed down
            // the tool chain to the writer
            int streamIndex = event.getStreamIndex();
            IStreamCoder streamCoder = event.getSource().getContainer().getStream(streamIndex).getStreamCoder();
            if (streamCoder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {
                // streamCoder.setSampleRate(22000);
                streamCoder.setFlag(IStreamCoder.Flags.FLAG_QSCALE, false);
                streamCoder.setBitRate(320000);
                streamCoder.setBitRateTolerance(0);
            } else if (streamCoder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
                // streamCoder.setCodec(ICodec.ID.CODEC_ID_H264);
                streamCoder.setWidth(VIDEO_WIDTH);
                streamCoder.setHeight(VIDEO_HEIGHT);
            }
            super.onAddStream(event);
        }

        public void onOpen(IOpenEvent event)
        {
        }

        public void onOpenCoder(IOpenCoderEvent event)
        {
        }

        public void onCloseCoder(ICloseCoderEvent event)
        {
        }

    }
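
    // The two helper methods called from main() were not included in the
    // original post. The versions below are assumptions, sketched after the
    // standard Xuggler ScreenRecordingExample: grab the desktop with
    // java.awt.Robot and redraw the screenshot into a TYPE_3BYTE_BGR image
    // that the encoder accepts.

    private static BufferedImage getDesktopScreenshot() {
        try {
            Robot robot = new Robot();
            Rectangle captureSize = new Rectangle(screenBounds);
            return robot.createScreenCapture(captureSize);
        } catch (AWTException e) {
            e.printStackTrace();
            return null;
        }
    }

    private static BufferedImage convertToType(BufferedImage sourceImage, int targetType) {
        // already the right type, nothing to do
        if (sourceImage.getType() == targetType) {
            return sourceImage;
        }
        // otherwise redraw the source into a new image of the target type
        BufferedImage image = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), targetType);
        image.getGraphics().drawImage(sourceImage, 0, 0, null);
        return image;
    }
}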

Please give me some idea or clue so that I can work on it.
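
For reference, the capture loop above always sleeps a fixed 1000 / FRAME_RATE ms on top of the time the screenshot and encode already take. A frame-paced variant that only sleeps for whatever is left of each frame interval would look roughly like this (just a sketch, I have not measured whether it actually reduces the CPU usage):

    long frameIntervalNanos = (long) (1000000000L / FRAME_RATE);
    long startTime = System.nanoTime();
    for (int index = 0; index < SECONDS_TO_RUN_FOR * FRAME_RATE; index++) {
        long frameStart = System.nanoTime();

        BufferedImage screen = getDesktopScreenshot();
        BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);
        writer.encodeVideo(0, bgrScreen, System.nanoTime() - startTime, TimeUnit.NANOSECONDS);

        // sleep only for the remainder of this frame interval
        long remainingMillis = (frameIntervalNanos - (System.nanoTime() - frameStart)) / 1000000L;
        if (remainingMillis > 0) {
            try {
                Thread.sleep(remainingMillis);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
    }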

Thank you
Unni
