8236832: [macos 10.15] JavaFX Application hangs on video play on Cata… #126

Closed · wants to merge 1 commit

@@ -279,4 +279,8 @@ void ProcessAudioTap(MTAudioProcessingTapRef tapRef,
             return;
         }
     }
+
+    if (context->audioSpectrum != nullptr) {
+        context->audioSpectrum.get()->SetFirstBufferDelivered(true);
+    }
 }
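The hunk above is the audio tap's per-buffer processing callback; the added lines tell the spectrum unit that at least one audio buffer has now been delivered. A minimal sketch of that hand-off, not part of the patch, using invented stand-in types (SpectrumUnitStandIn, TapContextStandIn) in place of the real jfxmedia declarations:

// Hedged sketch only: stand-in types, not the jfxmedia sources.
#include <memory>

class SpectrumUnitStandIn {
public:
    void SetFirstBufferDelivered(bool delivered) { mFirstBufferDelivered = delivered; }
    bool IsFirstBufferDelivered() const { return mFirstBufferDelivered; }
private:
    bool mFirstBufferDelivered = false;
};

struct TapContextStandIn {
    std::shared_ptr<SpectrumUnitStandIn> audioSpectrum;  // may be empty
};

// Mirrors the added lines: once the tap has processed a buffer, flag the
// spectrum unit so later spectrum events can carry a real player timestamp.
static void markFirstBufferDelivered(TapContextStandIn* context) {
    if (context != nullptr && context->audioSpectrum != nullptr) {
        context->audioSpectrum->SetFirstBufferDelivered(true);
    }
}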
@@ -41,6 +41,7 @@ AVFAudioSpectrumUnit::AVFAudioSpectrumUnit() : mSpectrumCallbackProc(NULL),
                                                mMaxFrames(0),
                                                mSamplesPerInterval(0),
                                                mRebuildCrunch(true),
+                                               mFirstBufferDelivered(false),
                                                mSpectrumElement(NULL),
                                                mSpectrum(NULL) {
     mMixBuffer.mNumberBuffers = 1;
@@ -191,7 +192,8 @@ void AVFAudioSpectrumUnit::UpdateBands(int size, const float* magnitudes, const
     // Call our listener to dispatch the spectrum event
     if (mSpectrumCallbackProc) {
         double duration = (double) mSamplesPerInterval / (double) 44100;
-        mSpectrumCallbackProc(mSpectrumCallbackContext, duration);
+        double timestamp = mFirstBufferDelivered ? -1.0 : 0.0;
+        mSpectrumCallbackProc(mSpectrumCallbackContext, duration, timestamp);
     }

     unlockBands();
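In UpdateBands() above, the duration is still derived from the hard-coded 44100 Hz sample rate; what changes is that the callback now also receives a sentinel timestamp: 0.0 until the first audio buffer has been delivered, -1.0 afterwards. A minimal sketch of that selection, not part of the patch, pulled out as a free function for clarity:

// Hedged sketch: the sentinel choice from UpdateBands() above, isolated.
// -1.0 means "let the player substitute its current time"; 0.0 is reported
// until the first audio buffer has been delivered.
static double spectrumEventTimestamp(bool firstBufferDelivered) {
    return firstBufferDelivered ? -1.0 : 0.0;
}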
@@ -214,6 +216,10 @@ void AVFAudioSpectrumUnit::SetSpectrumCallbackProc(AVFSpectrumUnitCallbackProc p
     mSpectrumCallbackContext = context;
 }

+void AVFAudioSpectrumUnit::SetFirstBufferDelivered(bool isFirstBufferDelivered) {
+    mFirstBufferDelivered = isFirstBufferDelivered;
+}
+
 static gboolean PostMessageCallback(GstElement * element, GstMessage * message) {
     if (message == NULL) {
         return FALSE;
@@ -49,7 +49,8 @@
  * timeStamp: the beginning time in seconds of the sample period (from beginning of stream)
  * duration: the length of time in seconds of the sample period
  */
-typedef void (*AVFSpectrumUnitCallbackProc)(void *callbackContext, double duration);
+typedef void (*AVFSpectrumUnitCallbackProc)(void *callbackContext, double duration,
+                                            double timestamp);

 class AVFAudioSpectrumUnit : public CAudioSpectrum {
 public:
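The typedef above gains the timestamp parameter documented in the comment block. A sketch of a callback conforming to the widened signature, not part of the patch; ListenerStandIn and ExampleSpectrumCallback are illustrative names only:

// Hedged sketch: a function matching the widened AVFSpectrumUnitCallbackProc
// signature (void*, double, double). The listener type is invented.
#include <cstdio>

struct ListenerStandIn {
    double lastTimestamp;
};

static void ExampleSpectrumCallback(void* callbackContext, double duration, double timestamp) {
    ListenerStandIn* listener = static_cast<ListenerStandIn*>(callbackContext);
    // A negative timestamp is the sentinel meaning "the receiver should query
    // the player's current time"; see the AVFMediaPlayer hunk further down.
    if (listener != nullptr && timestamp >= 0.0) {
        listener->lastTimestamp = timestamp;
    }
    std::printf("spectrum: duration=%.3f s, timestamp=%.3f s\n", duration, timestamp);
}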
@@ -80,6 +81,7 @@ class AVFAudioSpectrumUnit : public CAudioSpectrum {
     void SetChannels(UInt32 count);
     void SetMaxFrames(UInt32 maxFrames);
     void SetSpectrumCallbackProc(AVFSpectrumUnitCallbackProc proc, void *context);
+    void SetFirstBufferDelivered(bool isFirstBufferDelivered);

 private:
     AVFSpectrumUnitCallbackProc mSpectrumCallbackProc;
@@ -102,6 +104,7 @@ class AVFAudioSpectrumUnit : public CAudioSpectrum {
     UInt32 mSamplesPerInterval;

     bool mRebuildCrunch;
+    bool mFirstBufferDelivered;

     // GStreamer
     GstElement *mSpectrumElement;
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -87,7 +87,7 @@ static void append_log(NSMutableString *s, NSString *fmt, ...) {

 @implementation AVFMediaPlayer

-static void SpectrumCallbackProc(void *context, double duration);
+static void SpectrumCallbackProc(void *context, double duration, double timestamp);

 static CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
                                     const CVTimeStamp *inNow,
@@ -651,19 +651,21 @@ - (void) sendPixelBuffer:(CVPixelBufferRef)buf frameTime:(double)frameTime hostT
     eventHandler->SendNewFrameEvent(frame);
 }

-- (void) sendSpectrumEventDuration:(double)duration {
+- (void) sendSpectrumEventDuration:(double)duration timestamp:(double)timestamp {
     if (eventHandler) {
-        double timestamp = self.currentTime;
+        if (timestamp < 0) {
+            timestamp = self.currentTime;
+        }
         eventHandler->SendAudioSpectrumEvent(timestamp, duration);
     }
 }

 @end

-static void SpectrumCallbackProc(void *context, double duration) {
+static void SpectrumCallbackProc(void *context, double duration, double timestamp) {
     if (context) {
         AVFMediaPlayer *player = (__bridge AVFMediaPlayer*)context;
-        [player sendSpectrumEventDuration:duration];
+        [player sendSpectrumEventDuration:duration timestamp:timestamp];
     }
 }
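On the player side above, a negative timestamp is replaced with self.currentTime, while the 0.0 reported for the very first buffer is passed through untouched, so the player clock is not queried before audio has started flowing, which appears to be the point of the fix for the Catalina hang named in the PR title. A sketch of that resolution step, not part of the patch; queryPlayerTime is an assumed stand-in for self.currentTime:

// Hedged sketch: the resolution performed in sendSpectrumEventDuration:timestamp:.
#include <functional>

static double resolveSpectrumTimestamp(double sentinel,
                                       const std::function<double()>& queryPlayerTime) {
    // Only a negative sentinel touches the player clock; the 0.0 used for the
    // first buffer is forwarded as-is.
    return (sentinel < 0) ? queryPlayerTime() : sentinel;
}

// Usage: resolveSpectrumTimestamp(0.0, clock)  -> 0.0     (first buffer; clock not queried)
//        resolveSpectrumTimestamp(-1.0, clock) -> clock() (steady state)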
