Expand Up @@ -152,8 +152,60 @@ class JUCE_API AudioPlayHead
bool drop = false, pulldown = false;
};

/** Describes a musical time signature.

    @see PositionInfo::getTimeSignature() PositionInfo::setTimeSignature()
*/
struct JUCE_API TimeSignature
{
    /** Time signature numerator, e.g. the 3 of a 3/4 time sig */
    int numerator = 4;

    /** Time signature denominator, e.g. the 4 of a 3/4 time sig */
    int denominator = 4;

    /** Returns true if both the numerator and denominator match. */
    bool operator== (const TimeSignature& other) const
    {
        return numerator == other.numerator
            && denominator == other.denominator;
    }

    /** Returns true if either the numerator or denominator differ. */
    bool operator!= (const TimeSignature& other) const
    {
        return ! (*this == other);
    }
};

/** Holds the begin and end points of a looped region.

    @see PositionInfo::getIsLooping() PositionInfo::setIsLooping() PositionInfo::getLoopPoints() PositionInfo::setLoopPoints()
*/
struct JUCE_API LoopPoints
{
    /** The current cycle start position in units of quarter-notes. */
    double ppqStart = 0;

    /** The current cycle end position in units of quarter-notes. */
    double ppqEnd = 0;

    /** Returns true if both loop endpoints compare exactly equal. */
    bool operator== (const LoopPoints& other) const
    {
        return ppqStart == other.ppqStart
            && ppqEnd == other.ppqEnd;
    }

    /** Returns true if either loop endpoint differs. */
    bool operator!= (const LoopPoints& other) const
    {
        return ! (*this == other);
    }
};

//==============================================================================
/** This type is deprecated; prefer PositionInfo instead.

    This structure is filled-in by the AudioPlayHead::getCurrentPosition() method.
    Some position info may be unavailable, depending on the host or plugin format.

    Unfortunately, CurrentPositionInfo doesn't have any way of differentiating between
    default values and values that have been set explicitly.
*/
struct JUCE_API CurrentPositionInfo
{
Expand All @@ -162,6 +214,7 @@ class JUCE_API AudioPlayHead

/** Time signature numerator, e.g. the 3 of a 3/4 time sig */
int timeSigNumerator = 4;

/** Time signature denominator, e.g. the 4 of a 3/4 time sig */
int timeSigDenominator = 4;

Expand Down Expand Up @@ -248,7 +301,208 @@ class JUCE_API AudioPlayHead
};

//==============================================================================
/**
Describes the time at the start of the current audio callback.
Not all hosts and plugin formats can provide all of the possible time
information, so most of the getter functions in this class return
an Optional that will only be engaged if the host provides the corresponding
information. As a plugin developer, you should code defensively so that
the plugin behaves sensibly even when the host fails to provide timing
information.
A default-constructed instance of this class will return nullopt from
all functions that return an Optional.
*/
class PositionInfo
{
public:
    /** Returns the number of samples that have elapsed. */
    Optional<int64_t> getTimeInSamples() const { return getOptional (flagTimeSamples, timeInSamples); }

    /** @see getTimeInSamples() */
    void setTimeInSamples (Optional<int64_t> timeInSamplesIn) { setOptional (flagTimeSamples, timeInSamples, timeInSamplesIn); }

    /** Returns the number of seconds that have elapsed. */
    Optional<double> getTimeInSeconds() const { return getOptional (flagTimeSeconds, timeInSeconds); }

    /** @see getTimeInSeconds() */
    void setTimeInSeconds (Optional<double> timeInSecondsIn) { setOptional (flagTimeSeconds, timeInSeconds, timeInSecondsIn); }

    /** Returns the bpm, if available. */
    Optional<double> getBpm() const { return getOptional (flagTempo, tempoBpm); }

    /** @see getBpm() */
    void setBpm (Optional<double> bpmIn) { setOptional (flagTempo, tempoBpm, bpmIn); }

    /** Returns the time signature, if available. */
    Optional<TimeSignature> getTimeSignature() const { return getOptional (flagTimeSignature, timeSignature); }

    /** @see getTimeSignature() */
    void setTimeSignature (Optional<TimeSignature> timeSignatureIn) { setOptional (flagTimeSignature, timeSignature, timeSignatureIn); }

    /** Returns host loop points, if available. */
    Optional<LoopPoints> getLoopPoints() const { return getOptional (flagLoopPoints, loopPoints); }

    /** @see getLoopPoints() */
    void setLoopPoints (Optional<LoopPoints> loopPointsIn) { setOptional (flagLoopPoints, loopPoints, loopPointsIn); }

    /** The number of bars since the beginning of the timeline.

        This value isn't available in all hosts or in all plugin formats.
    */
    Optional<int64_t> getBarCount() const { return getOptional (flagBarCount, barCount); }

    /** @see getBarCount() */
    void setBarCount (Optional<int64_t> barCountIn) { setOptional (flagBarCount, barCount, barCountIn); }

    /** The position of the start of the last bar, in units of quarter-notes.

        This is the time from the start of the timeline to the start of the current
        bar, in ppq units.

        Note - this value may be unavailable on some hosts, e.g. Pro-Tools.
    */
    Optional<double> getPpqPositionOfLastBarStart() const { return getOptional (flagLastBarStartPpq, lastBarStartPpq); }

    /** @see getPpqPositionOfLastBarStart() */
    void setPpqPositionOfLastBarStart (Optional<double> positionIn) { setOptional (flagLastBarStartPpq, lastBarStartPpq, positionIn); }

    /** The video frame rate, if available. */
    Optional<FrameRate> getFrameRate() const { return getOptional (flagFrameRate, frame); }

    /** @see getFrameRate() */
    void setFrameRate (Optional<FrameRate> frameRateIn) { setOptional (flagFrameRate, frame, frameRateIn); }

    /** The current play position, in units of quarter-notes. */
    Optional<double> getPpqPosition() const { return getOptional (flagPpqPosition, positionPpq); }

    /** @see getPpqPosition() */
    void setPpqPosition (Optional<double> ppqPositionIn) { setOptional (flagPpqPosition, positionPpq, ppqPositionIn); }

    /** For timecode, the position of the start of the timeline, in seconds from 00:00:00:00. */
    Optional<double> getEditOriginTime() const { return getOptional (flagOriginTime, originTime); }

    /** @see getEditOriginTime() */
    void setEditOriginTime (Optional<double> editOriginTimeIn) { setOptional (flagOriginTime, originTime, editOriginTimeIn); }

    /** Get the host's callback time in nanoseconds, if available. */
    Optional<uint64_t> getHostTimeNs() const { return getOptional (flagHostTimeNs, hostTimeNs); }

    /** @see getHostTimeNs() */
    void setHostTimeNs (Optional<uint64_t> hostTimeNsIn) { setOptional (flagHostTimeNs, hostTimeNs, hostTimeNsIn); }

    /** True if the transport is currently playing. */
    bool getIsPlaying() const { return getFlag (flagIsPlaying); }

    /** @see getIsPlaying() */
    void setIsPlaying (bool isPlayingIn) { setFlag (flagIsPlaying, isPlayingIn); }

    /** True if the transport is currently recording.

        (When isRecording is true, then isPlaying will also be true).
    */
    bool getIsRecording() const { return getFlag (flagIsRecording); }

    /** @see getIsRecording() */
    void setIsRecording (bool isRecordingIn) { setFlag (flagIsRecording, isRecordingIn); }

    /** True if the transport is currently looping. */
    bool getIsLooping() const { return getFlag (flagIsLooping); }

    /** @see getIsLooping() */
    void setIsLooping (bool isLoopingIn) { setFlag (flagIsLooping, isLoopingIn); }

    /** Compares all fields for equality, via the getters - so two instances are
        only equal when the same set of optional fields is engaged in both, and
        every engaged field holds the same value.
    */
    bool operator== (const PositionInfo& other) const noexcept
    {
        const auto tie = [] (const PositionInfo& i)
        {
            return std::make_tuple (i.getTimeInSamples(),
                                    i.getTimeInSeconds(),
                                    i.getPpqPosition(),
                                    i.getEditOriginTime(),
                                    i.getPpqPositionOfLastBarStart(),
                                    i.getFrameRate(),
                                    i.getBarCount(),
                                    i.getTimeSignature(),
                                    i.getBpm(),
                                    i.getLoopPoints(),
                                    i.getHostTimeNs(),
                                    i.getIsPlaying(),
                                    i.getIsRecording(),
                                    i.getIsLooping());
        };

        return tie (*this) == tie (other);
    }

    /** @see operator== */
    bool operator!= (const PositionInfo& other) const noexcept
    {
        return ! operator== (other);
    }

private:
    // Returns true if the given bit is set in `flags`, i.e. the matching
    // field has been given an explicit value.
    bool getFlag (int64_t flagToCheck) const
    {
        return (flagToCheck & flags) != 0;
    }

    // Sets or clears a single bit in `flags`.
    void setFlag (int64_t flagToCheck, bool value)
    {
        flags = (value ? flags | flagToCheck : flags & ~flagToCheck);
    }

    // Returns the stored field wrapped in an engaged Optional if its flag is
    // set, or nullopt otherwise.
    template <typename Value>
    Optional<Value> getOptional (int64_t flagToCheck, Value value) const
    {
        return getFlag (flagToCheck) ? makeOptional (std::move (value)) : nullopt;
    }

    // Stores the Optional's value (when engaged) and records its presence in
    // the flag bit. When `opt` is nullopt the old stored value is left in
    // place but the cleared flag makes it unobservable through the getter.
    template <typename Value>
    void setOptional (int64_t flagToCheck, Value& value, Optional<Value> opt)
    {
        if (opt.hasValue())
            value = *opt;

        setFlag (flagToCheck, opt.hasValue());
    }

    // Bit positions in `flags` recording which of the fields below hold
    // explicitly-set values.
    enum
    {
        flagTimeSignature   = 1 << 0,
        flagLoopPoints      = 1 << 1,
        flagFrameRate       = 1 << 2,
        flagTimeSeconds     = 1 << 3,
        flagLastBarStartPpq = 1 << 4,
        flagPpqPosition     = 1 << 5,
        flagOriginTime      = 1 << 6,
        flagTempo           = 1 << 7,
        flagTimeSamples     = 1 << 8,
        flagBarCount        = 1 << 9,
        flagHostTimeNs      = 1 << 10,
        flagIsPlaying       = 1 << 11,
        flagIsRecording     = 1 << 12,
        flagIsLooping       = 1 << 13
    };

    // Backing storage for every field; each value is only meaningful while
    // its corresponding bit in `flags` is set.
    TimeSignature timeSignature;
    LoopPoints loopPoints;
    FrameRate frame = FrameRateType::fps23976;
    double timeInSeconds = 0.0;
    double lastBarStartPpq = 0.0;
    double positionPpq = 0.0;
    double originTime = 0.0;
    double tempoBpm = 0.0;
    int64_t timeInSamples = 0;
    int64_t barCount = 0;
    uint64_t hostTimeNs = 0;
    int64_t flags = 0;
};

//==============================================================================
/** Deprecated, use getPosition() instead.
Fills-in the given structure with details about the transport's
position at the start of the current processing block. If this method returns
false then the current play head position is not available and the given
structure will be undefined.
Expand All @@ -258,7 +512,70 @@ class JUCE_API AudioPlayHead
in which a time would make sense, and some hosts will almost certainly have
multithreading issues if it's not called on the audio thread.
*/
virtual bool getCurrentPosition (CurrentPositionInfo& result) = 0;
[[deprecated ("Use getPosition instead. Not all hosts are able to provide all time position information; getPosition differentiates clearly between set and unset fields.")]]
bool getCurrentPosition (CurrentPositionInfo& result)
{
    // Fetch the modern optional-based report; bail out if the host
    // couldn't provide any timing information at all.
    const auto pos = getPosition();

    if (! pos.hasValue())
        return false;

    result.resetToDefault();

    // Copy across each field the host actually supplied, leaving the
    // remaining fields at their defaults.
    if (const auto signature = pos->getTimeSignature())
    {
        result.timeSigNumerator   = signature->numerator;
        result.timeSigDenominator = signature->denominator;
    }

    if (const auto loopRegion = pos->getLoopPoints())
    {
        result.ppqLoopStart = loopRegion->ppqStart;
        result.ppqLoopEnd   = loopRegion->ppqEnd;
    }

    if (const auto rate = pos->getFrameRate())
        result.frameRate = *rate;

    if (const auto seconds = pos->getTimeInSeconds())
        result.timeInSeconds = *seconds;

    if (const auto barStart = pos->getPpqPositionOfLastBarStart())
        result.ppqPositionOfLastBarStart = *barStart;

    if (const auto ppq = pos->getPpqPosition())
        result.ppqPosition = *ppq;

    if (const auto origin = pos->getEditOriginTime())
        result.editOriginTime = *origin;

    if (const auto tempo = pos->getBpm())
        result.bpm = *tempo;

    if (const auto samples = pos->getTimeInSamples())
        result.timeInSamples = *samples;

    // The transport booleans are always available in the new API.
    result.isPlaying   = pos->getIsPlaying();
    result.isRecording = pos->getIsRecording();
    result.isLooping   = pos->getIsLooping();

    return true;
}

/** Fetches details about the transport's position at the start of the current
processing block. If this method returns nullopt then the current play head
position is not available.
A non-null return value just indicates that the host was able to provide
*some* relevant timing information. Individual PositionInfo getters may
still return nullopt.
You can ONLY call this from your processBlock() method! Calling it at other
times will produce undefined behaviour, as the host may not have any context
in which a time would make sense, and some hosts will almost certainly have
multithreading issues if it's not called on the audio thread.
*/
virtual Optional<PositionInfo> getPosition() const = 0;

/** Returns true if this object can control the transport. */
virtual bool canControlTransport() { return false; }
Expand Down
Expand Up @@ -23,53 +23,6 @@
namespace juce
{

#ifndef DOXYGEN
/** The contents of this namespace are used to implement AudioBuffer and should
not be used elsewhere. Their interfaces (and existence) are liable to change!
*/
namespace detail
{
/** On iOS/arm7 the alignment of `double` is greater than the alignment of
`std::max_align_t`, so we can't trust max_align_t. Instead, we query
lots of primitive types and use the maximum alignment of all of them.
We're putting this stuff outside AudioBuffer itself to avoid creating
unnecessary copies for each distinct template instantiation of
AudioBuffer.
MSVC 2015 doesn't like when we write getMaxAlignment as a loop which
accumulates the max alignment (declarations not allowed in constexpr
function body) so instead we use this recursive version which
instantiates a zillion templates.
*/

template <typename> struct Type {};

constexpr size_t getMaxAlignment() noexcept { return 0; }

template <typename Head, typename... Tail>
constexpr size_t getMaxAlignment (Type<Head>, Type<Tail>... tail) noexcept
{
return jmax (alignof (Head), getMaxAlignment (tail...));
}

constexpr size_t maxAlignment = getMaxAlignment (Type<std::max_align_t>{},
Type<void*>{},
Type<float>{},
Type<double>{},
Type<long double>{},
Type<short int>{},
Type<int>{},
Type<long int>{},
Type<long long int>{},
Type<bool>{},
Type<char>{},
Type<char16_t>{},
Type<char32_t>{},
Type<wchar_t>{});
} // namespace detail
#endif

//==============================================================================
/**
A multi-channel buffer containing floating point audio samples.
Expand Down Expand Up @@ -1215,17 +1168,10 @@ class AudioBuffer

private:
//==============================================================================
int numChannels = 0, size = 0;
size_t allocatedBytes = 0;
Type** channels;
HeapBlock<char, true> allocatedData;
Type* preallocatedChannelSpace[32];
bool isClear = false;

void allocateData()
{
#if (! JUCE_GCC || (__GNUC__ * 100 + __GNUC_MINOR__) >= 409)
static_assert (alignof (Type) <= detail::maxAlignment,
static_assert (alignof (Type) <= maxAlignment,
"AudioBuffer cannot hold types with alignment requirements larger than that guaranteed by malloc");
#endif
jassert (size >= 0);
Expand Down Expand Up @@ -1278,6 +1224,43 @@ class AudioBuffer
isClear = false;
}

/* On iOS/arm7 the alignment of `double` is greater than the alignment of
   `std::max_align_t`, so we can't trust max_align_t. Instead, we query
   lots of primitive types and use the maximum alignment of all of them.
*/
static constexpr size_t getMaxAlignment() noexcept
{
    // Alignment requirements of all the primitive types we might store.
    constexpr size_t candidates[] { alignof (std::max_align_t),
                                    alignof (void*),
                                    alignof (float),
                                    alignof (double),
                                    alignof (long double),
                                    alignof (short int),
                                    alignof (int),
                                    alignof (long int),
                                    alignof (long long int),
                                    alignof (bool),
                                    alignof (char),
                                    alignof (char16_t),
                                    alignof (char32_t),
                                    alignof (wchar_t) };

    // Accumulate the largest of the candidate alignments.
    size_t result = 0;

    for (const auto candidate : candidates)
        if (result < candidate)
            result = candidate;

    return result;
}

int numChannels = 0, size = 0;
size_t allocatedBytes = 0;
Type** channels;
HeapBlock<char, true> allocatedData;
Type* preallocatedChannelSpace[32];
bool isClear = false;
static constexpr size_t maxAlignment = getMaxAlignment();

JUCE_LEAK_DETECTOR (AudioBuffer)
};

Expand Down
Expand Up @@ -31,8 +31,6 @@

#include "juce_audio_basics.h"

#include <juce_core/containers/juce_Optional.h>

#if JUCE_MINGW && ! defined (alloca)
#define alloca __builtin_alloca
#endif
Expand Down
Expand Up @@ -32,7 +32,7 @@
ID: juce_audio_basics
vendor: juce
version: 6.1.6
version: 7.0.1
name: JUCE audio and MIDI data classes
description: Classes for audio buffer manipulation, midi message handling, synthesis, etc.
website: http://www.juce.com/juce
Expand Down Expand Up @@ -120,4 +120,4 @@
#include "sources/juce_ReverbAudioSource.h"
#include "sources/juce_ToneGeneratorAudioSource.h"
#include "synthesisers/juce_Synthesiser.h"
#include "audio_play_head/juce_AudioPlayHead.h"
#include "audio_play_head/juce_AudioPlayHead.h"
Expand Up @@ -91,9 +91,9 @@ void BufferingAudioSource::releaseResources()

buffer.setSize (numberOfChannels, 0);

// MSVC2015 seems to need this if statement to not generate a warning during linking.
// MSVC2017 seems to need this if statement to not generate a warning during linking.
// As source is set in the constructor, there is no way that source could
// ever equal this, but it seems to make MSVC2015 happy.
// ever equal this, but it seems to make MSVC2017 happy.
if (source != this)
source->releaseResources();
}
Expand Down
Expand Up @@ -32,7 +32,7 @@
ID: juce_audio_devices
vendor: juce
version: 6.1.6
version: 7.0.1
name: JUCE audio and MIDI I/O device classes
description: Classes to play and record from audio and MIDI I/O devices
website: http://www.juce.com/juce
Expand Down
Expand Up @@ -153,12 +153,13 @@ namespace FlacNamespace
#include "flac/libFLAC/stream_encoder_framing.c"
#include "flac/libFLAC/window_flac.c"
#undef VERSION
#else
#include <FLAC/all.h>
#endif

JUCE_END_IGNORE_WARNINGS_GCC_LIKE
JUCE_END_IGNORE_WARNINGS_MSVC

#else
#include <FLAC/all.h>
#endif
}

#undef max
Expand Down
Expand Up @@ -172,8 +172,7 @@ ARAPlaybackRegionReader::ARAPlaybackRegionReader (double rate, int numChans,
// We're only providing the minimal set of meaningful values, since the ARA renderer should only
// look at the time position and the playing state, and read any related tempo or bar signature
// information from the ARA model directly (MusicalContext).
positionInfo.resetToDefault();
positionInfo.isPlaying = true;
positionInfo.setIsPlaying (true);

sampleRate = rate;
numChannels = (unsigned int) numChans;
Expand Down Expand Up @@ -252,16 +251,17 @@ bool ARAPlaybackRegionReader::readSamples (int** destSamples, int numDestChannel
{
success = true;
needClearSamples = false;
positionInfo.timeInSamples = startSampleInFile + startInSamples;
positionInfo.setTimeInSamples (startSampleInFile + startInSamples);

while (numSamples > 0)
{
const int numSliceSamples = jmin (numSamples, maximumBlockSize);
AudioBuffer<float> buffer ((float **) destSamples, numDestChannels, startOffsetInDestBuffer, numSliceSamples);
positionInfo.timeInSeconds = static_cast<double> (positionInfo.timeInSamples) / sampleRate;
positionInfo.setTimeInSeconds (static_cast<double> (*positionInfo.getTimeInSamples()) / sampleRate);
success &= playbackRenderer->processBlock (buffer, AudioProcessor::Realtime::no, positionInfo);
numSamples -= numSliceSamples;
startOffsetInDestBuffer += numSliceSamples;
positionInfo.timeInSamples += numSliceSamples;
positionInfo.setTimeInSamples (*positionInfo.getTimeInSamples() + numSliceSamples);
}
}
}
Expand Down
Expand Up @@ -183,7 +183,7 @@ class JUCE_API ARAPlaybackRegionReader : public AudioFormatReader,

private:
std::unique_ptr<ARAPlaybackRenderer> playbackRenderer;
AudioPlayHead::CurrentPositionInfo positionInfo;
AudioPlayHead::PositionInfo positionInfo;
ReadWriteLock lock;

static constexpr int maximumBlockSize = 4 * 1024;
Expand Down
Expand Up @@ -35,7 +35,7 @@
ID: juce_audio_formats
vendor: juce
version: 6.1.6
version: 7.0.1
name: JUCE audio file format codecs
description: Classes for reading and writing various audio file formats.
website: http://www.juce.com/juce
Expand Down
Expand Up @@ -1035,52 +1035,72 @@ namespace AAXClasses

AudioProcessor& getPluginInstance() const noexcept { return *pluginInstance; }

bool getCurrentPosition (juce::AudioPlayHead::CurrentPositionInfo& info) override
Optional<PositionInfo> getPosition() const override
{
PositionInfo info;

const AAX_ITransport& transport = *Transport();

info.bpm = 0.0;
check (transport.GetCurrentTempo (&info.bpm));
info.setBpm ([&]
{
double bpm = 0.0;

return transport.GetCurrentTempo (&bpm) == AAX_SUCCESS ? makeOptional (bpm) : nullopt;
}());

info.setTimeSignature ([&]
{
int32_t num = 4, den = 4;

int32_t num = 4, den = 4;
transport.GetCurrentMeter (&num, &den);
info.timeSigNumerator = (int) num;
info.timeSigDenominator = (int) den;
info.timeInSamples = 0;
return transport.GetCurrentMeter (&num, &den) == AAX_SUCCESS
? makeOptional (TimeSignature { (int) num, (int) den })
: nullopt;
}());

if (transport.IsTransportPlaying (&info.isPlaying) != AAX_SUCCESS)
info.isPlaying = false;
info.setIsPlaying ([&]
{
bool isPlaying = false;

if (info.isPlaying
|| transport.GetTimelineSelectionStartPosition (&info.timeInSamples) != AAX_SUCCESS)
check (transport.GetCurrentNativeSampleLocation (&info.timeInSamples));
return transport.IsTransportPlaying (&isPlaying) == AAX_SUCCESS && isPlaying;
}());

info.timeInSeconds = (float) info.timeInSamples / sampleRate;
info.setTimeInSamples ([&]
{
int64_t timeInSamples = 0;

int64_t ticks = 0;
return ((! info.getIsPlaying() && transport.GetTimelineSelectionStartPosition (&timeInSamples) == AAX_SUCCESS)
|| transport.GetCurrentNativeSampleLocation (&timeInSamples) == AAX_SUCCESS)
? makeOptional (timeInSamples)
: nullopt;
}());

if (info.isPlaying)
check (transport.GetCustomTickPosition (&ticks, info.timeInSamples));
else
check (transport.GetCurrentTickPosition (&ticks));
info.setTimeInSeconds ((float) info.getTimeInSamples().orFallback (0) / sampleRate);

info.ppqPosition = (double) ticks / 960000.0;
info.setPpqPosition ([&]
{
int64_t ticks = 0;

return ((info.getIsPlaying() && transport.GetCustomTickPosition (&ticks, info.getTimeInSamples().orFallback (0))) == AAX_SUCCESS)
|| transport.GetCurrentTickPosition (&ticks) == AAX_SUCCESS
? makeOptional (ticks / 960000.0)
: nullopt;
}());

info.isLooping = false;
bool isLooping = false;
int64_t loopStartTick = 0, loopEndTick = 0;
check (transport.GetCurrentLoopPosition (&info.isLooping, &loopStartTick, &loopEndTick));
info.ppqLoopStart = (double) loopStartTick / 960000.0;
info.ppqLoopEnd = (double) loopEndTick / 960000.0;

std::tie (info.frameRate, info.editOriginTime) = [&transport]
if (transport.GetCurrentLoopPosition (&isLooping, &loopStartTick, &loopEndTick) == AAX_SUCCESS)
{
AAX_EFrameRate frameRate;
int32_t offset;
info.setIsLooping (isLooping);
info.setLoopPoints (LoopPoints { (double) loopStartTick / 960000.0, (double) loopEndTick / 960000.0 });
}

if (transport.GetTimeCodeInfo (&frameRate, &offset) != AAX_SUCCESS)
return std::make_tuple (FrameRate(), 0.0);
AAX_EFrameRate frameRate;
int32_t offset;

const auto rate = [&]
if (transport.GetTimeCodeInfo (&frameRate, &offset) == AAX_SUCCESS)
{
info.setFrameRate ([&]() -> Optional<FrameRate>
{
switch ((JUCE_AAX_EFrameRate) frameRate)
{
Expand Down Expand Up @@ -1114,18 +1134,14 @@ namespace AAXClasses
case JUCE_AAX_eFrameRate_Undeclared: break;
}

return FrameRate();
}();

const auto effectiveRate = rate.getEffectiveRate();
return std::make_tuple (rate, effectiveRate != 0.0 ? offset / effectiveRate : 0.0);
}();
return {};
}());
}

// No way to get these: (?)
info.isRecording = false;
info.ppqPositionOfLastBarStart = 0;
const auto effectiveRate = info.getFrameRate().hasValue() ? info.getFrameRate()->getEffectiveRate() : 0.0;
info.setEditOriginTime (effectiveRate != 0.0 ? makeOptional (offset / effectiveRate) : nullopt);

return true;
return info;
}

void audioProcessorParameterChanged (AudioProcessor* /*processor*/, int parameterIndex, float newValue) override
Expand Down Expand Up @@ -1191,8 +1207,7 @@ namespace AAXClasses
if (data != nullptr && size == sizeof (AAX_EProcessingState))
{
const auto state = *static_cast<const AAX_EProcessingState*> (data);
const auto nonRealtime = state == AAX_eProcessingState_Start
|| state == AAX_eProcessingState_StartPass
const auto nonRealtime = state == AAX_eProcessingState_StartPass
|| state == AAX_eProcessingState_BeginPassGroup;
pluginInstance->setNonRealtime (nonRealtime);
}
Expand Down
Expand Up @@ -103,7 +103,6 @@
class JuceAU : public AudioProcessorHolder,
public MusicDeviceBase,
public AudioProcessorListener,
public AudioPlayHead,
public AudioProcessorParameter::Listener
{
public:
Expand Down Expand Up @@ -140,7 +139,6 @@
totalInChannels = juceFilter->getTotalNumInputChannels();
totalOutChannels = juceFilter->getTotalNumOutputChannels();

juceFilter->setPlayHead (this);
juceFilter->addListener (this);

addParameters();
Expand Down Expand Up @@ -1089,80 +1087,103 @@ Float64 GetLatency() override
return rate > 0 ? juceFilter->getLatencySamples() / rate : 0;
}

//==============================================================================
bool getCurrentPosition (AudioPlayHead::CurrentPositionInfo& info) override
class ScopedPlayHead : private AudioPlayHead
{
info.timeSigNumerator = 0;
info.timeSigDenominator = 0;
info.editOriginTime = 0;
info.ppqPositionOfLastBarStart = 0;
info.isRecording = false;
public:
explicit ScopedPlayHead (JuceAU& juceAudioUnit)
: audioUnit (juceAudioUnit)
{
audioUnit.juceFilter->setPlayHead (this);
}

~ScopedPlayHead() override
{
audioUnit.juceFilter->setPlayHead (nullptr);
}

info.frameRate = [this]
private:
Optional<PositionInfo> getPosition() const override
{
switch (lastTimeStamp.mSMPTETime.mType)
PositionInfo info;

info.setFrameRate ([this]() -> Optional<FrameRate>
{
switch (audioUnit.lastTimeStamp.mSMPTETime.mType)
{
case kSMPTETimeType2398: return FrameRate().withBaseRate (24).withPullDown();
case kSMPTETimeType24: return FrameRate().withBaseRate (24);
case kSMPTETimeType25: return FrameRate().withBaseRate (25);
case kSMPTETimeType30Drop: return FrameRate().withBaseRate (30).withDrop();
case kSMPTETimeType30: return FrameRate().withBaseRate (30);
case kSMPTETimeType2997: return FrameRate().withBaseRate (30).withPullDown();
case kSMPTETimeType2997Drop: return FrameRate().withBaseRate (30).withPullDown().withDrop();
case kSMPTETimeType60: return FrameRate().withBaseRate (60);
case kSMPTETimeType60Drop: return FrameRate().withBaseRate (60).withDrop();
case kSMPTETimeType5994: return FrameRate().withBaseRate (60).withPullDown();
case kSMPTETimeType5994Drop: return FrameRate().withBaseRate (60).withPullDown().withDrop();
case kSMPTETimeType50: return FrameRate().withBaseRate (50);
default: break;
}

return {};
}());

double ppqPosition = 0.0;
double bpm = 0.0;

if (audioUnit.CallHostBeatAndTempo (&ppqPosition, &bpm) == noErr)
{
case kSMPTETimeType2398: return FrameRate().withBaseRate (24).withPullDown();
case kSMPTETimeType24: return FrameRate().withBaseRate (24);
case kSMPTETimeType25: return FrameRate().withBaseRate (25);
case kSMPTETimeType30Drop: return FrameRate().withBaseRate (30).withDrop();
case kSMPTETimeType30: return FrameRate().withBaseRate (30);
case kSMPTETimeType2997: return FrameRate().withBaseRate (30).withPullDown();
case kSMPTETimeType2997Drop: return FrameRate().withBaseRate (30).withPullDown().withDrop();
case kSMPTETimeType60: return FrameRate().withBaseRate (60);
case kSMPTETimeType60Drop: return FrameRate().withBaseRate (60).withDrop();
case kSMPTETimeType5994: return FrameRate().withBaseRate (60).withPullDown();
case kSMPTETimeType5994Drop: return FrameRate().withBaseRate (60).withPullDown().withDrop();
case kSMPTETimeType50: return FrameRate().withBaseRate (50);
default: break;
info.setPpqPosition (ppqPosition);
info.setBpm (bpm);
}

return FrameRate();
}();
UInt32 outDeltaSampleOffsetToNextBeat;
double outCurrentMeasureDownBeat;
float num;
UInt32 den;

if (CallHostBeatAndTempo (&info.ppqPosition, &info.bpm) != noErr)
{
info.ppqPosition = 0;
info.bpm = 0;
}
if (audioUnit.CallHostMusicalTimeLocation (&outDeltaSampleOffsetToNextBeat,
&num,
&den,
&outCurrentMeasureDownBeat) == noErr)
{
info.setTimeSignature (TimeSignature { (int) num, (int) den });
info.setPpqPositionOfLastBarStart (outCurrentMeasureDownBeat);
}

UInt32 outDeltaSampleOffsetToNextBeat;
double outCurrentMeasureDownBeat;
float num;
UInt32 den;
double outCurrentSampleInTimeLine = 0, outCycleStartBeat = 0, outCycleEndBeat = 0;
Boolean playing = false, looping = false, playchanged;

if (CallHostMusicalTimeLocation (&outDeltaSampleOffsetToNextBeat, &num, &den,
&outCurrentMeasureDownBeat) == noErr)
{
info.timeSigNumerator = (int) num;
info.timeSigDenominator = (int) den;
info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
}
if (audioUnit.CallHostTransportState (&playing,
&playchanged,
&outCurrentSampleInTimeLine,
&looping,
&outCycleStartBeat,
&outCycleEndBeat) == noErr)
{
info.setIsPlaying (playing);
info.setTimeInSamples ((int64) (outCurrentSampleInTimeLine + 0.5));
info.setTimeInSeconds (*info.getTimeInSamples() / audioUnit.getSampleRate());
info.setIsLooping (looping);
info.setLoopPoints (LoopPoints { outCycleStartBeat, outCycleEndBeat });
}
else
{
// If the host doesn't support this callback, then use the sample time from lastTimeStamp:
outCurrentSampleInTimeLine = audioUnit.lastTimeStamp.mSampleTime;
}

double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
Boolean playing = false, looping = false, playchanged;
info.setHostTimeNs ((audioUnit.lastTimeStamp.mFlags & kAudioTimeStampHostTimeValid) != 0
? makeOptional (audioUnit.timeConversions.hostTimeToNanos (audioUnit.lastTimeStamp.mHostTime))
: nullopt);

if (CallHostTransportState (&playing,
&playchanged,
&outCurrentSampleInTimeLine,
&looping,
&outCycleStartBeat,
&outCycleEndBeat) != noErr)
{
// If the host doesn't support this callback, then use the sample time from lastTimeStamp:
outCurrentSampleInTimeLine = lastTimeStamp.mSampleTime;
return info;
}

info.isPlaying = playing;
info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
info.timeInSeconds = info.timeInSamples / getSampleRate();
info.isLooping = looping;
info.ppqLoopStart = outCycleStartBeat;
info.ppqLoopEnd = outCycleEndBeat;

return true;
}
JuceAU& audioUnit;
};

//==============================================================================
void sendAUEvent (const AudioUnitEventType type, const int juceParamIndex)
{
if (restoringState)
Expand Down Expand Up @@ -1306,22 +1327,6 @@ ComponentResult Render (AudioUnitRenderActionFlags& ioActionFlags,
{
lastTimeStamp = inTimeStamp;

jassert (! juceFilter->getHostTimeNs());

if ((inTimeStamp.mFlags & kAudioTimeStampHostTimeValid) != 0)
{
const auto timestamp = timeConversions.hostTimeToNanos (inTimeStamp.mHostTime);
juceFilter->setHostTimeNanos (&timestamp);
}

struct AtEndOfScope
{
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); }
AudioProcessor& proc;
};

const AtEndOfScope scope { *juceFilter };

// prepare buffers
{
pullInputAudio (ioActionFlags, inTimeStamp, nFrames);
Expand Down Expand Up @@ -1952,6 +1957,7 @@ void prepareOutputBuffers (const UInt32 nFrames) noexcept
void processBlock (juce::AudioBuffer<float>& buffer, MidiBuffer& midiBuffer) noexcept
{
const ScopedLock sl (juceFilter->getCallbackLock());
const ScopedPlayHead playhead { *this };

if (juceFilter->isSuspended())
{
Expand Down
Expand Up @@ -519,6 +519,7 @@ void reset() override
{
midiMessages.clear();
lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
lastTimeStamp.mFlags = 0;
}

//==============================================================================
Expand Down Expand Up @@ -852,7 +853,6 @@ bool allocateRenderResourcesAndReturnError (NSError **outError) override
midiMessages.ensureSize (2048);
midiMessages.clear();

zeromem (&lastAudioHead, sizeof (lastAudioHead));
hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
hostTransportStateCallback = [getAudioUnit() transportStateBlock];

Expand Down Expand Up @@ -1004,16 +1004,13 @@ void audioProcessorParameterChangeGestureEnd (AudioProcessor*, int idx) override
}

//==============================================================================
bool getCurrentPosition (CurrentPositionInfo& info) override
Optional<PositionInfo> getPosition() const override
{
bool musicContextCallSucceeded = false;
bool transportStateCallSucceeded = false;
PositionInfo info;
info.setTimeInSamples ((int64) (lastTimeStamp.mSampleTime + 0.5));
info.setTimeInSeconds (*info.getTimeInSamples() / getAudioProcessor().getSampleRate());

info = lastAudioHead;
info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();

info.frameRate = [this]
info.setFrameRate ([this]
{
switch (lastTimeStamp.mSMPTETime.mType)
{
Expand All @@ -1033,7 +1030,7 @@ bool getCurrentPosition (CurrentPositionInfo& info) override
}

return FrameRate();
}();
}());

double num;
NSInteger den;
Expand All @@ -1047,17 +1044,14 @@ bool getCurrentPosition (CurrentPositionInfo& info) override

if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
{
musicContextCallSucceeded = true;

info.timeSigNumerator = (int) num;
info.timeSigDenominator = (int) den;
info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
info.bpm = bpm;
info.ppqPosition = ppqPosition;
info.setTimeSignature (TimeSignature { (int) num, (int) den });
info.setPpqPositionOfLastBarStart (outCurrentMeasureDownBeat);
info.setBpm (bpm);
info.setPpqPosition (ppqPosition);
}
}

double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
double outCurrentSampleInTimeLine = 0, outCycleStartBeat = 0, outCycleEndBeat = 0;
AUHostTransportStateFlags flags;

if (hostTransportStateCallback != nullptr)
Expand All @@ -1066,22 +1060,19 @@ bool getCurrentPosition (CurrentPositionInfo& info) override

if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
{
transportStateCallSucceeded = true;

info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
info.isPlaying = ((flags & AUHostTransportStateMoving) != 0);
info.isLooping = ((flags & AUHostTransportStateCycling) != 0);
info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
info.ppqLoopStart = outCycleStartBeat;
info.ppqLoopEnd = outCycleEndBeat;
info.setTimeInSamples ((int64) (outCurrentSampleInTimeLine + 0.5));
info.setTimeInSeconds (*info.getTimeInSamples() / getAudioProcessor().getSampleRate());
info.setIsPlaying ((flags & AUHostTransportStateMoving) != 0);
info.setIsLooping ((flags & AUHostTransportStateCycling) != 0);
info.setIsRecording ((flags & AUHostTransportStateRecording) != 0);
info.setLoopPoints (LoopPoints { outCycleStartBeat, outCycleEndBeat });
}
}

if (musicContextCallSucceeded && transportStateCallSucceeded)
lastAudioHead = info;
if ((lastTimeStamp.mFlags & kAudioTimeStampHostTimeValid) != 0)
info.setHostTimeNs (timeConversions.hostTimeToNanos (lastTimeStamp.mHostTime));

return true;
return info;
}

//==============================================================================
Expand Down Expand Up @@ -1553,23 +1544,6 @@ AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const

const auto numProcessorBusesOut = AudioUnitHelpers::getBusCount (processor, false);

if (timestamp != nullptr)
{
if ((timestamp->mFlags & kAudioTimeStampHostTimeValid) != 0)
{
const auto convertedTime = timeConversions.hostTimeToNanos (timestamp->mHostTime);
getAudioProcessor().setHostTimeNanos (&convertedTime);
}
}

struct AtEndOfScope
{
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); }
AudioProcessor& proc;
};

const AtEndOfScope scope { getAudioProcessor() };

if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
{
// process params and incoming midi (only once for a given timestamp)
Expand Down Expand Up @@ -1854,7 +1828,6 @@ AUParameterAddress generateAUParameterAddress (AudioProcessorParameter* param) c
ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;

AudioTimeStamp lastTimeStamp;
CurrentPositionInfo lastAudioHead;

String contextName;

Expand Down
Expand Up @@ -118,9 +118,18 @@ class ParameterStorage : private AudioProcessorListener
processor.removeListener (this);
}

static String getUri (const AudioProcessorParameter& param)
/* This is the string that will be used to uniquely identify the parameter.
This string will be written into the plugin's manifest as an IRI, so it must be
syntactically valid.
We escape this string rather than writing the user-defined parameter ID directly to avoid
writing a malformed manifest in the case that user IDs contain spaces or other reserved
characters. This should allow users to keep the same param IDs for all plugin formats.
*/
static String getIri (const AudioProcessorParameter& param)
{
return LegacyAudioParameter::getParamID (&param, false);
return URL::addEscapeChars (LegacyAudioParameter::getParamID (&param, false), true);
}

void setValueFromHost (LV2_URID urid, float value) noexcept
Expand Down Expand Up @@ -208,7 +217,7 @@ class ParameterStorage : private AudioProcessorListener
{
jassert ((size_t) param->getParameterIndex() == result.size());

const auto uri = JucePlugin_LV2URI + String (uriSeparator) + getUri (*param);
const auto uri = JucePlugin_LV2URI + String (uriSeparator) + getIri (*param);
const auto urid = mapFeature.map (mapFeature.handle, uri.toRawUTF8());
result.push_back (urid);
}
Expand Down Expand Up @@ -263,15 +272,9 @@ class PlayHead : public AudioPlayHead
PlayHead (LV2_URID_Map mapFeatureIn, double sampleRateIn)
: parser (mapFeatureIn), sampleRate (sampleRateIn)
{
info.frameRate = fpsUnknown;
info.isLooping = false;
info.isRecording = false;
info.ppqLoopEnd = 0;
info.ppqLoopStart = 0;
info.ppqPositionOfLastBarStart = 0;
}

void invalidate() { valid = false; }
void invalidate() { info = nullopt; }

void readNewInfo (const LV2_Atom_Event* event)
{
Expand All @@ -285,13 +288,15 @@ class PlayHead : public AudioPlayHead

const LV2_Atom* atomFrame = nullptr;
const LV2_Atom* atomSpeed = nullptr;
const LV2_Atom* atomBar = nullptr;
const LV2_Atom* atomBeat = nullptr;
const LV2_Atom* atomBeatUnit = nullptr;
const LV2_Atom* atomBeatsPerBar = nullptr;
const LV2_Atom* atomBeatsPerMinute = nullptr;

LV2_Atom_Object_Query query[] { { mLV2_TIME__frame, &atomFrame },
{ mLV2_TIME__speed, &atomSpeed },
{ mLV2_TIME__bar, &atomBar },
{ mLV2_TIME__beat, &atomBeat },
{ mLV2_TIME__beatUnit, &atomBeatUnit },
{ mLV2_TIME__beatsPerBar, &atomBeatsPerBar },
Expand All @@ -300,37 +305,38 @@ class PlayHead : public AudioPlayHead

lv2_atom_object_query (object, query);

// Stores the transport position as a sample (frame) count and also derives the
// equivalent position in seconds from the current sample rate.
const auto setTimeInFrames = [&] (int64_t value)
{
    info.timeInSamples = value;
    info.timeInSeconds = (double) info.timeInSamples / sampleRate;
};
info.emplace();

// Carla always seems to give us an integral 'beat' even though I'd expect
// it to be a floating-point value

if ( lv2_shared::withValue (parser.parseNumericAtom<float> (atomBeatsPerMinute), [&] (float value) { info.bpm = value; })
&& lv2_shared::withValue (parser.parseNumericAtom<float> (atomBeatsPerBar), [&] (float value) { info.timeSigNumerator = (int) value; })
&& lv2_shared::withValue (parser.parseNumericAtom<int32_t> (atomBeatUnit), [&] (int32_t value) { info.timeSigDenominator = value; })
&& lv2_shared::withValue (parser.parseNumericAtom<double> (atomBeat), [&] (double value) { info.ppqPosition = value; })
&& lv2_shared::withValue (parser.parseNumericAtom<float> (atomSpeed), [&] (float value) { info.isPlaying = value != 0.0f; })
&& lv2_shared::withValue (parser.parseNumericAtom<int64_t> (atomFrame), setTimeInFrames))
const auto numerator = parser.parseNumericAtom<float> (atomBeatsPerBar);
const auto denominator = parser.parseNumericAtom<int32_t> (atomBeatUnit);

if (numerator.hasValue() && denominator.hasValue())
info->setTimeSignature (TimeSignature { (int) *numerator, (int) *denominator });

info->setBpm (parser.parseNumericAtom<float> (atomBeatsPerMinute));
info->setPpqPosition (parser.parseNumericAtom<double> (atomBeat));
info->setIsPlaying (parser.parseNumericAtom<float> (atomSpeed).orFallback (0.0f) != 0.0f);
info->setBarCount (parser.parseNumericAtom<int64_t> (atomBar));

if (const auto parsed = parser.parseNumericAtom<int64_t> (atomFrame))
{
valid = true;
info->setTimeInSamples (*parsed);
info->setTimeInSeconds ((double) *parsed / sampleRate);
}
}

bool getCurrentPosition (CurrentPositionInfo& result) override
Optional<PositionInfo> getPosition() const override
{
result = info;
return valid;
return info;
}

private:
lv2_shared::NumericAtomParser parser;
CurrentPositionInfo info;
Optional<PositionInfo> info;
double sampleRate;
bool valid = false;

#define X(str) const LV2_URID m##str = parser.map (str);
X (LV2_ATOM__Blank)
Expand All @@ -342,6 +348,7 @@ class PlayHead : public AudioPlayHead
X (LV2_TIME__beatsPerMinute)
X (LV2_TIME__frame)
X (LV2_TIME__speed)
X (LV2_TIME__bar)
#undef X

JUCE_LEAK_DETECTOR (PlayHead)
Expand Down Expand Up @@ -994,7 +1001,7 @@ struct RecallFeature
const auto parameterVisitor = [&] (const String& symbol,
const AudioProcessorParameter& param)
{
os << "plug:" << ParameterStorage::getUri (param) << "\n"
os << "plug:" << ParameterStorage::getIri (param) << "\n"
"\ta lv2:Parameter ;\n"
"\trdfs:label \"" << param.getName (1024) << "\" ;\n";

Expand Down Expand Up @@ -1154,7 +1161,7 @@ struct RecallFeature

for (const auto* param : legacyParameters)
{
os << (isFirst ? "" : " ,") << "\n\t\tplug:" << ParameterStorage::getUri (*param);
os << (isFirst ? "" : " ,") << "\n\t\tplug:" << ParameterStorage::getIri (*param);
isFirst = false;
}

Expand Down
Expand Up @@ -25,7 +25,6 @@

#include <juce_core/system/juce_CompilerWarnings.h>
#include <juce_core/system/juce_TargetPlatform.h>
#include <juce_core/containers/juce_Optional.h>
#include "../utility/juce_CheckSettingMacros.h"

#if JucePlugin_Build_VST
Expand Down Expand Up @@ -388,14 +387,6 @@ class JuceVSTWrapper : public AudioProcessorListener,
{
updateCallbackContextInfo();

struct AtEndOfScope
{
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); }
AudioProcessor& proc;
};

const AtEndOfScope scope { *processor };

int i;
for (i = 0; i < numOut; ++i)
{
Expand Down Expand Up @@ -618,30 +609,19 @@ class JuceVSTWrapper : public AudioProcessorListener,
}

auto& info = currentPosition.emplace();
info.bpm = (ti->flags & Vst2::vstTimingInfoFlagTempoValid) != 0 ? ti->tempoBPM : 0.0;
info.setBpm ((ti->flags & Vst2::vstTimingInfoFlagTempoValid) != 0 ? makeOptional (ti->tempoBPM) : nullopt);

if ((ti->flags & Vst2::vstTimingInfoFlagTimeSignatureValid) != 0)
{
info.timeSigNumerator = ti->timeSignatureNumerator;
info.timeSigDenominator = ti->timeSignatureDenominator;
}
else
{
info.timeSigNumerator = 4;
info.timeSigDenominator = 4;
}
info.setTimeSignature ((ti->flags & Vst2::vstTimingInfoFlagTimeSignatureValid) != 0 ? makeOptional (TimeSignature { ti->timeSignatureNumerator, ti->timeSignatureDenominator })
: nullopt);

info.timeInSamples = (int64) (ti->samplePosition + 0.5);
info.timeInSeconds = ti->samplePosition / ti->sampleRate;
info.ppqPosition = (ti->flags & Vst2::vstTimingInfoFlagMusicalPositionValid) != 0 ? ti->musicalPosition : 0.0;
info.ppqPositionOfLastBarStart = (ti->flags & Vst2::vstTimingInfoFlagLastBarPositionValid) != 0 ? ti->lastBarPosition : 0.0;
info.setTimeInSamples ((int64) (ti->samplePosition + 0.5));
info.setTimeInSeconds (ti->samplePosition / ti->sampleRate);
info.setPpqPosition ((ti->flags & Vst2::vstTimingInfoFlagMusicalPositionValid) != 0 ? makeOptional (ti->musicalPosition) : nullopt);
info.setPpqPositionOfLastBarStart ((ti->flags & Vst2::vstTimingInfoFlagLastBarPositionValid) != 0 ? makeOptional (ti->lastBarPosition) : nullopt);

std::tie (info.frameRate, info.editOriginTime) = [ti]
if ((ti->flags & Vst2::vstTimingInfoFlagSmpteValid) != 0)
{
if ((ti->flags & Vst2::vstTimingInfoFlagSmpteValid) == 0)
return std::make_tuple (FrameRate(), 0.0);

const auto rate = [&]
info.setFrameRate ([&]() -> Optional<FrameRate>
{
switch (ti->smpteRate)
{
Expand All @@ -663,43 +643,27 @@ class JuceVSTWrapper : public AudioProcessorListener,
case Vst2::vstSmpteRate35mmFilm: return FrameRate().withBaseRate (24);
}

return FrameRate();
}();
return nullopt;
}());

const auto effectiveRate = rate.getEffectiveRate();
return std::make_tuple (rate, effectiveRate != 0.0 ? ti->smpteOffset / (80.0 * effectiveRate) : 0.0);
}();
const auto effectiveRate = info.getFrameRate().hasValue() ? info.getFrameRate()->getEffectiveRate() : 0.0;
info.setEditOriginTime (effectiveRate != 0.0 ? makeOptional (ti->smpteOffset / (80.0 * effectiveRate)) : nullopt);
}

info.isRecording = (ti->flags & Vst2::vstTimingInfoFlagCurrentlyRecording) != 0;
info.isPlaying = (ti->flags & (Vst2::vstTimingInfoFlagCurrentlyRecording | Vst2::vstTimingInfoFlagCurrentlyPlaying)) != 0;
info.isLooping = (ti->flags & Vst2::vstTimingInfoFlagLoopActive) != 0;
info.setIsRecording ((ti->flags & Vst2::vstTimingInfoFlagCurrentlyRecording) != 0);
info.setIsPlaying ((ti->flags & (Vst2::vstTimingInfoFlagCurrentlyRecording | Vst2::vstTimingInfoFlagCurrentlyPlaying)) != 0);
info.setIsLooping ((ti->flags & Vst2::vstTimingInfoFlagLoopActive) != 0);

if ((ti->flags & Vst2::vstTimingInfoFlagLoopPositionValid) != 0)
{
info.ppqLoopStart = ti->loopStartPosition;
info.ppqLoopEnd = ti->loopEndPosition;
}
else
{
info.ppqLoopStart = 0;
info.ppqLoopEnd = 0;
}
info.setLoopPoints ((ti->flags & Vst2::vstTimingInfoFlagLoopPositionValid) != 0 ? makeOptional (LoopPoints { ti->loopStartPosition, ti->loopEndPosition })
: nullopt);

if ((ti->flags & Vst2::vstTimingInfoFlagNanosecondsValid) != 0)
{
const auto nanos = (uint64_t) ti->systemTimeNanoseconds;
processor->setHostTimeNanos (&nanos);
}
info.setHostTimeNs ((ti->flags & Vst2::vstTimingInfoFlagNanosecondsValid) != 0 ? makeOptional ((uint64_t) ti->systemTimeNanoseconds) : nullopt);
}

//==============================================================================
bool getCurrentPosition (AudioPlayHead::CurrentPositionInfo& info) override
Optional<PositionInfo> getPosition() const override
{
if (! currentPosition.hasValue())
return false;

info = *currentPosition;
return true;
return currentPosition;
}

//==============================================================================
Expand Down Expand Up @@ -2139,7 +2103,7 @@ class JuceVSTWrapper : public AudioProcessorListener,
Vst2::VstEditorBounds editorRect;
MidiBuffer midiEvents;
VSTMidiEventList outgoingEvents;
Optional<CurrentPositionInfo> currentPosition;
Optional<PositionInfo> currentPosition;

LegacyAudioParametersWrapper juceParameters;

Expand Down
Expand Up @@ -2867,34 +2867,50 @@ class JuceVST3Component : public Vst::IComponent,
Steinberg::int32 channel, Vst::UnitID& unitId) override { return comPluginInstance->getUnitByBus (type, dir, busIndex, channel, unitId); }

//==============================================================================
bool getCurrentPosition (CurrentPositionInfo& info) override
Optional<PositionInfo> getPosition() const override
{
info.timeInSamples = jmax ((juce::int64) 0, processContext.projectTimeSamples);
info.timeInSeconds = static_cast<double> (info.timeInSamples) / processContext.sampleRate;
info.bpm = jmax (1.0, processContext.tempo);
info.timeSigNumerator = jmax (1, (int) processContext.timeSigNumerator);
info.timeSigDenominator = jmax (1, (int) processContext.timeSigDenominator);
info.ppqPositionOfLastBarStart = processContext.barPositionMusic;
info.ppqPosition = processContext.projectTimeMusic;
info.ppqLoopStart = processContext.cycleStartMusic;
info.ppqLoopEnd = processContext.cycleEndMusic;
info.isRecording = (processContext.state & Vst::ProcessContext::kRecording) != 0;
info.isPlaying = (processContext.state & Vst::ProcessContext::kPlaying) != 0;
info.isLooping = (processContext.state & Vst::ProcessContext::kCycleActive) != 0;
PositionInfo info;
info.setTimeInSamples (jmax ((juce::int64) 0, processContext.projectTimeSamples));
info.setTimeInSeconds (static_cast<double> (*info.getTimeInSamples()) / processContext.sampleRate);
info.setIsRecording ((processContext.state & Vst::ProcessContext::kRecording) != 0);
info.setIsPlaying ((processContext.state & Vst::ProcessContext::kPlaying) != 0);
info.setIsLooping ((processContext.state & Vst::ProcessContext::kCycleActive) != 0);

info.frameRate = [&]
{
if ((processContext.state & Vst::ProcessContext::kSmpteValid) == 0)
return FrameRate();
info.setBpm ((processContext.state & Vst::ProcessContext::kTempoValid) != 0
? makeOptional (processContext.tempo)
: nullopt);

return FrameRate().withBaseRate ((int) processContext.frameRate.framesPerSecond)
.withDrop ((processContext.frameRate.flags & Vst::FrameRate::kDropRate) != 0)
.withPullDown ((processContext.frameRate.flags & Vst::FrameRate::kPullDownRate) != 0);
}();
info.setTimeSignature ((processContext.state & Vst::ProcessContext::kTimeSigValid) != 0
? makeOptional (TimeSignature { processContext.timeSigNumerator, processContext.timeSigDenominator })
: nullopt);

info.editOriginTime = (double) processContext.smpteOffsetSubframes / (80.0 * info.frameRate.getEffectiveRate());
info.setLoopPoints ((processContext.state & Vst::ProcessContext::kCycleValid) != 0
? makeOptional (LoopPoints { processContext.cycleStartMusic, processContext.cycleEndMusic })
: nullopt);

return true;
info.setPpqPosition ((processContext.state & Vst::ProcessContext::kProjectTimeMusicValid) != 0
? makeOptional (processContext.projectTimeMusic)
: nullopt);

info.setPpqPositionOfLastBarStart ((processContext.state & Vst::ProcessContext::kBarPositionValid) != 0
? makeOptional (processContext.barPositionMusic)
: nullopt);

info.setFrameRate ((processContext.state & Vst::ProcessContext::kSmpteValid) != 0
? makeOptional (FrameRate().withBaseRate ((int) processContext.frameRate.framesPerSecond)
.withDrop ((processContext.frameRate.flags & Vst::FrameRate::kDropRate) != 0)
.withPullDown ((processContext.frameRate.flags & Vst::FrameRate::kPullDownRate) != 0))
: nullopt);

info.setEditOriginTime (info.getFrameRate().hasValue()
? makeOptional ((double) processContext.smpteOffsetSubframes / (80.0 * info.getFrameRate()->getEffectiveRate()))
: nullopt);

info.setHostTimeNs ((processContext.state & Vst::ProcessContext::kSystemTimeValid) != 0
? makeOptional ((uint64_t) processContext.systemTime)
: nullopt);

return info;
}

//==============================================================================
Expand Down Expand Up @@ -3066,33 +3082,82 @@ class JuceVST3Component : public Vst::IComponent,

if (type == Vst::kAudio)
{
if (index < 0 || index >= getNumAudioBuses (dir == Vst::kInput))
const auto numInputBuses = getNumAudioBuses (true);
const auto numOutputBuses = getNumAudioBuses (false);

if (! isPositiveAndBelow (index, dir == Vst::kInput ? numInputBuses : numOutputBuses))
return kResultFalse;

// Some hosts (old cakewalk, bitwig studio) might call this function without
// deactivating the plugin, so we update the channel mapping here.
if (dir == Vst::BusDirections::kInput)
bufferMapper.setInputBusActive ((size_t) index, state != 0);
// The host is allowed to enable/disable buses as it sees fit, so the plugin needs to be
// able to handle any set of enabled/disabled buses, including layouts for which
// AudioProcessor::isBusesLayoutSupported would return false.
// Our strategy is to keep track of the layout that the host last requested, and to
// attempt to apply that layout directly.
// If the layout isn't supported by the processor, we'll try enabling all the buses
// instead.
// If the host enables a bus that the processor refused to enable, then we'll ignore
// that bus (and return silence for output buses). If the host disables a bus that the
// processor refuses to disable, the wrapper will provide the processor with silence for
// input buses, and ignore the contents of output buses.
// Note that some hosts (old bitwig and cakewalk) may incorrectly call this function
// when the plugin is in an activated state.
if (dir == Vst::kInput)
bufferMapper.setInputBusHostActive ((size_t) index, state != 0);
else
bufferMapper.setOutputBusActive ((size_t) index, state != 0);
bufferMapper.setOutputBusHostActive ((size_t) index, state != 0);

if (auto* bus = pluginInstance->getBus (dir == Vst::kInput, index))
AudioProcessor::BusesLayout desiredLayout;

for (auto i = 0; i < numInputBuses; ++i)
desiredLayout.inputBuses.add (bufferMapper.getRequestedLayoutForInputBus ((size_t) i));

for (auto i = 0; i < numOutputBuses; ++i)
desiredLayout.outputBuses.add (bufferMapper.getRequestedLayoutForOutputBus ((size_t) i));

const auto prev = pluginInstance->getBusesLayout();

const auto busesLayoutSupported = [&]
{
#ifdef JucePlugin_PreferredChannelConfigurations
auto newLayout = pluginInstance->getBusesLayout();
auto targetLayout = (state != 0 ? bus->getLastEnabledLayout() : AudioChannelSet::disabled());
// A simple (input count, output count) channel pair that supports equality
// comparison, used to match a requested layout against the preferred configs.
struct ChannelPair
{
    short ins, outs;

    // Bundles both counts into a tuple so callers can reuse tuple comparisons.
    auto tie() const { return std::tie (ins, outs); }

    // Two pairs are equal when both the input and the output counts agree.
    bool operator== (ChannelPair x) const { return ins == x.ins && outs == x.outs; }
};

short configs[][2] = { JucePlugin_PreferredChannelConfigurations };
auto compLayout = pluginInstance->getNextBestLayoutInLayoutList (newLayout, configs);
// Sums the channel counts (set.size()) of every channel set in the given bus range.
const auto countChannels = [] (auto& range)
{
    return std::accumulate (range.begin(), range.end(), 0, [] (auto acc, auto set)
    {
        return acc + set.size();
    });
};

if ((dir == Vst::kInput ? compLayout.inputBuses : compLayout.outputBuses).getReference (index) != targetLayout)
return kResultFalse;
// Narrows an int to short, asserting in debug builds that the value is
// non-negative and representable in a short.
const auto toShort = [] (int x)
{
    jassert (0 <= x && x <= std::numeric_limits<short>::max());
    return (short) x;
};

const ChannelPair requested { toShort (countChannels (desiredLayout.inputBuses)),
toShort (countChannels (desiredLayout.outputBuses)) };
const ChannelPair configs[] = { JucePlugin_PreferredChannelConfigurations };
return std::find (std::begin (configs), std::end (configs), requested) != std::end (configs);
#else
return pluginInstance->checkBusesLayoutSupported (desiredLayout);
#endif
}();

return bus->enable (state != 0) ? kResultTrue : kResultFalse;
}
if (busesLayoutSupported)
pluginInstance->setBusesLayout (desiredLayout);
else
pluginInstance->enableAllBuses();

bufferMapper.updateActiveClientBuses (pluginInstance->getBusesLayout());

return kResultTrue;
}

return kResultFalse;
Expand Down Expand Up @@ -3152,7 +3217,11 @@ class JuceVST3Component : public Vst::IComponent,
return kResultFalse;
#endif

return pluginInstance->setBusesLayoutWithoutEnabling (requested) ? kResultTrue : kResultFalse;
if (! pluginInstance->setBusesLayoutWithoutEnabling (requested))
return kResultFalse;

bufferMapper.updateFromProcessor (*pluginInstance);
return kResultTrue;
}

tresult PLUGIN_API getBusArrangement (Vst::BusDirection dir, Steinberg::int32 index, Vst::SpeakerArrangement& arr) override
Expand Down Expand Up @@ -3286,12 +3355,6 @@ class JuceVST3Component : public Vst::IComponent,
{
processContext = *data.processContext;

if ((processContext.state & Vst::ProcessContext::kSystemTimeValid) != 0)
{
const auto timestamp = (uint64_t) processContext.systemTime;
getPluginInstance().setHostTimeNanos (&timestamp);
}

if (juceVST3EditController != nullptr)
juceVST3EditController->vst3IsPlaying = (processContext.state & Vst::ProcessContext::kPlaying) != 0;
}
Expand All @@ -3303,14 +3366,6 @@ class JuceVST3Component : public Vst::IComponent,
juceVST3EditController->vst3IsPlaying = false;
}

struct AtEndOfScope
{
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); }
AudioProcessor& proc;
};

const AtEndOfScope scope { getPluginInstance() };

midiBuffer.clear();

if (data.inputParameterChanges != nullptr)
Expand Down Expand Up @@ -3412,7 +3467,9 @@ class JuceVST3Component : public Vst::IComponent,
template <typename FloatType>
void processAudio (Vst::ProcessData& data)
{
auto buffer = bufferMapper.getJuceLayoutForVst3Buffer (detail::Tag<FloatType>{}, data);
ClientRemappedBuffer<FloatType> remappedBuffer { bufferMapper, data };
auto& buffer = remappedBuffer.buffer;

jassert ((int) buffer.getNumChannels() == jmax (pluginInstance->getTotalNumInputChannels(),
pluginInstance->getTotalNumOutputChannels()));

Expand Down Expand Up @@ -3487,7 +3544,8 @@ class JuceVST3Component : public Vst::IComponent,
midiBuffer.ensureSize (2048);
midiBuffer.clear();

bufferMapper.prepare (p, bufferSize);
bufferMapper.updateFromProcessor (p);
bufferMapper.prepare (bufferSize);
}

//==============================================================================
Expand Down Expand Up @@ -3593,7 +3651,7 @@ DEF_CLASS_IID (JuceAudioProcessor)
// Defined in PluginUtilities.cpp
void getUUIDForVST2ID (bool, uint8[16]);

FUID getFUIDForVST2ID (bool forControllerUID)
static FUID getFUIDForVST2ID (bool forControllerUID)
{
TUID uuid;
getUUIDForVST2ID (forControllerUID, (uint8*) uuid);
Expand Down
Expand Up @@ -35,7 +35,7 @@
ID: juce_audio_plugin_client
vendor: juce
version: 6.1.6
version: 7.0.1
name: JUCE audio plugin wrapper classes
description: Classes for building VST, VST3, AU, AUv3 and AAX plugins.
website: http://www.juce.com/juce
Expand Down
Expand Up @@ -47,6 +47,7 @@ namespace juce
#define VST3_REPLACEMENT_AVAILABLE 1

// NB: Nasty old-fashioned code in here because it's copied from the Steinberg example code.
void getUUIDForVST2ID (bool forControllerUID, uint8 uuid[16]);
void getUUIDForVST2ID (bool forControllerUID, uint8 uuid[16])
{
#if JUCE_MSVC
Expand Down
Expand Up @@ -52,7 +52,7 @@
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
Expand All @@ -79,6 +79,8 @@
#pragma once
#ifndef DOXYGEN
#include <vector>
namespace juce
Expand Down Expand Up @@ -109,7 +111,8 @@
{}
}};
}}
"""
#endif"""


def chunks(lst, n):
Expand Down Expand Up @@ -149,5 +152,6 @@ def filter_ttl_files(lv2_dir):

print(FUNCTION_TEMPLATE.format(", ".join(generate_bundle_source(root, files)
for root, files in filter_ttl_files(args.lv2_dir)
if len(files) != 0)),
if len(files) != 0))
.replace("\t", " "),
end = "\r\n")
Expand Up @@ -102,17 +102,26 @@ struct ConversionFunctions
};

//==============================================================================
/** This class is used by the various ARA model object helper classes, such as MusicalContext,
AudioSource etc. It helps with deregistering the model objects from the DocumentController
when the lifetime of the helper class object ends.
You shouldn't use this class directly but instead inherit from the helper classes.
*/
template <typename Base, typename PtrIn>
class ManagedARAHandle
{
public:
using Ptr = PtrIn;

/** Constructor. */
ManagedARAHandle (ARA::Host::DocumentController& dc, Ptr ptr) noexcept
: handle (ptr, Deleter { dc }) {}

/** Returns the host side DocumentController reference. */
auto& getDocumentController() const { return handle.get_deleter().documentController; }

/** Returns the plugin side reference to the model object. */
Ptr getPluginRef() const { return handle.get(); }

private:
Expand Down Expand Up @@ -268,12 +277,34 @@ class AudioModification : public ManagedARAHandle<AudioModification, ARA::ARAAud
AudioSource& source;
};

/** This class is used internally by PlaybackRegionRegistry to be notified when a PlaybackRegion
object is deleted.
*/
struct DeletionListener
{
    /** Destructor. */
    virtual ~DeletionListener() = default;

    /** Removes another DeletionListener object from this DeletionListener.
        NOTE(review): implementations presumably drop any stored reference to `other`
        so it can be destroyed safely — confirm against the registry implementation.
    */
    virtual void removeListener (DeletionListener& other) noexcept = 0;
};

/** Helper class for the host side implementation of the %ARA %PlaybackRegion model object.
Its intended use is to add a member variable of this type to your host side %PlaybackRegion
implementation. Then it provides a RAII approach to managing the lifetime of the corresponding
objects created inside the DocumentController. When the host side object is instantiated an ARA
model object is also created in the DocumentController. When the host side object is deleted it
will be removed from the DocumentController as well.
The class will automatically put the DocumentController into editable state for operations that
mandate this e.g. creation, deletion or updating.
You can encapsulate multiple such operations into a scope with an ARAEditGuard in order to invoke
the editable state of the DocumentController only once.
@tags{ARA}
*/
struct PlaybackRegion
{
public:
Expand Down
Expand Up @@ -23,6 +23,8 @@
==============================================================================
*/

#ifndef DOXYGEN

// This macro can be set if you need to override this internal name for some reason..
#ifndef JUCE_STATE_DICTIONARY_KEY
#define JUCE_STATE_DICTIONARY_KEY "jucePluginState"
Expand All @@ -31,8 +33,6 @@
namespace juce
{

#ifndef DOXYGEN

struct AudioUnitHelpers
{
class ChannelRemapper
Expand Down Expand Up @@ -560,6 +560,6 @@ struct AudioUnitHelpers
}
};

#endif

} // namespace juce

#endif
Expand Up @@ -1385,7 +1385,10 @@ void resetBuses()

void processAudio (AudioBuffer<float>& buffer, MidiBuffer& midiMessages, bool processBlockBypassedCalled)
{
if (const auto* hostTimeNs = getHostTimeNs())
auto* playhead = getPlayHead();
const auto position = playhead != nullptr ? playhead->getPosition() : nullopt;

if (const auto hostTimeNs = position.hasValue() ? position->getHostTimeNs() : nullopt)
{
timeStamp.mHostTime = *hostTimeNs;
timeStamp.mFlags |= kAudioTimeStampHostTimeValid;
Expand Down Expand Up @@ -2298,12 +2301,10 @@ OSStatus getBeatAndTempo (Float64* outCurrentBeat, Float64* outCurrentTempo) con
{
if (auto* ph = getPlayHead())
{
AudioPlayHead::CurrentPositionInfo result;

if (ph->getCurrentPosition (result))
if (const auto pos = ph->getPosition())
{
setIfNotNull (outCurrentBeat, result.ppqPosition);
setIfNotNull (outCurrentTempo, result.bpm);
setIfNotNull (outCurrentBeat, pos->getPpqPosition().orFallback (0.0));
setIfNotNull (outCurrentTempo, pos->getBpm().orFallback (0.0));
return noErr;
}
}
Expand All @@ -2318,14 +2319,13 @@ OSStatus getMusicalTimeLocation (UInt32* outDeltaSampleOffsetToNextBeat, Float32
{
if (auto* ph = getPlayHead())
{
AudioPlayHead::CurrentPositionInfo result;

if (ph->getCurrentPosition (result))
if (const auto pos = ph->getPosition())
{
const auto signature = pos->getTimeSignature().orFallback (AudioPlayHead::TimeSignature{});
setIfNotNull (outDeltaSampleOffsetToNextBeat, (UInt32) 0); //xxx
setIfNotNull (outTimeSig_Numerator, (UInt32) result.timeSigNumerator);
setIfNotNull (outTimeSig_Denominator, (UInt32) result.timeSigDenominator);
setIfNotNull (outCurrentMeasureDownBeat, result.ppqPositionOfLastBarStart); //xxx wrong
setIfNotNull (outTimeSig_Numerator, (UInt32) signature.numerator);
setIfNotNull (outTimeSig_Denominator, (UInt32) signature.denominator);
setIfNotNull (outCurrentMeasureDownBeat, pos->getPpqPositionOfLastBarStart().orFallback (0.0)); //xxx wrong
return noErr;
}
}
Expand Down
Expand Up @@ -25,8 +25,9 @@

#pragma once

#ifndef DOXYGEN

#include "juce_lv2_config.h"
#include "juce_core/containers/juce_Optional.h"

#ifdef Bool
#undef Bool // previously defined in X11/Xlib.h
Expand Down Expand Up @@ -135,16 +136,6 @@ struct ObjectTraits { static constexpr auto construct = lv2_atom_forge_object;
using SequenceFrame = ScopedFrame<SequenceTraits>;
using ObjectFrame = ScopedFrame<ObjectTraits>;

/*  Invokes the callback with the contained value when the optional is engaged.
    Returns true if the callback ran, false when the optional was empty.
*/
template <typename Value, typename Callback>
bool withValue (const Optional<Value>& opt, Callback&& callback)
{
    if (opt.hasValue())
    {
        callback (*opt);
        return true;
    }

    return false;
}

struct NumericAtomParser
{
explicit NumericAtomParser (LV2_URID_Map mapFeatureIn)
Expand Down Expand Up @@ -626,3 +617,5 @@ static inline std::vector<ParsedGroup> findStableBusOrder (const String& mainGro

}
}

#endif

Large diffs are not rendered by default.

Expand Up @@ -29,6 +29,8 @@

#pragma once

#ifndef DOXYGEN

#include <vector>

namespace juce
Expand Down Expand Up @@ -10229,3 +10231,5 @@ to an instance of LV2_Extension_Data_Feature.

};
}

#endif

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Expand Up @@ -2120,7 +2120,7 @@ struct VSTPluginInstance final : public AudioPluginInstance,
if (effect != nullptr && effect->interfaceIdentifier == Vst2::juceVstInterfaceIdentifier)
{
jassert (effect->hostSpace2 == 0);
jassert (effect->effectPointer != 0);
jassert (effect->effectPointer != nullptr);

_fpreset(); // some dodgy plugs mess around with this
}
Expand Down Expand Up @@ -2274,6 +2274,20 @@ struct VSTPluginInstance final : public AudioPluginInstance,
return { nullptr, nullptr };
}

/*  Copies an optional value into the given host-time field and keeps the
    matching validity bit in vstHostTime.flags in sync: the flag is set when a
    value is present and cleared (leaving the target untouched) when it is not.
*/
template <typename Member, typename Value>
void setFromOptional (Member& target, Optional<Value> opt, int32_t flag)
{
    if (! opt.hasValue())
    {
        vstHostTime.flags &= ~flag;
        return;
    }

    target = static_cast<Member> (*opt);
    vstHostTime.flags |= flag;
}

//==============================================================================
template <typename FloatType>
void processAudio (AudioBuffer<FloatType>& buffer, MidiBuffer& midiMessages,
Expand All @@ -2299,69 +2313,76 @@ struct VSTPluginInstance final : public AudioPluginInstance,
{
if (auto* currentPlayHead = getPlayHead())
{
AudioPlayHead::CurrentPositionInfo position;

if (currentPlayHead->getCurrentPosition (position))
if (const auto position = currentPlayHead->getPosition())
{
vstHostTime.samplePosition = (double) position.timeInSamples;
vstHostTime.tempoBPM = position.bpm;
vstHostTime.timeSignatureNumerator = position.timeSigNumerator;
vstHostTime.timeSignatureDenominator = position.timeSigDenominator;
vstHostTime.musicalPosition = position.ppqPosition;
vstHostTime.lastBarPosition = position.ppqPositionOfLastBarStart;
vstHostTime.flags |= Vst2::vstTimingInfoFlagTempoValid
| Vst2::vstTimingInfoFlagTimeSignatureValid
| Vst2::vstTimingInfoFlagMusicalPositionValid
| Vst2::vstTimingInfoFlagLastBarPositionValid;

if (const auto* hostTimeNs = getHostTimeNs())
if (const auto samplePos = position->getTimeInSamples())
vstHostTime.samplePosition = (double) *samplePos;
else
jassertfalse; // VST hosts *must* call setTimeInSamples on the audio playhead

if (auto sig = position->getTimeSignature())
{
vstHostTime.systemTimeNanoseconds = (double) *hostTimeNs;
vstHostTime.flags |= Vst2::vstTimingInfoFlagNanosecondsValid;
vstHostTime.flags |= Vst2::vstTimingInfoFlagTimeSignatureValid;
vstHostTime.timeSignatureNumerator = sig->numerator;
vstHostTime.timeSignatureDenominator = sig->denominator;
}
else
{
vstHostTime.flags &= ~Vst2::vstTimingInfoFlagNanosecondsValid;
vstHostTime.flags &= ~Vst2::vstTimingInfoFlagTimeSignatureValid;
}

setFromOptional (vstHostTime.musicalPosition, position->getPpqPosition(), Vst2::vstTimingInfoFlagMusicalPositionValid);
setFromOptional (vstHostTime.lastBarPosition, position->getPpqPositionOfLastBarStart(), Vst2::vstTimingInfoFlagLastBarPositionValid);
setFromOptional (vstHostTime.systemTimeNanoseconds, position->getHostTimeNs(), Vst2::vstTimingInfoFlagNanosecondsValid);
setFromOptional (vstHostTime.tempoBPM, position->getBpm(), Vst2::vstTimingInfoFlagTempoValid);

int32 newTransportFlags = 0;
if (position.isPlaying) newTransportFlags |= Vst2::vstTimingInfoFlagCurrentlyPlaying;
if (position.isRecording) newTransportFlags |= Vst2::vstTimingInfoFlagCurrentlyRecording;
if (position->getIsPlaying()) newTransportFlags |= Vst2::vstTimingInfoFlagCurrentlyPlaying;
if (position->getIsRecording()) newTransportFlags |= Vst2::vstTimingInfoFlagCurrentlyRecording;

if (newTransportFlags != (vstHostTime.flags & (Vst2::vstTimingInfoFlagCurrentlyPlaying
| Vst2::vstTimingInfoFlagCurrentlyRecording)))
vstHostTime.flags = (vstHostTime.flags & ~(Vst2::vstTimingInfoFlagCurrentlyPlaying | Vst2::vstTimingInfoFlagCurrentlyRecording)) | newTransportFlags | Vst2::vstTimingInfoFlagTransportChanged;
else
vstHostTime.flags &= ~Vst2::vstTimingInfoFlagTransportChanged;

const auto optionalFrameRate = [&fr = position.frameRate]() -> Optional<int32>
const auto optionalFrameRate = [fr = position->getFrameRate()]() -> Optional<int32>
{
switch (fr.getBaseRate())
if (! fr.hasValue())
return {};

switch (fr->getBaseRate())
{
case 24: return fr.isPullDown() ? Vst2::vstSmpteRateFps239 : Vst2::vstSmpteRateFps24;
case 25: return fr.isPullDown() ? Vst2::vstSmpteRateFps249 : Vst2::vstSmpteRateFps25;
case 30: return fr.isPullDown() ? (fr.isDrop() ? Vst2::vstSmpteRateFps2997drop : Vst2::vstSmpteRateFps2997)
: (fr.isDrop() ? Vst2::vstSmpteRateFps30drop : Vst2::vstSmpteRateFps30);
case 60: return fr.isPullDown() ? Vst2::vstSmpteRateFps599 : Vst2::vstSmpteRateFps60;
case 24: return fr->isPullDown() ? Vst2::vstSmpteRateFps239 : Vst2::vstSmpteRateFps24;
case 25: return fr->isPullDown() ? Vst2::vstSmpteRateFps249 : Vst2::vstSmpteRateFps25;
case 30: return fr->isPullDown() ? (fr->isDrop() ? Vst2::vstSmpteRateFps2997drop : Vst2::vstSmpteRateFps2997)
: (fr->isDrop() ? Vst2::vstSmpteRateFps30drop : Vst2::vstSmpteRateFps30);
case 60: return fr->isPullDown() ? Vst2::vstSmpteRateFps599 : Vst2::vstSmpteRateFps60;
}

return {};
}();

vstHostTime.flags |= optionalFrameRate ? Vst2::vstTimingInfoFlagSmpteValid : 0;
vstHostTime.smpteRate = optionalFrameRate.orFallback (0);
vstHostTime.smpteOffset = (int32) (position.timeInSeconds * 80.0 * position.frameRate.getEffectiveRate() + 0.5);
const auto effectiveRate = position->getFrameRate().hasValue() ? position->getFrameRate()->getEffectiveRate() : 0.0;
vstHostTime.smpteOffset = (int32) (position->getTimeInSeconds().orFallback (0.0) * 80.0 * effectiveRate + 0.5);

if (position.isLooping)
if (const auto loop = position->getLoopPoints())
{
vstHostTime.loopStartPosition = position.ppqLoopStart;
vstHostTime.loopEndPosition = position.ppqLoopEnd;
vstHostTime.flags |= (Vst2::vstTimingInfoFlagLoopPositionValid | Vst2::vstTimingInfoFlagLoopActive);
vstHostTime.flags |= Vst2::vstTimingInfoFlagLoopPositionValid;
vstHostTime.loopStartPosition = loop->ppqStart;
vstHostTime.loopEndPosition = loop->ppqEnd;
}
else
{
vstHostTime.flags &= ~(Vst2::vstTimingInfoFlagLoopPositionValid | Vst2::vstTimingInfoFlagLoopActive);
vstHostTime.flags &= ~Vst2::vstTimingInfoFlagLoopPositionValid;
}

if (position->getIsLooping())
vstHostTime.flags |= Vst2::vstTimingInfoFlagLoopActive;
else
vstHostTime.flags &= ~Vst2::vstTimingInfoFlagLoopActive;
}
}

Expand Down
Expand Up @@ -42,7 +42,6 @@

#include "juce_audio_processors.h"
#include <juce_gui_extra/juce_gui_extra.h>
#include <juce_core/containers/juce_Optional.h>

//==============================================================================
#if (JUCE_PLUGINHOST_VST || JUCE_PLUGINHOST_VST3) && (JUCE_LINUX || JUCE_BSD) && ! JUCE_AUDIOPROCESSOR_NO_GUI
Expand Down Expand Up @@ -157,21 +156,19 @@ struct NSViewComponentWithParent : public NSViewComponent,
}
}

struct FlippedNSView : public ObjCClass<NSView>
struct InnerNSView : public ObjCClass<NSView>
{
FlippedNSView()
: ObjCClass ("JuceFlippedNSView_")
InnerNSView()
: ObjCClass ("JuceInnerNSView_")
{
addIvar<NSViewComponentWithParent*> ("owner");

addMethod (@selector (isFlipped), isFlipped);
addMethod (@selector (isOpaque), isOpaque);
addMethod (@selector (didAddSubview:), didAddSubview);

registerClass();
}

static BOOL isFlipped (id, SEL) { return YES; }
static BOOL isOpaque (id, SEL) { return YES; }

static void nudge (id self)
Expand All @@ -181,15 +178,12 @@ struct NSViewComponentWithParent : public NSViewComponent,
owner->triggerAsyncUpdate();
}

static void viewDidUnhide (id self, SEL) { nudge (self); }
static void didAddSubview (id self, SEL, NSView*) { nudge (self); }
static void viewDidMoveToSuperview (id self, SEL) { nudge (self); }
static void viewDidMoveToWindow (id self, SEL) { nudge (self); }
};

static FlippedNSView& getViewClass()
static InnerNSView& getViewClass()
{
static FlippedNSView result;
static InnerNSView result;
return result;
}
};
Expand Down Expand Up @@ -239,8 +233,13 @@ struct NSViewComponentWithParent : public NSViewComponent,
#include "format_types/juce_LV2PluginFormat.cpp"

#if JUCE_UNIT_TESTS
#include "format_types/juce_VST3PluginFormat_test.cpp"
#include "format_types/juce_LV2PluginFormat_test.cpp"
#if JUCE_PLUGINHOST_VST3
#include "format_types/juce_VST3PluginFormat_test.cpp"
#endif

#if JUCE_PLUGINHOST_LV2
#include "format_types/juce_LV2PluginFormat_test.cpp"
#endif
#endif

#if JUCE_AUDIOPROCESSOR_NO_GUI
Expand Down
Expand Up @@ -35,7 +35,7 @@

ID: juce_audio_processors
vendor: juce
version: 6.1.6
version: 7.0.1
name: JUCE audio processor classes
description: Classes for loading and playing VST, AU, LADSPA, or internally-generated audio processors.
website: http://www.juce.com/juce
Expand Down
Expand Up @@ -1141,11 +1141,6 @@ class JUCE_API AudioProcessor
/** This method is called when the layout of the audio processor changes. */
virtual void processorLayoutsChanged();

//==============================================================================
/** LV2 specific calls, saving/restore as string. */
virtual String getStateInformationString () { return String(); }
virtual void setStateInformationString (const String&) {}

//==============================================================================
/** Adds a listener that will be called when an aspect of this processor changes. */
virtual void addListener (AudioProcessorListener* newListener);
Expand All @@ -1160,51 +1155,6 @@ class JUCE_API AudioProcessor
*/
virtual void setPlayHead (AudioPlayHead* newPlayHead);

//==============================================================================
/** Hosts may call this function to supply the system time corresponding to the
current audio buffer.

If you want to set a valid time, pass a pointer to a uint64_t holding the current time. The
value will be copied into the AudioProcessor instance without any allocation/deallocation.

If you want to clear any stored host time, pass nullptr.

Calls to this function must be synchronised (i.e. not simultaneous) with the audio callback.

@code
const auto currentHostTime = computeHostTimeNanos();
processor.setHostTimeNanos (&currentHostTime); // Set a valid host time
// ...call processBlock etc.
processor.setHostTimeNanos (nullptr); // Clear host time
@endcode
*/
void setHostTimeNanos (const uint64_t* hostTimeIn)
{
hasHostTime = hostTimeIn != nullptr;
hostTime = hasHostTime ? *hostTimeIn : 0;
}

/** The plugin may call this function inside the processBlock function (and only there!)
to find the timestamp associated with the current audio block.

If a timestamp is available, this will return a pointer to that timestamp. You should
immediately copy the pointed-to value and use that in any following code. Do *not* free
any pointer returned by this function.

If no timestamp is provided, this will return nullptr.

@code
void processBlock (AudioBuffer<float>&, MidiBuffer&) override
{
if (auto* timestamp = getHostTimeNs())
{
// Use *timestamp here to compensate for callback jitter etc.
}
}
@endcode
*/
const uint64_t* getHostTimeNs() const { return hasHostTime ? &hostTime : nullptr; }

//==============================================================================
/** This is called by the processor to specify its details before being played. Use this
version of the function if you are not interested in any sidechain and/or aux buses
Expand Down Expand Up @@ -1565,9 +1515,6 @@ class JUCE_API AudioProcessor
AudioProcessorParameterGroup parameterTree;
Array<AudioProcessorParameter*> flatParameterList;

uint64_t hostTime = 0;
bool hasHostTime = false;

AudioProcessorParameter* getParamChecked (int) const;

#if JUCE_DEBUG
Expand Down
Expand Up @@ -37,18 +37,15 @@ static void updateOnMessageThread (AsyncUpdater& updater)
template <typename FloatType>
struct GraphRenderSequence
{
GraphRenderSequence() {}

struct Context
{
FloatType** audioBuffers;
MidiBuffer* midiBuffers;
AudioPlayHead* audioPlayHead;
Optional<uint64_t> hostTimeNs;
int numSamples;
};

void perform (AudioBuffer<FloatType>& buffer, MidiBuffer& midiMessages, AudioPlayHead* audioPlayHead, Optional<uint64_t> hostTimeNs)
void perform (AudioBuffer<FloatType>& buffer, MidiBuffer& midiMessages, AudioPlayHead* audioPlayHead)
{
auto numSamples = buffer.getNumSamples();
auto maxSamples = renderingBuffer.getNumSamples();
Expand All @@ -67,7 +64,7 @@ struct GraphRenderSequence

                // Splitting up the buffer like this will cause the play head to be
                // invalid for all but the first chunk...
perform (audioChunk, midiChunk, audioPlayHead, hostTimeNs);
perform (audioChunk, midiChunk, audioPlayHead);

chunkStartSample += maxSamples;
}
Expand All @@ -82,7 +79,7 @@ struct GraphRenderSequence
currentMidiOutputBuffer.clear();

{
const Context context { renderingBuffer.getArrayOfWritePointers(), midiBuffers.begin(), audioPlayHead, hostTimeNs, numSamples };
const Context context { renderingBuffer.getArrayOfWritePointers(), midiBuffers.begin(), audioPlayHead, numSamples };

for (auto* op : renderOps)
op->perform (context);
Expand Down Expand Up @@ -267,7 +264,6 @@ struct GraphRenderSequence
void perform (const Context& c) override
{
processor.setPlayHead (c.audioPlayHead);
processor.setHostTimeNanos (c.hostTimeNs.hasValue() ? &(*c.hostTimeNs) : nullptr);

for (int i = 0; i < totalChans; ++i)
audioChannels[i] = c.audioBuffers[audioChannelsToUse.getUnchecked (i)];
Expand All @@ -289,8 +285,6 @@ struct GraphRenderSequence
buffer.clear();
else
callProcess (buffer, c.midiBuffers[midiBufferToUse]);

processor.setHostTimeNanos (nullptr);
}

void callProcess (AudioBuffer<float>& buffer, MidiBuffer& midiMessages)
Expand Down Expand Up @@ -1402,14 +1396,6 @@ static void processBlockForBuffer (AudioBuffer<FloatType>& buffer, MidiBuffer& m
std::unique_ptr<SequenceType>& renderSequence,
std::atomic<bool>& isPrepared)
{
const auto getHostTime = [&]() -> Optional<uint64_t>
{
if (auto* nanos = graph.getHostTimeNs())
return *nanos;

return nullopt;
};

if (graph.isNonRealtime())
{
while (! isPrepared)
Expand All @@ -1418,7 +1404,7 @@ static void processBlockForBuffer (AudioBuffer<FloatType>& buffer, MidiBuffer& m
const ScopedLock sl (graph.getCallbackLock());

if (renderSequence != nullptr)
renderSequence->perform (buffer, midiMessages, graph.getPlayHead(), getHostTime());
renderSequence->perform (buffer, midiMessages, graph.getPlayHead());
}
else
{
Expand All @@ -1427,7 +1413,7 @@ static void processBlockForBuffer (AudioBuffer<FloatType>& buffer, MidiBuffer& m
if (isPrepared)
{
if (renderSequence != nullptr)
renderSequence->perform (buffer, midiMessages, graph.getPlayHead(), getHostTime());
renderSequence->perform (buffer, midiMessages, graph.getPlayHead());
}
else
{
Expand Down