Audio: Fixed bug with stereo delay filter when changing sample rate (right channel is muted)
Parent: cfada27612
Commit: a37992e299
2 changed files with 5 additions and 7 deletions
StereoDelayFilter.cpp
@@ -5,13 +5,11 @@ int16_t* StereoDelayFilter::ApplyFilter(int16_t* monoBuffer, size_t sampleCount,
 {
     UpdateBufferSize(sampleCount, true);

-    int32_t delay = EmulationSettings::GetStereoDelay();
-    if(delay != _lastDelay) {
+    size_t delaySampleCount = (int32_t)((double)EmulationSettings::GetStereoDelay() / 1000 * sampleRate);
+    if(delaySampleCount != _lastDelay) {
         _delayedSamples.clear();
     }
-    _lastDelay = delay;
-
-    size_t delaySampleCount = (int32_t)((double)delay / 1000 * sampleRate);
+    _lastDelay = delaySampleCount;

     for(size_t i = 0; i < sampleCount; i++) {
         _delayedSamples.push_back(monoBuffer[i]);
StereoDelayFilter.h
@@ -9,7 +9,7 @@ class StereoDelayFilter : public BaseSoundFilter
 {
 private:
     std::deque<int16_t> _delayedSamples;
-    int32_t _lastDelay;
+    size_t _lastDelay = 0;

 public:
     int16_t* ApplyFilter(int16_t* monoBuffer, size_t sampleCount, uint32_t sampleRate);
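The .cpp change keys the delay-buffer reset on the delay expressed in samples rather than in milliseconds: the old code only cleared _delayedSamples when the millisecond setting changed, so a sample-rate change alone left the buffer in a stale state, which the commit title ties to the right channel going silent. The header change matches this by making _lastDelay a size_t sample count with a defined initial value. Below is a minimal sketch of how the start of the patched ApplyFilter reads after this commit, reconstructed from the hunks above; the rest of the method and the surrounding class are omitted.

// Sketch only: reconstructed from the diff above, not a verbatim copy of the full method.
int16_t* StereoDelayFilter::ApplyFilter(int16_t* monoBuffer, size_t sampleCount, uint32_t sampleRate)
{
    UpdateBufferSize(sampleCount, true);

    // Delay length in samples at the current rate; comparing this value
    // (instead of the millisecond setting) means a sample-rate change
    // also triggers a buffer reset.
    size_t delaySampleCount = (int32_t)((double)EmulationSettings::GetStereoDelay() / 1000 * sampleRate);
    if(delaySampleCount != _lastDelay) {
        _delayedSamples.clear();
    }
    _lastDelay = delaySampleCount;

    // Queue the incoming mono samples into the delay buffer.
    for(size_t i = 0; i < sampleCount; i++) {
        _delayedSamples.push_back(monoBuffer[i]);
    }

    // ...remainder of the method is not shown in this diff.
}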