Fix the number of frames used when interleaving in AudioBuffer::InterleaveTo()

R=henrik.lundin@webrtc.org, peah@webrtc.org
TBR=tina.legrand@webrtc.org

Review URL: https://codereview.webrtc.org/1862553002 .

Cr-Commit-Position: refs/heads/master@{#12249}
parent faed4ab24b
commit 40cbec5415
Binary file not shown.
@@ -430,10 +430,10 @@ void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) {
   }
 
   if (frame->num_channels_ == num_channels_) {
-    Interleave(data_ptr->ibuf()->channels(), proc_num_frames_, num_channels_,
+    Interleave(data_ptr->ibuf()->channels(), output_num_frames_, num_channels_,
                frame->data_);
   } else {
-    UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], proc_num_frames_,
+    UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], output_num_frames_,
                            frame->num_channels_, frame->data_);
   }
 }
@@ -54,12 +54,7 @@ bool write_ref_data = false;
 const google::protobuf::int32 kChannels[] = {1, 2};
 const int kSampleRates[] = {8000, 16000, 32000, 48000};
 
-#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
-// Android doesn't support 48kHz.
-const int kProcessSampleRates[] = {8000, 16000, 32000};
-#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
 const int kProcessSampleRates[] = {8000, 16000, 32000, 48000};
-#endif
 
 enum StreamDirection { kForward = 0, kReverse };
 
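Note on the first hunk: proc_num_frames_ is the per-channel frame count at the internal processing rate, while output_num_frames_ is the per-channel count the destination AudioFrame expects; when the output rate differs from the processing rate these counts differ, so interleaving must use the output count. The standalone C++ sketch below illustrates the planar-to-interleaved copy and why the frame-count argument matters. It is only an illustration under assumed conditions: InterleaveChannels, the 32 kHz/48 kHz rates, and the buffer sizes are hypothetical stand-ins, not the WebRTC helpers touched by this commit.

#include <cstddef>
#include <cstdint>
#include <vector>

// Minimal stand-in for the planar-to-interleaved copy. In the real code the
// deinterleaved channel data lives in AudioBuffer and the destination is
// AudioFrame::data_; plain vectors are used here only for illustration.
void InterleaveChannels(const std::vector<std::vector<int16_t>>& channels,
                        size_t num_frames,
                        int16_t* interleaved) {
  const size_t num_channels = channels.size();
  for (size_t i = 0; i < num_frames; ++i) {
    for (size_t ch = 0; ch < num_channels; ++ch) {
      interleaved[i * num_channels + ch] = channels[ch][i];
    }
  }
}

int main() {
  // Hypothetical 10 ms chunk: processing at 32 kHz (320 frames per channel)
  // while the output AudioFrame is at 48 kHz (480 frames per channel).
  const size_t proc_num_frames = 320;
  const size_t output_num_frames = 480;

  // After output resampling, each channel holds output_num_frames samples,
  // and the destination frame is sized accordingly.
  std::vector<std::vector<int16_t>> channels(
      2, std::vector<int16_t>(output_num_frames, 1));
  std::vector<int16_t> frame_data(output_num_frames * channels.size(), 0);

  // Passing proc_num_frames here would leave the tail of frame_data untouched;
  // the commit switches the call sites to the output count instead.
  InterleaveChannels(channels, output_num_frames, frame_data.data());
  (void)proc_num_frames;
  return 0;
}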