Delete more unused Mediafile methods.

In particular, PlayoutStereoData and StartPlayingAudioFile. This also
eliminates the dependency on system_wrappers FileWrapper.

Bug: None
Change-Id: I61df1eea1ad5f5035e36c8229febbf3668808f65
Reviewed-on: https://webrtc-review.googlesource.com/28121
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Magnus Flodman <mflodman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21038}
This commit is contained in:
Niels Möller 2017-12-04 12:37:31 +01:00 committed by Commit Bot
parent e4a9e923e4
commit 83d27683a8
7 changed files with 1 addition and 309 deletions

View File

@@ -37,7 +37,6 @@ rtc_static_library("media_file") {
"../..:webrtc_common",
"../../common_audio",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
]
}
@@ -50,7 +49,6 @@ if (rtc_include_tests) {
]
deps = [
":media_file",
"../../system_wrappers:system_wrappers",
"../../test:test_support",
]
if (is_win) {

View File

@@ -38,42 +38,6 @@ public:
int8_t* audioBuffer,
size_t& dataLengthInBytes) = 0;
// Put 10-60ms, depending on codec frame size, of audio data from file into
// audioBufferLeft and audioBufferRight. The buffers contain the left and
// right channel of played out stereo audio.
// dataLengthInBytes is both an input and output parameter. As input
// parameter it indicates the size of both audioBufferLeft and
// audioBufferRight. As output parameter it indicates the number of bytes
// written to both audio buffers.
// Note: This API can only be successfully called for WAV files with stereo
// audio.
virtual int32_t PlayoutStereoData(
int8_t* audioBufferLeft,
int8_t* audioBufferRight,
size_t& dataLengthInBytes) = 0;
// Open the file specified by fileName (relative path is allowed) for
// reading. FileCallback::PlayNotification(..) will be called after
// notificationTimeMs of the file has been played if notificationTimeMs is
// greater than zero. If loop is true the file will be played until
// StopPlaying() is called. When end of file is reached the file is read
// from the start. format specifies the type of file fileName refers to.
// codecInst specifies the encoding of the audio data. Note that
// file formats that contain this information (like WAV files) don't need to
// provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
// specify what part of the file should be read. From startPointMs ms to
// stopPointMs ms.
// Note: codecInst.channels should be set to 2 for stereo (and 1 for
// mono). Stereo audio is only supported for WAV files.
virtual int32_t StartPlayingAudioFile(
const char* fileName,
const uint32_t notificationTimeMs = 0,
const bool loop = false,
const FileFormats format = kFileFormatPcm16kHzFile,
const CodecInst* codecInst = NULL,
const uint32_t startPointMs = 0,
const uint32_t stopPointMs = 0) = 0;
// Prepare for playing audio from stream.
// FileCallback::PlayNotification(..) will be called after
// notificationTimeMs of the file has been played if notificationTimeMs is

View File

@@ -13,7 +13,6 @@
#include "modules/media_file/media_file_impl.h"
#include "rtc_base/format_macros.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/file_wrapper.h"
namespace webrtc {
MediaFile* MediaFile::CreateMediaFile(const int32_t id) {
@@ -29,13 +28,10 @@ MediaFileImpl::MediaFileImpl(const int32_t id)
_ptrFileUtilityObj(NULL),
codec_info_(),
_ptrInStream(NULL),
_ptrOutStream(NULL),
_fileFormat((FileFormats)-1),
_playoutPositionMs(0),
_notificationMs(0),
_playingActive(false),
_isStereo(false),
_openFile(false),
_fileName(),
_ptrCallback(NULL) {
RTC_LOG(LS_INFO) << "MediaFileImpl()";
@@ -54,13 +50,6 @@ MediaFileImpl::~MediaFileImpl() {
}
delete _ptrFileUtilityObj;
if (_openFile) {
delete _ptrInStream;
_ptrInStream = NULL;
delete _ptrOutStream;
_ptrOutStream = NULL;
}
}
}
@@ -173,133 +162,6 @@ void MediaFileImpl::HandlePlayCallbacks(int32_t bytesRead) {
}
}
int32_t MediaFileImpl::PlayoutStereoData(int8_t* bufferLeft,
int8_t* bufferRight,
size_t& dataLengthInBytes) {
RTC_LOG(LS_INFO) << "MediaFileImpl::PlayoutStereoData(Left = "
<< static_cast<void*>(bufferLeft)
<< ", Right = " << static_cast<void*>(bufferRight)
<< ", Len= " << dataLengthInBytes << ")";
const size_t bufferLengthInBytes = dataLengthInBytes;
dataLengthInBytes = 0;
if (bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0) {
RTC_LOG(LS_ERROR) << "A buffer pointer or the length is NULL!";
return -1;
}
bool playEnded = false;
uint32_t callbackNotifyMs = 0;
{
rtc::CritScope lock(&_crit);
if (!_playingActive || !_isStereo) {
RTC_LOG(LS_WARNING) << "Not currently playing stereo!";
return -1;
}
if (!_ptrFileUtilityObj) {
RTC_LOG(LS_ERROR)
<< "Playing stereo, but the FileUtility objects is NULL!";
StopPlaying();
return -1;
}
// Stereo playout only supported for WAV files.
int32_t bytesRead = 0;
switch (_fileFormat) {
case kFileFormatWavFile:
bytesRead = _ptrFileUtilityObj->ReadWavDataAsStereo(
*_ptrInStream, bufferLeft, bufferRight, bufferLengthInBytes);
break;
default:
RTC_LOG(LS_ERROR)
<< "Trying to read non-WAV as stereo audio (not supported)";
break;
}
if (bytesRead > 0) {
dataLengthInBytes = static_cast<size_t>(bytesRead);
// Check if it's time for PlayNotification(..).
_playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
if (_notificationMs) {
if (_playoutPositionMs >= _notificationMs) {
_notificationMs = 0;
callbackNotifyMs = _playoutPositionMs;
}
}
} else {
// If no bytes were read assume end of file.
StopPlaying();
playEnded = true;
}
}
rtc::CritScope lock(&_callbackCrit);
if (_ptrCallback) {
if (callbackNotifyMs) {
_ptrCallback->PlayNotification(_id, callbackNotifyMs);
}
if (playEnded) {
_ptrCallback->PlayFileEnded(_id);
}
}
return 0;
}
int32_t MediaFileImpl::StartPlayingAudioFile(const char* fileName,
const uint32_t notificationTimeMs,
const bool loop,
const FileFormats format,
const CodecInst* codecInst,
const uint32_t startPointMs,
const uint32_t stopPointMs) {
if (!ValidFileName(fileName)) {
return -1;
}
if (!ValidFileFormat(format, codecInst)) {
return -1;
}
if (!ValidFilePositions(startPointMs, stopPointMs)) {
return -1;
}
// Check that the file will play longer than notificationTimeMs ms.
if ((startPointMs && stopPointMs && !loop) &&
(notificationTimeMs > (stopPointMs - startPointMs))) {
RTC_LOG(LS_ERROR) << "specified notification time is longer than amount of"
<< " ms that will be played";
return -1;
}
FileWrapper* inputStream = FileWrapper::Create();
if (inputStream == NULL) {
RTC_LOG(LS_INFO) << "Failed to allocate input stream for file " << fileName;
return -1;
}
if (!inputStream->OpenFile(fileName, true)) {
delete inputStream;
RTC_LOG(LS_ERROR) << "Could not open input file " << fileName;
return -1;
}
if (StartPlayingStream(*inputStream, loop, notificationTimeMs, format,
codecInst, startPointMs, stopPointMs) == -1) {
inputStream->CloseFile();
delete inputStream;
return -1;
}
rtc::CritScope lock(&_crit);
_openFile = true;
strncpy(_fileName, fileName, sizeof(_fileName));
_fileName[sizeof(_fileName) - 1] = '\0';
return 0;
}
int32_t MediaFileImpl::StartPlayingAudioStream(
InStream& stream,
const uint32_t notificationTimeMs,
@@ -411,8 +273,7 @@ int32_t MediaFileImpl::StartPlayingStream(InStream& stream,
return -1;
}
_isStereo = (codec_info_.channels == 2);
if (_isStereo && (_fileFormat != kFileFormatWavFile)) {
if ((codec_info_.channels == 2) && (_fileFormat != kFileFormatWavFile)) {
RTC_LOG(LS_WARNING) << "Stereo is only allowed for WAV files";
StopPlaying();
return -1;
@@ -427,17 +288,11 @@ int32_t MediaFileImpl::StartPlayingStream(InStream& stream,
int32_t MediaFileImpl::StopPlaying() {
rtc::CritScope lock(&_crit);
_isStereo = false;
if (_ptrFileUtilityObj) {
delete _ptrFileUtilityObj;
_ptrFileUtilityObj = NULL;
}
if (_ptrInStream) {
// If MediaFileImpl opened the InStream it must be reclaimed here.
if (_openFile) {
delete _ptrInStream;
_openFile = false;
}
_ptrInStream = NULL;
}

View File

@@ -33,19 +33,6 @@ public:
int32_t PlayoutAudioData(int8_t* audioBuffer,
size_t& dataLengthInBytes) override;
int32_t PlayoutStereoData(int8_t* audioBufferLeft,
int8_t* audioBufferRight,
size_t& dataLengthInBytes) override;
int32_t StartPlayingAudioFile(
const char* fileName,
const uint32_t notificationTimeMs = 0,
const bool loop = false,
const FileFormats format = kFileFormatPcm16kHzFile,
const CodecInst* codecInst = NULL,
const uint32_t startPointMs = 0,
const uint32_t stopPointMs = 0) override;
int32_t StartPlayingAudioStream(
InStream& stream,
const uint32_t notificationTimeMs = 0,
@@ -99,15 +86,12 @@ private:
CodecInst codec_info_;
InStream* _ptrInStream;
OutStream* _ptrOutStream;
FileFormats _fileFormat;
uint32_t _playoutPositionMs;
uint32_t _notificationMs;
bool _playingActive;
bool _isStereo;
bool _openFile;
char _fileName[512];

View File

@@ -9,9 +9,7 @@
*/
#include "modules/media_file/media_file.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
#include "test/testsupport/fileutils.h"
class MediaFileTest : public testing::Test {
protected:
@@ -26,28 +24,3 @@ class MediaFileTest : public testing::Test {
}
webrtc::MediaFile* media_file_;
};
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
#define MAYBE_StartPlayingAudioFileWithoutError \
DISABLED_StartPlayingAudioFileWithoutError
#else
#define MAYBE_StartPlayingAudioFileWithoutError \
StartPlayingAudioFileWithoutError
#endif
TEST_F(MediaFileTest, MAYBE_StartPlayingAudioFileWithoutError) {
// TODO(leozwang): Use hard coded filename here, we want to
// loop through all audio files in future
const std::string audio_file =
webrtc::test::ResourcePath("voice_engine/audio_tiny48", "wav");
ASSERT_EQ(0, media_file_->StartPlayingAudioFile(
audio_file.c_str(),
0,
false,
webrtc::kFileFormatWavFile));
ASSERT_EQ(true, media_file_->IsPlaying());
webrtc::SleepMs(1);
ASSERT_EQ(0, media_file_->StopPlaying());
}

View File

@@ -20,7 +20,6 @@
#include "modules/include/module_common_types.h"
#include "rtc_base/format_macros.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/file_wrapper.h"
#include "typedefs.h" // NOLINT(build/include)
namespace {
@@ -376,74 +375,6 @@ int32_t ModuleFileUtility::ReadWavDataAsStereo(InStream& wav,
return static_cast<int32_t>(bytesRequested);
}
int32_t ModuleFileUtility::ReadWavDataAsStereo(InStream& wav,
int8_t* outDataLeft,
int8_t* outDataRight,
const size_t bufferSize) {
RTC_LOG(LS_VERBOSE) << "ModuleFileUtility::ReadWavDataAsStereo(wav= " << &wav
<< ", outLeft= " << static_cast<void*>(outDataLeft)
<< ", outRight= " << static_cast<void*>(outDataRight)
<< ", bufSize= " << bufferSize << ")";
if ((outDataLeft == NULL) || (outDataRight == NULL)) {
RTC_LOG(LS_ERROR) << "ReadWavDataAsStereo: an input buffer is NULL!";
return -1;
}
if (codec_info_.channels != 2) {
RTC_LOG(LS_ERROR)
<< "ReadWavDataAsStereo: WAV file does not contain stereo data!";
return -1;
}
if (!_reading) {
RTC_LOG(LS_ERROR) << "ReadWavDataAsStereo: no longer reading file.";
return -1;
}
// The number of bytes that should be read from file.
const size_t totalBytesNeeded = _readSizeBytes;
// The number of bytes that will be written to the left and the right
// buffers.
const size_t bytesRequested = totalBytesNeeded >> 1;
if (bufferSize < bytesRequested) {
RTC_LOG(LS_ERROR) << "ReadWavDataAsStereo: Output buffers are too short!";
assert(false);
return -1;
}
int32_t bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
if (bytesRead <= 0) {
RTC_LOG(LS_ERROR)
<< "ReadWavDataAsStereo: failed to read data from WAV file.";
return -1;
}
// Turn interleaved audio to left and right buffer. Note samples can be
// either 1 or 2 bytes
if (_bytesPerSample == 1) {
for (size_t i = 0; i < bytesRequested; i++) {
outDataLeft[i] = _tempData[2 * i];
outDataRight[i] = _tempData[(2 * i) + 1];
}
} else if (_bytesPerSample == 2) {
int16_t* sampleData = reinterpret_cast<int16_t*>(_tempData);
int16_t* outLeft = reinterpret_cast<int16_t*>(outDataLeft);
int16_t* outRight = reinterpret_cast<int16_t*>(outDataRight);
// Bytes requested to samples requested.
size_t sampleCount = bytesRequested >> 1;
for (size_t i = 0; i < sampleCount; i++) {
outLeft[i] = sampleData[2 * i];
outRight[i] = sampleData[(2 * i) + 1];
}
} else {
RTC_LOG(LS_ERROR) << "ReadWavStereoData: unsupported sample size "
<< _bytesPerSample << "!";
assert(false);
return -1;
}
return static_cast<int32_t>(bytesRequested);
}
int32_t ModuleFileUtility::ReadWavData(InStream& wav,
uint8_t* buffer,
size_t dataLengthInBytes) {

View File

@@ -44,19 +44,6 @@ public:
int32_t ReadWavDataAsMono(InStream& stream, int8_t* audioBuffer,
const size_t dataLengthInBytes);
// Put 10-60ms, depending on codec frame size, of audio data from file into
// audioBufferLeft and audioBufferRight. The buffers contain the left and
// right channel of played out stereo audio.
// dataLengthInBytes indicates the size of both audioBufferLeft and
// audioBufferRight.
// The return value is the number of bytes read for each buffer.
// Note: This API can only be successfully called for WAV files with stereo
// audio.
int32_t ReadWavDataAsStereo(InStream& wav,
int8_t* audioBufferLeft,
int8_t* audioBufferRight,
const size_t bufferLength);
// Prepare for playing audio from stream.
// startPointMs and stopPointMs, unless zero, specify what part of the file
// should be read. From startPointMs ms to stopPointMs ms.