M7350v1_en_gpl

This commit is contained in:
T
2024-09-09 08:52:07 +00:00
commit f9cc65cfda
65988 changed files with 26357421 additions and 0 deletions

View File

@ -0,0 +1,72 @@
# Build rules for the stagefright command-line developer/test tools.
LOCAL_PATH:= $(call my-dir)

# stagefright: media decode / playback / remux benchmark tool.
include $(CLEAR_VARS)

LOCAL_SRC_FILES:= \
        stagefright.cpp \
        SineSource.cpp

LOCAL_SHARED_LIBRARIES := \
        libstagefright libmedia libutils libbinder libstagefright_foundation

LOCAL_C_INCLUDES:= \
        $(JNI_H_INCLUDE) \
        frameworks/base/media/libstagefright \
        frameworks/base/media/libstagefright/include \
        $(TOP)/frameworks/base/include/media/stagefright/openmax

# Four-character-code constants trigger -Wmultichar; silence it.
LOCAL_CFLAGS += -Wno-multichar

LOCAL_MODULE_TAGS := debug

LOCAL_MODULE:= stagefright

include $(BUILD_EXECUTABLE)

################################################################################

# record: video/audio encoding test tool.
include $(CLEAR_VARS)

LOCAL_SRC_FILES:= \
        SineSource.cpp \
        record.cpp

LOCAL_SHARED_LIBRARIES := \
        libstagefright liblog libutils libbinder

LOCAL_C_INCLUDES:= \
        $(JNI_H_INCLUDE) \
        frameworks/base/media/libstagefright \
        $(TOP)/frameworks/base/include/media/stagefright/openmax

LOCAL_CFLAGS += -Wno-multichar

LOCAL_MODULE_TAGS := debug

LOCAL_MODULE:= record

include $(BUILD_EXECUTABLE)

################################################################################

# audioloop: audio capture -> AMR encode loop test tool.
include $(CLEAR_VARS)

LOCAL_SRC_FILES:= \
        SineSource.cpp \
        audioloop.cpp

LOCAL_SHARED_LIBRARIES := \
        libstagefright liblog libutils libbinder

LOCAL_C_INCLUDES:= \
        $(JNI_H_INCLUDE) \
        frameworks/base/media/libstagefright \
        $(TOP)/frameworks/base/include/media/stagefright/openmax

LOCAL_CFLAGS += -Wno-multichar

LOCAL_MODULE_TAGS := debug

LOCAL_MODULE:= audioloop

include $(BUILD_EXECUTABLE)

View File

@ -0,0 +1,102 @@
#include "SineSource.h"
#include <math.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
namespace android {
// Constructs a sine-tone source producing `numChannels` channels (mono or
// stereo only) at `sampleRate` Hz. Buffer allocation is deferred to start().
SineSource::SineSource(int32_t sampleRate, int32_t numChannels)
    : mStarted(false),
      mSampleRate(sampleRate),
      mNumChannels(numChannels),
      mPhase(0),
      mGroup(NULL) {
    CHECK(numChannels == 1 || numChannels == 2);
}
// Stops the source if the owner forgot to; safe to destroy in any state.
SineSource::~SineSource() {
    if (mStarted) {
        stop();
    }
}
// Allocates the single output buffer and resets the waveform phase.
// `params` is unused. Aborts (CHECK) if called while already started.
status_t SineSource::start(MetaData *params) {
    CHECK(!mStarted);

    mGroup = new MediaBufferGroup;
    mGroup->add_buffer(new MediaBuffer(kBufferSize));

    mPhase = 0;
    mStarted = true;

    return OK;
}
// Releases the buffer group. Aborts (CHECK) if the source was not started.
status_t SineSource::stop() {
    CHECK(mStarted);

    delete mGroup;
    mGroup = NULL;

    mStarted = false;

    return OK;
}
// Advertises the output as raw PCM with the configured channel count and
// sample rate; no single buffer will exceed kBufferSize bytes.
sp<MetaData> SineSource::getFormat() {
    sp<MetaData> meta = new MetaData;
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
    meta->setInt32(kKeyChannelCount, mNumChannels);
    meta->setInt32(kKeySampleRate, mSampleRate);
    meta->setInt32(kKeyMaxInputSize, kBufferSize);

    return meta;
}
// Fills one MediaBuffer with 16-bit sine samples (the same value written to
// both channels in stereo mode) and stamps it with the start time derived
// from the running frame counter mPhase. `options` is ignored, so seek
// requests have no effect on this source.
status_t SineSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);

    if (err != OK) {
        return err;
    }

    size_t frameSize = mNumChannels * sizeof(int16_t);
    size_t numFramesPerBuffer = buffer->size() / frameSize;

    int16_t *ptr = (int16_t *)buffer->data();

    // Angular step per frame for a kFrequency Hz tone at mSampleRate.
    const double k = kFrequency / mSampleRate * (2.0 * M_PI);

    double x = mPhase * k;
    for (size_t i = 0; i < numFramesPerBuffer; ++i) {
        int16_t amplitude = (int16_t)(32767.0 * sin(x));

        *ptr++ = amplitude;
        if (mNumChannels == 2) {
            *ptr++ = amplitude;
        }

        x += k;
    }

    // Timestamp (us) of the first frame in this buffer.
    buffer->meta_data()->setInt64(
            kKeyTime, ((int64_t)mPhase * 1000000) / mSampleRate);

    mPhase += numFramesPerBuffer;

    buffer->set_range(0, numFramesPerBuffer * frameSize);

    *out = buffer;

    return OK;
}
} // namespace android

View File

@ -0,0 +1,39 @@
#ifndef SINE_SOURCE_H_
#define SINE_SOURCE_H_
#include <media/stagefright/MediaSource.h>
namespace android {
struct MediaBufferGroup;
// A MediaSource that synthesizes a fixed-frequency sine tone as raw 16-bit
// PCM; used as a predictable audio input by the command-line test tools.
struct SineSource : public MediaSource {
    SineSource(int32_t sampleRate, int32_t numChannels);

    // MediaSource interface.
    virtual status_t start(MetaData *params);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();

    virtual status_t read(
            MediaBuffer **out, const ReadOptions *options = NULL);

protected:
    virtual ~SineSource();

private:
    enum { kBufferSize = 8192 };
    // NOTE(review): in-class initialization of a static const double is a
    // GCC extension, not standard C++ — would need an out-of-class
    // definition (or constexpr) to build on stricter toolchains.
    static const double kFrequency = 500.0;

    bool mStarted;
    int32_t mSampleRate;
    int32_t mNumChannels;
    size_t mPhase;  // number of frames generated so far

    MediaBufferGroup *mGroup;
};
} // namespace android
#endif // SINE_SOURCE_H_

View File

@ -0,0 +1,71 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_WAVEWRITER_H_
#define ANDROID_WAVEWRITER_H_
namespace android {
// Minimal RIFF/WAVE (16-bit PCM) file writer.
//
// The constructor writes a canonical 44-byte WAVE header with placeholder
// ("xxxx") chunk sizes, Append() streams raw little-endian sample bytes,
// and the destructor seeks back to patch the RIFF and data chunk sizes
// before closing the file.
//
// BUGFIX: the original never checked whether fopen() succeeded and would
// crash on a NULL FILE*; all methods now degrade to no-ops in that case.
class WaveWriter {
public:
    WaveWriter(const char *filename,
               uint16_t num_channels, uint32_t sampling_rate)
        : mFile(fopen(filename, "wb")),
          mTotalBytes(0) {
        if (mFile == NULL) {
            return;
        }

        // RIFF header + "fmt " chunk: chunk size 16, audio format 1 (PCM).
        fwrite("RIFFxxxxWAVEfmt \x10\x00\x00\x00\x01\x00", 1, 22, mFile);
        write_u16(num_channels);
        write_u32(sampling_rate);
        write_u32(sampling_rate * num_channels * 2);  // byte rate
        write_u16(num_channels * 2);                  // block alignment
        write_u16(16);                                // bits per sample
        fwrite("dataxxxx", 1, 8, mFile);
    }

    ~WaveWriter() {
        if (mFile == NULL) {
            return;
        }

        // Patch the placeholders now that the total size is known:
        // offset 40 holds the "data" chunk size, offset 4 the RIFF size.
        fseek(mFile, 40, SEEK_SET);
        write_u32(mTotalBytes);

        fseek(mFile, 4, SEEK_SET);
        write_u32(36 + mTotalBytes);

        fclose(mFile);
        mFile = NULL;
    }

    // Appends raw PCM sample bytes to the data chunk.
    void Append(const void *data, size_t size) {
        if (mFile == NULL) {
            return;
        }

        fwrite(data, 1, size, mFile);
        mTotalBytes += size;
    }

private:
    // Write a 16-bit value in little-endian byte order.
    void write_u16(uint16_t x) {
        fputc(x & 0xff, mFile);
        fputc(x >> 8, mFile);
    }

    // Write a 32-bit value in little-endian byte order.
    void write_u32(uint32_t x) {
        write_u16(x & 0xffff);
        write_u16(x >> 16);
    }

    FILE *mFile;
    size_t mTotalBytes;
};
} // namespace android
#endif // ANDROID_WAVEWRITER_H_

View File

@ -0,0 +1,101 @@
#include "SineSource.h"
#include <binder/ProcessState.h>
#include <media/mediarecorder.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
using namespace android;
// Captures (or, in the disabled branch, synthesizes) mono audio, encodes it
// to AMR-NB (or AMR-WB) and writes ten seconds' worth to /sdcard/out.amr.
// The disabled #else branches demonstrate decode-loopback and playback.
int main() {
    // We only have an AMR-WB encoder on sholes...
    static bool outputWBAMR = false;
    static const int32_t kSampleRate = outputWBAMR ? 16000 : 8000;
    static const int32_t kNumChannels = 1;

    android::ProcessState::self()->startThreadPool();

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

#if 0
    sp<MediaSource> source = new SineSource(kSampleRate, kNumChannels);
#else
    // Capture from the default audio input device.
    sp<MediaSource> source = new AudioSource(
            AUDIO_SOURCE_DEFAULT,
            kSampleRate,
            kNumChannels == 1
                ? AudioSystem::CHANNEL_IN_MONO
                : AudioSystem::CHANNEL_IN_STEREO);
#endif

    // Describe the desired encoded output format.
    sp<MetaData> meta = new MetaData;

    meta->setCString(
            kKeyMIMEType,
            outputWBAMR ? MEDIA_MIMETYPE_AUDIO_AMR_WB
                        : MEDIA_MIMETYPE_AUDIO_AMR_NB);

    meta->setInt32(kKeyChannelCount, kNumChannels);
    meta->setInt32(kKeySampleRate, kSampleRate);

    // Propagate the capture source's max buffer size to the encoder.
    int32_t maxInputSize;
    if (source->getFormat()->findInt32(kKeyMaxInputSize, &maxInputSize)) {
        meta->setInt32(kKeyMaxInputSize, maxInputSize);
    }

    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(),
            meta, true /* createEncoder */,
            source);

#if 1
    // Encode for ten seconds into an AMR file.
    sp<AMRWriter> writer = new AMRWriter("/sdcard/out.amr");
    writer->addSource(encoder);
    writer->start();
    sleep(10);
    writer->stop();
#else
    // Disabled: feed the encoder's output back through a decoder.
    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(),
            meta, false /* createEncoder */,
            encoder);

#if 0
    // Disabled: play the decoded audio.
    AudioPlayer *player = new AudioPlayer(NULL);
    player->setSource(decoder);

    player->start();

    sleep(10);

    player->stop();

    delete player;
    player = NULL;
#elif 0
    // Disabled: drain the decoder, printing a dot per buffer.
    CHECK_EQ(decoder->start(), OK);

    MediaBuffer *buffer;
    while (decoder->read(&buffer) == OK) {
        // do something with buffer

        putchar('.');
        fflush(stdout);

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(decoder->stop(), OK);
#endif
#endif

    return 0;
}

View File

@ -0,0 +1,354 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "SineSource.h"
#include <binder/ProcessState.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/MediaPlayerInterface.h>
using namespace android;
// Encoding parameters shared by the test paths below.
static const int32_t kFramerate = 24;  // fps
static const int32_t kIFramesIntervalSec = 1;
static const int32_t kVideoBitRate = 512 * 1024;
static const int32_t kAudioBitRate = 12200;
static const int64_t kDurationUs = 10000000LL;  // 10 seconds
#if 1
class DummySource : public MediaSource {
public:
DummySource(int width, int height, int colorFormat)
: mWidth(width),
mHeight(height),
mColorFormat(colorFormat),
mSize((width * height * 3) / 2) {
mGroup.add_buffer(new MediaBuffer(mSize));
// Check the color format to make sure
// that the buffer size mSize it set correctly above.
CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
colorFormat == OMX_COLOR_FormatYUV420Planar);
}
virtual sp<MetaData> getFormat() {
sp<MetaData> meta = new MetaData;
meta->setInt32(kKeyWidth, mWidth);
meta->setInt32(kKeyHeight, mHeight);
meta->setInt32(kKeyColorFormat, mColorFormat);
meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
return meta;
}
virtual status_t start(MetaData *params) {
mNumFramesOutput = 0;
return OK;
}
virtual status_t stop() {
return OK;
}
virtual status_t read(
MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
if (mNumFramesOutput == kFramerate * 10) {
// Stop returning data after 10 secs.
return ERROR_END_OF_STREAM;
}
// printf("DummySource::read\n");
status_t err = mGroup.acquire_buffer(buffer);
if (err != OK) {
return err;
}
char x = (char)((double)rand() / RAND_MAX * 255);
memset((*buffer)->data(), x, mSize);
(*buffer)->set_range(0, mSize);
(*buffer)->meta_data()->clear();
(*buffer)->meta_data()->setInt64(
kKeyTime, (mNumFramesOutput * 1000000) / kFramerate);
++mNumFramesOutput;
// printf("DummySource::read - returning buffer\n");
// LOGI("DummySource::read - returning buffer");
return OK;
}
protected:
virtual ~DummySource() {}
private:
MediaBufferGroup mGroup;
int mWidth, mHeight;
int mColorFormat;
size_t mSize;
int64_t mNumFramesOutput;;
DummySource(const DummySource &);
DummySource &operator=(const DummySource &);
};
// Returns the first video track found in |filename|, or NULL if the file
// cannot be parsed or contains no video track.
sp<MediaSource> createSource(const char *filename) {
    sp<MediaExtractor> extractor =
        MediaExtractor::Create(new FileSource(filename));

    if (extractor == NULL) {
        return NULL;
    }

    sp<MediaSource> source;

    size_t num_tracks = extractor->countTracks();
    for (size_t i = 0; i < num_tracks; ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);
        CHECK(meta.get() != NULL);

        const char *mime;
        if (!meta->findCString(kKeyMIMEType, &mime)) {
            continue;
        }

        // Skip anything that isn't a video track.
        if (strncasecmp(mime, "video/", 6) != 0) {
            continue;
        }

        source = extractor->getTrack(i);
        break;
    }

    return source;
}
// Command-line values accepted for <input_color_format>; mapped to OMX
// color constants by translateColorToOmxEnumValue() below.
enum {
    kYUV420SP = 0,
    kYUV420P = 1,
};
// Maps a command-line color value (kYUV420SP / kYUV420P) to the matching
// OMX color-format enum value. Prints a diagnostic and returns -1 for any
// unrecognized input.
static int translateColorToOmxEnumValue(int color) {
    if (color == kYUV420SP) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (color == kYUV420P) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    fprintf(stderr, "Unsupported color: %d\n", color);
    return -1;
}
// Encodes synthetic (or, in disabled branches, file-sourced) video frames
// with an OMX AVC encoder and muxes the result into /sdcard/output.mp4.
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();
    DataSource::RegisterDefaultSniffers();

#if 1
    if (argc != 3) {
        fprintf(stderr, "usage: %s <filename> <input_color_format>\n", argv[0]);
        fprintf(stderr, " <input_color_format>: 0 (YUV420SP) or 1 (YUV420P)\n");
        return 1;
    }

    int colorFormat = translateColorToOmxEnumValue(atoi(argv[2]));
    if (colorFormat == -1) {
        fprintf(stderr, "input color format must be 0 (YUV420SP) or 1 (YUV420P)\n");
        return 1;
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    status_t err = OK;

#if 0
    // Disabled: decode the first video track of <filename> as input.
    sp<MediaSource> source = createSource(argv[1]);

    if (source == NULL) {
        fprintf(stderr, "Unable to find a suitable video track.\n");
        return 1;
    }

    sp<MetaData> meta = source->getFormat();

    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(), meta, false /* createEncoder */, source);

    int width, height;
    bool success = meta->findInt32(kKeyWidth, &width);
    success = success && meta->findInt32(kKeyHeight, &height);
    CHECK(success);
#else
    // Synthesize 720x480 frames instead of decoding a real file.
    // NOTE: <filename> (argv[1]) is unused on this path.
    int width = 720;
    int height = 480;
    sp<MediaSource> decoder = new DummySource(width, height, colorFormat);
#endif

    // Configure the video encoder.
    sp<MetaData> enc_meta = new MetaData;
    // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
    // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
    enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    // kKeySampleRate carries the frame rate for video encoders here.
    enc_meta->setInt32(kKeySampleRate, kFramerate);
    enc_meta->setInt32(kKeyBitRate, kVideoBitRate);
    enc_meta->setInt32(kKeyStride, width);
    enc_meta->setInt32(kKeySliceHeight, height);
    enc_meta->setInt32(kKeyIFramesInterval, kIFramesIntervalSec);
    enc_meta->setInt32(kKeyColorFormat, colorFormat);

    sp<MediaSource> encoder =
        OMXCodec::Create(
                client.interface(), enc_meta, true /* createEncoder */, decoder);

#if 1
    // Mux the encoded stream into an MP4 file, at most kDurationUs long.
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/output.mp4");
    writer->addSource(encoder);
    writer->setMaxFileDuration(kDurationUs);
    CHECK_EQ(OK, writer->start());
    while (!writer->reachedEOS()) {
        fprintf(stderr, ".");
        usleep(100000);
    }
    err = writer->stop();
#else
    // Disabled: drain the encoder directly and report frame sizes.
    CHECK_EQ(OK, encoder->start());

    MediaBuffer *buffer;
    while (encoder->read(&buffer) == OK) {
        printf(".");
        fflush(stdout);

        int32_t isSync;
        if (!buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)) {
            isSync = false;
        }

        printf("got an output frame of size %d%s\n", buffer->range_length(),
               isSync ? " (SYNC)" : "");

        buffer->release();
        buffer = NULL;
    }

    err = encoder->stop();
#endif

    printf("$\n");
    client.disconnect();
#endif

#if 0
    // Disabled: read 100 frames straight from the camera.
    CameraSource *source = CameraSource::Create();
    source->start();

    printf("source = %p\n", source);

    for (int i = 0; i < 100; ++i) {
        MediaBuffer *buffer;
        status_t err = source->read(&buffer);
        CHECK_EQ(err, OK);

        printf("got a frame, data=%p, size=%d\n",
               buffer->data(), buffer->range_length());

        buffer->release();
        buffer = NULL;
    }

    err = source->stop();

    delete source;
    source = NULL;
#endif

    if (err != OK && err != ERROR_END_OF_STREAM) {
        fprintf(stderr, "record failed: %d\n", err);
        return 1;
    }
    return 0;
}
#else
// Alternate test path (compiled out by the #if above): encodes a generated
// stereo sine tone with an AMR-WB encoder and drains up to 100 buffers.
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    const int32_t kSampleRate = 22050;
    const int32_t kNumChannels = 2;
    sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels);

#if 0
    // Disabled: play the sine tone directly instead of encoding it.
    sp<MediaPlayerBase::AudioSink> audioSink;
    AudioPlayer *player = new AudioPlayer(audioSink);
    player->setSource(audioSource);
    player->start();

    sleep(10);

    player->stop();
#endif

    sp<MetaData> encMeta = new MetaData;
    // The literal 1 selects AMR-WB; change to 0 to exercise the AAC encoder.
    encMeta->setCString(kKeyMIMEType,
            1 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
    encMeta->setInt32(kKeySampleRate, kSampleRate);
    encMeta->setInt32(kKeyChannelCount, kNumChannels);
    encMeta->setInt32(kKeyMaxInputSize, 8192);
    encMeta->setInt32(kKeyBitRate, kAudioBitRate);

    sp<MediaSource> encoder =
        OMXCodec::Create(client.interface(), encMeta, true, audioSource);

    encoder->start();

    int32_t n = 0;
    status_t err;
    MediaBuffer *buffer;
    while ((err = encoder->read(&buffer)) == OK) {
        printf(".");
        fflush(stdout);

        buffer->release();
        buffer = NULL;

        // Stop after 100 encoded buffers.
        if (++n == 100) {
            break;
        }
    }
    printf("$\n");

    encoder->stop();

    client.disconnect();

    return 0;
}
#endif

View File

@ -0,0 +1,861 @@
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "stagefright"
#include <media/stagefright/foundation/ADebug.h>
#include <sys/time.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include "SineSource.h"
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ALooper.h>
#include "include/ARTSPController.h"
#include "include/LiveSource.h"
#include "include/NuCachedSource2.h"
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/JPEGSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/mediametadataretriever.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
#include <fcntl.h>
using namespace android;
// Global option state, populated from the command line in main().
static long gNumRepetitions;
static long gMaxNumFrames; // 0 means decode all available.
static long gReproduceBug; // if not -1.
static bool gPreferSoftwareCodec;
static bool gPlaybackAudio;
static bool gWriteMP4;
static String8 gWriteMP4Filename;
// Returns the current wall-clock time in microseconds since the epoch.
static int64_t getNowUs() {
    struct timeval now;
    gettimeofday(&now, NULL);

    return now.tv_sec * 1000000ll + now.tv_usec;
}
// Decodes |source| to completion, choosing one of three modes from the
// global flags: audible playback (gPlaybackAudio), a random-seek stress
// loop (gReproduceBug 3..5), or the default decode benchmark repeated
// gNumRepetitions times with throughput stats printed at the end.
// |source| is consumed (cleared) by this call.
static void playSource(OMXClient *client, sp<MediaSource> &source) {
    sp<MetaData> meta = source->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    sp<MediaSource> rawSource;
    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
        // Already raw PCM -- no decoder needed.
        rawSource = source;
    } else {
        rawSource = OMXCodec::Create(
            client->interface(), meta, false /* createEncoder */, source,
            NULL /* matchComponentName */,
            gPreferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0);

        if (rawSource == NULL) {
            fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
            return;
        }
    }

    source.clear();

    status_t err = rawSource->start();

    if (err != OK) {
        fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
        return;
    }

    if (gPlaybackAudio) {
        // Play the decoded audio out loud and wait for EOS.
        AudioPlayer *player = new AudioPlayer(NULL);
        player->setSource(rawSource);
        rawSource.clear();

        player->start(true /* sourceAlreadyStarted */);

        status_t finalStatus;
        while (!player->reachedEOS(&finalStatus)) {
            usleep(100000ll);
        }

        delete player;
        player = NULL;

        return;
    } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
        // Random-seek stress loop used to reproduce specific seek bugs.
        int64_t durationUs;
        CHECK(meta->findInt64(kKeyDuration, &durationUs));

        status_t err;
        MediaBuffer *buffer;
        MediaSource::ReadOptions options;
        int64_t seekTimeUs = -1;
        for (;;) {
            err = rawSource->read(&buffer, &options);
            options.clearSeekTo();

            bool shouldSeek = false;
            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);

                printf("format changed.\n");
                continue;
            } else if (err != OK) {
                printf("reached EOF.\n");

                shouldSeek = true;
            } else {
                int64_t timestampUs;
                CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));

                bool failed = false;

                if (seekTimeUs >= 0) {
                    int64_t diff = timestampUs - seekTimeUs;

                    if (diff < 0) {
                        diff = -diff;
                    }

                    // Bug 4: frame too far from the seek target.
                    // Bug 5: negative timestamp after seeking.
                    if ((gReproduceBug == 4 && diff > 500000)
                        || (gReproduceBug == 5 && timestampUs < 0)) {
                        printf("wanted: %.2f secs, got: %.2f secs\n",
                               seekTimeUs / 1E6, timestampUs / 1E6);

                        printf("ERROR: ");
                        failed = true;
                    }
                }

                printf("buffer has timestamp %lld us (%.2f secs)\n",
                       timestampUs, timestampUs / 1E6);

                buffer->release();
                buffer = NULL;

                if (failed) {
                    break;
                }

                // Seek at random ~10% of the time (never for bug 3).
                shouldSeek = ((double)rand() / RAND_MAX) < 0.1;

                if (gReproduceBug == 3) {
                    shouldSeek = false;
                }
            }

            seekTimeUs = -1;

            if (shouldSeek) {
                seekTimeUs = (rand() * (float)durationUs) / RAND_MAX;
                options.setSeekTo(seekTimeUs);

                printf("seeking to %lld us (%.2f secs)\n",
                       seekTimeUs, seekTimeUs / 1E6);
            }
        }

        rawSource->stop();

        return;
    }

    // Default path: decode everything gNumRepetitions times and report
    // decode-throughput statistics.
    int n = 0;
    int64_t startTime = getNowUs();

    long numIterationsLeft = gNumRepetitions;
    MediaSource::ReadOptions options;

    int64_t sumDecodeUs = 0;
    int64_t totalBytes = 0;

    while (numIterationsLeft-- > 0) {
        long numFrames = 0;

        MediaBuffer *buffer;

        for (;;) {
            int64_t startDecodeUs = getNowUs();
            status_t err = rawSource->read(&buffer, &options);
            int64_t delayDecodeUs = getNowUs() - startDecodeUs;

            options.clearSeekTo();

            if (err != OK) {
                CHECK(buffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    printf("format changed.\n");
                    continue;
                }

                break;
            }

            // Progress dot every 16 non-empty buffers.
            if (buffer->range_length() > 0 && (n++ % 16) == 0) {
                printf(".");
                fflush(stdout);
            }

            sumDecodeUs += delayDecodeUs;
            totalBytes += buffer->range_length();

            buffer->release();
            buffer = NULL;

            ++numFrames;
            if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
                break;
            }

            if (gReproduceBug == 1 && numFrames == 40) {
                printf("seeking past the end now.");
                options.setSeekTo(0x7fffffffL);
            } else if (gReproduceBug == 2 && numFrames == 40) {
                printf("seeking to 5 secs.");
                options.setSeekTo(5000000);
            }
        }

        printf("$");
        fflush(stdout);

        // Rewind for the next repetition.
        options.setSeekTo(0);
    }

    rawSource->stop();
    printf("\n");

    int64_t delay = getNowUs() - startTime;
    if (!strncasecmp("video/", mime, 6)) {
        printf("avg. %.2f fps\n", n * 1E6 / delay);

        printf("avg. time to decode one buffer %.2f usecs\n",
               (double)sumDecodeUs / n);

        printf("decoded a total of %d frame(s).\n", n);
    } else if (!strncasecmp("audio/", mime, 6)) {
        // Frame count makes less sense for audio, as the output buffer
        // sizes may be different across decoders.
        printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);

        printf("decoded a total of %lld bytes\n", totalBytes);
    }
}
////////////////////////////////////////////////////////////////////////////////
// MediaSource wrapper that tags buffers read from the wrapped source with
// kKeyIsSyncFrame, for streams (e.g. RTSP, HTTP live) whose container does
// not carry sync-frame information itself.
struct DetectSyncSource : public MediaSource {
    DetectSyncSource(const sp<MediaSource> &source);

    // MediaSource interface; start/stop/getFormat simply forward.
    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();

    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options);

private:
    // Video codec of the wrapped track; only AVC detection is implemented
    // (see the CHECKs in the constructor).
    enum StreamType {
        AVC,
        MPEG4,
        H263,
        OTHER,
    };

    sp<MediaSource> mSource;
    StreamType mStreamType;

    DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
};
// Classifies the wrapped track by MIME type. Aborts (CHECK) for MPEG4 and
// H.263 input, whose sync-frame detection is not implemented yet; anything
// else falls through as OTHER.
DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
    : mSource(source),
      mStreamType(OTHER) {
    const char *mime;
    CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
        mStreamType = AVC;
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
        mStreamType = MPEG4;
        CHECK(!"sync frame detection not implemented yet for MPEG4");
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
        mStreamType = H263;
        CHECK(!"sync frame detection not implemented yet for H.263");
    }
}
// Forwards to the wrapped source.
status_t DetectSyncSource::start(MetaData *params) {
    return mSource->start(params);
}
// Forwards to the wrapped source.
status_t DetectSyncSource::stop() {
    return mSource->stop();
}
// Forwards to the wrapped source.
sp<MetaData> DetectSyncSource::getFormat() {
    return mSource->getFormat();
}
// Scans an H.264 byte stream for a start code (00 00 01) followed by a NAL
// unit of type 5 (IDR slice). Returns true if at least one IDR NAL is found
// within the buffer's valid range.
static bool isIDRFrame(MediaBuffer *buffer) {
    const uint8_t *data =
        (const uint8_t *)buffer->data() + buffer->range_offset();
    size_t size = buffer->range_length();

    size_t pos = 0;
    while (pos + 3 < size) {
        if (data[pos] == 0x00 && data[pos + 1] == 0x00
                && data[pos + 2] == 0x01) {
            uint8_t nalType = data[pos + 3] & 0x1f;
            if (nalType == 5) {
                return true;
            }
        }
        ++pos;
    }

    return false;
}
// Reads one buffer from the wrapped source and tags it with
// kKeyIsSyncFrame: for AVC streams only IDR frames are sync frames, for
// every other stream type each frame is treated as a sync frame.
//
// BUGFIX: the original set kKeyIsSyncFrame to true in BOTH branches of the
// if/else, so non-IDR AVC frames were incorrectly marked as sync frames.
status_t DetectSyncSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    status_t err = mSource->read(buffer, options);

    if (err != OK) {
        return err;
    }

    if (mStreamType == AVC) {
        bool isIDR = isIDRFrame(*buffer);
        (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, isIDR);
    } else {
        (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, true);
    }

    return OK;
}
////////////////////////////////////////////////////////////////////////////////
// Remuxes |sources| into gWriteMP4Filename -- currently as an MPEG2-TS
// stream despite the function name (see the #if below) -- capping the
// output at one minute. Sources lacking reliable sync-frame information
// are wrapped in DetectSyncSource first.
static void writeSourcesToMP4(
        Vector<sp<MediaSource> > &sources, bool syncInfoPresent) {
#if 0
    sp<MPEG4Writer> writer =
        new MPEG4Writer(gWriteMP4Filename.string());
#else
    sp<MPEG2TSWriter> writer =
        new MPEG2TSWriter(gWriteMP4Filename.string());
#endif

    // at most one minute.
    writer->setMaxFileDuration(60000000ll);

    for (size_t i = 0; i < sources.size(); ++i) {
        sp<MediaSource> source = sources.editItemAt(i);

        CHECK_EQ(writer->addSource(
                    syncInfoPresent ? source : new DetectSyncSource(source)),
                 (status_t)OK);
    }

    sp<MetaData> params = new MetaData;
    // Write as fast as possible rather than pacing to real time.
    params->setInt32(kKeyNotRealTime, true);
    CHECK_EQ(writer->start(params.get()), (status_t)OK);

    while (!writer->reachedEOS()) {
        usleep(100000);
    }

    writer->stop();
}
// Seeks through |source| in 60 ms steps using SEEK_PREVIOUS_SYNC and
// prints, for each step, the requested time, the timestamp actually
// returned, and the difference between the two (tab-separated).
static void performSeekTest(const sp<MediaSource> &source) {
    CHECK_EQ((status_t)OK, source->start());

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs;
         seekTimeUs += 60000ll) {
        MediaSource::ReadOptions options;
        options.setSeekTo(
                seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

        MediaBuffer *buffer;
        status_t err;
        for (;;) {
            err = source->read(&buffer, &options);
            options.clearSeekTo();

            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);
                continue;
            }

            if (err != OK) {
                CHECK(buffer == NULL);
                break;
            }

            // Keep reading past empty buffers until a real frame arrives.
            if (buffer->range_length() > 0) {
                break;
            }

            CHECK(buffer != NULL);

            buffer->release();
            buffer = NULL;
        }

        if (err == OK) {
            int64_t timeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));

            printf("%lld\t%lld\t%lld\n", seekTimeUs, timeUs, seekTimeUs - timeUs);

            buffer->release();
            buffer = NULL;
        } else {
            printf("ERROR\n");
            break;
        }
    }

    CHECK_EQ((status_t)OK, source->stop());
}
// Prints the command-line help for the tool to stderr.
static void usage(const char *me) {
    static const char *const kFlagHelp[] = {
        " -h(elp)\n",
        " -a(udio)\n",
        " -n repetitions\n",
        " -l(ist) components\n",
        " -m max-number-of-frames-to-decode in each pass\n",
        " -b bug to reproduce\n",
        " -p(rofiles) dump decoder profiles supported\n",
        " -t(humbnail) extract video thumbnail or album art\n",
        " -s(oftware) prefer software codec\n",
        " -o playback audio\n",
        " -w(rite) filename (write to .mp4 file)\n",
        " -k seek test\n",
    };

    fprintf(stderr, "usage: %s\n", me);
    for (size_t i = 0; i < sizeof(kFlagHelp) / sizeof(kFlagHelp[0]); ++i) {
        fprintf(stderr, "%s", kFlagHelp[i]);
    }
}
// Entry point: parses the option flags into the g* globals, handles the
// standalone modes (thumbnail extraction, profile dump, component listing)
// and then decodes / remuxes / seek-tests each remaining path argument.
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpProfiles = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;

    sp<ALooper> looper;
    sp<ARTSPController> rtspController;

    int res;
    while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            // -n, -m and -b all take a positive integer argument;
            // anything unparsable falls back to 1.
            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'p':
            {
                dumpProfiles = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

    if (extractThumbnail) {
        // Ask the media player service for a thumbnail (or album art) of
        // every file argument, then exit.
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever(getpid());

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            CHECK_EQ(retriever->setDataSource(filename), (status_t)OK);
            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                            MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

            if (mem != NULL) {
                printf("getFrameAtTime(%s) => OK\n", filename);
            } else {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    printf("extractAlbumArt(%s) => OK\n", filename);
                } else {
                    printf("both getFrameAtTime and extractAlbumArt "
                           "failed on file '%s'.\n", filename);
                }
            }
        }

        return 0;
    }

    if (dumpProfiles) {
        // Print the profile/level pairs every decoder supports for a fixed
        // list of MIME types.
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        const char *kMimeTypes[] = {
            MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
            MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AAC,
            MEDIA_MIMETYPE_AUDIO_AMR_NB, MEDIA_MIMETYPE_AUDIO_AMR_WB,
            MEDIA_MIMETYPE_AUDIO_MPEG
        };

        for (size_t k = 0; k < sizeof(kMimeTypes) / sizeof(kMimeTypes[0]);
             ++k) {
            printf("type '%s':\n", kMimeTypes[k]);

            Vector<CodecCapabilities> results;
            CHECK_EQ(QueryCodecs(omx, kMimeTypes[k],
                                 true, // queryDecoders
                                 &results), (status_t)OK);

            for (size_t i = 0; i < results.size(); ++i) {
                printf("  decoder '%s' supports ",
                       results[i].mComponentName.string());

                if (results[i].mProfileLevels.size() == 0) {
                    printf("NOTHING.\n");
                    continue;
                }

                for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) {
                    const CodecProfileLevel &profileLevel =
                        results[i].mProfileLevels[j];

                    printf("%s%ld/%ld", j > 0 ? ", " : "",
                           profileLevel.mProfile, profileLevel.mLevel);
                }

                printf("\n");
            }
        }
    }

    if (listComponents) {
        // Enumerate every OMX component node the service knows about.
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        List<IOMX::ComponentInfo> list;
        omx->listNodes(&list);

        for (List<IOMX::ComponentInfo>::iterator it = list.begin();
             it != list.end(); ++it) {
            printf("%s\n", (*it).mName.string());
        }
    }

    DataSource::RegisterDefaultSniffers();

    OMXClient client;
    status_t err = client.connect();
    // NOTE(review): err is never checked here -- a failed connect surfaces
    // later as a decoder instantiation failure.

    for (int k = 0; k < argc; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource = DataSource::CreateFromURI(filename);

        // A NULL dataSource is fatal except for the schemes that build
        // their own source below (sine:, rtsp://, httplive://).
        if (strncasecmp(filename, "sine:", 5)
                && strncasecmp(filename, "rtsp://", 7)
                && strncasecmp(filename, "httplive://", 11)
                && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            // "sine:<rate>" generates a test tone; rate defaults to 44100.
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<MediaExtractor> extractor;

            if (!strncasecmp("rtsp://", filename, 7)) {
                if (looper == NULL) {
                    looper = new ALooper;
                    looper->start();
                }

                rtspController = new ARTSPController(looper);
                status_t err = rtspController->connect(filename);
                if (err != OK) {
                    fprintf(stderr, "could not connect to rtsp server.\n");
                    return -1;
                }

                extractor = rtspController.get();

                syncInfoPresent = false;
            } else if (!strncasecmp("httplive://", filename, 11)) {
                // Rewrite httplive:// back to http:// and wrap the stream
                // in a caching source.
                String8 uri("http://");
                uri.append(filename + 11);

                dataSource = new LiveSource(uri.string());
                dataSource = new NuCachedSource2(dataSource);

                extractor =
                    MediaExtractor::Create(
                            dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

                syncInfoPresent = false;
            } else {
                extractor = MediaExtractor::Create(dataSource);
                if (extractor == NULL) {
                    fprintf(stderr, "could not create extractor.\n");
                    return -1;
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                // Remux mode: pick at most one audio and one video track.
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = extractor->getTrack(i);

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                // Decode mode: pick the first track of the requested kind.
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %lld us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = extractor->getTrack(i);
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(&client, mediaSource);
        }

        if (rtspController != NULL) {
            rtspController->disconnect();
            rtspController.clear();

            sleep(3);
        }
    }

    client.disconnect();

    return 0;
}