Hi DaneLLL,
Max performance results in very little improvement (13 FPS). Please see the output of the tegrastats command.
E/TegraStats( 6775): RAM 628/1739MB (lfb 173x4MB) cpu [0%,0%,0%,0%]@1938 EMC 36%@792 AVP 5%@300 VDE 444 GR3D 0%@852 EDP limit 0
E/TegraStats( 6775): RAM 628/1739MB (lfb 173x4MB) cpu [57%,74%,65%,64%]@1861 EMC 35%@792 AVP 4%@300 VDE 444 GR3D 4%@852 EDP limit 0
E/TegraStats( 6775): RAM 627/1739MB (lfb 173x4MB) cpu [51%,59%,66%,58%]@1938 EMC 35%@792 AVP 4%@300 VDE 444 GR3D 2%@852 EDP limit 0
E/TegraStats( 6775): RAM 628/1739MB (lfb 173x4MB) cpu [51%,61%,64%,56%]@1938 EMC 35%@792 AVP 4%@204 VDE 444 GR3D 0%@852 EDP limit 0
E/TegraStats( 6775): RAM 628/1739MB (lfb 173x4MB) cpu [44%,64%,66%,65%]@1912 EMC 36%@792 AVP 4%@300 VDE 444 GR3D 0%@852 EDP limit 0
E/TegraStats( 6775): RAM 628/1739MB (lfb 173x4MB) cpu [54%,72%,52%,49%]@1912 EMC 35%@792 AVP 4%@300 VDE 444 GR3D 0%@852 EDP limit 0
When I checked with the software decoder (libjpeg-turbo), I was able to achieve 30 FPS for 720p MJPEG video.
Below is a code snippet showing how I submit frames to, and retrieve frames from, the HW decoder.
int nv_mjpegconversion( unsigned char * InputBuffer, unsigned char * OutputBuffer, int InWidth, int InHeight,int Framesize) {
using namespace android;
static int64_t kTimeout = 0;
int numberoftrack = 1;
int tracksize = 1;
int framecount = 0;
status_t err;
size_t i = 0;
bool sawInputEOS = false;
long lSize;
char * buffer;;
size_t result;
uint8_t * InBuffer;
uint8_t * OutBuffer;
for (;;) {
if (!sawInputEOS) {
size_t trackIndex = 0;
if (framecount > 0) {
sawInputEOS = true;
} else {
CodecState *state = &stateByTrack2.editValueFor(trackIndex);
size_t index;
err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
if (err == OK) {
framecount++;
const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
InBuffer = buffer->data();
memcpy(InBuffer,InputBuffer,Framesize);
int64_t timeUs = 10;//what is this time????
uint32_t bufferFlags = 0;
err = state->mCodec->queueInputBuffer(
index,
0,
Framesize,
timeUs,
bufferFlags);
CHECK_EQ(err, (status_t)OK);
} else {
CHECK_EQ(err, -EAGAIN);
}
}
} else {
CodecState *state = &stateByTrack2.editValueAt(0);
if (!state->mSignalledInputEOS) {
size_t index;
status_t err =
state->mCodec->dequeueInputBuffer(&index, kTimeout);
if (err == OK) {
ALOGV("signalling input EOS on track %d", i);
err = state->mCodec->queueInputBuffer(
index,
0 /* offset */,
0 /* size */,
0ll /* timeUs */,
MediaCodec::BUFFER_FLAG_EOS);
CHECK_EQ(err, (status_t)OK);
state->mSignalledInputEOS = true;
} else {
CHECK_EQ(err, -EAGAIN);
}
}
}
bool sawOutputEOSOnAllTracks = true;
CodecState *state = &stateByTrack2.editValueAt(i);
if (!state->mSawOutputEOS) {
sawOutputEOSOnAllTracks = false;
}
if (sawOutputEOSOnAllTracks) {
break;
}
state = &stateByTrack2.editValueAt(0);
if (state->mSawOutputEOS) {
continue;
}
size_t index;
size_t offset;
size_t size;
int64_t presentationTimeUs;
uint32_t flags;
status_t err = state->mCodec->dequeueOutputBuffer(
&index, &offset, &size, &presentationTimeUs, &flags,
kTimeout);
if (err == OK) {
//Convert the YUV16 data to YUV420SP
if(size > 0)
{
const sp<ABuffer> &Outbuffer = state->mOutBuffers.itemAt(index);
OutBuffer = Outbuffer->data();
yuv422sp_2_yuv420(OutputBuffer,OutBuffer,InWidth, InHeight);
}
//ALOGE("INFO_FORMAT_CHANGED: %s", format->debugString().c_str());
err = state->mCodec->releaseOutputBuffer(index);
CHECK_EQ(err, (status_t)OK);
if (flags & MediaCodec::BUFFER_FLAG_EOS) {
state->mSawOutputEOS = true;
}
} else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
CHECK_EQ((status_t)OK,
state->mCodec->getOutputBuffers(&state->mOutBuffers));
} else if (err == INFO_FORMAT_CHANGED) {
sp<AMessage> format;
CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
} else {
CHECK_EQ(err, -EAGAIN);
}
}
return 0;
}