Images to video with MediaCodec and MediaMuxer

I have a series of images stored locally as JPEG files. My images were captured with the camera preview, whose PreviewFormat defaults to NV21. I want to generate a short video from a fixed number of these images.

I will not use FFmpeg because it requires the NDK and introduces compatibility problems.

MediaCodec and MediaMuxer seem to be the right tools for this, but there is no working solution on the web.

Here are a few references that led to my current approach:

1. EncodeAndMuxTest: http://bigflake.com/mediacodec/EncodeAndMuxTest.java.txt

This one was written by fadden. It fits my needs almost exactly, except that it uses createInputSurface rather than queueInputBuffer.

2. Converting a bitmap array to YUV (YCbCr NV21)

I do the conversion following this answer: https://stackoverflow.com/a/17116985/3047840

3. Using MediaCodec to save a series of images as a video

That question looks similar to mine, but it does not deal with MediaMuxer; the general pattern I am trying to reproduce is sketched below.
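For clarity, here is the drain-and-mux pattern reduced to a minimal sketch. The encoder and muxer variables and their setup are assumed, not taken from any of the references above. The points that matter: MediaMuxer.start() may only be called after the encoder reports INFO_OUTPUT_FORMAT_CHANGED, and BUFFER_FLAG_CODEC_CONFIG buffers must not be written as samples, since the csd-0/csd-1 data is already carried in the track format.

    //Minimal sketch (assumed setup): drain an H.264 encoder into a MediaMuxer.
    //'encoder' is assumed configured with CONFIGURE_FLAG_ENCODE and started;
    //'muxer' is assumed constructed but not yet started.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean muxerStarted = false;
    int trackIndex = -1;
    while (true) {
        int status = encoder.dequeueOutputBuffer(info, 10000 /*us*/);
        if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            continue; //no output yet; a real loop would feed more input here
        } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            //the muxer must not be started before this point
            trackIndex = muxer.addTrack(encoder.getOutputFormat());
            muxer.start();
            muxerStarted = true;
        } else if (status >= 0) {
            ByteBuffer data = encoder.getOutputBuffers()[status];
            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                info.size = 0; //csd-0/csd-1 already went into the track format
            }
            if (info.size > 0 && muxerStarted) {
                data.position(info.offset);
                data.limit(info.offset + info.size);
                muxer.writeSampleData(trackIndex, data, info);
            }
            encoder.releaseOutputBuffer(status, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }
    }
    muxer.stop();
    muxer.release();

Note that nothing from the decoder goes to the muxer in this sketch; the muxer consumes the encoder's compressed output directly.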

My code is the following:

public class EncodeAndMux extends Activity {
private static final String TAG = "EncodeAndMuxTest";

private static final boolean VERBOSE = false;

private static final File OUTPUT_DIR = Environment
        .getExternalStorageDirectory();

private static final String MIME_TYPE = "video/avc";

private static final int FRAME_RATE = 10;
//10 seconds between I-frames
private static final int IFRAME_INTERVAL = 10;

private static final int NUM_FRAMES = 5;
private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test";
//size of a frame, in pixels
private int mWidth = -1;

private int mHeight = -1;
//bit rate, in bits per second
private int mBitRate = -1;

private byte[] mFrame;

//largest color component delta seen (i.e. actual vs. expected)
private int mLargestColorDelta;
//encoder /muxer state
private MediaCodec mEncoder;
private MediaMuxer mMuxer;
private int mTrackIndex;
private boolean mMuxerStarted;
private Utils mUtils;
private float mPadding;
private int mColumnWidth;

private static final int TEST_Y = 120; //YUV values for colored rect
private static final int TEST_U = 160;
private static final int TEST_V = 200;
private static final int TEST_R0 = 0; //RGB equivalent of {0,0,0}
private static final int TEST_G0 = 136;
private static final int TEST_B0 = 0;
private static final int TEST_R1 = 236; //RGB equivalent of {120,160,200}
private static final int TEST_G1 = 50;
private static final int TEST_B1 = 186;

private static final boolean DEBUG_SAVE_FILE = false; //save copy of
                                                        //encoded movie
//allocate one of these up front so we don't need to do it every time
private MediaCodec.BufferInfo mBufferInfo;
private ArrayList<String> mImagePaths = new ArrayList<String>();

byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {

    int[] argb = new int[inputWidth * inputHeight];
    scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
    encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
    scaled.recycle();
    return yuv;
}

void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;

    int yIndex = 0;
    int uvIndex = frameSize;

    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {

            a = (argb[index] & 0xff000000) >>> 24; //alpha, not used
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff) >> 0;

            //well known RGB to YUV algorithm
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            //NV21 has a plane of Y and interleaved planes of VU each
            //sampled by a factor of 2
            //meaning for every 4 Y pixels there are 1 V and 1 U. Note the
            //sampling is every other
            //pixel AND every other scanline.
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0
                    : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0
                        : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0
                        : ((U > 255) ? 255 : U));
            }

            index++;
        }
    }
}
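
//NOTE (editor): the method above writes V before U, i.e. NV21, matching the
//camera preview format. The encoder color format reported in the logcat below
//is 21 = COLOR_FormatYUV420SemiPlanar, which is NV12 (U before V), so feeding
//it NV21 data swaps the chroma channels; the two uvIndex++ writes would need
//to be reversed for that format.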

public static Bitmap decodeFile(String filePath, int WIDTH, int HEIGHT) {
    try {

        File f = new File(filePath);

        BitmapFactory.Options o = new BitmapFactory.Options();
        o.inJustDecodeBounds = true;
        o.inPurgeable = true;
        o.inInputShareable = true;
        BitmapFactory.decodeStream(new FileInputStream(f), null, o);

        final int REQUIRED_WIDTH = WIDTH;
        final int REQUIRED_HEIGHT = HEIGHT;
        int scale = 1;
        while (o.outWidth / scale / 2 >= REQUIRED_WIDTH
                && o.outHeight / scale / 2 >= REQUIRED_HEIGHT)
            scale *= 2;
        BitmapFactory.Options o2 = new BitmapFactory.Options();
        o2.inSampleSize = scale;
        o2.inPurgeable = true;
        o2.inInputShareable = true;
        return BitmapFactory.decodeStream(new FileInputStream(f), null, o2);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    return null;
}

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_encode_and_mux);
    mUtils = new Utils(this);
    mImagePaths = mUtils.getBackFilePaths();
    mPadding = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
            AppConstant.GRID_PADDING, getResources().getDisplayMetrics());
    mColumnWidth = (int) ((mUtils.getScreenWidth() - ((AppConstant.NUM_OF_COLUMNS + 1) * mPadding)) / AppConstant.NUM_OF_COLUMNS);


    try {
        testEncodeDecodeVideoFromBufferToSurface720p();
    } catch (Throwable e) {
        e.printStackTrace();
    }
}

/**
 * Returns the first codec capable of encoding the specified MIME type, or null if no
 * match was found.
 */
private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

        if (!codecInfo.isEncoder()) {
            continue;
        }

        String[] types = codecInfo.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(mimeType)) {
                return codecInfo;
            }
        }
    }
    return null;
}

/**
 * Returns a color format that is supported by the codec and by this code.  If no
 * match is found, this logs an error and returns 0 -- the set of formats known here
 * should be expanded for new platforms.
 */
private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
    for (int i = 0; i < capabilities.colorFormats.length; i++) {
        int colorFormat = capabilities.colorFormats[i];
        if (isRecognizedFormat(colorFormat)) {
            return colorFormat;
        }
    }
    Log.e("","couldn't find a good color format for " + codecInfo.getName() + " /" + mimeType);
    return 0;   //not reached
}

/**
 * Returns true if this is a color format that this test code understands (i.e. we know how
 * to read and generate frames in this format).
 */
private static boolean isRecognizedFormat(int colorFormat) {
    switch (colorFormat) {
        //these are the formats we know how to handle for this test
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            return false;
    }
}

/**
 * Returns true if the specified color format is semi-planar YUV.  Throws an exception
 * if the color format is not recognized (e.g. not YUV).
 */
private static boolean isSemiPlanarYUV(int colorFormat) {
    switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            return false;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            throw new RuntimeException("unknown format " + colorFormat);
    }
}

/**
 * Does the actual work for encoding frames from buffers of byte[].
 */
private void doEncodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat,
        MediaCodec decoder, boolean toSurface) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    ByteBuffer[] decoderInputBuffers = null;
    ByteBuffer[] decoderOutputBuffers = null;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    MediaFormat decoderOutputFormat = null;
    int generateIndex = 0;
    int checkIndex = 0;
    int badFrames = 0;
    boolean decoderConfigured = false;
    OutputSurface outputSurface = null;

    //The size of a frame of video data, in the formats we handle, is stride*sliceHeight
    //for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels.  Assuming
    //stride == width and sliceHeight == height, that yields width * height * 3 / 2 bytes.

    //Just out of curiosity.
    long rawSize = 0;
    long encodedSize = 0;

    //Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
    //stream, not a .mp4 file, so not all players will know what to do with it.


    if (toSurface) {
        outputSurface = new OutputSurface(mWidth, mHeight);
    }

    //Loop until the output side is done.
    boolean inputDone = false;
    boolean encoderDone = false;
    boolean outputDone = false;
    while (!outputDone) {
        Log.e(TAG, "loop");

        //If we're not done submitting frames, generate a new one and submit it.  By
        //doing this on every loop we're working to ensure that the encoder always has
        //work to do.
        //
        //We don't really want a timeout here, but sometimes there's a delay opening
        //the encoder device, so a short timeout can keep us from spinning hard.
        if (!inputDone) {
            int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
            Log.e(TAG, "inputBufIndex=" + inputBufIndex);
            if (inputBufIndex >= 0) {
                long ptsUsec = computePresentationTime(generateIndex);
                if (generateIndex == NUM_FRAMES) {
                    //Send an empty frame with the end-of-stream flag set.  If we set EOS
                    //on a frame with data, that frame data will be ignored, and the
                    //output will be short one frame.
                    encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    Log.e(TAG, "sent input EOS (with zero-length frame)");
                } else {
                    generateFrame(generateIndex, encoderColorFormat, mFrame);
                    //generateFrame(generateIndex);

                    ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
                    //the buffer should be sized to hold one full frame
                    inputBuf.clear();
                    inputBuf.put(mFrame);

                    encoder.queueInputBuffer(inputBufIndex, 0, mFrame.length, ptsUsec, 0);
                    Log.e(TAG, "submitted frame " + generateIndex + " to enc");
                }
                generateIndex++;
            } else {
                //either all in use, or we timed out during initial setup
                Log.e(TAG, "input buffer not available");
            }
        }

        //Check for output from the encoder.  If there's no output yet, we either need to
        //provide more input, or we need to wait for the encoder to work its magic.  We
        //can't actually tell which is the case, so if we can't get an output buffer right
        //away we loop around and see if it wants more input.
        //
        //Once we get EOS from the encoder, we don't need to do this anymore.
        if (!encoderDone) {
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                //no output available yet
                Log.e(TAG, "no output from encoder available");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                //not expected for an encoder
                encoderOutputBuffers = encoder.getOutputBuffers();
                Log.e(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                //expected exactly once, before the first encoded data; the muxer is started here


                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = encoder.getOutputFormat();
                Log.e(TAG, "encoder output format changed: " + newFormat);

                //now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                Log.e(TAG, "muxer defined muxer format: " + newFormat);
                mMuxer.start();
                mMuxerStarted = true;

            } else if (encoderStatus < 0) {
                Log.e("","unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { //encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }

                //It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                encodedData.position(info.offset);
                encodedData.limit(info.offset + info.size);

                encodedSize += info.size;

                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    //Codec config info.  Only expected on first packet.  One way to
                    //handle this is to manually stuff the data into the MediaFormat
                    //and pass that to configure().  We do that here to exercise the API.

                    MediaFormat format =
                            MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
                    format.setByteBuffer("csd-0", encodedData);
                    decoder.configure(format, toSurface ? outputSurface.getSurface() : null,
                            null, 0);

                    decoder.start();
                    decoderInputBuffers = decoder.getInputBuffers();
                    decoderOutputBuffers = decoder.getOutputBuffers();
                    decoderConfigured = true;
                    Log.e(TAG, "decoder configured (" + info.size + " bytes)"+format);
                } else {
                    //Get a decoder input buffer, blocking until it's available.

                    int inputBufIndex = decoder.dequeueInputBuffer(-1);
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.clear();
                    inputBuf.put(encodedData);
                    decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                            info.presentationTimeUs, info.flags);

                    encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    Log.e(TAG, "passed " + info.size + " bytes to decoder"
                            + (encoderDone ? " (EOS)" : ""));
                    Log.e("encoderDone",encoderDone+"");
                }

                encoder.releaseOutputBuffer(encoderStatus, false);
            }
        }

        //Check for output from the decoder.  We want to do this on every loop to avoid
        //the possibility of stalling the pipeline.  We use a short timeout to avoid
        //burning CPU if the decoder is hard at work but the next frame isn't quite ready.
        //
        //If we're decoding to a Surface, we'll get notified here as usual but the
        //ByteBuffer references will be null.  The data is sent to Surface instead.
        if (decoderConfigured) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, 3*TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                //no output available yet
                Log.e(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                //The storage associated with the direct ByteBuffer may already be unmapped,
                //so attempting to access data through the old output buffer array could
                //lead to a native crash.
                Log.e(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                //this happens before the first frame is returned
                decoderOutputFormat = decoder.getOutputFormat();
                Log.e(TAG, "decoder output format changed: " +
                        decoderOutputFormat);
            } else if (decoderStatus < 0) {
                Log.e(TAG, "unexpected result from deocder.dequeueOutputBuffer: " + decoderStatus);

            } else {  //decoderStatus >= 0
                if (!toSurface) {
                    ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];

                    outputFrame.position(info.offset);
                    outputFrame.limit(info.offset + info.size);
                    mMuxer.writeSampleData(mTrackIndex, outputFrame,
                            info);
                    rawSize += info.size;
                    if (info.size == 0) {
                        Log.e(TAG, "got empty frame");
                    } else {
                        Log.e(TAG, "decoded, checking frame " + checkIndex);

                        if (!checkFrame(checkIndex++, decoderOutputFormat, outputFrame)) {
                            badFrames++;
                        }
                    }

                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.e(TAG, "output EOS");
                        outputDone = true;
                    }
                    decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                } else {
                    Log.e(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    rawSize += info.size;
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.e(TAG, "output EOS");
                        outputDone = true;
                    }

                    boolean doRender = (info.size != 0);

                    //As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    //to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    //that the texture will be available before the call returns, so we
                    //need to wait for the onFrameAvailable callback to fire.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        Log.e(TAG, "awaiting frame " + checkIndex);

                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
            }
        }
    }

    Log.e(TAG, "decoded " + checkIndex + " frames at "
            + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize);

    if (outputSurface != null) {
        outputSurface.release();
    }

    if (checkIndex != NUM_FRAMES) {
        Log.e(TAG, "expected " + NUM_FRAMES + " frames, got " + checkIndex);
    }
    if (badFrames != 0) {
        Log.e(TAG, "Found " + badFrames + " bad frames");
    }
}
private void generateFrame(int frameIndex) {

    Bitmap bitmap = decodeFile(mImagePaths.get(frameIndex), mColumnWidth,
            mColumnWidth);

    mFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);
}
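
//NOTE: this overload assumes the decoded bitmap is exactly mWidth x mHeight.
//decodeFile() only scales down by powers of two via inSampleSize, so the
//returned frame can be a different size than the 1280x720 buffer the encoder
//was configured for; the bitmap would need an explicit rescale first, e.g.
//Bitmap.createScaledBitmap(bitmap, mWidth, mHeight, true), before getNV21().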

/**
 * Generates data for frame N into the supplied buffer.  We have an 8-frame animation
 * sequence that wraps around.  It looks like this:
 * <pre>
 *   0 1 2 3
 *   7 6 5 4
 * </pre>
 * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
 */
private void generateFrame(int frameIndex, int colorFormat, byte[] mFrame) {
    final int HALF_WIDTH = mWidth / 2;
    boolean semiPlanar = isSemiPlanarYUV(colorFormat);
    //Set to zero.  In YUV this is a dull green.
    Arrays.fill(mFrame, (byte) 0);

    int startX, startY, countX, countY;

    frameIndex %= 8;
    //frameIndex = (frameIndex /8) % 8;    //use this instead for debug -- easier to see
    if (frameIndex < 4) {
        startX = frameIndex * (mWidth / 4);
        startY = 0;
    } else {
        startX = (7 - frameIndex) * (mWidth / 4);
        startY = mHeight / 2;
    }

    for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
        for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
            if (semiPlanar) {
                //full-size Y, followed by UV pairs at half resolution
                //e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
                //e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
                //       OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                mFrame[y * mWidth + x] = (byte) TEST_Y;
                if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                    mFrame[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
                    mFrame[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
                }
            } else {
                //full-size Y, followed by quarter-size U and quarter-size V
                //e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
                //e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
                mFrame[y * mWidth + x] = (byte) TEST_Y;
                if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                    mFrame[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
                    mFrame[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
                              (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
                }
            }
        }
    }
}




/**
 * Sets the desired frame size and bit rate.
 */
private void setParameters(int width, int height, int bitRate) {
    if ((width % 16) != 0 || (height % 16) != 0) {
        Log.w(TAG, "WARNING: width or height not multiple of 16");
    }
    mWidth = width;
    mHeight = height;
    mBitRate = bitRate;
    mFrame = new byte[mWidth * mHeight * 3 / 2];
}
public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
    setParameters(1280, 720, 6000000);
    encodeDecodeVideoFromBuffer(false);
}

}
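
The class above refers to members that are not in the paste: encodeDecodeVideoFromBuffer(boolean), computePresentationTime(int), checkFrame() and checkSurfaceFrame(). They come from fadden's EncodeDecodeTest, which this class is based on; the first two look roughly like the sketch below. The MediaMuxer construction is an assumption of mine, since the original test does not use a muxer at all.

//Sketch of the members missing from the paste, based on fadden's
//EncodeDecodeTest. The MediaMuxer setup is an assumption; the original
//test has no muxer.
private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
    MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
    if (codecInfo == null) {
        Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
        return;
    }
    int colorFormat = selectColorFormat(codecInfo, MIME_TYPE);

    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

    MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();

    MediaCodec decoder = MediaCodec.createDecoderByType(MIME_TYPE);

    //assumed muxer setup -- not part of the original test
    File outputFile = new File(OUTPUT_DIR, "test-" + mWidth + "x" + mHeight + ".mp4");
    mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

    try {
        doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
    } finally {
        encoder.stop();
        encoder.release();
        decoder.release(); //stop() would throw if the decoder was never started
        if (mMuxerStarted) {
            mMuxer.stop();
        }
        mMuxer.release();
    }
}

//From EncodeDecodeTest: generates the presentation time for frame N, in microseconds.
private static long computePresentationTime(int frameIndex) {
    return 132 + frameIndex * 1000000 / FRAME_RATE;
}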

Logcat:

  12-17 18:25:47.405: E/EncodeAndMuxTest(16415): found codec: OMX.qcom.video.encoder.avc
  12-17 18:25:47.405: I/OMXClient(16415): Using client-side OMX mux.
  12-17 18:25:47.455: E/EncodeAndMuxTest(16415): found colorFormat: 21
  12-17 18:25:47.455: E/EncodeAndMuxTest(16415): format: {frame-rate=10, bitrate=6000000, height=720, mime=video/avc, color-format=21, i-frame-interval=10, width=1280}
  12-17 18:25:47.465: I/OMXClient(16415): Using client-side OMX mux.
  12-17 18:25:47.495: E/ACodec(16415): [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
  12-17 18:25:47.495: I/ACodec(16415): setupVideoEncoder succeeded
  12-17 18:25:47.535: I/OMXClient(16415): Using client-side OMX mux.
  12-17 18:25:47.545: E/EncodeAndMuxTest(16415): loop
  12-17 18:25:47.545: E/EncodeAndMuxTest(16415): inputBufIndex=0
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): submitted frame 0 to enc
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): encoder output format changed: {csd-1=java.nio.ByteArrayBuffer[position=0,limit=8,capacity=8], height=720, mime=video/avc, csd-0=java.nio.ByteArrayBuffer[position=0,limit=18,capacity=18], what=1869968451, width=1280}
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): muxer defined muxer format: {csd-1=java.nio.ByteArrayBuffer[position=0,limit=8,capacity=8], height=720, mime=video/avc, csd-0=java.nio.ByteArrayBuffer[position=0,limit=18,capacity=18], what=1869968451, width=1280}
  12-17 18:25:47.655: I/MPEG4Writer(16415): limits: 2147483647/0 bytes/us, bit rate: -1 bps and the estimated moov size 3072 bytes
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): inputBufIndex=2
  12-17 18:25:47.795: E/EncodeAndMuxTest(16415): submitted frame 1 to enc
  12-17 18:25:47.825: E/EncodeAndMuxTest(16415): decoder configured (26 bytes){csd-0=java.nio.DirectByteBuffer[position=0,limit=26,capacity=692224], height=720, width=1280, mime=video/avc}
  12-17 18:25:47.855: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:47.855: E/EncodeAndMuxTest(16415): inputBufIndex=0
  12-17 18:25:47.976: E/EncodeAndMuxTest(16415): submitted frame 2 to enc
  12-17 18:25:48.136: E/EncodeAndMuxTest(16415): passed 3188 bytes to decoder
  12-17 18:25:48.176: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.176: E/EncodeAndMuxTest(16415): inputBufIndex=1
  12-17 18:25:48.296: E/EncodeAndMuxTest(16415): submitted frame 3 to enc
  12-17 18:25:48.296: E/EncodeAndMuxTest(16415): passed 1249 bytes to decoder
  12-17 18:25:48.326: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.326: E/EncodeAndMuxTest(16415): loop
  12-17 18:25:48.326: E/EncodeAndMuxTest(16415): inputBufIndex=2
  12-17 18:25:48.396: E/EncodeAndMuxTest(16415): submitted frame 4 to enc
  12-17 18:25:48.396: E/EncodeAndMuxTest(16415): passed 3085 bytes to decoder
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): inputBufIndex=0
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): sent input EOS (with zero-length frame)
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): passed 3056 bytes to decoder
  12-17 18:25:48.466: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.466: E/EncodeAndMuxTest(16415): passed 1085 bytes to decoder (EOS)
  12-17 18:25:48.476: E/EncodeAndMuxTest(16415): decoder output buffers changed
  12-17 18:25:48.496: E/EncodeAndMuxTest(16415): decoder output format changed:

Source: asked by lifelogger | 2013-12-15
