MediaCodec.dequeueOutputBuffer takes a long time when encoding h264 on Android

I am trying to encode h264 video on Android for real-time video streaming using MediaCodec, but dequeueOutputBuffer keeps taking a long time (it is actually sometimes very fast, but very slow at other times; see the log output below). I have seen it take up to 200 ms before an output buffer was ready. Is there something wrong in my code, or do you think this is an issue with OMX.Nvidia.h264.encoder?

Maybe I need to downscale the images from 1280x720 to something smaller? Or maybe I need to dequeue and queue more input buffers while I am waiting for an output buffer? (There are 6 input and 6 output buffers available.) I am using Android API 19, so I cannot use the asynchronous MediaCodec processing methods. I am actually streaming images from a Google Project Tango tablet, so my other guess is that Tango's background operations might take too long and slow the encoder down. Any ideas what might be slowing this down?

01-20 23:36:30.728 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.400666ms. 
01-20 23:36:30.855 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 94.290667ms. 
01-20 23:36:30.880 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.57ms. 
01-20 23:36:30.929 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 4.878417ms. 
01-20 23:36:31.042 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 77.495417ms. 
01-20 23:36:31.064 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.3225ms. 
01-20 23:36:31.182 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 74.777583ms. 
01-20 23:36:31.195 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.23ms. 
01-20 23:36:31.246 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 17.243583ms. 
01-20 23:36:31.350 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 80.14725ms. 
01-20 23:36:31.373 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 2.493834ms. 
01-20 23:36:31.421 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.273ms. 
01-20 23:36:31.546 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 93.543667ms. 
01-20 23:36:31.576 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 5.309334ms. 
01-20 23:36:31.619 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.402583ms. 
01-20 23:36:31.686 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 22.5485ms. 
01-20 23:36:31.809 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 91.392083ms. 

The relevant parts of my code are below:

public class StreamingThread extends Thread { 
    ... 

    // encoding 
    private MediaCodec mVideoEncoder = null; 
    private ByteBuffer[] mEncoderInputBuffers = null; 
    private ByteBuffer[] mEncoderOutputBuffers = null; 
    private NV21Convertor mNV21Converter = null; 

    public static native VideoFrame getNewFrame(); 

    public StreamingThread() 
    { 
     this.setPriority(MAX_PRIORITY); 
    } 

    @Override 
    public void run() 
    { 
     Looper.prepare(); 
     init(); 
     Looper.loop(); 
    } 

    private void init() 
    { 
     mHandler = new Handler() { 
      public void handleMessage(Message msg) { 
       // process incoming messages here 
       switch(msg.what) 
       { 
        case HAVE_NEW_FRAME: // new frame has arrived (signaled from main thread) 
         processBufferedFrames(); 
         break; 

        case CLOSE_THREAD: 
         close(); 
         break; 

        default: 
         Log.e(LOGTAG, "received unknown message!"); 
       } 
      } 
     }; 

     try { 
      ... 
      // set up video encoding 
      final String mime = "video/avc"; // H.264/AVC 
      listAvailableEncoders(mime); // (this creates some debug output only) 
      String codec = "OMX.Nvidia.h264.encoder"; // instead, hard-code the codec we want to use for now 

      mVideoEncoder = MediaCodec.createByCodecName(codec); 
      if(mVideoEncoder == null) 
       Log.e(LOGTAG, "Media codec " + codec + " is not available!"); 

      // TODO: change, based on what we're streaming... 
      int FRAME_WIDTH = 1280; 
      int FRAME_HEIGHT = 720; 

      // https://github.com/fyhertz/libstreaming/blob/ac44416d88ed3112869ef0f7eab151a184bbb78d/src/net/majorkernelpanic/streaming/hw/EncoderDebugger.java 
      mNV21Converter = new NV21Convertor(); 
      mNV21Converter.setSize(FRAME_WIDTH, FRAME_HEIGHT); 
      mNV21Converter.setEncoderColorFormat(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); 
      mNV21Converter.setColorPanesReversed(true); 
      mNV21Converter.setYPadding(0); 

      MediaFormat format = MediaFormat.createVideoFormat(mime, FRAME_WIDTH, FRAME_HEIGHT); 
      format.setInteger(MediaFormat.KEY_FRAME_RATE, 25); 
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10); 
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); 
      // TODO: optimize bit rate 
      format.setInteger(MediaFormat.KEY_BIT_RATE, 250000); // 250 kbit/s ≈ 31 kB/s 

      mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
      mVideoEncoder.start(); 
      mEncoderInputBuffers = mVideoEncoder.getInputBuffers(); 
      mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers(); 

      Log.d(LOGTAG, "Number of input buffers " + mEncoderInputBuffers.length); 
      Log.d(LOGTAG, "Number of output buffers " + mEncoderOutputBuffers.length); 

      initialized = true; 

     } catch (Exception e) { 
      e.printStackTrace(); 
     } 
    } 

    private void close() 
    { 
     Looper.myLooper().quit(); 
     mVideoEncoder.stop(); 
     mVideoEncoder.release(); 
     mVideoEncoder = null; 
    } 

    private void processBufferedFrames() 
    { 
     if (!initialized) 
      return; 
     VideoFrame frame = getNewFrame(); 

     try { 
      sendTCPFrame(frame); 

     } catch (Exception e) { 
      e.printStackTrace(); 
     } 
    } 
    private void sendTCPFrame(VideoFrame frame) 
    { 
     long start = System.nanoTime(); 

     long start2 = System.nanoTime(); 
     int inputBufferIndex = -1; 
     while((inputBufferIndex = mVideoEncoder.dequeueInputBuffer(-1)) < 0) { // -1: wait indefinitely for the buffer 
      switch(inputBufferIndex) { 
       default: 
        Log.e(LOGTAG, "dequeueInputBuffer returned unknown value: " + inputBufferIndex); 
      } 
     } 
     // fill in input (raw) data: 
     mEncoderInputBuffers[inputBufferIndex].clear(); 

     long stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "dequeueInputBuffer took " + (stop2 - start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     byte[] pixels = mNV21Converter.convert(frame.pixels); 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "mNV21Converter.convert took " + (stop2-start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     mEncoderInputBuffers[inputBufferIndex].put(pixels); 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "mEncoderInputBuffers[inputBufferIndex].put(pixels) took " + (stop2 - start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     // presentationTimeUs is expected in microseconds, so convert from nanoseconds: 
     mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, System.nanoTime()/1000, 0); 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "queueInputBuffer took " + (stop2 - start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     // wait for encoded data to become available: 
     int outputBufferIndex = -1; 
     MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo(); 
     long timeoutUs = -1;//10000; // microseconds 
     while((outputBufferIndex = mVideoEncoder.dequeueOutputBuffer(bufInfo, timeoutUs)) < 0) { // -1: wait indefinitely for the buffer 
      Log.i(LOGTAG, "dequeueOutputBuffer returned value: " + outputBufferIndex); 
      switch(outputBufferIndex) { 
       case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 
        // output buffers have changed, move reference 
        mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers(); 
        break; 
       case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 
        // Subsequent data will conform to new format. 
        //MediaFormat format = codec.getOutputFormat(); 
        Log.e(LOGTAG, "dequeueOutputBuffer returned INFO_OUTPUT_FORMAT_CHANGED ?!"); 
        break; 
       case MediaCodec.INFO_TRY_AGAIN_LATER: 
        Log.w(LOGTAG, "dequeueOutputBuffer return INFO_TRY_AGAIN_LATER"); 
        break; 
       default: 
        Log.e(LOGTAG, "dequeueOutputBuffer returned unknown value: " + outputBufferIndex); 
      } 
     } 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "dequeueOutputBuffer took " + (stop2 - start2)/1e6 + "ms."); 

     // output (encoded) data available! 
     Log.d(LOGTAG, "encoded buffer info: size = " + bufInfo.size + ", offset = " + bufInfo.offset + ", presentationTimeUs = " + bufInfo.presentationTimeUs + ", flags = " + bufInfo.flags); 
     ByteBuffer encodedData = mEncoderOutputBuffers[outputBufferIndex]; 
     final int sizeOfImageData = bufInfo.size; 

     long stop = System.nanoTime(); 
     Log.d(LOGTAG, "Encoding image took " + (stop-start)/1e6 + "ms."); 

     start = System.nanoTime(); 
     // assemble header: 
    ... 

     encodedData.rewind(); 
     // copy (!) the encoded data into a freshly allocated direct buffer (direct buffers are not array-backed): 
     ByteBuffer imageBuffer = ByteBuffer.allocateDirect(encodedData.remaining()); 
     imageBuffer.put(encodedData); // TODO: can this copy be avoided? 

     stop = System.nanoTime(); 
     Log.d(LOGTAG, "Preparing content for streaming took " + (stop - start)/1e6 + "ms."); 
     // do streaming via TCP 
     ... 
     mVideoEncoder.releaseOutputBuffer(outputBufferIndex, false); 
    } 

    // see http://developer.android.com/reference/android/media/MediaCodecInfo.html 
    private void listAvailableEncoders(String mimeType) 
    { 
     Log.d(LOGTAG, "Available encoders for mime type " + mimeType + ":"); 
     for (int i = 0; i < MediaCodecList.getCodecCount(); i++) { 
      MediaCodecInfo codec = MediaCodecList.getCodecInfoAt(i); 

      if (!codec.isEncoder()) 
       continue; 

      String[] types = codec.getSupportedTypes(); 
      for (int j = 0; j < types.length; j++) { 
       //if (types[j].equalsIgnoreCase(mimeType)) { 
       String msg = "- name: " + codec.getName() + ", supported color formats for " + mimeType + ":"; 
       MediaCodecInfo.CodecCapabilities cap = codec.getCapabilitiesForType(mimeType); 
       for(int k = 0; k < cap.colorFormats.length; ++k) msg = msg + " " + cap.colorFormats[k]; 
       Log.d(LOGTAG, msg); 
       // break; 
       //} 
      } 
     } 
     } 
} 

Answer


Yes, there is something wrong with your code - you are synchronously waiting for the current frame to come out of the encoder before handing it the next frame. Most hardware codecs have more latency than you would expect, and to get the throughput the encoder is actually capable of, you need to use it asynchronously.

That is, after submitting one input buffer for encoding, you should not wait for the encoded output buffer, but only check whether output is available. You should then go on and queue the next input buffer, and again check for any available output. Only once you do not get an input buffer immediately should you start waiting for output. That way, the encoder always has more than one input buffer to start working on, which keeps it busy so it can actually reach the frame rate it is capable of. A rough sketch of this pattern is shown below.
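To make this more concrete, here is a minimal sketch of such a non-blocking loop using the buffer-array API available on API 19. It reuses your field names (mVideoEncoder, mEncoderInputBuffers, mEncoderOutputBuffers, LOGTAG), while handleEncodedFrame() is a placeholder for whatever you do with the encoded data (your TCP send). Treat it as an illustration of the pattern, not a drop-in replacement:

    private void encodeFrame(byte[] pixels, long ptsUs)
    {
        MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();

        // 1. Drain whatever output is already available, without waiting (timeout 0):
        while (true) {
            int outIndex = mVideoEncoder.dequeueOutputBuffer(bufInfo, 0);
            if (outIndex >= 0) {
                handleEncodedFrame(mEncoderOutputBuffers[outIndex], bufInfo); // e.g. send over TCP
                mVideoEncoder.releaseOutputBuffer(outIndex, false);
            } else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
            } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                Log.d(LOGTAG, "new output format: " + mVideoEncoder.getOutputFormat());
            } else { // INFO_TRY_AGAIN_LATER: nothing encoded yet, don't block
                break;
            }
        }

        // 2. Feed the new raw frame. Use a short timeout here; if no input buffer is
        //    free, the encoder is already saturated and you may drop or queue the frame.
        int inIndex = mVideoEncoder.dequeueInputBuffer(10000); // 10 ms
        if (inIndex >= 0) {
            ByteBuffer input = mEncoderInputBuffers[inIndex];
            input.clear();
            input.put(pixels);
            mVideoEncoder.queueInputBuffer(inIndex, 0, pixels.length, ptsUs, 0);
        }
    }

The key point is that the only place that may block is the input side, and only when the encoder already has several frames queued up to work on.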

(If you are fine with requiring Android 5.0, you could take a look at MediaCodec.setCallback, which makes it much easier to work with asynchronously.)
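For completeness, a rough sketch of that API 21+ callback style (not usable on API 19; nextFramePixels() and nextPtsUs() are placeholders for however you obtain raw frames and timestamps in your setup):

    // API 21+ only: MediaCodec drives the pipeline via callbacks on its own thread.
    // setCallback() must be called before configure().
    mVideoEncoder.setCallback(new MediaCodec.Callback() {
        @Override
        public void onInputBufferAvailable(MediaCodec codec, int index) {
            ByteBuffer input = codec.getInputBuffer(index);
            input.clear();
            input.put(nextFramePixels()); // placeholder: next raw NV21/I420 frame
            codec.queueInputBuffer(index, 0, input.position(), nextPtsUs(), 0);
        }

        @Override
        public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
            ByteBuffer encoded = codec.getOutputBuffer(index);
            // stream "encoded" (info.offset .. info.offset + info.size) over TCP here
            codec.releaseOutputBuffer(index, false);
        }

        @Override
        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { }

        @Override
        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
            Log.e(LOGTAG, "encoder error", e);
        }
    });
    mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mVideoEncoder.start();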

There are even some codecs (mainly decoders though, if I remember correctly) that will not output their first buffer until they have been given more than a few input buffers.