android.media.MediaCodec.BufferInfo Code Examples

The examples below show how the android.media.MediaCodec.BufferInfo API is used in real projects; you can also follow each project's link to view the full source code on GitHub.
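Before the individual examples, here is a minimal sketch of the pattern nearly all of them share: one BufferInfo instance is allocated, passed to dequeueOutputBuffer(), and the codec fills in its offset, size, presentationTimeUs and flags fields for each output buffer. The class name, the drain() method and the codec/muxer/trackIndex parameters are illustrative, not taken from any of the projects below.

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaMuxer;

final class BufferInfoDrainSketch {
    /** Drains all currently pending encoder output into an already started muxer. */
    static void drain(MediaCodec codec, MediaMuxer muxer, int trackIndex) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            int index = codec.dequeueOutputBuffer(info, 10000 /* timeoutUs */);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break; // no output available right now
            }
            if (index < 0) {
                continue; // format/buffers changed; just dequeue again
            }
            ByteBuffer buffer = codec.getOutputBuffer(index);
            // offset and size describe the valid region of the output buffer
            buffer.position(info.offset);
            buffer.limit(info.offset + info.size);
            muxer.writeSampleData(trackIndex, buffer, info);
            codec.releaseOutputBuffer(index, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break; // the encoder signalled end of stream
            }
        }
    }
}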

Example 1  Project: Bitmp4  File: MP4Encoder.java

@Override
protected void onStart() {
  isStarted = true;
  addedFrameCount = 0;
  encodedFrameCount = 0;
  int width = getWidth();
  int height = getHeight();
  try {
    bufferInfo = new BufferInfo();
    videoCodec = MediaCodec.createEncoderByType(MIMETYPE_VIDEO_AVC);
    MediaFormat videoFormat = MediaFormat.createVideoFormat(MIMETYPE_VIDEO_AVC, width, height);
    videoFormat.setInteger(KEY_BIT_RATE, BIT_RATE);
    videoFormat.setInteger(KEY_FRAME_RATE, FRAME_RATE);
    videoFormat.setInteger(KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
    videoFormat.setInteger(KEY_COLOR_FORMAT, COLOR_FormatYUV420SemiPlanar);
    videoCodec.configure(videoFormat, null, null, CONFIGURE_FLAG_ENCODE);
    videoCodec.start();
    audioCodec = MediaCodec.createEncoderByType(MIMETYPE_AUDIO_AAC);
    MediaFormat audioFormat = MediaFormat.createAudioFormat(MIMETYPE_AUDIO_AAC, 44100, 1);
    // AACObjectHE has the value 5 on all supported API levels; the original
    // version check against MPEG2ProfileHigh only worked because that constant
    // happens to share the value 5, so no branch is needed.
    audioFormat.setInteger(KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);
    audioFormat.setInteger(KEY_BIT_RATE, 65536);
    audioCodec.configure(audioFormat, null, null, CONFIGURE_FLAG_ENCODE);
    audioCodec.start();
    mediaMuxer = new MediaMuxer(outputFilePath, MUXER_OUTPUT_MPEG_4);
  } catch (IOException ioe) {
    throw new RuntimeException("MediaMuxer creation failed", ioe);
  }
}
 

Example 2

@NonNull
private static BufferInfo copyBufferInfo(BufferInfo lastBufferInfo) {
    BufferInfo bufferInfo = new BufferInfo();
    bufferInfo.presentationTimeUs = lastBufferInfo.presentationTimeUs;
    bufferInfo.flags = lastBufferInfo.flags;
    bufferInfo.offset = lastBufferInfo.offset;
    bufferInfo.size = lastBufferInfo.size;
    return bufferInfo;
}
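The copy above is needed because callers typically reuse a single BufferInfo across dequeueOutputBuffer() calls, which overwrite its fields each time. A hedged usage sketch follows; drainOnce() is hypothetical and assumed to live in the same class as copyBufferInfo():

// Hypothetical caller: the shared "info" instance is overwritten by every
// dequeueOutputBuffer() call, so a snapshot is taken before the next call.
private static MediaCodec.BufferInfo drainOnce(MediaCodec codec) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = codec.dequeueOutputBuffer(info, 10000 /* timeoutUs */);
    if (index < 0) {
        return null; // nothing dequeued this time
    }
    codec.releaseOutputBuffer(index, false);
    return copyBufferInfo(info); // the snapshot survives later dequeues
}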
 
Example 3  Project: libcommon  File: MediaMuxerWrapper.java

@Override
public void writeSampleData(final int trackIndex,
	@NonNull final ByteBuffer byteBuf, @NonNull final BufferInfo bufferInfo) {

	if (!mReleased) {
		mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
	}
}
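The mReleased guard in Example 3 matters because MediaMuxer.writeSampleData() throws IllegalStateException once the muxer has been stopped or released. A minimal lifecycle sketch for reference (the class and method names are illustrative):

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

final class MuxerLifecycleSketch {
    // Illustrative ordering: addTrack() must precede start(), and every
    // writeSampleData() call must happen between start() and stop().
    static void mux(String path, MediaFormat format, ByteBuffer sample,
            MediaCodec.BufferInfo info) throws java.io.IOException {
        MediaMuxer muxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        int track = muxer.addTrack(format);
        muxer.start();
        muxer.writeSampleData(track, sample, info);
        muxer.stop();
        muxer.release();
    }
}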
 
Example 4  Project: cameraMediaCodec  File: AvcDecoder.java

public int Init()
{
	Log.i("AvcDecoder", "Init");
	try {
		mMC = MediaCodec.createDecoderByType(MIME_TYPE);
	} catch (IOException e) {
		// createDecoderByType declares IOException on current SDKs
		Log.e("AvcDecoder", "Init, createDecoderByType failed", e);
		return -1;
	}
	mStatus = STATUS_LOADED;
	mBI = new BufferInfo();
	Log.i("AvcDecoder", "Init, createDecoderByType");
	return 0;
}
 

Example 5

private void setupExtractor() {
   mExtractor = new MediaExtractor();
   try {
      mExtractor.setDataSource( mUri.toString() );
   } catch ( IOException e ) {
      e.printStackTrace();
   }

   int videoIndex = 0;

   for ( int trackIndex = 0; trackIndex < mExtractor.getTrackCount(); trackIndex++ ) {
      MediaFormat format = mExtractor.getTrackFormat( trackIndex );

      String mime = format.getString( MediaFormat.KEY_MIME );
      if ( mime != null ) {
         if ( mime.equals( "video/avc" ) ) {
            mExtractor.selectTrack( trackIndex );
            videoIndex = trackIndex;
            break;
         }
      }
   }

   try {
      mDecoder = MediaCodec.createDecoderByType( "video/avc" );
   } catch ( IOException e ) {
      // createDecoderByType declares IOException on current SDKs
      e.printStackTrace();
      return;
   }
   mDecoder.configure( mExtractor.getTrackFormat( videoIndex ), mSurface, null, 0 );
   mDecoder.start();

   mInfo = new BufferInfo();

   mInputBuffers = mDecoder.getInputBuffers();
   mOutputBuffers = mDecoder.getOutputBuffers();
}
 

Example 6

private void seekTo( long ms, int seekMode ) {

   // Log.d( TAG, String.format( Locale.US, "seeking to %d", ms ) );

   mExtractor.seekTo( ms * 1000, seekMode );
   // divide before casting so positions past roughly 35 minutes don't overflow int
   mCurrentPosition = (int) ( mExtractor.getSampleTime() / 1000 );
   mTimer.setTime( mCurrentPosition );
   // Log.d( TAG, String.format( Locale.US, "seeking extractor to %d, sample time is now %d", ms, mExtractor.getSampleTime() ) );
   mDecoder.flush();
   mInputBuffers = mDecoder.getInputBuffers();
   mOutputBuffers = mDecoder.getOutputBuffers();

   mInfo = new BufferInfo();
}
 
Example 7  Project: LiveMultimedia  File: GPUEncoder.java

/*******************************************************************
* createVideoCodec() creates the H.264-based video codec
******************************************************************/
public synchronized void createVideoCodec() {
    try {
        Log.w(TAG, "----->createVideoCodec()<-----");
        mCodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mCodec.start();
        mBufferInfo = new BufferInfo();
    } catch (IllegalStateException e) {
        Log.e(TAG, "Error creating video codec: configure() failed.", e);
    }
    Log.w(TAG, "----->end createVideoCodec()<-----");
 }
 
Example 8  Project: LiveMultimedia  File: GPUEncoder.java

private synchronized void dequeueOutputBuffer(
        MediaCodec codec, ByteBuffer[] outputBuffers,
        int index, MediaCodec.BufferInfo info) {
    if (mAudioFeatureActive) {
        codec.releaseOutputBuffer(index, false);
    }
}
 

Example 9

private void flushMediaCodec(MediaCodec mc) {
	int index = 0;
	BufferInfo info = new BufferInfo();
	while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
		index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (index>=0) {
			mc.releaseOutputBuffer(index, false);
		}
	}
}
 
Example 10  Project: libstreaming  File: EncoderDebugger.java

private void flushMediaCodec(MediaCodec mc) {
	int index = 0;
	BufferInfo info = new BufferInfo();
	while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
		index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (index>=0) {
			mc.releaseOutputBuffer(index, false);
		}
	}
}
 
Example 11  Project: VIA-AI  File: AvcDecoder.java

@Override
    public void run() {
        BufferInfo info = new BufferInfo();
        ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();

        boolean isInput = true;

        while (!eosReceived) {
            if (isInput) {
                int inputIndex = mDecoder.dequeueInputBuffer(10000);
                if (inputIndex >= 0) {
                    // fill inputBuffers[inputIndex] with valid data
                    ByteBuffer inputBuffer = inputBuffers[inputIndex];
                    int sampleSize = mExtractor.readSampleData(inputBuffer, 0);

                    if (sampleSize > 0) {
                        // queue the sample first, then advance, so the last
                        // sample in the stream is not dropped
                        mDecoder.queueInputBuffer(inputIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);
                        mExtractor.advance();
                    } else {
                        Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        mDecoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isInput = false;
                    }
                }
            }

            int outIndex = mDecoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                    mDecoder.getOutputBuffers();
                    break;

                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.d(TAG, "INFO_OUTPUT_FORMAT_CHANGED format : " + mDecoder.getOutputFormat());
                    MediaFormat format = mDecoder.getOutputFormat();
                    mOutputHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
                    mOutputWidth = format.getInteger(MediaFormat.KEY_WIDTH);
                    mOutputStride = format.getInteger(MediaFormat.KEY_STRIDE);
                    mOutputColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    break;

                case MediaCodec.INFO_TRY_AGAIN_LATER:
//				Log.d(TAG, "INFO_TRY_AGAIN_LATER");
                    break;

                default:
                    ByteBuffer decodedBuffer = mDecoder.getOutputBuffers()[outIndex]; // or getOutputBuffer(outIndex) on API 21+
                    if(frameListener != null) {
                        frameListener.onFrameDecoded(decodedBuffer, info.offset, info.size, mOutputWidth, mOutputHeight, mOutputStride, mOutputColorFormat);
                    }

                    mDecoder.releaseOutputBuffer(outIndex, true /* render to the Surface */);
                    break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }
        if(null!=frameListener) frameListener.onEOS();
        mDecoder.stop();
        mDecoder.release();
        mExtractor.release();
    }
 

Example 12

@Override
public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
  return mediaCodec.dequeueOutputBuffer(info, timeoutUs);
}
 

Example 13

public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
 

Example 14

public BufferInfo getLastBufferInfo() {
	return mBufferInfo;
}
 
Example 15  Project: Android  File: MainActivity.java

protected boolean process() throws IOException {

        mMediaExtractor = new MediaExtractor();          
        mMediaExtractor.setDataSource(SDCARD_PATH+"/input.mp4");                
                
        int mVideoTrackIndex = -1;
        int framerate = 0;
        for(int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
            MediaFormat format = mMediaExtractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if(!mime.startsWith("video/")) {                
                continue;
            }
            framerate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
            mMediaExtractor.selectTrack(i);
            mMediaMuxer = new MediaMuxer(SDCARD_PATH+"/output.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
            mVideoTrackIndex = mMediaMuxer.addTrack(format);
            mMediaMuxer.start();
            break; // only remux the first video track
        }
        
        if(mMediaMuxer == null) {
            return false;
        }
        
        BufferInfo info = new BufferInfo();
        info.presentationTimeUs = 0;
        ByteBuffer buffer = ByteBuffer.allocate(500*1024);        
        while(true) {
            int sampleSize = mMediaExtractor.readSampleData(buffer, 0);
            if(sampleSize < 0) {
                break;
            }
            mMediaExtractor.advance();
            info.offset = 0;
            info.size = sampleSize;
            info.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME; // SYNC_FRAME is its deprecated alias
            info.presentationTimeUs += 1000*1000/framerate;
            mMediaMuxer.writeSampleData(mVideoTrackIndex,buffer,info);
        }

        mMediaExtractor.release();
        
        mMediaMuxer.stop();
        mMediaMuxer.release();
        
        return true;
    }
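One caveat about Example 15: it synthesizes presentationTimeUs from the nominal frame rate and flags every sample as a sync frame, which yields wrong timestamps for variable-frame-rate input. A hedged alternative that trusts the extractor's own timing and sync flags (the class, method and parameter names are illustrative):

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaMuxer;

final class RemuxLoopSketch {
    /** Copies the already selected track, keeping the extractor's timestamps and flags. */
    static void copySamples(MediaExtractor extractor, MediaMuxer muxer, int trackIndex) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        ByteBuffer buffer = ByteBuffer.allocate(500 * 1024);
        int sampleSize;
        while ((sampleSize = extractor.readSampleData(buffer, 0)) >= 0) {
            info.offset = 0;
            info.size = sampleSize;
            info.presentationTimeUs = extractor.getSampleTime();
            // set the key-frame flag only when the extractor reports a sync sample
            info.flags = (extractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0
                    ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
            muxer.writeSampleData(trackIndex, buffer, info);
            extractor.advance();
        }
    }
}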
 
Example 16  Project: cameraMediaCodec  File: AvcEncoder.java

public void Init(int colorformat, AvcEncoderSink sink/*null as default*/)
{
	Log.i("AvcEncoder", "Init");
	
	mPrimeColorFormat = colorformat;
	
	try {
		mMC = MediaCodec.createEncoderByType(MIME_TYPE);
	} catch (IOException e) {
		// createEncoderByType declares IOException on current SDKs
		Log.e("AvcEncoder", "Init, createEncoderByType failed", e);
		return;
	}
	
	mBI = new BufferInfo();
	
	mSink = sink;
	
	mFpsHelper = new FpsHelper();
	mFpsHelper.SetEnableDrop(true);
	
	mStatus = STATUS_LOADED;
}
 
Example 17  Project: ScreenRecoder  File: ScreenRecoder.java

private void stream(MediaCodec codec) {
	BufferInfo info = new BufferInfo();
	ByteBuffer[] buffers = null;

	while (!mQuitting) {
		int index = codec.dequeueOutputBuffer(info, TIMEOUT_USEC);
		if (index >= 0) {
			if (buffers == null) {
				buffers = codec.getOutputBuffers();
			}

			ByteBuffer buffer = buffers[index];
			buffer.limit(info.offset + info.size);
			buffer.position(info.offset);

			muxer.writeSampleData(videoTrackIndex, buffer, info);

			codec.releaseOutputBuffer(index, false);
		} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
			if (mMuxerStarted) {
				throw new RuntimeException("format changed twice");
			}
			
			MediaFormat newFormat = codec.getOutputFormat();

			// now that we have the Magic Goodies, start the muxer
			videoTrackIndex = muxer.addTrack(newFormat);
			muxer.start();

			mMuxerStarted = true;

			buffers = null;
		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			buffers = null;
		} else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
			Log.e("sam", "Codec dequeue buffer timed out.");
		}
	}

	muxer.stop();
	muxer.release();
}
 
Example 18  Project: LiveMultimedia  File: GPUEncoder.java

@SuppressWarnings("all")
public synchronized void encodeAudio() {
    if (!mAudioFeatureActive ) {
        return;
    }

    mAudioFrame++;
    // not yet encoded.
    ByteBuffer savedAudioBytes = mApp.pullAudioData();
    byte[] audioBytes =  new byte[savedAudioBytes.capacity()];
    System.arraycopy(savedAudioBytes.array(), 0, audioBytes, 0, audioBytes.length);

    Log.w(TAG, "Encoding audio frame " + mAudioFrame + " into AAC!");
    ByteBuffer[] codecInputBuffers  = mAudioEncoder.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = mAudioEncoder.getOutputBuffers();
    int numBytesSubmitted = 0;
    boolean doneSubmittingInput = false;
    int numBytesDequeued = 0;
    int index;
    if (!doneSubmittingInput) {
        index = mAudioEncoder.dequeueInputBuffer(kTimeoutUs /* timeoutUs */);
        if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (numBytesSubmitted >= kNumInputBytes) {
                mAudioEncoder.queueInputBuffer(
                        index,
                        0 /* offset */,
                        0 /* size */,
                        0 /* timeUs */,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                Log.d(TAG, "queued input EOS.");
                doneSubmittingInput = true;
            } else {
                int size = queueInputBuffer(mAudioEncoder, codecInputBuffers, index, audioBytes);
                numBytesSubmitted += size;
                Log.d(TAG, "queued " + size + " bytes of input data.");
            }
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        index = mAudioEncoder.dequeueOutputBuffer(info, kTimeoutUs /* timeoutUs */);
        if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "AUDIO Info try again later!!");
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.d(TAG, "encoder output format changed:  Added track index: " + mAudioTrackIndex);
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = mAudioEncoder.getOutputBuffers();
        } else {
            ByteBuffer encodedData = codecOutputBuffers[index];
            if (encodedData == null) {
                Log.e(TAG, "encoderOutputBuffer " + index + " was null in encoding audio!!");
                return;
            }

            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                info.size = 0;
            }
            // Copy the encoded audio data; encodedData is a ByteBuffer, so it
            // must be read with get() rather than System.arraycopy()
            mCurrentEncodedAudioData = new byte[info.size];
            encodedData.position(info.offset);
            encodedData.get(mCurrentEncodedAudioData, 0, info.size);
            numBytesDequeued += info.size;
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.w(TAG, "dequeued output EOS.");
            }
            Log.w(TAG, "dequeued " + info.size + " bytes of output data.");
        }
    }
    Log.d(TAG, "queued a total of " + numBytesSubmitted + "bytes, "
                + "dequeued " + numBytesDequeued + " bytes.");
    int sampleRate   = mAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    int channelCount = mAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int inBitrate    = sampleRate * channelCount * 16;  // bit/sec
    int outBitrate   = mAudioFormat.getInteger(MediaFormat.KEY_BIT_RATE);
    float desiredRatio = (float)outBitrate / (float)inBitrate;
    float actualRatio  = (float)numBytesDequeued / (float)numBytesSubmitted;
    if (actualRatio < 0.9 * desiredRatio || actualRatio > 1.1 * desiredRatio) {
        Log.w(TAG, "desiredRatio = " + desiredRatio
                + ", actualRatio = " + actualRatio);
    }
}
 

Example 19

public BufferInfo getLastBufferInfo() {
	return mBufferInfo;
}
 
Example 20  Project: spydroid-ipcamera  File: EncoderDebugger.java

/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. Encoder behaviour is inconsistent: some
		// encoders expose those parameters through the MediaFormat object (the normal behaviour),
		// but others do not. In that case we try to find a NAL unit of type 7 or 8 in the byte
		// stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parse the SPS and PPS; they could arrive in two different packets and in a
					// different order depending on the phone, so we make no assumptions about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}					
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	check(mPPS != null & mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example 21  Project: spydroid-ipcamera  File: EncoderDebugger.java

private long encode() {
	int n = 0;
	long elapsed = 0, now = timestamp();
	int encOutputIndex = 0, encInputIndex = 0;
	BufferInfo info = new BufferInfo();
	ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

	while (elapsed<5000000) {
		// Feeds the encoder with an image
		encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (encInputIndex>=0) {
			check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			encInputBuffers[encInputIndex].clear();
			encInputBuffers[encInputIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.d(TAG,"No buffer available !");
		}

		// Tries to get a NAL unit
		encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			encOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encOutputIndex>=0) {
			mVideo[n] = new byte[info.size];
			encOutputBuffers[encOutputIndex].clear();
			encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
			mEncoder.releaseOutputBuffer(encOutputIndex, false);
			if (n>=NB_ENCODED) {
				flushMediaCodec(mEncoder);
				return elapsed;
			}
		}

		elapsed = timestamp() - now;
	}

	throw new RuntimeException("The encoder is too slow.");

}
 

Example 22

public BufferInfo getLastBufferInfo() {
	return mBufferInfo;
}
 
Example 23  Project: libstreaming  File: EncoderDebugger.java

/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {

	ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
	BufferInfo info = new BufferInfo();
	byte[] csd = new byte[128];
	int len = 0, p = 4, q = 4;
	long elapsed = 0, now = timestamp();

	while (elapsed<3000000 && (mSPS==null || mPPS==null)) {

		// Some encoders won't give us the SPS and PPS unless they receive something to encode first...
		int bufferIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (bufferIndex>=0) {
			check(inputBuffers[bufferIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			inputBuffers[bufferIndex].clear();
			inputBuffers[bufferIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.e(TAG,"No buffer available !");
		}

		// We are looking for the SPS and the PPS here. Encoder behaviour is inconsistent: some
		// encoders expose those parameters through the MediaFormat object (the normal behaviour),
		// but others do not. In that case we try to find a NAL unit of type 7 or 8 in the byte
		// stream output by the encoder...

		int index = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);

		if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

			// The SPS and PPS should be there
			MediaFormat format = mEncoder.getOutputFormat();
			ByteBuffer spsb = format.getByteBuffer("csd-0");
			ByteBuffer ppsb = format.getByteBuffer("csd-1");
			mSPS = new byte[spsb.capacity()-4];
			spsb.position(4);
			spsb.get(mSPS,0,mSPS.length);
			mPPS = new byte[ppsb.capacity()-4];
			ppsb.position(4);
			ppsb.get(mPPS,0,mPPS.length);
			break;

		} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			outputBuffers = mEncoder.getOutputBuffers();
		} else if (index>=0) {

			len = info.size;
			if (len<128) {
				outputBuffers[index].get(csd,0,len);
				if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
					// Parse the SPS and PPS; they could arrive in two different packets and in a
					// different order depending on the phone, so we make no assumptions about that
					while (p<len) {
						while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
						if (p+3>=len) p=len;
						if ((csd[q]&0x1F)==7) {
							mSPS = new byte[p-q];
							System.arraycopy(csd, q, mSPS, 0, p-q);
						} else {
							mPPS = new byte[p-q];
							System.arraycopy(csd, q, mPPS, 0, p-q);
						}
						p += 4;
						q = p;
					}
				}					
			}
			mEncoder.releaseOutputBuffer(index, false);
		}

		elapsed = timestamp() - now;
	}

	check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
	mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
	mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);

	return elapsed;
}
 
Example 24  Project: libstreaming  File: EncoderDebugger.java

private long encode() {
	int n = 0;
	long elapsed = 0, now = timestamp();
	int encOutputIndex = 0, encInputIndex = 0;
	BufferInfo info = new BufferInfo();
	ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
	ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

	while (elapsed<5000000) {
		// Feeds the encoder with an image
		encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
		if (encInputIndex>=0) {
			check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
			encInputBuffers[encInputIndex].clear();
			encInputBuffers[encInputIndex].put(mData, 0, mData.length);
			mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
		} else {
			if (VERBOSE) Log.d(TAG,"No buffer available !");
		}

		// Tries to get a NAL unit
		encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
		if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			encOutputBuffers = mEncoder.getOutputBuffers();
		} else if (encOutputIndex>=0) {
			mVideo[n] = new byte[info.size];
			encOutputBuffers[encOutputIndex].clear();
			encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
			mEncoder.releaseOutputBuffer(encOutputIndex, false);
			if (n>=NB_ENCODED) {
				flushMediaCodec(mEncoder);
				return elapsed;
			}
		}

		elapsed = timestamp() - now;
	}

	throw new RuntimeException("The encoder is too slow.");

}
 

Example 25

void onReadOnce(byte[] buffer, int readSize, BufferInfo bufferInfo);