android.graphics.SurfaceTexture#getTimestamp() 源码实例Demo

下面列出了 android.graphics.SurfaceTexture#getTimestamp() 的实例代码,或者点击链接到 GitHub 查看源代码,也可以在右侧发表评论。

/**
 * Notifies the video recorder that the camera delivered a new frame.
 * (Invoked from a thread other than the encoder thread.)
 * <p>
 * Posts a message to the encoder thread and returns right away. That is not
 * airtight -- ideally the caller would not latch another frame until this one
 * has been consumed -- but it works as long as the input frame rate stays
 * reasonable and the encoder thread keeps up.
 * <p>
 * TODO: either block here until the texture has been rendered onto the encoder
 * surface, or expose a separate "block if still busy" method the caller can
 * invoke right before updateTexImage(); the latter avoids stalling the caller
 * while this thread does its work.
 */
public void frameAvailable(SurfaceTexture st) {
    boolean encoderReady;
    synchronized (mReadyFence) {
        encoderReady = mReady;
    }
    if (!encoderReady) {
        return;
    }

    st.getTransformMatrix(STMatrix);
    long timestamp = st.getTimestamp();
    if (timestamp == 0) {
        // A zero timestamp shows up on the first frame after the device is
        // power-cycled via the power button. MPEG4Writer aborts in native
        // code on a zero timestamp, so such frames must be dropped here.
        Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
        return;
    }

    // The 64-bit timestamp travels as the message's two int arguments.
    mHandler.sendMessage(mHandler.obtainMessage(
            MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, STMatrix));
}
 
源代码2 项目: TelePlus-Android   文件: InstantCameraView.java
/**
 * Forwards a new camera frame to the encoder thread, working around
 * SurfaceTexture instances that report zero timestamps.
 */
public void frameAvailable(SurfaceTexture st, Integer cameraId, long timestampInternal) {
    boolean isReady;
    synchronized (sync) {
        isReady = ready;
    }
    if (!isReady) {
        return;
    }

    long ts = st.getTimestamp();
    if (ts != 0) {
        // Normal frame: reset the consecutive-zero counter.
        zeroTimeStamps = 0;
    } else {
        zeroTimeStamps++;
        if (zeroTimeStamps <= 1) {
            // A single zero timestamp is tolerated by dropping the frame.
            return;
        }
        // Repeated zeros: fall back to the internally generated timestamp.
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("fix timestamp enabled");
        }
        ts = timestampInternal;
    }

    // High/low halves of the 64-bit timestamp ride in the message's int args.
    handler.sendMessage(handler.obtainMessage(MSG_VIDEOFRAME_AVAILABLE, (int) (ts >> 32), (int) ts, cameraId));
}
 
源代码3 项目: TelePlus-Android   文件: InstantCameraView.java
/**
 * Hands a freshly produced camera frame to the encoder handler, substituting
 * an internally supplied timestamp when the SurfaceTexture keeps reporting 0.
 */
public void frameAvailable(SurfaceTexture st, Integer cameraId, long timestampInternal) {
    synchronized (sync) {
        if (!ready) {
            return;
        }
    }

    long frameTimeNs = st.getTimestamp();
    if (frameTimeNs != 0) {
        // Valid timestamp: clear the zero-timestamp streak.
        zeroTimeStamps = 0;
    } else if (++zeroTimeStamps > 1) {
        // Second (or later) zero in a row: switch to the internal clock.
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("fix timestamp enabled");
        }
        frameTimeNs = timestampInternal;
    } else {
        // First zero timestamp is simply dropped.
        return;
    }

    handler.sendMessage(handler.obtainMessage(MSG_VIDEOFRAME_AVAILABLE, (int) (frameTimeNs >> 32), (int) frameTimeNs, cameraId));
}
 
源代码4 项目: AndroidPlayground   文件: TextureMovieEncoder.java
/**
 * Signals the recorder that a frame is ready on the camera SurfaceTexture.
 * (Call from a non-encoder thread.)
 * <p>
 * Sends a message and returns without waiting. Strictly speaking the caller
 * should not latch another frame until this one is consumed, but in practice
 * this is fine while the input frame rate is reasonable and the encoder thread
 * does not stall.
 * <p>
 * TODO: block until the texture lands on the encoder surface, or add a
 * "block if still busy" method callers run just before updateTexImage() --
 * preferred, since it avoids stalling the caller while this thread works.
 */
public void frameAvailable(SurfaceTexture st) {
    synchronized (mReadyFence) {
        if (!mReady) {
            return;
        }
    }

    st.getTransformMatrix(mTextureFrameTransform);
    long frameTimeNs = st.getTimestamp();
    if (frameTimeNs != 0) {
        // Split the 64-bit timestamp over the message's two int slots.
        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
                (int) (frameTimeNs >> 32), (int) frameTimeNs, mTextureFrameTransform));
    } else {
        // Zero timestamps occur on the first frame after the device is toggled
        // off/on with the power button; MPEG4Writer abort()s in native code on
        // them, so the frame is ignored rather than forwarded.
        Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
    }
}
 
/**
 * Informs the video recorder that a new frame is available.
 * (Invoked from a non-encoder thread.)
 * <p/>
 * Dispatches a message and returns immediately. Not fully safe -- the caller
 * should ideally hold off latching a new frame until this one is handled --
 * but acceptable while the frame rate is modest and the encoder keeps pace.
 * <p/>
 * TODO: block here until rendering to the encoder surface completes, or offer
 * a "block if still busy" method to be called right before updateTexImage().
 */
public void frameAvailable(SurfaceTexture st) {
    boolean encoderUp;
    synchronized (mReadyFence) {
        encoderUp = mReady;
    }
    if (!encoderUp) {
        return;
    }

    // Fresh matrix each frame (TODO: reuse a field to avoid the allocation).
    float[] texMatrix = new float[16];
    st.getTransformMatrix(texMatrix);

    long timestamp = st.getTimestamp();
    if (timestamp == 0) {
        // The first frame after the device is power-toggled carries a zero
        // timestamp; MPEG4Writer abort()s in native code on it, so skip it.
        Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
        return;
    }
    mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
            (int) (timestamp >> 32), (int) timestamp, texMatrix));
}
 
源代码6 项目: media-for-mobile   文件: CameraSource.java
@Override
public Frame getFrame() {
    // Renders the latest camera image into the encoder surface and stamps it
    // with a presentation time measured from the first captured frame.
    if (surfaceTextureManager != null) {
        SurfaceTexture st = surfaceTextureManager.getSurfaceTexture();

        surfaceTextureManager.prepareAvailableFrame();
        surfaceTextureManager.drawImage();

        if (firstFrame) {
            // One-time calibration: align the SurfaceTexture clock with wall
            // time so later presentation times start near zero.
            long fromStartToFirstFrame = (System.currentTimeMillis() - startTimeStamp) * 1000000; // timeStampOffset in nanosecond
            timeStampOffset = st.getTimestamp() - fromStartToFirstFrame;
            firstFrame = false;
        }
        surface.setPresentationTime(st.getTimestamp() - timeStampOffset);
    }


    // NOTE(review): multiplier here is 1000 (ms -> us?) while the offset above
    // uses 1000000 (ms -> ns) -- confirm the unit setSampleTime() expects.
    long sampleTime = (System.currentTimeMillis() - startTimeStamp) * 1000;
    //Log.d("AMP Camera Issue", "Video rame PTS = " + sampleTime);

    //Log.d("AMP Camera Issue", " " + sampleTime);
    currentFrame.setSampleTime(sampleTime);


    return currentFrame;
}
 
源代码7 项目: Telegram-FOSS   文件: InstantCameraView.java
/**
 * Pushes a new camera frame to the encoder handler. A lone zero timestamp is
 * dropped; repeated zeros switch the method to the caller-supplied timestamp.
 */
public void frameAvailable(SurfaceTexture st, Integer cameraId, long timestampInternal) {
    synchronized (sync) {
        if (!ready) {
            return;
        }
    }

    long timestamp = st.getTimestamp();
    if (timestamp == 0) {
        // Track consecutive zero timestamps from the SurfaceTexture.
        zeroTimeStamps++;
        if (zeroTimeStamps <= 1) {
            // Tolerate a single bad frame by dropping it.
            return;
        }
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("fix timestamp enabled");
        }
        // Fall back to the internally generated timestamp.
        timestamp = timestampInternal;
    } else {
        zeroTimeStamps = 0;
    }

    // Split the 64-bit timestamp across the message's two int slots.
    int high = (int) (timestamp >> 32);
    int low = (int) timestamp;
    handler.sendMessage(handler.obtainMessage(MSG_VIDEOFRAME_AVAILABLE, high, low, cameraId));
}
 
源代码8 项目: grafika   文件: TextureMovieEncoder.java
/**
 * Lets the video recorder know that a new frame is available.
 * (Call from a thread other than the encoder thread.)
 * <p>
 * Only posts a message and returns. This is not fully safe -- the caller
 * should ideally not latch a new frame until this one is finished -- but it
 * holds up while the input rate is reasonable and the encoder does not stall.
 * <p>
 * TODO: block until the texture is rendered onto the encoder surface, or add
 * a separate "block if still busy" method to run just before updateTexImage();
 * the latter is preferred so this thread's work does not stall the caller.
 */
public void frameAvailable(SurfaceTexture st) {
    synchronized (mReadyFence) {
        if (!mReady) {
            return;
        }
    }

    float[] texTransform = new float[16];   // TODO: reuse to avoid per-frame alloc
    st.getTransformMatrix(texTransform);
    long timestamp = st.getTimestamp();
    if (timestamp == 0) {
        // Observed on the first frame after toggling the device off/on with
        // the power button. MPEG4Writer treats a zero timestamp as cause to
        // abort() in native code, so the frame must be ignored.
        Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
        return;
    }

    // The 64-bit timestamp is carried in the message's two int arguments.
    int tsHigh = (int) (timestamp >> 32);
    int tsLow = (int) timestamp;
    mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, tsHigh, tsLow, texTransform));
}
 
源代码9 项目: Telegram   文件: InstantCameraView.java
/**
 * Delivers a camera frame to the encoder handler; compensates for devices
 * whose SurfaceTexture repeatedly reports a zero timestamp.
 */
public void frameAvailable(SurfaceTexture st, Integer cameraId, long timestampInternal) {
    synchronized (sync) {
        if (!ready) {
            return;
        }
    }

    long ts = st.getTimestamp();
    boolean zeroTs = (ts == 0);
    if (!zeroTs) {
        zeroTimeStamps = 0;
    } else {
        zeroTimeStamps++;
        if (zeroTimeStamps <= 1) {
            // Drop the first zero-timestamp frame.
            return;
        }
        // Two or more zeros in a row: use the internal timestamp instead.
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("fix timestamp enabled");
        }
        ts = timestampInternal;
    }

    handler.sendMessage(handler.obtainMessage(MSG_VIDEOFRAME_AVAILABLE, (int) (ts >> 32), (int) ts, cameraId));
}
 
源代码10 项目: TikTok   文件: TextureMovieEncoder.java
/**
     * Tells the video recorder that a new frame is available.  (Call from non-encoder thread.)
     * <p>
     * This function sends a message and returns immediately.  This isn't sufficient -- we
     * don't want the caller to latch a new frame until we're done with this one -- but we
     * can get away with it so long as the input frame rate is reasonable and the encoder
     * thread doesn't stall.
     * <p>
     * TODO: either block here until the texture has been rendered onto the encoder surface,
     * or have a separate "block if still busy" method that the caller can execute immediately
     * before it calls updateTexImage().  The latter is preferred because we don't want to
     * stall the caller while this thread does work.
     *
     * @param st the SurfaceTexture that received the new camera frame
     */
    public void frameAvailable(SurfaceTexture st) {
        synchronized (mReadyFence) {
            if (!mReady) {
                return;
            }
        }
        // Check the handler up front so no per-frame work (matrix fetch,
        // timestamp read) is wasted when the encoder has been torn down.
        // Previously this check ran only after that work.
        // NOTE(review): mHandler could still be nulled by another thread between
        // this check and sendMessage() below -- confirm the threading contract.
        if (mHandler == null) {
            return;
        }

        float[] transform = new float[16];      // TODO - avoid alloc every frame
        st.getTransformMatrix(transform);
        long timestamp = st.getTimestamp();
        if (timestamp == 0) {
            // Seeing this after device is toggled off/on with power button.  The
            // first frame back has a zero timestamp.
            //
            // MPEG4Writer thinks this is cause to abort() in native code, so it's very
            // important that we just ignore the frame.
            Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
            return;
        }
        // The 64-bit timestamp is split across the message's two int arguments.
        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
                (int) (timestamp >> 32), (int) timestamp, transform));
    }
 
/**
 * Tells the video recorder that a new frame is available.  (Call from non-encoder thread.)
 * <p>
 * This function sends a message and returns immediately.  This isn't sufficient -- we
 * don't want the caller to latch a new frame until we're done with this one -- but we
 * can get away with it so long as the input frame rate is reasonable and the encoder
 * thread doesn't stall.
 * <p>
 * Pause/resume support: while recording is resumed after a pause, timestamps
 * are rebased (see below) so the recorded stream has no gap.
 * <p>
 * TODO: either block here until the texture has been rendered onto the encoder surface,
 * or have a separate "block if still busy" method that the caller can execute immediately
 * before it calls updateTexImage().  The latter is preferred because we don't want to
 * stall the caller while this thread does work.
 */
public void frameAvailable(SurfaceTexture st) {
    synchronized (mReadyFence) {
        if (!mReady) {
            return;
        }
    }

    float[] transform = new float[16];      // TODO - avoid alloc every frame
    st.getTransformMatrix(transform);
    //TODO Chris timestamp is here!
    long timestamp = st.getTimestamp();
    // First frame after a resume: remember the resume time and restart the
    // clock just past the last pause point (+50 -- units presumably the
    // SurfaceTexture timestamp unit; flagged as a hack by the original author).
    if (mPause && !mPauseTimeStamp.isEmpty()) {
    	mPause =  false;
    	mResumeTimeStamp.add(timestamp);
    	//hack: 50 is to be rethink
    	timestamp = mPauseTimeStamp.get(mPauseTimeStamp.size() - 1) + 50;
    } else if (!mPause && !mResumeTimeStamp.isEmpty()) {
    	// Steady state after a resume: shift the raw timestamp so the stream
    	// continues from the last pause point without a gap.
    	timestamp = mPauseTimeStamp.get(mPauseTimeStamp.size() - 1) + 
    			timestamp - mResumeTimeStamp.get(mResumeTimeStamp.size() - 1);
    };
    if (timestamp == 0) {
        // Seeing this after device is toggled off/on with power button.  The
        // first frame back has a zero timestamp.
        //
        // MPEG4Writer thinks this is cause to abort() in native code, so it's very
        // important that we just ignore the frame.
        Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
        return;
    }
    
    // Publish the (possibly rebased) timestamp and hand the frame to the encoder.
    mTimeStamp = timestamp;
    mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
            (int) (timestamp >> 32), (int) timestamp, transform));
}
 
源代码12 项目: Lassi-Android   文件: SnapshotVideoRecorder.java
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
    // Per-frame renderer callback driving a small state machine:
    //  - NOT_RECORDING -> RECORDING: lazily build and start the encoder engine;
    //  - RECORDING: forward the current frame to the video encoder;
    //  - RECORDING -> NOT_RECORDING: stop the engine and detach from the preview.
    if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
        // Set default options
        if (mResult.videoBitRate <= 0) mResult.videoBitRate = DEFAULT_VIDEO_BITRATE;
        if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
        if (mResult.audioBitRate <= 0) mResult.audioBitRate = DEFAULT_AUDIO_BITRATE;

        // Video. Ensure width and height are divisible by 2, as I have read somewhere.
        Size size = mResult.getSize();
        int width = size.getWidth();
        int height = size.getHeight();
        width = width % 2 == 0 ? width : width + 1;
        height = height % 2 == 0 ? height : height + 1;
        String type = "";
        switch (mResult.codec) {
            case H_263:
                type = "video/3gpp";
                break; // MediaFormat.MIMETYPE_VIDEO_H263;
            case H_264:
                type = "video/avc";
                break; // MediaFormat.MIMETYPE_VIDEO_AVC:
            case DEVICE_DEFAULT:
                // Assumes the device default codec is H.264 -- TODO confirm.
                type = "video/avc";
                break;
        }
        LOG.w("Creating frame encoder. Rotation:", mResult.rotation);
        TextureMediaEncoder.Config config = new TextureMediaEncoder.Config(width, height,
                mResult.videoBitRate,
                mResult.videoFrameRate,
                mResult.rotation,
                type, mTextureId,
                scaleX, scaleY,
                mPreview.mInputFlipped,
                EGL14.eglGetCurrentContext()
        );
        TextureMediaEncoder videoEncoder = new TextureMediaEncoder(config);

        // Audio
        AudioMediaEncoder audioEncoder = null;
        if (mResult.audio == Audio.ON) {
            audioEncoder = new AudioMediaEncoder(new AudioMediaEncoder.Config(mResult.audioBitRate));
        }

        // Engine
        mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, audioEncoder,
                mResult.maxDuration, mResult.maxSize, SnapshotVideoRecorder.this);
        mEncoderEngine.start();
        mResult.rotation = 0; // We will rotate the result instead.
        mCurrentState = STATE_RECORDING;
    }

    if (mCurrentState == STATE_RECORDING) {
        // Hand the frame (timestamp + texture transform) to the video encoder.
        TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
        TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame();
        textureFrame.timestamp = surfaceTexture.getTimestamp();
        surfaceTexture.getTransformMatrix(textureFrame.transform);
        mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, textureFrame);
    }

    if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
        mCurrentState = STATE_NOT_RECORDING; // before nulling encoderEngine!
        mEncoderEngine.stop();
        mEncoderEngine = null;
        mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this);
        mPreview = null;
    }

}
 
源代码13 项目: AAVT   文件: VideoSurfaceProcessor.java
private void glRun(){
    // GL worker-thread body: creates an EGL context backed by a dummy
    // SurfaceTexture, opens the data provider on an input SurfaceTexture, then
    // loops drawing each provider frame into an FBO and notifying observers.
    EglHelper egl=new EglHelper();
    boolean ret=egl.createGLESWithSurface(new EGLConfigAttrs(),new EGLContextAttrs(),new SurfaceTexture(1));
    if(!ret){
        // TODO: error handling
        return;
    }
    int mInputSurfaceTextureId = GpuUtils.createTextureID(true);
    SurfaceTexture mInputSurfaceTexture = new SurfaceTexture(mInputSurfaceTextureId);

    Point size=mProvider.open(mInputSurfaceTexture);
    AvLog.d(TAG,"Provider Opened . data size (x,y)="+size.x+"/"+size.y);
    if(size.x<=0||size.y<=0){
        // TODO: error handling
        destroyGL(egl);
        synchronized (LOCK){
            LOCK.notifyAll();
        }
        return;
    }
    int mSourceWidth = size.x;
    int mSourceHeight = size.y;
    synchronized (LOCK){
        // Wake any thread waiting for the source size to become known.
        LOCK.notifyAll();
    }
    // The data-source provider is required to return the frame size synchronously.
    if(mSourceWidth <=0|| mSourceHeight <=0){
        error(1,"video source return inaccurate size to SurfaceTextureActuator");
        return;
    }

    if(mRenderer==null){
        mRenderer=new WrapRenderer(null);
    }
    FrameBuffer sourceFrame=new FrameBuffer();
    mRenderer.create();
    mRenderer.sizeChanged(mSourceWidth, mSourceHeight);
    mRenderer.setFlag(mProvider.isLandscape()?WrapRenderer.TYPE_CAMERA:WrapRenderer.TYPE_MOVE);

    // Bean reused across iterations for the downstream callbacks.
    RenderBean rb=new RenderBean();
    rb.egl=egl;
    rb.sourceWidth= mSourceWidth;
    rb.sourceHeight= mSourceHeight;
    rb.endFlag=false;
    rb.threadId=Thread.currentThread().getId();
    AvLog.d(TAG,"Processor While Loop Entry");
    // The data source must fill the SurfaceTexture synchronously; wait until
    // the fill completes. NOTE(review): loop runs while frame() returns false
    // -- presumably false means "frame delivered, keep going"; confirm the
    // provider contract.
    while (!mProvider.frame()&&mGLThreadFlag){
        mInputSurfaceTexture.updateTexImage();
        mInputSurfaceTexture.getTransformMatrix(mRenderer.getTextureMatrix());
        AvLog.d(TAG,"timestamp:"+ mInputSurfaceTexture.getTimestamp());
        sourceFrame.bindFrameBuffer(mSourceWidth, mSourceHeight);
        GLES20.glViewport(0,0, mSourceWidth, mSourceHeight);
        mRenderer.draw(mInputSurfaceTextureId);
        sourceFrame.unBindFrameBuffer();
        rb.textureId=sourceFrame.getCacheTextureId();
        // Take the timestamp supplied by the data source.
        rb.timeStamp=mProvider.getTimeStamp();
        rb.textureTime= mInputSurfaceTexture.getTimestamp();
        observable.notify(rb);
    }
    AvLog.d(TAG,"out of gl thread loop");
    synchronized (LOCK){
        // Final "end" notification, then tear down GL state and wake waiters.
        rb.endFlag=true;
        observable.notify(rb);
        mRenderer.destroy();
        destroyGL(egl);
        LOCK.notifyAll();
        AvLog.d(TAG,"gl thread exit");
    }
}