android.opengl.EGLContext#android.graphics.SurfaceTexture Source Code Examples (Demo)

Listed below are example usages of android.graphics.SurfaceTexture in code that also uses android.opengl.EGLContext. Each example names its project and file; follow the project links to view the full source on GitHub.

Example 1  Project: Camera2   File: OneCameraCharacteristicsImpl.java
@Override
public List<Size> getSupportedPreviewSizes()
{
    StreamConfigurationMap configMap;
    try
    {
        configMap = mCameraCharacteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    } catch (Exception ex)
    {
        Log.e(TAG, "Unable to obtain preview sizes.", ex);
        // See b/19623115, where a java.lang.AssertionError can be thrown due to a HAL error.
        return new ArrayList<>(0);
    }
    ArrayList<Size> supportedPictureSizes = new ArrayList<>();
    for (android.util.Size androidSize : configMap.getOutputSizes(SurfaceTexture.class))
    {
        supportedPictureSizes.add(new Size(androidSize));
    }
    return supportedPictureSizes;
}
 
Example 2  Project: GiraffePlayer   File: TextureRenderView.java
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public void bindToMediaPlayer(IMediaPlayer mp) {
    if (mp == null)
        return;

    if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) &&
            (mp instanceof ISurfaceTextureHolder)) {
        ISurfaceTextureHolder textureHolder = (ISurfaceTextureHolder) mp;
        mTextureView.mSurfaceCallback.setOwnSurfaceTecture(false);

        SurfaceTexture surfaceTexture = textureHolder.getSurfaceTexture();
        if (surfaceTexture != null) {
            mTextureView.setSurfaceTexture(surfaceTexture);
        } else {
            textureHolder.setSurfaceTexture(mSurfaceTexture);
        }
    } else {
        mp.setSurface(openSurface());
    }
}
 
Example 3  Project: libcommon   File: OverlayRendererHolder.java
/**
 * Helper for internalOnStart, GLES2 variant
 */
@WorkerThread
private void internalOnStartES2() {
	if (DEBUG) Log.v(TAG, String.format("internalOnStartES2:init overlay texture(%dx%d)",
		width(), height()));
	if (DEBUG) Log.v(TAG, "internalOnStartES2:shader=" + MY_FRAGMENT_SHADER_EXT_ES2);
	mDrawer.updateShader(MY_FRAGMENT_SHADER_EXT_ES2);
	final int uTex1 = mDrawer.glGetUniformLocation("sTexture");
	GLES20.glUniform1i(uTex1, 0);
	if (DEBUG) Log.v(TAG, "internalOnStart:uTex1=" + uTex1);

	final int uTex2 = mDrawer.glGetUniformLocation("sTexture2");
	mOverlayTexId = GLHelper.initTex(
		GL_TEXTURE_EXTERNAL_OES,
		GLES20.GL_TEXTURE1,
		GLES20.GL_LINEAR, GLES20.GL_LINEAR,
		GLES20.GL_CLAMP_TO_EDGE);
	mOverlayTexture = new SurfaceTexture(mOverlayTexId);
	mOverlayTexture.setDefaultBufferSize(width(), height());
	mOverlaySurface = new Surface(mOverlayTexture);
	GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
	GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mOverlayTexId);
	GLES20.glUniform1i(uTex2, 1);
	if (DEBUG) Log.v(TAG, "internalOnStart:uTex2=" + uTex2);
}
 
Example 4  Project: FuAgoraDemoDroid   File: EglCore.java
/**
 * Creates an EGL surface associated with a Surface.
 * <p>
 * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
 */
public EGLSurface createWindowSurface(Object surface) {
    if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
        throw new RuntimeException("invalid surface: " + surface);
    }

    // Create a window surface, and attach it to the Surface we received.
    int[] surfaceAttribs = {
            EGL14.EGL_NONE
    };
    EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
            surfaceAttribs, 0);
    checkEglError("eglCreateWindowSurface");
    if (eglSurface == null) {
        throw new RuntimeException("surface was null");
    }
    return eglSurface;
}
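
For context, here is a minimal caller sketch for the method above. It assumes the EglCore constructor, FLAG_RECORDABLE flag, and makeCurrent(EGLSurface) helper that grafika-style EglCore classes normally ship alongside createWindowSurface(); the texture id and buffer size are illustrative.

// Illustrative usage: render into a SurfaceTexture through EGL.
EglCore eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);          // Assumed constructor and flag.
SurfaceTexture surfaceTexture = new SurfaceTexture(oesTextureId);      // oesTextureId: an existing OES texture name.
surfaceTexture.setDefaultBufferSize(1280, 720);                        // Size the producer buffers explicitly.
EGLSurface eglSurface = eglCore.createWindowSurface(surfaceTexture);
eglCore.makeCurrent(eglSurface);                                       // Assumed helper that wraps eglMakeCurrent.
// ... issue GL draw calls, then present the frame with the class's swap helper, if one exists.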
 
Example 5  Project: grafika   File: ContinuousCaptureActivity.java
@Override   // SurfaceHolder.Callback
public void surfaceCreated(SurfaceHolder holder) {
    Log.d(TAG, "surfaceCreated holder=" + holder);

    // Set up everything that requires an EGL context.
    //
    // We had to wait until we had a surface because you can't make an EGL context current
    // without one, and creating a temporary 1x1 pbuffer is a waste of time.
    //
    // The display surface that we use for the SurfaceView, and the encoder surface we
    // use for video, use the same EGL context.
    mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
    mDisplaySurface = new WindowSurface(mEglCore, holder.getSurface(), false);
    mDisplaySurface.makeCurrent();

    mFullFrameBlit = new FullFrameRect(
            new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
    mTextureId = mFullFrameBlit.createTextureObject();
    mCameraTexture = new SurfaceTexture(mTextureId);
    mCameraTexture.setOnFrameAvailableListener(this);

    startPreview();
}
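
Once the setup above has run, frames arrive through the SurfaceTexture's frame callback. The sketch below shows the draw path that typically follows in this kind of activity; the field names mirror the snippet above, but the handler message and the drawFrame() body are illustrative rather than the project's exact code.

@Override   // SurfaceTexture.OnFrameAvailableListener
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // Called on an arbitrary thread; hand off to the thread that owns the EGL context.
    mHandler.sendEmptyMessage(MSG_FRAME_AVAILABLE);     // MSG_FRAME_AVAILABLE: illustrative constant.
}

private void drawFrame() {
    if (mEglCore == null) {
        return;                                         // Surface already released.
    }
    mDisplaySurface.makeCurrent();
    mCameraTexture.updateTexImage();                    // Latch the latest camera frame.
    mCameraTexture.getTransformMatrix(mTmpMatrix);      // mTmpMatrix: assumed float[16] field.
    mFullFrameBlit.drawFrame(mTextureId, mTmpMatrix);   // Blit the external texture to the display.
    mDisplaySurface.swapBuffers();
}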
 
Example 6  Project: android-openGL-canvas   File: CanvasGL.java
@Override
public void drawSurfaceTexture(BasicTexture texture, final SurfaceTexture surfaceTexture, int left, int top, int right, int bottom, @Nullable final IBitmapMatrix matrix, TextureFilter textureFilter) {
    currentTextureFilter = textureFilter;
    BasicTexture filteredTexture = texture;
    if (textureFilter instanceof FilterGroup) {
        filteredTexture = getFilterGroupTexture(texture, surfaceTexture, (FilterGroup) textureFilter);
    }
    GLCanvas.ICustomMVPMatrix customMVPMatrix = matrix == null ? null : new GLCanvas.ICustomMVPMatrix() {
        @Override
        public float[] getMVPMatrix(int viewportW, int viewportH, float x, float y, float drawW, float drawH) {
            return matrix.obtainResultMatrix(viewportW, viewportH, x, y, drawW, drawH);
        }
    };
    if (surfaceTexture == null) {
        glCanvas.drawTexture(filteredTexture, left, top, right - left, bottom - top, textureFilter, customMVPMatrix);
    } else {
        surfaceTexture.getTransformMatrix(surfaceTextureMatrix);
        glCanvas.drawTexture(filteredTexture, surfaceTextureMatrix, left, top, right - left, bottom - top, textureFilter, customMVPMatrix);
    }
}
 
Example 7  Project: EZFilter   File: RenderHandler.java
/**
 * Sets the input texture.
 * <p>
 * Must be called on the GL thread.
 *
 * @param surface
 * @param texId
 */
public final void setInputTextureId(final Object surface, final int texId) {
    if (!(surface instanceof Surface)
            && !(surface instanceof SurfaceTexture)
            && !(surface instanceof SurfaceHolder)) {
        throw new RuntimeException("unsupported window type:" + surface);
    }
    synchronized (mSync) {
        if (mRequestRelease) return;
        mSharedContext = EGL14.eglGetCurrentContext();
        mTextureId = texId;
        mSurface = surface;
        mRequestSetEglContext = true;
        mSync.notifyAll();
        try {
            mSync.wait();
        } catch (final InterruptedException e) {
        }
    }
}
 
Example 8  Project: AndroidPlayground   File: TextureMovieEncoder.java
/**
 * Tells the video recorder that a new frame is available.  (Call from non-encoder thread.)
 * <p>
 * This function sends a message and returns immediately.  This isn't sufficient -- we
 * don't want the caller to latch a new frame until we're done with this one -- but we
 * can get away with it so long as the input frame rate is reasonable and the encoder
 * thread doesn't stall.
 * <p>
 * TODO: either block here until the texture has been rendered onto the encoder surface,
 * or have a separate "block if still busy" method that the caller can execute immediately
 * before it calls updateTexImage().  The latter is preferred because we don't want to
 * stall the caller while this thread does work.
 */
public void frameAvailable(SurfaceTexture st) {
    synchronized (mReadyFence) {
        if (!mReady) {
            return;
        }
    }

    st.getTransformMatrix(mTextureFrameTransform);
    long timestamp = st.getTimestamp();
    if (timestamp == 0) {
        // Seeing this after the device is toggled off/on with the power button.  The
        // first frame back has a zero timestamp.
        //
        // MPEG4Writer thinks this is cause to abort() in native code, so it's very
        // important that we just ignore the frame.
        Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
        return;
    }

    mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
            (int) (timestamp >> 32), (int) timestamp, mTextureFrameTransform));
}
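
frameAvailable() is normally driven from the GL thread right after the camera texture has been latched. Below is a minimal sketch of such a caller, assuming a GLSurfaceView renderer that owns both the SurfaceTexture and the encoder; the field names are illustrative.

@Override   // GLSurfaceView.Renderer; illustrative caller on the GL thread
public void onDrawFrame(GL10 unused) {
    mSurfaceTexture.updateTexImage();                 // Latch the newest camera frame into the OES texture.
    mVideoEncoder.frameAvailable(mSurfaceTexture);    // Hand timestamp and transform to the encoder thread.
    // ... then draw the same texture to the on-screen surface.
}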
 
Example 9  Project: KSYMediaPlayer_Android   File: KSYTextureView.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {

    if (mSurfaceTexture != null && isComeBackFromShare()){
        mSurfaceTexture.release();
        mSurfaceTexture = surfaceTexture;
    }

    if (mSurfaceTexture == null)
        mSurfaceTexture = surfaceTexture;

    if (mMediaPlayer != null){
        mMediaPlayer.setSurface(new Surface(mSurfaceTexture));
    }

}
 
Example 10  Project: SiliCompressor   File: TextureRenderer.java
public void drawFrame(SurfaceTexture st, boolean invert) {
    checkGlError("onDrawFrame start");
    st.getTransformMatrix(mSTMatrix);

    if (invert) {
        mSTMatrix[5] = -mSTMatrix[5];
        mSTMatrix[13] = 1.0f - mSTMatrix[13];
    }

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    GLES20.glFinish();
}
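
drawFrame() expects updateTexImage() to have been called on the thread that owns the GL context, so a typical caller looks roughly like the sketch below; surfaceTexture and renderer are assumed fields.

// Illustrative caller for each decoded or camera frame:
surfaceTexture.updateTexImage();                          // Latch the frame into the external texture.
renderer.drawFrame(surfaceTexture, /* invert = */ false);
// ... then swap buffers on the target EGL surface (e.g. via EGL14.eglSwapBuffers or a project wrapper).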
 
Example 11  Project: ZZShow   File: TextureRenderView.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    mSurfaceTexture = surface;
    mIsFormatChanged = false;
    mWidth = 0;
    mHeight = 0;

    ISurfaceHolder surfaceHolder = new InternalSurfaceHolder(mWeakRenderView.get(), surface, this);
    for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
        renderCallback.onSurfaceCreated(surfaceHolder, 0, 0);
    }
}
 
Example 12  Project: imsdk-android   File: JZMediaIjk.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    if (SAVED_SURFACE == null) {
        SAVED_SURFACE = surface;
        prepare();
    } else {
        jzvd.textureView.setSurfaceTexture(SAVED_SURFACE);
    }
}
 
Example 13  File: CameraSource.java
/**
 * Opens the camera and starts sending preview frames to the underlying detector.  The preview
 * frames are not displayed.
 *
 * @throws IOException if the camera's preview texture or display could not be initialized
 */
@RequiresPermission(Manifest.permission.CAMERA)
public CameraSource start() throws IOException {
    synchronized (mCameraLock) {
        if (mCamera != null) {
            return this;
        }

        mCamera = createCamera();

        // SurfaceTexture was introduced in Honeycomb (11), so if we are running an
        // old version of Android, fall back to using SurfaceView.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mDummySurfaceTexture = new SurfaceTexture(DUMMY_TEXTURE_NAME);
            mCamera.setPreviewTexture(mDummySurfaceTexture);
        } else {
            mDummySurfaceView = new SurfaceView(mContext);
            mCamera.setPreviewDisplay(mDummySurfaceView.getHolder());
        }
        mCamera.startPreview();

        mProcessingThread = new Thread(mFrameProcessor);
        mFrameProcessor.setActive(true);
        mProcessingThread.start();
    }
    return this;
}
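
A hedged usage sketch for a start() method like the one above, typically called from onResume() once the CAMERA permission has been granted; the release() cleanup call and the cameraSource field are assumptions.

try {
    cameraSource.start();                 // Opens the camera and starts the frame-processing thread.
} catch (IOException e) {
    Log.e(TAG, "Unable to start camera source.", e);
    cameraSource.release();               // Assumed cleanup method; free the camera on failure.
    cameraSource = null;
}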
 
Example 14  Project: VideoCompressor   File: OutputSurface.java
@Override
public void onFrameAvailable(SurfaceTexture st) {
    synchronized (mFrameSyncObject) {
        if (mFrameAvailable) {
            throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
        }
        mFrameAvailable = true;
        mFrameSyncObject.notifyAll();
    }
}
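
The notify above pairs with a blocking wait on the consumer side. Below is a minimal sketch of the awaitNewImage() counterpart that classes like this usually provide, reusing the same mFrameSyncObject/mFrameAvailable fields plus an assumed mSurfaceTexture field; the timeout value is illustrative.

public void awaitNewImage() {
    final int TIMEOUT_MS = 500;                        // Illustrative guard against a stalled producer.
    synchronized (mFrameSyncObject) {
        while (!mFrameAvailable) {
            try {
                // onFrameAvailable() will notify us; bail out if no frame arrives in time.
                mFrameSyncObject.wait(TIMEOUT_MS);
                if (!mFrameAvailable) {
                    throw new RuntimeException("frame wait timed out");
                }
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }
        }
        mFrameAvailable = false;
    }
    // Latch the frame into the texture; must run on the thread that owns the GL context.
    mSurfaceTexture.updateTexImage();
}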
 
Example 15  Project: react-native-video-helper   File: OutputSurface.java
private void setup() {
    mTextureRender = new TextureRenderer(rotateRender);
    mTextureRender.surfaceCreated();
    mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
    mSurfaceTexture.setOnFrameAvailableListener(this);
    mSurface = new Surface(mSurfaceTexture);
}
 
Example 16  Project: media-for-mobile   File: PreviewRender.java
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    if (!requestRendering) return;

    synchronized (activeEffectGuard) {
        if (listener == null) {
            requestRendering();
            return;
        }
        listener.onFrameAvailable();
    }
}
 
Example 17  Project: OnionCamera   File: MainActivity.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture s,
        int width, int height) {
    surface = s;
    surfaceWidth = width;
    surfaceHeight = height;
    if (camera != null) {
        startPreview();
    }
}
 
Example 18  Project: DanDanPlayForAndroid   File: TextureMediaPlayer.java
@Override
public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
    if (mSurfaceTexture == surfaceTexture)
        return;

    releaseSurfaceTexture();
    mSurfaceTexture = surfaceTexture;
    if (surfaceTexture == null) {
        super.setSurface(null);
    } else {
        super.setSurface(new Surface(surfaceTexture));
    }
}
 
Example 19  Project: libcommon   File: VideoSource.java
/**
 * Implementation of IPipelineSource:
 * returns the SurfaceTexture used for video input.
 * @return
 * @throws IllegalStateException
 */
@NonNull
@Override
public SurfaceTexture getInputSurfaceTexture() throws IllegalStateException {
	if (DEBUG) Log.v(TAG, "getInputSurfaceTexture:" + mInputTexture);
	checkValid();
	if (mInputTexture == null) {
		throw new IllegalStateException("has no master surface");
	}
	return mInputTexture;
}
 
Example 20  Project: IjkPlayerDemo   File: TextureMediaPlayer.java
@Override
public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
    if (mSurfaceTexture == surfaceTexture)
        return;

    releaseSurfaceTexture();
    mSurfaceTexture = surfaceTexture;
    if (surfaceTexture == null) {
        super.setSurface(null);
    } else {
        super.setSurface(new Surface(surfaceTexture));
    }
}
 
Example 21  Project: TelePlus-Android   File: SimpleExoPlayer.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
  if (needSetSurface) {
    setVideoSurfaceInternal(new Surface(surfaceTexture), true);
    needSetSurface = false;
  }
  maybeNotifySurfaceSizeChanged(width, height);
}
 
Example 22  Project: Mrthumb   File: NiceVideoPlayer.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    if (mSurfaceTexture == null) {
        mSurfaceTexture = surfaceTexture;
        openMediaPlayer();
    } else {
        mTextureView.setSurfaceTexture(mSurfaceTexture);
    }
}
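
Because the first SurfaceTexture is cached and re-attached, this pattern is usually paired with an onSurfaceTextureDestroyed() override that stops the TextureView from releasing it. A minimal sketch under that assumption, using the same mSurfaceTexture field:

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
    // Returning false keeps Android from releasing the SurfaceTexture,
    // so onSurfaceTextureAvailable() can re-attach it after e.g. a configuration change.
    return mSurfaceTexture == null;
}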
 
Example 23  Project: OTTLivePlayer_vlc   File: AWindow.java
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
    if (surfaceTexture == mSurfaceTexture) {
        if (mFrameAvailable)
            throw new IllegalStateException("An available frame was not updated");
        mFrameAvailable = true;
        notify();
    }
}
 
Example 24  Project: pasm-yolov3-Android   File: CameraGLRendererBase.java
private void initSurfaceTexture() {
    Log.d(LOGTAG, "initSurfaceTexture");
    deleteSurfaceTexture();
    initTexOES(texCamera);
    mSTexture = new SurfaceTexture(texCamera[0]);
    mSTexture.setOnFrameAvailableListener(this);
}
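
A minimal sketch of what the deleteSurfaceTexture() helper called above typically does, assuming the same mSTexture field; whether the underlying OES texture is also deleted here depends on the project.

private void deleteSurfaceTexture() {
    Log.d(LOGTAG, "deleteSurfaceTexture");
    if (mSTexture != null) {
        mSTexture.release();   // Free the buffer queue backing the camera texture.
        mSTexture = null;
        // Optionally delete texCamera[0] with GLES20.glDeleteTextures here as well.
    }
}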
 
Example 25  Project: vlc-example-streamplayer   File: AWindow.java
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
    if (surfaceTexture == mSurfaceTexture) {
        if (mFrameAvailable)
            throw new IllegalStateException("An available frame was not updated");
        mFrameAvailable = true;
        notify();
    }
}
 
Example 26  Project: In77Camera   File: EGLBase.java
EglSurface(final EGLBase egl, final Object surface) {
	if (DEBUG) Log.v(TAG, "EglSurface:");
	if (!(surface instanceof SurfaceView)
		&& !(surface instanceof Surface)
		&& !(surface instanceof SurfaceHolder)
		&& !(surface instanceof SurfaceTexture))
		throw new IllegalArgumentException("unsupported surface");
	mEgl = egl;
	mEglSurface = mEgl.createWindowSurface(surface);
	mWidth = mEgl.querySurface(mEglSurface, EGL14.EGL_WIDTH);
	mHeight = mEgl.querySurface(mEglSurface, EGL14.EGL_HEIGHT);
	if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", mWidth, mHeight));
}
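
A hedged usage sketch for the wrapper above, assuming the createFromSurface() factory and the makeCurrent()/swap() helpers that EGLBase classes of this style usually expose:

// Illustrative render-loop body on the EGL thread.
EGLBase.EglSurface eglSurface = eglBase.createFromSurface(surfaceTexture);  // Assumed factory on EGLBase.
eglSurface.makeCurrent();    // Bind this window surface to the wrapped context.
// ... GL draw calls targeting the SurfaceTexture ...
eglSurface.swap();           // Present the frame to the SurfaceTexture's consumer.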
 
Example 27  Project: XPlayer2   File: TextureRenderView.java
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    // Called when the SurfaceTexture's buffer size changes.
    MLog.i("================onSurfaceTextureSizeChanged");

    if (mOnSurfaceStatusListener != null) {
        MLog.i("========22========onSurfaceTextureSizeChanged");
        mOnSurfaceStatusListener.onSurfaceSizeChanged(mSurface, width, height);
    }
}
 
Example 28  Project: MusicPlayer   File: JBubblePicker.java
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
    mFrames = 0;
    mDelta = 0;
    mTotalRunningTime = 0;
    mStartTime = System.currentTimeMillis();
    initDraw();
    mExecutorService = Executors.newSingleThreadScheduledExecutor();
    mExecutorService.scheduleAtFixedRate(this::render, FRAME_INTERVAL, FRAME_INTERVAL, TimeUnit.MILLISECONDS);
    Log.d(TAG, "onSurfaceTextureAvailable");

}
 
Example 29  Project: Paddle-Lite-Demo   File: CameraSurfaceView.java
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    // Create OES texture for storing camera preview data(YUV format)
    GLES20.glGenTextures(1, camTextureId, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, camTextureId[0]);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
    surfaceTexture = new SurfaceTexture(camTextureId[0]);
    surfaceTexture.setOnFrameAvailableListener(this);

    // Prepare vertex and texture coordinates
    int bytes = vertexCoords.length * Float.SIZE / Byte.SIZE;
    vertexCoordsBuffer = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
    textureCoordsBuffer = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
    vertexCoordsBuffer.put(vertexCoords).position(0);
    textureCoordsBuffer.put(textureCoords).position(0);

    // Create vertex and fragment shaders
    // camTextureId->fboTexureId
    progCam2FBO = Utils.createShaderProgram(vss, fssCam2FBO);
    vcCam2FBO = GLES20.glGetAttribLocation(progCam2FBO, "vPosition");
    tcCam2FBO = GLES20.glGetAttribLocation(progCam2FBO, "vTexCoord");
    GLES20.glEnableVertexAttribArray(vcCam2FBO);
    GLES20.glEnableVertexAttribArray(tcCam2FBO);
    // fboTexureId/drawTexureId -> screen
    progTex2Screen = Utils.createShaderProgram(vss, fssTex2Screen);
    vcTex2Screen = GLES20.glGetAttribLocation(progTex2Screen, "vPosition");
    tcTex2Screen = GLES20.glGetAttribLocation(progTex2Screen, "vTexCoord");
    GLES20.glEnableVertexAttribArray(vcTex2Screen);
    GLES20.glEnableVertexAttribArray(tcTex2Screen);
}
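
Frames delivered to this SurfaceTexture are consumed on the GL thread. A minimal sketch of how onDrawFrame() typically begins in a renderer like this, using the surfaceTexture field created above:

@Override
public void onDrawFrame(GL10 gl) {
    surfaceTexture.updateTexImage();   // Latch the most recent camera frame into the OES texture.
    // ... then draw camTextureId through progCam2FBO into the FBO, and the result to screen via progTex2Screen.
}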
 
Example 30  Project: TVRemoteIME   File: TextureRenderView.java
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    mSurfaceTexture = surface;
    mIsFormatChanged = true;
    mWidth = width;
    mHeight = height;

    ISurfaceHolder surfaceHolder = new InternalSurfaceHolder(mWeakRenderView.get(), surface, this);
    for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
        renderCallback.onSurfaceChanged(surfaceHolder, 0, width, height);
    }
}