The following lists example code showing how to use the android.view.Surface API classes, or follow the links to view the full source code on GitHub.
/**
 * Collects the native surface id of every Surface in the given array.
 *
 * @param surfaces surfaces to query; must be non-null
 * @return one id per surface, in SparseArray value order
 * @throws NullPointerException if {@code surfaces} is null
 * @throws IllegalStateException if any surface has a null native
 *         GraphicBufferProducer pointer (id == 0)
 * @throws BufferQueueAbandonedException if a surface's buffer queue was abandoned
 */
static List<Long> getSurfaceIds(SparseArray<Surface> surfaces)
        throws BufferQueueAbandonedException {
    if (surfaces == null) {
        throw new NullPointerException("Null argument surfaces");
    }
    int count = surfaces.size();
    // Presize: we know exactly how many ids we will add.
    List<Long> surfaceIds = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        long id = getSurfaceId(surfaces.valueAt(i));
        // An id of 0 means the surface's native producer pointer is null.
        if (id == 0) {
            throw new IllegalStateException(
                    "Configured surface had null native GraphicBufferProducer pointer!");
        }
        surfaceIds.add(id);
    }
    return surfaceIds;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
// Returning from a "share" flow: release the stale texture and adopt the new one.
if (mSurfaceTexture != null && isComeBackFromShare()){
mSurfaceTexture.release();
mSurfaceTexture = surfaceTexture;
}
// First availability: keep the texture so it can be reused later.
if (mSurfaceTexture == null)
mSurfaceTexture = surfaceTexture;
// NOTE(review): if mSurfaceTexture already exists and we are NOT coming back
// from share, the incoming surfaceTexture is ignored and the player keeps
// rendering into the retained texture — presumably intentional for seamless
// playback across view re-creation; confirm the unused texture does not leak.
if (mMediaPlayer != null){
mMediaPlayer.setSurface(new Surface(mSurfaceTexture));
}
}
/**
 * Classifies the device's effective orientation by combining the reported
 * Configuration orientation with the display rotation (this distinguishes
 * naturally-landscape panels from rotated portrait ones).
 *
 * Fix: the original fetched Resources/DisplayMetrics that were never used;
 * those dead locals are removed.
 *
 * @param context context used to query the window service and configuration
 * @return {@code CellLayout.LANDSCAPE} or {@code CellLayout.PORTRAIT}
 */
private int getDeviceOrientation(Context context) {
    WindowManager windowManager = (WindowManager)
            context.getSystemService(Context.WINDOW_SERVICE);
    Configuration config = context.getResources().getConfiguration();
    int rotation = windowManager.getDefaultDisplay().getRotation();
    // Landscape reported at rotation 0/180 — device is naturally landscape.
    boolean isLandscape = (config.orientation == Configuration.ORIENTATION_LANDSCAPE) &&
            (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180);
    // Portrait reported at rotation 90/270 — also a naturally-landscape device.
    boolean isRotatedPortrait = (config.orientation == Configuration.ORIENTATION_PORTRAIT) &&
            (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270);
    if (isLandscape || isRotatedPortrait) {
        return CellLayout.LANDSCAPE;
    } else {
        return CellLayout.PORTRAIT;
    }
}
/**
 * Applies the {@link Matrix} transform that maps the camera preview buffer
 * onto {@code mTextureView}. Call after both the preview size (from
 * setUpCameraOutputs) and the view size are known.
 *
 * @param viewWidth  width of {@code mTextureView}
 * @param viewHeight height of {@code mTextureView}
 */
private void configureTransform(int viewWidth, int viewHeight) {
    final Activity activity = getActivity();
    if (mTextureView == null || mPreviewSize == null || activity == null) {
        return;
    }
    final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    final Matrix transform = new Matrix();
    final RectF viewBounds = new RectF(0, 0, viewWidth, viewHeight);
    // The camera buffer is sensor-oriented, so its width/height are swapped.
    final RectF bufferBounds = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
    final float pivotX = viewBounds.centerX();
    final float pivotY = viewBounds.centerY();
    if (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) {
        // Center the buffer on the view, map view->buffer, scale to fill,
        // then rotate so the image matches the display orientation.
        bufferBounds.offset(pivotX - bufferBounds.centerX(), pivotY - bufferBounds.centerY());
        transform.setRectToRect(viewBounds, bufferBounds, Matrix.ScaleToFit.FILL);
        final float scale = Math.max(
                (float) viewHeight / mPreviewSize.getHeight(),
                (float) viewWidth / mPreviewSize.getWidth());
        transform.postScale(scale, scale, pivotX, pivotY);
        transform.postRotate(90 * (rotation - 2), pivotX, pivotY);
    } else if (rotation == Surface.ROTATION_180) {
        transform.postRotate(180, pivotX, pivotY);
    }
    mTextureView.setTransform(transform);
}
/**
 * Callback fired when a stream finishes preparation: looks up the stream's
 * output configuration under the interface lock, then notifies the session
 * callback for every Surface belonging to that output.
 */
@Override
public void onPrepared(int streamId) {
    if (DEBUG) {
        Log.v(TAG, "Stream " + streamId + " is prepared");
    }
    final OutputConfiguration config;
    final StateCallbackKK callback;
    // Snapshot shared state under the lock; invoke callbacks outside it.
    synchronized (mInterfaceLock) {
        config = mConfiguredOutputs.get(streamId);
        callback = mSessionStateCallback;
    }
    if (callback == null) {
        return;
    }
    if (config == null) {
        Log.w(TAG, "onPrepared invoked for unknown output Surface");
        return;
    }
    for (Surface prepared : config.getSurfaces()) {
        callback.onSurfacePrepared(prepared);
    }
}
/**
 * Sets the screenshot capture size, rebuilding the platform-specific size
 * descriptor only when the dimensions actually change.
 *
 * Fix: the original guard used {@code this.width != width && this.height != height},
 * which skipped the update when only ONE dimension changed; any change in
 * either dimension must trigger a rebuild, so the inner test now uses OR.
 *
 * @param width  capture width in pixels; 0 is ignored
 * @param height capture height in pixels; 0 is ignored
 */
public void setSize(int width, int height) {
    // Ignore degenerate sizes and true no-op calls.
    if (width == 0 || height == 0 || (this.width == width && this.height == height)) {
        return;
    }
    if (Build.VERSION.SDK_INT >= 28) {
        // Android 9.0 changed the hidden API signature,
        // see https://github.com/wejoy/HJMirror/issues/4
        size = new Object[]{new Rect(), width, height, Surface.ROTATION_0};
    } else {
        size = new Object[]{width, height};
    }
    this.width = width;
    this.height = height;
}
/**
 * Handles the surface-created callback from SurfaceView: wraps the Surface
 * in an EGL window surface, prepares the GLES texture program, and creates
 * the SurfaceTexture that will receive frames from the camera.
 */
private void surfaceAvailable(SurfaceHolder holder, boolean newSurface) {
    mWindowSurface = new WindowSurface(mEglCore, holder.getSurface(), false);
    mWindowSurface.makeCurrent();

    // The camera feeds an external-OES texture; point the textured rect's
    // program at that texture.
    mTexProgram = new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT);
    final int texId = mTexProgram.createTextureObject();
    mCameraTexture = new SurfaceTexture(texId);
    mRect.setTexture(texId);

    if (!newSurface) {
        // A Surface carried over from a previous run sends no
        // surfaceChanged() message, so finish the size-dependent setup now.
        // (Calling this unconditionally would merely risk one redundant
        // reallocation if a surface-changed message did arrive.)
        mWindowSurfaceWidth = mWindowSurface.getWidth();
        mWindowSurfaceHeight = mWindowSurface.getHeight();
        finishSurfaceSetup();
    }

    mCameraTexture.setOnFrameAvailableListener(this);
}
/**
 * Computes the clockwise rotation (degrees) that makes the camera preview
 * upright for the current display rotation, mirror-compensated for the
 * front camera as required by Camera.setDisplayOrientation.
 */
public int getCameraDisplayOrientation() {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(currentCameraId, info);
    int rotation = context.getWindowManager().getDefaultDisplay().getRotation();

    int degrees;
    if (rotation == Surface.ROTATION_90) {
        degrees = 90;
    } else if (rotation == Surface.ROTATION_180) {
        degrees = 180;
    } else if (rotation == Surface.ROTATION_270) {
        degrees = 270;
    } else {
        degrees = 0;
    }

    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        // Front camera: add the rotation, then compensate for the mirror.
        int result = (info.orientation + degrees) % 360;
        return (360 - result) % 360;
    }
    // Back camera: subtract, keeping the value positive before the modulo.
    return (info.orientation - degrees + 360) % 360;
}
/**
 * Maps a Surface.ROTATION_* constant to the compensating angle in degrees
 * (ROTATION_90 -> 270, ROTATION_180 -> 180, ROTATION_270 -> 90).
 * Any other value maps to 0.
 */
private static float getDegreesForRotation(int rotation) {
    if (rotation == Surface.ROTATION_90) {
        return 270f;
    }
    if (rotation == Surface.ROTATION_180) {
        return 180f;
    }
    if (rotation == Surface.ROTATION_270) {
        return 90f;
    }
    return 0f;
}
/**
 * Returns the activity's current display rotation in degrees
 * (0, 90, 180 or 270); unknown rotation constants yield 0.
 */
private static int getDisplayRotation(Activity activity) {
    final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    if (rotation == Surface.ROTATION_90) {
        return 90;
    }
    if (rotation == Surface.ROTATION_180) {
        return 180;
    }
    if (rotation == Surface.ROTATION_270) {
        return 270;
    }
    // ROTATION_0 and any unrecognized value.
    return 0;
}
/**
 * Resolves the concrete ActivityInfo screen-orientation constant matching
 * what the user currently sees, accounting for devices whose natural
 * orientation is landscape.
 */
private int getCurrentOrientation() {
    final int rotation = getWindowManager().getDefaultDisplay().getRotation();
    final boolean landscape = getResources().getConfiguration().orientation
            == Configuration.ORIENTATION_LANDSCAPE;
    if (landscape) {
        // Rotations 0/90 show "normal" landscape; 180/270 the reverse.
        return (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90)
                ? ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
                : ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
    }
    // Rotations 0/270 show "normal" portrait; 90/180 the reverse.
    return (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_270)
            ? ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
            : ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
}
/**
 * Creates an EGL surface associated with a Surface (or SurfaceTexture).
 * <p>
 * If this is destined for MediaCodec, the EGLConfig should have the
 * "recordable" attribute.
 *
 * @param surface a {@code Surface} or {@code SurfaceTexture}
 * @return the created EGLSurface
 * @throws RuntimeException if the argument has the wrong type or creation fails
 */
public EGLSurface createWindowSurface(Object surface) {
    if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
        throw new RuntimeException("invalid surface: " + surface);
    }
    // Create a window surface, and attach it to the Surface we received.
    int[] surfaceAttribs = {
            EGL14.EGL_NONE
    };
    EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
            surfaceAttribs, 0);
    checkEglError("eglCreateWindowSurface");
    // Fix: eglCreateWindowSurface signals failure with EGL_NO_SURFACE (a
    // non-null sentinel), so a null-only check could let a failed creation
    // slip through; test for both.
    if (eglSurface == null || eglSurface == EGL14.EGL_NO_SURFACE) {
        throw new RuntimeException("surface was null");
    }
    return eglSurface;
}
/**
 * Requests that preview rendering start into the given target.
 *
 * @param surface a SurfaceHolder, Surface or SurfaceTexture to render into
 * @throws IllegalArgumentException if the target has an unsupported type
 */
public void startPreview(final Object surface) {
    checkReleased();
    final boolean supported = (surface instanceof SurfaceHolder)
            || (surface instanceof Surface)
            || (surface instanceof SurfaceTexture);
    if (!supported) {
        throw new IllegalArgumentException("surface should be one of SurfaceHolder, Surface or SurfaceTexture: " + surface);
    }
    sendMessage(obtainMessage(MSG_PREVIEW_START, surface));
}
/**
 * Opens the camera device.
 * <p>
 * When connecting with a TextureView, the state machine transitions
 * {@link #STATE_INIT_SURFACE} to {@link #STATE_OPEN_CAMERA}.
 * </p>
 * @param textureView view that renders the camera device's preview
 * @param surfaces list of Surfaces the camera output is drawn into
 * @throws IllegalStateException if the camera device is already open
 */
public void open(@NonNull AutoFitTextureView textureView, List<Surface> surfaces) {
if (mState != null) {
throw new IllegalStateException("Camera2 has already started. state=" + mState);
}
mTextureView = textureView;
// Replace any previously registered output surfaces with the new set.
mSurfaces.clear();
mSurfaces.addAll(surfaces);
nextState(mInitSurfaceState);
}
/**
 * Produces the output Surface for the backing view: the SurfaceView
 * holder's Surface, or a new Surface wrapping the TextureView's
 * SurfaceTexture; null for any other view type.
 */
private Surface getSurface() {
    if (type == TYPE_SURFACE_VIEW) {
        return ((SurfaceView) view).getHolder().getSurface();
    }
    if (type == TYPE_TEXTURE_VIEW) {
        SurfaceTexture texture = ((TextureView) view).getSurfaceTexture();
        assert texture != null;
        return new Surface(texture);
    }
    return null;
}
/**
 * Returns the render view's Surface, or null when no render view is set.
 */
@Override
public Surface getSurface() {
    return mRenderView != null ? mRenderView.getSurface() : null;
}
/**
 * Initializes the GL video renderer: validates arguments, allocates the
 * vertex buffer, resolves rotation and aspect ratio from the media formats,
 * wires up the input/output surfaces, and initializes GL plus all filters.
 *
 * @param outputSurface     surface the rendered video is written to; required
 * @param sourceMediaFormat source format, used as a rotation fallback; may be null
 * @param targetMediaFormat target format supplying rotation/size; required
 * @throws IllegalArgumentException if outputSurface or targetMediaFormat is null
 */
@Override
public void init(@Nullable Surface outputSurface, @Nullable MediaFormat sourceMediaFormat, @Nullable MediaFormat targetMediaFormat) {
if (outputSurface == null) {
throw new IllegalArgumentException("GlVideoRenderer requires an output surface");
}
if (targetMediaFormat == null) {
throw new IllegalArgumentException("GlVideoRenderer requires target media format");
}
// Native-order direct buffer: required for passing vertex data to GL.
triangleVertices = ByteBuffer.allocateDirect(
triangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
triangleVertices.put(triangleVerticesData).position(0);
// prioritize target video rotation value, fall back to source video rotation value
int rotation = 0;
if (targetMediaFormat.containsKey(KEY_ROTATION)) {
rotation = targetMediaFormat.getInteger(KEY_ROTATION);
} else if (sourceMediaFormat != null && sourceMediaFormat.containsKey(KEY_ROTATION)) {
rotation = sourceMediaFormat.getInteger(KEY_ROTATION);
}
// Default aspect ratio of 1 when the target format lacks dimensions.
float aspectRatio = 1;
if (targetMediaFormat.containsKey(MediaFormat.KEY_WIDTH) && targetMediaFormat.containsKey(MediaFormat.KEY_HEIGHT)) {
aspectRatio = (float) targetMediaFormat.getInteger(MediaFormat.KEY_WIDTH) / targetMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
}
this.outputSurface = new VideoRenderOutputSurface(outputSurface);
inputSurface = new VideoRenderInputSurface();
initMvpMatrix(rotation, aspectRatio);
initGl();
// Hand each filter its own copy of the MVP matrix so filters cannot
// mutate the renderer's shared state.
for (GlFilter filter : filters) {
filter.init(Arrays.copyOf(mvpMatrix, mvpMatrix.length), 0);
}
}
/**
 * Pushes raw pixel data to a Surface. Direct buffers are handed to the
 * native renderer as-is; heap buffers are first copied into a byte array.
 *
 * NOTE(review): the array-based path drops the colorFormat/stride
 * arguments — presumably its native entry point derives them; confirm.
 */
public static void renderSurface(Surface s, ByteBuffer pixelBytes, int offset, int width, int height, int size, int colorFormat, int stride) {
    if (!pixelBytes.isDirect()) {
        // Copy the remaining bytes out of the heap-backed buffer.
        final byte[] copy = new byte[pixelBytes.remaining()];
        pixelBytes.get(copy);
        renderingToSurface(s, copy, offset, width, height, size);
        return;
    }
    renderingToSurface2(s, pixelBytes, offset, width, height, size, colorFormat, stride);
}
/**
 * Validates the arguments and applies new default dimensions to a Surface
 * through the native legacy-camera helper.
 *
 * @param surface surface to resize; must be non-null
 * @param width   new width; must be positive
 * @param height  new height; must be positive
 * @throws BufferQueueAbandonedException if the native call reports the
 *         surface's buffer queue was abandoned (translated by throwOnError)
 */
static void setSurfaceDimens(Surface surface, int width, int height)
throws BufferQueueAbandonedException {
checkNotNull(surface);
checkArgumentPositive(width, "width must be positive.");
checkArgumentPositive(height, "height must be positive.");
LegacyExceptionUtils.throwOnError(nativeSetSurfaceDimens(surface, width, height));
}
/**
 * Handles a display-surface message: detaches the player's surface when the
 * message carries none, otherwise remembers the Surface and attaches it to
 * the player once it is valid.
 */
@Override
public void showDisplay(Message msg) {
    if (msg.obj == null && mediaPlayer != null) {
        mediaPlayer.setSurface(null);
        return;
    }
    final Surface target = (Surface) msg.obj;
    // Keep the latest surface even if the player is not ready yet.
    surface = target;
    if (mediaPlayer != null && target.isValid()) {
        mediaPlayer.setSurface(target);
    }
}
/**
 * GL surface-created callback: builds the SurfaceTexture-backed Surface,
 * hands it to the MediaPlayer, prepares the player once, and starts playback.
 */
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
super.init();
/*
 * Create the SurfaceTexture that will feed this textureID, and pass
 * it to the MediaPlayer
 */
mSurface = new SurfaceTexture(getTexture());
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
mMediaPlayer.setScreenOnWhilePlaying(true);
// The player holds its own reference, so the local wrapper can be released.
surface.release();
// Prepare only once; subsequent surface re-creations reuse the prepared player.
if (!isMediaPlayerPrepared) {
try {
mMediaPlayer.prepare();
} catch (IOException e) {
// NOTE(review): failure is only logged via the stack trace, yet
// isMediaPlayerPrepared is still set below — confirm this is intended.
e.printStackTrace();
}
isMediaPlayerPrepared = true;
}
// Reset the frame flag under the same monitor used by the frame callback.
synchronized (this) {
updateSurface = false;
}
mMediaPlayer.start();
}
/**
 * Captures the calling thread's current EGL context, display and draw
 * surface, then creates an additional window surface for the encoder input.
 * Must be called on a thread with a current EGL context.
 *
 * @param eglConfig     EGL config used to create the encoder surface
 * @param encodeSurface MediaCodec input Surface to wrap
 */
public WindowSurface(EGLConfig eglConfig, Surface encodeSurface) {
mEglConfig = eglConfig;
mEgl= (EGL10) EGLContext.getEGL();
// Snapshot the current EGL state so rendering can target either surface.
mEglContext = mEgl.eglGetCurrentContext();
mEglDisplay = mEgl.eglGetCurrentDisplay();
mWindowSurface = mEgl.eglGetCurrentSurface(EGL10.EGL_DRAW);
mEncoderSurface = createWindowSurface(encodeSurface);
Log.i(TAG, "eglCreateWindowSurface:" + mEgl.eglGetError());
}
/**
 * Wraps the newly available SurfaceTexture in a Surface and forwards it to
 * the registered callback, if any.
 */
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
    surface = new Surface(surfaceTexture);
    if (callback == null) {
        return;
    }
    callback.onSurfaceAvailable(surface);
}
/**
 * Attaches the given Surface to libvlc, first detaching any surface that
 * was previously attached. A null surface only performs the detach.
 *
 * NOTE(review): mSurfaceReady is read but not updated here — presumably the
 * attach/detach callbacks maintain it; confirm.
 */
private void attachVlcSurface(Surface surface) {
if (mSurfaceReady)
libvlc.detachSurface();
if (surface != null) {
ExoVlcUtil.log(this, " VLCIVideoSurfaceHandler.attachVlcSurface() Setting lib vlc with surface : "
+ surface);
libvlc.attachSurface(surface, this);
}
}
/**
 * Creates, configures and starts a video encoder for the given codec and
 * format, publishing the encoder's input Surface through the reference.
 *
 * @param codecInfo        codec to instantiate (by name)
 * @param format           encoder output format
 * @param surfaceReference receives the encoder's input Surface
 * @return the started MediaCodec encoder
 * @throws IOException if the codec cannot be created
 */
private @NonNull
MediaCodec createVideoEncoder(
final @NonNull MediaCodecInfo codecInfo,
final @NonNull MediaFormat format,
final @NonNull AtomicReference<Surface> surfaceReference) throws IOException {
final MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// Must be called before start()
surfaceReference.set(encoder.createInputSurface());
encoder.start();
return encoder;
}
/**
 * Computes the display orientation for a camera so its preview appears
 * upright under the current screen rotation, mirror-compensated for
 * front-facing cameras per the Camera.setDisplayOrientation contract.
 *
 * @param context  used to look up the WindowManager
 * @param cameraId camera to query
 * @return clockwise rotation in degrees (0, 90, 180 or 270)
 */
public static int getCameraDisplayOrientation(Context context, int cameraId) {
    android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(cameraId, info);
    WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    int rotation = wm.getDefaultDisplay().getRotation();

    int degrees;
    if (rotation == Surface.ROTATION_90) {
        degrees = 90;
    } else if (rotation == Surface.ROTATION_180) {
        degrees = 180;
    } else if (rotation == Surface.ROTATION_270) {
        degrees = 270;
    } else {
        degrees = 0;
    }

    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        // Front camera: add the rotation, then compensate for the mirror.
        int result = (info.orientation + degrees) % 360;
        return (360 - result) % 360;
    }
    // Back-facing: subtract, keeping the value positive before the modulo.
    return (info.orientation - degrees + 360) % 360;
}
/**
 * Starts the JNI decode pipeline: lazily creates the H264 player, binds it
 * to the output surface, and launches the decoder thread.
 *
 * NOTE(review): width/height/frameRate are not used in this method —
 * confirm whether they should be forwarded to the player or decoder.
 */
public void startJniWorkThread(Surface surface, int width, int height, int frameRate, IFrameDataListener listener) {
    if (mH264Player == null) {
        mH264Player = new H264Player(listener);
    }
    mH264Player.setSurface(surface);
    mH264DecoderThread = new H264DecoderThread(mH264Player, listener);
    mH264DecoderThread.start();
    isWorking = true;
}
/**
 * Creates an InputSurface that renders into the given Surface.
 *
 * @param surface target surface; must not be null
 * @throws NullPointerException if {@code surface} is null
 */
public InputSurface(Surface surface) {
    if (null == surface) {
        throw new NullPointerException();
    }
    mSurface = surface;
    // Set up EGL state against the provided surface.
    eglSetup();
}
/**
 * Rebuilds the EGL context and render target on this worker thread using
 * the given shared context. Accepts either a Surface or a SurfaceTexture
 * (which is wrapped in a new Surface).
 *
 * @param shard_context shared EGLContext
 * @param surface       Surface or SurfaceTexture to render into
 * @param isRecordable  whether the EGL config must be MediaCodec-recordable
 */
public final void handleSetEglContext(final EGLContext shard_context, final Object surface, final boolean isRecordable) {
    if (DEBUG) Log.i(TAG_THREAD, "setEglContext:");
    // Tear down any previous EGL state before rebuilding.
    release();
    synchronized (mSync) {
        if (surface instanceof Surface) {
            mSurface = (Surface) surface;
        } else if (surface instanceof SurfaceTexture) {
            mSurface = new Surface((SurfaceTexture) surface);
        } else {
            mSurface = null;
        }
    }
    mEgl = new EGLBase(shard_context, false, isRecordable);
    mTargetSurface = mEgl.createFromSurface(surface);
    mDrawer = new GLDrawer2D();
}
/**
 * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
 * with the SurfaceTexture.
 *
 * @param listener receives onFrameAvailable callbacks from the SurfaceTexture
 */
private void setup(SurfaceTexture.OnFrameAvailableListener listener) {
mTextureRender = new FrameBufferObjectRenderer(mFilter);
mTextureRender.surfaceCreated();
// Even if we don't access the SurfaceTexture after the constructor returns, we
// still need to keep a reference to it. The Surface doesn't retain a reference
// at the Java level, so if we don't either then the object can get GCed, which
// causes the native finalizer to run.
if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// This doesn't work if OutputSurface is created on the thread that CTS started for
// these test cases.
//
// The CTS-created thread has a Looper, and the SurfaceTexture constructor will
// create a Handler that uses it. The "frame available" message is delivered
// there, but since we're not a Looper-based thread we'll never see it. For
// this to do anything useful, OutputSurface must be created on a thread without
// a Looper, so that SurfaceTexture uses the main application Looper instead.
//
// Java language note: passing "this" out of a constructor is generally unwise,
// but we should be able to get away with it here.
mSurfaceTexture.setOnFrameAvailableListener(listener);
mSurface = new Surface(mSurfaceTexture);
// Fail fast if any of the GL/EGL calls above left an error pending.
checkEglError("setup");
}