The following examples show how to use the android.hardware.display.VirtualDisplay API class, with instance code and usage patterns; follow the links to GitHub to view the full source code.
/**
 * Returns the cached {@link VirtualDisplay}, creating it on first use and
 * resizing it when the requested encode dimensions differ from the current
 * display size.
 *
 * <p>The display is created without a surface (attached later) and with the
 * PUBLIC flag. NOTE(review): the density is hard-coded to 1 dpi — presumably
 * intentional for a capture-only display, but verify against the recorder.
 *
 * @param mediaProjection projection used to create the display on first call
 * @param config          encode config supplying the target width/height
 * @return the (possibly resized) shared virtual display
 */
private VirtualDisplay getOrCreateVirtualDisplay(MediaProjection mediaProjection, VideoEncodeConfig config) {
    if (mVirtualDisplay != null) {
        // Reuse the cached display; resize only when the size changed.
        Point current = new Point();
        mVirtualDisplay.getDisplay().getSize(current);
        boolean sizeMismatch = current.x != config.width || current.y != config.height;
        if (sizeMismatch) {
            mVirtualDisplay.resize(config.width, config.height, 1);
        }
    } else {
        mVirtualDisplay = mediaProjection.createVirtualDisplay("ScreenRecorder-display0",
                config.width, config.height, 1 /*dpi*/,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                null /*surface*/, null /*callback*/, null /*handler*/);
    }
    return mVirtualDisplay;
}
/**
 * Builds a {@link VirtualDisplay.Callback} that logs the display's
 * lifecycle transitions (paused / resumed / stopped).
 *
 * <p>Fix: the original logged these purely informational events at error
 * level ({@code Log.e}); they are now logged at debug level. The tag and
 * message text are unchanged so existing log filters still match.
 *
 * @return a new callback instance; never {@code null}
 */
public VirtualDisplay.Callback getDisplayCallback() {
    return new VirtualDisplay.Callback() {
        @Override
        public void onPaused() {
            Log.d("ABC", "$$$$$$$$$$ onPause");
        }

        @Override
        public void onResumed() {
            Log.d("ABC", "$$$$$$$$$$ onResumed");
        }

        @Override
        public void onStopped() {
            Log.d("ABC", "$$$$$$$$$$ onStopped");
        }
    };
}
/**
 * Creates the screen-capture display whose frames are rendered into the
 * SurfaceTexture owned by {@code mSurfaceTextureHelper}.
 *
 * @param width  capture width in pixels
 * @param height capture height in pixels
 * @return the newly created virtual display
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private VirtualDisplay createVirtualDisplay(int width, int height) {
    // Size the texture first so projected frames match the display size.
    mSurfaceTextureHelper.setTextureSize(width, height);
    Surface outputSurface = new Surface(mSurfaceTextureHelper.getSurfaceTexture());
    return mMediaProjection.createVirtualDisplay(
            "RongRTC_ScreenCapture",
            width, height,
            VIRTUAL_DISPLAY_DPI,
            DISPLAY_FLAGS,
            outputSurface,
            null /*callback*/,
            null /*handler*/);
}
/**
 * Builds a {@link ScreenRecorder} that captures the given virtual display to
 * {@code output}, wiring a callback that tears down the UI on stop and, on
 * success, asks the media scanner to index the finished file.
 *
 * NOTE(review): sendBroadcast with Uri.fromFile() throws
 * FileUriExposedException on API 24+ and ACTION_MEDIA_SCANNER_SCAN_FILE is
 * deprecated on API 29+ — presumably this targets older APIs; verify, or
 * migrate to MediaScannerConnection / MediaStore.
 */
private ScreenRecorder newRecorder(MediaProjection mediaProjection, VideoEncodeConfig video,
AudioEncodeConfig audio, File output) {
// Reuses (or lazily creates) the shared display sized from the video config.
final VirtualDisplay display = getOrCreateVirtualDisplay(mediaProjection, video);
ScreenRecorder r = new ScreenRecorder(video, audio, display, output.getAbsolutePath());
r.setCallback(new ScreenRecorder.Callback() {
// Presentation timestamp (us) of the first recorded frame; 0 = not started.
long startTime = 0;
@Override
public void onStop(Throwable error) {
// Stop must run on the UI thread; this callback may fire elsewhere.
runOnUiThread(() -> stopRecorder());
if (error != null) {
toast("Recorder error ! See logcat for more details");
error.printStackTrace();
// Best-effort cleanup of the partial file; return value ignored.
output.delete();
} else {
// Successful recording: ask the media scanner to index the new file.
Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE)
.addCategory(Intent.CATEGORY_DEFAULT)
.setData(Uri.fromFile(output));
sendBroadcast(intent);
}
}
@Override
public void onStart() {
// mNotifications.recording(0);
}
@Override
public void onRecording(long presentationTimeUs) {
// Latch the first frame's timestamp so elapsed time starts at zero.
if (startTime <= 0) {
startTime = presentationTimeUs;
}
// Elapsed recording time in milliseconds (timestamps are microseconds).
long time = (presentationTimeUs - startTime) / 1000;
// mNotifications.recording(time);
}
});
return r;
}
/**
 * Creates the recording display that renders into the MediaRecorder's
 * input surface at the configured size and screen density.
 *
 * @return the newly created virtual display
 */
private VirtualDisplay createVirtualDisplay() {
    // Own-content-only + public: show only what is explicitly rendered to
    // this display, and make it visible to other display clients.
    int flags = DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
            | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC;
    Surface recorderSurface = mMediaRecorder.getSurface();
    return mMediaProjection.createVirtualDisplay("MainActivity",
            DISPLAY_WIDTH, DISPLAY_HEIGHT, mScreenDensity,
            flags, recorderSurface, null /*Callbacks*/, null /*Handler*/);
}
/**
 * Creates a screen recorder bound to an existing virtual display.
 *
 * @param video   video encode configuration (required)
 * @param audio   audio encode configuration, or {@code null} for video-only
 * @param display virtual display whose content is recorded
 * @param dstPath absolute path of the output file
 */
public ScreenRecorder(VideoEncodeConfig video,
                      AudioEncodeConfig audio,
                      VirtualDisplay display,
                      String dstPath) {
    mVirtualDisplay = display;
    mDstPath = dstPath;
    mVideoEncoder = new VideoEncoder(video);
    // Audio is optional: a null config means no microphone track.
    mAudioEncoder = (audio != null) ? new MicRecorder(audio) : null;
}
/**
 * Synchronously captures a single frame of the mirrored screen.
 *
 * <p>Creates a temporary {@link VirtualDisplay} backed by an
 * {@link ImageReader}, waits up to one second for a frame, and converts it
 * to an ARGB_8888 bitmap cropped to exactly {@code mWidth x mHeight}.
 *
 * <p>Fixes over the original: the ImageReader is now closed (it leaked);
 * a {@code null} image from {@code acquireLatestImage()} no longer causes an
 * NPE; the interrupt status is restored and the cause preserved; and the
 * returned bitmap is cropped so row padding no longer widens the result.
 *
 * @return the captured frame, exactly {@code mWidth x mHeight}
 * @throws IllegalStateException on timeout, missing frame, or interruption
 */
public synchronized Bitmap captureScreen() throws IllegalStateException {
    AtomicReference<Image> imageHolder = new AtomicReference<>();
    final CountDownLatch latch = new CountDownLatch(1);
    // Local, not a field: renamed from the misleading m-prefixed name.
    ImageReader imageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
    imageReader.setOnImageAvailableListener(reader -> {
        imageHolder.set(reader.acquireLatestImage());
        latch.countDown();
    }, mHandler);
    VirtualDisplay display = mProjection.createVirtualDisplay("screen-mirror", mWidth, mHeight, mDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
            imageReader.getSurface(), null, null);
    try {
        // await() returns false on timeout — clearer than re-reading getCount().
        if (!latch.await(1, TimeUnit.SECONDS)) {
            throw new IllegalStateException("Screen capturing timed out");
        }
        Image image = imageHolder.get();
        if (image == null) {
            // acquireLatestImage() may legally return null.
            throw new IllegalStateException("Screen capturing produced no image");
        }
        try {
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            int pixelStride = planes[0].getPixelStride();
            int rowStride = planes[0].getRowStride();
            int rowPadding = rowStride - pixelStride * mWidth;
            // Rows may carry padding: copy into a padded bitmap first.
            Bitmap padded = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight,
                    Bitmap.Config.ARGB_8888);
            padded.copyPixelsFromBuffer(buffer);
            if (rowPadding == 0) {
                return padded; // already exactly mWidth wide
            }
            // Crop off the padding columns so callers get mWidth x mHeight.
            Bitmap cropped = Bitmap.createBitmap(padded, 0, 0, mWidth, mHeight);
            padded.recycle();
            return cropped;
        } finally {
            image.close();
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag and preserve the cause.
        Thread.currentThread().interrupt();
        throw new IllegalStateException("Got interrupt while capturing screen", e);
    } finally {
        display.release();
        imageReader.close(); // leak fix: the reader was never closed
    }
}
/**
 * Creates an auto-mirroring display that copies the device screen into the
 * MediaRecorder's input surface at the fixed size and density constants.
 *
 * @return the newly created virtual display
 */
private VirtualDisplay createVirtualDisplay() {
    Surface target = mMediaRecorder.getSurface();
    // AUTO_MIRROR: the display shows a mirror of the default display.
    return mMediaProjection.createVirtualDisplay("MainActivity",
            WIDTH, HEIGHT, DENSITY_DPI,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            target, null /*Callbacks*/, null /*Handler*/);
}
/**
 * Creates the mirroring display, swapping the configured width and height
 * when their orientation disagrees with the physical screen's orientation
 * (so a landscape screen always gets a landscape-shaped display and vice
 * versa).
 *
 * @return the newly created virtual display
 * @throws RuntimeException if the WindowManager service is unavailable
 */
private VirtualDisplay createVirtualDisplay() {
    WindowManager wm = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
    if (wm == null) {
        throw new RuntimeException("WindowManager is not supported.");
    }
    DisplayMetrics metrics = new DisplayMetrics();
    wm.getDefaultDisplay().getMetrics(metrics);
    boolean screenIsLandscape = metrics.widthPixels > metrics.heightPixels;
    boolean sizeIsLandscape = mWidth > mHeight;
    int w = mWidth;
    int h = mHeight;
    // When the configured size's orientation disagrees with the screen's,
    // swap the dimensions. (If mWidth == mHeight the swap is a no-op.)
    if (screenIsLandscape != sizeIsLandscape) {
        w = mHeight;
        h = mWidth;
    }
    return mMediaProjection.createVirtualDisplay(
            "Android Host Screen",
            w,
            h,
            mDisplayDensityDpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            getSurface(),
            mCallback,
            new Handler(Looper.getMainLooper()));
}
/**
 * Creates the casting display sized from the configured video quality,
 * swapping width and height when their orientation disagrees with the
 * physical screen's orientation (matching the sibling implementation that
 * swaps raw width/height fields).
 *
 * <p>Fixes: the portrait branch previously assigned
 * {@code getVideoWidth()} to BOTH {@code w} and {@code h}, so the height was
 * wrong and the width never actually swapped; the WindowManager failure also
 * threw a {@code RuntimeException} with an empty message.
 *
 * @return the newly created virtual display
 * @throws RuntimeException if the WindowManager service is unavailable
 */
@Override
protected VirtualDisplay createVirtualDisplay() {
    int w = mVideoQuality.getVideoWidth();
    int h = mVideoQuality.getVideoHeight();
    WindowManager wm = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
    if (wm == null) {
        throw new RuntimeException("WindowManager service is not available.");
    }
    DisplayMetrics dm = new DisplayMetrics();
    wm.getDefaultDisplay().getMetrics(dm);
    if (dm.widthPixels > dm.heightPixels) {
        // Landscape screen: ensure the display is landscape-shaped.
        if (w < h) {
            w = mVideoQuality.getVideoHeight();
            h = mVideoQuality.getVideoWidth();
        }
    } else {
        // Portrait (or square) screen: ensure the display is portrait-shaped.
        if (w > h) {
            w = mVideoQuality.getVideoHeight();
            h = mVideoQuality.getVideoWidth();
        }
    }
    return mMediaProjection.createVirtualDisplay(
            "Screen Cast",
            w, h,
            mDisplayDensityDpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            mOutputSurface,
            getDisplayCallback(),
            new Handler(Looper.getMainLooper()));
}
/**
 * Encodes the virtual display's content as H.264 ("video/avc") and hands
 * the running codec to {@code stream(...)} until streaming finishes.
 *
 * <p>Fixes: {@code MediaCodec.createEncoderByType} throws a checked
 * {@link IOException} that {@code Runnable.run()} cannot propagate — the
 * original did not compile; it is now wrapped in a RuntimeException. The
 * codec and its input surface are also released (both leaked before).
 */
@Override
public void run() {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", mWidth, mHeight);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
    MediaCodec codec;
    try {
        codec = MediaCodec.createEncoderByType("video/avc");
    } catch (IOException e) {
        // Checked exception cannot escape run(); preserve the cause.
        throw new RuntimeException("Failed to create \"video/avc\" encoder", e);
    }
    Surface surface = null;
    try {
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        surface = codec.createInputSurface();
        codec.start();
        VirtualDisplay virtualDisplay = mDisplayManager
                .createVirtualDisplay(DISPLAY_NAME, mWidth, mHeight,
                        mDensityDpi, surface,
                        DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC);
        if (virtualDisplay != null) {
            stream(codec);
            virtualDisplay.release();
        }
        codec.signalEndOfInputStream();
        codec.stop();
    } finally {
        // Leak fixes: release the input surface and the codec itself.
        if (surface != null) {
            surface.release();
        }
        codec.release();
    }
}
/**
 * Captures a native system screenshot via MediaProjection: mirrors the real
 * display into an ImageReader-backed VirtualDisplay, converts the first
 * available frame to a bitmap, and hands the cropped result to the lens.
 * Falls back to CANVAS screenshot mode if the system capture is unsupported.
 */
@TargetApi(LOLLIPOP) void captureNativeScreenshot(final MediaProjection projection) {
capturingStart();
// Wait for the next frame to be sure our progress bars are hidden.
post(() -> {
DisplayMetrics displayMetrics = new DisplayMetrics();
windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
final int width = displayMetrics.widthPixels;
final int height = displayMetrics.heightPixels;
@SuppressLint("WrongConstant")
ImageReader imageReader = ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 2);
Surface surface = imageReader.getSurface();
// NOTE(review): the display is created before the image listener is
// registered below — presumably the reader buffers the first frame; verify.
final VirtualDisplay display =
projection.createVirtualDisplay("telescope", width, height, displayMetrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION, surface, null, null);
imageReader.setOnImageAvailableListener(reader -> {
Image image = null;
Bitmap bitmap = null;
try {
image = reader.acquireLatestImage();
post(this::capturingEnd);
// acquireLatestImage() may return null; bail out quietly in that case.
if (image == null) {
return;
}
saving = true;
// RGBA_8888 frame: plane 0 holds the pixels; rows may carry padding.
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * width;
// Bitmap is widened by the padding columns, then cropped below.
bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height,
Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
// Trim the screenshot to the correct size.
// NOTE(review): when rowPadding == 0, createBitmap(bitmap, 0, 0, w, h)
// may return `bitmap` itself, which the finally block then recycles
// while croppedBitmap is still in use — confirm this cannot happen here.
final Bitmap croppedBitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
checkLens();
lens.onCapture(croppedBitmap,
processed -> new SaveScreenshotTask(croppedBitmap).execute());
} catch (UnsupportedOperationException e) {
// Some devices refuse system capture; degrade to canvas-based capture.
Log.e(TAG,
"Failed to capture system screenshot. Setting the screenshot mode to CANVAS.", e);
setScreenshotMode(ScreenshotMode.CANVAS);
post(this::captureCanvasScreenshot);
} finally {
// One-shot capture: tear everything down regardless of outcome.
if (bitmap != null) {
bitmap.recycle();
}
if (image != null) {
image.close();
}
reader.close();
display.release();
projection.stop();
}
}, getBackgroundHandler());
});
}
/**
 * Creates the {@link VirtualDisplay} used by this capture session;
 * implementations choose its name, size, density, flags, and output surface.
 *
 * @return the newly created virtual display
 */
protected abstract VirtualDisplay createVirtualDisplay();