Below API 30, AccessibilityService has no built-in takeScreenshot() method (that API was only added in API 30), so the usual workaround is to capture the screen with MediaProjection: the user grants capture permission once, a VirtualDisplay mirrors the screen into an ImageReader, and the accessibility service reads frames from that reader. A service along these lines could look like this:
import android.accessibilityservice.AccessibilityService;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import android.util.DisplayMetrics;
import android.view.accessibility.AccessibilityEvent;

import java.nio.ByteBuffer;

public class MyAccessibilityService extends AccessibilityService {

    private ImageReader mImageReader;

    @Override
    public void onCreate() {
        super.onCreate();
        // ImageReader is available from API 19, so no SDK version branch is needed here.
        DisplayMetrics metrics = getResources().getDisplayMetrics();
        mImageReader = ImageReader.newInstance(metrics.widthPixels, metrics.heightPixels,
                PixelFormat.RGBA_8888, 2);
        // The reader only receives frames once its surface is attached to a VirtualDisplay
        // created from a MediaProjection (see the wiring sketch after this class).
    }

    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) {
        // In practice you would filter events; here every event triggers a capture.
        takeScreenshot();
    }

    @Override
    public void onInterrupt() {
    }

    private void takeScreenshot() {
        // acquireLatestImage() returns an Image; the pixel planes live on the Image,
        // not on the ImageReader itself.
        Image image = mImageReader.acquireLatestImage();
        if (image == null) {
            return; // No frame has been produced yet.
        }
        try {
            int width = image.getWidth();
            int height = image.getHeight();
            Image.Plane plane = image.getPlanes()[0];
            ByteBuffer buffer = plane.getBuffer();
            int pixelStride = plane.getPixelStride(); // bytes per pixel, 4 for RGBA_8888
            int rowStride = plane.getRowStride();     // bytes per row, may include padding
            int rowPadding = rowStride - pixelStride * width;

            // Copy the padded rows into a bitmap, then crop away the padding columns.
            Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height,
                    Bitmap.Config.ARGB_8888);
            padded.copyPixelsFromBuffer(buffer);
            Bitmap screenshot = Bitmap.createBitmap(padded, 0, 0, width, height);
            // TODO: save or process the screenshot bitmap here.
        } finally {
            image.close(); // Release the buffer so new frames can be delivered.
        }
    }
}
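The class above only reads frames; nothing writes them until a MediaProjection-backed VirtualDisplay is attached to the reader's surface. Below is a minimal sketch of that wiring from an ordinary Activity, assuming the standard screen-capture consent flow. The MyAccessibilityService.getImageReader() call is a hypothetical accessor standing in for whatever mechanism (a static reference, a bound service, etc.) your app uses to reach the reader created in the service, and REQUEST_CAPTURE is an arbitrary request code.

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.media.ImageReader;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Bundle;
import android.util.DisplayMetrics;

public class CaptureSetupActivity extends Activity {

    private static final int REQUEST_CAPTURE = 1; // arbitrary request code

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Show the system dialog asking the user to allow screen capture.
        MediaProjectionManager manager =
                (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        startActivityForResult(manager.createScreenCaptureIntent(), REQUEST_CAPTURE);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode != REQUEST_CAPTURE || resultCode != RESULT_OK || data == null) {
            return;
        }
        MediaProjectionManager manager =
                (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        MediaProjection projection = manager.getMediaProjection(resultCode, data);

        // Mirror the screen into the ImageReader created by the accessibility service.
        DisplayMetrics metrics = getResources().getDisplayMetrics();
        ImageReader reader = MyAccessibilityService.getImageReader(); // hypothetical accessor
        projection.createVirtualDisplay("screenshot",
                metrics.widthPixels, metrics.heightPixels, metrics.densityDpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                reader.getSurface(), null, null);
    }
}

Note that on Android 10 and later, using the projection generally also requires the app to be running a foreground service declared with foregroundServiceType="mediaProjection"; that part is omitted here to keep the sketch short.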