This is Lecture 41 of the Android Camera series. In this lecture we walk through a hands-on Touch AE implementation with the Android Camera2 API.
Checking whether AE Regions can be set

Query CameraCharacteristics.CONTROL_MAX_REGIONS_AE, which reports the maximum number of AE metering regions the device supports; a value greater than 0 means AE regions can be set:
```java
private void printMaxAERegions(Context context, int cameraId) {
    try {
        String cameraIdS = mCameraManager.getCameraIdList()[cameraId];
        CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraIdS);
        // Maximum number of AE metering regions; 0 means Touch AE regions are not supported.
        Integer regionCount = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        Log.i(TAG, "[Touch AEAF]cameraId:" + cameraIdS + ", printMaxAERegions:" + regionCount);
    } catch (Exception e) {
        Log.e(TAG, "[Touch AEAF] printMaxAERegions failed", e);
    }
}
```
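Building on the same query, a small helper can gate the Touch AE UI. This is a sketch rather than code from the article (the name isTouchAeSupported is ours); a missing value or 0 means AE regions cannot be set:

```java
// Sketch: true if the device reports at least one AE metering region.
private boolean isTouchAeSupported(CameraCharacteristics characteristics) {
    Integer regionCount = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
    return regionCount != null && regionCount > 0;
}
```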
Setting Touch AE from a screen tap (coordinate-system conversion)

Converting the camera coordinate system to the view coordinate system
```java
private void calculateCameraToPreviewMatrix() {
    if( MyDebug.LOG )
        Log.d(TAG, "calculateCameraToPreviewMatrix");
    if( mCameraController == null )
        return;
    camera_to_preview_matrix.reset();
    if( !using_android_l ) {
        // see http://developer.android.com/reference/android/hardware/Camera.Face.html#rect
        // Need mirror for front camera
        boolean mirror = (mCameraController.getFacing() == CameraController.Facing.FACING_FRONT);
        camera_to_preview_matrix.setScale(mirror ? -1 : 1, 1);
        int display_orientation = mCameraController.getDisplayOrientation();
        if( MyDebug.LOG ) {
            Log.d(TAG, "orientation of display relative to camera orientation: " + display_orientation);
        }
        camera_to_preview_matrix.postRotate(display_orientation);
    }
    else {
        // Unfortunately the transformation for Android L API isn't documented, but this seems to work for Nexus 6.
        // This is the equivalent code for android.hardware.Camera.setDisplayOrientation, but we don't actually use setDisplayOrientation()
        // for CameraController2, except testing on Nexus 6 shows that we shouldn't change "result" for front facing camera.
        boolean mirror = (mCameraController.getFacing() == CameraController.Facing.FACING_FRONT);
        camera_to_preview_matrix.setScale(1, mirror ? -1 : 1);
        int degrees = getDisplayRotationDegrees();
        Log.d(TAG, "[Touch AEAF] view degrees:" + degrees);
        int result = (mCameraController.getCameraOrientation() - degrees + 360) % 360;
        if( MyDebug.LOG ) {
            Log.d(TAG, "orientation of display relative to natural orientation: " + degrees);
            Log.d(TAG, "orientation of display relative to camera orientation: " + result);
        }
        camera_to_preview_matrix.postRotate(result);
    }
    // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
    // UI coordinates range from (0, 0) to (width, height).
    camera_to_preview_matrix.postScale(mCameraSurface.getView().getWidth() / 2000f, mCameraSurface.getView().getHeight() / 2000f);
    camera_to_preview_matrix.postTranslate(mCameraSurface.getView().getWidth() / 2f, mCameraSurface.getView().getHeight() / 2f);
}
```
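With camera_to_preview_matrix built, a tap is mapped the other way: invert the matrix to take the view-space touch point into the legacy [-1000, 1000] camera space. The method below is a simplified sketch of that inverse mapping (Open Camera does something similar), where focus_size is the half side length of the metering square in that space:

```java
// Sketch: map a view-space touch point into the [-1000, 1000] camera space
// by inverting camera_to_preview_matrix, then build a clamped square around it.
private Rect getCameraRectFromTouch(float x, float y, int focus_size) {
    calculateCameraToPreviewMatrix();
    Matrix preview_to_camera_matrix = new Matrix();
    if( !camera_to_preview_matrix.invert(preview_to_camera_matrix) ) {
        Log.d(TAG, "[Touch AEAF] camera_to_preview_matrix is not invertible");
    }
    float[] coords = { x, y };
    preview_to_camera_matrix.mapPoints(coords);
    // Keep the square inside the [-1000, 1000] range.
    int cx = Math.max(-1000 + focus_size, Math.min(1000 - focus_size, (int) coords[0]));
    int cy = Math.max(-1000 + focus_size, Math.min(1000 - focus_size, (int) coords[1]));
    // The result feeds convertRectToCamera2() below, which maps it into the crop region.
    return new Rect(cx - focus_size, cy - focus_size, cx + focus_size, cy + focus_size);
}
```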
Converting Camera API coordinates to Camera API2 coordinates
```java
private Rect convertRectToCamera2(Rect crop_rect, Rect rect) {
    // CameraController.Area is always [-1000, -1000] to [1000, 1000] for the viewable region
    // but for CameraController2, we must convert to be relative to the crop region
    double left_f = (rect.left+1000)/2000.0;
    double top_f = (rect.top+1000)/2000.0;
    double right_f = (rect.right+1000)/2000.0;
    double bottom_f = (rect.bottom+1000)/2000.0;
    int left = (int)(crop_rect.left + left_f * (crop_rect.width()-1));
    int right = (int)(crop_rect.left + right_f * (crop_rect.width()-1));
    int top = (int)(crop_rect.top + top_f * (crop_rect.height()-1));
    int bottom = (int)(crop_rect.top + bottom_f * (crop_rect.height()-1));
    left = Math.max(left, crop_rect.left);
    right = Math.max(right, crop_rect.left);
    top = Math.max(top, crop_rect.top);
    bottom = Math.max(bottom, crop_rect.top);
    left = Math.min(left, crop_rect.right);
    right = Math.min(right, crop_rect.right);
    top = Math.min(top, crop_rect.bottom);
    bottom = Math.min(bottom, crop_rect.bottom);
    Log.i(TAG, "[Touch AEAF] convertRectToCamera2 crop_rect:" + crop_rect +
            ", rect:" + rect +
            ", result:" + new Rect(left, top, right, bottom));
    return new Rect(left, top, right, bottom);
}
```
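The converted rect is then wrapped in a MeteringRectangle and pushed to the repeating preview request. A minimal sketch, assuming mPreviewRequestBuilder, mCaptureSession, mCaptureCallback and mBackgroundHandler fields that are not part of the code above:

```java
// Sketch: apply the converted sensor-space rect as the AE metering region.
// crop_rect is typically the current SCALER_CROP_REGION (or the full active array when not zoomed).
private void applyAeRegion(Rect crop_rect, Rect camera_rect) throws CameraAccessException {
    Rect sensor_rect = convertRectToCamera2(crop_rect, camera_rect);
    MeteringRectangle[] aeRegions = {
            new MeteringRectangle(sensor_rect, MeteringRectangle.METERING_WEIGHT_MAX)
    };
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
    mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
            mCaptureCallback, mBackgroundHandler);
}
```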
Converting view coordinates to Camera API2 coordinates
```java
/**
 * Given (nx, ny) \in [0, 1]^2, in the display's portrait coordinate system,
 * returns the corresponding normalized sensor coordinates \in [0, 1]^2,
 * depending on the sensor's orientation \in {0, 90, 180, 270}.
 * <p>
 * Returns null if sensorOrientation is not one of the above.
 * </p>
 */
public static PointF normalizedSensorCoordsForNormalizedDisplayCoords(
        float nx, float ny, int sensorOrientation) {
    switch (sensorOrientation) {
        case 0:
            return new PointF(nx, ny);
        case 90:
            return new PointF(ny, 1.0f - nx);
        case 180:
            return new PointF(1.0f - nx, 1.0f - ny);
        case 270:
            return new PointF(1.0f - ny, nx);
        default:
            return null;
    }
}
```
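The regionsForNormalizedCoord method that follows also calls CameraUtil.clamp; a minimal sketch of that helper with the semantics its call sites imply:

```java
// Sketch of the CameraUtil.clamp helper used below: constrain value to [min, max].
public static int clamp(int value, int min, int max) {
    return Math.max(min, Math.min(max, value));
}
```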
```java
/** Compute 3A regions for a sensor-referenced touch coordinate.
 * Returns a MeteringRectangle[] with length 1.
 *
 * @param nx x coordinate of the touch point, in normalized portrait coordinates.
 * @param ny y coordinate of the touch point, in normalized portrait coordinates.
 * @param fraction Fraction in [0,1]. Multiplied by min(cropRegion.width(), cropRegion.height())
 *                 to determine the side length of the square MeteringRectangle.
 * @param cropRegion Crop region of the image.
 * @param sensorOrientation sensor orientation as defined by
 *                          CameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION).
 */
private static MeteringRectangle[] regionsForNormalizedCoord(float nx, float ny,
        float fraction, final Rect cropRegion, int sensorOrientation) {
    // Compute half side length in pixels.
    int minCropEdge = Math.min(cropRegion.width(), cropRegion.height());
    int halfSideLength = (int) (0.5f * fraction * minCropEdge);
    // Compute the output MeteringRectangle in sensor space.
    // nx, ny is normalized to the screen.
    // Crop region itself is specified in sensor coordinates.
    // Normalized coordinates, now rotated into sensor space.
    PointF nsc = CameraUtil.normalizedSensorCoordsForNormalizedDisplayCoords(
            nx, ny, sensorOrientation);
    int xCenterSensor = (int)(cropRegion.left + nsc.x * cropRegion.width());
    int yCenterSensor = (int)(cropRegion.top + nsc.y * cropRegion.height());
    Rect meteringRegion = new Rect(xCenterSensor - halfSideLength,
            yCenterSensor - halfSideLength,
            xCenterSensor + halfSideLength,
            yCenterSensor + halfSideLength);
    // Clamp meteringRegion to cropRegion.
    meteringRegion.left = CameraUtil.clamp(meteringRegion.left, cropRegion.left, cropRegion.right);
    meteringRegion.top = CameraUtil.clamp(meteringRegion.top, cropRegion.top, cropRegion.bottom);
    meteringRegion.right = CameraUtil.clamp(meteringRegion.right, cropRegion.left, cropRegion.right);
    meteringRegion.bottom = CameraUtil.clamp(meteringRegion.bottom, cropRegion.top, cropRegion.bottom);
    return new MeteringRectangle[]{new MeteringRectangle(meteringRegion, CAMERA2_REGION_WEIGHT)};
}
```
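Tying it together, a touch listener on the preview can normalize the tap position, build the regions, and push them to the repeating request. The sketch below uses assumed field names (mTextureView, mCropRegion, mPreviewRequestBuilder, mCaptureSession, mCaptureCallback, mBackgroundHandler) and an arbitrary 0.1f metering fraction, and assumes a portrait preview:

```java
// Sketch: turn a tap on a portrait preview into CONTROL_AE_REGIONS.
// All m* fields and the metering fraction are assumptions, not code from the article.
private static final float AE_REGION_FRACTION = 0.1f;

private void onPreviewTouched(MotionEvent event, int sensorOrientation) {
    // Normalize the touch point to [0, 1] in the preview view's (portrait) coordinate system.
    float nx = event.getX() / mTextureView.getWidth();
    float ny = event.getY() / mTextureView.getHeight();
    MeteringRectangle[] aeRegions = regionsForNormalizedCoord(
            nx, ny, AE_REGION_FRACTION, mCropRegion, sensorOrientation);
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
    try {
        mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
                mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        Log.e(TAG, "[Touch AEAF] failed to update AE regions", e);
    }
}
```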