am 0ed8c93c: am 00fb27dd: Merge "Camera2: add metadata override" into klp-dev

* commit '0ed8c93cffec6b4d0c1e86599365074b1b2c3f1f':
  Camera2: add metadata override
This commit is contained in:
Rachad Alao
2013-10-07 13:46:05 -07:00
committed by Android Git Automerger
4 changed files with 321 additions and 58 deletions

View File

@@ -60,11 +60,6 @@ public final class CaptureResult extends CameraMetadata {
// Looks up a capture-result value by key. Delegates to the backing metadata
// map, except for the face-statistics key, which is not implemented yet and
// is answered with null rather than an exception.
@Override
public <T> T get(Key<T> key) {
if (key == STATISTICS_FACES) { // Don't throw IllegalArgumentException
// TODO: Implement android.statistics.faces
return null;
}
return mResults.get(key);
}

View File

@@ -58,6 +58,9 @@ public final class Face {
* Create a new face with all fields set.
*
* <p>The id, leftEyePosition, rightEyePosition, and mouthPosition are considered optional.
* They are only required when the {@link CaptureResult} reports that the value of key
* {@link CaptureResult#STATISTICS_FACE_DETECT_MODE} is
* {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_FULL}.
* If the id is {@value #ID_UNSUPPORTED} then the leftEyePosition, rightEyePosition, and
* mouthPositions are guaranteed to be {@code null}. Otherwise, each of leftEyePosition,
* rightEyePosition, and mouthPosition may be independently null or not-null.</p>
@@ -107,7 +110,11 @@ public final class Face {
* <p>The id, leftEyePosition, rightEyePosition, and mouthPosition are considered optional.
* If the id is {@value #ID_UNSUPPORTED} then the leftEyePosition, rightEyePosition, and
* mouthPositions are guaranteed to be {@code null}. Otherwise, each of leftEyePosition,
* rightEyePosition, and mouthPosition may be independently null or not-null.</p>
* rightEyePosition, and mouthPosition may be independently null or not-null. When devices
* report the value of key {@link CaptureResult#STATISTICS_FACE_DETECT_MODE} as
* {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_SIMPLE} in {@link CaptureResult},
* the face id of each face is expected to be {@value #ID_UNSUPPORTED}, the leftEyePosition,
* rightEyePosition, and mouthPositions are expected to be {@code null} for each face.</p>
*
* @param bounds Bounds of the face.
* @param score Confidence level between {@value #SCORE_MIN}-{@value #SCORE_MAX}.
@@ -168,7 +175,10 @@ public final class Face {
* <p>This is an optional field, may not be supported on all devices.
* If the id is {@value #ID_UNSUPPORTED} then the leftEyePosition, rightEyePosition, and
* mouthPositions are guaranteed to be {@code null}. Otherwise, each of leftEyePosition,
* rightEyePosition, and mouthPosition may be independently null or not-null.</p>
* rightEyePosition, and mouthPosition may be independently null or not-null. When devices
* report the value of key {@link CaptureResult#STATISTICS_FACE_DETECT_MODE} as
* {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_SIMPLE} in {@link CaptureResult},
* the face id of each face is expected to be {@value #ID_UNSUPPORTED}.</p>
*
* <p>This value will either be {@value #ID_UNSUPPORTED} or
* otherwise greater than {@code 0}.</p>
@@ -219,7 +229,7 @@ public final class Face {
* field, may not be supported on all devices. If not
* supported, the value will always be set to null.
* This value will always be null only if {@link #getId()} returns
* {@value #ID_UNSUPPORTED}.</p>
* {@value #ID_UNSUPPORTED}.</p>
* </p>
*
* @return The mouth position, or {@code null} if unknown.

View File

@@ -16,7 +16,13 @@
package android.hardware.camera2.impl;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.Face;
import android.hardware.camera2.Rational;
import android.os.Parcelable;
import android.os.Parcel;
@@ -36,6 +42,8 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
private static final String TAG = "CameraMetadataJV";
private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
// this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
private static final int NATIVE_JPEG_FORMAT = 0x21;
public CameraMetadataNative() {
super();
@@ -84,16 +92,12 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
@SuppressWarnings("unchecked")
@Override
public <T> T get(Key<T> key) {
int tag = key.getTag();
byte[] values = readValues(tag);
if (values == null) {
return null;
T value = getOverride(key);
if (value != null) {
return value;
}
int nativeType = getNativeType(tag);
ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
return unpackSingle(buffer, key.getType(), nativeType);
return getBase(key);
}
public void readFromParcel(Parcel in) {
@@ -110,24 +114,11 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
* type to the key.
*/
public <T> void set(Key<T> key, T value) {
int tag = key.getTag();
if (value == null) {
writeValues(tag, null);
if (setOverride(key, value)) {
return;
}
int nativeType = getNativeType(tag);
int size = packSingle(value, null, key.getType(), nativeType, /* sizeOnly */true);
// TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
byte[] values = new byte[size];
ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
packSingle(value, buffer, key.getType(), nativeType, /*sizeOnly*/false);
writeValues(tag, values);
setBase(key, value);
}
// Keep up-to-date with camera_metadata.h
@@ -435,6 +426,157 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
return (T) array;
}
// Reads the raw native metadata entry for {@code key}: fetches the packed
// bytes stored under the key's tag and unpacks them into the key's managed
// type. Returns null when the tag has no entry.
private <T> T getBase(Key<T> key) {
int tag = key.getTag();
byte[] values = readValues(tag);
if (values == null) {
// No entry for this tag in the native metadata.
return null;
}
int nativeType = getNativeType(tag);
// Native metadata is packed in the platform's native byte order.
ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
return unpackSingle(buffer, key.getType(), nativeType);
}
// Some metadata keys have different definitions on the native and managed
// sides; this intercepts get() for those keys and returns the translated
// managed value instead of the raw native one.
@SuppressWarnings("unchecked")
private <T> T getOverride(Key<T> key) {
if (key == CameraCharacteristics.SCALER_AVAILABLE_FORMATS) {
// The native JPEG constant differs from ImageFormat.JPEG.
return (T) getAvailableFormats();
} else if (key == CaptureResult.STATISTICS_FACES) {
// Faces are assembled from several separate native entries.
return (T) getFaces();
}
// For other keys, get() falls back to getBase()
return null;
}
/**
 * Returns android.scaler.availableFormats with native-only constants
 * translated to their managed equivalents.
 *
 * <p>The native side stores JPEG as HAL_PIXEL_FORMAT_BLOB
 * ({@code NATIVE_JPEG_FORMAT}, 0x21); the managed API exposes it as
 * {@link ImageFormat#JPEG}.</p>
 *
 * @return the translated format array, or {@code null} if the entry is
 *         absent from the native metadata (matches getBase()'s contract).
 */
private int[] getAvailableFormats() {
int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
if (availableFormats == null) {
// Entry not present; return null instead of throwing
// NullPointerException in the loop below.
return null;
}
for (int i = 0; i < availableFormats.length; i++) {
// JPEG has different value between native and managed side, need override.
if (availableFormats[i] == NATIVE_JPEG_FORMAT) {
availableFormats[i] = ImageFormat.JPEG;
}
}
return availableFormats;
}
/**
 * Assembles {@link Face} objects from the individual native face-statistics
 * entries (detect mode, scores, rectangles, ids, landmarks).
 *
 * @return an empty array when face detection is OFF; otherwise one Face per
 *         detected face. SIMPLE mode produces faces without ids/landmarks.
 * @throws AssertionError if required entries are missing or their sizes are
 *         inconsistent with each other.
 */
private Face[] getFaces() {
final int FACE_LANDMARK_SIZE = 6; // three (x, y) points per face
Integer faceDetectMode = getBase(CaptureResult.STATISTICS_FACE_DETECT_MODE);
if (faceDetectMode == null) {
throw new AssertionError("Expect non-null face detect mode");
}
if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
return new Face[0];
}
if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE &&
faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
throw new AssertionError("Unknown face detect mode: " + faceDetectMode);
}
// Face scores and rectangles are required by SIMPLE and FULL mode.
byte[] faceScores = getBase(CaptureResult.STATISTICS_FACE_SCORES);
Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
if (faceScores == null || faceRectangles == null) {
throw new AssertionError("Expect face scores and rectangles to be non-null");
} else if (faceScores.length != faceRectangles.length) {
// Fixed typo: "doesn match" -> "doesn't match".
throw new AssertionError(
String.format("Face score size(%d) doesn't match face rectangle size(%d)!",
faceScores.length, faceRectangles.length));
}
// Face id and landmarks are only required by FULL mode.
int[] faceIds = getBase(CaptureResult.STATISTICS_FACE_IDS);
int[] faceLandmarks = getBase(CaptureResult.STATISTICS_FACE_LANDMARKS);
int numFaces = faceScores.length;
if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
if (faceIds == null || faceLandmarks == null) {
throw new AssertionError("Expect face ids and landmarks to be non-null for " +
"FULL mode");
} else if (faceIds.length != numFaces ||
faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
// Report the actual landmark array length; the previous message
// multiplied it by FACE_LANDMARK_SIZE, printing a bogus size.
throw new AssertionError(
String.format("Face id size(%d), or face landmark size(%d) don't match " +
"face number(%d)!",
faceIds.length, faceLandmarks.length,
numFaces));
}
}
Face[] faces = new Face[numFaces];
if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
// SIMPLE mode: no ids or landmarks are available.
for (int i = 0; i < numFaces; i++) {
faces[i] = new Face(faceRectangles[i], faceScores[i]);
}
} else {
// CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
for (int i = 0; i < numFaces; i++) {
// Landmarks are laid out as [leftX, leftY, rightX, rightY, mouthX, mouthY].
int offset = i * FACE_LANDMARK_SIZE;
Point leftEye = new Point(faceLandmarks[offset], faceLandmarks[offset + 1]);
Point rightEye = new Point(faceLandmarks[offset + 2], faceLandmarks[offset + 3]);
Point mouth = new Point(faceLandmarks[offset + 4], faceLandmarks[offset + 5]);
faces[i] = new Face(faceRectangles[i], faceScores[i], faceIds[i],
leftEye, rightEye, mouth);
}
}
return faces;
}
// Packs a managed value into the native byte layout and stores it under the
// key's tag; a null value erases the entry.
private <T> void setBase(Key<T> key, T value) {
int tag = key.getTag();
if (value == null) {
// Writing null removes the entry from the native metadata.
writeValues(tag, null);
return;
}
int nativeType = getNativeType(tag);
// First pass computes only the packed size, so the destination array can
// be allocated exactly; the second pass does the actual packing.
int size = packSingle(value, null, key.getType(), nativeType, /* sizeOnly */true);
// TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
byte[] values = new byte[size];
ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
packSingle(value, buffer, key.getType(), nativeType, /*sizeOnly*/false);
writeValues(tag, values);
}
// Intercepts set() for keys whose managed definition differs from the native
// one. Returns true when the write was fully handled here, so set() can skip
// setBase().
private <T> boolean setOverride(Key<T> key, T value) {
if (key == CameraCharacteristics.SCALER_AVAILABLE_FORMATS) {
// ImageFormat.JPEG must be translated back to the native BLOB constant.
return setAvailableFormats((int[]) value);
}
// For other keys, set() falls back to setBase().
return false;
}
// Writes android.scaler.availableFormats, translating the managed
// ImageFormat.JPEG constant back to the native BLOB constant first.
// Returns true when the write was handled here; false defers to setBase().
private boolean setAvailableFormats(int[] value) {
if (value == null) {
// Let setBase() handle the null (erase) case.
return false;
}
int[] nativeFormats = new int[value.length];
for (int i = 0; i < value.length; i++) {
// JPEG is the only format whose constant differs on the native side.
nativeFormats[i] = (value[i] == ImageFormat.JPEG) ? NATIVE_JPEG_FORMAT : value[i];
}
setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, nativeFormats);
return true;
}
private long mMetadataPtr; // native CameraMetadata*
private native long nativeAllocate();
@@ -538,7 +680,7 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
* @hide
*/
public byte[] readValues(int tag) {
// Thin wrapper over the native read; returns the packed bytes for the tag.
// TODO: Optimization. Native code returns a ByteBuffer instead.
return nativeReadValues(tag);
}

View File

@@ -18,9 +18,11 @@ package com.android.mediaframeworktest.unit;
import android.os.Parcel;
import android.test.suitebuilder.annotation.SmallTest;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.Face;
import android.hardware.camera2.Rational;
import android.hardware.camera2.Size;
import android.hardware.camera2.impl.CameraMetadataNative;
@@ -30,9 +32,6 @@ import static android.hardware.camera2.impl.CameraMetadataNative.*;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import static org.junit.Assert.assertArrayEquals;
/**
* <pre>
@@ -57,6 +56,7 @@ public class CameraMetadataTest extends junit.framework.TestCase {
// Tags
static final int ANDROID_COLOR_CORRECTION_MODE = ANDROID_COLOR_CORRECTION_START;
static final int ANDROID_COLOR_CORRECTION_TRANSFORM = ANDROID_COLOR_CORRECTION_START + 1;
static final int ANDROID_COLOR_CORRECTION_GAINS = ANDROID_COLOR_CORRECTION_START + 2;
static final int ANDROID_CONTROL_AE_ANTIBANDING_MODE = ANDROID_CONTROL_START;
static final int ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION = ANDROID_CONTROL_START + 1;
@@ -131,7 +131,8 @@ public class CameraMetadataTest extends junit.framework.TestCase {
@SmallTest
public void testGetTypeFromTag() {
assertEquals(TYPE_BYTE, CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_MODE));
assertEquals(TYPE_FLOAT, CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_TRANSFORM));
assertEquals(TYPE_RATIONAL, CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_TRANSFORM));
assertEquals(TYPE_FLOAT, CameraMetadataNative.getNativeType(ANDROID_COLOR_CORRECTION_GAINS));
assertEquals(TYPE_BYTE, CameraMetadataNative.getNativeType(ANDROID_CONTROL_AE_ANTIBANDING_MODE));
assertEquals(TYPE_INT32,
CameraMetadataNative.getNativeType(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION));
@@ -187,30 +188,30 @@ public class CameraMetadataTest extends junit.framework.TestCase {
assertEquals(false, mMetadata.isEmpty());
//
// android.colorCorrection.transform (3x3 matrix)
// android.colorCorrection.colorCorrectionGains (float x 4 array)
//
final float[] transformMatrix = new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
byte[] transformMatrixAsByteArray = new byte[transformMatrix.length * 4];
ByteBuffer transformMatrixByteBuffer =
ByteBuffer.wrap(transformMatrixAsByteArray).order(ByteOrder.nativeOrder());
for (float f : transformMatrix)
transformMatrixByteBuffer.putFloat(f);
final float[] colorCorrectionGains = new float[] { 1.0f, 2.0f, 3.0f, 4.0f};
byte[] colorCorrectionGainsAsByteArray = new byte[colorCorrectionGains.length * 4];
ByteBuffer colorCorrectionGainsByteBuffer =
ByteBuffer.wrap(colorCorrectionGainsAsByteArray).order(ByteOrder.nativeOrder());
for (float f : colorCorrectionGains)
colorCorrectionGainsByteBuffer.putFloat(f);
// Read
assertNull(mMetadata.readValues(ANDROID_COLOR_CORRECTION_TRANSFORM));
mMetadata.writeValues(ANDROID_COLOR_CORRECTION_TRANSFORM, transformMatrixAsByteArray);
assertNull(mMetadata.readValues(ANDROID_COLOR_CORRECTION_GAINS));
mMetadata.writeValues(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGainsAsByteArray);
// Write
assertArrayEquals(transformMatrixAsByteArray,
mMetadata.readValues(ANDROID_COLOR_CORRECTION_TRANSFORM));
assertArrayEquals(colorCorrectionGainsAsByteArray,
mMetadata.readValues(ANDROID_COLOR_CORRECTION_GAINS));
assertEquals(2, mMetadata.getEntryCount());
assertEquals(false, mMetadata.isEmpty());
// Erase
mMetadata.writeValues(ANDROID_COLOR_CORRECTION_TRANSFORM, null);
assertNull(mMetadata.readValues(ANDROID_COLOR_CORRECTION_TRANSFORM));
mMetadata.writeValues(ANDROID_COLOR_CORRECTION_GAINS, null);
assertNull(mMetadata.readValues(ANDROID_COLOR_CORRECTION_GAINS));
assertEquals(1, mMetadata.getEntryCount());
}
@@ -279,7 +280,7 @@ public class CameraMetadataTest extends junit.framework.TestCase {
@SmallTest
public void testReadWritePrimitiveArray() {
// int32 (n)
checkKeyGetAndSetArray("android.sensor.info.availableSensitivities", int[].class,
checkKeyGetAndSetArray("android.sensor.info.sensitivityRange", int[].class,
new int[] {
0xC0FFEE, 0xDEADF00D
});
@@ -379,7 +380,9 @@ public class CameraMetadataTest extends junit.framework.TestCase {
new AvailableFormat[] {
AvailableFormat.RAW_SENSOR,
AvailableFormat.YV12,
AvailableFormat.IMPLEMENTATION_DEFINED
AvailableFormat.IMPLEMENTATION_DEFINED,
AvailableFormat.YCbCr_420_888,
AvailableFormat.BLOB
});
}
@@ -431,12 +434,13 @@ public class CameraMetadataTest extends junit.framework.TestCase {
AvailableFormat.RAW_SENSOR,
AvailableFormat.YV12,
AvailableFormat.IMPLEMENTATION_DEFINED,
AvailableFormat.YCbCr_420_888
AvailableFormat.YCbCr_420_888,
AvailableFormat.BLOB
});
Key<AeAntibandingMode> availableFormatsKey =
new Key<AeAntibandingMode>("android.scaler.availableFormats",
AeAntibandingMode.class);
Key<AvailableFormat[]> availableFormatsKey =
new Key<AvailableFormat[]>("android.scaler.availableFormats",
AvailableFormat[].class);
byte[] availableFormatValues = mMetadata.readValues(CameraMetadataNative
.getTag(availableFormatsKey.getName()));
@@ -444,7 +448,8 @@ public class CameraMetadataTest extends junit.framework.TestCase {
0x20,
0x32315659,
0x22,
0x23
0x23,
0x21
};
ByteBuffer bf = ByteBuffer.wrap(availableFormatValues).order(ByteOrder.nativeOrder());
@@ -523,4 +528,115 @@ public class CameraMetadataTest extends junit.framework.TestCase {
<T> void compareGeneric(T expected, T actual) {
assertEquals(expected, actual);
}
// Exercises the managed<->native override paths: format-constant translation
// for android.scaler.availableFormats and Face assembly for
// android.statistics.faces in both FULL and SIMPLE detect modes.
@SmallTest
public void testReadWriteOverride() {
//
// android.scaler.availableFormats (int x n array)
//
int[] availableFormats = new int[] {
0x20, // RAW_SENSOR
0x32315659, // YV12
0x11, // YCrCb_420_SP
0x100, // ImageFormat.JPEG
0x22, // IMPLEMENTATION_DEFINED
0x23, // YCbCr_420_888
};
// Same list after the managed->native override: JPEG (0x100) becomes BLOB (0x21).
int[] expectedIntValues = new int[] {
0x20, // RAW_SENSOR
0x32315659, // YV12
0x11, // YCrCb_420_SP
0x21, // BLOB
0x22, // IMPLEMENTATION_DEFINED
0x23, // YCbCr_420_888
};
int availableFormatTag = CameraMetadataNative.getTag("android.scaler.availableFormats");
// Write
// set() must store the native constants, not the managed ones.
mMetadata.set(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, availableFormats);
byte[] availableFormatValues = mMetadata.readValues(availableFormatTag);
ByteBuffer bf = ByteBuffer.wrap(availableFormatValues).order(ByteOrder.nativeOrder());
assertEquals(expectedIntValues.length * 4, availableFormatValues.length);
for (int i = 0; i < expectedIntValues.length; ++i) {
assertEquals(expectedIntValues[i], bf.getInt());
}
// Read
// Store native constants directly; get() must hand back managed constants.
byte[] availableFormatsAsByteArray = new byte[expectedIntValues.length * 4];
ByteBuffer availableFormatsByteBuffer =
ByteBuffer.wrap(availableFormatsAsByteArray).order(ByteOrder.nativeOrder());
for (int value : expectedIntValues) {
availableFormatsByteBuffer.putInt(value);
}
mMetadata.writeValues(availableFormatTag, availableFormatsAsByteArray);
int[] resultFormats = mMetadata.get(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
assertNotNull("result available formats shouldn't be null", resultFormats);
assertArrayEquals(availableFormats, resultFormats);
//
// android.statistics.faces (Face x n array)
//
int[] expectedFaceIds = new int[] {1, 2, 3, 4, 5};
byte[] expectedFaceScores = new byte[] {10, 20, 30, 40, 50};
int numFaces = expectedFaceIds.length;
Rect[] expectedRects = new Rect[numFaces];
for (int i = 0; i < numFaces; i++) {
expectedRects[i] = new Rect(i*4 + 1, i * 4 + 2, i * 4 + 3, i * 4 + 4);
}
// Six ints per face: leftEye(x,y), rightEye(x,y), mouth(x,y).
int[] expectedFaceLM = new int[] {
1, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24,
25, 26, 27, 28, 29, 30,
};
Point[] expectedFaceLMPoints = new Point[numFaces * 3];
for (int i = 0; i < numFaces; i++) {
expectedFaceLMPoints[i*3] = new Point(expectedFaceLM[i*6], expectedFaceLM[i*6+1]);
expectedFaceLMPoints[i*3+1] = new Point(expectedFaceLM[i*6+2], expectedFaceLM[i*6+3]);
expectedFaceLMPoints[i*3+2] = new Point(expectedFaceLM[i*6+4], expectedFaceLM[i*6+5]);
}
/**
 * Read - FACE_DETECT_MODE == FULL
 */
mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE,
CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL);
mMetadata.set(CaptureResult.STATISTICS_FACE_IDS, expectedFaceIds);
mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
mMetadata.set(CaptureResult.STATISTICS_FACE_LANDMARKS, expectedFaceLM);
// FULL mode must populate ids and all three landmark points.
Face[] resultFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
assertEquals(numFaces, resultFaces.length);
for (int i = 0; i < numFaces; i++) {
assertEquals(expectedFaceIds[i], resultFaces[i].getId());
assertEquals(expectedFaceScores[i], resultFaces[i].getScore());
assertEquals(expectedRects[i], resultFaces[i].getBounds());
assertEquals(expectedFaceLMPoints[i*3], resultFaces[i].getLeftEyePosition());
assertEquals(expectedFaceLMPoints[i*3+1], resultFaces[i].getRightEyePosition());
assertEquals(expectedFaceLMPoints[i*3+2], resultFaces[i].getMouthPosition());
}
/**
 * Read - FACE_DETECT_MODE == SIMPLE
 */
mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE,
CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE);
mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
// SIMPLE mode must report ID_UNSUPPORTED and null landmark points.
Face[] resultSimpleFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
assertEquals(numFaces, resultSimpleFaces.length);
for (int i = 0; i < numFaces; i++) {
assertEquals(Face.ID_UNSUPPORTED, resultSimpleFaces[i].getId());
assertEquals(expectedFaceScores[i], resultSimpleFaces[i].getScore());
assertEquals(expectedRects[i], resultSimpleFaces[i].getBounds());
assertNull(resultSimpleFaces[i].getLeftEyePosition());
assertNull(resultSimpleFaces[i].getRightEyePosition());
assertNull(resultSimpleFaces[i].getMouthPosition());
}
}
}