詹子聪 5 years ago
parent
commit
d619b15caa
100 changed files with 17993 additions and 1 deletion
  1. + 3 - 0  app/build.gradle
  2. + 6 - 1  build.gradle
  3. + 125 - 0  cameraview/build.gradle.kts
  4. + 17 - 0  cameraview/src/androidTest/AndroidManifest.xml
  5. + 36 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseEglTest.java
  6. + 117 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java
  7. + 138 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraLoggerTest.java
  8. + 191 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java
  9. + 271 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java
  10. + 1105 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
  11. + 55 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/PictureResultTest.java
  12. + 76 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/TestActivity.java
  13. + 146 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoResultTest.java
  14. + 38 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera1IntegrationTest.java
  15. + 77 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java
  16. + 1227 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
  17. + 214 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
  18. + 67 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/mappers/Camera1MapperTest.java
  19. + 116 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/mappers/Camera2MapperTest.java
  20. + 373 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/options/Camera1OptionsTest.java
  21. + 172 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/BaseFilterTest.java
  22. + 52 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/FilterParserTest.java
  23. + 28 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/FiltersTest.java
  24. + 212 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/MultiFilterTest.java
  25. + 34 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/NoFilterTest.java
  26. + 36 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/SimpleFilterTest.java
  27. + 112 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java
  28. + 70 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java
  29. + 90 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/GestureFinderTest.java
  30. + 71 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/PinchGestureFinderTest.java
  31. + 98 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/ScrollGestureFinderTest.java
  32. + 87 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/TapGestureFinderTest.java
  33. + 60 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/CamcorderProfilesTest.java
  34. + 52 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/CropHelperTest.java
  35. + 219 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/DeviceEncodersTest.java
  36. + 97 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/GridLinesLayoutTest.java
  37. + 112 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/OrientationHelperTest.java
  38. + 52 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/RotationHelperTest.java
  39. + 245 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/WorkerHandlerTest.java
  40. + 100 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/DefaultAutoFocusMarkerTest.java
  41. + 128 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerLayoutTest.java
  42. + 66 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerParserTest.java
  43. + 123 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/metering/MeteringRegionsTest.java
  44. + 76 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java
  45. + 174 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayLayoutTest.java
  46. + 43 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/picture/PictureRecorderTest.java
  47. + 198 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/CameraPreviewTest.java
  48. + 43 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/GlCameraPreviewTest.java
  49. + 62 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java
  50. + 30 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreviewTest.java
  51. + 54 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/TextureCameraPreviewTest.java
  52. + 211 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/size/SizeSelectorParserTest.java
  53. + 12 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Emulator.java
  54. + 148 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Op.java
  55. + 9 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Retry.java
  56. + 50 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/RetryRule.java
  57. + 20 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExclude.java
  58. + 47 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExcludeFilter.java
  59. + 20 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkInclude.java
  60. + 47 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkIncludeFilter.java
  61. + 52 - 0  cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
  62. + 4 - 0  cameraview/src/androidTest/res/layout/not_overlay.xml
  63. + 8 - 0  cameraview/src/androidTest/res/layout/overlay.xml
  64. + 28 - 0  cameraview/src/main/AndroidManifest.xml
  65. + 22 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/BitmapCallback.java
  66. + 91 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/CameraException.java
  67. + 165 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java
  68. + 203 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/CameraLogger.java
  69. + 313 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java
  70. + 361 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java
  71. + 2706 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
  72. + 23 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/FileCallback.java
  73. + 175 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/PictureResult.java
  74. + 256 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/VideoResult.java
  75. + 59 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Audio.java
  76. + 63 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/AudioCodec.java
  77. + 8 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Control.java
  78. + 108 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/ControlParser.java
  79. + 48 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Engine.java
  80. + 64 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Facing.java
  81. + 69 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Flash.java
  82. + 59 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Grid.java
  83. + 47 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Hdr.java
  84. + 59 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Mode.java
  85. + 52 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/PictureFormat.java
  86. + 58 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/Preview.java
  87. + 54 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/VideoCodec.java
  88. + 78 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/controls/WhiteBalance.java
  89. + 940 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
  90. + 1657 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
  91. + 975 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java
  92. + 726 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
  93. + 7 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/EngineThread.java
  94. + 92 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java
  95. + 23 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionCallback.java
  96. + 82 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java
  97. + 67 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java
  98. + 56 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Actions.java
  99. + 177 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java
  100. + 0 - 0  cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/CompletionCallback.java

+ 3 - 0
app/build.gradle

@@ -55,6 +55,7 @@ dependencies {
     implementation fileTree(dir: 'libs', include: ['*.jar'])
     implementation 'androidx.appcompat:appcompat:1.1.0'
     implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+    implementation project(path: ':cameraview')
     testImplementation 'junit:junit:4.12'
     androidTestImplementation 'androidx.test.ext:junit:1.1.1'
     androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
@@ -85,4 +86,6 @@ dependencies {
     // crash capture
     implementation 'cn.yc:ToolLib:1.2.0'
     implementation project(path: ':camera')
+
+    //api 'com.otaliastudios:cameraview:2.6.4'
 }
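
Usage note (not part of this commit): wiring the local ":cameraview" module above gives the app module the same public API as the commented-out published artifact. A minimal sketch of binding the camera to an Activity lifecycle, assuming a layout that declares a com.otaliastudios.cameraview.CameraView; the Activity name, layout, and view id below are illustrative only.

import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import com.otaliastudios.cameraview.CameraView;

public class CameraDemoActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Hypothetical layout containing a <com.otaliastudios.cameraview.CameraView> with id "camera".
        setContentView(R.layout.activity_camera_demo);
        CameraView camera = findViewById(R.id.camera);
        // Let CameraView open and close itself following this Activity's lifecycle.
        camera.setLifecycleOwner(this);
    }
}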

+ 6 - 1
build.gradle

@@ -3,22 +3,27 @@
 buildscript {
     repositories {
         google()
+        mavenCentral()
         jcenter()
         
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.5.2'
+        classpath("com.android.tools.build:gradle:4.0.1")
 
         classpath 'com.jfrog.bintray.gradle:gradle-bintray-plugin:1.6'
         classpath 'com.github.dcendents:android-maven-gradle-plugin:1.4.1'
         // NOTE: Do not place your application dependencies here; they belong
         // in the individual module build.gradle files
+
+        classpath("com.otaliastudios.tools:publisher:0.3.3")
+        classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:1.4.0")
     }
 }
 
 allprojects {
     repositories {
         google()
+        mavenCentral()
         jcenter()
         maven { url 'https://jitpack.io' }
     }

+ 125 - 0
cameraview/build.gradle.kts

@@ -0,0 +1,125 @@
+import com.otaliastudios.tools.publisher.common.License
+import com.otaliastudios.tools.publisher.common.Release
+
+plugins {
+    id("com.android.library")
+    id("kotlin-android")
+    id("com.otaliastudios.tools.publisher")
+    id("jacoco")
+}
+
+android {
+    setCompileSdkVersion(29)
+    defaultConfig {
+        setMinSdkVersion(21)
+        setTargetSdkVersion(29)
+        versionCode = 1
+        versionName = "2.6.4"
+        testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
+        testInstrumentationRunnerArgument("filter", "" +
+                "com.otaliastudios.cameraview.tools.SdkExcludeFilter," +
+                "com.otaliastudios.cameraview.tools.SdkIncludeFilter")
+    }
+    buildTypes["debug"].isTestCoverageEnabled = true
+    buildTypes["release"].isMinifyEnabled = false
+}
+
+dependencies {
+    testImplementation("junit:junit:4.13")
+    testImplementation("org.mockito:mockito-inline:2.28.2")
+
+    androidTestImplementation("androidx.test:runner:1.3.0")
+    androidTestImplementation("androidx.test:rules:1.3.0")
+    androidTestImplementation("androidx.test.ext:junit:1.1.1")
+    androidTestImplementation("org.mockito:mockito-android:2.28.2")
+    androidTestImplementation("androidx.test.espresso:espresso-core:3.2.0")
+
+    api("androidx.exifinterface:exifinterface:1.2.0")
+    api("androidx.lifecycle:lifecycle-common:2.2.0")
+    api("com.google.android.gms:play-services-tasks:17.2.0")
+    implementation("androidx.annotation:annotation:1.1.0")
+    implementation("com.otaliastudios.opengl:egloo:0.5.3")
+}
+
+// Publishing
+
+publisher {
+    project.description = "A well documented, high-level Android interface that makes capturing " +
+            "pictures and videos easy, addressing all of the common issues and needs. " +
+            "Real-time filters, gestures, watermarks, frame processing, RAW, output of any size."
+    project.artifact = "cameraview"
+    project.group = "com.otaliastudios"
+    project.url = "https://github.com/natario1/CameraView"
+    project.addLicense(License.APACHE_2_0)
+    release.setSources(Release.SOURCES_AUTO)
+    release.setDocs(Release.DOCS_AUTO)
+    bintray {
+        auth.user = "BINTRAY_USER"
+        auth.key = "BINTRAY_KEY"
+        auth.repo = "BINTRAY_REPO"
+    }
+    directory {
+        directory = "build/local"
+    }
+}
+
+// Code Coverage
+val buildDir = project.buildDir.absolutePath
+val coverageInputDir = "$buildDir/coverage_input" // changing? change github workflow
+val coverageOutputDir = "$buildDir/coverage_output" // changing? change github workflow
+
+// Run unit tests, with coverage enabled in the android { } configuration.
+// Output will be an .exec file in build/jacoco.
+tasks.register("runUnitTests") { // changing name? change github workflow
+    dependsOn("testDebugUnitTest")
+    doLast {
+        copy {
+            from("$buildDir/jacoco/testDebugUnitTest.exec")
+            into("$coverageInputDir/unit_tests") // changing? change github workflow
+        }
+    }
+}
+
+// Run android tests with coverage.
+tasks.register("runAndroidTests") { // changing name? change github workflow
+    dependsOn("connectedDebugAndroidTest")
+    doLast {
+        copy {
+            from("$buildDir/outputs/code_coverage/debugAndroidTest/connected")
+            include("*coverage.ec")
+            into("$coverageInputDir/android_tests") // changing? change github workflow
+        }
+    }
+}
+
+// Merge the two with a jacoco task.
+jacoco { toolVersion = "0.8.5" }
+tasks.register("computeCoverage", JacocoReport::class) {
+    dependsOn("compileDebugSources") // Compile sources, needed below
+    executionData.from(fileTree(coverageInputDir))
+    sourceDirectories.from(android.sourceSets["main"].java.sourceFiles)
+    additionalSourceDirs.from("$buildDir/generated/source/buildConfig/debug")
+    additionalSourceDirs.from("$buildDir/generated/source/r/debug")
+    classDirectories.from(fileTree("$buildDir/intermediates/javac/debug") {
+        // Not everything here is relevant for CameraView, but let's keep it generic
+        exclude(
+                "**/R.class",
+                "**/R$*.class",
+                "**/BuildConfig.*",
+                "**/Manifest*.*",
+                "android/**",
+                "androidx/**",
+                "com/google/**",
+                "**/*\$ViewInjector*.*",
+                "**/Dagger*Component.class",
+                "**/Dagger*Component\$Builder.class",
+                "**/*Module_*Factory.class",
+                // We don't test OpenGL filters.
+                "**/com/otaliastudios/cameraview/filters/**.*"
+        )
+    })
+    reports.html.isEnabled = true
+    reports.xml.isEnabled = true
+    reports.html.destination = file("$coverageOutputDir/html")
+    reports.xml.destination = file("$coverageOutputDir/xml/report.xml")
+}
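
Usage note: as the comments above indicate, runUnitTests and runAndroidTests copy their JaCoCo execution data into build/coverage_input, and computeCoverage merges it into HTML and XML reports under build/coverage_output. A sketch of how these tasks would typically be driven locally, assuming the standard Gradle wrapper and, for the instrumented run, a connected device or emulator:

./gradlew :cameraview:runUnitTests
./gradlew :cameraview:runAndroidTests
./gradlew :cameraview:computeCoverage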

+ 17 - 0
cameraview/src/androidTest/AndroidManifest.xml

@@ -0,0 +1,17 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          package="com.otaliastudios.cameraview">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.WAKE_LOCK"/>
+    <uses-permission android:name="android.permission.DISABLE_KEYGUARD"/>
+
+    <application>
+        <activity
+            android:configChanges="orientation|screenLayout|keyboardHidden"
+            android:hardwareAccelerated="true"
+            android:name=".TestActivity"/>
+    </application>
+
+</manifest>

+ 36 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseEglTest.java

@@ -0,0 +1,36 @@
+package com.otaliastudios.cameraview;
+
+import android.opengl.EGL14;
+
+import com.otaliastudios.opengl.core.EglCore;
+import com.otaliastudios.opengl.surface.EglOffscreenSurface;
+import com.otaliastudios.opengl.surface.EglSurface;
+
+import org.junit.After;
+import org.junit.Before;
+
+
+@SuppressWarnings("WeakerAccess")
+public abstract class BaseEglTest extends BaseTest {
+
+    protected final static int WIDTH = 100;
+    protected final static int HEIGHT = 100;
+
+    protected EglCore eglCore;
+    protected EglSurface eglSurface;
+
+    @Before
+    public void setUp() {
+        eglCore = new EglCore(EGL14.EGL_NO_CONTEXT, EglCore.FLAG_RECORDABLE);
+        eglSurface = new EglOffscreenSurface(eglCore, WIDTH, HEIGHT);
+        eglSurface.makeCurrent();
+    }
+
+    @After
+    public void tearDown() {
+        eglSurface.release();
+        eglSurface = null;
+        eglCore.release();
+        eglCore = null;
+    }
+}

+ 117 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java

@@ -0,0 +1,117 @@
+package com.otaliastudios.cameraview;
+
+
+import android.app.KeyguardManager;
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.PowerManager;
+
+import androidx.annotation.NonNull;
+import androidx.test.platform.app.InstrumentationRegistry;
+
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.mockito.stubbing.Stubber;
+
+import java.util.concurrent.CountDownLatch;
+
+import static android.content.Context.KEYGUARD_SERVICE;
+import static android.content.Context.POWER_SERVICE;
+import static org.mockito.Mockito.doAnswer;
+
+public class BaseTest {
+
+    private static KeyguardManager.KeyguardLock keyguardLock;
+    private static PowerManager.WakeLock wakeLock;
+
+    // https://github.com/linkedin/test-butler/blob/bc2bb4df13d0a554d2e2b0ea710795017717e710/test-butler-app/src/main/java/com/linkedin/android/testbutler/ButlerService.java#L121
+    @BeforeClass
+    public static void beforeClass_wakeUp() {
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
+
+        // Acquire a keyguard lock to prevent the lock screen from randomly appearing and breaking tests
+        KeyguardManager keyguardManager = (KeyguardManager) getContext().getSystemService(KEYGUARD_SERVICE);
+        keyguardLock = keyguardManager.newKeyguardLock("CameraViewLock");
+        keyguardLock.disableKeyguard();
+
+        // Acquire a wake lock to prevent the cpu from going to sleep and breaking tests
+        PowerManager powerManager = (PowerManager) getContext().getSystemService(POWER_SERVICE);
+        wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK
+                | PowerManager.ACQUIRE_CAUSES_WAKEUP
+                | PowerManager.ON_AFTER_RELEASE, "CameraViewLock");
+        wakeLock.acquire();
+    }
+
+    @AfterClass
+    public static void afterClass_releaseWakeUp() {
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_ERROR);
+
+        wakeLock.release();
+        keyguardLock.reenableKeyguard();
+    }
+
+    /**
+     * This will make mockito report the error when it should.
+     * Mockito reports failure on the next mockito invocation, which is terrible
+     * since it might be on the next test or even never happen.
+     */
+    @After
+    public void after_checkMockito() {
+        Object object = Mockito.mock(Object.class);
+        //noinspection ResultOfMethodCallIgnored
+        object.toString();
+    }
+
+    @NonNull
+    protected static Context getContext() {
+        return InstrumentationRegistry.getInstrumentation().getContext();
+    }
+
+    protected static void uiSync(Runnable runnable) {
+        InstrumentationRegistry.getInstrumentation().runOnMainSync(runnable);
+    }
+
+    @SuppressWarnings("unused")
+    protected static void uiAsync(Runnable runnable) {
+        new Handler(Looper.getMainLooper()).post(runnable);
+    }
+
+    @SuppressWarnings("unused")
+    protected static void waitUiIdle() {
+        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
+    }
+
+    @NonNull
+    protected static Stubber doCountDown(@NonNull final CountDownLatch latch) {
+        return doAnswer(new Answer<Object>() {
+            @Override
+            public Object answer(InvocationOnMock invocation) {
+                latch.countDown();
+                return null;
+            }
+        });
+    }
+
+    @NonNull
+    protected static <T> Stubber doEndOp(final Op<T> op, final T response) {
+        return doAnswer(new Answer<Object>() {
+            @Override
+            public Object answer(InvocationOnMock invocation) {
+                op.controller().end(response);
+                return null;
+            }
+        });
+    }
+
+    @NonNull
+    protected static <T> Stubber doEndOp(final Op<T> op, final int withReturnArgument) {
+        return op.controller().from(withReturnArgument);
+    }
+}
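
The doCountDown() and doEndOp() helpers above wrap Mockito Stubbers so a test can block until an asynchronous callback actually fires. A minimal sketch of the doCountDown() pattern, assuming a hypothetical test class that extends BaseTest and uses the same JUnit 4 and Mockito static imports as the tests below (plus java.util.concurrent.TimeUnit):

@Test
public void sketchWaitForAsyncCallback() throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    CameraListener listener = mock(CameraListener.class);
    // doCountDown() returns a Stubber, so it plugs into the usual doAnswer().when() chain.
    doCountDown(latch).when(listener).onCameraClosed();
    // In a real test the callback is triggered indirectly, often from another thread.
    listener.onCameraClosed();
    assertTrue(latch.await(500, TimeUnit.MILLISECONDS));
}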

+ 138 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraLoggerTest.java

@@ -0,0 +1,138 @@
+package com.otaliastudios.cameraview;
+
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyInt;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class CameraLoggerTest extends BaseTest {
+
+    private String loggerTag = "myLogger";
+    private CameraLogger logger;
+
+    @Before
+    public void setUp() {
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
+        CameraLogger.unregisterLogger(CameraLogger.sAndroidLogger); // Avoid writing into Logs during these tests
+        logger = CameraLogger.create(loggerTag);
+    }
+
+    @After
+    public void tearDown() {
+        CameraLogger.registerLogger(CameraLogger.sAndroidLogger);
+        logger = null;
+    }
+
+    @Test
+    public void testLoggerLevels() {
+        // Verbose
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
+        logger.v("v");
+        assertEquals(CameraLogger.lastMessage, "v");
+        logger.i("i");
+        assertEquals(CameraLogger.lastMessage, "i");
+        logger.w("w");
+        assertEquals(CameraLogger.lastMessage, "w");
+        logger.e("e");
+        assertEquals(CameraLogger.lastMessage, "e");
+
+        // Info
+        CameraLogger.lastMessage = null;
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_INFO);
+        logger.v("v");
+        assertNull(CameraLogger.lastMessage);
+        logger.i("i");
+        assertEquals(CameraLogger.lastMessage, "i");
+        logger.w("w");
+        assertEquals(CameraLogger.lastMessage, "w");
+        logger.e("e");
+        assertEquals(CameraLogger.lastMessage, "e");
+
+        // Warning
+        CameraLogger.lastMessage = null;
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_WARNING);
+        logger.v("v");
+        assertNull(CameraLogger.lastMessage);
+        logger.i("i");
+        assertNull(CameraLogger.lastMessage);
+        logger.w("w");
+        assertEquals(CameraLogger.lastMessage, "w");
+        logger.e("e");
+        assertEquals(CameraLogger.lastMessage, "e");
+
+        // Error
+        CameraLogger.lastMessage = null;
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_ERROR);
+        logger.v("v");
+        assertNull(CameraLogger.lastMessage);
+        logger.i("i");
+        assertNull(CameraLogger.lastMessage);
+        logger.w("w");
+        assertNull(CameraLogger.lastMessage);
+        logger.e("e");
+        assertEquals(CameraLogger.lastMessage, "e");
+    }
+
+    @Test
+    public void testMessage() {
+        logger.i("test", "logger", 10, null);
+        assertEquals(CameraLogger.lastTag, loggerTag);
+        assertEquals(CameraLogger.lastMessage, "test logger 10 null");
+    }
+
+    @Test
+    public void testExternal() {
+        CameraLogger.Logger mock = mock(CameraLogger.Logger.class);
+        CameraLogger.registerLogger(mock);
+        logger.e("hey");
+        verify(mock, times(1)).log(CameraLogger.LEVEL_ERROR, loggerTag, "hey", null);
+
+        reset(mock);
+        CameraLogger.unregisterLogger(mock);
+        logger.e("hey again");
+        verify(mock, never()).log(anyInt(), anyString(), anyString(), any(Throwable.class));
+    }
+
+    @Test
+    public void testThrowable() {
+        CameraLogger.Logger mock = mock(CameraLogger.Logger.class);
+        CameraLogger.registerLogger(mock);
+
+        final Op<Throwable> op = new Op<>(false);
+        doEndOp(op, 3)
+                .when(mock)
+                .log(anyInt(), anyString(), anyString(), any(Throwable.class));
+
+        op.listen();
+        logger.e("Got no error.");
+        assertNull(op.await(100));
+
+        op.listen();
+        logger.e("Got error:", new RuntimeException(""));
+        assertNotNull(op.await(100));
+
+        op.listen();
+        logger.e("Got", new RuntimeException(""), "while starting");
+        assertNotNull(op.await(100));
+    }
+}

+ 191 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java

@@ -0,0 +1,191 @@
+package com.otaliastudios.cameraview;
+
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.Color;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.charset.Charset;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class CameraUtilsTest extends BaseTest {
+
+    @Test
+    public void testHasCameras() {
+        Context context = mock(Context.class);
+        PackageManager pm = mock(PackageManager.class);
+        when(context.getPackageManager()).thenReturn(pm);
+        when(pm.hasSystemFeature(PackageManager.FEATURE_CAMERA)).thenReturn(true);
+        when(pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)).thenReturn(true);
+        assertTrue(CameraUtils.hasCameras(context));
+        when(pm.hasSystemFeature(PackageManager.FEATURE_CAMERA)).thenReturn(false);
+        when(pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)).thenReturn(true);
+        assertTrue(CameraUtils.hasCameras(context));
+        when(pm.hasSystemFeature(PackageManager.FEATURE_CAMERA)).thenReturn(false);
+        when(pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)).thenReturn(false);
+        assertFalse(CameraUtils.hasCameras(context));
+    }
+
+    @NonNull
+    private Op<String> writeAndReadString(@NonNull String data) {
+        final File file = new File(getContext().getFilesDir(), "string.txt");
+        final byte[] bytes = data.getBytes(Charset.forName("UTF-8"));
+        final Op<String> result = new Op<>();
+        final FileCallback callback = new FileCallback() {
+            @Override
+            public void onFileReady(@Nullable File file) {
+                if (file == null) {
+                    result.controller().end(null);
+                } else {
+                    // Read back the file.
+                    try {
+                        FileInputStream stream = new FileInputStream(file);
+                        byte[] bytes = new byte[stream.available()];
+                        stream.read(bytes);
+                        result.controller().end(new String(bytes, Charset.forName("UTF-8")));
+                    } catch (IOException e) {
+                        result.controller().end(null);
+                    }
+                }
+            }
+        };
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                CameraUtils.writeToFile(bytes, file, callback);
+            }
+        });
+        return result;
+    }
+
+    @Test
+    public void testWriteToFile() {
+        Op<String> op = writeAndReadString("testString");
+        String result = op.await(2000);
+        assertEquals("testString", result);
+    }
+
+
+    // Encodes bitmap and decodes again using our utility.
+    private Op<Bitmap> encodeDecodeTask(@NonNull Bitmap source, final int maxWidth, final int maxHeight, boolean async) {
+        final ByteArrayOutputStream os = new ByteArrayOutputStream();
+        // Using lossy JPG we can't have strict comparison of values after compression.
+        source.compress(Bitmap.CompressFormat.PNG, 100, os);
+        final byte[] data = os.toByteArray();
+
+        final Op<Bitmap> decode = new Op<>();
+        if (async) {
+            final BitmapCallback callback = new BitmapCallback() {
+                @Override
+                public void onBitmapReady(Bitmap bitmap) {
+                    decode.controller().end(bitmap);
+                }
+            };
+
+            // Run on ui because it involves handlers.
+            uiSync(new Runnable() {
+                @Override
+                public void run() {
+                    if (maxWidth > 0 && maxHeight > 0) {
+                        CameraUtils.decodeBitmap(data, maxWidth, maxHeight, callback);
+                    } else {
+                        CameraUtils.decodeBitmap(data, callback);
+                    }
+                }
+            });
+        } else {
+            Bitmap result;
+            if (maxWidth > 0 && maxHeight > 0) {
+                result = CameraUtils.decodeBitmap(data, maxWidth, maxHeight);
+            } else {
+                result = CameraUtils.decodeBitmap(data);
+            }
+            decode.controller().end(result);
+        }
+        return decode;
+    }
+
+    @Test
+    public void testDecodeBitmap() {
+        int w = 100, h = 200, color = Color.WHITE;
+        Bitmap source = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
+        source.setPixel(0, 0, color);
+
+        Op<Bitmap> decode = encodeDecodeTask(source, 0, 0, true);
+        Bitmap other = decode.await(800);
+        assertNotNull(other);
+        assertEquals(100, w);
+        assertEquals(200, h);
+        assertEquals(color, other.getPixel(0, 0));
+        assertEquals(0, other.getPixel(0, h-1));
+        assertEquals(0, other.getPixel(w-1, 0));
+        assertEquals(0, other.getPixel(w-1, h-1));
+    }
+
+    @Test
+    public void testDecodeBitmapSync() {
+        int w = 100, h = 200, color = Color.WHITE;
+        Bitmap source = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
+        source.setPixel(0, 0, color);
+
+        Op<Bitmap> decode = encodeDecodeTask(source, 0, 0, false);
+        Bitmap other = decode.await(800);
+        assertNotNull(other);
+        assertEquals(100, w);
+        assertEquals(200, h);
+        assertEquals(color, other.getPixel(0, 0));
+        assertEquals(0, other.getPixel(0, h-1));
+        assertEquals(0, other.getPixel(w-1, 0));
+        assertEquals(0, other.getPixel(w-1, h-1));
+    }
+
+
+    @Test
+    public void testDecodeDownscaledBitmap() {
+        int width = 1000, height = 2000;
+        Bitmap source = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+        Op<Bitmap> op;
+        Bitmap other;
+
+        op = encodeDecodeTask(source, 100, 100, true);
+        other = op.await(800);
+        assertNotNull(other);
+        assertTrue(other.getWidth() <= 100);
+        assertTrue(other.getHeight() <= 100);
+
+        op = encodeDecodeTask(source, Integer.MAX_VALUE, Integer.MAX_VALUE, true);
+        other = op.await(800);
+        assertNotNull(other);
+        assertEquals(other.getWidth(), width);
+        assertEquals(other.getHeight(), height);
+
+        op = encodeDecodeTask(source, 6000, 6000, true);
+        other = op.await(800);
+        assertNotNull(other);
+        assertEquals(other.getWidth(), width);
+        assertEquals(other.getHeight(), height);
+    }
+}

+ 271 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java

@@ -0,0 +1,271 @@
+package com.otaliastudios.cameraview;
+
+
+import android.content.Context;
+import android.graphics.PointF;
+import android.view.ViewGroup;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.MediumTest;
+
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.Engine;
+import com.otaliastudios.cameraview.controls.Preview;
+import com.otaliastudios.cameraview.engine.CameraEngine;
+import com.otaliastudios.cameraview.engine.MockCameraEngine;
+import com.otaliastudios.cameraview.frame.Frame;
+import com.otaliastudios.cameraview.frame.FrameProcessor;
+import com.otaliastudios.cameraview.gesture.Gesture;
+import com.otaliastudios.cameraview.gesture.GestureAction;
+import com.otaliastudios.cameraview.markers.AutoFocusMarker;
+import com.otaliastudios.cameraview.markers.AutoFocusTrigger;
+import com.otaliastudios.cameraview.markers.MarkerLayout;
+import com.otaliastudios.cameraview.preview.CameraPreview;
+import com.otaliastudios.cameraview.preview.MockCameraPreview;
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static junit.framework.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.mockito.ArgumentMatchers.nullable;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+/**
+ * Tests {@link CameraView#mCameraCallbacks} dispatch functions.
+ */
+@RunWith(AndroidJUnit4.class)
+@MediumTest
+public class CameraViewCallbacksTest extends BaseTest {
+
+    private final static long DELAY = 500;
+
+    private CameraView camera;
+    private CameraListener listener;
+    private FrameProcessor processor;
+    private MockCameraEngine mockController;
+    private MockCameraPreview mockPreview;
+    private Op<Boolean> op;
+
+    @Before
+    public void setUp() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                Context context = getContext();
+                listener = mock(CameraListener.class);
+                processor = mock(FrameProcessor.class);
+                camera = new CameraView(context) {
+
+                    @NonNull
+                    @Override
+                    protected CameraEngine instantiateCameraEngine(@NonNull Engine engine, @NonNull CameraEngine.Callback callback) {
+                        mockController = new MockCameraEngine(callback);
+                        return mockController;
+                    }
+
+                    @NonNull
+                    @Override
+                    protected CameraPreview instantiatePreview(@NonNull Preview preview, @NonNull Context context, @NonNull ViewGroup container) {
+                        mockPreview = new MockCameraPreview(context, container);
+                        return mockPreview;
+                    }
+
+                    @Override
+                    protected boolean checkPermissions(@NonNull Audio audio) {
+                        return true;
+                    }
+                };
+                camera.doInstantiatePreview();
+                camera.addCameraListener(listener);
+                camera.addFrameProcessor(processor);
+                op = new Op<>();
+            }
+        });
+    }
+
+    @After
+    public void tearDown() {
+        camera = null;
+        mockController = null;
+        mockPreview = null;
+        listener = null;
+    }
+
+    @Test
+    public void testDontDispatchIfRemoved() {
+        camera.removeCameraListener(listener);
+        CameraOptions options = mock(CameraOptions.class);
+        doEndOp(op, true).when(listener).onCameraOpened(options);
+        camera.mCameraCallbacks.dispatchOnCameraOpened(options);
+
+        assertNull(op.await(DELAY));
+        verify(listener, never()).onCameraOpened(options);
+    }
+
+    @Test
+    public void testDontDispatchIfCleared() {
+        camera.clearCameraListeners();
+        CameraOptions options = mock(CameraOptions.class);
+        doEndOp(op, true).when(listener).onCameraOpened(options);
+        camera.mCameraCallbacks.dispatchOnCameraOpened(options);
+
+        assertNull(op.await(DELAY));
+        verify(listener, never()).onCameraOpened(options);
+    }
+
+    @Test
+    public void testDispatchOnCameraOpened() {
+        CameraOptions options = mock(CameraOptions.class);
+        doEndOp(op, true).when(listener).onCameraOpened(options);
+        camera.mCameraCallbacks.dispatchOnCameraOpened(options);
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onCameraOpened(options);
+    }
+
+    @Test
+    public void testDispatchOnCameraClosed() {
+        doEndOp(op, true).when(listener).onCameraClosed();
+        camera.mCameraCallbacks.dispatchOnCameraClosed();
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onCameraClosed();
+    }
+
+    @Test
+    public void testDispatchOnVideoRecordingStart() {
+        doEndOp(op, true).when(listener).onVideoRecordingStart();
+        camera.mCameraCallbacks.dispatchOnVideoRecordingStart();
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onVideoRecordingStart();
+    }
+
+    @Test
+    public void testDispatchOnVideoRecordingEnd() {
+        doEndOp(op, true).when(listener).onVideoRecordingEnd();
+        camera.mCameraCallbacks.dispatchOnVideoRecordingEnd();
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onVideoRecordingEnd();
+    }
+
+    @Test
+    public void testDispatchOnVideoTaken() {
+        VideoResult.Stub stub = new VideoResult.Stub();
+        doEndOp(op, true).when(listener).onVideoTaken(any(VideoResult.class));
+        camera.mCameraCallbacks.dispatchOnVideoTaken(stub);
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onVideoTaken(any(VideoResult.class));
+    }
+
+    @Test
+    public void testDispatchOnPictureTaken() {
+        PictureResult.Stub stub = new PictureResult.Stub();
+        doEndOp(op, true).when(listener).onPictureTaken(any(PictureResult.class));
+        camera.mCameraCallbacks.dispatchOnPictureTaken(stub);
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onPictureTaken(any(PictureResult.class));
+    }
+
+    @Test
+    public void testDispatchOnZoomChanged() {
+        doEndOp(op, true).when(listener).onZoomChanged(eq(0f), eq(new float[]{0, 1}), nullable(PointF[].class));
+        camera.mCameraCallbacks.dispatchOnZoomChanged(0f, null);
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onZoomChanged(eq(0f), eq(new float[]{0, 1}), nullable(PointF[].class));
+    }
+
+    @Test
+    public void testDispatchOnExposureCorrectionChanged() {
+        float[] bounds = new float[]{};
+        doEndOp(op, true).when(listener).onExposureCorrectionChanged(0f, bounds, null);
+        camera.mCameraCallbacks.dispatchOnExposureCorrectionChanged(0f, bounds, null);
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onExposureCorrectionChanged(0f, bounds, null);
+    }
+
+    @Test
+    public void testDispatchOnFocusStart() {
+        // Enable tap gesture.
+        // Can't mock package protected. camera.mTapGestureFinder = mock(TapGestureLayout.class);
+        camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS);
+        AutoFocusMarker marker = mock(AutoFocusMarker.class);
+        MarkerLayout markerLayout = mock(MarkerLayout.class);
+        camera.setAutoFocusMarker(marker);
+        camera.mMarkerLayout = markerLayout;
+
+        PointF point = new PointF();
+        doEndOp(op, true).when(listener).onAutoFocusStart(point);
+        camera.mCameraCallbacks.dispatchOnFocusStart(Gesture.TAP, point);
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onAutoFocusStart(point);
+        verify(marker, times(1)).onAutoFocusStart(AutoFocusTrigger.GESTURE, point);
+        verify(markerLayout, times(1)).onEvent(eq(MarkerLayout.TYPE_AUTOFOCUS), any(PointF[].class));
+    }
+
+    @Test
+    public void testDispatchOnFocusEnd() {
+        // Enable tap gesture.
+        // Can't mock package protected. camera.mTapGestureFinder = mock(TapGestureLayout.class);
+        camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS);
+        AutoFocusMarker marker = mock(AutoFocusMarker.class);
+        camera.setAutoFocusMarker(marker);
+
+        PointF point = new PointF();
+        boolean success = true;
+        doEndOp(op, true).when(listener).onAutoFocusEnd(success, point);
+        camera.mCameraCallbacks.dispatchOnFocusEnd(Gesture.TAP, success, point);
+
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onAutoFocusEnd(success, point);
+        verify(marker, times(1)).onAutoFocusEnd(AutoFocusTrigger.GESTURE, success, point);
+
+        // Can't mock package protected. verify(camera.mTapGestureFinder, times(1)).onAutoFocusEnd(success);
+    }
+
+    @Test
+    public void testOrientationCallbacks() {
+        doEndOp(op, true).when(listener).onOrientationChanged(anyInt());
+        camera.mCameraCallbacks.onDeviceOrientationChanged(90);
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onOrientationChanged(anyInt());
+    }
+
+    // TODO: test onShutter, here or elsewhere
+
+    @Test
+    public void testCameraError() {
+        CameraException error = new CameraException(new RuntimeException("Error"));
+        doEndOp(op, true).when(listener).onCameraError(error);
+
+        camera.mCameraCallbacks.dispatchError(error);
+        assertNotNull(op.await(DELAY));
+        verify(listener, times(1)).onCameraError(error);
+    }
+
+    @Test
+    public void testProcessFrame() {
+        Frame mock = mock(Frame.class);
+        doEndOp(op, true).when(processor).process(mock);
+        camera.mCameraCallbacks.dispatchFrame(mock);
+
+        assertNotNull(op.await(DELAY));
+        verify(processor, times(1)).process(mock);
+    }
+}

Diff is not shown because the file is too large
+ 1105 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java


+ 55 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/PictureResultTest.java

@@ -0,0 +1,55 @@
+package com.otaliastudios.cameraview;
+
+
+import android.location.Location;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+
+import static org.junit.Assert.assertEquals;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class PictureResultTest extends BaseTest {
+
+    private PictureResult.Stub stub = new PictureResult.Stub();
+
+    @Test
+    public void testResult() {
+        PictureFormat format = PictureFormat.JPEG;
+        int rotation = 90;
+        Size size = new Size(20, 120);
+        byte[] jpeg = new byte[]{2, 4, 1, 5, 2};
+        Location location = Mockito.mock(Location.class);
+        boolean isSnapshot = true;
+        Facing facing = Facing.FRONT;
+
+        stub.format = format;
+        stub.rotation = rotation;
+        stub.size = size;
+        stub.data = jpeg;
+        stub.location = location;
+        stub.facing = facing;
+        //noinspection ConstantConditions
+        stub.isSnapshot = isSnapshot;
+
+        PictureResult result = new PictureResult(stub);
+        assertEquals(result.getFormat(), format);
+        assertEquals(result.getRotation(), rotation);
+        assertEquals(result.getSize(), size);
+        assertEquals(result.getData(), jpeg);
+        assertEquals(result.getLocation(), location);
+        //noinspection ConstantConditions
+        assertEquals(result.isSnapshot(), isSnapshot);
+        assertEquals(result.getFacing(), facing);
+    }
+}

+ 76 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/TestActivity.java

@@ -0,0 +1,76 @@
+package com.otaliastudios.cameraview;
+
+
+import android.app.Activity;
+import android.graphics.Point;
+import android.os.Bundle;
+import android.view.Gravity;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.WindowManager;
+import android.widget.FrameLayout;
+
+import androidx.annotation.Nullable;
+
+import com.otaliastudios.cameraview.size.Size;
+
+import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
+
+public class TestActivity extends Activity {
+
+    private ViewGroup content;
+    private Size contentSize = new Size(1000, 1000);
+
+    @Override
+    public void onCreate(@Nullable Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
+        wakeScreen();
+
+        // Match parent decor view.
+        FrameLayout root = new FrameLayout(this);
+        root.setKeepScreenOn(true);
+        root.setLayoutParams(new ViewGroup.LayoutParams(MATCH_PARENT, MATCH_PARENT));
+
+        // Inner content view with fixed size.
+        // We want it to be fully visible or Espresso will crash.
+        Point size = new Point();
+        getWindowManager().getDefaultDisplay().getSize(size);
+        int width = Math.min(size.x, size.y);
+        int height = Math.min(size.x, size.y);
+        FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(
+                width, height, Gravity.CENTER);
+        content = new FrameLayout(this);
+        content.setLayoutParams(params);
+        contentSize = new Size(width, height);
+
+        // Add.
+        root.addView(content);
+        setContentView(root);
+    }
+
+    public void wakeScreen() {
+        getWindow().addFlags(WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
+                | WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
+                | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
+                | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON
+                | WindowManager.LayoutParams.FLAG_ALLOW_LOCK_WHILE_SCREEN_ON);
+    }
+
+    public Size getContentSize() {
+        return contentSize;
+    }
+
+    public ViewGroup getContentView() {
+        return content;
+    }
+
+    public void inflate(View child) {
+        inflate(child, new ViewGroup.LayoutParams(MATCH_PARENT, MATCH_PARENT));
+    }
+
+    public void inflate(View child, ViewGroup.LayoutParams params) {
+        content.addView(child, params);
+        content.requestLayout();
+    }
+}

+ 146 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoResultTest.java

@@ -0,0 +1,146 @@
+package com.otaliastudios.cameraview;
+
+
+import android.location.Location;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.AudioCodec;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+
+import java.io.File;
+import java.io.FileDescriptor;
+
+import static org.junit.Assert.assertEquals;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class VideoResultTest extends BaseTest {
+
+    private VideoResult.Stub stub = new VideoResult.Stub();
+
+    @Test
+    public void testResultWithFile() {
+        File file = Mockito.mock(File.class);
+        int rotation = 90;
+        Size size = new Size(20, 120);
+        VideoCodec videoCodec = VideoCodec.H_263;
+        AudioCodec audioCodec = AudioCodec.DEVICE_DEFAULT;
+        Location location = Mockito.mock(Location.class);
+        boolean isSnapshot = true;
+        int maxDuration = 1234;
+        long maxFileSize = 500000;
+        int reason = VideoResult.REASON_MAX_DURATION_REACHED;
+        int videoFrameRate = 30;
+        int videoBitRate = 300000;
+        int audioBitRate = 30000;
+        Audio audio = Audio.ON;
+        Facing facing = Facing.FRONT;
+
+        stub.file = file;
+        stub.rotation = rotation;
+        stub.size = size;
+        stub.videoCodec = videoCodec;
+        stub.audioCodec = audioCodec;
+        stub.location = location;
+        stub.isSnapshot = isSnapshot;
+        stub.maxDuration = maxDuration;
+        stub.maxSize = maxFileSize;
+        stub.endReason = reason;
+        stub.videoFrameRate = videoFrameRate;
+        stub.videoBitRate = videoBitRate;
+        stub.audioBitRate = audioBitRate;
+        stub.audio = audio;
+        stub.facing = facing;
+
+        VideoResult result = new VideoResult(stub);
+        assertEquals(result.getFile(), file);
+        assertEquals(result.getRotation(), rotation);
+        assertEquals(result.getSize(), size);
+        assertEquals(result.getVideoCodec(), videoCodec);
+        assertEquals(result.getAudioCodec(), audioCodec);
+        assertEquals(result.getLocation(), location);
+        assertEquals(result.isSnapshot(), isSnapshot);
+        assertEquals(result.getMaxSize(), maxFileSize);
+        assertEquals(result.getMaxDuration(), maxDuration);
+        assertEquals(result.getTerminationReason(), reason);
+        assertEquals(result.getVideoFrameRate(), videoFrameRate);
+        assertEquals(result.getVideoBitRate(), videoBitRate);
+        assertEquals(result.getAudioBitRate(), audioBitRate);
+        assertEquals(result.getAudio(), audio);
+        assertEquals(result.getFacing(), facing);
+    }
+
+    @Test
+    public void testResultWithFileDescriptor() {
+        FileDescriptor fileDescriptor = FileDescriptor.in;
+        int rotation = 90;
+        Size size = new Size(20, 120);
+        VideoCodec videoCodec = VideoCodec.H_263;
+        AudioCodec audioCodec = AudioCodec.DEVICE_DEFAULT;
+        Location location = Mockito.mock(Location.class);
+        boolean isSnapshot = true;
+        int maxDuration = 1234;
+        long maxFileSize = 500000;
+        int reason = VideoResult.REASON_MAX_DURATION_REACHED;
+        int videoFrameRate = 30;
+        int videoBitRate = 300000;
+        int audioBitRate = 30000;
+        Audio audio = Audio.ON;
+        Facing facing = Facing.FRONT;
+
+        stub.fileDescriptor = fileDescriptor;
+        stub.rotation = rotation;
+        stub.size = size;
+        stub.videoCodec = videoCodec;
+        stub.audioCodec = audioCodec;
+        stub.location = location;
+        stub.isSnapshot = isSnapshot;
+        stub.maxDuration = maxDuration;
+        stub.maxSize = maxFileSize;
+        stub.endReason = reason;
+        stub.videoFrameRate = videoFrameRate;
+        stub.videoBitRate = videoBitRate;
+        stub.audioBitRate = audioBitRate;
+        stub.audio = audio;
+        stub.facing = facing;
+
+        VideoResult result = new VideoResult(stub);
+        assertEquals(result.getFileDescriptor(), fileDescriptor);
+        assertEquals(result.getRotation(), rotation);
+        assertEquals(result.getSize(), size);
+        assertEquals(result.getVideoCodec(), videoCodec);
+        assertEquals(result.getAudioCodec(), audioCodec);
+        assertEquals(result.getLocation(), location);
+        assertEquals(result.isSnapshot(), isSnapshot);
+        assertEquals(result.getMaxSize(), maxFileSize);
+        assertEquals(result.getMaxDuration(), maxDuration);
+        assertEquals(result.getTerminationReason(), reason);
+        assertEquals(result.getVideoFrameRate(), videoFrameRate);
+        assertEquals(result.getVideoBitRate(), videoBitRate);
+        assertEquals(result.getAudioBitRate(), audioBitRate);
+        assertEquals(result.getAudio(), audio);
+        assertEquals(result.getFacing(), facing);
+    }
+
+    @Test(expected = RuntimeException.class)
+    public void testResultWithNoFile() {
+        VideoResult result = new VideoResult(stub);
+        result.getFile();
+    }
+
+    @Test(expected = RuntimeException.class)
+    public void testResultWithNoFileDescriptor() {
+        VideoResult result = new VideoResult(stub);
+        result.getFileDescriptor();
+    }
+}

+ 38 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera1IntegrationTest.java

@@ -0,0 +1,38 @@
+package com.otaliastudios.cameraview.engine;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.LargeTest;
+
+import com.otaliastudios.cameraview.controls.Engine;
+
+import org.junit.runner.RunWith;
+
+/**
+ * These tests work great on real devices, and are the only way to test actual CameraEngine
+ * implementation - we really need to open the camera device.
+ * Unfortunately they fail unreliably on emulated devices, due to some bug with the
+ * emulated camera controller.
+ */
+@RunWith(AndroidJUnit4.class)
+@LargeTest
+// @RequiresDevice
+public class Camera1IntegrationTest extends CameraIntegrationTest<Camera1Engine> {
+
+    @NonNull
+    @Override
+    protected Engine getEngine() {
+        return Engine.CAMERA1;
+    }
+
+    @Override
+    protected long getMeteringTimeoutMillis() {
+        return Camera1Engine.AUTOFOCUS_END_DELAY_MILLIS;
+    }
+
+    @Override
+    public void testFrameProcessing_maxSize() {
+        // Camera1Engine does not support different sizes.
+        // super.testFrameProcessing_maxSize();
+    }
+}

+ 77 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java

@@ -0,0 +1,77 @@
+package com.otaliastudios.cameraview.engine;
+
+import android.hardware.camera2.CameraCharacteristics;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.LargeTest;
+
+import com.otaliastudios.cameraview.controls.Engine;
+
+import org.junit.runner.RunWith;
+
+/**
+ * These tests work great on real devices, and are the only way to test the actual CameraEngine
+ * implementation - we really need to open the camera device.
+ * Unfortunately they fail intermittently on emulated devices, due to a bug in the
+ * emulated camera controller.
+ */
+@RunWith(AndroidJUnit4.class)
+@LargeTest
+// @RequiresDevice
+public class Camera2IntegrationTest extends CameraIntegrationTest<Camera2Engine> {
+
+    @NonNull
+    @Override
+    protected Engine getEngine() {
+        return Engine.CAMERA2;
+    }
+
+    /* @Override
+    protected void onOpenSync() {
+        super.onOpenSync();
+        // Extra wait for the first frame to be dispatched.
+        // This is because various classes require getLastResult to be non-null
+        // and that's typically the case in a real app.
+        final CountDownLatch latch = new CountDownLatch(1);
+        new BaseAction() {
+            @Override
+            public void onCaptureCompleted(@NonNull ActionHolder holder,
+                                           @NonNull CaptureRequest request,
+                                           @NonNull TotalCaptureResult result) {
+                super.onCaptureCompleted(holder, request, result);
+                latch.countDown();
+                setState(STATE_COMPLETED);
+            }
+        }.start(controller);
+        try { latch.await(); } catch (InterruptedException ignore) {}
+    } */
+
+    @Override
+    protected long getMeteringTimeoutMillis() {
+        return Camera2Engine.METER_TIMEOUT;
+    }
+
+    /**
+     * setMaxDuration can crash on legacy devices (most emulators are), and I don't see
+     * any way to fix this in code. Such devices shouldn't use Camera2 at all.
+     * @return true if setting a video max duration is safe on this device.
+     */
+    @Override
+    protected boolean canSetVideoMaxDuration() {
+        if (!super.canSetVideoMaxDuration()) return false;
+        boolean shouldOpen = !camera.isOpened();
+        if (shouldOpen) openSync(true);
+        boolean result = controller.readCharacteristic(
+                CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
+                != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
+        if (shouldOpen) closeSync(true);
+        return result;
+    }
+
+    @Override
+    public void testFrameProcessing_freezeRelease() {
+        // Camera2 Frames are not freezable.
+        // super.testFrameProcessing_freezeRelease();
+    }
+}
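
canSetVideoMaxDuration() above gates a feature on the reported camera2 hardware level. For reference, a hedged sketch of the same check outside the test harness, reading the characteristic directly through CameraManager; the class and method names are illustrative only.

    import android.content.Context;
    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraManager;

    public final class HardwareLevelCheck {

        // Returns true when the given camera reports the LEGACY hardware level,
        // i.e. Camera2 is only a thin compatibility wrapper around the old HAL.
        public static boolean isLegacyDevice(Context context, String cameraId)
                throws CameraAccessException {
            CameraManager manager =
                    (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            Integer level = characteristics.get(
                    CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            return level != null
                    && level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
        }
    }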

The diff is not shown because the file is too large
+ 1227 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java


+ 214 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java

@@ -0,0 +1,214 @@
+package com.otaliastudios.cameraview.engine;
+
+
+import android.graphics.PointF;
+import android.location.Location;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
+import com.otaliastudios.cameraview.frame.ByteBufferFrameManager;
+import com.otaliastudios.cameraview.frame.FrameManager;
+import com.otaliastudios.cameraview.gesture.Gesture;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+public class MockCameraEngine extends CameraBaseEngine {
+
+    public boolean mPictureCaptured;
+    public boolean mFocusStarted;
+    public boolean mZoomChanged;
+    public boolean mExposureCorrectionChanged;
+
+    public MockCameraEngine(CameraEngine.Callback callback) {
+        super(callback);
+    }
+
+    @NonNull
+    @Override
+    protected Task<CameraOptions> onStartEngine() {
+        return Tasks.forResult(mCameraOptions);
+    }
+
+    @NonNull
+    @Override
+    protected Task<Void> onStopEngine() {
+        return Tasks.forResult(null);
+    }
+
+    @NonNull
+    @Override
+    protected Task<Void> onStartBind() {
+        return Tasks.forResult(null);
+    }
+
+    @NonNull
+    @Override
+    protected Task<Void> onStopBind() {
+        return Tasks.forResult(null);
+    }
+
+    @NonNull
+    @Override
+    protected Task<Void> onStartPreview() {
+        return Tasks.forResult(null);
+    }
+
+    @NonNull
+    @Override
+    protected Task<Void> onStopPreview() {
+        return Tasks.forResult(null);
+    }
+
+    public void setMockCameraOptions(CameraOptions options) {
+        mCameraOptions = options;
+    }
+
+    public void setMockPreviewStreamSize(Size size) {
+        mPreviewStreamSize = size;
+    }
+
+    public void setMockState(@NonNull CameraState state) {
+        Task<Void> change = getOrchestrator().scheduleStateChange(getState(),
+                state,
+                false,
+                new Callable<Task<Void>>() {
+            @Override
+            public Task<Void> call() {
+                return Tasks.forResult(null);
+            }
+        });
+        try {
+            Tasks.await(change);
+        } catch (Exception ignore) {}
+    }
+
+    @Override
+    public void setZoom(float zoom, @Nullable PointF[] points, boolean notify) {
+        mZoomValue = zoom;
+        mZoomChanged = true;
+    }
+
+    @Override
+    public void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify) {
+        mExposureCorrectionValue = EVvalue;
+        mExposureCorrectionChanged = true;
+    }
+
+    @Override
+    public void setFlash(@NonNull Flash flash) {
+        mFlash = flash;
+    }
+
+    @Override
+    public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
+        mWhiteBalance = whiteBalance;
+    }
+
+    @Override
+    public void setHdr(@NonNull Hdr hdr) {
+        mHdr = hdr;
+    }
+
+    @Override
+    public void setLocation(@Nullable Location location) {
+        mLocation = location;
+    }
+
+    @Override
+    public void setPictureFormat(@NonNull PictureFormat pictureFormat) {
+        mPictureFormat = pictureFormat;
+    }
+
+    @Override
+    public void setHasFrameProcessors(boolean hasFrameProcessors) {
+        mHasFrameProcessors = hasFrameProcessors;
+    }
+
+    @Override
+    public void setFrameProcessingFormat(int format) {
+        mFrameProcessingFormat = format;
+    }
+
+    @Override
+    public void takePicture(@NonNull PictureResult.Stub stub) {
+        super.takePicture(stub);
+        mPictureCaptured = true;
+    }
+
+    @Override
+    protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
+
+    }
+
+    @Override
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering) {
+
+    }
+
+    @Override
+    protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
+
+    }
+
+    @Override
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
+
+    }
+
+    @Override
+    protected void onPreviewStreamSizeChanged() {
+
+    }
+
+    @NonNull
+    @Override
+    protected List<Size> getPreviewStreamAvailableSizes() {
+        return new ArrayList<>();
+    }
+
+    @NonNull
+    @Override
+    protected List<Size> getFrameProcessingAvailableSizes() {
+        return new ArrayList<>();
+    }
+
+    @Override
+    public void startAutoFocus(@Nullable Gesture gesture, @NonNull MeteringRegions regions, @NonNull PointF legacyPoint) {
+        mFocusStarted = true;
+    }
+
+    @NonNull
+    @Override
+    protected FrameManager instantiateFrameManager(int poolSize) {
+        return new ByteBufferFrameManager(poolSize, null);
+    }
+
+    @Override
+    public void setPlaySounds(boolean playSounds) { }
+
+    @Override
+    protected boolean collectCameraInfo(@NonNull Facing facing) {
+        return true;
+    }
+
+    @Override
+    public void setPreviewFrameRate(float previewFrameRate) {
+        mPreviewFrameRate = previewFrameRate;
+    }
+}
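
MockCameraEngine implements every lifecycle step with Tasks.forResult(...), an already-completed Task, so state changes scheduled through the orchestrator finish immediately and can be awaited synchronously, as setMockState does. A minimal, hedged sketch of that Play Services Tasks pattern; the class below is illustrative and not part of the library.

    import com.google.android.gms.tasks.Task;
    import com.google.android.gms.tasks.Tasks;

    import java.util.concurrent.ExecutionException;

    public final class CompletedTaskSketch {

        // An already-completed step: listeners fire immediately and await() returns at once.
        static Task<Void> completedStep() {
            return Tasks.forResult(null);
        }

        // Tasks.await must not run on the Android main thread; instrumentation tests
        // call it from the test thread, which is what makes the mock synchronous.
        static <T> T runSynchronously(Task<T> task)
                throws ExecutionException, InterruptedException {
            return Tasks.await(task);
        }
    }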

+ 67 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/mappers/Camera1MapperTest.java

@@ -0,0 +1,67 @@
+package com.otaliastudios.cameraview.engine.mappers;
+
+
+import android.hardware.Camera;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class Camera1MapperTest extends BaseTest {
+
+    private Camera1Mapper mapper = Camera1Mapper.get();
+
+    @Test
+    public void testMap() {
+        assertEquals(mapper.mapFlash(Flash.OFF), Camera.Parameters.FLASH_MODE_OFF);
+        assertEquals(mapper.mapFlash(Flash.ON), Camera.Parameters.FLASH_MODE_ON);
+        assertEquals(mapper.mapFlash(Flash.AUTO), Camera.Parameters.FLASH_MODE_AUTO);
+        assertEquals(mapper.mapFlash(Flash.TORCH), Camera.Parameters.FLASH_MODE_TORCH);
+
+        assertEquals(mapper.mapFacing(Facing.BACK), Camera.CameraInfo.CAMERA_FACING_BACK);
+        assertEquals(mapper.mapFacing(Facing.FRONT), Camera.CameraInfo.CAMERA_FACING_FRONT);
+
+        assertEquals(mapper.mapHdr(Hdr.OFF), Camera.Parameters.SCENE_MODE_AUTO);
+        assertEquals(mapper.mapHdr(Hdr.ON), Camera.Parameters.SCENE_MODE_HDR);
+
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.AUTO), Camera.Parameters.WHITE_BALANCE_AUTO);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.DAYLIGHT), Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.CLOUDY), Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.INCANDESCENT), Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.FLUORESCENT), Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
+    }
+
+
+    @Test
+    public void testUnmap() {
+        assertEquals(Flash.OFF, mapper.unmapFlash(Camera.Parameters.FLASH_MODE_OFF));
+        assertEquals(Flash.ON, mapper.unmapFlash(Camera.Parameters.FLASH_MODE_ON));
+        assertEquals(Flash.AUTO, mapper.unmapFlash(Camera.Parameters.FLASH_MODE_AUTO));
+        assertEquals(Flash.TORCH, mapper.unmapFlash(Camera.Parameters.FLASH_MODE_TORCH));
+
+        assertEquals(Facing.BACK, mapper.unmapFacing(Camera.CameraInfo.CAMERA_FACING_BACK));
+        assertEquals(Facing.FRONT, mapper.unmapFacing(Camera.CameraInfo.CAMERA_FACING_FRONT));
+
+        assertEquals(Hdr.OFF, mapper.unmapHdr(Camera.Parameters.SCENE_MODE_AUTO));
+        assertEquals(Hdr.ON, mapper.unmapHdr(Camera.Parameters.SCENE_MODE_HDR));
+
+        assertEquals(WhiteBalance.AUTO, mapper.unmapWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO));
+        assertEquals(WhiteBalance.DAYLIGHT, mapper.unmapWhiteBalance(Camera.Parameters.WHITE_BALANCE_DAYLIGHT));
+        assertEquals(WhiteBalance.CLOUDY, mapper.unmapWhiteBalance(Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT));
+        assertEquals(WhiteBalance.INCANDESCENT, mapper.unmapWhiteBalance(Camera.Parameters.WHITE_BALANCE_INCANDESCENT));
+        assertEquals(WhiteBalance.FLUORESCENT, mapper.unmapWhiteBalance(Camera.Parameters.WHITE_BALANCE_FLUORESCENT));
+    }
+}
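
The mapper under test converts between CameraView enums and Camera1 string constants in both directions. A hedged sketch of how such a bidirectional mapping can be kept consistent by registering both directions in one place; the class below is illustrative, not the actual Camera1Mapper implementation.

    import android.hardware.Camera;

    import com.otaliastudios.cameraview.controls.Flash;

    import java.util.HashMap;
    import java.util.Map;

    public final class FlashMapperSketch {

        private static final Map<Flash, String> MAP = new HashMap<>();
        private static final Map<String, Flash> UNMAP = new HashMap<>();

        private static void register(Flash flash, String mode) {
            // One registration point keeps map and unmap in sync by construction.
            MAP.put(flash, mode);
            UNMAP.put(mode, flash);
        }

        static {
            register(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
            register(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
            register(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
            register(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
        }

        public static String map(Flash flash) {
            return MAP.get(flash);
        }

        public static Flash unmap(String mode) {
            return UNMAP.get(mode);
        }
    }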

+ 116 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/mappers/Camera2MapperTest.java

@@ -0,0 +1,116 @@
+package com.otaliastudios.cameraview.engine.mappers;
+
+
+import android.util.Pair;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.util.List;
+import java.util.Set;
+
+import static android.hardware.camera2.CameraMetadata.CONTROL_AE_MODE_OFF;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AE_MODE_ON;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AWB_MODE_AUTO;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AWB_MODE_FLUORESCENT;
+import static android.hardware.camera2.CameraMetadata.CONTROL_AWB_MODE_INCANDESCENT;
+import static android.hardware.camera2.CameraMetadata.CONTROL_SCENE_MODE_DISABLED;
+import static android.hardware.camera2.CameraMetadata.CONTROL_SCENE_MODE_HDR;
+import static android.hardware.camera2.CameraMetadata.FLASH_MODE_OFF;
+import static android.hardware.camera2.CameraMetadata.FLASH_MODE_TORCH;
+import static android.hardware.camera2.CameraMetadata.LENS_FACING_BACK;
+import static android.hardware.camera2.CameraMetadata.LENS_FACING_FRONT;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class Camera2MapperTest extends BaseTest {
+
+    private Camera2Mapper mapper = Camera2Mapper.get();
+
+    @Test
+    public void testMap() {
+        List<Pair<Integer, Integer>> values = mapper.mapFlash(Flash.OFF);
+        assertEquals(2, values.size());
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON, FLASH_MODE_OFF)));
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_OFF, FLASH_MODE_OFF)));
+        values = mapper.mapFlash(Flash.TORCH);
+        assertEquals(2, values.size());
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON, FLASH_MODE_TORCH)));
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_OFF, FLASH_MODE_TORCH)));
+        values = mapper.mapFlash(Flash.AUTO);
+        assertEquals(2, values.size());
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_AUTO_FLASH, FLASH_MODE_OFF)));
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, FLASH_MODE_OFF)));
+        values = mapper.mapFlash(Flash.ON);
+        assertEquals(1, values.size());
+        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_ALWAYS_FLASH, FLASH_MODE_OFF)));
+
+        assertEquals(mapper.mapFacing(Facing.BACK), LENS_FACING_BACK);
+        assertEquals(mapper.mapFacing(Facing.FRONT), LENS_FACING_FRONT);
+
+        assertEquals(mapper.mapHdr(Hdr.OFF), CONTROL_SCENE_MODE_DISABLED);
+        assertEquals(mapper.mapHdr(Hdr.ON), CONTROL_SCENE_MODE_HDR);
+
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.AUTO), CONTROL_AWB_MODE_AUTO);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.DAYLIGHT), CONTROL_AWB_MODE_DAYLIGHT);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.CLOUDY), CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.INCANDESCENT), CONTROL_AWB_MODE_INCANDESCENT);
+        assertEquals(mapper.mapWhiteBalance(WhiteBalance.FLUORESCENT), CONTROL_AWB_MODE_FLUORESCENT);
+    }
+
+
+    @Test
+    public void testUnmap() {
+        Set<Flash> values;
+        values = mapper.unmapFlash(CONTROL_AE_MODE_OFF);
+        assertEquals(values.size(), 2);
+        assertTrue(values.contains(Flash.OFF));
+        assertTrue(values.contains(Flash.TORCH));
+        values = mapper.unmapFlash(CONTROL_AE_MODE_ON);
+        assertEquals(values.size(), 2);
+        assertTrue(values.contains(Flash.OFF));
+        assertTrue(values.contains(Flash.TORCH));
+        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_ALWAYS_FLASH);
+        assertEquals(values.size(), 1);
+        assertTrue(values.contains(Flash.ON));
+        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_AUTO_FLASH);
+        assertEquals(values.size(), 1);
+        assertTrue(values.contains(Flash.AUTO));
+        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
+        assertEquals(values.size(), 1);
+        assertTrue(values.contains(Flash.AUTO));
+        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_EXTERNAL_FLASH);
+        assertEquals(values.size(), 0);
+
+        assertEquals(Facing.BACK, mapper.unmapFacing(LENS_FACING_BACK));
+        assertEquals(Facing.FRONT, mapper.unmapFacing(LENS_FACING_FRONT));
+
+        assertEquals(Hdr.OFF, mapper.unmapHdr(CONTROL_SCENE_MODE_DISABLED));
+        assertEquals(Hdr.ON, mapper.unmapHdr(CONTROL_SCENE_MODE_HDR));
+
+        assertEquals(WhiteBalance.AUTO, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_AUTO));
+        assertEquals(WhiteBalance.DAYLIGHT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_DAYLIGHT));
+        assertEquals(WhiteBalance.CLOUDY, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_CLOUDY_DAYLIGHT));
+        assertEquals(WhiteBalance.INCANDESCENT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_INCANDESCENT));
+        assertEquals(WhiteBalance.FLUORESCENT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_FLUORESCENT));
+    }
+}

+ 373 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/options/Camera1OptionsTest.java

@@ -0,0 +1,373 @@
+package com.otaliastudios.cameraview.engine.options;
+
+
+import android.graphics.ImageFormat;
+import android.hardware.Camera;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.AudioCodec;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Grid;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.Mode;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
+import com.otaliastudios.cameraview.gesture.GestureAction;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class Camera1OptionsTest extends BaseTest {
+
+    @Test
+    public void testEmpty() {
+        CameraOptions o = new Camera1Options(mock(Camera.Parameters.class), 0, false);
+        assertTrue(o.getSupportedPictureAspectRatios().isEmpty());
+        assertTrue(o.getSupportedPictureSizes().isEmpty());
+        assertTrue(o.getSupportedWhiteBalance().isEmpty());
+        assertEquals(1, o.getSupportedFlash().size()); // Flash.OFF is always there
+        assertEquals(1, o.getSupportedHdr().size()); // Hdr.OFF is always there
+        assertFalse(o.isAutoFocusSupported());
+        assertFalse(o.isExposureCorrectionSupported());
+        assertFalse(o.isZoomSupported());
+        assertEquals(o.getExposureCorrectionMaxValue(), 0f, 0);
+        assertEquals(o.getExposureCorrectionMinValue(), 0f, 0);
+        // Static
+        assertEquals(1, o.getSupportedPictureFormats().size());
+        assertTrue(o.getSupportedPictureFormats().contains(PictureFormat.JPEG));
+        assertEquals(1, o.getSupportedFrameProcessingFormats().size());
+        assertTrue(o.getSupportedFrameProcessingFormats().contains(ImageFormat.NV21));
+    }
+
+    private Camera.Size mockCameraSize(int width, int height) {
+        Camera.Size cs = mock(Camera.Size.class);
+        cs.width = width;
+        cs.height = height;
+        return cs;
+    }
+
+    @Test
+    public void testPictureSizes() {
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(100, 200),
+                mockCameraSize(50, 50),
+                mockCameraSize(1600, 900),
+                mockCameraSize(1000, 2000)
+        );
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedPictureSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<Size> supportedSizes = o.getSupportedPictureSizes();
+        assertEquals(supportedSizes.size(), sizes.size());
+        for (Camera.Size size : sizes) {
+            Size internalSize = new Size(size.width, size.height);
+            assertTrue(supportedSizes.contains(internalSize));
+        }
+    }
+
+    @Test
+    public void testPictureSizesFlip() {
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(100, 200),
+                mockCameraSize(50, 50),
+                mockCameraSize(1600, 900),
+                mockCameraSize(1000, 2000)
+        );
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedPictureSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, true);
+        Collection<Size> supportedSizes = o.getSupportedPictureSizes();
+        assertEquals(supportedSizes.size(), sizes.size());
+        for (Camera.Size size : sizes) {
+            Size internalSize = new Size(size.width, size.height).flip();
+            assertTrue(supportedSizes.contains(internalSize));
+        }
+    }
+
+    @Test
+    public void testPictureAspectRatio() {
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(100, 200),
+                mockCameraSize(50, 50),
+                mockCameraSize(1600, 900),
+                mockCameraSize(1000, 2000)
+        );
+
+        Set<AspectRatio> expected = new HashSet<>();
+        expected.add(AspectRatio.of(1, 2));
+        expected.add(AspectRatio.of(1, 1));
+        expected.add(AspectRatio.of(16, 9));
+
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedPictureSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<AspectRatio> supportedRatios = o.getSupportedPictureAspectRatios();
+        assertEquals(supportedRatios.size(), expected.size());
+        for (AspectRatio ratio : expected) {
+            assertTrue(supportedRatios.contains(ratio));
+        }
+    }
+
+
+    @Test
+    public void testVideoSizes() {
+        // Video sizes are capped by the CamcorderProfile.QUALITY_HIGH max size.
+        // This can be very small on an emulator, so use very small sizes to stay below that cap.
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(10, 20),
+                mockCameraSize(5, 5),
+                mockCameraSize(16, 9),
+                mockCameraSize(20, 40)
+        );
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedVideoSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<Size> supportedSizes = o.getSupportedVideoSizes();
+        assertEquals(supportedSizes.size(), sizes.size());
+        for (Camera.Size size : sizes) {
+            Size internalSize = new Size(size.width, size.height);
+            assertTrue(supportedSizes.contains(internalSize));
+        }
+    }
+
+    @Test
+    public void testVideoSizesNull() {
+        // When videoSizes is null, we take the preview sizes.
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(10, 20),
+                mockCameraSize(5, 5),
+                mockCameraSize(16, 9),
+                mockCameraSize(20, 40)
+        );
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedVideoSizes()).thenReturn(null);
+        when(params.getSupportedPreviewSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<Size> supportedSizes = o.getSupportedVideoSizes();
+        assertEquals(supportedSizes.size(), sizes.size());
+        for (Camera.Size size : sizes) {
+            Size internalSize = new Size(size.width, size.height);
+            assertTrue(supportedSizes.contains(internalSize));
+        }
+    }
+
+    @Test
+    public void testVideoSizesFlip() {
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(10, 20),
+                mockCameraSize(5, 5),
+                mockCameraSize(16, 9),
+                mockCameraSize(20, 40)
+        );
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedVideoSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, true);
+        Collection<Size> supportedSizes = o.getSupportedVideoSizes();
+        assertEquals(supportedSizes.size(), sizes.size());
+        for (Camera.Size size : sizes) {
+            Size internalSize = new Size(size.width, size.height).flip();
+            assertTrue(supportedSizes.contains(internalSize));
+        }
+    }
+
+    @Test
+    public void testVideoAspectRatio() {
+        List<Camera.Size> sizes = Arrays.asList(
+                mockCameraSize(10, 20),
+                mockCameraSize(5, 5),
+                mockCameraSize(16, 9),
+                mockCameraSize(20, 40)
+        );
+
+        Set<AspectRatio> expected = new HashSet<>();
+        expected.add(AspectRatio.of(1, 2));
+        expected.add(AspectRatio.of(1, 1));
+        expected.add(AspectRatio.of(16, 9));
+
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedVideoSizes()).thenReturn(sizes);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<AspectRatio> supportedRatios = o.getSupportedVideoAspectRatios();
+        assertEquals(supportedRatios.size(), expected.size());
+        for (AspectRatio ratio : expected) {
+            assertTrue(supportedRatios.contains(ratio));
+        }
+    }
+
+    @Test
+    public void testGestureActions() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedFocusModes()).thenReturn(Collections.<String>emptyList());
+        when(params.isZoomSupported()).thenReturn(true);
+        when(params.getMaxExposureCompensation()).thenReturn(0);
+        when(params.getMinExposureCompensation()).thenReturn(0);
+
+        CameraOptions o = new Camera1Options(params, 0, false);
+        assertFalse(o.supports(GestureAction.AUTO_FOCUS));
+        assertTrue(o.supports(GestureAction.TAKE_PICTURE));
+        assertTrue(o.supports(GestureAction.NONE));
+        assertTrue(o.supports(GestureAction.ZOOM));
+        assertTrue(o.supports(GestureAction.FILTER_CONTROL_1));
+        assertTrue(o.supports(GestureAction.FILTER_CONTROL_2));
+        assertFalse(o.supports(GestureAction.EXPOSURE_CORRECTION));
+    }
+
+    @Test
+    public void testAlwaysSupportedControls() {
+        // Grid, VideoCodec, AudioCodec, Mode and Audio are always supported.
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        CameraOptions o = new Camera1Options(params, 0, false);
+
+        Collection<Grid> grids = o.getSupportedControls(Grid.class);
+        Collection<VideoCodec> video = o.getSupportedControls(VideoCodec.class);
+        Collection<AudioCodec> audioCodecs = o.getSupportedControls(AudioCodec.class);
+        Collection<Mode> sessions = o.getSupportedControls(Mode.class);
+        Collection<Audio> audio = o.getSupportedControls(Audio.class);
+        assertEquals(grids.size(), Grid.values().length);
+        assertEquals(video.size(), VideoCodec.values().length);
+        assertEquals(audioCodecs.size(), AudioCodec.values().length);
+        assertEquals(sessions.size(), Mode.values().length);
+        assertEquals(audio.size(), Audio.values().length);
+    }
+
+    @Test
+    public void testFacing() {
+        Set<Integer> supported = new HashSet<>();
+        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+        for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
+            Camera.getCameraInfo(i, cameraInfo);
+            supported.add(cameraInfo.facing);
+        }
+
+        CameraOptions o = new Camera1Options(mock(Camera.Parameters.class), 0, false);
+        Camera1Mapper m = Camera1Mapper.get();
+        Collection<Facing> s = o.getSupportedControls(Facing.class);
+        assertEquals(s.size(), supported.size());
+        for (Facing facing : s) {
+            assertTrue(supported.contains(m.mapFacing(facing)));
+            assertTrue(o.supports(facing));
+        }
+    }
+
+    @Test
+    public void testWhiteBalance() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedWhiteBalance()).thenReturn(Arrays.asList(
+                Camera.Parameters.WHITE_BALANCE_AUTO, // Supported
+                Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT, // Supported
+                Camera.Parameters.WHITE_BALANCE_SHADE // Not supported
+        ));
+
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<WhiteBalance> w = o.getSupportedControls(WhiteBalance.class);
+        assertEquals(w.size(), 2);
+        assertTrue(w.contains(WhiteBalance.AUTO));
+        assertTrue(w.contains(WhiteBalance.CLOUDY));
+        assertTrue(o.supports(WhiteBalance.AUTO));
+        assertTrue(o.supports(WhiteBalance.CLOUDY));
+    }
+
+    @Test
+    public void testFlash() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedFlashModes()).thenReturn(Arrays.asList(
+                Camera.Parameters.FLASH_MODE_OFF, // Supported
+                Camera.Parameters.FLASH_MODE_AUTO, // Supported
+                Camera.Parameters.FLASH_MODE_TORCH, // Supported
+                Camera.Parameters.FLASH_MODE_RED_EYE // Not supported
+        ));
+
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<Flash> f = o.getSupportedControls(Flash.class);
+        assertEquals(f.size(), 3);
+        assertTrue(f.contains(Flash.OFF));
+        assertTrue(f.contains(Flash.AUTO));
+        assertTrue(f.contains(Flash.TORCH));
+
+        assertTrue(o.supports(Flash.OFF));
+        assertTrue(o.supports(Flash.AUTO));
+        assertTrue(o.supports(Flash.TORCH));
+    }
+
+    @Test
+    public void testHdr() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getSupportedSceneModes()).thenReturn(Arrays.asList(
+                Camera.Parameters.SCENE_MODE_AUTO, // Supported
+                Camera.Parameters.SCENE_MODE_HDR, // Supported
+                Camera.Parameters.SCENE_MODE_FIREWORKS // Not supported
+        ));
+
+        CameraOptions o = new Camera1Options(params, 0, false);
+        Collection<Hdr> h = o.getSupportedControls(Hdr.class);
+        assertEquals(h.size(), 2);
+        assertTrue(h.contains(Hdr.OFF));
+        assertTrue(h.contains(Hdr.ON));
+
+        assertTrue(o.supports(Hdr.OFF));
+        assertTrue(o.supports(Hdr.ON));
+    }
+
+    @Test
+    public void testBooleanFlags() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.isVideoSnapshotSupported()).thenReturn(true);
+        when(params.isZoomSupported()).thenReturn(true);
+        //noinspection ArraysAsListWithZeroOrOneArgument
+        when(params.getSupportedFocusModes()).thenReturn(Arrays.asList(Camera.Parameters.FOCUS_MODE_AUTO));
+        CameraOptions o = new Camera1Options(params, 0, false);
+        assertTrue(o.isZoomSupported());
+        assertTrue(o.isAutoFocusSupported());
+    }
+
+    @Test
+    public void testExposureCorrection() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        when(params.getMaxExposureCompensation()).thenReturn(10);
+        when(params.getMinExposureCompensation()).thenReturn(-10);
+        when(params.getExposureCompensationStep()).thenReturn(0.5f);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        assertTrue(o.isExposureCorrectionSupported());
+        assertEquals(o.getExposureCorrectionMinValue(), -10f * 0.5f, 0f);
+        assertEquals(o.getExposureCorrectionMaxValue(), 10f * 0.5f, 0f);
+    }
+
+    @Test
+    public void testPreviewFrameRate() {
+        Camera.Parameters params = mock(Camera.Parameters.class);
+        List<int[]> result = Arrays.asList(
+                new int[]{20000, 30000},
+                new int[]{30000, 60000},
+                new int[]{60000, 120000}
+        );
+        when(params.getSupportedPreviewFpsRange()).thenReturn(result);
+        CameraOptions o = new Camera1Options(params, 0, false);
+        assertEquals(20F, o.getPreviewFrameRateMinValue(), 0.001F);
+        assertEquals(120F, o.getPreviewFrameRateMaxValue(), 0.001F);
+    }
+}
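
Two numeric details above are worth spelling out: testPictureAspectRatio expects only three distinct ratios out of four sizes because 100x200 and 1000x2000 both reduce to 1:2, and testPreviewFrameRate relies on Camera1 reporting FPS ranges scaled by 1000 (20000 -> 20 FPS). A hedged sketch of both reductions in plain Java, not using the library's own Size/AspectRatio helpers:

    import java.util.LinkedHashSet;
    import java.util.Set;

    public final class SizeMathSketch {

        // Reduce a size to its aspect ratio by dividing both dimensions by their gcd.
        static String aspectRatioOf(int width, int height) {
            int g = gcd(width, height);
            return (width / g) + ":" + (height / g);
        }

        static int gcd(int a, int b) {
            return b == 0 ? a : gcd(b, a % b);
        }

        public static void main(String[] args) {
            Set<String> ratios = new LinkedHashSet<>();
            int[][] sizes = {{100, 200}, {50, 50}, {1600, 900}, {1000, 2000}};
            for (int[] s : sizes) {
                ratios.add(aspectRatioOf(s[0], s[1]));
            }
            System.out.println(ratios); // [1:2, 1:1, 16:9] - four sizes, three distinct ratios

            // Camera1 preview FPS ranges come in units of FPS * 1000.
            int[] range = {20000, 120000};
            System.out.println(range[0] / 1000f + " - " + range[1] / 1000f); // 20.0 - 120.0
        }
    }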

+ 172 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/BaseFilterTest.java

@@ -0,0 +1,172 @@
+package com.otaliastudios.cameraview.filter;
+
+
+import android.opengl.GLES20;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseEglTest;
+import com.otaliastudios.cameraview.internal.GlTextureDrawer;
+import com.otaliastudios.opengl.program.GlTextureProgram;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class BaseFilterTest extends BaseEglTest {
+
+    public static class TestFilter extends BaseFilter implements TwoParameterFilter {
+
+        private float param1;
+        private float param2;
+
+        @NonNull
+        @Override
+        public String getFragmentShader() {
+            return createDefaultFragmentShader();
+        }
+
+        @Override
+        public void setParameter1(float value) {
+            param1 = value;
+        }
+
+        @Override
+        public void setParameter2(float value) {
+            param2 = value;
+        }
+
+        @Override
+        public float getParameter1() {
+            return param1;
+        }
+
+        @Override
+        public float getParameter2() {
+            return param2;
+        }
+    }
+
+    private TestFilter filter;
+
+    @Test
+    public void testCreateDefaultFragmentShader() {
+        filter = new TestFilter();
+        filter.fragmentTextureCoordinateName = "XXX";
+        String defaultFragmentShader = filter.createDefaultFragmentShader();
+        assertNotNull(defaultFragmentShader);
+        assertTrue(defaultFragmentShader.contains(filter.fragmentTextureCoordinateName));
+    }
+
+    @Test
+    public void testCreateDefaultVertexShader() {
+        filter = new TestFilter();
+        filter.vertexModelViewProjectionMatrixName = "AAA";
+        filter.vertexPositionName = "BBB";
+        filter.vertexTextureCoordinateName = "CCC";
+        filter.vertexTransformMatrixName = "DDD";
+        filter.fragmentTextureCoordinateName = "EEE";
+        String defaultVertexShader = filter.createDefaultVertexShader();
+        assertNotNull(defaultVertexShader);
+        assertTrue(defaultVertexShader.contains(filter.vertexModelViewProjectionMatrixName));
+        assertTrue(defaultVertexShader.contains(filter.vertexPositionName));
+        assertTrue(defaultVertexShader.contains(filter.vertexTextureCoordinateName));
+        assertTrue(defaultVertexShader.contains(filter.vertexTransformMatrixName));
+        assertTrue(defaultVertexShader.contains(filter.fragmentTextureCoordinateName));
+    }
+
+    @Test
+    public void testOnProgramCreate() {
+        filter = new TestFilter();
+        int handle = GlTextureProgram.create(filter.getVertexShader(), filter.getFragmentShader());
+        filter.onCreate(handle);
+        assertNotNull(filter.program);
+        filter.onDestroy();
+        assertNull(filter.program);
+        GLES20.glDeleteProgram(handle);
+    }
+
+    @Test
+    public void testDraw_whenInvalid() {
+        filter = spy(new TestFilter());
+        float[] matrix = new float[16];
+        filter.draw(0L, matrix);
+        verify(filter, never()).onPreDraw(0L, matrix);
+        verify(filter, never()).onDraw(0L);
+        verify(filter, never()).onPostDraw(0L);
+    }
+
+    @Test
+    public void testDraw() {
+        // Use a drawer which cares about GL setup.
+        filter = spy(new TestFilter());
+        GlTextureDrawer drawer = new GlTextureDrawer();
+        drawer.setFilter(filter);
+
+        float[] matrix = drawer.getTextureTransform();
+        drawer.draw(0L);
+        verify(filter, times(1)).onPreDraw(0L, matrix);
+        verify(filter, times(1)).onDraw(0L);
+        verify(filter, times(1)).onPostDraw(0L);
+
+        drawer.release();
+    }
+
+    @Test(expected = RuntimeException.class)
+    public void testOnCopy_invalid() {
+        // Anonymous inner classes do not have a public constructor.
+        Filter filter = new BaseFilter() {
+            @NonNull
+            @Override
+            public String getFragmentShader() {
+                return "whatever";
+            }
+        };
+        filter.copy();
+    }
+
+    @Test
+    public void testOnCopy() {
+        filter = new TestFilter();
+        BaseFilter other = filter.copy();
+        assertTrue(other instanceof TestFilter);
+    }
+
+    @Test
+    public void testCopy_withSize() {
+        filter = new TestFilter();
+        filter.setSize(WIDTH, HEIGHT);
+        BaseFilter other = filter.copy();
+        assertEquals(WIDTH, other.size.getWidth());
+        assertEquals(HEIGHT, other.size.getHeight());
+    }
+
+    @Test
+    public void testCopy_withParameter1() {
+        filter = new TestFilter();
+        filter.setParameter1(0.5F);
+        TestFilter other = (TestFilter) filter.copy();
+        assertEquals(filter.getParameter1(), other.getParameter1(), 0.001F);
+    }
+
+    @Test
+    public void testCopy_withParameter2() {
+        filter = new TestFilter();
+        filter.setParameter2(0.5F);
+        TestFilter other = (TestFilter) filter.copy();
+        assertEquals(filter.getParameter2(), other.getParameter2(), 0.001F);
+    }
+}
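
BaseFilterTest only checks that the generated default shaders contain the configurable attribute/varying names (for example fragmentTextureCoordinateName). As a hedged illustration of that idea, a default fragment shader for an external camera texture can be assembled around those names; the GLSL below is a generic OES-sampler shader, not necessarily the exact string that BaseFilter produces.

    public final class DefaultShaderSketch {

        // Build a minimal fragment shader for an external (camera) texture, substituting
        // the configurable varying name, which is the property the test asserts on.
        static String createDefaultFragmentShader(String fragmentTextureCoordinateName) {
            return "#extension GL_OES_EGL_image_external : require\n"
                    + "precision mediump float;\n"
                    + "varying vec2 " + fragmentTextureCoordinateName + ";\n"
                    + "uniform samplerExternalOES sTexture;\n"
                    + "void main() {\n"
                    + "  gl_FragColor = texture2D(sTexture, " + fragmentTextureCoordinateName + ");\n"
                    + "}\n";
        }

        public static void main(String[] args) {
            String shader = createDefaultFragmentShader("vTextureCoord");
            System.out.println(shader.contains("vTextureCoord")); // true
        }
    }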

+ 52 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/FilterParserTest.java

@@ -0,0 +1,52 @@
+package com.otaliastudios.cameraview.filter;
+
+
+import android.content.res.TypedArray;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.R;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static junit.framework.TestCase.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class FilterParserTest extends BaseTest {
+
+    @Test
+    public void testFallback() {
+        TypedArray array = mock(TypedArray.class);
+        when(array.hasValue(R.styleable.CameraView_cameraFilter)).thenReturn(false);
+        when(array.getString(R.styleable.CameraView_cameraFilter)).thenReturn(null);
+        FilterParser parser = new FilterParser(array);
+        assertNotNull(parser.getFilter());
+        assertTrue(parser.getFilter() instanceof NoFilter);
+    }
+
+    @Test
+    public void testConstructor() {
+        TypedArray array = mock(TypedArray.class);
+        when(array.hasValue(R.styleable.CameraView_cameraFilter)).thenReturn(true);
+        when(array.getString(R.styleable.CameraView_cameraFilter)).thenReturn(MyFilter.class.getName());
+        FilterParser parser = new FilterParser(array);
+        assertNotNull(parser.getFilter());
+        assertTrue(parser.getFilter() instanceof MyFilter);
+    }
+
+    public static class MyFilter extends BaseFilter {
+        @NonNull
+        @Override
+        public String getFragmentShader() {
+            return createDefaultFragmentShader();
+        }
+    }
+}
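
FilterParser resolves the cameraFilter XML attribute into a Filter instance and falls back to NoFilter when the attribute is missing or invalid. A hedged sketch of that reflection-with-fallback pattern; the helper below is illustrative and the real FilterParser may differ.

    import com.otaliastudios.cameraview.filter.Filter;
    import com.otaliastudios.cameraview.filter.NoFilter;

    public final class FilterReflectionSketch {

        // Instantiate a Filter from a fully qualified class name, falling back to NoFilter
        // when the name is null, the class is missing, or it has no usable no-args constructor.
        static Filter instantiate(String className) {
            if (className == null) return new NoFilter();
            try {
                Class<?> clazz = Class.forName(className);
                return (Filter) clazz.getConstructor().newInstance();
            } catch (Exception e) {
                return new NoFilter();
            }
        }
    }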

+ 28 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/FiltersTest.java

@@ -0,0 +1,28 @@
+package com.otaliastudios.cameraview.filter;
+
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertNotNull;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class FiltersTest extends BaseTest {
+
+    @Test
+    public void testNewInstance() {
+        // At the very least, this checks that all our default filters have a no-args constructor.
+        Filters[] filtersArray = Filters.values();
+        for (Filters filters : filtersArray) {
+            Filter filter = filters.newInstance();
+            assertNotNull(filter);
+        }
+    }
+}

+ 212 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/MultiFilterTest.java

@@ -0,0 +1,212 @@
+package com.otaliastudios.cameraview.filter;
+
+
+import android.opengl.GLES20;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseEglTest;
+import com.otaliastudios.cameraview.filters.AutoFixFilter;
+import com.otaliastudios.cameraview.filters.BrightnessFilter;
+import com.otaliastudios.cameraview.filters.DuotoneFilter;
+import com.otaliastudios.cameraview.filters.VignetteFilter;
+import com.otaliastudios.cameraview.internal.GlTextureDrawer;
+import com.otaliastudios.opengl.program.GlProgram;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class MultiFilterTest extends BaseEglTest {
+
+    @Test
+    public void testConstructor1() {
+        MultiFilter multiFilter = new MultiFilter(
+                new DuotoneFilter(),
+                new AutoFixFilter()
+        );
+        assertEquals(2, multiFilter.filters.size());
+    }
+
+    @Test
+    public void testConstructor2() {
+        List<Filter> filters = new ArrayList<>();
+        filters.add(new DuotoneFilter());
+        filters.add(new AutoFixFilter());
+        MultiFilter multiFilter = new MultiFilter(filters);
+        assertEquals(2, multiFilter.filters.size());
+    }
+
+    @Test
+    public void testAddFilter() {
+        MultiFilter multiFilter = new MultiFilter();
+        assertEquals(0, multiFilter.filters.size());
+        multiFilter.addFilter(new DuotoneFilter());
+        assertEquals(1, multiFilter.filters.size());
+        multiFilter.addFilter(new AutoFixFilter());
+        assertEquals(2, multiFilter.filters.size());
+    }
+
+    @Test
+    public void testAddFilter_multi() {
+        MultiFilter multiFilter = new MultiFilter(new DuotoneFilter());
+        assertEquals(1, multiFilter.filters.size());
+        MultiFilter other = new MultiFilter(
+                new AutoFixFilter(),
+                new BrightnessFilter(),
+                new VignetteFilter());
+        multiFilter.addFilter(other);
+        assertEquals(4, multiFilter.filters.size());
+    }
+
+    @Test
+    public void testSetSize() {
+        DuotoneFilter filter = new DuotoneFilter();
+        MultiFilter multiFilter = new MultiFilter(filter);
+        MultiFilter.State state = multiFilter.states.get(filter);
+        assertNotNull(state);
+        assertNull(state.size);
+        multiFilter.setSize(WIDTH, HEIGHT);
+        assertNotNull(state.size);
+    }
+
+    @Test
+    public void testCopy() {
+        DuotoneFilter filter = spy(new DuotoneFilter());
+        MultiFilter multiFilter = new MultiFilter(filter);
+        MultiFilter multiFilterCopy = (MultiFilter) multiFilter.copy();
+        assertEquals(1, multiFilterCopy.filters.size());
+        verify(filter, times(1)).onCopy();
+    }
+
+    @Test
+    public void testParameter1() {
+        DuotoneFilter filter = new DuotoneFilter();
+        MultiFilter multiFilter = new MultiFilter(filter);
+        float desired = 0.21582F; // whatever
+        multiFilter.setParameter1(desired);
+        assertEquals(desired, multiFilter.getParameter1(), 0.001F);
+        assertEquals(desired, filter.getParameter1(), 0.001F);
+    }
+
+    @Test
+    public void testParameter2() {
+        DuotoneFilter filter = new DuotoneFilter();
+        MultiFilter multiFilter = new MultiFilter(filter);
+        float desired = 0.21582F; // whatever
+        multiFilter.setParameter2(desired);
+        assertEquals(desired, multiFilter.getParameter2(), 0.001F);
+        assertEquals(desired, filter.getParameter2(), 0.001F);
+    }
+
+    @Test
+    public void testOnCreate_isLazy() {
+        DuotoneFilter filter = spy(new DuotoneFilter());
+        MultiFilter multiFilter = new MultiFilter(filter);
+
+        int program = GlProgram.create(multiFilter.getVertexShader(),
+                multiFilter.getFragmentShader());
+        multiFilter.onCreate(program);
+        verify(filter, never()).onCreate(anyInt());
+
+        multiFilter.onDestroy();
+        GLES20.glDeleteProgram(program);
+        verify(filter, never()).onDestroy();
+    }
+
+    @Test
+    public void testDraw_simple() {
+        DuotoneFilter filter = spy(new DuotoneFilter());
+        MultiFilter multiFilter = new MultiFilter(filter);
+        multiFilter.setSize(WIDTH, HEIGHT);
+        GlTextureDrawer drawer = new GlTextureDrawer();
+        drawer.setFilter(multiFilter);
+        float[] matrix = drawer.getTextureTransform();
+        drawer.draw(0L);
+        drawer.release();
+
+        // The child should have experienced the whole lifecycle.
+        verify(filter, atLeastOnce()).getVertexShader();
+        verify(filter, atLeastOnce()).getFragmentShader();
+        verify(filter, atLeastOnce()).setSize(anyInt(), anyInt());
+        verify(filter, times(1)).onCreate(anyInt());
+        verify(filter, times(1)).draw(0L, matrix);
+        verify(filter, times(1)).onDestroy();
+    }
+
+    @Test
+    public void testDraw_multi() {
+        // Want to test that when filter1 is drawn, the current framebuffer is a
+        // non-default one. When filter2 is drawn, the current framebuffer is 0.
+        final DuotoneFilter filter1 = spy(new DuotoneFilter());
+        final DuotoneFilter filter2 = spy(new DuotoneFilter());
+        final MultiFilter multiFilter = new MultiFilter(filter1, filter2);
+        multiFilter.setSize(WIDTH, HEIGHT);
+        GlTextureDrawer drawer = new GlTextureDrawer();
+        drawer.setFilter(multiFilter);
+        float[] matrix = drawer.getTextureTransform();
+        final int[] result = new int[1];
+
+        doAnswer(new Answer() {
+            @Override
+            public Object answer(InvocationOnMock invocation) {
+                MultiFilter.State state = multiFilter.states.get(filter1);
+                assertNotNull(state);
+                assertTrue(state.isProgramCreated);
+                assertTrue(state.isFramebufferCreated);
+
+                GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, result, 0);
+                // assertTrue(result[0] != 0);
+                return null;
+            }
+        }).when(filter1).draw(0L, matrix);
+
+        // Note: second filter is drawn with the identity matrix!
+        doAnswer(new Answer() {
+            @Override
+            public Object answer(InvocationOnMock invocation) {
+                // The last filter has no FBO / texture.
+                MultiFilter.State state = multiFilter.states.get(filter2);
+                assertNotNull(state);
+                assertTrue(state.isProgramCreated);
+                assertFalse(state.isFramebufferCreated);
+
+                GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, result, 0);
+                assertEquals(0, result[0]);
+                return null;
+
+            }
+        }).when(filter2).draw(eq(0L), any(float[].class));
+
+        drawer.draw(0L);
+        drawer.release();
+
+        // Verify that both are drawn.
+        verify(filter1, times(1)).draw(0L, matrix);
+        verify(filter2, times(1)).draw(eq(0L), any(float[].class));
+    }
+
+}
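
testDraw_multi checks that intermediate filters render into an off-screen framebuffer while the last filter renders into the default framebuffer (binding 0). A hedged sketch of that GL pattern with plain GLES20 calls (texture and FBO creation only, drawing elided), independent of MultiFilter's internals:

    import android.opengl.GLES20;

    public final class FramebufferSketch {

        // Create a framebuffer backed by a color texture of the given size.
        // Must run on a thread with a current EGL context.
        static int[] createFramebufferWithTexture(int width, int height) {
            int[] texture = new int[1];
            int[] framebuffer = new int[1];

            GLES20.glGenTextures(1, texture, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
                    0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

            GLES20.glGenFramebuffers(1, framebuffer, 0);
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer[0]);
            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                    GLES20.GL_TEXTURE_2D, texture[0], 0);

            // While this framebuffer is bound, GL_FRAMEBUFFER_BINDING is non-zero and draw
            // calls render into the texture, which can feed the next filter in the chain.
            // Rebinding 0 sends the final filter's output to the default framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
            return new int[]{framebuffer[0], texture[0]};
        }
    }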

+ 34 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/NoFilterTest.java

@@ -0,0 +1,34 @@
+package com.otaliastudios.cameraview.filter;
+
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class NoFilterTest extends BaseTest {
+
+    public static class DummyFilter extends BaseFilter {
+        @NonNull
+        @Override
+        public String getFragmentShader() {
+            return "whatever";
+        }
+    }
+
+    @Test
+    public void testGetFragmentShader() {
+        NoFilter filter = new NoFilter();
+        String defaultFragmentShader = new DummyFilter().createDefaultFragmentShader();
+        assertEquals(defaultFragmentShader, filter.getFragmentShader());
+    }
+}

+ 36 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/SimpleFilterTest.java

@@ -0,0 +1,36 @@
+package com.otaliastudios.cameraview.filter;
+
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class SimpleFilterTest extends BaseTest {
+
+    @Test
+    public void testGetFragmentShader() {
+        String shader = "shader";
+        SimpleFilter filter = new SimpleFilter(shader);
+        assertEquals(shader, filter.getFragmentShader());
+    }
+
+    @Test
+    public void testCopy() {
+        String shader = "shader";
+        SimpleFilter filter = new SimpleFilter(shader);
+        BaseFilter copy = filter.copy();
+        assertTrue(copy instanceof SimpleFilter);
+        SimpleFilter simpleCopy = (SimpleFilter) copy;
+        assertEquals(shader, simpleCopy.getFragmentShader());
+    }
+}

+ 112 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java

@@ -0,0 +1,112 @@
+package com.otaliastudios.cameraview.frame;
+
+
+import android.graphics.ImageFormat;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.engine.offset.Angles;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class ByteBufferFrameManagerTest extends BaseTest {
+
+    private final Angles angles = new Angles();
+    private ByteBufferFrameManager.BufferCallback callback;
+
+    @Before
+    public void setUp() {
+        callback = mock(ByteBufferFrameManager.BufferCallback.class);
+    }
+
+    @After
+    public void tearDown() {
+        callback = null;
+    }
+
+    @Test
+    public void testAllocate() {
+        ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+        verify(callback, times(1)).onBufferAvailable(any(byte[].class));
+        reset(callback);
+
+        manager = new ByteBufferFrameManager(5, callback);
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+        verify(callback, times(5)).onBufferAvailable(any(byte[].class));
+    }
+
+    @Test
+    public void testOnFrameReleased_alreadyFull() {
+        ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+        int length = manager.getFrameBytes();
+
+        Frame frame1 = manager.getFrame(new byte[length], 0);
+        assertNotNull(frame1);
+        // Since frame1 is already taken and poolSize = 1, getFrame() would return null.
+        // To create a new frame, freeze the first one.
+        Frame frame2 = frame1.freeze();
+        // Now release the first frame so it goes back into the pool.
+        manager.onFrameReleased(frame1, (byte[]) frame1.getData());
+        reset(callback);
+        // Release the second. The pool is already full, so onBufferAvailable should not be called
+        // since this Frame instance will NOT be reused.
+        manager.onFrameReleased(frame2, (byte[]) frame2.getData());
+        verify(callback, never()).onBufferAvailable((byte[]) frame2.getData());
+    }
+
+    @Test
+    public void testOnFrameReleased_sameLength() {
+        ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+        int length = manager.getFrameBytes();
+
+        // A camera preview frame comes. Request a frame.
+        byte[] picture = new byte[length];
+        Frame frame = manager.getFrame(picture, 0);
+        assertNotNull(frame);
+
+        // Release the frame and ensure that onBufferAvailable is called.
+        reset(callback);
+        manager.onFrameReleased(frame, (byte[]) frame.getData());
+        verify(callback, times(1)).onBufferAvailable(picture);
+    }
+
+    @Test
+    public void testOnFrameReleased_differentLength() {
+        ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+        int length = manager.getFrameBytes();
+
+        // A camera preview frame comes. Request a frame.
+        byte[] picture = new byte[length];
+        Frame frame = manager.getFrame(picture, 0);
+        assertNotNull(frame);
+
+        // Don't release the frame. Change the allocation size.
+        manager.setUp(ImageFormat.NV16, new Size(15, 15), angles);
+
+        // Now release the old frame and ensure that onBufferAvailable is NOT called,
+        // because the released data has the wrong length. Reset first, so that the buffers
+        // allocated by the second setUp() call do not count as interactions.
+        reset(callback);
+        manager.onFrameReleased(frame, (byte[]) frame.getData());
+        verify(callback, never()).onBufferAvailable(picture);
+    }
+}

+ 70 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java

@@ -0,0 +1,70 @@
+package com.otaliastudios.cameraview.frame;
+
+
+import android.graphics.ImageFormat;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.engine.offset.Angles;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class FrameManagerTest extends BaseTest {
+
+    private final Angles angles = new Angles();
+
+    @Test
+    public void testFrameRecycling() {
+        // A 1-pool manager will always recycle the same frame.
+        FrameManager<String> manager = new FrameManager<String>(1, String.class) {
+            @Override
+            protected void onFrameDataReleased(@NonNull String data, boolean recycled) { }
+
+            @NonNull
+            @Override
+            protected String onCloneFrameData(@NonNull String data) {
+                return data;
+            }
+        };
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+
+        Frame first = manager.getFrame("foo", 0);
+        assertNotNull(first);
+        first.release();
+        Frame second = manager.getFrame("bar", 0);
+        assertNotNull(second);
+        second.release();
+        assertEquals(first, second);
+    }
+
+    @Test
+    public void testGetFrame() {
+        FrameManager<String> manager = new FrameManager<String>(1, String.class) {
+            @Override
+            protected void onFrameDataReleased(@NonNull String data, boolean recycled) { }
+
+            @NonNull
+            @Override
+            protected String onCloneFrameData(@NonNull String data) {
+                return data;
+            }
+        };
+        manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
+
+        Frame first = manager.getFrame("foo", 0);
+        assertNotNull(first);
+        Frame second = manager.getFrame("bar", 0);
+        assertNull(second);
+    }
+}

+ 90 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/GestureFinderTest.java

@@ -0,0 +1,90 @@
+package com.otaliastudios.cameraview.gesture;
+
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.FrameLayout;
+
+import androidx.annotation.NonNull;
+import androidx.test.espresso.ViewInteraction;
+import androidx.test.espresso.matcher.RootMatchers;
+import androidx.test.rule.ActivityTestRule;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.TestActivity;
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.hamcrest.Matchers;
+import org.junit.Before;
+import org.junit.Rule;
+
+import static androidx.test.espresso.Espresso.onView;
+
+@TargetApi(17)
+public abstract class GestureFinderTest<T extends GestureFinder> extends BaseTest {
+
+    protected abstract T createFinder(@NonNull GestureFinder.Controller controller);
+
+    @Rule
+    public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
+
+    @SuppressWarnings("WeakerAccess")
+    protected T finder;
+    @SuppressWarnings("WeakerAccess")
+    protected Op<Gesture> touchOp;
+    @SuppressWarnings("WeakerAccess")
+    protected ViewGroup layout;
+
+    @Before
+    public void setUp() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                TestActivity a = rule.getActivity();
+                layout = new FrameLayout(a);
+                finder = createFinder(new Controller());
+                finder.setActive(true);
+                a.inflate(layout);
+
+                touchOp = new Op<>(false);
+                layout.setOnTouchListener(new View.OnTouchListener() {
+                    @Override
+                    public boolean onTouch(View view, MotionEvent motionEvent) {
+                        boolean found = finder.onTouchEvent(motionEvent);
+                        if (found) touchOp.controller().end(finder.getGesture());
+                        return true;
+                    }
+                });
+            }
+        });
+    }
+
+    @SuppressWarnings("WeakerAccess")
+    protected final ViewInteraction onLayout() {
+        return onView(Matchers.<View>is(layout))
+                .inRoot(RootMatchers.withDecorView(
+                        Matchers.is(rule.getActivity().getWindow().getDecorView())));
+    }
+
+    private class Controller implements GestureFinder.Controller {
+
+        @NonNull
+        @Override
+        public Context getContext() {
+            return layout.getContext();
+        }
+
+        @Override
+        public int getWidth() {
+            return layout.getWidth();
+        }
+
+        @Override
+        public int getHeight() {
+            return layout.getHeight();
+        }
+    }
+}

+ 71 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/PinchGestureFinderTest.java

@@ -0,0 +1,71 @@
+package com.otaliastudios.cameraview.gesture;
+
+
+import androidx.annotation.NonNull;
+import androidx.test.espresso.ViewAction;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.tools.SdkExclude;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * On API 26 these tests fail during Espresso's inRoot() - the window never gains focus.
+ * This might be due to a system popup or something similar.
+ */
+@SdkExclude(minSdkVersion = 26, maxSdkVersion = 26)
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class PinchGestureFinderTest extends GestureFinderTest<PinchGestureFinder> {
+
+    @Override
+    protected PinchGestureFinder createFinder(@NonNull GestureFinder.Controller controller) {
+        return new PinchGestureFinder(controller);
+    }
+
+    @Test
+    public void testDefaults() {
+        assertEquals(finder.getGesture(), Gesture.PINCH);
+        assertEquals(finder.getPoints().length, 2);
+        assertEquals(finder.getPoints()[0].x, 0, 0);
+        assertEquals(finder.getPoints()[0].y, 0, 0);
+        assertEquals(finder.getPoints()[1].x, 0, 0);
+        assertEquals(finder.getPoints()[1].y, 0, 0);
+    }
+
+    // TODO: test pinch open
+    // TODO: test pinch close
+    // TODO: test pinch disabled
+
+    // Possible approach: synthesize a two-pointer pinch gesture and let Espresso inject it.
+    // A hedged sketch of that approach follows the PinchViewAction class below.
+    // https://stackoverflow.com/questions/11523423/how-to-generate-zoom-pinch-gesture-for-testing-for-android
+
+    public abstract class PinchViewAction implements ViewAction {
+    }
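+
+    // A hedged sketch of the approach described above, not wired into any test: synthesize a
+    // two-pointer gesture around the view center and inject it through Espresso's UiController.
+    // The class name, distances and step count are illustrative assumptions. Once verified on
+    // a device, it could be driven through the helper below, e.g.
+    // testPinch(new PinchOpenViewAction(), true).
+    @SuppressWarnings("unused")
+    private class PinchOpenViewAction extends PinchViewAction {
+
+        @Override
+        public org.hamcrest.Matcher<android.view.View> getConstraints() {
+            return androidx.test.espresso.matcher.ViewMatchers.isDisplayed();
+        }
+
+        @Override
+        public String getDescription() {
+            return "Pinch open at the view center";
+        }
+
+        @Override
+        public void perform(androidx.test.espresso.UiController uiController,
+                            android.view.View view) {
+            int[] xy = new int[2];
+            view.getLocationOnScreen(xy);
+            float cx = xy[0] + view.getWidth() / 2f;
+            float cy = xy[1] + view.getHeight() / 2f;
+            long downTime = android.os.SystemClock.uptimeMillis();
+            // First pointer down, second pointer down, then spread both pointers apart.
+            inject(uiController, obtain(downTime, android.view.MotionEvent.ACTION_DOWN,
+                    cx - 20, cy, cx + 20, cy, 1));
+            inject(uiController, obtain(downTime, android.view.MotionEvent.ACTION_POINTER_DOWN
+                    | (1 << android.view.MotionEvent.ACTION_POINTER_INDEX_SHIFT),
+                    cx - 20, cy, cx + 20, cy, 2));
+            for (int step = 1; step <= 5; step++) {
+                float d = 20 + step * 40;
+                inject(uiController, obtain(downTime, android.view.MotionEvent.ACTION_MOVE,
+                        cx - d, cy, cx + d, cy, 2));
+            }
+            inject(uiController, obtain(downTime, android.view.MotionEvent.ACTION_POINTER_UP
+                    | (1 << android.view.MotionEvent.ACTION_POINTER_INDEX_SHIFT),
+                    cx - 220, cy, cx + 220, cy, 2));
+            inject(uiController, obtain(downTime, android.view.MotionEvent.ACTION_UP,
+                    cx - 220, cy, cx + 220, cy, 1));
+        }
+
+        // Builds a one- or two-pointer MotionEvent at the given coordinates.
+        private android.view.MotionEvent obtain(long downTime, int action, float x0, float y0,
+                                                float x1, float y1, int pointerCount) {
+            android.view.MotionEvent.PointerProperties[] props =
+                    new android.view.MotionEvent.PointerProperties[pointerCount];
+            android.view.MotionEvent.PointerCoords[] coords =
+                    new android.view.MotionEvent.PointerCoords[pointerCount];
+            float[] x = {x0, x1};
+            float[] y = {y0, y1};
+            for (int i = 0; i < pointerCount; i++) {
+                props[i] = new android.view.MotionEvent.PointerProperties();
+                props[i].id = i;
+                props[i].toolType = android.view.MotionEvent.TOOL_TYPE_FINGER;
+                coords[i] = new android.view.MotionEvent.PointerCoords();
+                coords[i].x = x[i];
+                coords[i].y = y[i];
+                coords[i].pressure = 1f;
+                coords[i].size = 1f;
+            }
+            return android.view.MotionEvent.obtain(downTime,
+                    android.os.SystemClock.uptimeMillis(), action, pointerCount, props, coords,
+                    0, 0, 1f, 1f, 0, 0, android.view.InputDevice.SOURCE_TOUCHSCREEN, 0);
+        }
+
+        private void inject(androidx.test.espresso.UiController uiController,
+                            android.view.MotionEvent event) {
+            try {
+                uiController.injectMotionEvent(event);
+            } catch (androidx.test.espresso.InjectEventSecurityException e) {
+                throw new RuntimeException(e);
+            } finally {
+                event.recycle();
+            }
+        }
+    }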
+
+    private void testPinch(ViewAction action, boolean increasing) {
+        touchOp.listen();
+        touchOp.controller().start();
+        onLayout().perform(action);
+        Gesture found = touchOp.await(10000);
+        assertNotNull(found);
+
+        // How will this move our parameter?
+        float curr = 0.5f, min = 0f, max = 1f;
+        float newValue = finder.computeValue(curr, min, max);
+        if (increasing) {
+            assertTrue(newValue > curr);
+            assertTrue(newValue <= max);
+        } else {
+            assertTrue(newValue < curr);
+            assertTrue(newValue >= min);
+        }
+    }
+}

+ 98 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/ScrollGestureFinderTest.java

@@ -0,0 +1,98 @@
+package com.otaliastudios.cameraview.gesture;
+
+
+import androidx.annotation.NonNull;
+import androidx.test.espresso.ViewAction;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.tools.SdkExclude;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static androidx.test.espresso.action.ViewActions.swipeDown;
+import static androidx.test.espresso.action.ViewActions.swipeLeft;
+import static androidx.test.espresso.action.ViewActions.swipeRight;
+import static androidx.test.espresso.action.ViewActions.swipeUp;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * On API 26 these tests fail during Espresso's inRoot() - the window never gains focus.
+ * This might be due to a system popup or something similar.
+ */
+@SdkExclude(minSdkVersion = 26, maxSdkVersion = 26)
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class ScrollGestureFinderTest extends GestureFinderTest<ScrollGestureFinder> {
+
+    private final static long WAIT = 2000; // 500 was too short
+
+    @Override
+    protected ScrollGestureFinder createFinder(@NonNull GestureFinder.Controller controller) {
+        return new ScrollGestureFinder(controller);
+    }
+
+    @Test
+    public void testDefaults() {
+        assertNull(finder.mType);
+        assertEquals(finder.getPoints().length, 2);
+        assertEquals(finder.getPoints()[0].x, 0, 0);
+        assertEquals(finder.getPoints()[0].y, 0, 0);
+        assertEquals(finder.getPoints()[1].x, 0, 0);
+        assertEquals(finder.getPoints()[1].y, 0, 0);
+    }
+
+    @Test
+    public void testScrollDisabled() {
+        finder.setActive(false);
+        touchOp.listen();
+        touchOp.controller().start();
+        onLayout().perform(swipeUp());
+        Gesture found = touchOp.await(WAIT);
+        assertNull(found);
+    }
+
+    private void testScroll(ViewAction scroll, Gesture expected, boolean increasing) {
+        touchOp.listen();
+        touchOp.controller().start();
+        onLayout().perform(scroll);
+        Gesture found = touchOp.await(WAIT);
+        assertEquals(found, expected);
+
+        // How will this move our parameter?
+        float curr = 0.5f, min = 0f, max = 1f;
+        float newValue = finder.computeValue(curr, min, max);
+        if (increasing) {
+            assertTrue(newValue >= curr);
+            assertTrue(newValue <= max);
+        } else {
+            assertTrue(newValue <= curr);
+            assertTrue(newValue >= min);
+        }
+    }
+
+    @Test
+    public void testScrollLeft() {
+        testScroll(swipeLeft(), Gesture.SCROLL_HORIZONTAL, false);
+    }
+
+    @Test
+    public void testScrollRight() {
+        testScroll(swipeRight(), Gesture.SCROLL_HORIZONTAL, true);
+    }
+
+    @Test
+    public void testScrollUp() {
+        testScroll(swipeUp(), Gesture.SCROLL_VERTICAL, true);
+    }
+
+    @Test
+    public void testScrollDown() {
+        testScroll(swipeDown(), Gesture.SCROLL_VERTICAL, false);
+    }
+}

+ 87 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/TapGestureFinderTest.java

@@ -0,0 +1,87 @@
+package com.otaliastudios.cameraview.gesture;
+
+
+import android.view.InputDevice;
+import android.view.MotionEvent;
+
+import androidx.annotation.NonNull;
+import androidx.test.espresso.action.GeneralClickAction;
+import androidx.test.espresso.action.GeneralLocation;
+import androidx.test.espresso.action.Press;
+import androidx.test.espresso.action.Tap;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.tools.SdkExclude;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static androidx.test.espresso.action.ViewActions.click;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+/**
+ * On API 26 these tests fail during Espresso's inRoot() - the window never gains focus.
+ * This might be due to a system popup or something similar.
+ */
+@SdkExclude(minSdkVersion = 26, maxSdkVersion = 26)
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class TapGestureFinderTest extends GestureFinderTest<TapGestureFinder> {
+
+    @Override
+    protected TapGestureFinder createFinder(@NonNull GestureFinder.Controller controller) {
+        return new TapGestureFinder(controller);
+    }
+
+    @Test
+    public void testDefaults() {
+        assertNull(finder.mType);
+        assertEquals(finder.getPoints().length, 1);
+        assertEquals(finder.getPoints()[0].x, 0, 0);
+        assertEquals(finder.getPoints()[0].y, 0, 0);
+    }
+
+    @Test
+    public void testTap() {
+        touchOp.listen();
+        touchOp.controller().start();
+        GeneralClickAction a = new GeneralClickAction(
+                Tap.SINGLE, GeneralLocation.CENTER, Press.FINGER,
+                InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY);
+        onLayout().perform(a);
+        Gesture found = touchOp.await(500);
+
+        assertEquals(found, Gesture.TAP);
+        Size size = rule.getActivity().getContentSize();
+        assertEquals(finder.getPoints()[0].x, (size.getWidth() / 2f), 1f);
+        assertEquals(finder.getPoints()[0].y, (size.getHeight() / 2f), 1f);
+    }
+
+    @Test
+    public void testTapWhileDisabled() {
+        finder.setActive(false);
+        touchOp.listen();
+        touchOp.controller().start();
+        onLayout().perform(click());
+        Gesture found = touchOp.await(500);
+        assertNull(found);
+    }
+
+    @Test
+    public void testLongTap() {
+        touchOp.listen();
+        touchOp.controller().start();
+        GeneralClickAction a = new GeneralClickAction(
+                Tap.LONG, GeneralLocation.CENTER, Press.FINGER,
+                InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY);
+        onLayout().perform(a);
+        Gesture found = touchOp.await(500);
+        assertEquals(found, Gesture.LONG_TAP);
+        Size size = rule.getActivity().getContentSize();
+        assertEquals(finder.getPoints()[0].x, (size.getWidth() / 2f), 1f);
+        assertEquals(finder.getPoints()[0].y, (size.getHeight() / 2f), 1f);
+    }
+}

+ 60 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/CamcorderProfilesTest.java

@@ -0,0 +1,60 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import android.media.CamcorderProfile;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.CameraUtils;
+import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.tools.SdkExclude;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class CamcorderProfilesTest extends BaseTest {
+
+    private String getCameraId() {
+        if (CameraUtils.hasCameras(getContext())) {
+            return "0";
+        }
+        return null;
+    }
+
+    @Test
+    public void testInvalidCameraReturnsLowest() {
+        CamcorderProfile invalid = CamcorderProfiles.get("invalid", new Size(100, 100));
+        CamcorderProfile lowest = CamcorderProfile.get(CamcorderProfile.QUALITY_LOW);
+        assertEquals(lowest.videoFrameWidth, invalid.videoFrameWidth);
+        assertEquals(lowest.videoFrameHeight, invalid.videoFrameHeight);
+    }
+
+    /**
+     * For some reason this fails on emulator 26.
+     */
+    @SdkExclude(minSdkVersion = 26, maxSdkVersion = 26)
+    @Test
+    public void testGet() {
+        String cameraId = getCameraId();
+        if (cameraId == null) return;
+        int cameraIdInt = Integer.parseInt(cameraId);
+
+        // Not much we can test. Let's just ask for lowest and highest.
+        CamcorderProfile low = CamcorderProfiles.get(cameraId, new Size(1, 1));
+        CamcorderProfile high = CamcorderProfiles.get(cameraId, new Size(Integer.MAX_VALUE, Integer.MAX_VALUE));
+
+        // Compare with lowest
+        CamcorderProfile lowest = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_LOW);
+        CamcorderProfile highest = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH);
+        assertEquals(lowest.videoFrameWidth, low.videoFrameWidth);
+        assertEquals(lowest.videoFrameHeight, low.videoFrameHeight);
+        assertEquals(highest.videoFrameWidth, high.videoFrameWidth);
+        assertEquals(highest.videoFrameHeight, high.videoFrameHeight);
+    }
+}

+ 52 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/CropHelperTest.java

@@ -0,0 +1,52 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import android.graphics.Rect;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class CropHelperTest extends BaseTest {
+
+    @Test
+    public void testCrop() {
+        testCrop(new Size(1600, 1600), AspectRatio.of(16, 16));
+        testCrop(new Size(1600, 1600), AspectRatio.of(16, 9));
+        testCrop(new Size(1600, 1600), AspectRatio.of(9, 16));
+    }
+
+    private void testCrop(final Size inSize, final AspectRatio outRatio) {
+        AspectRatio inRatio = AspectRatio.of(inSize.getWidth(), inSize.getHeight());
+        Rect out = CropHelper.computeCrop(inSize, outRatio);
+        Size outSize = new Size(out.width(), out.height());
+        assertTrue(outRatio.matches(outSize));
+
+        if (outRatio.matches(inSize)) {
+            // They are equal.
+            assertEquals(outSize.getWidth(), inSize.getWidth());
+            assertEquals(outSize.getHeight(), inSize.getHeight());
+        } else if (outRatio.toFloat() > inRatio.toFloat()) {
+            // Width must match.
+            assertEquals(outSize.getWidth(), inSize.getWidth());
+            assertNotEquals(outSize.getHeight(), inSize.getHeight());
+        } else {
+            // Height must match.
+            assertEquals(outSize.getHeight(), inSize.getHeight());
+            assertNotEquals(outSize.getWidth(), inSize.getWidth());
+        }
+    }
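+
+    // A worked example with concrete numbers (illustrative, based on the branch assertions in
+    // testCrop above): cropping a 1600x1600 source to 16:9 should keep the full width and
+    // crop the height to 1600 * 9 / 16 = 900.
+    @Test
+    public void testCrop_concreteNumbers() {
+        Rect out = CropHelper.computeCrop(new Size(1600, 1600), AspectRatio.of(16, 9));
+        assertEquals(1600, out.width());
+        assertEquals(900, out.height());
+    }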
+
+}

+ 219 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/DeviceEncodersTest.java

@@ -0,0 +1,219 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import android.media.MediaCodecInfo;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.MediumTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+@RunWith(AndroidJUnit4.class)
+@MediumTest
+public class DeviceEncodersTest extends BaseTest {
+
+    // This is guaranteed to work, see
+    // https://developer.android.com/guide/topics/media/media-formats
+    private final static Size GUARANTEED_SIZE = new Size(176, 144);
+
+    private boolean enabled;
+
+    @Before
+    public void setUp() {
+        enabled = DeviceEncoders.ENABLED;
+    }
+
+    @After
+    public void tearDown() {
+        DeviceEncoders.ENABLED = enabled;
+    }
+
+    @NonNull
+    private DeviceEncoders create() {
+        return new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
+                "video/avc",
+                "audio/mp4a-latm",
+                0,
+                0);
+    }
+
+    @Test
+    public void testGetDeviceEncoders() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            List<MediaCodecInfo> infos = deviceEncoders.getDeviceEncoders();
+            for (MediaCodecInfo info : infos) {
+                assertTrue(info.isEncoder());
+            }
+        }
+    }
+
+    @Test
+    public void testIsHardwareEncoder() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            assertFalse(deviceEncoders.isHardwareEncoder("OMX.google.encoder"));
+            assertTrue(deviceEncoders.isHardwareEncoder("OMX.other.encoder"));
+        }
+    }
+
+    @Test
+    public void testFindDeviceEncoder() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            List<MediaCodecInfo> allEncoders = deviceEncoders.getDeviceEncoders();
+            MediaCodecInfo encoder = deviceEncoders.findDeviceEncoder(allEncoders,
+                    "video/avc", DeviceEncoders.MODE_RESPECT_ORDER, 0);
+            assertNotNull(encoder);
+            List<String> encoderTypes = Arrays.asList(encoder.getSupportedTypes());
+            assertTrue(encoderTypes.contains("video/avc"));
+        }
+    }
+
+    @Test
+    public void testGetVideoEncoder() {
+        if (DeviceEncoders.ENABLED) {
+            DeviceEncoders deviceEncoders = create();
+            assertNotNull(deviceEncoders.getVideoEncoder());
+        }
+
+        DeviceEncoders.ENABLED = false;
+        DeviceEncoders deviceEncoders = create();
+        assertNull(deviceEncoders.getVideoEncoder());
+    }
+
+    @Test
+    public void testGetAudioEncoder() {
+        if (DeviceEncoders.ENABLED) {
+            DeviceEncoders deviceEncoders = create();
+            assertNotNull(deviceEncoders.getAudioEncoder());
+        }
+
+        DeviceEncoders.ENABLED = false;
+        DeviceEncoders deviceEncoders = create();
+        assertNull(deviceEncoders.getAudioEncoder());
+    }
+
+    @Test
+    public void testGetSupportedVideoSize_disabled() {
+        DeviceEncoders.ENABLED = false;
+        DeviceEncoders deviceEncoders = create();
+        Size input = new Size(GUARANTEED_SIZE.getWidth(), GUARANTEED_SIZE.getHeight());
+        Size output = deviceEncoders.getSupportedVideoSize(input);
+        assertSame(input, output);
+    }
+
+    @Test
+    public void testGetSupportedVideoSize_scalesDown() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            Size input = new Size(
+                    GUARANTEED_SIZE.getWidth() * 1000,
+                    GUARANTEED_SIZE.getHeight() * 1000);
+            try {
+                Size output = deviceEncoders.getSupportedVideoSize(input);
+                assertTrue(AspectRatio.of(input).matches(output, 0.01F));
+            } catch (RuntimeException e) {
+                // The scaled down size happens to be not supported.
+                // I see no way of testing this easily if we're not sure of supported ranges.
+                // This depends highly on the alignment since scaling down, while keeping AR,
+                // can change the alignment and require width / height changes.
+            }
+        }
+    }
+
+    @Test
+    public void testGetSupportedVideoSize_aligns() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            Size input = new Size(GUARANTEED_SIZE.getWidth() + 1,
+                    GUARANTEED_SIZE.getHeight() + 1);
+            Size output = deviceEncoders.getSupportedVideoSize(input);
+            assertTrue(output.getWidth() <= input.getWidth());
+            assertTrue(output.getHeight() <= input.getHeight());
+        }
+    }
+
+    @Test
+    public void testGetSupportedVideoBitRate_disabled() {
+        DeviceEncoders.ENABLED = false;
+        DeviceEncoders deviceEncoders = create();
+        int input = 1000;
+        int output = deviceEncoders.getSupportedVideoBitRate(input);
+        assertEquals(input, output);
+    }
+
+    @Test
+    public void testGetSupportedVideoBitRate_enabled() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            // Ensure it's clamped: we can pass a negative value and check it's >= 0.
+            int input = -1000;
+            int output = deviceEncoders.getSupportedVideoBitRate(input);
+            assertNotEquals(input, output);
+            assertTrue(output >= 0);
+        }
+    }
+
+    @Test
+    public void testGetSupportedAudioBitRate_disabled() {
+        DeviceEncoders.ENABLED = false;
+        DeviceEncoders deviceEncoders = create();
+        int input = 1000;
+        int output = deviceEncoders.getSupportedAudioBitRate(input);
+        assertEquals(input, output);
+    }
+
+    @Test
+    public void testGetSupportedAudioBitRate_enabled() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            // Ensure it's clamped: we can pass a negative value and check it's >= 0.
+            int input = -1000;
+            int output = deviceEncoders.getSupportedAudioBitRate(input);
+            assertNotEquals(input, output);
+            assertTrue(output >= 0);
+        }
+    }
+
+    @Test
+    public void testGetSupportedFrameRate_disabled() {
+        DeviceEncoders.ENABLED = false;
+        DeviceEncoders deviceEncoders = create();
+        int input = 1000;
+        int output = deviceEncoders.getSupportedVideoFrameRate(GUARANTEED_SIZE, input);
+        assertEquals(input, output);
+    }
+
+    @Test
+    public void testGetSupportedFrameRate_enabled() {
+        DeviceEncoders deviceEncoders = create();
+        if (DeviceEncoders.ENABLED) {
+            // Ensure it's clamped: we can pass a negative value and check it's >= 0.
+            int input = -10;
+            Size inputSize = deviceEncoders.getSupportedVideoSize(GUARANTEED_SIZE);
+            int output = deviceEncoders.getSupportedVideoFrameRate(inputSize, input);
+            assertNotEquals(input, output);
+            assertTrue(output >= 0);
+        }
+    }
+}

+ 97 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/GridLinesLayoutTest.java

@@ -0,0 +1,97 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.MediumTest;
+import androidx.test.rule.ActivityTestRule;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.TestActivity;
+import com.otaliastudios.cameraview.controls.Grid;
+import com.otaliastudios.cameraview.tools.Op;
+import com.otaliastudios.cameraview.tools.Retry;
+import com.otaliastudios.cameraview.tools.RetryRule;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.anyInt;
+import static org.mockito.Mockito.mock;
+
+@RunWith(AndroidJUnit4.class)
+@MediumTest
+public class GridLinesLayoutTest extends BaseTest {
+
+    @Rule
+    public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
+
+    @Rule
+    public RetryRule retryRule = new RetryRule(3);
+
+    private GridLinesLayout layout;
+
+    @NonNull
+    private Op<Integer> getDrawOp() {
+        final Op<Integer> op = new Op<>();
+        layout.callback = mock(GridLinesLayout.DrawCallback.class);
+        doEndOp(op, 0).when(layout.callback).onDraw(anyInt());
+        return op;
+    }
+
+    @Before
+    public void setUp() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                TestActivity a = rule.getActivity();
+                layout = new GridLinesLayout(a);
+                layout.setGridMode(Grid.OFF);
+                Op<Integer> op = getDrawOp();
+                a.getContentView().addView(layout);
+                op.await(1000);
+            }
+        });
+    }
+
+    private int setGridAndWait(Grid value) {
+        layout.setGridMode(value);
+        Op<Integer> op = getDrawOp();
+        Integer result = op.await(1000);
+        assertNotNull(result);
+        return result;
+    }
+
+    @Retry
+    @Test
+    public void testOff() {
+        int linesDrawn = setGridAndWait(Grid.OFF);
+        assertEquals(0, linesDrawn);
+    }
+
+    @Retry
+    @Test
+    public void test3x3() {
+        int linesDrawn = setGridAndWait(Grid.DRAW_3X3);
+        assertEquals(2, linesDrawn);
+    }
+
+    @Retry
+    @Test
+    public void testPhi() {
+        int linesDrawn = setGridAndWait(Grid.DRAW_PHI);
+        assertEquals(2, linesDrawn);
+    }
+
+    @Retry
+    @Test
+    public void test4x4() {
+        int linesDrawn = setGridAndWait(Grid.DRAW_4X4);
+        assertEquals(3, linesDrawn);
+    }
+
+}

+ 112 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/OrientationHelperTest.java

@@ -0,0 +1,112 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import android.view.OrientationEventListener;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class OrientationHelperTest extends BaseTest {
+
+    private OrientationHelper helper;
+    private OrientationHelper.Callback callback;
+
+    @Before
+    public void setUp() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                callback = mock(OrientationHelper.Callback.class);
+                helper = new OrientationHelper(getContext(), callback);
+            }
+        });
+    }
+
+    @After
+    public void tearDown() {
+        callback = null;
+        helper = null;
+    }
+
+    @Test
+    public void testEnable() {
+        // On some API levels, enable() needs to be run on the UI thread.
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                assertEquals(helper.getLastDisplayOffset(), -1);
+                assertEquals(helper.getLastDeviceOrientation(), -1);
+
+                helper.enable();
+                assertNotEquals(helper.getLastDisplayOffset(), -1); // Don't know about device orientation.
+
+                // Ensure nothing bad if called twice.
+                helper.enable();
+                assertNotEquals(helper.getLastDisplayOffset(), -1);
+
+                helper.disable();
+                assertEquals(helper.getLastDisplayOffset(), -1);
+                assertEquals(helper.getLastDeviceOrientation(), -1);
+            }
+        });
+    }
+
+    @Test
+    public void testRotation() {
+        // On some API levels, enable() needs to be run on the UI thread.
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                // Sometimes (on some APIs) the helper will trigger an update to 0
+                // right after enabling. That's fine for us: times(1) will pass either way.
+                helper.enable();
+                helper.mDeviceOrientationListener.onOrientationChanged(OrientationEventListener.ORIENTATION_UNKNOWN);
+                assertEquals(helper.getLastDeviceOrientation(), 0);
+                helper.mDeviceOrientationListener.onOrientationChanged(10);
+                assertEquals(helper.getLastDeviceOrientation(), 0);
+                helper.mDeviceOrientationListener.onOrientationChanged(-10);
+                assertEquals(helper.getLastDeviceOrientation(), 0);
+                helper.mDeviceOrientationListener.onOrientationChanged(44);
+                assertEquals(helper.getLastDeviceOrientation(), 0);
+                helper.mDeviceOrientationListener.onOrientationChanged(360);
+                assertEquals(helper.getLastDeviceOrientation(), 0);
+
+                // Callback called just once.
+                verify(callback, times(1)).onDeviceOrientationChanged(0);
+
+                helper.mDeviceOrientationListener.onOrientationChanged(90);
+                helper.mDeviceOrientationListener.onOrientationChanged(91);
+                assertEquals(helper.getLastDeviceOrientation(), 90);
+                verify(callback, times(1)).onDeviceOrientationChanged(90);
+
+                helper.mDeviceOrientationListener.onOrientationChanged(180);
+                assertEquals(helper.getLastDeviceOrientation(), 180);
+                verify(callback, times(1)).onDeviceOrientationChanged(180);
+
+                helper.mDeviceOrientationListener.onOrientationChanged(270);
+                assertEquals(helper.getLastDeviceOrientation(), 270);
+                verify(callback, times(1)).onDeviceOrientationChanged(270);
+
+                // It is still 270 after ORIENTATION_UNKNOWN
+                helper.mDeviceOrientationListener.onOrientationChanged(OrientationEventListener.ORIENTATION_UNKNOWN);
+                assertEquals(helper.getLastDeviceOrientation(), 270);
+                verify(callback, times(1)).onDeviceOrientationChanged(270);
+            }
+        });
+    }
+}

+ 52 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/RotationHelperTest.java

@@ -0,0 +1,52 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import android.graphics.ImageFormat;
+import android.graphics.YuvImage;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class RotationHelperTest extends BaseTest {
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testInvalidRotation1() {
+        RotationHelper.rotate(new byte[10], new Size(1, 1), -1);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testInvalidRotation2() {
+        RotationHelper.rotate(new byte[10], new Size(1, 1), -90);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testInvalidRotation3() {
+        RotationHelper.rotate(new byte[10], new Size(1, 1), 360);
+    }
+
+    @Test
+    public void testRotate() {
+        // Just test that nothing happens.
+        Size inputSize = new Size(160, 90);
+        int inputSizeBits = inputSize.getWidth() * inputSize.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.NV21);
+        int inputSizeBytes = (int) Math.ceil(inputSizeBits / 8.0d);
+        byte[] input = new byte[inputSizeBytes];
+        byte[] output = RotationHelper.rotate(input, inputSize, 90);
+        assertEquals(input.length, output.length);
+
+        Size outputSize = inputSize.flip();
+        YuvImage image = new YuvImage(output, ImageFormat.NV21, outputSize.getWidth(), outputSize.getHeight(), null);
+        assertNotNull(image);
+    }
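+
+    // A worked example of the size arithmetic used above (illustrative): NV21 is documented
+    // as 12 bits per pixel, so a 160x90 frame needs 160 * 90 * 12 / 8 = 21600 bytes.
+    @Test
+    public void testNv21BufferSizeArithmetic() {
+        assertEquals(12, ImageFormat.getBitsPerPixel(ImageFormat.NV21));
+        assertEquals(21600, 160 * 90 * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8);
+    }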
+}

+ 245 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/WorkerHandlerTest.java

@@ -0,0 +1,245 @@
+package com.otaliastudios.cameraview.internal;
+
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class WorkerHandlerTest extends BaseTest {
+
+    @Test
+    public void testGetFromCache() {
+        WorkerHandler first = WorkerHandler.get("first");
+        WorkerHandler second = WorkerHandler.get("first");
+        assertSame(first, second);
+    }
+
+    @Test
+    public void testGetAnother() {
+        WorkerHandler first = WorkerHandler.get("first");
+        WorkerHandler second = WorkerHandler.get("second");
+        assertNotSame(first, second);
+    }
+
+    @NonNull
+    private Runnable getRunnableForOp(final @NonNull Op<Boolean> op) {
+        return new Runnable() {
+            @Override
+            public void run() {
+                op.controller().end(true);
+            }
+        };
+    }
+
+    @NonNull
+    private Callable<Boolean> getCallableForOp(final @NonNull Op<Boolean> op) {
+        return new Callable<Boolean>() {
+            @Override
+            public Boolean call() {
+                op.controller().end(true);
+                return true;
+            }
+        };
+    }
+
+    @NonNull
+    private Callable<Void> getThrowCallable() {
+        return new Callable<Void>() {
+            @Override
+            public Void call() {
+                throw new RuntimeException("Fake error");
+            }
+        };
+    }
+
+    private void waitOp(@NonNull Op<Boolean> op) {
+        Boolean result = op.await(500);
+        assertNotNull(result);
+        assertTrue(result);
+    }
+
+    @Test
+    public void testFallbackExecute() {
+        final Op<Boolean> op = new Op<>();
+        WorkerHandler.execute(getRunnableForOp(op));
+        waitOp(op);
+    }
+
+    @Test
+    public void testPostRunnable() {
+        WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.post(getRunnableForOp(op));
+        waitOp(op);
+    }
+
+    @Test
+    public void testPostCallable() {
+        WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.post(getCallableForOp(op));
+        waitOp(op);
+    }
+
+    @Test
+    public void testPostCallable_throws() {
+        WorkerHandler handler = WorkerHandler.get("handler");
+        Task<Void> task = handler.post(getThrowCallable());
+        try { Tasks.await(task); } catch (ExecutionException | InterruptedException ignore) {}
+        assertTrue(task.isComplete());
+        assertFalse(task.isSuccessful());
+    }
+
+    @Test
+    public void testRunRunnable_background() {
+        WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.run(getRunnableForOp(op));
+        waitOp(op);
+    }
+
+    @Test
+    public void testRunRunnable_sameThread() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op1 = new Op<>();
+        final Op<Boolean> op2 = new Op<>();
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                handler.run(getRunnableForOp(op2));
+                assertTrue(op2.await(0)); // Do not wait.
+                op1.controller().end(true);
+            }
+        });
+        waitOp(op1);
+    }
+
+    @Test
+    public void testRunCallable_background() {
+        WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.run(getCallableForOp(op));
+        waitOp(op);
+    }
+
+    @Test
+    public void testRunCallable_sameThread() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op1 = new Op<>();
+        final Op<Boolean> op2 = new Op<>();
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                handler.run(getCallableForOp(op2));
+                assertTrue(op2.await(0)); // Do not wait.
+                op1.controller().end(true);
+            }
+        });
+        waitOp(op1);
+    }
+
+    @Test
+    public void testRunCallable_sameThread_throws() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                Task<Void> task = handler.run(getThrowCallable());
+                assertTrue(task.isComplete()); // Already complete
+                assertFalse(task.isSuccessful());
+                op.controller().end(true);
+            }
+        });
+        waitOp(op);
+    }
+
+    @Test
+    public void testPostDelayed_tooEarly() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.post(1000, getRunnableForOp(op));
+        assertNull(op.await(500));
+    }
+
+    @Test
+    public void testPostDelayed() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        handler.post(1000, getRunnableForOp(op));
+        assertNotNull(op.await(2000));
+    }
+
+    @Test
+    public void testRemove() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        final Op<Boolean> op = new Op<>();
+        Runnable runnable = getRunnableForOp(op);
+        handler.post(1000, runnable);
+        handler.remove(runnable);
+        assertNull(op.await(2000));
+    }
+
+    @Test
+    public void testGetters() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        assertNotNull(handler.getExecutor());
+        assertNotNull(handler.getHandler());
+        assertNotNull(handler.getLooper());
+        assertNotNull(handler.getThread());
+    }
+
+    @Test
+    public void testExecutor() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        Executor executor = handler.getExecutor();
+        final Op<Boolean> op = new Op<>();
+        executor.execute(getRunnableForOp(op));
+        waitOp(op);
+    }
+
+    @Test
+    public void testDestroy() {
+        final WorkerHandler handler = WorkerHandler.get("handler");
+        assertTrue(handler.getThread().isAlive());
+        handler.destroy();
+        WorkerHandler newHandler = WorkerHandler.get("handler");
+        assertNotSame(handler, newHandler);
+        assertTrue(newHandler.getThread().isAlive());
+        // Ensure old thread dies at some point.
+        try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
+        assertFalse(handler.getThread().isAlive());
+    }
+
+    @Test
+    public void testDestroyAll() {
+        final WorkerHandler handler1 = WorkerHandler.get("handler1");
+        final WorkerHandler handler2 = WorkerHandler.get("handler2");
+        WorkerHandler.destroyAll();
+        WorkerHandler newHandler1 = WorkerHandler.get("handler1");
+        WorkerHandler newHandler2 = WorkerHandler.get("handler2");
+        assertNotSame(handler1, newHandler1);
+        assertNotSame(handler2, newHandler2);
+    }
+}

+ 100 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/DefaultAutoFocusMarkerTest.java

@@ -0,0 +1,100 @@
+package com.otaliastudios.cameraview.markers;
+
+
+import android.graphics.PointF;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.FrameLayout;
+
+import androidx.test.annotation.UiThreadTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+
+public class DefaultAutoFocusMarkerTest extends BaseTest {
+
+    private DefaultAutoFocusMarker marker;
+
+    @Before
+    public void setUp() {
+        marker = new DefaultAutoFocusMarker();
+    }
+
+    @After
+    public void tearDown() {
+        marker = null;
+    }
+
+    @Test
+    public void testOnAttach() {
+        assertNull(marker.mContainer);
+        assertNull(marker.mFill);
+        ViewGroup container = new FrameLayout(getContext());
+        View result = marker.onAttach(getContext(), container);
+        assertNotNull(result);
+        assertNotNull(marker.mContainer);
+        assertNotNull(marker.mFill);
+    }
+
+    @UiThreadTest
+    @Test
+    public void testOnAutoFocusStart() {
+        View mockContainer = spy(new View(getContext()));
+        View mockFill = spy(new View(getContext()));
+        marker.mContainer = mockContainer;
+        marker.mFill = mockFill;
+        marker.onAutoFocusStart(AutoFocusTrigger.GESTURE, new PointF());
+        verify(mockContainer, atLeastOnce()).clearAnimation();
+        verify(mockFill, atLeastOnce()).clearAnimation();
+        verify(mockContainer, atLeastOnce()).animate();
+        verify(mockFill, atLeastOnce()).animate();
+    }
+
+    @UiThreadTest
+    @Test
+    public void testOnAutoFocusStart_fromMethod() {
+        View mockContainer = spy(new View(getContext()));
+        View mockFill = spy(new View(getContext()));
+        marker.mContainer = mockContainer;
+        marker.mFill = mockFill;
+        marker.onAutoFocusStart(AutoFocusTrigger.METHOD, new PointF());
+        verify(mockContainer, never()).clearAnimation();
+        verify(mockFill, never()).clearAnimation();
+        verify(mockContainer, never()).animate();
+        verify(mockFill, never()).animate();
+    }
+
+    @UiThreadTest
+    @Test
+    public void testOnAutoFocusEnd() {
+        View mockContainer = spy(new View(getContext()));
+        View mockFill = spy(new View(getContext()));
+        marker.mContainer = mockContainer;
+        marker.mFill = mockFill;
+        marker.onAutoFocusEnd(AutoFocusTrigger.GESTURE, true, new PointF());
+        verify(mockContainer, atLeastOnce()).animate();
+        verify(mockFill, atLeastOnce()).animate();
+    }
+
+    @UiThreadTest
+    @Test
+    public void testOnAutoFocusEnd_fromMethod() {
+        View mockContainer = spy(new View(getContext()));
+        View mockFill = spy(new View(getContext()));
+        marker.mContainer = mockContainer;
+        marker.mFill = mockFill;
+        marker.onAutoFocusEnd(AutoFocusTrigger.METHOD, true, new PointF());
+        verify(mockContainer, never()).animate();
+        verify(mockFill, never()).animate();
+    }
+}

+ 128 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerLayoutTest.java

@@ -0,0 +1,128 @@
+package com.otaliastudios.cameraview.markers;
+
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.PointF;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.annotation.NonNull;
+import androidx.test.rule.ActivityTestRule;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.TestActivity;
+import com.otaliastudios.cameraview.tools.SdkExclude;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+/**
+ * Not clear why, but on API 28+ the UiThreadTests here crash with an internal NPE
+ * in FrameLayout.onMeasure.
+ */
+@SdkExclude(minSdkVersion = 28, maxSdkVersion = 29)
+@TargetApi(17)
+public class MarkerLayoutTest extends BaseTest {
+
+    @Rule
+    public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
+
+    private MarkerLayout markerLayout;
+    private AutoFocusMarker autoFocusMarker;
+
+    @Before
+    public void setUp() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                TestActivity a = rule.getActivity();
+                markerLayout = spy(new MarkerLayout(a));
+                a.inflate(markerLayout);
+                autoFocusMarker = spy(new DefaultAutoFocusMarker());
+            }
+        });
+    }
+
+    @Test
+    public void testOnMarker_callsOnAttach() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, autoFocusMarker);
+                Mockito.verify(autoFocusMarker, times(1)).onAttach(
+                        Mockito.any(Context.class),
+                        Mockito.eq(markerLayout));
+            }
+        });
+    }
+
+    @Test
+    public void testOnMarker_addsView() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                Assert.assertEquals(markerLayout.getChildCount(), 0);
+                markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, autoFocusMarker);
+                Assert.assertEquals(markerLayout.getChildCount(), 1);
+            }
+        });
+    }
+
+    @Test
+    public void testOnMarker_removesView() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, autoFocusMarker);
+                Assert.assertEquals(markerLayout.getChildCount(), 1);
+                markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, autoFocusMarker);
+                Assert.assertEquals(markerLayout.getChildCount(), 1);
+                markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, null);
+                Assert.assertEquals(markerLayout.getChildCount(), 0);
+
+                Mockito.verify(autoFocusMarker, times(2)).onAttach(
+                        Mockito.any(Context.class),
+                        Mockito.eq(markerLayout));
+            }
+        });
+    }
+
+    @Test
+    public void testOnEvent() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                final View mockView = spy(new View(getContext()));
+                // These stubs fail, but they are not really needed anyway.
+                // when(mockView.getWidth()).thenReturn(50);
+                // when(mockView.getHeight()).thenReturn(50);
+                AutoFocusMarker mockMarker = new AutoFocusMarker() {
+                    public void onAutoFocusStart(@NonNull AutoFocusTrigger trigger, @NonNull PointF point) { }
+                    public void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger, boolean successful, @NonNull PointF point) { }
+
+                    @Override
+                    public View onAttach(@NonNull Context context, @NonNull ViewGroup container) {
+                        return mockView;
+                    }
+                };
+                markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, mockMarker);
+                reset(mockView);
+                markerLayout.onEvent(MarkerLayout.TYPE_AUTOFOCUS, new PointF[]{new PointF(0, 0)});
+                verify(mockView, times(1)).clearAnimation();
+                verify(mockView, times(1)).setTranslationX(anyFloat());
+                verify(mockView, times(1)).setTranslationY(anyFloat());
+            }
+        });
+    }
+}

+ 66 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerParserTest.java

@@ -0,0 +1,66 @@
+package com.otaliastudios.cameraview.markers;
+
+
+import android.content.Context;
+import android.content.res.TypedArray;
+import android.graphics.PointF;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.R;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static junit.framework.TestCase.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class MarkerParserTest extends BaseTest {
+
+    @Test
+    public void testNullConstructor() {
+        TypedArray array = mock(TypedArray.class);
+        when(array.hasValue(R.styleable.CameraView_cameraAutoFocusMarker)).thenReturn(false);
+        when(array.getString(R.styleable.CameraView_cameraAutoFocusMarker)).thenReturn(null);
+        MarkerParser parser = new MarkerParser(array);
+        assertNull(parser.getAutoFocusMarker());
+    }
+
+    @Test
+    public void testConstructor() {
+        TypedArray array = mock(TypedArray.class);
+        when(array.hasValue(R.styleable.CameraView_cameraAutoFocusMarker)).thenReturn(true);
+        when(array.getString(R.styleable.CameraView_cameraAutoFocusMarker)).thenReturn(Marker.class.getName());
+        MarkerParser parser = new MarkerParser(array);
+        assertNotNull(parser.getAutoFocusMarker());
+        assertTrue(parser.getAutoFocusMarker() instanceof Marker);
+    }
+
+    public static class Marker implements AutoFocusMarker {
+
+        public Marker() { }
+
+        @Nullable
+        @Override
+        public View onAttach(@NonNull Context context, @NonNull ViewGroup container) {
+            return null;
+        }
+
+        @Override
+        public void onAutoFocusStart(@NonNull AutoFocusTrigger trigger, @NonNull PointF point) { }
+
+        @Override
+        public void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger, boolean successful, @NonNull PointF point) { }
+    }
+}

+ 123 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/metering/MeteringRegionsTest.java

@@ -0,0 +1,123 @@
+package com.otaliastudios.cameraview.metering;
+
+
+import android.graphics.PointF;
+import android.graphics.RectF;
+
+import androidx.annotation.NonNull;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class MeteringRegionsTest extends BaseTest {
+
+    private final Size bounds = new Size(1000, 1000);
+
+    private void checkRegion(@NonNull MeteringRegion region, @NonNull PointF center, int weight) {
+        assertEquals(center.x, region.mRegion.centerX(), 0.01F);
+        assertEquals(center.y, region.mRegion.centerY(), 0.01F);
+        assertEquals(weight, region.mWeight);
+    }
+
+    @Test
+    public void testFromPoint() {
+        PointF center = new PointF(500, 500);
+        MeteringRegions regions = MeteringRegions.fromPoint(bounds, center);
+        assertEquals(2, regions.mRegions.size());
+        MeteringRegion first = regions.mRegions.get(0);
+        MeteringRegion second = regions.mRegions.get(1);
+        checkRegion(first, center, MeteringRegion.MAX_WEIGHT);
+        checkRegion(second, center,
+                Math.round(MeteringRegions.BLUR_FACTOR_WEIGHT * MeteringRegion.MAX_WEIGHT));
+    }
+
+    @Test
+    public void testFromArea() {
+        RectF area = new RectF(400, 400, 600, 600);
+        MeteringRegions regions = MeteringRegions.fromArea(bounds, area);
+        assertEquals(1, regions.mRegions.size());
+        MeteringRegion region = regions.mRegions.get(0);
+        checkRegion(region, new PointF(area.centerX(), area.centerY()), MeteringRegion.MAX_WEIGHT);
+    }
+
+    @Test
+    public void testFromArea_withBlur() {
+        RectF area = new RectF(400, 400, 600, 600);
+        MeteringRegions regions = MeteringRegions.fromArea(bounds, area,
+                MeteringRegion.MAX_WEIGHT, true);
+        assertEquals(2, regions.mRegions.size());
+        MeteringRegion first = regions.mRegions.get(0);
+        MeteringRegion second = regions.mRegions.get(1);
+        PointF center = new PointF(area.centerX(), area.centerY());
+        checkRegion(first, center, MeteringRegion.MAX_WEIGHT);
+        checkRegion(second, center,
+                Math.round(MeteringRegions.BLUR_FACTOR_WEIGHT * MeteringRegion.MAX_WEIGHT));
+    }
+
+    @Test
+    public void testTransform() {
+        MeteringTransform transform = mock(MeteringTransform.class);
+        when(transform.transformMeteringPoint(any(PointF.class))).then(new Answer<PointF>() {
+            @Override
+            public PointF answer(InvocationOnMock invocation) {
+                PointF in = invocation.getArgument(0);
+                // This will swap x and y coordinates
+                //noinspection SuspiciousNameCombination
+                return new PointF(in.y, in.x);
+            }
+        });
+        RectF area = new RectF(0, 0, 100, 500); // tall area
+        RectF expected = new RectF(0, 0, 500, 100); // wide area
+        MeteringRegions regions = MeteringRegions.fromArea(bounds, area);
+        MeteringRegions transformed = regions.transform(transform);
+        verify(transform, times(4)).transformMeteringPoint(any(PointF.class));
+        assertEquals(1, transformed.mRegions.size());
+        assertEquals(expected, transformed.mRegions.get(0).mRegion);
+    }
+
+    @Test
+    public void testGet() {
+        MeteringTransform<Integer> transform = new MeteringTransform<Integer>() {
+            @NonNull
+            @Override
+            public PointF transformMeteringPoint(@NonNull PointF point) {
+                return point;
+            }
+
+            @NonNull
+            @Override
+            public Integer transformMeteringRegion(@NonNull RectF region, int weight) {
+                return weight;
+            }
+        };
+        MeteringRegions regions = MeteringRegions.fromArea(bounds,
+                new RectF(400, 400, 600, 600),
+                900,
+                true);
+        assertEquals(2, regions.mRegions.size());
+        List<Integer> result = regions.get(1, transform);
+        assertEquals(1, result.size());
+        assertEquals(900, (int) result.get(0));
+    }
+
+}
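For reference, a minimal sketch of how an engine could consume this API: build regions from a tap point, then convert them with a MeteringTransform. The pass-through transform below mirrors the one in testGet and is only a placeholder; a real engine would map preview coordinates into sensor coordinates and build platform-specific region objects.

    // Sketch only, reusing the imports of the test above.
    MeteringTransform<Integer> transform = new MeteringTransform<Integer>() {
        @NonNull
        @Override
        public PointF transformMeteringPoint(@NonNull PointF point) {
            // A real engine would convert view coordinates to sensor coordinates here.
            return point;
        }

        @NonNull
        @Override
        public Integer transformMeteringRegion(@NonNull RectF region, int weight) {
            // A real engine would build a platform metering region here.
            return weight;
        }
    };
    MeteringRegions regions = MeteringRegions.fromPoint(new Size(1000, 1000), new PointF(500, 500));
    List<Integer> converted = regions.get(2, transform); // keep at most 2 regions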

+ 76 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java

@@ -0,0 +1,76 @@
+package com.otaliastudios.cameraview.overlay;
+
+
+import android.graphics.Canvas;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseEglTest;
+import com.otaliastudios.cameraview.internal.GlTextureDrawer;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class OverlayDrawerTest extends BaseEglTest {
+
+    @Test
+    public void testDraw() {
+        Overlay overlay = mock(Overlay.class);
+        OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(WIDTH, HEIGHT));
+        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+        verify(overlay, times(1)).drawOn(
+                eq(Overlay.Target.PICTURE_SNAPSHOT),
+                any(Canvas.class));
+    }
+
+    @Test
+    public void testGetTransform() {
+        // We'll check that the transform is not all zeros, which is highly unlikely
+        // (the default transform should be the identity matrix)
+        OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
+        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+        assertThat(drawer.getTransform(), new BaseMatcher<float[]>() {
+            public void describeTo(Description description) { }
+            public boolean matches(Object item) {
+                float[] array = (float[]) item;
+                for (float value : array) {
+                    if (value != 0.0F) return true;
+                }
+                return false;
+            }
+        });
+    }
+
+    @Test
+    public void testRender() {
+        OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
+        drawer.mTextureDrawer = spy(drawer.mTextureDrawer);
+
+        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+        drawer.render(0L);
+        verify(drawer.mTextureDrawer, times(1)).draw(0L);
+    }
+
+    @Test
+    public void testRelease() {
+        OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
+        GlTextureDrawer textureDrawer = spy(drawer.mTextureDrawer);
+        drawer.mTextureDrawer = textureDrawer;
+        drawer.release();
+        verify(textureDrawer, times(1)).release();
+    }
+}

+ 174 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayLayoutTest.java

@@ -0,0 +1,174 @@
+package com.otaliastudios.cameraview.overlay;
+
+
+import android.content.res.XmlResourceParser;
+import android.graphics.Canvas;
+import android.util.AttributeSet;
+import android.util.Xml;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.annotation.NonNull;
+import androidx.test.annotation.UiThreadTest;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class OverlayLayoutTest extends BaseTest {
+
+    private OverlayLayout overlayLayout;
+
+    @Before
+    public void setUp() {
+        overlayLayout = spy(new OverlayLayout(getContext()));
+    }
+
+    @After
+    public void tearDown() {
+        overlayLayout = null;
+    }
+
+    @Test
+    public void testIsOverlay_LayoutParams() {
+        ViewGroup.LayoutParams params;
+
+        params = new ViewGroup.LayoutParams(10, 10);
+        assertFalse(overlayLayout.isOverlay(params));
+
+        params = new OverlayLayout.LayoutParams(10, 10);
+        assertTrue(overlayLayout.isOverlay(params));
+    }
+
+    @Test
+    public void testIsOverlay_attributeSet() throws Exception {
+        int layout1 = com.otaliastudios.cameraview.test.R.layout.overlay;
+        int layout2 = com.otaliastudios.cameraview.test.R.layout.not_overlay;
+
+        AttributeSet set1 = getAttributeSet(layout1);
+        assertTrue(overlayLayout.isOverlay(set1));
+
+        AttributeSet set2 = getAttributeSet(layout2);
+        assertFalse(overlayLayout.isOverlay(set2));
+    }
+
+    @NonNull
+    private AttributeSet getAttributeSet(int layout) throws Exception {
+        // Get the attribute set in the correct state: use a parser and move to START_TAG
+        XmlResourceParser parser = getContext().getResources().getLayout(layout);
+        //noinspection StatementWithEmptyBody
+        while (parser.next() != XmlResourceParser.START_TAG) {}
+        return Xml.asAttributeSet(parser);
+    }
+
+    @Test
+    public void testLayoutParams_drawsOn() {
+        OverlayLayout.LayoutParams params = new OverlayLayout.LayoutParams(10, 10);
+
+        assertFalse(params.drawsOn(Overlay.Target.PREVIEW));
+        assertFalse(params.drawsOn(Overlay.Target.PICTURE_SNAPSHOT));
+        assertFalse(params.drawsOn(Overlay.Target.VIDEO_SNAPSHOT));
+
+        params.drawOnPreview = true;
+        assertTrue(params.drawsOn(Overlay.Target.PREVIEW));
+        params.drawOnPictureSnapshot = true;
+        assertTrue(params.drawsOn(Overlay.Target.PICTURE_SNAPSHOT));
+        params.drawOnVideoSnapshot = true;
+        assertTrue(params.drawsOn(Overlay.Target.VIDEO_SNAPSHOT));
+    }
+
+    @Test
+    public void testLayoutParams_toString() {
+        OverlayLayout.LayoutParams params = new OverlayLayout.LayoutParams(10, 10);
+        String string = params.toString();
+        assertTrue(string.contains("drawOnPreview"));
+        assertTrue(string.contains("drawOnPictureSnapshot"));
+        assertTrue(string.contains("drawOnVideoSnapshot"));
+    }
+
+    @Test
+    public void testDrawChild() {
+        Canvas canvas = new Canvas();
+        OverlayLayout.LayoutParams params = new OverlayLayout.LayoutParams(10, 10);
+        View child = new View(getContext());
+        child.setLayoutParams(params);
+        when(overlayLayout.doDrawChild(canvas, child, 0)).thenReturn(true);
+
+        overlayLayout.currentTarget = Overlay.Target.PREVIEW;
+        assertFalse(overlayLayout.drawChild(canvas, child, 0));
+        params.drawOnPreview = true;
+        assertTrue(overlayLayout.drawChild(canvas, child, 0));
+
+        overlayLayout.currentTarget = Overlay.Target.PICTURE_SNAPSHOT;
+        assertFalse(overlayLayout.drawChild(canvas, child, 0));
+        params.drawOnPictureSnapshot = true;
+        assertTrue(overlayLayout.drawChild(canvas, child, 0));
+
+        overlayLayout.currentTarget = Overlay.Target.VIDEO_SNAPSHOT;
+        assertFalse(overlayLayout.drawChild(canvas, child, 0));
+        params.drawOnVideoSnapshot = true;
+        assertTrue(overlayLayout.drawChild(canvas, child, 0));
+    }
+
+    @UiThreadTest
+    @Test
+    public void testDraw() {
+        Canvas canvas = new Canvas();
+        when(overlayLayout.drawsOn(Overlay.Target.PREVIEW)).thenReturn(false);
+        overlayLayout.draw(canvas);
+        verify(overlayLayout, never()).drawOn(Overlay.Target.PREVIEW, canvas);
+
+        when(overlayLayout.drawsOn(Overlay.Target.PREVIEW)).thenReturn(true);
+        overlayLayout.draw(canvas);
+        verify(overlayLayout, times(1)).drawOn(Overlay.Target.PREVIEW, canvas);
+    }
+
+    @UiThreadTest
+    @Test
+    public void testDrawOn() {
+        Canvas canvas = spy(new Canvas());
+        View child = new View(getContext());
+        OverlayLayout.LayoutParams params = new OverlayLayout.LayoutParams(10, 10);
+        params.drawOnPreview = true;
+        params.drawOnPictureSnapshot = true;
+        params.drawOnVideoSnapshot = true;
+        overlayLayout.addView(child, params);
+
+        overlayLayout.drawOn(Overlay.Target.PREVIEW, canvas);
+        verify(canvas, never()).scale(anyFloat(), anyFloat());
+        verify(overlayLayout, times(1)).doDrawChild(eq(canvas), eq(child), anyLong());
+        reset(canvas);
+        reset(overlayLayout);
+
+        overlayLayout.drawOn(Overlay.Target.PICTURE_SNAPSHOT, canvas);
+        verify(canvas, times(1)).scale(anyFloat(), anyFloat());
+        verify(overlayLayout, times(1)).doDrawChild(eq(canvas), eq(child), anyLong());
+        reset(canvas);
+        reset(overlayLayout);
+
+        overlayLayout.drawOn(Overlay.Target.VIDEO_SNAPSHOT, canvas);
+        verify(canvas, times(1)).scale(anyFloat(), anyFloat());
+        verify(overlayLayout, times(1)).doDrawChild(eq(canvas), eq(child), anyLong());
+        reset(canvas);
+        reset(overlayLayout);
+    }
+}

+ 43 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/picture/PictureRecorderTest.java

@@ -0,0 +1,43 @@
+package com.otaliastudios.cameraview.picture;
+
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.PictureResult;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+
+import java.lang.reflect.Constructor;
+
+import static org.junit.Assert.assertNull;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class PictureRecorderTest extends BaseTest {
+
+    @Test
+    public void testRecorder() throws Exception {
+        PictureResult.Stub result = createStub();
+        PictureRecorder.PictureResultListener listener = Mockito.mock(PictureRecorder.PictureResultListener.class);
+        PictureRecorder recorder = new PictureRecorder(result, listener) {
+            public void take() {
+                dispatchResult();
+            }
+        };
+        recorder.take();
+        Mockito.verify(listener, Mockito.times(1)).onPictureResult(result, null);
+        assertNull(recorder.mListener);
+        assertNull(recorder.mResult);
+    }
+
+    private PictureResult.Stub createStub() throws Exception {
+        Constructor<PictureResult.Stub> constructor = PictureResult.Stub.class.getDeclaredConstructor();
+        constructor.setAccessible(true);
+        return constructor.newInstance();
+    }
+}

+ 198 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/CameraPreviewTest.java

@@ -0,0 +1,198 @@
+package com.otaliastudios.cameraview.preview;
+
+
+import android.content.Context;
+import android.view.ViewGroup;
+
+import androidx.test.rule.ActivityTestRule;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.TestActivity;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.tools.Op;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+public abstract class CameraPreviewTest<T extends CameraPreview> extends BaseTest {
+
+    private final static long DELAY = 4000;
+
+    protected abstract T createPreview(Context context, ViewGroup parent);
+
+    @Rule
+    public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
+
+    protected T preview;
+    @SuppressWarnings("WeakerAccess")
+    protected Size surfaceSize;
+    private CameraPreview.SurfaceCallback callback;
+
+    private Op<Boolean> available;
+    private Op<Boolean> destroyed;
+
+    @Before
+    public void setUp() {
+        available = new Op<>();
+        destroyed = new Op<>();
+
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                TestActivity a = rule.getActivity();
+                surfaceSize = a.getContentSize();
+                callback = mock(CameraPreview.SurfaceCallback.class);
+
+                doAnswer(new Answer() {
+                    @Override
+                    public Object answer(InvocationOnMock invocation) {
+                        if (available != null) available.controller().end(true);
+                        return null;
+                    }
+                }).when(callback).onSurfaceAvailable();
+
+                doAnswer(new Answer() {
+                    @Override
+                    public Object answer(InvocationOnMock invocation) {
+                        if (destroyed != null) destroyed.controller().end(true);
+                        return null;
+                    }
+                }).when(callback).onSurfaceDestroyed();
+
+                preview = createPreview(a, a.getContentView());
+                preview.setSurfaceCallback(callback);
+            }
+        });
+    }
+
+    // Wait for surface to be available.
+    protected void ensureAvailable() {
+        assertNotNull(available.await(DELAY));
+    }
+
+    // Trigger a destroy.
+    protected void ensureDestroyed() {
+        uiSync(new Runnable() {
+            @Override
+            public void run() {
+                rule.getActivity().getContentView().removeView(preview.getRootView());
+            }
+        });
+        assertNotNull(destroyed.await(DELAY));
+    }
+
+    @After
+    public void tearDown() {
+        preview = null;
+        callback = null;
+        surfaceSize = null;
+        available = null;
+        destroyed = null;
+    }
+
+    @Test
+    public void testDefaults() {
+        ensureAvailable();
+        assertTrue(preview.hasSurface());
+        assertNotNull(preview.getView());
+        assertNotNull(preview.getRootView());
+        assertNotNull(preview.getOutputClass());
+    }
+
+    @Test
+    public void testDesiredSize() {
+        preview.setStreamSize(160, 90);
+        assertEquals(160, preview.getStreamSize().getWidth());
+        assertEquals(90, preview.getStreamSize().getHeight());
+    }
+
+    @Test
+    public void testSurfaceAvailable() {
+        ensureAvailable();
+        verify(callback, times(1)).onSurfaceAvailable();
+        assertEquals(surfaceSize.getWidth(), preview.getSurfaceSize().getWidth());
+        assertEquals(surfaceSize.getHeight(), preview.getSurfaceSize().getHeight());
+    }
+
+    @Test
+    public void testSurfaceDestroyed() {
+        ensureAvailable();
+        ensureDestroyed();
+        // This might be called twice in the Texture implementation, which overrides the ensureDestroyed method.
+        verify(callback, atLeastOnce()).onSurfaceDestroyed();
+        assertEquals(0, preview.getSurfaceSize().getWidth());
+        assertEquals(0, preview.getSurfaceSize().getHeight());
+    }
+
+    @Test
+    public void testCropCenter() {
+        ensureAvailable();
+
+        // This is given by the activity, it's the fixed size.
+        float view = getViewAspectRatio();
+
+        // If we apply a desired size with same aspect ratio, there should be no crop.
+        setDesiredAspectRatio(view);
+        assertFalse(preview.isCropping());
+
+        // If we apply a different aspect ratio, there should be cropping.
+        float desired = view * 1.2f;
+        if (preview.supportsCropping()) {
+            setDesiredAspectRatio(desired);
+            assertTrue(preview.isCropping());
+        }
+
+        // Since the desired ratio is 'desired', fake a new view size consistent with it
+        // and ensure cropping is not happening anymore.
+        preview.mCropCallback = mock(CameraPreview.CropCallback.class);
+        Op<Void> op = new Op<>();
+        doEndOp(op, null).when(preview.mCropCallback).onCrop();
+        preview.dispatchOnSurfaceSizeChanged((int) (50f * desired), 50);
+
+        op.await(); // Wait...
+        assertEquals(desired, getViewAspectRatioWithScale(), 0.01f);
+        assertFalse(preview.isCropping());
+    }
+
+    private void setDesiredAspectRatio(float desiredAspectRatio) {
+        preview.mCropCallback = mock(CameraPreview.CropCallback.class);
+        Op<Void> op = new Op<>();
+        doEndOp(op, null).when(preview.mCropCallback).onCrop();
+        preview.setStreamSize((int) (10f * desiredAspectRatio), 10);
+
+        op.await(); // Wait...
+        assertEquals(desiredAspectRatio, getViewAspectRatioWithScale(), 0.01f);
+    }
+
+    private float getViewAspectRatio() {
+        Size size = preview.getSurfaceSize();
+        return AspectRatio.of(size.getWidth(), size.getHeight()).toFloat();
+    }
+
+    private float getViewAspectRatioWithScale() {
+        Size size = preview.getSurfaceSize();
+        int newWidth = (int) (((float) size.getWidth()) * getCropScaleX());
+        int newHeight = (int) (((float) size.getHeight()) * getCropScaleY());
+        return AspectRatio.of(newWidth, newHeight).toFloat();
+    }
+
+    abstract protected float getCropScaleX();
+
+    abstract protected float getCropScaleY();
+}

+ 43 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/GlCameraPreviewTest.java

@@ -0,0 +1,43 @@
+package com.otaliastudios.cameraview.preview;
+
+
+import android.content.Context;
+import android.view.ViewGroup;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.filter.Filter;
+import com.otaliastudios.cameraview.filter.Filters;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class GlCameraPreviewTest extends CameraPreviewTest<GlCameraPreview> {
+
+    @Override
+    protected GlCameraPreview createPreview(Context context, ViewGroup parent) {
+        return new GlCameraPreview(context, parent);
+    }
+
+    @Override
+    protected float getCropScaleY() {
+        return 1F / preview.mCropScaleY;
+    }
+
+    @Override
+    protected float getCropScaleX() {
+        return 1F / preview.mCropScaleX;
+    }
+
+    @Test
+    public void testSetFilter() {
+        Filter filter = Filters.BLACK_AND_WHITE.newInstance();
+        preview.setFilter(filter);
+        assertEquals(filter, preview.getCurrentFilter());
+    }
+}

+ 62 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java

@@ -0,0 +1,62 @@
+package com.otaliastudios.cameraview.preview;
+
+
+import android.content.Context;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.filter.Filter;
+
+public class MockCameraPreview extends CameraPreview<View, Void> implements FilterCameraPreview {
+
+    public MockCameraPreview(Context context, ViewGroup parent) {
+        super(context, parent);
+    }
+
+    private View rootView;
+    private Filter filter;
+
+    @Override
+    public boolean supportsCropping() {
+        return true;
+    }
+
+    @NonNull
+    @Override
+    protected View onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
+        rootView = new View(context);
+        return rootView;
+    }
+
+    @NonNull
+    @Override
+    public Class<Void> getOutputClass() {
+        return null;
+    }
+
+    @NonNull
+    @Override
+    public Void getOutput() {
+        return null;
+    }
+
+
+    @NonNull
+    @Override
+    public View getRootView() {
+        return rootView;
+    }
+
+    @Override
+    public void setFilter(@NonNull Filter filter) {
+        this.filter = filter;
+    }
+
+    @NonNull
+    @Override
+    public Filter getCurrentFilter() {
+        return filter;
+    }
+}

+ 30 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreviewTest.java

@@ -0,0 +1,30 @@
+package com.otaliastudios.cameraview.preview;
+
+
+import android.content.Context;
+import android.view.ViewGroup;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import org.junit.runner.RunWith;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class SurfaceCameraPreviewTest extends CameraPreviewTest<SurfaceCameraPreview> {
+
+    @Override
+    protected SurfaceCameraPreview createPreview(Context context, ViewGroup parent) {
+        return new SurfaceCameraPreview(context, parent);
+    }
+
+    @Override
+    protected float getCropScaleX() {
+        return 1F;
+    }
+
+    @Override
+    protected float getCropScaleY() {
+        return 1F;
+    }
+}

+ 54 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/TextureCameraPreviewTest.java

@@ -0,0 +1,54 @@
+package com.otaliastudios.cameraview.preview;
+
+
+import android.content.Context;
+import android.view.ViewGroup;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import org.junit.runner.RunWith;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class TextureCameraPreviewTest extends CameraPreviewTest<TextureCameraPreview> {
+
+    @Override
+    protected TextureCameraPreview createPreview(Context context, ViewGroup parent) {
+        return new TextureCameraPreview(context, parent);
+    }
+
+    @Override
+    protected void ensureAvailable() {
+        if (isHardwareAccelerated()) {
+            super.ensureAvailable();
+        } else {
+            preview.dispatchOnSurfaceAvailable(
+                    surfaceSize.getWidth(),
+                    surfaceSize.getHeight());
+        }
+    }
+
+    @Override
+    protected void ensureDestroyed() {
+        super.ensureDestroyed();
+        if (!isHardwareAccelerated()) {
+            // Ensure it is called.
+            preview.dispatchOnSurfaceDestroyed();
+        }
+    }
+
+    private boolean isHardwareAccelerated() {
+        return preview.getView().isHardwareAccelerated();
+    }
+
+    @Override
+    protected float getCropScaleX() {
+        return preview.getView().getScaleX();
+    }
+
+    @Override
+    protected float getCropScaleY() {
+        return preview.getView().getScaleY();
+    }
+}

+ 211 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/size/SizeSelectorParserTest.java

@@ -0,0 +1,211 @@
+package com.otaliastudios.cameraview.size;
+
+
+import android.content.res.TypedArray;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.StyleableRes;
+import androidx.arch.core.util.Function;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.R;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class SizeSelectorParserTest extends BaseTest {
+
+    private MockTypedArray input;
+    private List<Size> sizes = Arrays.asList(
+            new Size(100, 200),
+            new Size(150, 300),
+            new Size(600, 900),
+            new Size(600, 600),
+            new Size(1600, 900),
+            new Size(30, 40),
+            new Size(40, 30),
+            new Size(2000, 4000)
+    );
+
+    @Before
+    public void setUp() {
+        input = new MockTypedArray();
+    }
+
+    @After
+    public void tearDown() {
+        input = null;
+    }
+
+    private void doAssert(@NonNull Function<List<Size>, Void> assertions) {
+        SizeSelectorParser parser = new SizeSelectorParser(input.array);
+        assertions.apply(parser.getPictureSizeSelector().select(sizes));
+        assertions.apply(parser.getVideoSizeSelector().select(sizes));
+    }
+
+    @Test
+    public void testWidth() {
+        input.setMinWidth(1500);
+        input.setMaxWidth(1700);
+        doAssert(new Function<List<Size>, Void>() {
+            @Override
+            public Void apply(List<Size> input) {
+                assertEquals(1, input.size());
+                assertEquals(new Size(1600, 900), input.get(0));
+                return null;
+            }
+        });
+    }
+
+    @Test
+    public void testHeight() {
+        input.setMinHeight(25);
+        input.setMaxHeight(35);
+        doAssert(new Function<List<Size>, Void>() {
+            @Override
+            public Void apply(List<Size> input) {
+                assertEquals(1, input.size());
+                assertEquals(new Size(40, 30), input.get(0));
+                return null;
+            }
+        });
+    }
+
+    @Test
+    public void testArea() {
+        input.setMinArea(30 * 30);
+        input.setMaxArea(40 * 40);
+        doAssert(new Function<List<Size>, Void>() {
+            @Override
+            public Void apply(List<Size> input) {
+                assertEquals(2, input.size());
+                assertTrue(input.contains(new Size(40, 30)));
+                assertTrue(input.contains(new Size(30, 40)));
+                return null;
+            }
+        });
+    }
+
+    @Test
+    public void testSmallest() {
+        input.setSmallest(true);
+        doAssert(new Function<List<Size>, Void>() {
+            @Override
+            public Void apply(List<Size> input) {
+                assertEquals(sizes.size(), input.size());
+                Size first = input.get(0);
+                assertEquals(30 * 40, first.getWidth() * first.getHeight());
+                return null;
+            }
+        });
+    }
+
+    @Test
+    public void testBiggest() {
+        input.setBiggest(true);
+        doAssert(new Function<List<Size>, Void>() {
+            @Override
+            public Void apply(List<Size> input) {
+                assertEquals(sizes.size(), input.size());
+                assertEquals(new Size(2000, 4000), input.get(0));
+                return null;
+            }
+        });
+    }
+
+    @Test
+    public void testAspectRatio() {
+        input.setAspectRatio("16:9");
+        doAssert(new Function<List<Size>, Void>() {
+            @Override
+            public Void apply(List<Size> input) {
+                assertEquals(1, input.size());
+                assertEquals(new Size(1600, 900), input.get(0));
+                return null;
+            }
+        });
+    }
+
+    @SuppressWarnings("SameParameterValue")
+    private class MockTypedArray {
+        private TypedArray array = mock(TypedArray.class);
+
+        private void setIntValue(@StyleableRes int index, int value) {
+            when(array.hasValue(index)).thenReturn(true);
+            when(array.getInteger(eq(index), anyInt())).thenReturn(value);
+        }
+
+        private void setBooleanValue(@StyleableRes int index, boolean value) {
+            when(array.hasValue(index)).thenReturn(true);
+            when(array.getBoolean(eq(index), anyBoolean())).thenReturn(value);
+        }
+
+        private void setStringValue(@StyleableRes int index, @NonNull String value) {
+            when(array.hasValue(index)).thenReturn(true);
+            when(array.getString(index)).thenReturn(value);
+        }
+
+        private void setMinWidth(int value) {
+            setIntValue(R.styleable.CameraView_cameraPictureSizeMinWidth, value);
+            setIntValue(R.styleable.CameraView_cameraVideoSizeMinWidth, value);
+        }
+
+        private void setMaxWidth(int value) {
+            setIntValue(R.styleable.CameraView_cameraPictureSizeMaxWidth, value);
+            setIntValue(R.styleable.CameraView_cameraVideoSizeMaxWidth, value);
+        }
+
+        private void setMinHeight(int value) {
+            setIntValue(R.styleable.CameraView_cameraPictureSizeMinHeight, value);
+            setIntValue(R.styleable.CameraView_cameraVideoSizeMinHeight, value);
+        }
+
+        private void setMaxHeight(int value) {
+            setIntValue(R.styleable.CameraView_cameraPictureSizeMaxHeight, value);
+            setIntValue(R.styleable.CameraView_cameraVideoSizeMaxHeight, value);
+        }
+
+        private void setMinArea(int value) {
+            setIntValue(R.styleable.CameraView_cameraPictureSizeMinArea, value);
+            setIntValue(R.styleable.CameraView_cameraVideoSizeMinArea, value);
+        }
+
+        private void setMaxArea(int value) {
+            setIntValue(R.styleable.CameraView_cameraPictureSizeMaxArea, value);
+            setIntValue(R.styleable.CameraView_cameraVideoSizeMaxArea, value);
+        }
+
+        private void setSmallest(boolean value) {
+            setBooleanValue(R.styleable.CameraView_cameraPictureSizeSmallest, value);
+            setBooleanValue(R.styleable.CameraView_cameraVideoSizeSmallest, value);
+        }
+
+        private void setBiggest(boolean value) {
+            setBooleanValue(R.styleable.CameraView_cameraPictureSizeBiggest, value);
+            setBooleanValue(R.styleable.CameraView_cameraVideoSizeBiggest, value);
+        }
+
+        private void setAspectRatio(@NonNull String value) {
+            setStringValue(R.styleable.CameraView_cameraPictureSizeAspectRatio, value);
+            setStringValue(R.styleable.CameraView_cameraVideoSizeAspectRatio, value);
+        }
+    }
+}

+ 12 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Emulator.java

@@ -0,0 +1,12 @@
+package com.otaliastudios.cameraview.tools;
+
+import android.os.Build;
+
+public class Emulator {
+    public static boolean isEmulator() {
+        // From Android's RequiresDeviceFilter
+        return Build.HARDWARE.equals("goldfish")
+                || Build.HARDWARE.equals("ranchu")
+                || Build.HARDWARE.equals("gce_x86");
+    }
+}

+ 148 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Op.java

@@ -0,0 +1,148 @@
+package com.otaliastudios.cameraview.tools;
+
+import androidx.annotation.NonNull;
+
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.android.gms.tasks.Task;
+
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.mockito.stubbing.Stubber;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A naive wrapper around {@link java.util.concurrent.CountDownLatch}
+ * to help in testing.
+ */
+public class Op<T> {
+
+    public class Controller {
+        private int mToBeIgnored;
+
+        private Controller() { }
+
+        /** Op owner method: notifies the action started. */
+        public void start() {
+            if (!isListening()) mToBeIgnored++;
+        }
+
+        /** Op owner method: notifies the action ended. */
+        public void end(T result) {
+            if (mToBeIgnored > 0) {
+                mToBeIgnored--;
+                return;
+            }
+
+            if (isListening()) { // Should always be true.
+                mResult = result;
+                mLatch.countDown();
+            }
+        }
+
+        public void from(@NonNull Task<T> task) {
+            start();
+            task.addOnSuccessListener(new OnSuccessListener<T>() {
+                @Override
+                public void onSuccess(T result) {
+                    end(result);
+                }
+            });
+        }
+
+        @NonNull
+        public Stubber from(final int invocationArgument) {
+            return Mockito.doAnswer(new Answer() {
+                @Override
+                public Object answer(InvocationOnMock invocation) {
+                    //noinspection unchecked
+                    T o = (T) invocation.getArguments()[invocationArgument];
+                    start();
+                    end(o);
+                    return null;
+                }
+            });
+        }
+    }
+
+    private CountDownLatch mLatch;
+    private Controller mController = new Controller();
+    private T mResult;
+
+    /**
+     * Listeners should:
+     * - call {@link #listen()} to notify they are interested in the next action
+     * - call {@link #await()} to know when the action is performed.
+     *
+     * Op owners should:
+     * - call {@link Controller#start()} when task started
+     * - call {@link Controller#end(Object)} when task ends
+     */
+    public Op() {
+        this(true);
+    }
+
+    public Op(boolean startListening) {
+        if (startListening) listen();
+    }
+
+    public Op(@NonNull Task<T> task) {
+        listen();
+        controller().from(task);
+    }
+
+    private boolean isListening() {
+        return mLatch != null;
+    }
+
+    /**
+     * Listener method: notifies we are interested in the next action.
+     */
+    public void listen() {
+        if (isListening()) throw new RuntimeException("Should not happen.");
+        mResult = null;
+        mLatch = new CountDownLatch(1);
+    }
+
+    /**
+     * Listener method: waits for next task action to end.
+     * @param millis milliseconds
+     * @return the action result
+     */
+    public T await(long millis) {
+        return await(millis, TimeUnit.MILLISECONDS);
+    }
+
+    /**
+     * Listener method: waits 1 minute for next task action to end.
+     * @return the action result
+     */
+    public T await() {
+        return await(1, TimeUnit.MINUTES);
+    }
+
+    /**
+     * Listener method: waits for next task action to end.
+     * @param time time
+     * @param unit the time unit
+     * @return the action result
+     */
+    private T await(long time, @NonNull TimeUnit unit) {
+        try {
+            mLatch.await(time, unit);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        T result = mResult;
+        mResult = null;
+        mLatch = null;
+        return result;
+    }
+
+    @NonNull
+    public Controller controller() {
+        return mController;
+    }
+}
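A minimal usage sketch for Op, assuming an asynchronous API under test that reports completion through a callback (SomeAsyncApi and its Callback are hypothetical names used only for illustration):

    final Op<Boolean> op = new Op<>();                  // starts listening immediately
    someAsyncApi.doWork(new SomeAsyncApi.Callback() {   // hypothetical API under test
        @Override
        public void onDone() {
            op.controller().end(true);                  // unblocks the waiting test thread
        }
    });
    Boolean result = op.await(4000);                    // null if the callback never fires within 4 seconds
    assertNotNull(result);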

+ 9 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Retry.java

@@ -0,0 +1,9 @@
+package com.otaliastudios.cameraview.tools;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME)
+public @interface Retry {
+    boolean emulatorOnly() default false;
+}

+ 50 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/RetryRule.java

@@ -0,0 +1,50 @@
+package com.otaliastudios.cameraview.tools;
+
+import com.otaliastudios.cameraview.CameraLogger;
+
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class RetryRule implements TestRule {
+
+    private final static String TAG = RetryRule.class.getSimpleName();
+    private final static CameraLogger LOG = CameraLogger.create(TAG);
+
+    private AtomicInteger retries;
+
+    public RetryRule(int retries) {
+        this.retries = new AtomicInteger(retries);
+    }
+
+    @Override
+    public Statement apply(final Statement base, final Description description) {
+        return new Statement() {
+            @Override
+            public void evaluate() throws Throwable {
+                Retry retry = description.getAnnotation(Retry.class);
+                if (retry == null || retry.emulatorOnly() && !Emulator.isEmulator()) {
+                    base.evaluate();
+                } else {
+                    Throwable caught = null;
+                    while (retries.getAndDecrement() > 0) {
+                        try {
+                            base.evaluate();
+                            return;
+                        } catch (Throwable throwable) {
+                            LOG.e("[RETRY] Test failed.", retries.get(),
+                                    "retries available...");
+                            LOG.e("*******************************************************");
+                            caught = throwable;
+                        }
+                    }
+                    if (caught != null) {
+                        throw caught;
+                    }
+                }
+            }
+        };
+    }
+}
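Combined with the Retry annotation above, usage inside a test class would presumably look like the sketch below (the test name is a placeholder); RetryRule(3) allows up to three attempts before the last failure is rethrown.

    @Rule
    public final RetryRule retryRule = new RetryRule(3);

    @Test
    @Retry(emulatorOnly = true) // only retried when running on an emulator
    public void testFlakyEmulatorBehavior() {
        // body that occasionally fails on emulators
    }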

+ 20 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExclude.java

@@ -0,0 +1,20 @@
+package com.otaliastudios.cameraview.tools;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Like {@link androidx.test.filters.SdkSuppress}, but negative.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface SdkExclude {
+    /** The minimum API level to drop (inclusive) */
+    int minSdkVersion() default 1;
+    /** The maximum API level to drop (inclusive) */
+    int maxSdkVersion() default Integer.MAX_VALUE;
+    /** Whether this filter only applies to emulators */
+    boolean emulatorOnly() default false;
+}

+ 47 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExcludeFilter.java

@@ -0,0 +1,47 @@
+package com.otaliastudios.cameraview.tools;
+
+
+import android.os.Build;
+
+import androidx.annotation.Nullable;
+import androidx.test.internal.runner.filters.ParentFilter;
+
+import org.junit.runner.Description;
+
+/**
+ * Filter for {@link SdkExclude}, based on
+ * {@link androidx.test.internal.runner.TestRequestBuilder}'s SdkSuppressFilter.
+ */
+public class SdkExcludeFilter extends ParentFilter {
+
+    protected boolean evaluateTest(Description description) {
+        SdkExclude annotation = getAnnotationForTest(description);
+        if (annotation != null) {
+            if ((!annotation.emulatorOnly() || Emulator.isEmulator())
+                    && Build.VERSION.SDK_INT >= annotation.minSdkVersion()
+                    && Build.VERSION.SDK_INT <= annotation.maxSdkVersion()) {
+                return false; // exclude the test
+            }
+            return true; // run the test
+        }
+        return true; // no annotation, run the test
+    }
+
+    @Nullable
+    private SdkExclude getAnnotationForTest(Description description) {
+        final SdkExclude s = description.getAnnotation(SdkExclude.class);
+        if (s != null) {
+            return s;
+        }
+        final Class<?> testClass = description.getTestClass();
+        if (testClass != null) {
+            return testClass.getAnnotation(SdkExclude.class);
+        }
+        return null;
+    }
+
+    @Override
+    public String describe() {
+        return "Skip tests annotated with SdkExclude";
+    }
+}
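The annotation is applied directly to a test method or class; the filter itself is presumably wired in through the instrumentation runner configuration, which is not part of this file. A sketch of the annotation in use (the test name is a placeholder):

    @SdkExclude(minSdkVersion = 21, maxSdkVersion = 22, emulatorOnly = true)
    @Test
    public void testSkippedOnLollipopEmulators() {
        // will not run on API 21-22 emulators
    }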

+ 20 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkInclude.java

@@ -0,0 +1,20 @@
+package com.otaliastudios.cameraview.tools;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Like {@link androidx.test.filters.SdkSuppress}, but with emulatorOnly().
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface SdkInclude {
+    /** The minimum API level to run (inclusive) */
+    int minSdkVersion() default 1;
+    /** The maximum API level to run (inclusive) */
+    int maxSdkVersion() default Integer.MAX_VALUE;
+    /** Whether this filter only applies to emulators */
+    boolean emulatorOnly() default false;
+}

+ 47 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkIncludeFilter.java

@@ -0,0 +1,47 @@
+package com.otaliastudios.cameraview.tools;
+
+
+import android.os.Build;
+
+import androidx.annotation.Nullable;
+import androidx.test.internal.runner.filters.ParentFilter;
+
+import org.junit.runner.Description;
+
+/**
+ * Filter for {@link SdkInclude}, based on
+ * {@link androidx.test.internal.runner.TestRequestBuilder}'s SdkSuppressFilter.
+ */
+public class SdkIncludeFilter extends ParentFilter {
+
+    protected boolean evaluateTest(Description description) {
+        SdkInclude annotation = getAnnotationForTest(description);
+        if (annotation != null) {
+            if ((!annotation.emulatorOnly() || Emulator.isEmulator())
+                    && Build.VERSION.SDK_INT >= annotation.minSdkVersion()
+                    && Build.VERSION.SDK_INT <= annotation.maxSdkVersion()) {
+                return true; // run the test
+            }
+            return false; // drop the test
+        }
+        return true; // no annotation, run the test
+    }
+
+    @Nullable
+    private SdkInclude getAnnotationForTest(Description description) {
+        final SdkInclude s = description.getAnnotation(SdkInclude.class);
+        if (s != null) {
+            return s;
+        }
+        final Class<?> testClass = description.getTestClass();
+        if (testClass != null) {
+            return testClass.getAnnotation(SdkInclude.class);
+        }
+        return null;
+    }
+
+    @Override
+    public String describe() {
+        return "Skip tests annotated with SdkInclude";
+    }
+}

+ 52 - 0
cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java

@@ -0,0 +1,52 @@
+package com.otaliastudios.cameraview.video;
+
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.VideoResult;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+
+import java.lang.reflect.Constructor;
+
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class VideoRecorderTest extends BaseTest {
+
+    @Test
+    public void testRecorder() throws Exception {
+        VideoResult.Stub result = createStub();
+        VideoRecorder.VideoResultListener listener = Mockito.mock(VideoRecorder.VideoResultListener.class);
+        VideoRecorder recorder = new VideoRecorder(listener) {
+            @Override
+            protected void onStart() {
+                dispatchVideoRecordingStart();
+            }
+
+            @Override
+            protected void onStop(boolean isCameraShutdown) {
+                dispatchVideoRecordingEnd();
+                dispatchResult();
+            }
+        };
+        recorder.start(result);
+        Mockito.verify(listener, Mockito.times(1))
+                .onVideoRecordingStart();
+        recorder.stop(false);
+        Mockito.verify(listener, Mockito.times(1))
+                .onVideoRecordingEnd();
+        Mockito.verify(listener, Mockito.times(1))
+                .onVideoResult(result, null);
+    }
+
+    private VideoResult.Stub createStub() throws Exception {
+        Constructor<VideoResult.Stub> constructor = VideoResult.Stub.class.getDeclaredConstructor();
+        constructor.setAccessible(true);
+        return constructor.newInstance();
+    }
+}

+ 4 - 0
cameraview/src/androidTest/res/layout/not_overlay.xml

@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"/>

+ 8 - 0
cameraview/src/androidTest/res/layout/overlay.xml

@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    app:layout_drawOnPreview="true"
+    app:layout_drawOnPictureSnapshot="true"
+    app:layout_drawOnVideoSnapshot="true"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"/>

+ 28 - 0
cameraview/src/main/AndroidManifest.xml

@@ -0,0 +1,28 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    package="com.otaliastudios.cameraview">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+
+    <!-- Have developers add this. We don't want AUDIO permission to be auto-added to
+         apps that just want to take pictures. -->
+    <!-- uses-permission android:name="android.permission.RECORD_AUDIO" /-->
+
+    <uses-feature
+        android:name="android.hardware.camera"
+        android:required="false"/>
+    <uses-feature
+        android:name="android.hardware.camera.autofocus"
+        android:required="false"/>
+    <uses-feature
+        android:name="android.hardware.camera.front"
+        android:required="false"/>
+    <uses-feature
+        android:name="android.hardware.microphone"
+        android:required="false"/>
+
+    <uses-sdk tools:overrideLibrary="com.otaliastudios.opengl" />
+
+    <application/>
+
+</manifest>

+ 22 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/BitmapCallback.java

@@ -0,0 +1,22 @@
+package com.otaliastudios.cameraview;
+
+import android.graphics.Bitmap;
+
+import androidx.annotation.Nullable;
+import androidx.annotation.UiThread;
+
+/**
+ * Receives callbacks about a bitmap decoding operation.
+ */
+public interface BitmapCallback {
+
+    /**
+     * Notifies that the bitmap was successfully decoded.
+     * This is run on the UI thread.
+     * The bitmap will be null if an {@link OutOfMemoryError} was encountered while decoding.
+     *
+     * @param bitmap decoded bitmap, or null
+     */
+    @UiThread
+    void onBitmapReady(@Nullable Bitmap bitmap);
+}
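As referenced by the CameraListener documentation further below, PictureResult#toBitmap(int, int, BitmapCallback) delivers its result through this interface. A minimal caller sketch, where result is the PictureResult delivered to the listener and the maximum size values and the ImageView are placeholders:

    result.toBitmap(1000, 1000, new BitmapCallback() {
        @Override
        public void onBitmapReady(@Nullable Bitmap bitmap) {
            // bitmap is null if decoding hit an OutOfMemoryError.
            if (bitmap != null) imageView.setImageBitmap(bitmap);
        }
    });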

+ 91 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/CameraException.java

@@ -0,0 +1,91 @@
+package com.otaliastudios.cameraview;
+
+
+import com.otaliastudios.cameraview.controls.Facing;
+
+/**
+ * Holds an error with the camera configuration.
+ */
+public class CameraException extends RuntimeException {
+
+    /**
+     * Unknown error. No further info available.
+     */
+    public static final int REASON_UNKNOWN = 0;
+
+    /**
+     * We failed to connect to the camera service.
+     * The camera might be in use by another app.
+     */
+    public static final int REASON_FAILED_TO_CONNECT = 1;
+
+    /**
+     * Failed to start the camera preview.
+     * Again, the camera might be in use by another app.
+     */
+    public static final int REASON_FAILED_TO_START_PREVIEW = 2;
+
+    /**
+     * Camera was forced to disconnect.
+     * In Camera1, this is thrown when android.hardware.Camera.CAMERA_ERROR_EVICTED
+     * is caught.
+     */
+    public static final int REASON_DISCONNECTED = 3;
+
+    /**
+     * Could not take a picture or a picture snapshot,
+     * for some unspecified reason.
+     */
+    public static final int REASON_PICTURE_FAILED = 4;
+
+    /**
+     * Could not take a video or a video snapshot,
+     * for some unspecified reason.
+     */
+    public static final int REASON_VIDEO_FAILED = 5;
+
+    /**
+     * Indicates that we could not find a camera for the current {@link Facing}
+     * value.
+     * This can be solved by changing the facing value and starting again.
+     */
+    public static final int REASON_NO_CAMERA = 6;
+
+    private int reason = REASON_UNKNOWN;
+
+    @SuppressWarnings("WeakerAccess")
+    public CameraException(Throwable cause) {
+        super(cause);
+    }
+
+    public CameraException(Throwable cause, int reason) {
+        super(cause);
+        this.reason = reason;
+    }
+
+    public CameraException(int reason) {
+        super();
+        this.reason = reason;
+    }
+
+    public int getReason() {
+        return reason;
+    }
+
+    /**
+     * Whether this error is unrecoverable. If this function returns true,
+     * the Camera has been closed (or will be soon) and it is likely showing a black preview.
+     * This is the right moment to show an error dialog to the user.
+     *
+     * @return true if this error is unrecoverable
+     */
+    @SuppressWarnings("unused")
+    public boolean isUnrecoverable() {
+        switch (getReason()) {
+            case REASON_FAILED_TO_CONNECT: return true;
+            case REASON_FAILED_TO_START_PREVIEW: return true;
+            case REASON_DISCONNECTED: return true;
+            default: return false;
+        }
+    }
+}
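A hedged sketch of how a listener might react to these reason codes, using only the API declared in this file and in CameraListener below; showErrorDialog is a placeholder:

    @Override
    public void onCameraError(@NonNull CameraException exception) {
        if (exception.isUnrecoverable()) {
            // The camera is closed or closing and the preview is likely black.
            showErrorDialog(exception.getReason()); // placeholder UI method
        } else if (exception.getReason() == CameraException.REASON_PICTURE_FAILED) {
            // Non-fatal: the preview keeps running and the capture can simply be retried.
        }
    }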

+ 165 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java

@@ -0,0 +1,165 @@
+package com.otaliastudios.cameraview;
+
+import android.graphics.PointF;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.UiThread;
+
+/**
+ * The base class for receiving updates from a {@link CameraView} instance.
+ * You can add and remove listeners using {@link CameraView#addCameraListener(CameraListener)}
+ * and {@link CameraView#removeCameraListener(CameraListener)}.
+ */
+@SuppressWarnings({"WeakerAccess", "unused"})
+public abstract class CameraListener {
+
+
+    /**
+     * Notifies that the camera was opened.
+     * The {@link CameraOptions} object collects all supported options by the current camera.
+     *
+     * @param options camera supported options
+     */
+    @UiThread
+    public void onCameraOpened(@NonNull CameraOptions options) { }
+
+
+    /**
+     * Notifies that the camera session was closed.
+     */
+    @UiThread
+    public void onCameraClosed() { }
+
+
+    /**
+     * Notifies about an error during the camera setup or configuration.
+     *
+     * At this point you should inspect the {@link CameraException} reason using
+     * {@link CameraException#getReason()} and see what should be done, if anything.
+     * If the error is unrecoverable, this is the right moment to show an error dialog, for example.
+     *
+     * @param exception the error
+     */
+    @UiThread
+    public void onCameraError(@NonNull CameraException exception) { }
+
+
+    /**
+     * Notifies that a picture previously captured with {@link CameraView#takePicture()}
+     * or {@link CameraView#takePictureSnapshot()} is ready to be shown or saved to file.
+     *
+     * If planning to show a bitmap, you can use
+     * {@link PictureResult#toBitmap(int, int, BitmapCallback)} to decode the byte array
+     * taking care about orientation and threading.
+     *
+     * @param result captured picture
+     */
+    @UiThread
+    public void onPictureTaken(@NonNull PictureResult result) { }
+
+
+    /**
+     * Notifies that a video capture has just ended.
+     *
+     * @param result the video result
+     */
+    @UiThread
+    public void onVideoTaken(@NonNull VideoResult result) { }
+
+
+    /**
+     * Notifies that the device was tilted or the window offset changed.
+     * The orientation passed is exactly the counter-clockwise rotation that a View should have,
+     * in order to appear correctly oriented to the user, considering the way she is
+     * holding the device, and the native activity orientation.
+     *
+     * This is meant to be used for aligning views (e.g. buttons) to the current camera viewport.
+     *
+     * @param orientation either 0, 90, 180 or 270
+     */
+    @UiThread
+    public void onOrientationChanged(int orientation) { }
+
+
+    /**
+     * Notifies that the user interacted with the screen and started metering with a gesture,
+     * and that the touch metering routine is trying to focus around that area.
+     * This callback can be used to draw things on screen.
+     * Can also be triggered by {@link CameraView#startAutoFocus(float, float)}.
+     *
+     * @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
+     */
+    @UiThread
+    public void onAutoFocusStart(@NonNull PointF point) { }
+
+
+    /**
+     * Notifies that a touch metering event just ended, and the camera converged
+     * to a new focus, exposure and possibly white balance.
+     * The metering might have succeeded or not.
+     * Can also be triggered by {@link CameraView#startAutoFocus(float, float)}.
+     *
+     * @param successful whether metering succeeded
+     * @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
+     */
+    @UiThread
+    public void onAutoFocusEnd(boolean successful, @NonNull PointF point) { }
+
+
+    /**
+     * Notifies that a finger gesture just caused the camera zoom
+     * to be changed. This can be used to draw, for example, a seek bar.
+     *
+     * @param newValue the new zoom value
+     * @param bounds min and max bounds for newValue (fixed to 0 ... 1)
+     * @param fingers finger positions that caused the event, null if not caused by touch
+     */
+    @UiThread
+    public void onZoomChanged(float newValue,
+                              @NonNull float[] bounds,
+                              @Nullable PointF[] fingers) { }
+
+
+    /**
+     * Notifies that a finger gesture just caused the camera exposure correction
+     * to be changed. This can be used to draw, for example, a seek bar.
+     *
+     * @param newValue the new correction value
+     * @param bounds min and max bounds for newValue, as returned by {@link CameraOptions}
+     * @param fingers finger positions that caused the event, null if not caused by touch
+     */
+    @UiThread
+    public void onExposureCorrectionChanged(float newValue,
+                                            @NonNull float[] bounds,
+                                            @Nullable PointF[] fingers) { }
+
+
+    /**
+     * Notifies that the actual video recording has started.
+     * This is the moment when frames actually start being recorded.
+     *
+     * This can be used to show some UI indicator for video recording or counting time.
+     *
+     * @see #onVideoRecordingEnd()
+     */
+    @UiThread
+    public void onVideoRecordingStart() { }
+
+    /**
+     * Notifies that the actual video recording has ended.
+     * At this point recording has ended, though the file might still be processed.
+     * The {@link #onVideoTaken(VideoResult)} callback will be called soon.
+     *
+     * This can be used to remove UI indicators for video recording.
+     *
+     * @see #onVideoRecordingStart()
+     */
+    @UiThread
+    public void onVideoRecordingEnd() { }
+
+}
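A minimal usage sketch of the listener above. It assumes a CameraView instance named camera and an addCameraListener registration method on CameraView (whose diff is not displayed further below), so treat the wiring as illustrative:

    // Sketch: react to focus and zoom events, e.g. to drive UI feedback.
    // `camera` is an assumed CameraView field of the hosting Activity/Fragment.
    camera.addCameraListener(new CameraListener() {
        @Override
        public void onAutoFocusStart(@NonNull PointF point) {
            // show a focus indicator centered at `point`
        }

        @Override
        public void onAutoFocusEnd(boolean successful, @NonNull PointF point) {
            // hide the indicator, or tint it based on `successful`
        }

        @Override
        public void onZoomChanged(float newValue,
                                  @NonNull float[] bounds,
                                  @Nullable PointF[] fingers) {
            // update a seek bar; bounds is fixed to {0, 1}
        }
    });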

+ 203 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/CameraLogger.java

@@ -0,0 +1,203 @@
+package com.otaliastudios.cameraview;
+
+import android.util.Log;
+
+import androidx.annotation.IntDef;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
+
+/**
+ * Utility class that can log traces and info.
+ */
+@SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
+public final class CameraLogger {
+
+    public final static int LEVEL_VERBOSE = 0;
+    public final static int LEVEL_INFO = 1;
+    public final static int LEVEL_WARNING = 2;
+    public final static int LEVEL_ERROR = 3;
+
+    /**
+     * Interface of integers representing log levels.
+     * @see #LEVEL_VERBOSE
+     * @see #LEVEL_INFO
+     * @see #LEVEL_WARNING
+     * @see #LEVEL_ERROR
+     */
+    @IntDef({LEVEL_VERBOSE, LEVEL_INFO, LEVEL_WARNING, LEVEL_ERROR})
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface LogLevel {}
+
+    /**
+     * A Logger can listen to internal log events
+     * and log them to different providers.
+     * The default logger will simply post to logcat.
+     */
+    public interface Logger {
+
+        /**
+         * Notifies that an internal log event was just triggered.
+         *
+         * @param level the log level
+         * @param tag the log tag
+         * @param message the log message
+         * @param throwable an optional throwable
+         */
+        void log(@LogLevel int level,
+                 @NonNull String tag,
+                 @NonNull String message,
+                 @Nullable Throwable throwable);
+    }
+
+    @VisibleForTesting static String lastMessage;
+    @VisibleForTesting static String lastTag;
+
+    private static int sLevel;
+    private static Set<Logger> sLoggers = new CopyOnWriteArraySet<>();
+
+    @VisibleForTesting static Logger sAndroidLogger = new Logger() {
+        @Override
+        public void log(int level,
+                        @NonNull String tag,
+                        @NonNull String message,
+                        @Nullable Throwable throwable) {
+            switch (level) {
+                case LEVEL_VERBOSE: Log.v(tag, message, throwable); break;
+                case LEVEL_INFO: Log.i(tag, message, throwable); break;
+                case LEVEL_WARNING: Log.w(tag, message, throwable); break;
+                case LEVEL_ERROR: Log.e(tag, message, throwable); break;
+            }
+        }
+    };
+
+    static {
+        setLogLevel(LEVEL_ERROR);
+        sLoggers.add(sAndroidLogger);
+    }
+
+    /**
+     * Creates a CameraLogger with the given tag. Messages logged through it
+     * are dispatched to all registered {@link Logger}s.
+     *
+     * @param tag the logger tag
+     * @return a new CameraLogger
+     */
+    public static CameraLogger create(@NonNull String tag) {
+        return new CameraLogger(tag);
+    }
+
+    /**
+     * Sets the log level for log events. Messages below this level are not dispatched.
+     *
+     * @see #LEVEL_VERBOSE
+     * @see #LEVEL_INFO
+     * @see #LEVEL_WARNING
+     * @see #LEVEL_ERROR
+     * @param logLevel the desired log level
+     */
+    public static void setLogLevel(@LogLevel int logLevel) {
+        sLevel = logLevel;
+    }
+
+    /**
+     * Registers an external {@link Logger} for log events.
+     * Make sure to unregister using {@link #unregisterLogger(Logger)}.
+     *
+     * @param logger logger to add
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static void registerLogger(@NonNull Logger logger) {
+        sLoggers.add(logger);
+    }
+
+    /**
+     * Unregisters a previously registered {@link Logger} for log events.
+     * This is needed in order to avoid leaks.
+     *
+     * @param logger logger to remove
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static void unregisterLogger(@NonNull Logger logger) {
+        sLoggers.remove(logger);
+    }
+
+    @NonNull
+    private String mTag;
+
+    private CameraLogger(@NonNull String tag) {
+        mTag = tag;
+    }
+
+    private boolean should(int messageLevel) {
+        return sLevel <= messageLevel && sLoggers.size() > 0;
+    }
+
+    /**
+     * Log to the verbose channel.
+     * @param data log contents
+     * @return the log message, if logged
+     */
+    @Nullable
+    public String v(@NonNull Object... data) {
+        return log(LEVEL_VERBOSE, data);
+    }
+
+    /**
+     * Log to the info channel.
+     * @param data log contents
+     * @return the log message, if logged
+     */
+    @Nullable
+    public String i(@NonNull Object... data) {
+        return log(LEVEL_INFO, data);
+    }
+
+    /**
+     * Log to the warning channel.
+     * @param data log contents
+     * @return the log message, if logged
+     */
+    @Nullable
+    public String w(@NonNull Object... data) {
+        return log(LEVEL_WARNING, data);
+    }
+
+    /**
+     * Log to the error channel.
+     * @param data log contents
+     * @return the log message, if logged
+     */
+    @Nullable
+    public String e(@NonNull Object... data) {
+        return log(LEVEL_ERROR, data);
+    }
+
+    @Nullable
+    private String log(@LogLevel int level, @NonNull Object... data) {
+        if (!should(level)) return null;
+
+        StringBuilder message = new StringBuilder();
+        Throwable throwable = null;
+        for (Object object : data) {
+            if (object instanceof Throwable) {
+                throwable = (Throwable) object;
+            }
+            message.append(String.valueOf(object));
+            message.append(" ");
+        }
+        String string = message.toString().trim();
+        for (Logger logger : sLoggers) {
+            logger.log(level, mTag, string, throwable);
+        }
+        lastMessage = string;
+        lastTag = mTag;
+        return string;
+    }
+}
+
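A short sketch of the public API above: raising the log level and attaching a custom Logger. The extra sink mentioned in the comment is illustrative.

    // Dispatch library logs to an extra sink besides logcat.
    CameraLogger.setLogLevel(CameraLogger.LEVEL_INFO);
    CameraLogger.Logger extraLogger = new CameraLogger.Logger() {
        @Override
        public void log(int level, @NonNull String tag, @NonNull String message,
                        @Nullable Throwable throwable) {
            // forward to a file, a crash reporter, etc.
        }
    };
    CameraLogger.registerLogger(extraLogger);

    // Per-tag loggers join their arguments with spaces and extract Throwables:
    CameraLogger log = CameraLogger.create("MyComponent");
    log.i("Opened camera with id", 0);               // message: "Opened camera with id 0"
    log.e("Capture failed", new RuntimeException()); // the Throwable is passed to loggers too

    // Unregister when done, to avoid leaks:
    CameraLogger.unregisterLogger(extraLogger);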

+ 313 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java

@@ -0,0 +1,313 @@
+package com.otaliastudios.cameraview;
+
+
+import android.graphics.ImageFormat;
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.AudioCodec;
+import com.otaliastudios.cameraview.controls.Control;
+import com.otaliastudios.cameraview.controls.Engine;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Grid;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.Mode;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.controls.Preview;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.gesture.GestureAction;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Options telling you what is available and what is not.
+ */
+public abstract class CameraOptions {
+
+    protected Set<WhiteBalance> supportedWhiteBalance = new HashSet<>(5);
+    protected Set<Facing> supportedFacing = new HashSet<>(2);
+    protected Set<Flash> supportedFlash = new HashSet<>(4);
+    protected Set<Hdr> supportedHdr = new HashSet<>(2);
+    protected Set<Size> supportedPictureSizes = new HashSet<>(15);
+    protected Set<Size> supportedVideoSizes = new HashSet<>(5);
+    protected Set<AspectRatio> supportedPictureAspectRatio = new HashSet<>(4);
+    protected Set<AspectRatio> supportedVideoAspectRatio = new HashSet<>(3);
+    protected Set<PictureFormat> supportedPictureFormats = new HashSet<>(2);
+    protected Set<Integer> supportedFrameProcessingFormats = new HashSet<>(2);
+
+    protected boolean zoomSupported;
+    protected boolean exposureCorrectionSupported;
+    protected float exposureCorrectionMinValue;
+    protected float exposureCorrectionMaxValue;
+    protected boolean autoFocusSupported;
+    protected float previewFrameRateMinValue;
+    protected float previewFrameRateMaxValue;
+
+    protected CameraOptions() { }
+
+    /**
+     * Shorthand for getSupported*().contains(value).
+     *
+     * @param control value to check
+     * @return whether it's supported
+     */
+    public final boolean supports(@NonNull Control control) {
+        return getSupportedControls(control.getClass()).contains(control);
+    }
+
+    /**
+     * Shorthand for other methods in this class,
+     * e.g. supports(GestureAction.ZOOM) == isZoomSupported().
+     *
+     * @param action value to be checked
+     * @return whether it's supported
+     */
+    public final boolean supports(@NonNull GestureAction action) {
+        switch (action) {
+            case AUTO_FOCUS:
+                return isAutoFocusSupported();
+            case TAKE_PICTURE:
+            case FILTER_CONTROL_1:
+            case FILTER_CONTROL_2:
+            case NONE:
+                return true;
+            case ZOOM:
+                return isZoomSupported();
+            case EXPOSURE_CORRECTION:
+                return isExposureCorrectionSupported();
+        }
+        return false;
+    }
+
+    @SuppressWarnings("unchecked")
+    @NonNull
+    public final <T extends Control> Collection<T> getSupportedControls(
+            @NonNull Class<T> controlClass) {
+        if (controlClass.equals(Audio.class)) {
+            return (Collection<T>) Arrays.asList(Audio.values());
+        } else if (controlClass.equals(Facing.class)) {
+            return (Collection<T>) getSupportedFacing();
+        } else if (controlClass.equals(Flash.class)) {
+            return (Collection<T>) getSupportedFlash();
+        } else if (controlClass.equals(Grid.class)) {
+            return (Collection<T>) Arrays.asList(Grid.values());
+        } else if (controlClass.equals(Hdr.class)) {
+            return (Collection<T>) getSupportedHdr();
+        } else if (controlClass.equals(Mode.class)) {
+            return (Collection<T>) Arrays.asList(Mode.values());
+        } else if (controlClass.equals(VideoCodec.class)) {
+            return (Collection<T>) Arrays.asList(VideoCodec.values());
+        } else if (controlClass.equals(AudioCodec.class)) {
+            return (Collection<T>) Arrays.asList(AudioCodec.values());
+        } else if (controlClass.equals(WhiteBalance.class)) {
+            return (Collection<T>) getSupportedWhiteBalance();
+        } else if (controlClass.equals(Engine.class)) {
+            return (Collection<T>) Arrays.asList(Engine.values());
+        } else if (controlClass.equals(Preview.class)) {
+            return (Collection<T>) Arrays.asList(Preview.values());
+        } else if (controlClass.equals(PictureFormat.class)) {
+            return (Collection<T>) getSupportedPictureFormats();
+        }
+        // Unrecognized control.
+        return Collections.emptyList();
+    }
+
+    /**
+     * Set of supported picture sizes for the currently opened camera.
+     *
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<Size> getSupportedPictureSizes() {
+        return Collections.unmodifiableSet(supportedPictureSizes);
+    }
+
+    /**
+     * Set of supported picture aspect ratios for the currently opened camera.
+     *
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<AspectRatio> getSupportedPictureAspectRatios() {
+        return Collections.unmodifiableSet(supportedPictureAspectRatio);
+    }
+
+    /**
+     * Set of supported video sizes for the currently opened camera.
+     *
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<Size> getSupportedVideoSizes() {
+        return Collections.unmodifiableSet(supportedVideoSizes);
+    }
+
+    /**
+     * Set of supported video aspect ratios for the currently opened camera.
+     *
+     * @return a set of supported values.
+     */
+    @NonNull
+    public final Collection<AspectRatio> getSupportedVideoAspectRatios() {
+        return Collections.unmodifiableSet(supportedVideoAspectRatio);
+    }
+
+    /**
+     * Set of supported facing values.
+     *
+     * @see Facing#BACK
+     * @see Facing#FRONT
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<Facing> getSupportedFacing() {
+        return Collections.unmodifiableSet(supportedFacing);
+    }
+
+    /**
+     * Set of supported flash values.
+     *
+     * @see Flash#AUTO
+     * @see Flash#OFF
+     * @see Flash#ON
+     * @see Flash#TORCH
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<Flash> getSupportedFlash() {
+        return Collections.unmodifiableSet(supportedFlash);
+    }
+
+    /**
+     * Set of supported white balance values.
+     *
+     * @see WhiteBalance#AUTO
+     * @see WhiteBalance#INCANDESCENT
+     * @see WhiteBalance#FLUORESCENT
+     * @see WhiteBalance#DAYLIGHT
+     * @see WhiteBalance#CLOUDY
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<WhiteBalance> getSupportedWhiteBalance() {
+        return Collections.unmodifiableSet(supportedWhiteBalance);
+    }
+
+    /**
+     * Set of supported hdr values.
+     *
+     * @see Hdr#OFF
+     * @see Hdr#ON
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<Hdr> getSupportedHdr() {
+        return Collections.unmodifiableSet(supportedHdr);
+    }
+
+    /**
+     * Set of supported picture formats.
+     *
+     * @see PictureFormat#JPEG
+     * @see PictureFormat#DNG
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<PictureFormat> getSupportedPictureFormats() {
+        return Collections.unmodifiableSet(supportedPictureFormats);
+    }
+
+    /**
+     * Set of supported formats for frame processing,
+     * as {@link ImageFormat} constants.
+     *
+     * @see CameraView#setFrameProcessingFormat(int)
+     * @return a collection of supported values.
+     */
+    @NonNull
+    public final Collection<Integer> getSupportedFrameProcessingFormats() {
+        return Collections.unmodifiableSet(supportedFrameProcessingFormats);
+    }
+
+    /**
+     * Whether zoom is supported. If this is false, pinch-to-zoom
+     * will not work and {@link CameraView#setZoom(float)} will have no effect.
+     *
+     * @return whether zoom is supported.
+     */
+    public final boolean isZoomSupported() {
+        return zoomSupported;
+    }
+
+
+    /**
+     * Whether touch metering (metering with respect to a specific region of the screen) is
+     * supported. If it is, you can map gestures to {@link GestureAction#AUTO_FOCUS}
+     * and metering will change on tap.
+     *
+     * @return whether auto focus is supported.
+     */
+    public final boolean isAutoFocusSupported() {
+        return autoFocusSupported;
+    }
+
+    /**
+     * Whether exposure correction is supported. If this is false, calling
+     * {@link CameraView#setExposureCorrection(float)} has no effect.
+     *
+     * @see #getExposureCorrectionMinValue()
+     * @see #getExposureCorrectionMaxValue()
+     * @return whether exposure correction is supported.
+     */
+    public final boolean isExposureCorrectionSupported() {
+        return exposureCorrectionSupported;
+    }
+
+    /**
+     * The minimum value of negative exposure correction, in EV stops.
+     * This is presumably negative or 0 if not supported.
+     *
+     * @return min EV value
+     */
+    public final float getExposureCorrectionMinValue() {
+        return exposureCorrectionMinValue;
+    }
+
+
+    /**
+     * The maximum value of positive exposure correction, in EV stops.
+     * This is presumably positive or 0 if not supported.
+     *
+     * @return max EV value
+     */
+    public final float getExposureCorrectionMaxValue() {
+        return exposureCorrectionMaxValue;
+    }
+
+    /**
+     * The minimum value for the preview frame rate, in frames per second (FPS).
+     *
+     * @return the min value
+     */
+    public final float getPreviewFrameRateMinValue() {
+        return previewFrameRateMinValue;
+    }
+
+    /**
+     * The maximum value for the preview frame rate, in frames per second (FPS).
+     *
+     * @return the max value
+     */
+    public final float getPreviewFrameRateMaxValue() {
+        return previewFrameRateMaxValue;
+    }
+}
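A sketch of querying these options at runtime, assuming a CameraListener callback (such as one receiving CameraOptions when the camera opens) delivers the instance:

    // Sketch: inspect capabilities once options are available.
    void inspect(@NonNull CameraOptions options) {
        if (options.supports(Flash.TORCH)) {
            // safe to show a torch toggle
        }
        if (options.isExposureCorrectionSupported()) {
            float minEv = options.getExposureCorrectionMinValue(); // <= 0, in EV stops
            float maxEv = options.getExposureCorrectionMaxValue(); // >= 0, in EV stops
            // configure an exposure slider over [minEv, maxEv]
        }
        for (Size size : options.getSupportedPictureSizes()) {
            // feed these into a size selector
        }
    }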

+ 361 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java

@@ -0,0 +1,361 @@
+package com.otaliastudios.cameraview;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Matrix;
+import android.hardware.Camera;
+import android.os.Handler;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.WorkerThread;
+import androidx.exifinterface.media.ExifInterface;
+
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
+import com.otaliastudios.cameraview.internal.ExifHelper;
+import com.otaliastudios.cameraview.internal.WorkerHandler;
+
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+/**
+ * Static utilities for dealing with camera I/O, orientations, etc.
+ */
+@SuppressWarnings("unused")
+public class CameraUtils {
+
+    private final static String TAG = CameraUtils.class.getSimpleName();
+    private final static CameraLogger LOG = CameraLogger.create(TAG);
+
+    /**
+     * Determines whether the device has valid camera sensors, so the library
+     * can be used.
+     *
+     * @param context a valid Context
+     * @return whether device has cameras
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static boolean hasCameras(@NonNull Context context) {
+        PackageManager manager = context.getPackageManager();
+        // There's also FEATURE_CAMERA_EXTERNAL, should we support it?
+        return manager.hasSystemFeature(PackageManager.FEATURE_CAMERA)
+                || manager.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT);
+    }
+
+
+    /**
+     * Determines whether the device has a valid camera sensor with the given
+     * Facing value, so that a session can be started.
+     *
+     * @param context a valid context
+     * @param facing either {@link Facing#BACK} or {@link Facing#FRONT}
+     * @return true if such sensor exists
+     */
+    public static boolean hasCameraFacing(@SuppressWarnings("unused") @NonNull Context context,
+                                          @NonNull Facing facing) {
+        int internal = Camera1Mapper.get().mapFacing(facing);
+        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+        for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
+            Camera.getCameraInfo(i, cameraInfo);
+            if (cameraInfo.facing == internal) return true;
+        }
+        return false;
+    }
+
+
+    /**
+     * Simply writes the given data to the given file. It is done synchronously. If you are
+     * running on the UI thread, please use {@link #writeToFile(byte[], File, FileCallback)}
+     * and pass a file callback.
+     *
+     * If any error is encountered, this returns null.
+     *
+     * @param data the data to be written
+     * @param file the file to write into
+     * @return the given file, or null if an error occurred
+     */
+    @SuppressWarnings("WeakerAccess")
+    @Nullable
+    @WorkerThread
+    @SuppressLint("NewApi")
+    public static File writeToFile(@NonNull final byte[] data, @NonNull File file) {
+        if (file.exists() && !file.delete()) return null;
+        try (OutputStream stream = new BufferedOutputStream(new FileOutputStream(file))) {
+            stream.write(data);
+            stream.flush();
+            return file;
+        } catch (IOException e) {
+            return null;
+        }
+    }
+
+
+    /**
+     * Writes the given data to the given file in a background thread, returning on the
+     * original thread (typically the UI thread) once writing is done.
+     * If some error is encountered, the {@link FileCallback} will return null instead of the
+     * original file.
+     *
+     * @param data the data to be written
+     * @param file the file to write into
+     * @param callback a callback
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static void writeToFile(@NonNull final byte[] data,
+                                   @NonNull final File file,
+                                   @NonNull final FileCallback callback) {
+        final Handler ui = new Handler();
+        WorkerHandler.execute(new Runnable() {
+            @Override
+            public void run() {
+                final File result = writeToFile(data, file);
+                ui.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        callback.onFileReady(result);
+                    }
+                });
+            }
+        });
+    }
+
+    /**
+     * Decodes an input byte array and outputs a Bitmap that is ready to be displayed.
+     * The difference with {@link android.graphics.BitmapFactory#decodeByteArray(byte[], int, int)}
+     * is that this cares about orientation, reading it from the EXIF header.
+     *
+     * @param source a JPEG byte array
+     * @return decoded bitmap or null if error is encountered
+     */
+    @SuppressWarnings("WeakerAccess")
+    @Nullable
+    @WorkerThread
+    public static Bitmap decodeBitmap(@NonNull final byte[] source) {
+        return decodeBitmap(source, Integer.MAX_VALUE, Integer.MAX_VALUE);
+    }
+
+    /**
+     * Decodes an input byte array and outputs a Bitmap that is ready to be displayed.
+     * The difference with {@link android.graphics.BitmapFactory#decodeByteArray(byte[], int, int)}
+     * is that this cares about orientation, reading it from the EXIF header.
+     * This is executed in a background thread, and returns the result to the original thread.
+     *
+     * @param source a JPEG byte array
+     * @param callback a callback to be notified
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static void decodeBitmap(@NonNull final byte[] source,
+                                    @NonNull final BitmapCallback callback) {
+        decodeBitmap(source, Integer.MAX_VALUE, Integer.MAX_VALUE, callback);
+    }
+    
+    /**
+     * Decodes an input byte array and outputs a Bitmap that is ready to be displayed.
+     * The difference with {@link android.graphics.BitmapFactory#decodeByteArray(byte[], int, int)}
+     * is that this cares about orientation, reading it from the EXIF header.
+     * This is executed in a background thread, and returns the result to the original thread.
+     *
+     * The image is also downscaled taking care of the maxWidth and maxHeight arguments.
+     *
+     * @param source a JPEG byte array
+     * @param maxWidth the max allowed width
+     * @param maxHeight the max allowed height
+     * @param callback a callback to be notified
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static void decodeBitmap(@NonNull final byte[] source,
+                                    final int maxWidth,
+                                    final int maxHeight,
+                                    @NonNull final BitmapCallback callback) {
+        decodeBitmap(source, maxWidth, maxHeight, new BitmapFactory.Options(), callback);
+    }
+
+    /**
+     * Decodes an input byte array and outputs a Bitmap that is ready to be displayed.
+     * The difference with {@link android.graphics.BitmapFactory#decodeByteArray(byte[], int, int)}
+     * is that this cares about orientation, reading it from the EXIF header.
+     * This is executed in a background thread, and returns the result to the original thread.
+     *
+     * The image is also downscaled taking care of the maxWidth and maxHeight arguments.
+     *
+     * @param source a JPEG byte array
+     * @param maxWidth the max allowed width
+     * @param maxHeight the max allowed height
+     * @param options the options to be passed to decodeByteArray
+     * @param callback a callback to be notified
+     */
+    @SuppressWarnings("WeakerAccess")
+    public static void decodeBitmap(@NonNull final byte[] source,
+                                    final int maxWidth,
+                                    final int maxHeight,
+                                    @NonNull final BitmapFactory.Options options,
+                                    @NonNull final BitmapCallback callback) {
+        decodeBitmap(source, maxWidth, maxHeight, options, -1, callback);
+    }
+
+    static void decodeBitmap(@NonNull final byte[] source,
+                             final int maxWidth,
+                             final int maxHeight,
+                             @NonNull final BitmapFactory.Options options,
+                             final int rotation,
+                             @NonNull final BitmapCallback callback) {
+        final Handler ui = new Handler();
+        WorkerHandler.execute(new Runnable() {
+            @Override
+            public void run() {
+                final Bitmap bitmap = decodeBitmap(source, maxWidth, maxHeight, options, rotation);
+                ui.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        callback.onBitmapReady(bitmap);
+                    }
+                });
+            }
+        });
+    }
+
+    /**
+     * Decodes an input byte array and outputs a Bitmap that is ready to be displayed.
+     * The difference with {@link android.graphics.BitmapFactory#decodeByteArray(byte[], int, int)}
+     * is that this cares about orientation, reading it from the EXIF header.
+     *
+     * The image is also downscaled taking care of the maxWidth and maxHeight arguments.
+     *
+     * @param source a JPEG byte array
+     * @param maxWidth the max allowed width
+     * @param maxHeight the max allowed height
+     * @return decoded bitmap or null if error is encountered
+     */
+    @SuppressWarnings("SameParameterValue")
+    @Nullable
+    @WorkerThread
+    public static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight) {
+        return decodeBitmap(source, maxWidth, maxHeight, new BitmapFactory.Options());
+    }
+
+    /**
+     * Decodes an input byte array and outputs a Bitmap that is ready to be displayed.
+     * The difference with {@link android.graphics.BitmapFactory#decodeByteArray(byte[], int, int)}
+     * is that this cares about orientation, reading it from the EXIF header.
+     *
+     * The image is also downscaled taking care of the maxWidth and maxHeight arguments.
+     *
+     * @param source a JPEG byte array
+     * @param maxWidth the max allowed width
+     * @param maxHeight the max allowed height
+     * @param options the options to be passed to decodeByteArray
+     * @return decoded bitmap or null if error is encountered
+     */
+    @SuppressWarnings("WeakerAccess")
+    @Nullable
+    @WorkerThread
+    public static Bitmap decodeBitmap(@NonNull byte[] source,
+                                      int maxWidth,
+                                      int maxHeight,
+                                      @NonNull BitmapFactory.Options options) {
+        return decodeBitmap(source, maxWidth, maxHeight, options, -1);
+    }
+
+    // Null means we got OOM
+    // Ignores flipping, but it should be super rare.
+    @SuppressWarnings("TryFinallyCanBeTryWithResources")
+    @Nullable
+    private static Bitmap decodeBitmap(@NonNull byte[] source,
+                                       int maxWidth,
+                                       int maxHeight,
+                                       @NonNull BitmapFactory.Options options,
+                                       int rotation) {
+        if (maxWidth <= 0) maxWidth = Integer.MAX_VALUE;
+        if (maxHeight <= 0) maxHeight = Integer.MAX_VALUE;
+        int orientation;
+        boolean flip;
+        if (rotation == -1) {
+            InputStream stream = null;
+            try {
+                // http://sylvana.net/jpegcrop/exif_orientation.html
+                stream = new ByteArrayInputStream(source);
+                ExifInterface exif = new ExifInterface(stream);
+                int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
+                        ExifInterface.ORIENTATION_NORMAL);
+                orientation = ExifHelper.getOrientation(exifOrientation);
+                flip = exifOrientation == ExifInterface.ORIENTATION_FLIP_HORIZONTAL ||
+                        exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
+                        exifOrientation == ExifInterface.ORIENTATION_TRANSPOSE ||
+                        exifOrientation == ExifInterface.ORIENTATION_TRANSVERSE;
+                LOG.i("decodeBitmap:", "got orientation from EXIF.", orientation);
+            } catch (IOException e) {
+                LOG.e("decodeBitmap:", "could not get orientation from EXIF.", e);
+                orientation = 0;
+                flip = false;
+            } finally {
+                if (stream != null) {
+                    try {
+                        stream.close();
+                    } catch (Exception ignored) { }
+                }
+            }
+        } else {
+            orientation = rotation;
+            flip = false;
+            LOG.i("decodeBitmap:", "got orientation from constructor.", orientation);
+        }
+
+        Bitmap bitmap;
+        try {
+            if (maxWidth < Integer.MAX_VALUE || maxHeight < Integer.MAX_VALUE) {
+                options.inJustDecodeBounds = true;
+                BitmapFactory.decodeByteArray(source, 0, source.length, options);
+
+                int outHeight = options.outHeight;
+                int outWidth = options.outWidth;
+                if (orientation % 180 != 0) {
+                    //noinspection SuspiciousNameCombination
+                    outHeight = options.outWidth;
+                    //noinspection SuspiciousNameCombination
+                    outWidth = options.outHeight;
+                }
+
+                options.inSampleSize = computeSampleSize(outWidth, outHeight, maxWidth, maxHeight);
+                options.inJustDecodeBounds = false;
+                bitmap = BitmapFactory.decodeByteArray(source, 0, source.length, options);
+            } else {
+                bitmap = BitmapFactory.decodeByteArray(source, 0, source.length);
+            }
+
+            if (orientation != 0 || flip) {
+                Matrix matrix = new Matrix();
+                matrix.setRotate(orientation);
+                Bitmap temp = bitmap;
+                bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
+                        bitmap.getHeight(), matrix, true);
+                temp.recycle();
+            }
+        } catch (OutOfMemoryError e) {
+            bitmap = null;
+        }
+        return bitmap;
+    }
+
+    private static int computeSampleSize(int width, int height, int maxWidth, int maxHeight) {
+        // https://developer.android.com/topic/performance/graphics/load-bitmap.html
+        int inSampleSize = 1;
+        if (height > maxHeight || width > maxWidth) {
+            while ((height / inSampleSize) >= maxHeight
+                    || (width / inSampleSize) >= maxWidth) {
+                inSampleSize *= 2;
+            }
+        }
+        return inSampleSize;
+    }
+
+
+}
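A sketch of the asynchronous decode helper above. The byte array is assumed to hold a captured JPEG, the ImageView is illustrative, and BitmapCallback#onBitmapReady is assumed to receive a nullable Bitmap, matching how decodeBitmap invokes it.

    // Decode a JPEG off the UI thread, honoring EXIF orientation and a max size.
    void showPicture(@NonNull byte[] jpeg, @NonNull final ImageView target) {
        CameraUtils.decodeBitmap(jpeg, 1000, 1000, new BitmapCallback() {
            @Override
            public void onBitmapReady(@Nullable Bitmap bitmap) {
                // null means decoding failed (bad data or OOM)
                if (bitmap != null) target.setImageBitmap(bitmap);
            }
        });
    }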

The diff is not shown because the file is too large.
+ 2706 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java


+ 23 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/FileCallback.java

@@ -0,0 +1,23 @@
+package com.otaliastudios.cameraview;
+
+import androidx.annotation.Nullable;
+import androidx.annotation.UiThread;
+
+import java.io.File;
+
+/**
+ * Receives callbacks about a file saving operation.
+ */
+public interface FileCallback {
+
+    /**
+     * Notifies that the data was successfully written to file.
+     * This is run on the UI thread.
+     * The file parameter is null if an exception was encountered, for example
+     * if you don't have permission to write to the file.
+     *
+     * @param file the written file, or null
+     */
+    @UiThread
+    void onFileReady(@Nullable File file);
+}
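A sketch tying this callback to CameraUtils.writeToFile from the previous file; the destination file name is illustrative:

    // Persist captured bytes without blocking the UI thread.
    void save(@NonNull byte[] data, @NonNull File directory) {
        File target = new File(directory, "picture.jpg"); // illustrative name
        CameraUtils.writeToFile(data, target, new FileCallback() {
            @Override
            public void onFileReady(@Nullable File file) {
                if (file == null) {
                    // write failed: I/O error, missing permission, ...
                } else {
                    // file is ready to be displayed or shared
                }
            }
        });
    }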

+ 175 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/PictureResult.java

@@ -0,0 +1,175 @@
+package com.otaliastudios.cameraview;
+
+import android.graphics.BitmapFactory;
+import android.location.Location;
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.size.Size;
+
+import java.io.File;
+
+/**
+ * Wraps the picture captured by {@link CameraView#takePicture()} or
+ * {@link CameraView#takePictureSnapshot()}.
+ */
+@SuppressWarnings("unused")
+public class PictureResult {
+
+    /**
+     * A result stub, for internal use only.
+     */
+    public static class Stub {
+
+        Stub() {}
+
+        public boolean isSnapshot;
+        public Location location;
+        public int rotation;
+        public Size size;
+        public Facing facing;
+        public byte[] data;
+        public PictureFormat format;
+    }
+
+    private final boolean isSnapshot;
+    private final Location location;
+    private final int rotation;
+    private final Size size;
+    private final Facing facing;
+    private final byte[] data;
+    private final PictureFormat format;
+
+    PictureResult(@NonNull Stub builder) {
+        isSnapshot = builder.isSnapshot;
+        location = builder.location;
+        rotation = builder.rotation;
+        size = builder.size;
+        facing = builder.facing;
+        data = builder.data;
+        format = builder.format;
+    }
+
+    /**
+     * Returns whether this result comes from a snapshot.
+     *
+     * @return whether this is a snapshot
+     */
+    public boolean isSnapshot() {
+        return isSnapshot;
+    }
+
+    /**
+     * Returns geographic information for this picture, if any.
+     * If it was set, it is also present in the file metadata.
+     *
+     * @return a nullable Location
+     */
+    @Nullable
+    public Location getLocation() {
+        return location;
+    }
+
+    /**
+     * Returns the clock-wise rotation that should be applied to the
+     * picture before displaying. If it is non-zero, it is also present
+     * in the EXIF metadata.
+     *
+     * @return the clock-wise rotation
+     */
+    public int getRotation() {
+        return rotation;
+    }
+
+    /**
+     * Returns the size of the picture after the rotation is applied.
+     *
+     * @return the Size of this picture
+     */
+    @NonNull
+    public Size getSize() {
+        return size;
+    }
+
+    /**
+     * Returns the facing value with which this picture was taken.
+     *
+     * @return the Facing of this picture
+     */
+    @NonNull
+    public Facing getFacing() {
+        return facing;
+    }
+
+    /**
+     * Returns the raw compressed data, ready to be saved to a file,
+     * in the given format.
+     *
+     * @return the compressed data stream
+     */
+    @NonNull
+    public byte[] getData() {
+        return data;
+    }
+
+    /**
+     * Returns the format for {@link #getData()}.
+     *
+     * @return the format
+     */
+    @NonNull
+    public PictureFormat getFormat() {
+        return format;
+    }
+
+    /**
+     * Shorthand for {@link CameraUtils#decodeBitmap(byte[], int, int, BitmapCallback)}.
+     * Decodes this picture on a background thread and posts the result in the UI thread using
+     * the given callback.
+     *
+     * @param maxWidth the max. width of final bitmap
+     * @param maxHeight the max. height of final bitmap
+     * @param callback a callback to be notified of image decoding
+     */
+    public void toBitmap(int maxWidth, int maxHeight, @NonNull BitmapCallback callback) {
+        if (format == PictureFormat.JPEG) {
+            CameraUtils.decodeBitmap(getData(), maxWidth, maxHeight, new BitmapFactory.Options(),
+                    rotation, callback);
+        } else if (format == PictureFormat.DNG && Build.VERSION.SDK_INT >= 24) {
+            // Apparently: BitmapFactory added DNG support in API 24.
+            // https://github.com/aosp-mirror/platform_frameworks_base/blob/nougat-mr1-release/core/jni/android/graphics/BitmapFactory.cpp
+            CameraUtils.decodeBitmap(getData(), maxWidth, maxHeight, new BitmapFactory.Options(),
+                    rotation, callback);
+        } else {
+            throw new UnsupportedOperationException("PictureResult.toBitmap() does not support "
+                    + "this picture format: " + format);
+        }
+    }
+
+    /**
+     * Shorthand for {@link CameraUtils#decodeBitmap(byte[], BitmapCallback)}.
+     * Decodes this picture on a background thread and posts the result in the UI thread using
+     * the given callback.
+     *
+     * @param callback a callback to be notified of image decoding
+     */
+    public void toBitmap(@NonNull BitmapCallback callback) {
+        toBitmap(-1, -1, callback);
+    }
+
+    /**
+     * Shorthand for {@link CameraUtils#writeToFile(byte[], File, FileCallback)}.
+     * This writes this picture to file on a background thread and posts the result in the UI
+     * thread using the given callback.
+     *
+     * @param file the file to write into
+     * @param callback a callback
+     */
+    public void toFile(@NonNull File file, @NonNull FileCallback callback) {
+        CameraUtils.writeToFile(getData(), file, callback);
+    }
+}
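A sketch of a typical consumer of this class, inside the onPictureTaken callback defined on CameraListener above:

    // Handle a capture result: preview it and optionally persist it.
    @Override
    public void onPictureTaken(@NonNull PictureResult result) {
        // Downscaled, correctly rotated preview:
        result.toBitmap(1000, 1000, new BitmapCallback() {
            @Override
            public void onBitmapReady(@Nullable Bitmap bitmap) {
                // display bitmap, or handle null (decode error / OOM)
            }
        });
        // Background write to disk (path is illustrative):
        // result.toFile(new File(getFilesDir(), "picture.jpg"), file -> { /* ... */ });
    }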

+ 256 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/VideoResult.java

@@ -0,0 +1,256 @@
+package com.otaliastudios.cameraview;
+
+import android.location.Location;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.AudioCodec;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.size.Size;
+
+import java.io.File;
+import java.io.FileDescriptor;
+
+/**
+ * Wraps the result of a video recording started by {@link CameraView#takeVideo(File)}.
+ */
+@SuppressWarnings("WeakerAccess")
+public class VideoResult {
+
+    /**
+     * A result stub, for internal use only.
+     */
+    public static class Stub {
+
+        Stub() {}
+
+        public boolean isSnapshot;
+        public Location location;
+        public int rotation;
+        public Size size;
+        public File file;
+        public FileDescriptor fileDescriptor;
+        public Facing facing;
+        public VideoCodec videoCodec;
+        public AudioCodec audioCodec;
+        public Audio audio;
+        public long maxSize;
+        public int maxDuration;
+        public int endReason;
+        public int videoBitRate;
+        public int videoFrameRate;
+        public int audioBitRate;
+    }
+
+    @SuppressWarnings({"WeakerAccess", "unused"})
+    public static final int REASON_USER = 0;
+
+    @SuppressWarnings("WeakerAccess")
+    public static final int REASON_MAX_SIZE_REACHED = 1;
+
+    @SuppressWarnings("WeakerAccess")
+    public static final int REASON_MAX_DURATION_REACHED = 2;
+
+    private final boolean isSnapshot;
+    private final Location location;
+    private final int rotation;
+    private final Size size;
+    private final File file;
+    private final FileDescriptor fileDescriptor;
+    private final Facing facing;
+    private final VideoCodec videoCodec;
+    private final AudioCodec audioCodec;
+    private final Audio audio;
+    private final long maxSize;
+    private final int maxDuration;
+    private final int endReason;
+    private final int videoBitRate;
+    private final int videoFrameRate;
+    private final int audioBitRate;
+
+    VideoResult(@NonNull Stub builder) {
+        isSnapshot = builder.isSnapshot;
+        location = builder.location;
+        rotation = builder.rotation;
+        size = builder.size;
+        file = builder.file;
+        fileDescriptor = builder.fileDescriptor;
+        facing = builder.facing;
+        videoCodec = builder.videoCodec;
+        audioCodec = builder.audioCodec;
+        audio = builder.audio;
+        maxSize = builder.maxSize;
+        maxDuration = builder.maxDuration;
+        endReason = builder.endReason;
+        videoBitRate = builder.videoBitRate;
+        videoFrameRate = builder.videoFrameRate;
+        audioBitRate = builder.audioBitRate;
+    }
+
+    /**
+     * Returns whether this result comes from a snapshot.
+     *
+     * @return whether this is a snapshot
+     */
+    public boolean isSnapshot() {
+        return isSnapshot;
+    }
+
+    /**
+     * Returns geographic information for this video, if any.
+     * If it was set, it is also present in the file metadata.
+     *
+     * @return a nullable Location
+     */
+    @Nullable
+    public Location getLocation() {
+        return location;
+    }
+
+    /**
+     * Returns the clock-wise rotation that should be applied to the
+     * video frames before displaying. If it is non-zero, it is also present
+     * in the video metadata, so most readers will take care of it.
+     *
+     * @return the clock-wise rotation
+     */
+    public int getRotation() {
+        return rotation;
+    }
+
+    /**
+     * Returns the size of the frames after the rotation is applied.
+     *
+     * @return the Size of this video
+     */
+    @NonNull
+    public Size getSize() {
+        return size;
+    }
+
+    /**
+     * Returns the file where the video was saved.
+     *
+     * @return the File of this video
+     */
+    @NonNull
+    public File getFile() {
+        if (file == null) {
+            throw new RuntimeException("File is only available when takeVideo(File) is used.");
+        }
+        return file;
+    }
+
+    /**
+     * Returns the file descriptor where the video was saved.
+     *
+     * @return the File Descriptor of this video
+     */
+    @NonNull
+    public FileDescriptor getFileDescriptor() {
+        if (fileDescriptor == null) {
+            throw new RuntimeException("FileDescriptor is only available when takeVideo(FileDescriptor) is used.");
+        }
+        return fileDescriptor;
+    }
+
+    /**
+     * Returns the facing value with which this video was recorded.
+     *
+     * @return the Facing of this video
+     */
+    @NonNull
+    public Facing getFacing() {
+        return facing;
+    }
+
+    /**
+     * Returns the codec that was used to encode the video frames.
+     *
+     * @return the video codec
+     */
+    @NonNull
+    public VideoCodec getVideoCodec() {
+        return videoCodec;
+    }
+
+    /**
+     * Returns the codec that was used to encode the audio frames.
+     *
+     * @return the audio codec
+     */
+    @NonNull
+    public AudioCodec getAudioCodec() {
+        return audioCodec;
+    }
+
+    /**
+     * Returns the max file size in bytes that was set before recording,
+     * or 0 if no constraint was set.
+     *
+     * @return the max file size in bytes
+     */
+    public long getMaxSize() {
+        return maxSize;
+    }
+
+    /**
+     * Returns the max video duration in milliseconds that was set before recording,
+     * or 0 if no constraint was set.
+     *
+     * @return the max duration in milliseconds
+     */
+    public int getMaxDuration() {
+        return maxDuration;
+    }
+
+    /**
+     * Returns the {@link Audio} setting for this video.
+     *
+     * @return the audio setting for this video
+     */
+    @NonNull
+    public Audio getAudio() {
+        return audio;
+    }
+
+    /**
+     * Returns the reason why the recording was stopped.
+     * @return one of {@link #REASON_USER}, {@link #REASON_MAX_DURATION_REACHED}
+     *         or {@link #REASON_MAX_SIZE_REACHED}.
+     */
+    public int getTerminationReason() {
+        return endReason;
+    }
+
+    /**
+     * Returns the bit rate used for video encoding.
+     *
+     * @return the video bit rate
+     */
+    public int getVideoBitRate() {
+        return videoBitRate;
+    }
+
+    /**
+     * Returns the frame rate used for video encoding
+     * in frames per second.
+     *
+     * @return the video frame rate
+     */
+    public int getVideoFrameRate() {
+        return videoFrameRate;
+    }
+
+    /**
+     * Returns the bit rate used for audio encoding.
+     *
+     * @return the audio bit rate
+     */
+    public int getAudioBitRate() {
+        return audioBitRate;
+    }
+}
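A sketch of consuming a finished recording in the onVideoTaken callback defined on CameraListener above; only getters from this class are used:

    // Inspect a finished recording.
    @Override
    public void onVideoTaken(@NonNull VideoResult result) {
        File video = result.getFile(); // valid only when takeVideo(File) was used
        if (result.getTerminationReason() == VideoResult.REASON_MAX_DURATION_REACHED) {
            // recording stopped because the configured max duration was reached
        }
        int fps = result.getVideoFrameRate();
        // hand `video` to a player, an upload service, etc.
    }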

+ 59 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Audio.java

@@ -0,0 +1,59 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Audio values indicate whether to record an audio stream when recording video.
+ *
+ * @see CameraView#setAudio(Audio)
+ */
+public enum Audio implements Control {
+
+    /**
+     * No audio.
+     */
+    OFF(0),
+
+    /**
+     * Audio on. The number of channels depends on the video configuration,
+     * on the device capabilities and on the video type (e.g. we default to
+     * mono for snapshots).
+     */
+    ON(1),
+
+    /**
+     * Force mono channel audio.
+     */
+    MONO(2),
+
+    /**
+     * Force stereo audio.
+     */
+    STEREO(3);
+
+    final static Audio DEFAULT = ON;
+
+    private int value;
+
+    Audio(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Audio fromValue(int value) {
+        Audio[] list = Audio.values();
+        for (Audio action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 63 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/AudioCodec.java

@@ -0,0 +1,63 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Constants for selecting the encoder of audio recordings.
+ * https://developer.android.com/guide/topics/media/media-formats.html#audio-formats
+ *
+ * @see CameraView#setAudioCodec(AudioCodec)
+ */
+public enum AudioCodec implements Control {
+
+    /**
+     * Let the device choose its codec.
+     */
+    DEVICE_DEFAULT(0),
+
+    /**
+     * The AAC codec.
+     */
+    AAC(1),
+
+    /**
+     * The HE_AAC codec.
+     */
+    @RequiresApi(Build.VERSION_CODES.JELLY_BEAN)
+    HE_AAC(2),
+
+    /**
+     * The AAC_ELD codec.
+     */
+    @RequiresApi(Build.VERSION_CODES.JELLY_BEAN)
+    AAC_ELD(3);
+
+    static final AudioCodec DEFAULT = DEVICE_DEFAULT;
+
+    private int value;
+
+    AudioCodec(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static AudioCodec fromValue(int value) {
+        AudioCodec[] list = AudioCodec.values();
+        for (AudioCodec action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 8 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Control.java

@@ -0,0 +1,8 @@
+package com.otaliastudios.cameraview.controls;
+
+/**
+ * Base interface for controls like {@link Audio},
+ * {@link Facing}, {@link Flash} and so on.
+ */
+public interface Control {
+}

+ 108 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/ControlParser.java

@@ -0,0 +1,108 @@
+package com.otaliastudios.cameraview.controls;
+
+import android.content.Context;
+import android.content.res.TypedArray;
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.R;
+
+/**
+ * Parses controls from XML attributes.
+ */
+public class ControlParser {
+
+    private int preview;
+    private int facing;
+    private int flash;
+    private int grid;
+    private int whiteBalance;
+    private int mode;
+    private int hdr;
+    private int audio;
+    private int videoCodec;
+    private int audioCodec;
+    private int engine;
+    private int pictureFormat;
+
+    public ControlParser(@NonNull Context context, @NonNull TypedArray array) {
+        preview = array.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value());
+        facing = array.getInteger(R.styleable.CameraView_cameraFacing,
+                Facing.DEFAULT(context).value());
+        flash = array.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value());
+        grid = array.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value());
+        whiteBalance = array.getInteger(R.styleable.CameraView_cameraWhiteBalance,
+                WhiteBalance.DEFAULT.value());
+        mode = array.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value());
+        hdr = array.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value());
+        audio = array.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value());
+        videoCodec = array.getInteger(R.styleable.CameraView_cameraVideoCodec,
+                VideoCodec.DEFAULT.value());
+        audioCodec = array.getInteger(R.styleable.CameraView_cameraAudioCodec,
+                AudioCodec.DEFAULT.value());
+        engine = array.getInteger(R.styleable.CameraView_cameraEngine, Engine.DEFAULT.value());
+        pictureFormat = array.getInteger(R.styleable.CameraView_cameraPictureFormat,
+                PictureFormat.DEFAULT.value());
+    }
+
+    @NonNull
+    public Preview getPreview() {
+        return Preview.fromValue(preview);
+    }
+
+    @NonNull
+    public Facing getFacing() {
+        //noinspection ConstantConditions
+        return Facing.fromValue(facing);
+    }
+
+    @NonNull
+    public Flash getFlash() {
+        return Flash.fromValue(flash);
+    }
+
+    @NonNull
+    public Grid getGrid() {
+        return Grid.fromValue(grid);
+    }
+
+    @NonNull
+    public Mode getMode() {
+        return Mode.fromValue(mode);
+    }
+
+    @NonNull
+    public WhiteBalance getWhiteBalance() {
+        return WhiteBalance.fromValue(whiteBalance);
+    }
+
+    @NonNull
+    public Hdr getHdr() {
+        return Hdr.fromValue(hdr);
+    }
+
+    @NonNull
+    public Audio getAudio() {
+        return Audio.fromValue(audio);
+    }
+
+    @NonNull
+    public AudioCodec getAudioCodec() {
+        return AudioCodec.fromValue(audioCodec);
+    }
+
+    @NonNull
+    public VideoCodec getVideoCodec() {
+        return VideoCodec.fromValue(videoCodec);
+    }
+
+    @NonNull
+    public Engine getEngine() {
+        return Engine.fromValue(engine);
+    }
+
+    @NonNull
+    public PictureFormat getPictureFormat() {
+        return PictureFormat.fromValue(pictureFormat);
+    }
+}
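A sketch of how the parser is presumably consumed at view-inflation time (the CameraView.java diff is not displayed above, so this call site is an assumption); the styled-attribute plumbing is standard Android:

    // Assumed call site inside a View constructor (sketch).
    TypedArray array = context.getTheme().obtainStyledAttributes(
            attrs, R.styleable.CameraView, 0, 0);
    ControlParser controls = new ControlParser(context, array);
    array.recycle(); // the parser reads every attribute eagerly in its constructor

    // Apply the parsed defaults through the setters referenced by the enums' @see tags:
    setFacing(controls.getFacing());
    setFlash(controls.getFlash());
    setGrid(controls.getGrid());
    setEngine(controls.getEngine());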

+ 48 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Engine.java

@@ -0,0 +1,48 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * The engine to be used.
+ *
+ * @see CameraView#setEngine(Engine)
+ */
+public enum Engine implements Control {
+
+    /**
+     * Camera1 based engine.
+     */
+    CAMERA1(0),
+
+    /**
+     * Camera2 based engine. For API versions older than 21,
+     * the system falls back to {@link #CAMERA1}.
+     */
+    CAMERA2(1);
+
+    final static Engine DEFAULT = CAMERA1;
+
+    private int value;
+
+    Engine(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Engine fromValue(int value) {
+        Engine[] list = Engine.values();
+        for (Engine action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 64 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Facing.java

@@ -0,0 +1,64 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import android.content.Context;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.otaliastudios.cameraview.CameraUtils;
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Facing value indicates which camera sensor should be used for the current session.
+ *
+ * @see CameraView#setFacing(Facing)
+ */
+public enum Facing implements Control {
+
+    /**
+     * Back-facing camera sensor.
+     */
+    BACK(0),
+
+    /**
+     * Front-facing camera sensor.
+     */
+    FRONT(1);
+
+    @NonNull
+    static Facing DEFAULT(@Nullable Context context) {
+        if (context == null) {
+            return BACK;
+        } else if (CameraUtils.hasCameraFacing(context, BACK)) {
+            return BACK;
+        } else if (CameraUtils.hasCameraFacing(context, FRONT)) {
+            return FRONT;
+        } else {
+            // The controller will throw a CameraException.
+            // This device has no cameras.
+            return BACK;
+        }
+    }
+
+    private int value;
+
+    Facing(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @Nullable
+    static Facing fromValue(int value) {
+        Facing[] list = Facing.values();
+        for (Facing action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return null;
+    }
+}

+ 69 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Flash.java

@@ -0,0 +1,69 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Flash value indicates the flash mode to be used.
+ *
+ * @see CameraView#setFlash(Flash)
+ */
+public enum Flash implements Control {
+
+    /**
+     * Flash is always off.
+     */
+    OFF(0),
+
+    /**
+     * Flash will be on when capturing.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedFlash()
+     */
+    ON(1),
+
+
+    /**
+     * Flash mode is chosen by the camera.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedFlash()
+     */
+    AUTO(2),
+
+
+    /**
+     * Flash is always on, working as a torch.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedFlash()
+     */
+    TORCH(3);
+
+    static final Flash DEFAULT = OFF;
+
+    private int value;
+
+    Flash(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Flash fromValue(int value) {
+        Flash[] list = Flash.values();
+        for (Flash action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}
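
Since ON, AUTO and TORCH are documented as not guaranteed, the @see tags point at CameraOptions#getSupportedFlash(). A minimal sketch of that guard follows; it assumes getSupportedFlash() returns a collection of Flash values and that a CameraOptions instance is available (for example from a camera-opened callback), neither of which is shown in this diff.

    import java.util.Collection;
    import com.otaliastudios.cameraview.CameraOptions;
    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.controls.Flash;

    final class FlashConfig {
        // Enable the torch only when the current camera reports support for it.
        static void enableTorchIfSupported(CameraView cameraView, CameraOptions options) {
            Collection<Flash> supported = options.getSupportedFlash();
            if (supported.contains(Flash.TORCH)) {
                cameraView.setFlash(Flash.TORCH);
            }
        }
    }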

+ 59 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Grid.java

@@ -0,0 +1,59 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Grid values can be used to draw grid lines over the camera preview.
+ *
+ * @see CameraView#setGrid(Grid)
+ */
+public enum Grid implements Control {
+
+
+    /**
+     * No grid is drawn.
+     */
+    OFF(0),
+
+    /**
+     * Draws a regular, 3x3 grid.
+     */
+    DRAW_3X3(1),
+
+    /**
+     * Draws a regular, 4x4 grid.
+     */
+    DRAW_4X4(2),
+
+    /**
+     * Draws a grid respecting the 'phi' constant proportions,
+     * often referred to as the golden ratio.
+     */
+    DRAW_PHI(3);
+
+    static final Grid DEFAULT = OFF;
+
+    private int value;
+
+    Grid(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Grid fromValue(int value) {
+        Grid[] list = Grid.values();
+        for (Grid action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 47 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Hdr.java

@@ -0,0 +1,47 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Hdr values indicate whether to use high dynamic range techniques when capturing pictures.
+ *
+ * @see CameraView#setHdr(Hdr)
+ */
+public enum Hdr implements Control {
+
+    /**
+     * No HDR.
+     */
+    OFF(0),
+
+    /**
+     * Using HDR.
+     */
+    ON(1);
+
+    final static Hdr DEFAULT = OFF;
+
+    private int value;
+
+    Hdr(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Hdr fromValue(int value) {
+        Hdr[] list = Hdr.values();
+        for (Hdr action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 59 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Mode.java

@@ -0,0 +1,59 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+import java.io.File;
+
+/**
+ * Type of session to be opened or moved to.
+ * The session mode influences the capture and preview size, the ability to shoot
+ * pictures, the available focus modes, and the runtime permissions that are requested.
+ *
+ * @see CameraView#setMode(Mode)
+ */
+public enum Mode implements Control {
+
+    /**
+     * Session used to capture pictures.
+     *
+     * - {@link CameraView#takeVideo(File)} will throw an exception
+     * - Only the camera permission is requested
+     * - Capture size is chosen according to the current picture size selector
+     */
+    PICTURE(0),
+
+    /**
+     * Session used to capture videos.
+     *
+     * - {@link CameraView#takePicture()} will throw an exception
+     * - Camera and audio record permissions are requested
+     * - Capture size is chosen according to the current video size selector
+     */
+    VIDEO(1);
+
+    static final Mode DEFAULT = PICTURE;
+
+    private int value;
+
+    Mode(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Mode fromValue(int value) {
+        Mode[] list = Mode.values();
+        for (Mode action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}
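
Because takeVideo(File) throws in PICTURE mode (and takePicture() throws in VIDEO mode), callers are expected to switch mode before capturing. A minimal sketch of that call order; note the mode change may be applied asynchronously by the engine, so this only illustrates the sequence. Names are illustrative.

    import java.io.File;
    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.controls.Mode;

    final class ModeConfig {
        // Switch to VIDEO before recording: per the Javadoc above, takeVideo(File)
        // throws in PICTURE mode, and VIDEO mode also requests the audio permission.
        static void recordClip(CameraView cameraView, File output) {
            cameraView.setMode(Mode.VIDEO);
            cameraView.takeVideo(output);
        }
    }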

+ 52 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/PictureFormat.java

@@ -0,0 +1,52 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Format of the picture results for pictures taken with {@link CameraView#takePicture()}.
+ * This does not apply to picture snapshots.
+ *
+ * @see CameraView#setPictureFormat(PictureFormat)
+ */
+public enum PictureFormat implements Control {
+
+    /**
+     * The picture result data will be a JPEG file.
+     * This value is always supported.
+     */
+    JPEG(0),
+
+    /**
+     * The picture result data will be a DNG file.
+     * This is only supported with the {@link Engine#CAMERA2} engine and only on
+     * specific devices. Please check {@link CameraOptions#getSupportedPictureFormats()}.
+     */
+    DNG(1);
+
+    static final PictureFormat DEFAULT = JPEG;
+
+    private int value;
+
+    PictureFormat(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static PictureFormat fromValue(int value) {
+        PictureFormat[] list = PictureFormat.values();
+        for (PictureFormat action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}
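
DNG is only available with the Camera2 engine and on devices that report it; in fact, Camera1Engine.setPictureFormat() further down in this diff throws for anything other than JPEG. A minimal sketch of the supported-formats check; it assumes getSupportedPictureFormats() returns a collection of PictureFormat values and that a CameraOptions instance is at hand, neither of which is shown here.

    import java.util.Collection;
    import com.otaliastudios.cameraview.CameraOptions;
    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.controls.PictureFormat;

    final class PictureFormatConfig {
        // Prefer RAW (DNG) output when available; JPEG is documented above as
        // always supported, so it is the safe fallback.
        static void preferDng(CameraView cameraView, CameraOptions options) {
            Collection<PictureFormat> formats = options.getSupportedPictureFormats();
            cameraView.setPictureFormat(formats.contains(PictureFormat.DNG)
                    ? PictureFormat.DNG : PictureFormat.JPEG);
        }
    }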

+ 58 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/Preview.java

@@ -0,0 +1,58 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * The preview engine to be used.
+ *
+ * @see CameraView#setPreview(Preview)
+ */
+public enum Preview implements Control {
+
+    /**
+     * Preview engine based on {@link android.view.SurfaceView}.
+     * Not recommended.
+     */
+    SURFACE(0),
+
+    /**
+     * Preview engine based on {@link android.view.TextureView}.
+     * Stable, but does not support all features (like video snapshots,
+     * or picture snapshots while taking videos).
+     */
+    TEXTURE(1),
+
+    /**
+     * Preview engine based on {@link android.opengl.GLSurfaceView}.
+     * This is the best engine available. Supports video snapshots,
+     * supports picture snapshots while taking videos, supports
+     * watermarks and overlays, supports real-time filters.
+     */
+    GL_SURFACE(2);
+
+    final static Preview DEFAULT = GL_SURFACE;
+
+    private int value;
+
+    Preview(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static Preview fromValue(int value) {
+        Preview[] list = Preview.values();
+        for (Preview action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}
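
GL_SURFACE is the default and, per the Javadoc, the only preview that supports video snapshots, overlays and real-time filters; Camera1Engine.onTakeVideoSnapshot() below enforces this by throwing on any other preview. A minimal configuration sketch; it assumes the preview is chosen up front, before the view starts drawing, and the class name is illustrative.

    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.controls.Preview;

    final class PreviewConfig {
        // Pick the GL-based preview so that video snapshots, overlays and
        // real-time filters are available later on.
        static void useGlPreview(CameraView cameraView) {
            cameraView.setPreview(Preview.GL_SURFACE);
        }
    }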

+ 54 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/VideoCodec.java

@@ -0,0 +1,54 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * Constants for selecting the encoder of video recordings.
+ * https://developer.android.com/guide/topics/media/media-formats.html#video-formats
+ *
+ * @see CameraView#setVideoCodec(VideoCodec)
+ */
+public enum VideoCodec implements Control {
+
+
+    /**
+     * Let the device choose its codec.
+     */
+    DEVICE_DEFAULT(0),
+
+    /**
+     * The H.263 codec.
+     */
+    H_263(1),
+
+    /**
+     * The H.264 codec.
+     */
+    H_264(2);
+
+    static final VideoCodec DEFAULT = DEVICE_DEFAULT;
+
+    private int value;
+
+    VideoCodec(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static VideoCodec fromValue(int value) {
+        VideoCodec[] list = VideoCodec.values();
+        for (VideoCodec action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 78 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/controls/WhiteBalance.java

@@ -0,0 +1,78 @@
+package com.otaliastudios.cameraview.controls;
+
+
+import androidx.annotation.NonNull;
+
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.CameraView;
+
+/**
+ * White balance values control how colors are rendered under different light sources.
+ *
+ * @see CameraView#setWhiteBalance(WhiteBalance)
+ */
+public enum WhiteBalance implements Control {
+
+    /**
+     * Automatic white balance selection (AWB).
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedWhiteBalance()
+     */
+    AUTO(0),
+
+    /**
+     * White balance appropriate for incandescent light.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedWhiteBalance()
+     */
+    INCANDESCENT(1),
+
+    /**
+     * White balance appropriate for fluorescent light.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedWhiteBalance()
+     */
+    FLUORESCENT(2),
+
+    /**
+     * White balance appropriate for daylight captures.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedWhiteBalance()
+     */
+    DAYLIGHT(3),
+
+    /**
+     * White balance appropriate for pictures in cloudy conditions.
+     * This is not guaranteed to be supported.
+     *
+     * @see CameraOptions#getSupportedWhiteBalance()
+     */
+    CLOUDY(4);
+
+    static final WhiteBalance DEFAULT = AUTO;
+
+    private int value;
+
+    WhiteBalance(int value) {
+        this.value = value;
+    }
+
+    int value() {
+        return value;
+    }
+
+    @NonNull
+    static WhiteBalance fromValue(int value) {
+        WhiteBalance[] list = WhiteBalance.values();
+        for (WhiteBalance action : list) {
+            if (action.value() == value) {
+                return action;
+            }
+        }
+        return DEFAULT;
+    }
+}

+ 940 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java

@@ -0,0 +1,940 @@
+package com.otaliastudios.cameraview.engine;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.graphics.ImageFormat;
+import android.graphics.PointF;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.location.Location;
+import android.os.Build;
+import android.view.SurfaceHolder;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.otaliastudios.cameraview.CameraException;
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.Mode;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
+import com.otaliastudios.cameraview.engine.metering.Camera1MeteringTransform;
+import com.otaliastudios.cameraview.engine.offset.Axis;
+import com.otaliastudios.cameraview.engine.offset.Reference;
+import com.otaliastudios.cameraview.engine.options.Camera1Options;
+import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
+import com.otaliastudios.cameraview.frame.ByteBufferFrameManager;
+import com.otaliastudios.cameraview.frame.Frame;
+import com.otaliastudios.cameraview.frame.FrameManager;
+import com.otaliastudios.cameraview.gesture.Gesture;
+import com.otaliastudios.cameraview.internal.CropHelper;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
+import com.otaliastudios.cameraview.metering.MeteringTransform;
+import com.otaliastudios.cameraview.picture.Full1PictureRecorder;
+import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
+import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
+import com.otaliastudios.cameraview.preview.RendererCameraPreview;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.video.Full1VideoRecorder;
+import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+
+public class Camera1Engine extends CameraBaseEngine implements
+        Camera.PreviewCallback,
+        Camera.ErrorCallback,
+        ByteBufferFrameManager.BufferCallback {
+    private static final String JOB_FOCUS_RESET = "focus reset";
+    private static final String JOB_FOCUS_END = "focus end";
+
+    private static final int PREVIEW_FORMAT = ImageFormat.NV21;
+    @VisibleForTesting static final int AUTOFOCUS_END_DELAY_MILLIS = 2500;
+
+    private final Camera1Mapper mMapper = Camera1Mapper.get();
+    private Camera mCamera;
+    @VisibleForTesting int mCameraId;
+
+    public Camera1Engine(@NonNull Callback callback) {
+        super(callback);
+    }
+
+    //region Utilities
+
+    @Override
+    public void onError(int error, Camera camera) {
+        String message = LOG.e("Internal Camera1 error.", error);
+        Exception runtime = new RuntimeException(message);
+        int reason;
+        switch (error) {
+            case Camera.CAMERA_ERROR_SERVER_DIED:
+            case Camera.CAMERA_ERROR_EVICTED:
+                reason = CameraException.REASON_DISCONNECTED; break;
+            case Camera.CAMERA_ERROR_UNKNOWN: // Pass DISCONNECTED which is considered unrecoverable
+                reason = CameraException.REASON_DISCONNECTED; break;
+            default: reason = CameraException.REASON_UNKNOWN;
+        }
+        throw new CameraException(runtime, reason);
+    }
+
+    //endregion
+
+    //region Protected APIs
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected List<Size> getPreviewStreamAvailableSizes() {
+        List<Camera.Size> sizes;
+        try {
+            sizes = mCamera.getParameters().getSupportedPreviewSizes();
+        } catch (Exception e) {
+            LOG.e("getPreviewStreamAvailableSizes:", "Failed to compute preview size. Camera params is empty");
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
+        }
+        List<Size> result = new ArrayList<>(sizes.size());
+        for (Camera.Size size : sizes) {
+            Size add = new Size(size.width, size.height);
+            if (!result.contains(add)) result.add(add);
+        }
+        LOG.i("getPreviewStreamAvailableSizes:", result);
+        return result;
+    }
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected List<Size> getFrameProcessingAvailableSizes() {
+        // We don't choose the frame processing size.
+        // It comes from the preview stream.
+        return Collections.singletonList(mPreviewStreamSize);
+    }
+
+    @EngineThread
+    @Override
+    protected void onPreviewStreamSizeChanged() {
+        restartPreview();
+    }
+
+    @EngineThread
+    @Override
+    protected boolean collectCameraInfo(@NonNull Facing facing) {
+        int internalFacing = mMapper.mapFacing(facing);
+        LOG.i("collectCameraInfo",
+                "Facing:", facing,
+                "Internal:", internalFacing,
+                "Cameras:", Camera.getNumberOfCameras());
+        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+        for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
+            Camera.getCameraInfo(i, cameraInfo);
+            if (cameraInfo.facing == internalFacing) {
+                getAngles().setSensorOffset(facing, cameraInfo.orientation);
+                mCameraId = i;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    //endregion
+
+    //region Start
+
+    @NonNull
+    @EngineThread
+    @Override
+    protected Task<CameraOptions> onStartEngine() {
+        try {
+            mCamera = Camera.open(mCameraId);
+        } catch (Exception e) {
+            LOG.e("onStartEngine:", "Failed to connect. Maybe in use by another app?");
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
+        }
+        if (mCamera == null) {
+            LOG.e("onStartEngine:", "Failed to connect. Camera is null, maybe in use by another app or already released?");
+            throw new CameraException(CameraException.REASON_FAILED_TO_CONNECT);
+        }
+        mCamera.setErrorCallback(this);
+
+        // Set parameters that might have been set before the camera was opened.
+        LOG.i("onStartEngine:", "Applying default parameters.");
+        try {
+            Camera.Parameters params = mCamera.getParameters();
+            mCameraOptions = new Camera1Options(params, mCameraId,
+                    getAngles().flip(Reference.SENSOR, Reference.VIEW));
+            applyAllParameters(params);
+            mCamera.setParameters(params);
+        } catch (Exception e) {
+            LOG.e("onStartEngine:", "Failed to connect. Problem with camera params");
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
+        }
+        try {
+            mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW,
+                    Axis.ABSOLUTE)); // <- not allowed during preview
+        } catch (Exception e) {
+            LOG.e("onStartEngine:", "Failed to connect. Can't set display orientation, maybe preview already exists?");
+            throw new CameraException(CameraException.REASON_FAILED_TO_CONNECT);
+        }
+        LOG.i("onStartEngine:", "Ended");
+        return Tasks.forResult(mCameraOptions);
+    }
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected Task<Void> onStartBind() {
+        LOG.i("onStartBind:", "Started");
+        try {
+            if (mPreview.getOutputClass() == SurfaceHolder.class) {
+                mCamera.setPreviewDisplay((SurfaceHolder) mPreview.getOutput());
+            } else if (mPreview.getOutputClass() == SurfaceTexture.class) {
+                mCamera.setPreviewTexture((SurfaceTexture) mPreview.getOutput());
+            } else {
+                throw new RuntimeException("Unknown CameraPreview output class.");
+            }
+        } catch (IOException e) {
+            LOG.e("onStartBind:", "Failed to bind.", e);
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
+        }
+
+        mCaptureSize = computeCaptureSize();
+        mPreviewStreamSize = computePreviewStreamSize();
+        return Tasks.forResult(null);
+    }
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected Task<Void> onStartPreview() {
+        LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
+        getCallback().onCameraPreviewStreamSizeChanged();
+
+        Size previewSize = getPreviewStreamSize(Reference.VIEW);
+        if (previewSize == null) {
+            throw new IllegalStateException("previewStreamSize should not be null at this point.");
+        }
+        mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
+        mPreview.setDrawRotation(0);
+
+        Camera.Parameters params;
+        try {
+            params = mCamera.getParameters();
+        } catch (Exception e) {
+            LOG.e("onStartPreview:", "Failed to get params from camera. Maybe low level problem with camera or camera has already released?");
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
+        }
+        // NV21 should be the default, but let's make sure, since YuvImage will only support this
+        // and a few others
+        params.setPreviewFormat(ImageFormat.NV21);
+        // setPreviewSize is not allowed during preview
+        params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight());
+        if (getMode() == Mode.PICTURE) {
+            // setPictureSize is allowed during preview
+            params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight());
+        } else {
+            // mCaptureSize in this case is a video size. The available video sizes are not
+            // necessarily a subset of the picture sizes, so we can't use the mCaptureSize value:
+            // it might crash. However, the setPictureSize() passed here is useless: we don't allow
+            // HQ pictures in video mode.
+            // While this might be lifted in the future, for now, just use a picture capture size.
+            Size pictureSize = computeCaptureSize(Mode.PICTURE);
+            params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
+        }
+        try {
+            mCamera.setParameters(params);
+        } catch (Exception e) {
+            LOG.e("onStartPreview:", "Failed to set params for camera. Maybe incorrect parameter put in params?");
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
+        }
+
+        mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
+        mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
+        getFrameManager().setUp(PREVIEW_FORMAT, mPreviewStreamSize, getAngles());
+
+        LOG.i("onStartPreview", "Starting preview with startPreview().");
+        try {
+            mCamera.startPreview();
+        } catch (Exception e) {
+            LOG.e("onStartPreview", "Failed to start preview.", e);
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
+        }
+        LOG.i("onStartPreview", "Started preview.");
+        return Tasks.forResult(null);
+    }
+
+    //endregion
+
+    //region Stop
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected Task<Void> onStopPreview() {
+        LOG.i("onStopPreview:", "Started.");
+        if (mVideoRecorder != null) {
+            mVideoRecorder.stop(true);
+            mVideoRecorder = null;
+        }
+        mPictureRecorder = null;
+        getFrameManager().release();
+        LOG.i("onStopPreview:", "Releasing preview buffers.");
+        mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
+        try {
+            LOG.i("onStopPreview:", "Stopping preview.");
+            mCamera.stopPreview();
+            LOG.i("onStopPreview:", "Stopped preview.");
+        } catch (Exception e) {
+            LOG.e("stopPreview", "Could not stop preview", e);
+        }
+        return Tasks.forResult(null);
+    }
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected Task<Void> onStopBind() {
+        mPreviewStreamSize = null;
+        mCaptureSize = null;
+        try {
+            if (mPreview.getOutputClass() == SurfaceHolder.class) {
+                mCamera.setPreviewDisplay(null);
+            } else if (mPreview.getOutputClass() == SurfaceTexture.class) {
+                mCamera.setPreviewTexture(null);
+            } else {
+                throw new RuntimeException("Unknown CameraPreview output class.");
+            }
+        } catch (IOException e) {
+            // NOTE: when this happens, the next onStopEngine() call hangs on camera.release(),
+            // Not sure for how long. This causes the destroy() flow to fail the timeout.
+            LOG.e("onStopBind", "Could not release surface", e);
+        }
+        return Tasks.forResult(null);
+    }
+
+    @EngineThread
+    @NonNull
+    @Override
+    protected Task<Void> onStopEngine() {
+        LOG.i("onStopEngine:", "About to clean up.");
+        getOrchestrator().remove(JOB_FOCUS_RESET);
+        getOrchestrator().remove(JOB_FOCUS_END);
+        if (mCamera != null) {
+            try {
+                LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
+                // Just like Camera2Engine, this call can hang (at least on emulators) and if
+                // we don't find a way around the lock, it leaves the camera in a bad state.
+                // This is anticipated by the exception in onStopBind() (see above).
+                //
+                // 12:29:32.163 E Camera3-Device: Camera 0: clearStreamingRequest: Device has encountered a serious error
+                // 12:29:32.163 E Camera2-StreamingProcessor: stopStream: Camera 0: Can't clear stream request: Function not implemented (-38)
+                // 12:29:32.163 E Camera2Client: stopPreviewL: Camera 0: Can't stop streaming: Function not implemented (-38)
+                // 12:29:32.273 E Camera2-StreamingProcessor: deletePreviewStream: Unable to delete old preview stream: Device or resource busy (-16)
+                // 12:29:32.274 E Camera2-CallbackProcessor: deleteStream: Unable to delete callback stream: Device or resource busy (-16)
+                // 12:29:32.274 E Camera3-Device: Camera 0: disconnect: Shutting down in an error state
+                //
+                // I believe there is a thread deadlock due to this call internally waiting to
+                // dispatch some callback to us (pending captures, ...), but the callback thread
+                // is blocked here. We try to workaround this in CameraEngine.destroy().
+                mCamera.release();
+                LOG.i("onStopEngine:", "Clean up.", "Released camera.");
+            } catch (Exception e) {
+                LOG.w("onStopEngine:", "Clean up.", "Exception while releasing camera.", e);
+            }
+            mCamera = null;
+            mCameraOptions = null;
+        }
+        mVideoRecorder = null;
+        mCameraOptions = null;
+        mCamera = null;
+        LOG.w("onStopEngine:", "Clean up.", "Returning.");
+        return Tasks.forResult(null);
+    }
+
+    //endregion
+
+    //region Pictures
+
+    @EngineThread
+    @Override
+    protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
+        LOG.i("onTakePicture:", "executing.");
+        stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
+                Axis.RELATIVE_TO_SENSOR);
+        stub.size = getPictureSize(Reference.OUTPUT);
+        mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
+        mPictureRecorder.take();
+        LOG.i("onTakePicture:", "executed.");
+    }
+
+    @EngineThread
+    @Override
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
+                                         @NonNull AspectRatio outputRatio,
+                                         boolean doMetering) {
+        LOG.i("onTakePictureSnapshot:", "executing.");
+        // Not the real size: it will be cropped to match the view ratio
+        stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
+        if (mPreview instanceof RendererCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+            stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this,
+                    (RendererCameraPreview) mPreview, outputRatio, getOverlay());
+        } else {
+            stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
+            mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
+        }
+        mPictureRecorder.take();
+        LOG.i("onTakePictureSnapshot:", "executed.");
+    }
+
+    //endregion
+
+    //region Videos
+
+    @EngineThread
+    @Override
+    protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
+        stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
+                Axis.RELATIVE_TO_SENSOR);
+        stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip()
+                : mCaptureSize;
+        // Unlock the camera and start recording.
+        try {
+            mCamera.unlock();
+        } catch (Exception e) {
+            // If this failed, we are unlikely able to record the video.
+            // Dispatch an error.
+            onVideoResult(null, e);
+            return;
+        }
+        mVideoRecorder = new Full1VideoRecorder(Camera1Engine.this, mCamera, mCameraId);
+        mVideoRecorder.start(stub);
+    }
+
+    @SuppressLint("NewApi")
+    @EngineThread
+    @Override
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
+                                       @NonNull AspectRatio outputRatio) {
+        if (!(mPreview instanceof RendererCameraPreview)) {
+            throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
+        }
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
+            throw new IllegalStateException("Video snapshots are only supported on API 18+.");
+        }
+        RendererCameraPreview glPreview = (RendererCameraPreview) mPreview;
+        Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
+        if (outputSize == null) {
+            throw new IllegalStateException("outputSize should not be null.");
+        }
+        Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
+        outputSize = new Size(outputCrop.width(), outputCrop.height());
+        stub.size = outputSize;
+        // Vertical:               0   (270-0-0)
+        // Left (unlocked):        0   (270-90-270)
+        // Right (unlocked):       0   (270-270-90)
+        // Upside down (unlocked): 0   (270-180-180)
+        // Left (locked):          270 (270-0-270)
+        // Right (locked):         90  (270-0-90)
+        // Upside down (locked):   180 (270-0-180)
+        // The correct formula seems to be deviceOrientation+displayOffset,
+        // which means offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE).
+        stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
+        stub.videoFrameRate = Math.round(mPreviewFrameRate);
+        LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
+
+        // Start.
+        mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview, getOverlay());
+        mVideoRecorder.start(stub);
+    }
+
+    @Override
+    public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
+        super.onVideoResult(result, exception);
+        if (result == null) {
+            // Something went wrong, lock the camera again.
+            mCamera.lock();
+        }
+    }
+
+    //endregion
+
+    //region Parameters
+
+    private void applyAllParameters(@NonNull Camera.Parameters params) {
+        params.setRecordingHint(getMode() == Mode.VIDEO);
+        applyDefaultFocus(params);
+        applyFlash(params, Flash.OFF);
+        applyLocation(params, null);
+        applyWhiteBalance(params, WhiteBalance.AUTO);
+        applyHdr(params, Hdr.OFF);
+        applyZoom(params, 0F);
+        applyExposureCorrection(params, 0F);
+        applyPlaySounds(mPlaySounds);
+        applyPreviewFrameRate(params, 0F);
+    }
+
+    private void applyDefaultFocus(@NonNull Camera.Parameters params) {
+        List<String> modes = params.getSupportedFocusModes();
+
+        if (getMode() == Mode.VIDEO &&
+                modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+            return;
+        }
+
+        if (modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
+            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+            return;
+        }
+
+        if (modes.contains(Camera.Parameters.FOCUS_MODE_INFINITY)) {
+            params.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
+            return;
+        }
+
+        if (modes.contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
+            params.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
+            //noinspection UnnecessaryReturnStatement
+            return;
+        }
+    }
+
+    @Override
+    public void setFlash(@NonNull Flash flash) {
+        final Flash old = mFlash;
+        mFlash = flash;
+        mFlashTask = getOrchestrator().scheduleStateful("flash (" + flash + ")",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyFlash(params, old)) mCamera.setParameters(params);
+            }
+        });
+    }
+
+    private boolean applyFlash(@NonNull Camera.Parameters params, @NonNull Flash oldFlash) {
+        if (mCameraOptions.supports(mFlash)) {
+            params.setFlashMode(mMapper.mapFlash(mFlash));
+            return true;
+        }
+        mFlash = oldFlash;
+        return false;
+    }
+
+    @Override
+    public void setLocation(@Nullable Location location) {
+        final Location oldLocation = mLocation;
+        mLocation = location;
+        mLocationTask = getOrchestrator().scheduleStateful("location",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyLocation(params, oldLocation)) mCamera.setParameters(params);
+            }
+        });
+    }
+
+    private boolean applyLocation(@NonNull Camera.Parameters params,
+                                  @SuppressWarnings("unused") @Nullable Location oldLocation) {
+        if (mLocation != null) {
+            params.setGpsLatitude(mLocation.getLatitude());
+            params.setGpsLongitude(mLocation.getLongitude());
+            params.setGpsAltitude(mLocation.getAltitude());
+            params.setGpsTimestamp(mLocation.getTime());
+            params.setGpsProcessingMethod(mLocation.getProvider());
+        }
+        return true;
+    }
+
+    @Override
+    public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
+        final WhiteBalance old = mWhiteBalance;
+        mWhiteBalance = whiteBalance;
+        mWhiteBalanceTask = getOrchestrator().scheduleStateful(
+                "white balance (" + whiteBalance + ")",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyWhiteBalance(params, old)) mCamera.setParameters(params);
+            }
+        });
+    }
+
+    private boolean applyWhiteBalance(@NonNull Camera.Parameters params,
+                                      @NonNull WhiteBalance oldWhiteBalance) {
+        if (mCameraOptions.supports(mWhiteBalance)) {
+            // If this lock key is present, the engine can throw when applying the
+            // parameters, not sure why. Since we never lock it, this should be
+            // harmless for the rest of the engine.
+            params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance));
+            params.remove("auto-whitebalance-lock");
+            return true;
+        }
+        mWhiteBalance = oldWhiteBalance;
+        return false;
+    }
+
+    @Override
+    public void setHdr(@NonNull Hdr hdr) {
+        final Hdr old = mHdr;
+        mHdr = hdr;
+        mHdrTask = getOrchestrator().scheduleStateful("hdr (" + hdr + ")",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyHdr(params, old)) mCamera.setParameters(params);
+            }
+        });
+    }
+
+    private boolean applyHdr(@NonNull Camera.Parameters params, @NonNull Hdr oldHdr) {
+        if (mCameraOptions.supports(mHdr)) {
+            params.setSceneMode(mMapper.mapHdr(mHdr));
+            return true;
+        }
+        mHdr = oldHdr;
+        return false;
+    }
+
+    @Override
+    public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
+        final float old = mZoomValue;
+        mZoomValue = zoom;
+        // Zoom requests can be high frequency (e.g. linked to touch events), so
+        // we remove the task before scheduling to avoid stack overflows in orchestrator.
+        getOrchestrator().remove("zoom");
+        mZoomTask = getOrchestrator().scheduleStateful("zoom",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyZoom(params, old)) {
+                    mCamera.setParameters(params);
+                    if (notify) {
+                        getCallback().dispatchOnZoomChanged(mZoomValue, points);
+                    }
+                }
+            }
+        });
+    }
+
+    private boolean applyZoom(@NonNull Camera.Parameters params, float oldZoom) {
+        if (mCameraOptions.isZoomSupported()) {
+            float max = params.getMaxZoom();
+            params.setZoom((int) (mZoomValue * max));
+            mCamera.setParameters(params);
+            return true;
+        }
+        mZoomValue = oldZoom;
+        return false;
+    }
+
+    @Override
+    public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
+                                      @Nullable final PointF[] points, final boolean notify) {
+        final float old = mExposureCorrectionValue;
+        mExposureCorrectionValue = EVvalue;
+        // EV requests can be high frequency (e.g. linked to touch events), so
+        // we remove the task before scheduling to avoid stack overflows in orchestrator.
+        getOrchestrator().remove("exposure correction");
+        mExposureCorrectionTask = getOrchestrator().scheduleStateful(
+                "exposure correction",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyExposureCorrection(params, old)) {
+                    mCamera.setParameters(params);
+                    if (notify) {
+                        getCallback().dispatchOnExposureCorrectionChanged(mExposureCorrectionValue,
+                                bounds, points);
+                    }
+                }
+            }
+        });
+    }
+
+    private boolean applyExposureCorrection(@NonNull Camera.Parameters params,
+                                            float oldExposureCorrection) {
+        if (mCameraOptions.isExposureCorrectionSupported()) {
+            // Just make sure we're inside boundaries.
+            float max = mCameraOptions.getExposureCorrectionMaxValue();
+            float min = mCameraOptions.getExposureCorrectionMinValue();
+            float val = mExposureCorrectionValue;
+            val = val < min ? min : val > max ? max : val; // cap
+            mExposureCorrectionValue = val;
+            // Apply.
+            int indexValue = (int) (mExposureCorrectionValue
+                    / params.getExposureCompensationStep());
+            params.setExposureCompensation(indexValue);
+            return true;
+        }
+        mExposureCorrectionValue = oldExposureCorrection;
+        return false;
+    }
+
+    @Override
+    public void setPlaySounds(boolean playSounds) {
+        final boolean old = mPlaySounds;
+        mPlaySounds = playSounds;
+        mPlaySoundsTask = getOrchestrator().scheduleStateful(
+                "play sounds (" + playSounds + ")",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                applyPlaySounds(old);
+            }
+        });
+    }
+
+    @SuppressWarnings("UnusedReturnValue")
+    @TargetApi(17)
+    private boolean applyPlaySounds(boolean oldPlaySound) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
+            Camera.CameraInfo info = new Camera.CameraInfo();
+            Camera.getCameraInfo(mCameraId, info);
+            if (info.canDisableShutterSound) {
+                try {
+                    // this method is documented to throw on some occasions. #377
+                    return mCamera.enableShutterSound(mPlaySounds);
+                } catch (RuntimeException exception) {
+                    return false;
+                }
+            }
+        }
+        if (mPlaySounds) {
+            return true;
+        }
+        mPlaySounds = oldPlaySound;
+        return false;
+    }
+
+    @Override
+    public void setPreviewFrameRate(float previewFrameRate) {
+        final float old = mPreviewFrameRate; // keep the previous value so it can be restored if applying fails
+        mPreviewFrameRate = previewFrameRate;
+        mPreviewFrameRateTask = getOrchestrator().scheduleStateful(
+                "preview fps (" + previewFrameRate + ")",
+                CameraState.ENGINE,
+                new Runnable() {
+            @Override
+            public void run() {
+                Camera.Parameters params = mCamera.getParameters();
+                if (applyPreviewFrameRate(params, old)) mCamera.setParameters(params);
+            }
+        });
+    }
+
+    private boolean applyPreviewFrameRate(@NonNull Camera.Parameters params,
+                                          float oldPreviewFrameRate) {
+        List<int[]> fpsRanges = params.getSupportedPreviewFpsRange();
+        sortRanges(fpsRanges);
+        if (mPreviewFrameRate == 0F) {
+            // 0F is a special value. Fallback to a reasonable default.
+            for (int[] fpsRange : fpsRanges) {
+                float lower = (float) fpsRange[0] / 1000F;
+                float upper = (float) fpsRange[1] / 1000F;
+                if ((lower <= 30F && 30F <= upper) || (lower <= 24F && 24F <= upper)) {
+                    params.setPreviewFpsRange(fpsRange[0], fpsRange[1]);
+                    return true;
+                }
+            }
+        } else {
+            // If out of boundaries, adjust it.
+            mPreviewFrameRate = Math.min(mPreviewFrameRate,
+                    mCameraOptions.getPreviewFrameRateMaxValue());
+            mPreviewFrameRate = Math.max(mPreviewFrameRate,
+                    mCameraOptions.getPreviewFrameRateMinValue());
+            for (int[] fpsRange : fpsRanges) {
+                float lower = (float) fpsRange[0] / 1000F;
+                float upper = (float) fpsRange[1] / 1000F;
+                float rate = Math.round(mPreviewFrameRate);
+                if (lower <= rate && rate <= upper) {
+                    params.setPreviewFpsRange(fpsRange[0], fpsRange[1]);
+                    return true;
+                }
+            }
+        }
+        mPreviewFrameRate = oldPreviewFrameRate;
+        return false;
+    }
+
+    private void sortRanges(List<int[]> fpsRanges) {
+        if (getPreviewFrameRateExact() && mPreviewFrameRate != 0F) { // sort by range width in ascending order
+            Collections.sort(fpsRanges, new Comparator<int[]>() {
+                @Override
+                public int compare(int[] range1, int[] range2) {
+                    return (range1[1] - range1[0]) - (range2[1] - range2[0]);
+                }
+            });
+        } else { // sort by range width in descending order
+            Collections.sort(fpsRanges, new Comparator<int[]>() {
+                @Override
+                public int compare(int[] range1, int[] range2) {
+                    return (range2[1] - range2[0]) - (range1[1] - range1[0]);
+                }
+            });
+        }
+    }
+
+    @Override
+    public void setPictureFormat(@NonNull PictureFormat pictureFormat) {
+        if (pictureFormat != PictureFormat.JPEG) {
+            throw new UnsupportedOperationException("Unsupported picture format: " + pictureFormat);
+        }
+        mPictureFormat = pictureFormat;
+    }
+
+    //endregion
+
+    //region Frame Processing
+
+    @NonNull
+    @Override
+    protected FrameManager instantiateFrameManager(int poolSize) {
+        return new ByteBufferFrameManager(poolSize, this);
+    }
+
+    @NonNull
+    @Override
+    public ByteBufferFrameManager getFrameManager() {
+        return (ByteBufferFrameManager) super.getFrameManager();
+    }
+
+    @Override
+    public void setHasFrameProcessors(boolean hasFrameProcessors) {
+        // We don't care: frame processing (FP) is always on with this engine.
+        mHasFrameProcessors = hasFrameProcessors;
+    }
+
+    @Override
+    public void setFrameProcessingFormat(int format) {
+        // Ignore input: we only support NV21.
+        mFrameProcessingFormat = ImageFormat.NV21;
+    }
+
+    @Override
+    public void onBufferAvailable(@NonNull byte[] buffer) {
+        if (getState().isAtLeast(CameraState.ENGINE)
+                && getTargetState().isAtLeast(CameraState.ENGINE)) {
+            mCamera.addCallbackBuffer(buffer);
+        }
+    }
+
+    @Override
+    public void onPreviewFrame(byte[] data, Camera camera) {
+        if (data == null) {
+            // Seen this happen in logs.
+            return;
+        }
+        Frame frame = getFrameManager().getFrame(data, System.currentTimeMillis());
+        if (frame != null) {
+            getCallback().dispatchFrame(frame);
+        }
+    }
+
+    //endregion
+
+    //region Auto Focus
+
+    @Override
+    public void startAutoFocus(@Nullable final Gesture gesture,
+                               @NonNull final MeteringRegions regions,
+                               @NonNull final PointF legacyPoint) {
+        getOrchestrator().scheduleStateful("auto focus", CameraState.BIND, new Runnable() {
+            @Override
+            public void run() {
+                if (!mCameraOptions.isAutoFocusSupported()) return;
+                MeteringTransform<Camera.Area> transform = new Camera1MeteringTransform(
+                        getAngles(),
+                        getPreview().getSurfaceSize());
+                MeteringRegions transformed = regions.transform(transform);
+
+                Camera.Parameters params = mCamera.getParameters();
+                int maxAF = params.getMaxNumFocusAreas();
+                int maxAE = params.getMaxNumMeteringAreas();
+                if (maxAF > 0) params.setFocusAreas(transformed.get(maxAF, transform));
+                if (maxAE > 0) params.setMeteringAreas(transformed.get(maxAE, transform));
+                params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+                mCamera.setParameters(params);
+                getCallback().dispatchOnFocusStart(gesture, legacyPoint);
+
+                // The auto focus callback is not guaranteed to be called, but we really want it
+                // to be. So we remove the old runnable if still present and post a new one.
+                getOrchestrator().remove(JOB_FOCUS_END);
+                getOrchestrator().scheduleDelayed(JOB_FOCUS_END, AUTOFOCUS_END_DELAY_MILLIS,
+                        new Runnable() {
+                    @Override
+                    public void run() {
+                        getCallback().dispatchOnFocusEnd(gesture, false, legacyPoint);
+                    }
+                });
+
+                // Wrapping autoFocus in a try/catch to handle some device-specific exceptions,
+                // see https://github.com/natario1/CameraView/issues/181.
+                try {
+                    mCamera.autoFocus(new Camera.AutoFocusCallback() {
+                        @Override
+                        public void onAutoFocus(boolean success, Camera camera) {
+                            getOrchestrator().remove(JOB_FOCUS_END);
+                            getOrchestrator().remove(JOB_FOCUS_RESET);
+                            getCallback().dispatchOnFocusEnd(gesture, success, legacyPoint);
+                            if (shouldResetAutoFocus()) {
+                                getOrchestrator().scheduleStatefulDelayed(
+                                        JOB_FOCUS_RESET,
+                                        CameraState.ENGINE,
+                                        getAutoFocusResetDelay(),
+                                        new Runnable() {
+                                    @Override
+                                    public void run() {
+                                        mCamera.cancelAutoFocus();
+                                        Camera.Parameters params = mCamera.getParameters();
+                                        int maxAF = params.getMaxNumFocusAreas();
+                                        int maxAE = params.getMaxNumMeteringAreas();
+                                        if (maxAF > 0) params.setFocusAreas(null);
+                                        if (maxAE > 0) params.setMeteringAreas(null);
+                                        applyDefaultFocus(params); // Revert to internal focus.
+                                        mCamera.setParameters(params);
+                                    }
+                                });
+                            }
+                        }
+                    });
+                } catch (RuntimeException e) {
+                    LOG.e("startAutoFocus:", "Error calling autoFocus", e);
+                    // Let the mFocusEndRunnable do its job. (could remove it and quickly dispatch
+                    // onFocusEnd here, but let's make it simpler).
+                }
+            }
+        });
+    }
+
+    //endregion
+}
+
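
One detail worth calling out in applyPreviewFrameRate() above: Camera.Parameters#getSupportedPreviewFpsRange() reports ranges scaled by 1000 (e.g. {15000, 30000} for 15-30 fps), which is why the code divides by 1000F before comparing against the requested rate. A minimal, self-contained sketch of that test with an illustrative range:

    final class FpsRangeExample {
        // Mirrors the range test used by applyPreviewFrameRate(): ranges come in
        // as fps * 1000, so {15000, 30000} means 15-30 fps.
        static boolean rangeContains(int[] fpsRange, float requestedFps) {
            float lower = fpsRange[0] / 1000F;
            float upper = fpsRange[1] / 1000F;
            return lower <= requestedFps && requestedFps <= upper;
        }

        public static void main(String[] args) {
            // 24 fps falls inside 15-30 fps, so this range would be selected.
            System.out.println(rangeContains(new int[]{15000, 30000}, 24F)); // true
        }
    }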

The diff is not shown because the file is too large
+ 1657 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java


+ 975 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java

@@ -0,0 +1,975 @@
+package com.otaliastudios.cameraview.engine;
+
+import android.location.Location;
+
+import androidx.annotation.CallSuper;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.otaliastudios.cameraview.CameraException;
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.AudioCodec;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.Mode;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.engine.offset.Angles;
+import com.otaliastudios.cameraview.engine.offset.Reference;
+import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
+import com.otaliastudios.cameraview.frame.FrameManager;
+import com.otaliastudios.cameraview.overlay.Overlay;
+import com.otaliastudios.cameraview.picture.PictureRecorder;
+import com.otaliastudios.cameraview.preview.CameraPreview;
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.size.SizeSelector;
+import com.otaliastudios.cameraview.size.SizeSelectors;
+import com.otaliastudios.cameraview.video.VideoRecorder;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+
+/**
+ * Abstract implementation of {@link CameraEngine} that helps in common tasks.
+ */
+public abstract class CameraBaseEngine extends CameraEngine {
+
+    @SuppressWarnings("WeakerAccess") protected CameraPreview mPreview;
+    @SuppressWarnings("WeakerAccess") protected CameraOptions mCameraOptions;
+    @SuppressWarnings("WeakerAccess") protected PictureRecorder mPictureRecorder;
+    @SuppressWarnings("WeakerAccess") protected VideoRecorder mVideoRecorder;
+    @SuppressWarnings("WeakerAccess") protected Size mCaptureSize;
+    @SuppressWarnings("WeakerAccess") protected Size mPreviewStreamSize;
+    @SuppressWarnings("WeakerAccess") protected Size mFrameProcessingSize;
+    @SuppressWarnings("WeakerAccess") protected int mFrameProcessingFormat;
+    @SuppressWarnings("WeakerAccess") protected boolean mHasFrameProcessors;
+    @SuppressWarnings("WeakerAccess") protected Flash mFlash;
+    @SuppressWarnings("WeakerAccess") protected WhiteBalance mWhiteBalance;
+    @SuppressWarnings("WeakerAccess") protected VideoCodec mVideoCodec;
+    @SuppressWarnings("WeakerAccess") protected AudioCodec mAudioCodec;
+    @SuppressWarnings("WeakerAccess") protected Hdr mHdr;
+    @SuppressWarnings("WeakerAccess") protected PictureFormat mPictureFormat;
+    @SuppressWarnings("WeakerAccess") protected Location mLocation;
+    @SuppressWarnings("WeakerAccess") protected float mZoomValue;
+    @SuppressWarnings("WeakerAccess") protected float mExposureCorrectionValue;
+    @SuppressWarnings("WeakerAccess") protected boolean mPlaySounds;
+    @SuppressWarnings("WeakerAccess") protected boolean mPictureMetering;
+    @SuppressWarnings("WeakerAccess") protected boolean mPictureSnapshotMetering;
+    @SuppressWarnings("WeakerAccess") protected float mPreviewFrameRate;
+    @SuppressWarnings("WeakerAccess") private boolean mPreviewFrameRateExact;
+
+    private FrameManager mFrameManager;
+    private final Angles mAngles = new Angles();
+    @Nullable private SizeSelector mPreviewStreamSizeSelector;
+    private SizeSelector mPictureSizeSelector;
+    private SizeSelector mVideoSizeSelector;
+    private Facing mFacing;
+    private Mode mMode;
+    private Audio mAudio;
+    private long mVideoMaxSize;
+    private int mVideoMaxDuration;
+    private int mVideoBitRate;
+    private int mAudioBitRate;
+    private long mAutoFocusResetDelayMillis;
+    private int mSnapshotMaxWidth; // in REF_VIEW like SizeSelectors
+    private int mSnapshotMaxHeight; // in REF_VIEW like SizeSelectors
+    private int mFrameProcessingMaxWidth; // in REF_VIEW like SizeSelectors
+    private int mFrameProcessingMaxHeight; // in REF_VIEW like SizeSelectors
+    private int mFrameProcessingPoolSize;
+    private Overlay mOverlay;
+
+    // Ops used for testing.
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mZoomTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mExposureCorrectionTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mFlashTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mWhiteBalanceTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mHdrTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mLocationTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mPlaySoundsTask
+            = Tasks.forResult(null);
+    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mPreviewFrameRateTask
+            = Tasks.forResult(null);
+
+    @SuppressWarnings("WeakerAccess")
+    protected CameraBaseEngine(@NonNull Callback callback) {
+        super(callback);
+    }
+
+    /**
+     * Called at construction time to get a frame manager that can later be
+     * accessed through {@link #getFrameManager()}.
+     * @param poolSize pool size
+     * @return a frame manager
+     */
+    @NonNull
+    protected abstract FrameManager instantiateFrameManager(int poolSize);
+
+    @NonNull
+    @Override
+    public final Angles getAngles() {
+        return mAngles;
+    }
+
+    @NonNull
+    @Override
+    public FrameManager getFrameManager() {
+        if (mFrameManager == null) {
+            mFrameManager = instantiateFrameManager(mFrameProcessingPoolSize);
+        }
+        return mFrameManager;
+    }
+
+    @Nullable
+    @Override
+    public final CameraOptions getCameraOptions() {
+        return mCameraOptions;
+    }
+
+    @Override
+    public final void setPreview(@NonNull CameraPreview cameraPreview) {
+        if (mPreview != null) mPreview.setSurfaceCallback(null);
+        mPreview = cameraPreview;
+        mPreview.setSurfaceCallback(this);
+    }
+
+    @NonNull
+    @Override
+    public final CameraPreview getPreview() {
+        return mPreview;
+    }
+
+    @Override
+    public final void setOverlay(@Nullable Overlay overlay) {
+        mOverlay = overlay;
+    }
+
+    @Nullable
+    @Override
+    public final Overlay getOverlay() {
+        return mOverlay;
+    }
+
+    @Override
+    public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
+        mPreviewStreamSizeSelector = selector;
+    }
+
+    @Nullable
+    @Override
+    public final SizeSelector getPreviewStreamSizeSelector() {
+        return mPreviewStreamSizeSelector;
+    }
+
+    @Override
+    public final void setPictureSizeSelector(@NonNull SizeSelector selector) {
+        mPictureSizeSelector = selector;
+    }
+
+    @NonNull
+    @Override
+    public final SizeSelector getPictureSizeSelector() {
+        return mPictureSizeSelector;
+    }
+
+    @Override
+    public final void setVideoSizeSelector(@NonNull SizeSelector selector) {
+        mVideoSizeSelector = selector;
+    }
+
+    @NonNull
+    @Override
+    public final SizeSelector getVideoSizeSelector() {
+        return mVideoSizeSelector;
+    }
+
+    @Override
+    public final void setVideoMaxSize(long videoMaxSizeBytes) {
+        mVideoMaxSize = videoMaxSizeBytes;
+    }
+
+    @Override
+    public final long getVideoMaxSize() {
+        return mVideoMaxSize;
+    }
+
+    @Override
+    public final void setVideoMaxDuration(int videoMaxDurationMillis) {
+        mVideoMaxDuration = videoMaxDurationMillis;
+    }
+
+    @Override
+    public final int getVideoMaxDuration() {
+        return mVideoMaxDuration;
+    }
+
+    @Override
+    public final void setVideoCodec(@NonNull VideoCodec codec) {
+        mVideoCodec = codec;
+    }
+
+    @NonNull
+    @Override
+    public final VideoCodec getVideoCodec() {
+        return mVideoCodec;
+    }
+
+    @Override
+    public final void setVideoBitRate(int videoBitRate) {
+        mVideoBitRate = videoBitRate;
+    }
+
+    @Override
+    public final int getVideoBitRate() {
+        return mVideoBitRate;
+    }
+
+    @Override
+    public final void setAudioCodec(@NonNull AudioCodec codec) {
+        mAudioCodec = codec;
+    }
+
+    @NonNull
+    @Override
+    public final AudioCodec getAudioCodec() {
+        return mAudioCodec;
+    }
+
+    @Override
+    public final void setAudioBitRate(int audioBitRate) {
+        mAudioBitRate = audioBitRate;
+    }
+
+    @Override
+    public final int getAudioBitRate() {
+        return mAudioBitRate;
+    }
+
+    @Override
+    public final void setSnapshotMaxWidth(int maxWidth) {
+        mSnapshotMaxWidth = maxWidth;
+    }
+
+    @Override
+    public final int getSnapshotMaxWidth() {
+        return mSnapshotMaxWidth;
+    }
+
+    @Override
+    public final void setSnapshotMaxHeight(int maxHeight) {
+        mSnapshotMaxHeight = maxHeight;
+    }
+
+    @Override
+    public final int getSnapshotMaxHeight() {
+        return mSnapshotMaxHeight;
+    }
+
+    @Override
+    public final void setFrameProcessingMaxWidth(int maxWidth) {
+        mFrameProcessingMaxWidth = maxWidth;
+    }
+
+    @Override
+    public final int getFrameProcessingMaxWidth() {
+        return mFrameProcessingMaxWidth;
+    }
+
+    @Override
+    public final void setFrameProcessingMaxHeight(int maxHeight) {
+        mFrameProcessingMaxHeight = maxHeight;
+    }
+
+    @Override
+    public final int getFrameProcessingMaxHeight() {
+        return mFrameProcessingMaxHeight;
+    }
+
+    @Override
+    public final int getFrameProcessingFormat() {
+        return mFrameProcessingFormat;
+    }
+
+    @Override
+    public final void setFrameProcessingPoolSize(int poolSize) {
+        mFrameProcessingPoolSize = poolSize;
+    }
+
+    @Override
+    public final int getFrameProcessingPoolSize() {
+        return mFrameProcessingPoolSize;
+    }
+
+    @Override
+    public final void setAutoFocusResetDelay(long delayMillis) {
+        mAutoFocusResetDelayMillis = delayMillis;
+    }
+
+    @Override
+    public final long getAutoFocusResetDelay() {
+        return mAutoFocusResetDelayMillis;
+    }
+
+    /**
+     * Helper function for subclasses.
+     * @return true if AF should be reset
+     */
+    @SuppressWarnings("WeakerAccess")
+    protected final boolean shouldResetAutoFocus() {
+        return mAutoFocusResetDelayMillis > 0 && mAutoFocusResetDelayMillis != Long.MAX_VALUE;
+    }
+
+    /**
+     * Sets a new facing value. This will restart the engine session (if there is one)
+     * so that we can open the camera with the new facing.
+     * @param facing facing
+     */
+    @Override
+    public final void setFacing(final @NonNull Facing facing) {
+        final Facing old = mFacing;
+        if (facing != old) {
+            mFacing = facing;
+            getOrchestrator().scheduleStateful("facing", CameraState.ENGINE,
+                    new Runnable() {
+                @Override
+                public void run() {
+                    if (collectCameraInfo(facing)) {
+                        restart();
+                    } else {
+                        mFacing = old;
+                    }
+                }
+            });
+        }
+    }
+
+    @NonNull
+    @Override
+    public final Facing getFacing() {
+        return mFacing;
+    }
+
+    /**
+     * Sets a new audio value that will be used for video recordings.
+     * @param audio desired audio
+     */
+    @Override
+    public final void setAudio(@NonNull Audio audio) {
+        if (mAudio != audio) {
+            if (isTakingVideo()) {
+                LOG.w("Audio setting was changed while recording. " +
+                        "Changes will take place starting from next video");
+            }
+            mAudio = audio;
+        }
+    }
+
+    @NonNull
+    @Override
+    public final Audio getAudio() {
+        return mAudio;
+    }
+
+    /**
+     * Sets the desired mode (either picture or video).
+     * @param mode desired mode.
+     */
+    @Override
+    public final void setMode(@NonNull Mode mode) {
+        if (mode != mMode) {
+            mMode = mode;
+            getOrchestrator().scheduleStateful("mode", CameraState.ENGINE,
+                    new Runnable() {
+                @Override
+                public void run() {
+                    restart();
+                }
+            });
+        }
+    }
+
+    @NonNull
+    @Override
+    public final Mode getMode() {
+        return mMode;
+    }
+
+    @Override
+    public final float getZoomValue() {
+        return mZoomValue;
+    }
+
+    @Override
+    public final float getExposureCorrectionValue() {
+        return mExposureCorrectionValue;
+    }
+
+    @NonNull
+    @Override
+    public final Flash getFlash() {
+        return mFlash;
+    }
+
+    @NonNull
+    @Override
+    public final WhiteBalance getWhiteBalance() {
+        return mWhiteBalance;
+    }
+
+    @NonNull
+    @Override
+    public final Hdr getHdr() {
+        return mHdr;
+    }
+
+    @Nullable
+    @Override
+    public final Location getLocation() {
+        return mLocation;
+    }
+
+    @NonNull
+    @Override
+    public final PictureFormat getPictureFormat() {
+        return mPictureFormat;
+    }
+
+    @Override
+    public final void setPreviewFrameRateExact(boolean previewFrameRateExact) {
+        mPreviewFrameRateExact = previewFrameRateExact;
+    }
+
+    @Override
+    public final boolean getPreviewFrameRateExact() {
+        return mPreviewFrameRateExact;
+    }
+
+    @Override
+    public final float getPreviewFrameRate() {
+        return mPreviewFrameRate;
+    }
+
+    @Override
+    public final boolean hasFrameProcessors() {
+        return mHasFrameProcessors;
+    }
+
+    @Override
+    public final void setPictureMetering(boolean enable) {
+        mPictureMetering = enable;
+    }
+
+    @Override
+    public final boolean getPictureMetering() {
+        return mPictureMetering;
+    }
+
+    @Override
+    public final void setPictureSnapshotMetering(boolean enable) {
+        mPictureSnapshotMetering = enable;
+    }
+
+    @Override
+    public final boolean getPictureSnapshotMetering() {
+        return mPictureSnapshotMetering;
+    }
+
+    //region Picture and video control
+
+    @Override
+    public final boolean isTakingPicture() {
+        return mPictureRecorder != null;
+    }
+
+    @Override
+    public /* final */ void takePicture(final @NonNull PictureResult.Stub stub) {
+        // Save boolean before scheduling! See how Camera2Engine calls this with a temp value.
+        final boolean metering = mPictureMetering;
+        getOrchestrator().scheduleStateful("take picture", CameraState.BIND,
+                new Runnable() {
+            @Override
+            public void run() {
+                LOG.i("takePicture:", "running. isTakingPicture:", isTakingPicture());
+                if (isTakingPicture()) return;
+                if (mMode == Mode.VIDEO) {
+                    throw new IllegalStateException("Can't take hq pictures while in VIDEO mode");
+                }
+                stub.isSnapshot = false;
+                stub.location = mLocation;
+                stub.facing = mFacing;
+                stub.format = mPictureFormat;
+                onTakePicture(stub, metering);
+            }
+        });
+    }
+
+    /**
+     * The snapshot size is the {@link #getPreviewStreamSize(Reference)}, but cropped based on the
+     * view/surface aspect ratio.
+     * @param stub a picture stub
+     */
+    @Override
+    public /* final */ void takePictureSnapshot(final @NonNull PictureResult.Stub stub) {
+        // Save boolean before scheduling! See how Camera2Engine calls this with a temp value.
+        final boolean metering = mPictureSnapshotMetering;
+        getOrchestrator().scheduleStateful("take picture snapshot", CameraState.BIND,
+                new Runnable() {
+            @Override
+            public void run() {
+                LOG.i("takePictureSnapshot:", "running. isTakingPicture:", isTakingPicture());
+                if (isTakingPicture()) return;
+                stub.location = mLocation;
+                stub.isSnapshot = true;
+                stub.facing = mFacing;
+                stub.format = PictureFormat.JPEG;
+                // Leave the other parameters to subclasses.
+                //noinspection ConstantConditions
+                AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
+                onTakePictureSnapshot(stub, ratio, metering);
+            }
+        });
+    }
+
+    @Override
+    public void onPictureShutter(boolean didPlaySound) {
+        getCallback().onShutter(!didPlaySound);
+    }
+
+    @Override
+    public void onPictureResult(@Nullable PictureResult.Stub result, @Nullable Exception error) {
+        mPictureRecorder = null;
+        if (result != null) {
+            getCallback().dispatchOnPictureTaken(result);
+        } else {
+            LOG.e("onPictureResult", "result is null: something went wrong.", error);
+            getCallback().dispatchError(new CameraException(error,
+                    CameraException.REASON_PICTURE_FAILED));
+        }
+    }
+
+    @Override
+    public final boolean isTakingVideo() {
+        return mVideoRecorder != null && mVideoRecorder.isRecording();
+    }
+
+    @Override
+    public final void takeVideo(final @NonNull VideoResult.Stub stub,
+                                final @Nullable File file,
+                                final @Nullable FileDescriptor fileDescriptor) {
+        getOrchestrator().scheduleStateful("take video", CameraState.BIND, new Runnable() {
+            @Override
+            public void run() {
+                LOG.i("takeVideo:", "running. isTakingVideo:", isTakingVideo());
+                if (isTakingVideo()) return;
+                if (mMode == Mode.PICTURE) {
+                    throw new IllegalStateException("Can't record video while in PICTURE mode");
+                }
+                if (file != null) {
+                    stub.file = file;
+                } else if (fileDescriptor != null) {
+                    stub.fileDescriptor = fileDescriptor;
+                } else {
+                    throw new IllegalStateException("file and fileDescriptor are both null.");
+                }
+                stub.isSnapshot = false;
+                stub.videoCodec = mVideoCodec;
+                stub.audioCodec = mAudioCodec;
+                stub.location = mLocation;
+                stub.facing = mFacing;
+                stub.audio = mAudio;
+                stub.maxSize = mVideoMaxSize;
+                stub.maxDuration = mVideoMaxDuration;
+                stub.videoBitRate = mVideoBitRate;
+                stub.audioBitRate = mAudioBitRate;
+                onTakeVideo(stub);
+            }
+        });
+    }
+
+    /**
+     * @param stub a video stub
+     * @param file the output file
+     */
+    @Override
+    public final void takeVideoSnapshot(@NonNull final VideoResult.Stub stub,
+                                        @NonNull final File file) {
+        getOrchestrator().scheduleStateful("take video snapshot", CameraState.BIND,
+                new Runnable() {
+            @Override
+            public void run() {
+                LOG.i("takeVideoSnapshot:", "running. isTakingVideo:", isTakingVideo());
+                stub.file = file;
+                stub.isSnapshot = true;
+                stub.videoCodec = mVideoCodec;
+                stub.audioCodec = mAudioCodec;
+                stub.location = mLocation;
+                stub.facing = mFacing;
+                stub.videoBitRate = mVideoBitRate;
+                stub.audioBitRate = mAudioBitRate;
+                stub.audio = mAudio;
+                stub.maxSize = mVideoMaxSize;
+                stub.maxDuration = mVideoMaxDuration;
+                //noinspection ConstantConditions
+                AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
+                onTakeVideoSnapshot(stub, ratio);
+            }
+        });
+    }
+
+    @Override
+    public final void stopVideo() {
+        getOrchestrator().schedule("stop video", true, new Runnable() {
+            @Override
+            public void run() {
+                LOG.i("stopVideo", "running. isTakingVideo?", isTakingVideo());
+                onStopVideo();
+            }
+        });
+    }
+
+    @EngineThread
+    @SuppressWarnings("WeakerAccess")
+    protected void onStopVideo() {
+        if (mVideoRecorder != null) {
+            mVideoRecorder.stop(false);
+            // Do not null this, so we respond correctly to isTakingVideo(),
+            // which checks for recorder presence and recorder.isRecording().
+            // It will be nulled in onVideoResult.
+        }
+    }
+
+    @CallSuper
+    @Override
+    public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
+        mVideoRecorder = null;
+        if (result != null) {
+            getCallback().dispatchOnVideoTaken(result);
+        } else {
+            LOG.e("onVideoResult", "result is null: something went wrong.", exception);
+            getCallback().dispatchError(new CameraException(exception,
+                    CameraException.REASON_VIDEO_FAILED));
+        }
+    }
+
+    @Override
+    public void onVideoRecordingStart() {
+        getCallback().dispatchOnVideoRecordingStart();
+    }
+
+    @Override
+    public void onVideoRecordingEnd() {
+        getCallback().dispatchOnVideoRecordingEnd();
+    }
+
+    @EngineThread
+    protected abstract void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering);
+
+    @EngineThread
+    protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
+                                                  @NonNull AspectRatio outputRatio,
+                                                  boolean doMetering);
+
+    @EngineThread
+    protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
+                                                @NonNull AspectRatio outputRatio);
+
+    @EngineThread
+    protected abstract void onTakeVideo(@NonNull VideoResult.Stub stub);
+
+    //endregion
+
+    //region Size / Surface
+
+    @Override
+    public final void onSurfaceChanged() {
+        LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(Reference.VIEW));
+        getOrchestrator().scheduleStateful("surface changed", CameraState.BIND,
+                new Runnable() {
+            @Override
+            public void run() {
+                // Compute a new camera preview size and apply.
+                Size newSize = computePreviewStreamSize();
+                if (newSize.equals(mPreviewStreamSize)) {
+                    LOG.i("onSurfaceChanged:",
+                            "The computed preview size is identical. No op.");
+                } else {
+                    LOG.i("onSurfaceChanged:",
+                            "Computed a new preview size. Calling onPreviewStreamSizeChanged().");
+                    mPreviewStreamSize = newSize;
+                    onPreviewStreamSizeChanged();
+                }
+            }
+        });
+    }
+
+    /**
+     * The preview stream size has changed. At this point, some engines might want to
+     * simply call {@link #restartPreview()}, while others might call {@link #restartBind()}.
+     *
+     * It basically depends on the step at which the preview stream size is actually used.
+     */
+    @EngineThread
+    protected abstract void onPreviewStreamSizeChanged();
+
+    @Nullable
+    @Override
+    public final Size getPictureSize(@SuppressWarnings("SameParameterValue") @NonNull Reference reference) {
+        Size size = mCaptureSize;
+        if (size == null || mMode == Mode.VIDEO) return null;
+        return getAngles().flip(Reference.SENSOR, reference) ? size.flip() : size;
+    }
+
+    @Nullable
+    @Override
+    public final Size getVideoSize(@SuppressWarnings("SameParameterValue") @NonNull Reference reference) {
+        Size size = mCaptureSize;
+        if (size == null || mMode == Mode.PICTURE) return null;
+        return getAngles().flip(Reference.SENSOR, reference) ? size.flip() : size;
+    }
+
+    @Nullable
+    @Override
+    public final Size getPreviewStreamSize(@NonNull Reference reference) {
+        Size size = mPreviewStreamSize;
+        if (size == null) return null;
+        return getAngles().flip(Reference.SENSOR, reference) ? size.flip() : size;
+    }
+
+    @SuppressWarnings("SameParameterValue")
+    @Nullable
+    private Size getPreviewSurfaceSize(@NonNull Reference reference) {
+        CameraPreview preview = mPreview;
+        if (preview == null) return null;
+        return getAngles().flip(Reference.VIEW, reference) ? preview.getSurfaceSize().flip()
+                : preview.getSurfaceSize();
+    }
+
+    /**
+     * Returns the snapshot size, not yet cropped with the view dimensions, which
+     * is what we will do before creating the snapshot. However, cropping is done at various
+     * levels, so we don't want to perform that operation here.
+     *
+     * The base snapshot size is based on PreviewStreamSize (later cropped with view ratio). Why?
+     * One might be tempted to say that it's the SurfaceSize (which already matches the view ratio).
+     *
+     * The camera sensor will capture preview frames with PreviewStreamSize and that's it. Then they
+     * are hardware-scaled by the preview surface, but this does not affect the snapshot, as the
+     * snapshot recorder simply creates another surface.
+     *
+     * Tests were done to ensure that this is true, by using:
+     * 1. a small SurfaceSize and biggest() PreviewStreamSize: the output is not low quality
+     * 2. a big SurfaceSize and smallest() PreviewStreamSize: the output is low quality
+     * In both cases, the result size here was set to the bigger of the two.
+     *
+     * I could not find the same evidence for videos, but I would say that the same things should
+     * apply, despite the capturing mechanism being different.
+     *
+     * @param reference the reference system
+     * @return the uncropped snapshot size
+     */
+    @Nullable
+    @Override
+    public final Size getUncroppedSnapshotSize(@NonNull Reference reference) {
+        Size baseSize = getPreviewStreamSize(reference);
+        if (baseSize == null) return null;
+        boolean flip = getAngles().flip(reference, Reference.VIEW);
+        int maxWidth = flip ? mSnapshotMaxHeight : mSnapshotMaxWidth;
+        int maxHeight = flip ? mSnapshotMaxWidth : mSnapshotMaxHeight;
+        if (maxWidth <= 0) maxWidth = Integer.MAX_VALUE;
+        if (maxHeight <= 0) maxHeight = Integer.MAX_VALUE;
+        float baseRatio = AspectRatio.of(baseSize).toFloat();
+        float maxValuesRatio = AspectRatio.of(maxWidth, maxHeight).toFloat();
+        if (maxValuesRatio >= baseRatio) {
+            // Height is the real constraint.
+            int outHeight = Math.min(baseSize.getHeight(), maxHeight);
+            int outWidth = (int) Math.floor((float) outHeight * baseRatio);
+            return new Size(outWidth, outHeight);
+        } else {
+            // Width is the real constraint.
+            int outWidth = Math.min(baseSize.getWidth(), maxWidth);
+            int outHeight = (int) Math.floor((float) outWidth / baseRatio);
+            return new Size(outWidth, outHeight);
+        }
+    }
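+
+    // A minimal worked example of the constraint logic above, with hypothetical numbers:
+    //   base preview stream size = 1280x960 (ratio 4:3), snapshot max height = 480,
+    //   snapshot max width unset (so it becomes Integer.MAX_VALUE).
+    //   maxValuesRatio is then huge, so height is the real constraint:
+    //     outHeight = Math.min(960, 480) = 480
+    //     outWidth  = (int) Math.floor(480 * (4 / 3f)) = 640
+    //   The uncropped snapshot size is 640x480, preserving the preview stream ratio.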
+
+    /**
+     * This is called either on cameraView.start(), or when the underlying surface changes.
+     * It is possible that, on the first call, the preview surface has not yet computed its
+     * dimensions.
+     * But when it does, the {@link CameraPreview.SurfaceCallback} will be invoked,
+     * and this value should be refreshed.
+     *
+     * @return the capture size
+     */
+    @NonNull
+    @SuppressWarnings("WeakerAccess")
+    protected final Size computeCaptureSize() {
+        return computeCaptureSize(mMode);
+    }
+
+    @NonNull
+    @SuppressWarnings("WeakerAccess")
+    protected final Size computeCaptureSize(@NonNull Mode mode) {
+        // We want to pass stuff into the REF_VIEW reference, not the sensor one.
+        // This is already managed by CameraOptions, so we just flip again at the end.
+        boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
+        SizeSelector selector;
+        Collection<Size> sizes;
+        if (mode == Mode.PICTURE) {
+            selector = mPictureSizeSelector;
+            sizes = mCameraOptions.getSupportedPictureSizes();
+        } else {
+            selector = mVideoSizeSelector;
+            sizes = mCameraOptions.getSupportedVideoSizes();
+        }
+        selector = SizeSelectors.or(selector, SizeSelectors.biggest());
+        List<Size> list = new ArrayList<>(sizes);
+        Size result = selector.select(list).get(0);
+        if (!list.contains(result)) {
+            throw new RuntimeException("SizeSelectors must not return Sizes other than " +
+                    "those in the input list.");
+        }
+        LOG.i("computeCaptureSize:", "result:", result, "flip:", flip, "mode:", mode);
+        if (flip) result = result.flip(); // Go back to REF_SENSOR
+        return result;
+    }
+
+    /**
+     * This is called anytime {@link #computePreviewStreamSize()} is called.
+     * This means that it should be called during the binding process, when
+     * we can be sure that the camera is available (engineState == STARTED).
+     * @return a list of available sizes for preview
+     */
+    @EngineThread
+    @NonNull
+    protected abstract List<Size> getPreviewStreamAvailableSizes();
+
+    @EngineThread
+    @NonNull
+    @SuppressWarnings("WeakerAccess")
+    protected final Size computePreviewStreamSize() {
+        @NonNull List<Size> previewSizes = getPreviewStreamAvailableSizes();
+        // These sizes come in REF_SENSOR. Since there is an external selector involved,
+        // we must convert all of them to REF_VIEW, then flip back when returning.
+        boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
+        List<Size> sizes = new ArrayList<>(previewSizes.size());
+        for (Size size : previewSizes) {
+            sizes.add(flip ? size.flip() : size);
+        }
+
+        // Create our own default selector, which will be used if the external
+        // mPreviewStreamSizeSelector is null, or if it fails in finding a size.
+        Size targetMinSize = getPreviewSurfaceSize(Reference.VIEW);
+        if (targetMinSize == null) {
+            throw new IllegalStateException("targetMinSize should not be null here.");
+        }
+        AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
+        if (flip) targetRatio = targetRatio.flip();
+        LOG.i("computePreviewStreamSize:",
+                "targetRatio:", targetRatio,
+                "targetMinSize:", targetMinSize);
+        SizeSelector matchRatio = SizeSelectors.and( // Match this aspect ratio and sort by biggest
+                SizeSelectors.aspectRatio(targetRatio, 0),
+                SizeSelectors.biggest());
+        SizeSelector matchSize = SizeSelectors.and( // Bigger than this size, and sort by smallest
+                SizeSelectors.minHeight(targetMinSize.getHeight()),
+                SizeSelectors.minWidth(targetMinSize.getWidth()),
+                SizeSelectors.smallest());
+        SizeSelector matchAll = SizeSelectors.or(
+                SizeSelectors.and(matchRatio, matchSize), // Try to respect both constraints.
+                matchSize, // If couldn't match aspect ratio, at least respect the size
+                matchRatio, // If couldn't respect size, at least match aspect ratio
+                SizeSelectors.biggest() // If couldn't match any, take the biggest.
+        );
+
+        // Apply the external selector with this as a fallback,
+        // and return a size in REF_SENSOR reference.
+        SizeSelector selector;
+        if (mPreviewStreamSizeSelector != null) {
+            selector = SizeSelectors.or(mPreviewStreamSizeSelector, matchAll);
+        } else {
+            selector = matchAll;
+        }
+        Size result = selector.select(sizes).get(0);
+        if (!sizes.contains(result)) {
+            throw new RuntimeException("SizeSelectors must not return Sizes other than " +
+                    "those in the input list.");
+        }
+        if (flip) result = result.flip();
+        LOG.i("computePreviewStreamSize:", "result:", result, "flip:", flip);
+        return result;
+    }
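+
+    // Hedged note, not part of this class: the external selector set through
+    // setPreviewStreamSizeSelector() is only a preference, because the matchAll chain above
+    // is always appended as a fallback. For instance, a caller could pass something like:
+    //   SizeSelector preference = SizeSelectors.and(
+    //           SizeSelectors.maxWidth(1920),
+    //           SizeSelectors.maxHeight(1080),
+    //           SizeSelectors.biggest());
+    //   engine.setPreviewStreamSizeSelector(preference);
+    // If that preference matches no available size, the fallback chain still picks one.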
+
+    /**
+     * This is called anytime {@link #computeFrameProcessingSize()} is called.
+     * Implementors can return an empty list if the frame processing size is not selectable.
+     * @return a list of available sizes for frame processing
+     */
+    @EngineThread
+    @NonNull
+    protected abstract List<Size> getFrameProcessingAvailableSizes();
+
+    @EngineThread
+    @NonNull
+    @SuppressWarnings("WeakerAccess")
+    protected final Size computeFrameProcessingSize() {
+        @NonNull List<Size> frameSizes = getFrameProcessingAvailableSizes();
+        // These sizes come in REF_SENSOR. Since the selectors below work in the
+        // REF_VIEW reference, we must convert all of them, then flip back when returning.
+        boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
+        List<Size> sizes = new ArrayList<>(frameSizes.size());
+        for (Size size : frameSizes) {
+            sizes.add(flip ? size.flip() : size);
+        }
+        AspectRatio targetRatio = AspectRatio.of(
+                mPreviewStreamSize.getWidth(),
+                mPreviewStreamSize.getHeight());
+        if (flip) targetRatio = targetRatio.flip();
+        int maxWidth = mFrameProcessingMaxWidth;
+        int maxHeight = mFrameProcessingMaxHeight;
+        if (maxWidth <= 0 || maxWidth == Integer.MAX_VALUE) maxWidth = 640;
+        if (maxHeight <= 0 || maxHeight == Integer.MAX_VALUE) maxHeight = 640;
+        Size targetMaxSize = new Size(maxWidth, maxHeight);
+        LOG.i("computeFrameProcessingSize:",
+                "targetRatio:", targetRatio,
+                "targetMaxSize:", targetMaxSize);
+        SizeSelector matchRatio = SizeSelectors.aspectRatio(targetRatio, 0);
+        SizeSelector matchSize = SizeSelectors.and(
+                SizeSelectors.maxHeight(targetMaxSize.getHeight()),
+                SizeSelectors.maxWidth(targetMaxSize.getWidth()),
+                SizeSelectors.biggest());
+        SizeSelector matchAll = SizeSelectors.or(
+                SizeSelectors.and(matchRatio, matchSize), // Try to respect both constraints.
+                matchSize, // If couldn't match aspect ratio, at least respect the size
+                SizeSelectors.smallest() // If couldn't match any, take the smallest.
+        );
+        Size result = matchAll.select(sizes).get(0);
+        if (!sizes.contains(result)) {
+            throw new RuntimeException("SizeSelectors must not return Sizes other than " +
+                    "those in the input list.");
+        }
+        if (flip) result = result.flip();
+        LOG.i("computeFrameProcessingSize:", "result:", result, "flip:", flip);
+        return result;
+    }
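+
+    // Worked example of the selector chain above, with hypothetical sizes: if the preview
+    // stream is 1920x1080 (16:9) and no frame processing max values were set, the target
+    // max size defaults to 640x640. matchRatio keeps 16:9 candidates, matchSize keeps
+    // candidates within 640x640 sorted biggest-first, so a 640x360 option (if the sensor
+    // offers one) wins. If no size satisfies both, matchSize alone is tried, and as a last
+    // resort the smallest available size is returned.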
+
+    //endregion
+}

+ 726 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java

@@ -0,0 +1,726 @@
+package com.otaliastudios.cameraview.engine;
+
+import android.content.Context;
+import android.graphics.PointF;
+import android.location.Location;
+import android.os.Handler;
+import android.os.Looper;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+
+import com.google.android.gms.tasks.OnCompleteListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.android.gms.tasks.SuccessContinuation;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.otaliastudios.cameraview.CameraException;
+import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.CameraOptions;
+import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.AudioCodec;
+import com.otaliastudios.cameraview.controls.Facing;
+import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Hdr;
+import com.otaliastudios.cameraview.controls.Mode;
+import com.otaliastudios.cameraview.controls.PictureFormat;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.engine.offset.Angles;
+import com.otaliastudios.cameraview.engine.offset.Reference;
+import com.otaliastudios.cameraview.engine.orchestrator.CameraOrchestrator;
+import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
+import com.otaliastudios.cameraview.engine.orchestrator.CameraStateOrchestrator;
+import com.otaliastudios.cameraview.frame.Frame;
+import com.otaliastudios.cameraview.frame.FrameManager;
+import com.otaliastudios.cameraview.gesture.Gesture;
+import com.otaliastudios.cameraview.internal.WorkerHandler;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
+import com.otaliastudios.cameraview.overlay.Overlay;
+import com.otaliastudios.cameraview.picture.PictureRecorder;
+import com.otaliastudios.cameraview.preview.CameraPreview;
+import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.size.SizeSelector;
+import com.otaliastudios.cameraview.video.VideoRecorder;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+
+/**
+ * PROCESS
+ * Setting up the Camera is usually a 4-step process:
+ * 1. Setting up the Surface. Done by {@link CameraPreview}.
+ * 2. Starting the camera. Done by us. See {@link #startEngine()}, {@link #onStartEngine()}.
+ * 3. Binding the camera to the surface. Done by us. See {@link #startBind()},
+ *    {@link #onStartBind()}
+ * 4. Streaming the camera preview. Done by us. See {@link #startPreview()},
+ *    {@link #onStartPreview()}
+ *
+ * The first two steps can actually happen at the same time, and the order is not
+ * guaranteed: we just get a callback from the Preview when step 1 happens.
+ * So at the end of both step 1 and step 2, the engine should check whether both have
+ * been performed and, if so, trigger steps 3 and 4.
+ *
+ * STATE
+ * We only expose generic {@link #start()} and {@link #stop(boolean)} calls to the outside.
+ * The external users of this class are most likely interested in whether we have completed step 2
+ * or not, since that tells us if we can act on the camera or not, rather than knowing about
+ * steps 3 and 4.
+ *
+ * So in the {@link CameraEngine} notation,
+ * - {@link #start()}: ASYNC - starts the engine (S2). When possible, at a later time,
+ *                     S3 and S4 are also performed.
+ * - {@link #stop(boolean)}: ASYNC - stops everything: undoes S4, then S3, then S2.
+ * - {@link #restart()}: ASYNC - completes a stop then a start.
+ * - {@link #destroy(boolean)}: SYNC - performs a {@link #stop(boolean)} that will go on no matter
+ *                              what, without throwing. Makes the engine unusable and clears
+ *                              resources.
+ *
+ * THREADING
+ * Subclasses should always execute code on the thread given by {@link #mHandler}.
+ * For convenience, all the setup and tear down methods are called on this engine thread:
+ * {@link #onStartEngine()}, {@link #onStartBind()}, {@link #onStartPreview()} to setup and
+ * {@link #onStopEngine()}, {@link #onStopBind()}, {@link #onStopPreview()} to tear down.
+ * However, these methods are not forced to be synchronous: they can simply return a Google
+ * {@link Task}.
+ *
+ * Other setters are executed on the caller's thread, so subclasses should make sure they post
+ * to the engine handler before acting on their internal state.
+ *
+ *
+ * ERROR HANDLING
+ * The {@link #mHandler} thread has a special {@link Thread.UncaughtExceptionHandler} that handles
+ * exceptions and dispatches errors to the callback (instead of crashing the app).
+ * This lets subclasses run code safely and throw {@link CameraException}s directly when needed.
+ *
+ * For convenience, the two main methods, {@link #onStartEngine()} and {@link #onStopEngine()},
+ * are already called on the engine thread, but they can still be asynchronous by returning a
+ * Google {@link com.google.android.gms.tasks.Task}.
+ */
+public abstract class CameraEngine implements
+        CameraPreview.SurfaceCallback,
+        PictureRecorder.PictureResultListener,
+        VideoRecorder.VideoResultListener {
+
+    public interface Callback {
+        @NonNull Context getContext();
+        void dispatchOnCameraOpened(@NonNull CameraOptions options);
+        void dispatchOnCameraClosed();
+        void onCameraPreviewStreamSizeChanged();
+        void onShutter(boolean shouldPlaySound);
+        void dispatchOnVideoTaken(@NonNull VideoResult.Stub stub);
+        void dispatchOnPictureTaken(@NonNull PictureResult.Stub stub);
+        void dispatchOnFocusStart(@Nullable Gesture trigger, @NonNull PointF where);
+        void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, @NonNull PointF where);
+        void dispatchOnZoomChanged(final float newValue, @Nullable final PointF[] fingers);
+        void dispatchOnExposureCorrectionChanged(float newValue, @NonNull float[] bounds,
+                                                 @Nullable PointF[] fingers);
+        void dispatchFrame(@NonNull Frame frame);
+        void dispatchError(CameraException exception);
+        void dispatchOnVideoRecordingStart();
+        void dispatchOnVideoRecordingEnd();
+    }
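+
+    // Illustrative lifecycle sketch (an assumption for clarity, not part of this class):
+    // a concrete engine implementation is created with a Callback and driven through the
+    // steps described in the class docs, roughly like:
+    //   engine.setPreview(preview);   // step 1 is owned by the CameraPreview
+    //   engine.start();               // async: starts the engine (S2), then binds (S3) and
+    //                                 // starts the preview stream (S4) when possible
+    //   engine.stop(false);           // async: undoes S4, then S3, then S2
+    //   engine.destroy(true);         // sync: stops no matter what and releases resources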
+
+    protected static final String TAG = CameraEngine.class.getSimpleName();
+    protected static final CameraLogger LOG = CameraLogger.create(TAG);
+    // If this is 2, this means we'll try to run destroy() twice.
+    private static final int DESTROY_RETRIES = 2;
+
+    private WorkerHandler mHandler;
+    @VisibleForTesting Handler mCrashHandler;
+    private final Callback mCallback;
+    private final CameraStateOrchestrator mOrchestrator
+            = new CameraStateOrchestrator(new CameraOrchestrator.Callback() {
+        @Override
+        @NonNull
+        public WorkerHandler getJobWorker(@NonNull String job) {
+            return mHandler;
+        }
+
+        @Override
+        public void handleJobException(@NonNull String job, @NonNull Exception exception) {
+            handleException(exception, false);
+        }
+    });
+
+    protected CameraEngine(@NonNull Callback callback) {
+        mCallback = callback;
+        mCrashHandler = new Handler(Looper.getMainLooper());
+        recreateHandler(false);
+    }
+
+    @NonNull
+    protected final Callback getCallback() {
+        return mCallback;
+    }
+
+    @NonNull
+    protected final CameraStateOrchestrator getOrchestrator() {
+        return mOrchestrator;
+    }
+
+    //region Error handling
+
+    /**
+     * The base exception handler, which inspects the exception and
+     * decides what to do.
+     */
+    private class CrashExceptionHandler implements Thread.UncaughtExceptionHandler {
+        @Override
+        public void uncaughtException(@NonNull Thread thread, @NonNull Throwable throwable) {
+            handleException(throwable, true);
+        }
+    }
+
+    /**
+     * A static exception handler used during destruction to avoid leaks,
+     * since the default handler is not static and the thread might survive the engine.
+     */
+    private static class NoOpExceptionHandler implements Thread.UncaughtExceptionHandler {
+        @Override
+        public void uncaughtException(@NonNull Thread thread, @NonNull Throwable throwable) {
+            LOG.w("EXCEPTION:", "In the NoOpExceptionHandler, probably while destroying.",
+                    "Thread:", thread, "Error:", throwable);
+        }
+    }
+
+    /**
+     * Handles exceptions coming either from uncaught runtime errors in the {@link #mHandler}
+     * code (through the {@link CrashExceptionHandler}), as might happen during standard
+     * mHandler.post() operations that subclasses perform, or from errors caught by tasks and
+     * continuations that we launch here.
+     *
+     * In the first case, the thread is about to be terminated. In the second case,
+     * we can actually keep using it.
+     *
+     * @param throwable the throwable
+     * @param isUncaught true if coming from exception handler
+     */
+    private void handleException(@NonNull final Throwable throwable,
+                                 final boolean isUncaught) {
+        // 1. If this comes from the exception handler, the thread has crashed. Replace it.
+        // Most actions are wrapped into Tasks so don't go here, but some callbacks do
+        // (at least in Camera1, e.g. onError).
+        if (isUncaught) {
+            LOG.e("EXCEPTION:", "Handler thread is gone. Replacing.");
+            recreateHandler(false);
+        }
+
+        // 2. Depending on the exception, we must destroy(false|true) to release resources, and
+        // notify the outside, either with the callback or by crashing the app.
+        LOG.e("EXCEPTION:", "Scheduling on the crash handler...");
+        mCrashHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                if (throwable instanceof CameraException) {
+                    CameraException exception = (CameraException) throwable;
+                    if (exception.isUnrecoverable()) {
+                        LOG.e("EXCEPTION:", "Got CameraException. " +
+                                "Since it is unrecoverable, executing destroy(false).");
+                        destroy(false);
+                    }
+                    LOG.e("EXCEPTION:", "Got CameraException. Dispatching to callback.");
+                    mCallback.dispatchError(exception);
+                } else {
+                    LOG.e("EXCEPTION:", "Unexpected error! Executing destroy(true).");
+                    destroy(true);
+                    LOG.e("EXCEPTION:", "Unexpected error! Throwing.");
+                    if (throwable instanceof RuntimeException) {
+                        throw (RuntimeException) throwable;
+                    } else {
+                        throw new RuntimeException(throwable);
+                    }
+                }
+            }
+        });
+    }
+
+    /**
+     * Recreates the handler, to ensure we use a fresh one from now on.
+     * If we suspect that the handler is currently stuck, the orchestrator should be reset,
+     * because it hosts a chain of tasks and the last one will never complete.
+     * @param resetOrchestrator true to reset
+     */
+    private void recreateHandler(boolean resetOrchestrator) {
+        if (mHandler != null) mHandler.destroy();
+        mHandler = WorkerHandler.get("CameraViewEngine");
+        mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
+        if (resetOrchestrator) mOrchestrator.reset();
+    }
+
+    //endregion
+
+    //region State management
+
+    @NonNull
+    public final CameraState getState() {
+        return mOrchestrator.getCurrentState();
+    }
+
+    @NonNull
+    public final CameraState getTargetState() {
+        return mOrchestrator.getTargetState();
+    }
+
+    public final boolean isChangingState() {
+        return mOrchestrator.hasPendingStateChange();
+    }
+
+    /**
+     * Calls {@link #stop(boolean)} and waits for it.
+     * Not final due to mockito requirements.
+     *
+     * If unrecoverably is true, this also releases resources and the engine will not be in a
+     * functional state afterwards. If unrecoverably is false, this really is just a
+     * synchronous stop.
+     *
+     * NOTE: Should not be called on the orchestrator thread! This would cause deadlocks, since
+     * we wait for {@link #stop(boolean)} to return.
+     */
+    public void destroy(boolean unrecoverably) {
+        destroy(unrecoverably, 0);
+    }
+
+    private void destroy(boolean unrecoverably, int depth) {
+        LOG.i("DESTROY:", "state:", getState(),
+                "thread:", Thread.currentThread(),
+                "depth:", depth,
+                "unrecoverably:", unrecoverably);
+        if (unrecoverably) {
+            // Prevent CameraEngine leaks. Don't set to null, or exceptions
+            // inside the standard stop() method might crash the main thread.
+            mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
+        }
+        // Cannot use Tasks.await() because we might be on the UI thread.
+        final CountDownLatch latch = new CountDownLatch(1);
+        stop(true).addOnCompleteListener(
+                mHandler.getExecutor(),
+                new OnCompleteListener<Void>() {
+                    @Override
+                    public void onComplete(@NonNull Task<Void> task) {
+                        latch.countDown();
+                    }
+                });
+        try {
+            boolean success = latch.await(6, TimeUnit.SECONDS);
+            if (!success) {
+                // This thread is likely stuck. The reason might be deadlock issues in the internal
+                // camera implementation, at least in emulators: see Camera1Engine and Camera2Engine
+                // onStopEngine() implementation and comments.
+                LOG.e("DESTROY: Could not destroy synchronously after 6 seconds.",
+                        "Current thread:", Thread.currentThread(),
+                        "Handler thread:", mHandler.getThread());
+                depth++;
+                if (depth < DESTROY_RETRIES) {
+                    recreateHandler(true);
+                    LOG.e("DESTROY: Trying again on thread:", mHandler.getThread());
+                    destroy(unrecoverably, depth);
+                } else {
+                    LOG.w("DESTROY: Giving up because DESTROY_RETRIES was reached.");
+                }
+            }
+        } catch (InterruptedException ignore) {}
+    }
+
+    @SuppressWarnings("WeakerAccess")
+    public void restart() {
+        LOG.i("RESTART:", "scheduled. State:", getState());
+        stop(false);
+        start();
+    }
+
+    @NonNull
+    public Task<Void> start() {
+        LOG.i("START:", "scheduled. State:", getState());
+        Task<Void> engine = startEngine();
+        startBind();
+        startPreview();
+        return engine;
+    }
+
+    @NonNull
+    public Task<Void> stop(final boolean swallowExceptions) {
+        LOG.i("STOP:", "scheduled. State:", getState());
+        stopPreview(swallowExceptions);
+        stopBind(swallowExceptions);
+        return stopEngine(swallowExceptions);
+    }
+
+    @SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
+    @NonNull
+    protected Task<Void> restartBind() {
+        LOG.i("RESTART BIND:", "scheduled. State:", getState());
+        stopPreview(false);
+        stopBind(false);
+        startBind();
+        return startPreview();
+    }
+
+    @SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
+    @NonNull
+    protected Task<Void> restartPreview() {
+        LOG.i("RESTART PREVIEW:", "scheduled. State:", getState());
+        stopPreview(false);
+        return startPreview();
+    }
+
+    //endregion
+
+    //region Start & Stop the engine
+
+    @NonNull
+    @EngineThread
+    private Task<Void> startEngine() {
+        return mOrchestrator.scheduleStateChange(CameraState.OFF, CameraState.ENGINE,
+                true,
+                new Callable<Task<CameraOptions>>() {
+            @Override
+            public Task<CameraOptions> call() {
+                if (!collectCameraInfo(getFacing())) {
+                    LOG.e("onStartEngine:", "No camera available for facing", getFacing());
+                    throw new CameraException(CameraException.REASON_NO_CAMERA);
+                }
+                return onStartEngine();
+            }
+        }).onSuccessTask(new SuccessContinuation<CameraOptions, Void>() {
+            @NonNull
+            @Override
+            public Task<Void> then(@Nullable CameraOptions cameraOptions) {
+                // Put this on the outer task so we're sure it's called after getState() is changed.
+                // This was breaking some tests on rare occasions.
+                if (cameraOptions == null) throw new RuntimeException("Null options!");
+                mCallback.dispatchOnCameraOpened(cameraOptions);
+                return Tasks.forResult(null);
+            }
+        });
+    }
+
+    @NonNull
+    @EngineThread
+    private Task<Void> stopEngine(boolean swallowExceptions) {
+        return mOrchestrator.scheduleStateChange(CameraState.ENGINE, CameraState.OFF,
+                !swallowExceptions,
+                new Callable<Task<Void>>() {
+            @Override
+            public Task<Void> call() {
+                return onStopEngine();
+            }
+        }).addOnSuccessListener(new OnSuccessListener<Void>() {
+            @Override
+            public void onSuccess(Void aVoid) {
+                // Put this on the outer task so we're sure it's called after getState() is OFF.
+                // This was breaking some tests on rare occasions.
+                mCallback.dispatchOnCameraClosed();
+            }
+        });
+    }
+
+    /**
+     * Camera is about to be opened. Implementors should look into available cameras
+     * and see if any of them matches the given {@link Facing} value.
+     *
+     * If so, implementors should call {@link Angles#setSensorOffset(Facing, int)}
+     * and store any other information (like the camera ID) needed to start the engine.
+     *
+     * @param facing the facing value
+     * @return true if we have one
+     */
+    @EngineThread
+    protected abstract boolean collectCameraInfo(@NonNull Facing facing);
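+
+    // Hedged sketch of what an implementor might do here (a simplified, Camera1-style
+    // example; the helper names below are hypothetical, not part of the library):
+    //   android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+    //   for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); i++) {
+    //       android.hardware.Camera.getCameraInfo(i, info);
+    //       if (matchesRequestedFacing(info.facing, facing)) { // hypothetical mapping helper
+    //           getAngles().setSensorOffset(facing, info.orientation);
+    //           rememberCameraId(i);                           // hypothetical
+    //           return true;
+    //       }
+    //   }
+    //   return false;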
+
+    /**
+     * Starts the engine.
+     * @return a task
+     */
+    @NonNull
+    @EngineThread
+    protected abstract Task<CameraOptions> onStartEngine();
+
+    /**
+     * Stops the engine.
+     * Stop events should generally not throw exceptions. We
+     * want to release resources either way.
+     * @return a task
+     */
+    @NonNull
+    @EngineThread
+    protected abstract Task<Void> onStopEngine();
+
+    //endregion
+
+    //region Start & Stop binding
+
+    @NonNull
+    @EngineThread
+    private Task<Void> startBind() {
+        return mOrchestrator.scheduleStateChange(CameraState.ENGINE, CameraState.BIND,
+                true,
+                new Callable<Task<Void>>() {
+            @Override
+            public Task<Void> call() {
+                if (getPreview() != null && getPreview().hasSurface()) {
+                    return onStartBind();
+                } else {
+                    return Tasks.forCanceled();
+                }
+            }
+        });
+    }
+
+    @SuppressWarnings("UnusedReturnValue")
+    @NonNull
+    @EngineThread
+    private Task<Void> stopBind(boolean swallowExceptions) {
+        return mOrchestrator.scheduleStateChange(CameraState.BIND, CameraState.ENGINE,
+                !swallowExceptions,
+                new Callable<Task<Void>>() {
+            @Override
+            public Task<Void> call() {
+                return onStopBind();
+            }
+        });
+    }
+
+    /**
+     * Starts the binding process.
+     * @return a task
+     */
+    @NonNull
+    @EngineThread
+    protected abstract Task<Void> onStartBind();
+
+    /**
+     * Stops the binding process.
+     * Stop events should generally not throw exceptions. We
+     * want to release resources either way.
+     * @return a task
+     */
+    @NonNull
+    @EngineThread
+    protected abstract Task<Void> onStopBind();
+
+    //endregion
+
+    //region Start & Stop preview
+
+    @NonNull
+    @EngineThread
+    private Task<Void> startPreview() {
+        return mOrchestrator.scheduleStateChange(CameraState.BIND, CameraState.PREVIEW,
+                true,
+                new Callable<Task<Void>>() {
+            @Override
+            public Task<Void> call() {
+                return onStartPreview();
+            }
+        });
+    }
+
+    @SuppressWarnings("UnusedReturnValue")
+    @NonNull
+    @EngineThread
+    private Task<Void> stopPreview(boolean swallowExceptions) {
+        return mOrchestrator.scheduleStateChange(CameraState.PREVIEW, CameraState.BIND,
+                !swallowExceptions,
+                new Callable<Task<Void>>() {
+            @Override
+            public Task<Void> call() {
+                return onStopPreview();
+            }
+        });
+    }
+
+    /**
+     * Starts the preview streaming.
+     * @return a task
+     */
+    @NonNull
+    @EngineThread
+    protected abstract Task<Void> onStartPreview();
+
+    /**
+     * Stops the preview streaming.
+     * Stop events should generally not throw exceptions. We
+     * want to release resources either way.
+     * @return a task
+     */
+    @NonNull
+    @EngineThread
+    protected abstract Task<Void> onStopPreview();
+
+    //endregion
+
+    //region Surface callbacks
+
+    /**
+     * The surface is now available, which means that step 1 has completed.
+     * If we have also completed step 2, go on with binding and streaming.
+     */
+    @SuppressWarnings("ConstantConditions")
+    @Override
+    public final void onSurfaceAvailable() {
+        LOG.i("onSurfaceAvailable:", "Size is", getPreview().getSurfaceSize());
+        startBind();
+        startPreview();
+    }
+
+    @Override
+    public final void onSurfaceDestroyed() {
+        LOG.i("onSurfaceDestroyed");
+        stopPreview(false);
+        stopBind(false);
+    }
+
+    //endregion
+
+    //region Abstract getters
+
+    @NonNull
+    public abstract Angles getAngles();
+
+    @NonNull
+    public abstract FrameManager getFrameManager();
+
+    @Nullable
+    public abstract CameraOptions getCameraOptions();
+
+    @Nullable
+    public abstract Size getPictureSize(@NonNull Reference reference);
+
+    @Nullable
+    public abstract Size getVideoSize(@NonNull Reference reference);
+
+    @Nullable
+    public abstract Size getPreviewStreamSize(@NonNull Reference reference);
+
+    @Nullable
+    public abstract Size getUncroppedSnapshotSize(@NonNull Reference reference);
+
+    //endregion
+
+    //region Abstract APIs
+
+    public abstract void setPreview(@NonNull CameraPreview cameraPreview);
+    @Nullable public abstract CameraPreview getPreview();
+
+    public abstract void setOverlay(@Nullable Overlay overlay);
+    @Nullable public abstract Overlay getOverlay();
+
+    public abstract void setPreviewStreamSizeSelector(@Nullable SizeSelector selector);
+    @Nullable public abstract SizeSelector getPreviewStreamSizeSelector();
+
+    public abstract void setPictureSizeSelector(@NonNull SizeSelector selector);
+    @NonNull public abstract SizeSelector getPictureSizeSelector();
+
+    public abstract void setVideoSizeSelector(@NonNull SizeSelector selector);
+    @NonNull public abstract SizeSelector getVideoSizeSelector();
+
+    public abstract void setVideoMaxSize(long videoMaxSizeBytes);
+    public abstract long getVideoMaxSize();
+
+    public abstract void setVideoMaxDuration(int videoMaxDurationMillis);
+    public abstract int getVideoMaxDuration();
+
+    public abstract void setVideoCodec(@NonNull VideoCodec codec);
+    @NonNull public abstract VideoCodec getVideoCodec();
+
+    public abstract void setVideoBitRate(int videoBitRate);
+    public abstract int getVideoBitRate();
+
+    public abstract void setAudioBitRate(int audioBitRate);
+    public abstract int getAudioBitRate();
+
+    public abstract void setAudioCodec(@NonNull AudioCodec codec);
+    @NonNull public abstract AudioCodec getAudioCodec();
+
+    public abstract void setSnapshotMaxWidth(int maxWidth);
+    public abstract int getSnapshotMaxWidth();
+
+    public abstract void setSnapshotMaxHeight(int maxHeight);
+    public abstract int getSnapshotMaxHeight();
+
+    public abstract void setFrameProcessingMaxWidth(int maxWidth);
+    public abstract int getFrameProcessingMaxWidth();
+
+    public abstract void setFrameProcessingMaxHeight(int maxHeight);
+    public abstract int getFrameProcessingMaxHeight();
+
+    public abstract void setFrameProcessingFormat(int format);
+    public abstract int getFrameProcessingFormat();
+
+    public abstract void setFrameProcessingPoolSize(int poolSize);
+    public abstract int getFrameProcessingPoolSize();
+
+    public abstract void setAutoFocusResetDelay(long delayMillis);
+    public abstract long getAutoFocusResetDelay();
+
+    public abstract void setFacing(final @NonNull Facing facing);
+    @NonNull public abstract Facing getFacing();
+
+    public abstract void setAudio(@NonNull Audio audio);
+    @NonNull public abstract Audio getAudio();
+
+    public abstract void setMode(@NonNull Mode mode);
+    @NonNull public abstract Mode getMode();
+
+    public abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
+    public abstract float getZoomValue();
+
+    public abstract void setExposureCorrection(float EVvalue, @NonNull float[] bounds,
+                                               @Nullable PointF[] points, boolean notify);
+    public abstract float getExposureCorrectionValue();
+
+    public abstract void setFlash(@NonNull Flash flash);
+    @NonNull public abstract Flash getFlash();
+
+    public abstract void setWhiteBalance(@NonNull WhiteBalance whiteBalance);
+    @NonNull public abstract WhiteBalance getWhiteBalance();
+
+    public abstract void setHdr(@NonNull Hdr hdr);
+    @NonNull public abstract Hdr getHdr();
+
+    public abstract void setLocation(@Nullable Location location);
+    @Nullable public abstract Location getLocation();
+
+    public abstract void setPictureFormat(@NonNull PictureFormat pictureFormat);
+    @NonNull public abstract PictureFormat getPictureFormat();
+
+    public abstract void setPreviewFrameRateExact(boolean previewFrameRateExact);
+    public abstract boolean getPreviewFrameRateExact();
+    public abstract void setPreviewFrameRate(float previewFrameRate);
+    public abstract float getPreviewFrameRate();
+
+    public abstract void setHasFrameProcessors(boolean hasFrameProcessors);
+    public abstract boolean hasFrameProcessors();
+
+    public abstract void setPictureMetering(boolean enable);
+    public abstract boolean getPictureMetering();
+
+    public abstract void setPictureSnapshotMetering(boolean enable);
+    public abstract boolean getPictureSnapshotMetering();
+
+    public abstract void startAutoFocus(@Nullable Gesture gesture,
+                                        @NonNull MeteringRegions regions,
+                                        @NonNull PointF legacyPoint);
+
+    public abstract void setPlaySounds(boolean playSounds);
+
+    public abstract boolean isTakingPicture();
+    public abstract void takePicture(@NonNull PictureResult.Stub stub);
+    public abstract void takePictureSnapshot(final @NonNull PictureResult.Stub stub);
+
+    public abstract boolean isTakingVideo();
+    public abstract void takeVideo(@NonNull VideoResult.Stub stub,
+                                   @Nullable File file,
+                                   @Nullable FileDescriptor fileDescriptor);
+    public abstract void takeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file);
+    public abstract void stopVideo();
+
+    //endregion
+}
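
As a rough illustration of how a front-end view could delegate one of these controls to the engine, here is a hedged sketch; the field name mCameraEngine and the surrounding view-level methods are assumptions, not taken from this change set.

    // Hedged sketch: view-level accessors delegating to the engine.
    // "mCameraEngine" is a hypothetical field holding the CameraEngine instance.
    public void setVideoMaxDuration(int videoMaxDurationMillis) {
        mCameraEngine.setVideoMaxDuration(videoMaxDurationMillis);
    }

    public int getVideoMaxDuration() {
        return mCameraEngine.getVideoMaxDuration();
    }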

+ 7 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/EngineThread.java

@@ -0,0 +1,7 @@
+package com.otaliastudios.cameraview.engine;
+
+/**
+ * Indicates that the annotated code runs on, and must be called from, the
+ * {@link CameraEngine} thread.
+ */
+@SuppressWarnings("WeakerAccess")
+public @interface EngineThread {}
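
The annotation is a documentation-only marker. A minimal sketch of how it might be applied inside an engine class (the method name and body are hypothetical):

    // Hypothetical engine-internal method: the annotation documents that
    // callers must already be on the CameraEngine thread.
    @EngineThread
    private void applyRepeatingRequest() {
        // ... interact with the capture session here ...
    }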

+ 92 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java

@@ -0,0 +1,92 @@
+package com.otaliastudios.cameraview.engine.action;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+/**
+ * The Action class encapsulates logic for completing an action in a Camera2 environment.
+ * In this case, we are often interested in constantly receiving the {@link CaptureResult}
+ * and {@link CaptureRequest} callbacks, as well as applying changes to a
+ * {@link CaptureRequest.Builder} and having them applied to the sensor.
+ *
+ * The Action class receives the given callbacks and can operate over the engine
+ * through the {@link ActionHolder} object.
+ *
+ * Each Action is in a given state at any given moment. This interface defines the
+ * {@link #STATE_COMPLETED} state, which is common to all actions.
+ *
+ * See {@link BaseAction} for a base implementation.
+ */
+@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
+public interface Action {
+
+    int STATE_COMPLETED = Integer.MAX_VALUE;
+
+    /**
+     * Returns the current state.
+     * @return the state
+     */
+    int getState();
+
+    /**
+     * Starts this action.
+     * @param holder the holder
+     */
+    void start(@NonNull ActionHolder holder);
+
+    /**
+     * Aborts this action.
+     * @param holder the holder
+     */
+    void abort(@NonNull ActionHolder holder);
+
+    /**
+     * Adds an {@link ActionCallback} to receive state
+     * change events.
+     * @param callback a callback
+     */
+    void addCallback(@NonNull ActionCallback callback);
+
+    /**
+     * Removes a previously added callback.
+     * @param callback a callback
+     */
+    void removeCallback(@NonNull ActionCallback callback);
+
+    /**
+     * Called from {@link CaptureCallback#onCaptureStarted(CameraCaptureSession, CaptureRequest,
+     * long, long)}.
+     * @param holder the holder
+     * @param request the request
+     */
+    void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request);
+
+    /**
+     * Called from {@link CaptureCallback#onCaptureProgressed(CameraCaptureSession, CaptureRequest,
+     * CaptureResult)}.
+     * @param holder the holder
+     * @param request the request
+     * @param result the result
+     */
+    void onCaptureProgressed(@NonNull ActionHolder holder,
+                             @NonNull CaptureRequest request,
+                             @NonNull CaptureResult result);
+
+    /**
+     * Called from {@link CaptureCallback#onCaptureCompleted(CameraCaptureSession, CaptureRequest,
+     * TotalCaptureResult)}.
+     * @param holder the holder
+     * @param request the request
+     * @param result the result
+     */
+    void onCaptureCompleted(@NonNull ActionHolder holder,
+                            @NonNull CaptureRequest request,
+                            @NonNull TotalCaptureResult result);
+}
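
To show how these callbacks are typically driven, here is a hedged sketch of an engine forwarding Camera2 capture events to its running actions; the "actions" list and the "holder" reference are assumptions and not part of this change set.

    // Hedged sketch: forward framework callbacks to running actions.
    // "actions" is a hypothetical List<Action>, "holder" the engine acting as ActionHolder.
    private final CameraCaptureSession.CaptureCallback captureCallback =
            new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                       @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
            // Iterate over a copy: actions may remove themselves upon completion.
            for (Action action : new ArrayList<>(actions)) {
                action.onCaptureCompleted(holder, request, result);
            }
        }
    };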

+ 23 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionCallback.java

@@ -0,0 +1,23 @@
+package com.otaliastudios.cameraview.engine.action;
+
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+/**
+ * A callback for {@link Action} state changes.
+ * See {@link Action} for the state machine details.
+ *
+ * See also {@link CompletionCallback}.
+ */
+@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
+public interface ActionCallback {
+
+    /**
+     * Action state has just changed.
+     * @param action action
+     * @param state new state
+     */
+    void onActionStateChanged(@NonNull Action action, int state);
+}
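
A hedged usage sketch: reacting to the completion of an action through an anonymous callback. The "action" and "holder" variables are assumed to exist in the surrounding engine code.

    // Hedged sketch: run follow-up work once the action reaches STATE_COMPLETED.
    action.addCallback(new ActionCallback() {
        @Override
        public void onActionStateChanged(@NonNull Action action, int state) {
            if (state == Action.STATE_COMPLETED) {
                action.removeCallback(this);
                // continue with the next engine step here
            }
        }
    });
    action.start(holder);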

+ 82 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java

@@ -0,0 +1,82 @@
+package com.otaliastudios.cameraview.engine.action;
+
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.TotalCaptureResult;
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+
+/**
+ * The holder of {@link Action}.
+ *
+ * Implementations should keep a list or set of currently running actions, and offer
+ * them the base Camera2 objects that are needed to apply changes.
+ *
+ * This class, or a holder of it, should also forward the capture callbacks
+ * to all {@link Action}s. See {@link com.otaliastudios.cameraview.engine.Camera2Engine} for
+ * our implementation.
+ */
+@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
+public interface ActionHolder {
+
+    /**
+     * Adds a new action
+     * @param action action
+     */
+    void addAction(@NonNull Action action);
+
+    /**
+     * Removes a previously added action
+     * @param action action
+     */
+    void removeAction(@NonNull Action action);
+
+    /**
+     * Returns the {@link CameraCharacteristics} of the current
+     * camera device.
+     * @param action action
+     * @return characteristics
+     */
+    @NonNull
+    CameraCharacteristics getCharacteristics(@NonNull Action action);
+
+    /**
+     * Returns the latest {@link TotalCaptureResult}. Can be used
+     * by actions to start querying the state before receiving their
+     * first frame.
+     * @param action action
+     * @return last result
+     */
+    @Nullable
+    TotalCaptureResult getLastResult(@NonNull Action action);
+
+    /**
+     * Returns the current {@link CaptureRequest.Builder} so that
+     * actions can apply changes to it and later submit them.
+     * @param action action
+     * @return the builder
+     */
+    @NonNull
+    CaptureRequest.Builder getBuilder(@NonNull Action action);
+
+    /**
+     * Applies the current builder (as per {@link #getBuilder(Action)})
+     * as a repeating request on the preview.
+     * @param source action
+     */
+    void applyBuilder(@NonNull Action source);
+
+    /**
+     * Applies the given builder as a single capture request.
+     * Callers can catch the exception and choose what to do.
+     * @param source action
+     * @param builder builder
+     * @throws CameraAccessException camera exception
+     */
+    void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
+            throws CameraAccessException;
+}
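
A hedged sketch of how a running action typically uses its holder: it mutates the shared builder and asks the holder to apply it to the repeating request. The AF trigger key below is only an illustrative choice.

    // Hedged sketch, inside a BaseAction subclass: request an autofocus
    // trigger through the holder's shared builder.
    @Override
    protected void onStart(@NonNull ActionHolder holder) {
        super.onStart(holder);
        holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER,
                CaptureRequest.CONTROL_AF_TRIGGER_START);
        holder.applyBuilder(this);
    }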

+ 67 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java

@@ -0,0 +1,67 @@
+package com.otaliastudios.cameraview.engine.action;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+/**
+ * A simple wrapper around a {@link BaseAction}.
+ * This can be used to add functionality around a base action.
+ */
+@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
+public abstract class ActionWrapper extends BaseAction {
+
+    /**
+     * Should return the wrapped action.
+     * @return the wrapped action
+     */
+    @NonNull
+    public abstract BaseAction getAction();
+
+    @Override
+    protected void onStart(@NonNull ActionHolder holder) {
+        super.onStart(holder);
+        getAction().addCallback(new ActionCallback() {
+            @Override
+            public void onActionStateChanged(@NonNull Action action, int state) {
+                setState(state);
+                if (state == STATE_COMPLETED) {
+                    action.removeCallback(this);
+                }
+            }
+        });
+        getAction().onStart(holder);
+    }
+
+    @Override
+    protected void onAbort(@NonNull ActionHolder holder) {
+        super.onAbort(holder);
+        getAction().onAbort(holder);
+    }
+
+    @Override
+    public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) {
+        super.onCaptureStarted(holder, request);
+        getAction().onCaptureStarted(holder, request);
+    }
+
+    @Override
+    public void onCaptureProgressed(@NonNull ActionHolder holder,
+                                    @NonNull CaptureRequest request,
+                                    @NonNull CaptureResult result) {
+        super.onCaptureProgressed(holder, request, result);
+        getAction().onCaptureProgressed(holder, request, result);
+    }
+
+    @Override
+    public void onCaptureCompleted(@NonNull ActionHolder holder,
+                                   @NonNull CaptureRequest request,
+                                   @NonNull TotalCaptureResult result) {
+        super.onCaptureCompleted(holder, request, result);
+        getAction().onCaptureCompleted(holder, request, result);
+    }
+}
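
A hedged sketch of a possible wrapper: it decorates an existing BaseAction with a log line when it starts and leaves everything else to the wrapped action. The class is hypothetical and not part of the library.

    // Hypothetical decorator built on ActionWrapper.
    public class LoggingAction extends ActionWrapper {

        private final BaseAction wrapped;

        public LoggingAction(@NonNull BaseAction wrapped) {
            this.wrapped = wrapped;
        }

        @NonNull
        @Override
        public BaseAction getAction() {
            return wrapped;
        }

        @Override
        protected void onStart(@NonNull ActionHolder holder) {
            android.util.Log.d("LoggingAction", "Wrapped action started.");
            super.onStart(holder);
        }
    }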

+ 56 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Actions.java

@@ -0,0 +1,56 @@
+package com.otaliastudios.cameraview.engine.action;
+
+import android.os.Build;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+import java.util.Arrays;
+
+/**
+ * Utilities for creating {@link Action} sequences.
+ */
+@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
+public class Actions {
+
+    /**
+     * Creates a {@link BaseAction} that executes all the child actions
+     * together, at the same time, and completes once all of them are
+     * completed.
+     *
+     * @param actions input actions
+     * @return a new action
+     */
+    @NonNull
+    public static BaseAction together(@NonNull BaseAction... actions) {
+        return new TogetherAction(Arrays.asList(actions));
+    }
+
+    /**
+     * Creates a {@link BaseAction} that executes all the child actions
+     * in sequence, waiting for the first to complete, then going on with
+     * the second and so on, finally completing when all are completed.
+     *
+     * @param actions input actions
+     * @return a new action
+     */
+    @NonNull
+    public static BaseAction sequence(@NonNull BaseAction... actions) {
+        return new SequenceAction(Arrays.asList(actions));
+    }
+
+    /**
+     * Creates a {@link BaseAction} that completes as normal, but is also
+     * forced to complete if the given timeout is reached, by calling
+     * {@link Action#abort(ActionHolder)}.
+     *
+     * @param timeoutMillis timeout in milliseconds
+     * @param action action
+     * @return a new action
+     */
+    @NonNull
+    public static BaseAction timeout(long timeoutMillis, @NonNull BaseAction action) {
+        return new TimeoutAction(timeoutMillis, action);
+    }
+
+}
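
A hedged sketch of how these factories compose; the three child actions and the holder are hypothetical, while the shape of the composition follows the factory methods above.

    // Hedged sketch: run three hypothetical actions in parallel, but force
    // completion (via abort) if they take longer than 5 seconds.
    BaseAction metering = Actions.together(lockExposure, lockFocus, lockWhiteBalance);
    BaseAction timedMetering = Actions.timeout(5000, metering);
    timedMetering.start(holder);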

+ 177 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java

@@ -0,0 +1,177 @@
+package com.otaliastudios.cameraview.engine.action;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.os.Build;
+
+import androidx.annotation.CallSuper;
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * The base implementation of {@link Action} that should always be subclassed,
+ * instead of implementing the root interface itself.
+ *
+ * It holds a list of callbacks and dispatches events to them, and it also manages
+ * its own lifecycle:
+ * - when {@link #start(ActionHolder)} is called, we add ourselves to the holder list
+ * - when {@link #STATE_COMPLETED} is reached, we remove ourselves from the holder list
+ *
+ * This bookkeeping is important in all cases: completed actions must not keep receiving
+ * capture events from the holder.
+ */
+@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
+public abstract class BaseAction implements Action {
+
+    private final List<ActionCallback> callbacks = new ArrayList<>();
+    private int state;
+    private ActionHolder holder;
+    private boolean needsOnStart;
+
+    @Override
+    public final int getState() {
+        return state;
+    }
+
+    @Override
+    public final void start(@NonNull ActionHolder holder) {
+        this.holder = holder;
+        holder.addAction(this);
+        if (holder.getLastResult(this) != null) {
+            onStart(holder);
+        } else {
+            needsOnStart = true;
+        }
+    }
+
+    @Override
+    public final void abort(@NonNull ActionHolder holder) {
+        holder.removeAction(this);
+        if (!isCompleted()) {
+            onAbort(holder);
+            setState(STATE_COMPLETED);
+        }
+        needsOnStart = false;
+    }
+
+    /**
+     * Action was started and will soon receive events from the
+     * holder stream.
+     * @param holder holder
+     */
+    @CallSuper
+    protected void onStart(@NonNull ActionHolder holder) {
+        // Repeat the holder assignment here (it is already done in start()) because we need
+        // it in start(), but some special actions never call start() on their children.
+        this.holder = holder;
+        // Overrideable
+    }
+
+    /**
+     * Action was aborted and will not receive events from the
+     * holder stream anymore. It will soon be marked as completed.
+     * @param holder holder
+     */
+    @SuppressWarnings("unused")
+    protected void onAbort(@NonNull ActionHolder holder) {
+        // Overrideable
+    }
+
+    @CallSuper
+    @Override
+    public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) {
+        if (needsOnStart) {
+            onStart(holder);
+            needsOnStart = false;
+        }
+    }
+
+    @Override
+    public void onCaptureProgressed(@NonNull ActionHolder holder,
+                                    @NonNull CaptureRequest request,
+                                    @NonNull CaptureResult result) {
+        // Overrideable
+    }
+
+    @Override
+    public void onCaptureCompleted(@NonNull ActionHolder holder,
+                                   @NonNull CaptureRequest request,
+                                   @NonNull TotalCaptureResult result) {
+        // Overrideable
+    }
+
+    /**
+     * Called by subclasses to notify of their state. If state is {@link #STATE_COMPLETED},
+     * this removes this action from the holder.
+     * @param newState new state
+     */
+    protected final void setState(int newState) {
+        if (newState != state) {
+            state = newState;
+            for (ActionCallback callback : callbacks) {
+                callback.onActionStateChanged(this, state);
+            }
+            if (state == STATE_COMPLETED) {
+                holder.removeAction(this);
+                onCompleted(holder);
+            }
+        }
+    }
+
+    /**
+     * Whether this action has reached the completed state.
+     * @return true if completed
+     */
+    public boolean isCompleted() {
+        return state == STATE_COMPLETED;
+    }
+
+    /**
+     * Called when this action has completed (possibly aborted).
+     * @param holder holder
+     */
+    protected void onCompleted(@NonNull ActionHolder holder) {
+        // Overrideable
+    }
+
+    /**
+     * Returns the holder.
+     * @return the holder
+     */
+    @NonNull
+    protected ActionHolder getHolder() {
+        return holder;
+    }
+
+    /**
+     * Reads a characteristic with a fallback.
+     * @param key key
+     * @param fallback fallback
+     * @param <T> key type
+     * @return value or fallback
+     */
+    @NonNull
+    protected <T> T readCharacteristic(@NonNull CameraCharacteristics.Key<T> key,
+                                       @NonNull T fallback) {
+        T value = holder.getCharacteristics(this).get(key);
+        return value == null ? fallback : value;
+    }
+
+    @Override
+    public void addCallback(@NonNull ActionCallback callback) {
+        if (!callbacks.contains(callback)) {
+            callbacks.add(callback);
+            callback.onActionStateChanged(this, getState());
+        }
+    }
+
+    @Override
+    public void removeCallback(@NonNull ActionCallback callback) {
+        callbacks.remove(callback);
+    }
+}
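
A hedged sketch of a concrete subclass: an action that completes once the auto-exposure state reported in the capture results is CONVERGED. It is illustrative only (imports as in BaseAction.java above) and not one of the library's own actions.

    // Hypothetical action built on BaseAction.
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    public class WaitForAeConvergence extends BaseAction {

        @Override
        public void onCaptureCompleted(@NonNull ActionHolder holder,
                                       @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
            super.onCaptureCompleted(holder, request, result);
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                setState(STATE_COMPLETED);
            }
        }
    }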

+ 0 - 0
cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/CompletionCallback.java


Not all modified files are shown because too many files were changed