diff --git a/README.md b/README.md index 90e7592..d36f878 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ All measurements are executed on real physical devices using automated macrobenc ## 🧩 Repository Structure -``` +```text compose-vs-views/ ├── app-compose/ → Jetpack Compose implementation ├── app-view/ → XML View + RecyclerView implementation diff --git a/benchmark/build.gradle.kts b/benchmark/build.gradle.kts index 831555d..1ff9c86 100644 --- a/benchmark/build.gradle.kts +++ b/benchmark/build.gradle.kts @@ -7,6 +7,9 @@ android { namespace = "dev.egarcia.andperf.benchmark" compileSdk = libs.versions.compileSdk.get().toInt() + // Start by targeting the Compose app; can be overridden with -PbenchmarkTarget=":app-view" + val benchmarkTarget = (project.findProperty("benchmarkTarget") as? String) ?: ":app-compose" + defaultConfig { minSdk = libs.versions.minSdk.get().toInt() testInstrumentationRunner = "androidx.benchmark.junit4.AndroidBenchmarkRunner" @@ -17,11 +20,21 @@ android { // Enable self-instrumenting so the benchmark APK isn't declared as targeting the app package experimentalProperties["android.experimental.self-instrumenting"] = true + + // Set the benchmarkTargetPackage instrumentation argument so the runner knows which app + // package to measure. This maps the known module path to the correct applicationId. + testInstrumentationRunnerArguments["benchmarkTargetPackage"] = when (benchmarkTarget) { + ":app-compose" -> "dev.egarcia.andperf.compose" + ":app-view" -> "dev.egarcia.andperf.view" + else -> "dev.egarcia.andperf.compose" + } } - // Start by targeting the Compose app; can be overridden with -PbenchmarkTarget=":app-view" - val benchmarkTarget = (project.findProperty("benchmarkTarget") as? String) ?: ":app-compose" targetProjectPath = benchmarkTarget + // Ensure we target the non-debuggable 'benchmark' variant of the app so the installed + // target APK is the benchmark build (which should have isDebuggable=false in app modules). 
+ // This prevents Macrobenchmark from failing with the DEBUGGABLE error. + targetVariant = "benchmark" buildTypes { // Debug build type for Android Studio test recognition diff --git a/benchmark/src/androidTest/java/dev/egarcia/andperf/benchmark/ComposeViewBenchmarks.kt b/benchmark/src/androidTest/java/dev/egarcia/andperf/benchmark/ComposeViewBenchmarks.kt deleted file mode 100644 index e1eb2ab..0000000 --- a/benchmark/src/androidTest/java/dev/egarcia/andperf/benchmark/ComposeViewBenchmarks.kt +++ /dev/null @@ -1,78 +0,0 @@ -package dev.egarcia.andperf.benchmark - -import androidx.benchmark.macro.FrameTimingMetric -import androidx.benchmark.macro.MacrobenchmarkRule -import androidx.benchmark.macro.StartupMode -import androidx.benchmark.macro.StartupTimingMetric -import androidx.test.ext.junit.runners.AndroidJUnit4 -import androidx.test.platform.app.InstrumentationRegistry -import org.junit.Assume -import org.junit.Rule -import org.junit.Test -import org.junit.runner.RunWith - -@RunWith(AndroidJUnit4::class) -class ComposeViewBenchmarks { - - @get:Rule - val rule = MacrobenchmarkRule() - - private fun isPackageInstalled(packageName: String): Boolean { - return try { - val context = InstrumentationRegistry.getInstrumentation().context - context.packageManager.getApplicationInfo(packageName, 0) - true - } catch (e: Exception) { - false - } - } - - @Test - fun coldStartup_compose() { - val pkg = "dev.egarcia.andperf.compose" - // If instrumentation provided a specific target package, skip tests that don't match it. 
- val targetArg = InstrumentationRegistry.getArguments().getString("benchmarkTargetPackage") - if (targetArg != null && targetArg != pkg) { - Assume.assumeTrue("Skipping because instrumentation targets $targetArg", false) - } - Assume.assumeTrue("Skipping test because target package $pkg is not installed", isPackageInstalled(pkg)) - - try { - rule.measureRepeated( - packageName = pkg, - metrics = listOf(StartupTimingMetric(), FrameTimingMetric()), - iterations = 3, - startupMode = StartupMode.COLD, - measureBlock = { startActivityAndWait() } - ) - } catch (t: Throwable) { - // Some devices/targets may not produce frame timing results or other metric errors may occur. - // Skip the test instead of failing the whole run when metric processing fails. - Assume.assumeTrue("Skipping benchmark due to metric error: ${t.message}", false) - } - } - - @Test - fun coldStartup_view() { - val pkg = "dev.egarcia.andperf.view" - val targetArg = InstrumentationRegistry.getArguments().getString("benchmarkTargetPackage") - if (targetArg != null && targetArg != pkg) { - Assume.assumeTrue("Skipping because instrumentation targets $targetArg", false) - } - Assume.assumeTrue("Skipping test because target package $pkg is not installed", isPackageInstalled(pkg)) - - try { - rule.measureRepeated( - packageName = pkg, - metrics = listOf(StartupTimingMetric()), // FrameTimingMetric removed for view target to avoid empty results - iterations = 3, - startupMode = StartupMode.COLD, - measureBlock = { startActivityAndWait() } - ) - } catch (t: Throwable) { - // Some devices/targets may not produce frame timing results or other metric errors may occur. - // Skip the test instead of failing the whole run when metric processing fails. 
- Assume.assumeTrue("Skipping benchmark due to metric error: ${t.message}", false) - } - } -} diff --git a/benchmark/src/main/java/dev/egarcia/andperf/benchmark/BenchmarkUtils.kt b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/BenchmarkUtils.kt new file mode 100644 index 0000000..5f2b312 --- /dev/null +++ b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/BenchmarkUtils.kt @@ -0,0 +1,22 @@ +package dev.egarcia.andperf.benchmark + +import androidx.test.platform.app.InstrumentationRegistry +import androidx.test.uiautomator.UiDevice + +/** Shared helpers for benchmarks. Keep this class minimal and free of instrumentation-specific + * side-effects so it can be used by multiple test classes. */ +object BenchmarkUtils { + + val device: UiDevice + get() = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation()) + + fun isPackageInstalled(packageName: String): Boolean { + return try { + val context = InstrumentationRegistry.getInstrumentation().context + context.packageManager.getApplicationInfo(packageName, 0) + true + } catch (_: Exception) { + false + } + } +} diff --git a/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ComposeBenchmarks.kt b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ComposeBenchmarks.kt new file mode 100644 index 0000000..3675ccc --- /dev/null +++ b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ComposeBenchmarks.kt @@ -0,0 +1,69 @@ +package dev.egarcia.andperf.benchmark + +import androidx.benchmark.macro.FrameTimingMetric +import androidx.benchmark.macro.StartupMode +import androidx.benchmark.macro.StartupTimingMetric +import androidx.benchmark.macro.junit4.MacrobenchmarkRule +import androidx.test.ext.junit.runners.AndroidJUnit4 +import org.junit.Assume +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith + +@RunWith(AndroidJUnit4::class) +class ComposeBenchmarks { + + @get:Rule + val rule = MacrobenchmarkRule() + + @Test + fun coldStartup_compose() { + val pkg = 
"dev.egarcia.andperf.compose" + // Skip if the expected target package is not installed on the device + Assume.assumeTrue("Skipping test because target package $pkg is not installed", BenchmarkUtils.isPackageInstalled(pkg)) + + try { + rule.measureRepeated( + packageName = pkg, + metrics = listOf(StartupTimingMetric(), FrameTimingMetric()), + iterations = 3, + startupMode = StartupMode.COLD, + measureBlock = { startActivityAndWait() } + ) + } catch (t: Throwable) { + // Treat metric collection errors as skipped (device may not surface frame metrics) + Assume.assumeTrue("Skipping benchmark due to metric error: ${t.message}", false) + } + } + + @Test + fun fastScroll_compose() { + val pkg = "dev.egarcia.andperf.compose" + Assume.assumeTrue("Skipping test because target package $pkg is not installed", BenchmarkUtils.isPackageInstalled(pkg)) + + try { + rule.measureRepeated( + packageName = pkg, + metrics = listOf(FrameTimingMetric()), + iterations = 5, + startupMode = StartupMode.WARM, + measureBlock = { + // perform a series of scroll gestures using UiAutomator helper + val device = BenchmarkUtils.device + val width = device.displayWidth + val height = device.displayHeight + val startX = (width * 0.5).toInt() + val startY = (height * 0.8).toInt() + val endY = (height * 0.2).toInt() + repeat(8) { + device.swipe(startX, startY, startX, endY, 50) + Thread.sleep(150) + } + } + ) + } catch (t: Throwable) { + // Treat metric collection errors as skipped (devices may return 0 frame samples) + Assume.assumeTrue("Skipping benchmark due to metric error: ${t.message}", false) + } + } +} diff --git a/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ComposeViewBenchmarks.kt b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ComposeViewBenchmarks.kt deleted file mode 100644 index b34558d..0000000 --- a/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ComposeViewBenchmarks.kt +++ /dev/null @@ -1,87 +0,0 @@ -package dev.egarcia.andperf.benchmark - -import 
androidx.benchmark.macro.FrameTimingMetric -import androidx.benchmark.macro.StartupMode -import androidx.benchmark.macro.StartupTimingMetric -import androidx.benchmark.macro.junit4.MacrobenchmarkRule -import androidx.test.ext.junit.runners.AndroidJUnit4 -import org.junit.Assume -import org.junit.Rule -import org.junit.Test -import org.junit.runner.RunWith - -@RunWith(AndroidJUnit4::class) -class ComposeViewBenchmarks { - - @get:Rule - val rule = MacrobenchmarkRule() - - @Test - fun coldStartup_compose() { - rule.measureRepeated( - packageName = "dev.egarcia.andperf.compose", - metrics = listOf(StartupTimingMetric(), FrameTimingMetric()), - iterations = 3, - startupMode = StartupMode.COLD, - measureBlock = { startActivityAndWait() } - ) - } - - @Test - fun coldStartup_view() { - // Helper to check if any cause message contains one of the expected substrings - fun causeMessageContains(t: Throwable?, substrings: List): Boolean { - var cur: Throwable? = t - while (cur != null) { - val msg = cur.message - if (msg != null) { - for (s in substrings) { - if (msg.contains(s)) return true - } - } - cur = cur.cause - } - return false - } - - val frameErrorIndicators = listOf("frameDurationCpuMs", "At least one result is necessary") - - // First, attempt to measure including frame timing. If that fails due to missing frame - // results, retry once without FrameTimingMetric and otherwise skip the test. 
- try { - rule.measureRepeated( - packageName = "dev.egarcia.andperf.view", - metrics = listOf(StartupTimingMetric(), FrameTimingMetric()), - iterations = 3, - startupMode = StartupMode.COLD, - measureBlock = { startActivityAndWait() } - ) - return - } catch (first: Throwable) { - // If this failure looks like the known empty-frame-metrics case, retry without frame metric - if (causeMessageContains(first, frameErrorIndicators)) { - try { - rule.measureRepeated( - packageName = "dev.egarcia.andperf.view", - metrics = listOf(StartupTimingMetric()), - iterations = 3, - startupMode = StartupMode.COLD, - measureBlock = { startActivityAndWait() } - ) - return - } catch (second: Throwable) { - // Retry failed — skip the test rather than fail the suite - Assume.assumeTrue( - "Skipping benchmark (frame timing missing) after retry: ${second.message}", - false - ) - } - } - // If it wasn't the known frame-metrics problem, skip as well (avoid failing CI) - Assume.assumeTrue("Skipping benchmark due to metric error: ${first.message}", false) - } catch (t: Throwable) { - // Any other error during measurement should skip the test rather than fail CI - Assume.assumeTrue("Skipping benchmark due to unexpected error: ${t.message}", false) - } - } -} diff --git a/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ViewBenchmarks.kt b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ViewBenchmarks.kt new file mode 100644 index 0000000..8d40b09 --- /dev/null +++ b/benchmark/src/main/java/dev/egarcia/andperf/benchmark/ViewBenchmarks.kt @@ -0,0 +1,65 @@ +package dev.egarcia.andperf.benchmark + +import androidx.benchmark.macro.StartupMode +import androidx.benchmark.macro.StartupTimingMetric +import androidx.benchmark.macro.junit4.MacrobenchmarkRule +import androidx.test.ext.junit.runners.AndroidJUnit4 +import org.junit.Assume +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith + +@RunWith(AndroidJUnit4::class) +class ViewBenchmarks { + + @get:Rule + val 
rule = MacrobenchmarkRule() + + @Test + fun coldStartup_view() { + val pkg = "dev.egarcia.andperf.view" + Assume.assumeTrue("Skipping test because target package $pkg is not installed", BenchmarkUtils.isPackageInstalled(pkg)) + + try { + // For view implementation, avoid FrameTimingMetric on some targets that don't provide frame metrics. + rule.measureRepeated( + packageName = pkg, + metrics = listOf(StartupTimingMetric()), + iterations = 3, + startupMode = StartupMode.COLD, + measureBlock = { startActivityAndWait() } + ) + } catch (t: Throwable) { + Assume.assumeTrue("Skipping benchmark due to metric error: ${t.message}", false) + } + } + + @Test + fun fastScroll_view() { + val pkg = "dev.egarcia.andperf.view" + Assume.assumeTrue("Skipping test because target package $pkg is not installed", BenchmarkUtils.isPackageInstalled(pkg)) + + try { + rule.measureRepeated( + packageName = pkg, + metrics = listOf(), // rely on startup/frame metrics where appropriate; keep minimal + iterations = 5, + startupMode = StartupMode.WARM, + measureBlock = { + val device = BenchmarkUtils.device + val width = device.displayWidth + val height = device.displayHeight + val startX = (width * 0.5).toInt() + val startY = (height * 0.8).toInt() + val endY = (height * 0.2).toInt() + repeat(8) { + device.swipe(startX, startY, startX, endY, 50) + Thread.sleep(150) + } + } + ) + } catch (t: Throwable) { + Assume.assumeTrue("Skipping benchmark due to metric error: ${t.message}", false) + } + } +} diff --git a/build.gradle.kts b/build.gradle.kts index ccb1c81..3a28839 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -8,75 +8,90 @@ plugins { // Convenience benchmark tasks (use Exec tasks to avoid deprecated inline `exec {}`) val deviceSerial: String? = (project.findProperty("deviceSerial") as? 
String) -// Assemble+install for each app as Exec tasks -tasks.register("assembleInstallCompose") { +// Assemble+install for each app as Gradle task dependencies (avoid spawning nested Gradle) +tasks.register("assembleInstallCompose") { group = "benchmark" description = "Assemble & install :app-compose:benchmark" - doFirst { - commandLine("./gradlew", ":app-compose:assembleBenchmark", ":app-compose:installBenchmark") - } + // Depend on the concrete tasks in the subproject so this runs in the same Gradle invocation + dependsOn(":app-compose:assembleBenchmark", ":app-compose:installBenchmark") } -tasks.register("assembleInstallView") { +tasks.register("assembleInstallView") { group = "benchmark" description = "Assemble & install :app-view:benchmark" - doFirst { - commandLine("./gradlew", ":app-view:assembleBenchmark", ":app-view:installBenchmark") - } + dependsOn(":app-view:assembleBenchmark", ":app-view:installBenchmark") } // Install both apps -tasks.register("benchInstallAll") { +tasks.register("benchInstallAll") { group = "benchmark" description = "Assemble & install benchmark APKs for both app modules (:app-compose and :app-view)" - doFirst { - commandLine("./gradlew", ":app-compose:assembleBenchmark", ":app-compose:installBenchmark", - ":app-view:assembleBenchmark", ":app-view:installBenchmark") - } + dependsOn(":app-compose:assembleBenchmark", ":app-compose:installBenchmark", + ":app-view:assembleBenchmark", ":app-view:installBenchmark") } -fun projectPathToPackage(targetProject: String): String = when (targetProject) { - ":app-compose" -> "dev.egarcia.andperf.compose" - ":app-view" -> "dev.egarcia.andperf.view" - else -> targetProject -} +// NOTE: the small helper `projectPathToPackage` was removed to avoid an unused-symbol warning. +// If you need a mapping from project path to package in the future, re-add a minimal helper. 
-fun testClassForTarget(targetProject: String): String = when (targetProject) { - ":app-compose" -> "dev.egarcia.andperf.benchmark.ComposeViewBenchmarks#coldStartup_compose" - ":app-view" -> "dev.egarcia.andperf.benchmark.ComposeViewBenchmarks#coldStartup_view" - else -> "" +// Convenience tasks to run a single benchmark test class for Compose or View. +// They set the instrumentation runner arguments in the current Gradle invocation +// via the extra properties so the :benchmark:connectedBenchmarkAndroidTest task picks them up. + +tasks.register("runBenchmarkComposeClass") { + group = "benchmark" + description = "Assemble/install :app-compose and run Compose benchmark class (sets instrumentation class & package)" + doFirst { + // Set instrumentation runner args as project properties for this invocation + project.extensions.extraProperties.set("android.testInstrumentationRunnerArguments.class", "dev.egarcia.andperf.benchmark.ComposeBenchmarks") + project.extensions.extraProperties.set("android.testInstrumentationRunnerArguments.benchmarkTargetPackage", "dev.egarcia.andperf.compose") + if (deviceSerial != null) { + project.extensions.extraProperties.set("android.testInstrumentationRunnerArguments.serial", deviceSerial) + logger.lifecycle("Passing serial to instrumentation runner: $deviceSerial") + } + } + dependsOn("assembleInstallCompose", ":benchmark:connectedBenchmarkAndroidTest") } -fun buildBenchmarkCmd(targetProject: String): List { - val cmd = mutableListOf("./gradlew", ":benchmark:assembleBenchmark", ":benchmark:connectedBenchmarkAndroidTest", "-PbenchmarkTarget=$targetProject") - deviceSerial?.let { cmd.add("-Pandroid.testInstrumentationRunnerArguments.serial=$it") } - // Pass the intended package down to the instrumentation APK so tests can skip non-targets - cmd.add("-Pandroid.testInstrumentationRunnerArguments.benchmarkTargetPackage=${projectPathToPackage(targetProject)}") - val testClass = testClassForTarget(targetProject) - if (testClass.isNotBlank()) 
{ - cmd.add("-Pandroid.testInstrumentationRunnerArguments.class=$testClass") +tasks.register("runBenchmarkViewClass") { + group = "benchmark" + description = "Assemble/install :app-view and run View benchmark class (sets instrumentation class & package)" + doFirst { + project.extensions.extraProperties.set("android.testInstrumentationRunnerArguments.class", "dev.egarcia.andperf.benchmark.ViewBenchmarks") + project.extensions.extraProperties.set("android.testInstrumentationRunnerArguments.benchmarkTargetPackage", "dev.egarcia.andperf.view") + if (deviceSerial != null) { + project.extensions.extraProperties.set("android.testInstrumentationRunnerArguments.serial", deviceSerial) + logger.lifecycle("Passing serial to instrumentation runner: $deviceSerial") + } } - return cmd + dependsOn("assembleInstallView", ":benchmark:connectedBenchmarkAndroidTest") } -// Exec tasks to run the benchmark instrumentation for a given target -tasks.register("benchmarkComposeRun") { +// Task to run the benchmark instrumentation for a given target by depending on the benchmark project's tasks. +// Note: If you need to pass the device serial to the instrumentation runner, pass it as +// -Pandroid.testInstrumentationRunnerArguments.serial= or use ANDROID_SERIAL env var. 
+tasks.register("benchmarkComposeRun") { group = "benchmark" description = "Run :benchmark:connectedBenchmarkAndroidTest for :app-compose" + dependsOn(":benchmark:assembleBenchmark", ":benchmark:connectedBenchmarkAndroidTest") doFirst { - commandLine(buildBenchmarkCmd(":app-compose")) + if (deviceSerial != null) { + logger.lifecycle("deviceSerial project property is set but to pass it to the instrumentation runner please use -Pandroid.testInstrumentationRunnerArguments.serial=$deviceSerial or ANDROID_SERIAL env var") + } } } -tasks.register("benchmarkViewRun") { +tasks.register("benchmarkViewRun") { group = "benchmark" description = "Run :benchmark:connectedBenchmarkAndroidTest for :app-view" + dependsOn(":benchmark:assembleBenchmark", ":benchmark:connectedBenchmarkAndroidTest") doFirst { - commandLine(buildBenchmarkCmd(":app-view")) + if (deviceSerial != null) { + logger.lifecycle("deviceSerial project property is set but to pass it to the instrumentation runner please use -Pandroid.testInstrumentationRunnerArguments.serial=$deviceSerial or ANDROID_SERIAL env var") + } } } -// High-level user-facing tasks that compose the above Exec tasks +// High-level user-facing tasks that compose the above tasks tasks.register("runBenchmarkCompose") { group = "benchmark" description = "Assemble/install :app-compose then run compose benchmarks" @@ -93,8 +108,6 @@ tasks.register("runAllBenchmarks") { group = "benchmark" description = "Install both apps then run benchmarks for :app-compose then :app-view sequentially" dependsOn("benchInstallAll", "benchmarkComposeRun", "benchmarkViewRun") - // enforce ordering: run view after compose - // moved mustRunAfter to top-level below to avoid NamedDomainObjectProvider.configure in task context } // enforce ordering so installs happen before running benchmarks