Add Android toolchain benchmark suite

Add Android toolchain benchmark suite into toolchain-utils, which will
be synced to Android Repository.

BUG=None

TEST=None

Change-Id: Iee1ba9aa21a99ff6ce8d669c59d53af6e953703c
Reviewed-on: https://chromium-review.googlesource.com/569481
Commit-Ready: Zhizhou Yang <zhizhouy@chromium.org>
Tested-by: Zhizhou Yang <zhizhouy@chromium.org>
Reviewed-by: Luis Lozano <llozano@chromium.org>
diff --git a/android_bench_suite/Binder_flags_aosp.diff b/android_bench_suite/Binder_flags_aosp.diff
new file mode 100644
index 0000000..1e3ec6c
--- /dev/null
+++ b/android_bench_suite/Binder_flags_aosp.diff
@@ -0,0 +1,43 @@
+diff --git a/libs/binder/Android.bp b/libs/binder/Android.bp
+index f7347aef1..a539fac47 100644
+--- a/libs/binder/Android.bp
++++ b/libs/binder/Android.bp
+@@ -52,10 +52,12 @@ cc_library {
+         "-Wall",
+         "-Wextra",
+         "-Werror",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+     product_variables: {
+         binder32bit: {
+-            cflags: ["-DBINDER_IPC_32BIT=1"],
++            cflags: ["-DBINDER_IPC_32BIT=1",],
+         },
+     },
+ 
+@@ -76,4 +78,22 @@ cc_library {
+     },
+ }
+ 
+-subdirs = ["tests"]
++cc_test {
++    name: "binderThroughputTest",
++    srcs: ["tests/binderThroughputTest.cpp"],
++    shared_libs: [
++        "libbinder",
++        "libutils",
++    ],
++    clang: true,
++    cflags: [
++        "-g",
++        "-Wall",
++        "-Werror",
++        "-Wno-missing-field-initializers",
++        "-Wno-sign-compare",
++	 "-O3",
++        CFLAGS_FOR_BENCH_SUITE
++    ],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++}
++
diff --git a/android_bench_suite/Binder_flags_internal.diff b/android_bench_suite/Binder_flags_internal.diff
new file mode 100644
index 0000000..1e3ec6c
--- /dev/null
+++ b/android_bench_suite/Binder_flags_internal.diff
@@ -0,0 +1,43 @@
+diff --git a/libs/binder/Android.bp b/libs/binder/Android.bp
+index f7347aef1..a539fac47 100644
+--- a/libs/binder/Android.bp
++++ b/libs/binder/Android.bp
+@@ -52,10 +52,12 @@ cc_library {
+         "-Wall",
+         "-Wextra",
+         "-Werror",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+     product_variables: {
+         binder32bit: {
+-            cflags: ["-DBINDER_IPC_32BIT=1"],
++            cflags: ["-DBINDER_IPC_32BIT=1",],
+         },
+     },
+ 
+@@ -76,4 +78,22 @@ cc_library {
+     },
+ }
+ 
+-subdirs = ["tests"]
++cc_test {
++    name: "binderThroughputTest",
++    srcs: ["tests/binderThroughputTest.cpp"],
++    shared_libs: [
++        "libbinder",
++        "libutils",
++    ],
++    clang: true,
++    cflags: [
++        "-g",
++        "-Wall",
++        "-Werror",
++        "-Wno-missing-field-initializers",
++        "-Wno-sign-compare",
++	 "-O3",
++        CFLAGS_FOR_BENCH_SUITE
++    ],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++}
++
diff --git a/android_bench_suite/Dex2oat_flags_aosp.diff b/android_bench_suite/Dex2oat_flags_aosp.diff
new file mode 100644
index 0000000..fcd611f
--- /dev/null
+++ b/android_bench_suite/Dex2oat_flags_aosp.diff
@@ -0,0 +1,13 @@
+diff --git a/compiler/Android.bp b/compiler/Android.bp
+index a1269dcaf..a9b62b474 100644
+--- a/compiler/Android.bp
++++ b/compiler/Android.bp
+@@ -215,6 +215,8 @@ art_cc_defaults {
+     shared: {
+         shared_libs: ["libcrypto"],
+     },
++    cflags: [CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+ }
+ 
+ gensrcs {
diff --git a/android_bench_suite/Dex2oat_flags_internal.diff b/android_bench_suite/Dex2oat_flags_internal.diff
new file mode 100644
index 0000000..fcd611f
--- /dev/null
+++ b/android_bench_suite/Dex2oat_flags_internal.diff
@@ -0,0 +1,13 @@
+diff --git a/compiler/Android.bp b/compiler/Android.bp
+index a1269dcaf..a9b62b474 100644
+--- a/compiler/Android.bp
++++ b/compiler/Android.bp
+@@ -215,6 +215,8 @@ art_cc_defaults {
+     shared: {
+         shared_libs: ["libcrypto"],
+     },
++    cflags: [CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+ }
+ 
+ gensrcs {
diff --git a/android_bench_suite/Hwui_flags_aosp.diff b/android_bench_suite/Hwui_flags_aosp.diff
new file mode 100644
index 0000000..9e3b1df
--- /dev/null
+++ b/android_bench_suite/Hwui_flags_aosp.diff
@@ -0,0 +1,50 @@
+diff --git a/libs/hwui/Android.bp b/libs/hwui/Android.bp
+index 558cdc0faf3..1565be5b201 100644
+--- a/libs/hwui/Android.bp
++++ b/libs/hwui/Android.bp
+@@ -24,12 +24,15 @@ cc_defaults {
+         "-Werror",
+         "-fvisibility=hidden",
+         "-DHWUI_NEW_OPS",
++	CFLAGS_FOR_BENCH_SUITE
+ 
+         // GCC false-positives on this warning, and since we -Werror that's
+         // a problem
+         "-Wno-free-nonheap-object",
+     ],
+ 
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     include_dirs: [
+         "external/skia/include/private",
+         "external/skia/src/core",
+@@ -214,6 +217,9 @@ cc_defaults {
+         export_proto_headers: true,
+     },
+ 
++    cflags: [CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     export_include_dirs: ["."],
+ }
+ 
+@@ -229,7 +235,8 @@ cc_library {
+ cc_library_static {
+     name: "libhwui_static_null_gpu",
+     defaults: ["libhwui_defaults"],
+-    cflags: ["-DHWUI_NULL_GPU"],
++    cflags: ["-DHWUI_NULL_GPU", CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+     srcs: [
+         "debug/nullegl.cpp",
+         "debug/nullgles.cpp",
+@@ -319,7 +326,8 @@ cc_benchmark {
+     name: "hwuimicro",
+     defaults: ["hwui_test_defaults"],
+ 
+-    cflags: ["-DHWUI_NULL_GPU"],
++    cflags: ["-DHWUI_NULL_GPU", CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+ 
+     whole_static_libs: ["libhwui_static_null_gpu"],
+
diff --git a/android_bench_suite/Hwui_flags_internal.diff b/android_bench_suite/Hwui_flags_internal.diff
new file mode 100644
index 0000000..16a0222
--- /dev/null
+++ b/android_bench_suite/Hwui_flags_internal.diff
@@ -0,0 +1,72 @@
+diff --git a/libs/hwui/Android.bp b/libs/hwui/Android.bp
+index 303d05f084a..946aa9bb754 100644
+--- a/libs/hwui/Android.bp
++++ b/libs/hwui/Android.bp
+@@ -17,6 +17,7 @@ cc_defaults {
+         "-Wunreachable-code",
+         "-Werror",
+         "-fvisibility=hidden",
++	CFLAGS_FOR_BENCH_SUITE
+ 
+         // GCC false-positives on this warning, and since we -Werror that's
+         // a problem
+@@ -30,6 +31,8 @@ cc_defaults {
+         //"-DANDROID_ENABLE_LINEAR_BLENDING",
+     ],
+ 
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     include_dirs: [
+         "external/skia/include/private",
+         "external/skia/src/core",
+@@ -231,6 +234,9 @@ cc_defaults {
+         export_proto_headers: true,
+     },
+ 
++    cflags: [CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     export_include_dirs: ["."],
+     export_shared_lib_headers: ["libRScpp"],
+ }
+@@ -240,7 +246,7 @@ cc_library {
+     defaults: [
+         "libhwui_defaults",
+ 
+-        // Enables fine-grained GLES error checking
++	// Enables fine-grained GLES error checking
+         // If enabled, every GLES call is wrapped & error checked
+         // Has moderate overhead
+         "hwui_enable_opengl_validation",
+@@ -257,7 +263,8 @@ cc_library_static {
+         "libhwui_defaults",
+         "hwui_debug",
+     ],
+-    cflags: ["-DHWUI_NULL_GPU"],
++    cflags: ["-DHWUI_NULL_GPU", CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+     srcs: [
+         "debug/nullegl.cpp",
+     ],
+@@ -354,6 +361,9 @@ cc_benchmark {
+     whole_static_libs: ["libhwui"],
+     shared_libs: ["libmemunreachable"],
+ 
++    cflags: [CFLAGS_FOR_BENCH_SUITE],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     srcs: [
+         "tests/macrobench/TestSceneRunner.cpp",
+         "tests/macrobench/main.cpp",
+@@ -371,8 +381,11 @@ cc_benchmark {
+     cflags: [
+         "-include debug/wrap_gles.h",
+         "-DHWUI_NULL_GPU",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
+ 
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     whole_static_libs: ["libhwui_static_debug"],
+     shared_libs: ["libmemunreachable"],
+ 
diff --git a/android_bench_suite/README.txt b/android_bench_suite/README.txt
new file mode 100644
index 0000000..3d0cceb
--- /dev/null
+++ b/android_bench_suite/README.txt
@@ -0,0 +1,41 @@
+This is an Android Toolchain benchmark suite.
+===========================================
+Where to find this suite:
+	This suite is located in google3; please create a google3 branch first,
+	then run:
+		$cd experimental/users/zhizhouy/benchtoolchain
+	Copy this directory to the place you want to put it.
+
+To use this suite:
+	1. Configure the basic environment in the env_setting file.
+
+	2. Run ./apply_patches.py, which will:
+		1) Patch all the android benchmarks in the android tree.
+		Benchmark Panorama does not exist in android tree, so perftests/
+		gets copied into the top-level of android tree.
+
+		2) Apply patch autotest.diff to android_root/external/autotest, which
+		includes all the test scripts for benchmarks. Copy testcases to
+		related autotest directory.
+
+	   If you have applied the patch partially and hope to discard the
+	   patch, just run discard_patches.py
+
+	3. Build and run benchmark on the device using ./run.py. You can either
+	use test configuration file (-t test_config), or set all the variables
+	manually.
+
+	4. The raw results are located at bench_result_* in the bench suite home
+	directory.
+
+	5. The JSON format result will be generated for crosperf report.
+
+Utility tools:
+	1. Autotest is a test framework located in android external/autotest
+		Before first time running it, please run
+		utils/build_externals.py first to ensure all the environments
+		and tools needed are installed.
+
+	2. Crosperf is a report generating tool in ChromeOS toolchain utilities.
+		Please look for it in chromium source:
+		src/third_party/toolchain-utils/crosperf.
diff --git a/android_bench_suite/Skia_flags_aosp.diff b/android_bench_suite/Skia_flags_aosp.diff
new file mode 100644
index 0000000..b2ff242
--- /dev/null
+++ b/android_bench_suite/Skia_flags_aosp.diff
@@ -0,0 +1,28 @@
+diff --git a/Android.bp b/Android.bp
+index a581b0a53..36159c5ae 100644
+--- a/Android.bp
++++ b/Android.bp
+@@ -40,8 +40,11 @@ cc_library {
+         "-Wno-clobbered",
+         "-Wno-error",
+         "-fexceptions",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
+ 
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     cppflags: [
+         "-std=c++11",
+         "-fno-threadsafe-statics",
+@@ -1470,8 +1473,11 @@ cc_test {
+         "-U_FORTIFY_SOURCE",
+         "-D_FORTIFY_SOURCE=1",
+         "-DSKIA_IMPLEMENTATION=1",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
+ 
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     cppflags: [
+         "-std=c++11",
+         "-fno-threadsafe-statics",
diff --git a/android_bench_suite/Skia_flags_internal.diff b/android_bench_suite/Skia_flags_internal.diff
new file mode 100644
index 0000000..2eb6a1d
--- /dev/null
+++ b/android_bench_suite/Skia_flags_internal.diff
@@ -0,0 +1,26 @@
+diff --git a/Android.bp b/Android.bp
+index b4e1f5f701..13e1c6645f 100644
+--- a/Android.bp
++++ b/Android.bp
+@@ -9,7 +9,9 @@ cc_library {
+         "-D_FORTIFY_SOURCE=1",
+         "-DSKIA_IMPLEMENTATION=1",
+         "-DATRACE_TAG=ATRACE_TAG_VIEW",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
+ 
+     export_include_dirs: [
+         "include/android/",
+@@ -1603,8 +1605,11 @@ cc_test {
+ 
+     cflags: [
+         "-Wno-unused-parameter",
++	CFLAGS_FOR_BENCH_SUITE
+     ],
+ 
++    ldflags: [LDFLAGS_FOR_BENCH_SUITE],
++
+     local_include_dirs: [
+         "bench/",
+         "experimental/svg/model/",
diff --git a/android_bench_suite/apply_patches.py b/android_bench_suite/apply_patches.py
new file mode 100755
index 0000000..0584946
--- /dev/null
+++ b/android_bench_suite/apply_patches.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script to patch Android repo with diffs that are needed by the suite.
+
+Run this script before running the suite.
+"""
+from __future__ import print_function
+
+import config
+import os
+import subprocess
+
+# The patches to be added to the android repo.
+# An error may occur if it is already patched, or meets some error.
+# FIXME: Needs to be FIXED in the future.
+def try_patch_skia():
+  skia_dir = os.path.join(config.android_home, config.bench_dict['Skia'])
+  # You may want to change the file based on aosp or internal
+  if config.android_type == 'internal':
+    print('No need to patch skia for internal repo.')
+    return
+  elif config.android_type == 'aosp':
+    skia_patch = os.path.join(
+        os.path.dirname(os.path.realpath(__file__)), 'skia_aosp.diff')
+  else:
+    raise ValueError('Android source type should be either aosp or internal.')
+  # FIXME: A quick hack, need to handle errors and check whether has been
+  # applied in the future.
+  try:
+    subprocess.check_call(['git', '-C', skia_dir, 'apply', skia_patch])
+    print('Skia patched successfully!')
+  except subprocess.CalledProcessError:
+    print('Skia patch not applied, error or already patched.')
+
+
+def try_patch_autotest():
+  # Patch autotest, which includes all the testcases on device, setting device,
+  # and running the benchmarks
+  autotest_dir = os.path.join(config.android_home, config.autotest_dir)
+  autotest_patch = os.path.join(
+      os.path.dirname(os.path.realpath(__file__)), 'autotest.diff')
+  dex2oat_dir = os.path.join(autotest_dir, 'server/site_tests/android_Dex2oat')
+  panorama_dir = os.path.join(autotest_dir,
+                              'server/site_tests/android_Panorama')
+  # FIXME: A quick hack, need to handle errors and check whether has been
+  # applied in the future.
+  try:
+    subprocess.check_call(['git', '-C', autotest_dir, 'apply', autotest_patch])
+    subprocess.check_call(['cp', '-rf', 'dex2oat_input', dex2oat_dir])
+    subprocess.check_call(['cp', '-rf', 'panorama_input', panorama_dir])
+    print('Autotest patched successfully!')
+  except subprocess.CalledProcessError:
+    print('Autotest patch not applied, error or already patched.')
+
+
+def try_patch_panorama():
+  panorama_dir = os.path.join(config.android_home,
+                              config.bench_dict['Panorama'])
+  panorama_patch = os.path.join(
+      os.path.dirname(os.path.realpath(__file__)), 'panorama.diff')
+  # FIXME: A quick hack, need to handle errors and check whether has been
+  # applied in the future.
+  try:
+    subprocess.check_call(['git', '-C', panorama_dir, 'apply', panorama_patch])
+    print('Panorama patched successfully!')
+  except subprocess.CalledProcessError:
+    print('Panorama patch not applied, error or already patched.')
+
+
+def try_patch_synthmark():
+  synthmark_dir = 'devrel/tools/synthmark'
+  # FIXME: A quick hack, need to handle errors and check whether has been
+  # applied in the future.
+  try:
+    subprocess.check_call([
+        'bash', '-c', 'cd devrel && '
+        'repo init -u sso://devrel/manifest &&'
+        'repo sync tools/synthmark'
+    ])
+    subprocess.check_call(['cp', '-rf', synthmark_dir, config.android_home])
+    subprocess.check_call([
+        'cp', 'devrel/Android.mk',
+        os.path.join(config.android_home, 'synthmark')
+    ])
+    print('Synthmark patched successfully!')
+  except subprocess.CalledProcessError:
+    print('Synthmark patch not applied, error or already patched.')
+
+
+def main():
+  try_patch_skia()
+  try_patch_autotest()
+  try_patch_panorama()
+  try_patch_synthmark()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/android_bench_suite/autotest.diff b/android_bench_suite/autotest.diff
new file mode 100644
index 0000000..ef0029a
--- /dev/null
+++ b/android_bench_suite/autotest.diff
@@ -0,0 +1,1057 @@
+diff --git a/server/site_tests/android_Binder/android_Binder.py b/server/site_tests/android_Binder/android_Binder.py
+new file mode 100644
+index 000000000..b233b586a
+--- /dev/null
++++ b/server/site_tests/android_Binder/android_Binder.py
+@@ -0,0 +1,57 @@
++# Tests for android Binder
++from __future__ import print_function
++
++import bench_config
++import logging
++import os
++import re
++
++from autotest_lib.server import test
++
++class android_Binder(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        out_dir = os.path.join(bench_config.android_home,
++                              'out/target/product/' + bench_config.product)
++
++        # Set binary directories
++        lib_dir = os.path.join(out_dir, 'system/lib/libbinder.so')
++        lib_dir_DUT = '/system/lib/libbinder.so'
++        lib64_dir = os.path.join(out_dir, 'system/lib64/libbinder.so')
++        lib64_dir_DUT = '/system/lib64/libbinder.so'
++        bench_dir = os.path.join(out_dir,
++                                 'symbols/data/nativetest64',
++                                 'binderThroughputTest/binderThroughputTest')
++        bench_dir_DUT = os.path.join('/data/local/tmp',
++                                     'binderThroughputTest')
++
++        # Push binary to the device
++        print('Pushing binaries of Binder benchmark onto device!')
++        host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
++        host.send_file(lib_dir, lib_dir_DUT, delete_dest=True)
++        host.send_file(lib64_dir, lib64_dir_DUT, delete_dest=True)
++
++        # Make sure the binary is executable
++        self.client.run('chmod u+x ' + bench_dir_DUT)
++
++        print('Running tests on the device...')
++        # First run creates bench_result
++        self.client.run('taskset %s /data/local/tmp/'
++                        'binderThroughputTest > /data/local/tmp/bench_result'
++                        % os.getenv('TEST_MODE'))
++        # Next 4 runs add to bench_result
++        for i in xrange(4):
++          self.client.run('taskset %s /data/local/tmp/'
++                          'binderThroughputTest >> '
++                          '/data/local/tmp/bench_result'
++                          % os.getenv('TEST_MODE'))
++
++        # Pull result from the device
++        out_dir = bench_config.bench_suite_dir
++        result_dir_DUT = '/data/local/tmp/bench_result'
++
++        host.get_file(result_dir_DUT, out_dir, delete_dest=True)
++        print('Result has been pulled back to file bench_result!')
+diff --git a/server/site_tests/android_Binder/bench_config.py b/server/site_tests/android_Binder/bench_config.py
+new file mode 100644
+index 000000000..20f685eb9
+--- /dev/null
++++ b/server/site_tests/android_Binder/bench_config.py
+@@ -0,0 +1,19 @@
++#!/usr/bin/env python2
++import os
++
++home = os.environ["HOME"]
++
++android_home = os.getenv("ANDROID_HOME",
++                         default=os.path.join(home,
++                                 'android_source/master-googleplex/'))
++bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
++                            default=os.path.join(android_home,
++                                                 'benchtoolchain'))
++
++synthmark_dir = 'framework/native/libs/binder'
++
++real_synthmark_dir = os.path.join(android_home, synthmark_dir)
++
++out_dir = os.path.join(android_home, 'out')
++
++product = os.getenv("PRODUCT", default="generic")
+diff --git a/server/site_tests/android_Binder/control b/server/site_tests/android_Binder/control
+new file mode 100644
+index 000000000..d91854b11
+--- /dev/null
++++ b/server/site_tests/android_Binder/control
+@@ -0,0 +1,19 @@
++#Control
++
++NAME = "Binder"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "MEDIUM"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "application"
++TEST_TYPE = "server"
++
++DOC = """
++
++"""
++
++def run_binder_test(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_Binder", host=host)
++
++parallel_simple(run_binder_test, machines)
+diff --git a/server/site_tests/android_Dex2oat/android_Dex2oat.py b/server/site_tests/android_Dex2oat/android_Dex2oat.py
+new file mode 100644
+index 000000000..dd6af0b53
+--- /dev/null
++++ b/server/site_tests/android_Dex2oat/android_Dex2oat.py
+@@ -0,0 +1,70 @@
++# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
++# Use of this source code is governed by a BSD-style license that can be
++# found in the LICENSE file.
++
++import bench_config
++import time
++import logging
++import os
++import re
++
++from autotest_lib.client.common_lib import error
++from autotest_lib.server import test
++
++class android_Dex2oat(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        out_dir = os.path.join(bench_config.android_home,
++                              'out/target/product/',
++                               bench_config.product)
++
++        # Set binary directories
++        bench_dir = os.path.join(out_dir, 'system/lib/libart-compiler.so')
++        bench_dir_DUT = '/system/lib/libart-compiler.so'
++        bench64_dir = os.path.join(out_dir, 'system/lib64/libart-compiler.so')
++        bench64_dir_DUT = '/system/lib64/libart-compiler.so'
++
++        # Push libart-compiler.so to the device
++        print('Pushing binaries of newly generated library onto device!')
++        host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
++        host.send_file(bench64_dir, bench64_dir_DUT, delete_dest=True)
++
++        # Set testcase directories
++        test_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
++                                 'dex2oat_input')
++        test_dir_DUT = '/data/local/tmp/'
++
++        # Push testcases to the device
++        print('Pushing tests onto device!')
++        host.send_file(test_dir, test_dir_DUT, delete_dest=True)
++
++        # Open file to write the result
++        with open(os.path.join(bench_config.bench_suite_dir,
++                               'bench_result'), 'w') as f:
++
++          # There are two benchmarks, chrome and camera.
++          for i in xrange(2):
++            f.write('Test %d:\n' % i)
++            total_time = 0
++            # Run the benchmark several times for accuracy
++            for j in xrange(3):
++              f.write('Iteration %d: ' % j)
++              result = self.client.run('time taskset %s dex2oat'
++                         ' --dex-file=data/local/tmp/dex2oat_input/test%d.apk'
++                         ' --oat-file=data/local/tmp/dex2oat_input/test%d.oat'
++                         % (os.getenv('TEST_MODE'), i+1, i+1))
++              # Find and record real time of the run
++              time_str = ''
++              for t in result.stdout.split() + result.stderr.split():
++                if 'm' in t and 's' in t:
++                  time_str = t.split('m')
++                  break
++              time_sec = float(time_str[0]) * 60
++              time_sec += float(time_str[1].split('s')[0])
++              f.write('User Time: %.2f seconds\n' % time_sec)
++              total_time += time_sec
++
++            f.write('Total elapsed time: %.2f seconds.\n\n' % total_time)
+diff --git a/server/site_tests/android_Dex2oat/bench_config.py b/server/site_tests/android_Dex2oat/bench_config.py
+new file mode 100644
+index 000000000..d2855f22c
+--- /dev/null
++++ b/server/site_tests/android_Dex2oat/bench_config.py
+@@ -0,0 +1,15 @@
++#!/usr/bin/env python2
++import os
++
++home = os.environ["HOME"]
++
++android_home = os.getenv("ANDROID_HOME",
++                         default=os.path.join(home,
++                                 'android_source/master-googleplex/'))
++bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
++                            default=os.path.join(android_home,
++                                                 'benchtoolchain'))
++
++out_dir = os.path.join(android_home, 'out')
++
++product = os.getenv("PRODUCT", default="generic")
+diff --git a/server/site_tests/android_Dex2oat/control b/server/site_tests/android_Dex2oat/control
+new file mode 100644
+index 000000000..763864f3a
+--- /dev/null
++++ b/server/site_tests/android_Dex2oat/control
+@@ -0,0 +1,21 @@
++# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
++# Use of this source code is governed by a BSD-style license that can be
++# found in the LICENSE file.
++
++NAME = "Dex2oat"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "SHORT"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "kernel"
++TEST_TYPE = "server"
++
++DOC = """
++
++"""
++
++def run_dex2oat(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_Dex2oat", host=host)
++
++parallel_simple(run_dex2oat, machines)
+diff --git a/server/site_tests/android_Hwui/android_Hwui.py b/server/site_tests/android_Hwui/android_Hwui.py
+new file mode 100644
+index 000000000..d1837e042
+--- /dev/null
++++ b/server/site_tests/android_Hwui/android_Hwui.py
+@@ -0,0 +1,67 @@
++# Tests for android Hwui
++from __future__ import print_function
++
++import bench_config
++import logging
++import os
++import re
++
++from autotest_lib.server import test
++
++class android_Hwui(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        out_dir = os.path.join(bench_config.android_home,
++                              'out/target/product/' + bench_config.product)
++
++        lib_dir = os.path.join(out_dir, 'system/lib/libhwui.so')
++        lib_dir_DUT = '/system/lib/libhwui.so'
++        lib64_dir = os.path.join(out_dir, 'system/lib64/libhwui.so')
++        lib64_dir_DUT = '/system/lib64/libhwui.so'
++        bench_dir = os.path.join(out_dir,
++                                 'symbols/data/nativetest64/',
++                                 'hwuimicro/hwuimicro')
++        bench_dir_DUT = '/data/local/tmp/hwuimicro'
++
++        # Push binary to the device
++        print('Pushing Hwui benchmark onto device!')
++        host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
++        host.send_file(lib_dir, lib_dir_DUT, delete_dest=True)
++        host.send_file(lib64_dir, lib64_dir_DUT, delete_dest=True)
++
++        # Make sure the binary is executable
++        self.client.run('chmod u+x ' + bench_dir_DUT)
++
++
++        print('Running tests on the device...')
++        self.client.run('taskset %s /data/local/tmp/hwuimicro'
++                        ' > /data/local/tmp/bench_result'
++                        % os.getenv('TEST_MODE'))
++
++        # Pull result from the device
++        out_dir = bench_config.bench_suite_dir
++        result_dir_DUT = '/data/local/tmp/bench_result'
++
++        host.get_file(result_dir_DUT, out_dir, delete_dest=True)
++
++        # Update total time of the test
++        t = 0
++        with open(os.path.join(out_dir, 'bench_result'), 'r') as fin:
++
++          for lines in fin:
++            line = lines.split()
++            print(line)
++
++            # Check if there is test result in this line
++            if len(line) == 8:
++              # Accumulate the Run time for the testcase
++              t += int(line[2])
++
++        # Append total time to the file
++        with open(os.path.join(out_dir, 'bench_result'), 'a') as fout:
++          fout.write('\nTotal elapsed time: %d ns.\n' % t)
++
++        print('Result has been pulled back to file bench_result!')
+diff --git a/server/site_tests/android_Hwui/bench_config.py b/server/site_tests/android_Hwui/bench_config.py
+new file mode 100644
+index 000000000..a98d259f9
+--- /dev/null
++++ b/server/site_tests/android_Hwui/bench_config.py
+@@ -0,0 +1,19 @@
++#!/usr/bin/env python2
++import os
++
++home = os.environ["HOME"]
++
++android_home = os.getenv("ANDROID_HOME",
++                         default=os.path.join(home,
++                                 'android_source/master-googleplex/'))
++bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
++                            default=os.path.join(android_home,
++                                                 'benchtoolchain'))
++
++hwui_dir = 'frameworks/base/libs/hwui/'
++
++real_hwui_dir = os.path.join(android_home, hwui_dir)
++
++out_dir = os.path.join(android_home, 'out')
++
++product = os.getenv("PRODUCT", default="generic")
+diff --git a/server/site_tests/android_Hwui/control b/server/site_tests/android_Hwui/control
+new file mode 100644
+index 000000000..89c47da20
+--- /dev/null
++++ b/server/site_tests/android_Hwui/control
+@@ -0,0 +1,19 @@
++#Control
++
++NAME = "Hwui"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "MEDIUM"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "library"
++TEST_TYPE = "server"
++
++DOC = """
++
++"""
++
++def run_hwui_test(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_Hwui", host=host)
++
++parallel_simple(run_hwui_test, machines)
+diff --git a/server/site_tests/android_Panorama/android_Panorama.py b/server/site_tests/android_Panorama/android_Panorama.py
+new file mode 100644
+index 000000000..db2a29cde
+--- /dev/null
++++ b/server/site_tests/android_Panorama/android_Panorama.py
+@@ -0,0 +1,53 @@
++# Tests for android Panorama
++from __future__ import print_function
++
++import bench_config
++import logging
++import os
++import re
++
++from autotest_lib.server import test
++
++class android_Panorama(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        out_dir = os.path.join(bench_config.android_home,
++                              'out/target/product/' + bench_config.product)
++
++        # Set binary directories
++        bench_dir = os.path.join(out_dir,
++                                 'data/local/tmp/panorama_bench64')
++        bench_dir_DUT = '/data/local/tmp/panorama_bench64'
++
++        # Set tests directories
++        tests_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
++                                                 'panorama_input')
++        tests_dir_DUT = '/data/local/tmp/panorama_input/'
++
++        # Push binary to the device
++        print('Pushing binaries of Panorama benchmark onto device!')
++        host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
++
++        # Make sure the binary is executable
++        self.client.run('chmod u+x ' + bench_dir_DUT)
++
++        # Push testcases to the device
++        print('Pushing tests onto device!')
++        host.send_file(tests_dir, tests_dir_DUT, delete_dest=True)
++
++        print('Running tests on the device...')
++        self.client.run('taskset %s /data/local/tmp/panorama_bench64 '
++                        '/data/local/tmp/panorama_input/panorama_input/test '
++                        '/data/local/tmp/panorama.ppm'
++                        ' > /data/local/tmp/bench_result'
++                        % os.getenv('TEST_MODE'))
++
++        # Pull result from the device
++        out_dir = bench_config.bench_suite_dir
++        result_dir_DUT = '/data/local/tmp/bench_result'
++
++        host.get_file(result_dir_DUT, out_dir, delete_dest=True)
++        print('Result has been pulled back to file bench_result!')
+diff --git a/server/site_tests/android_Panorama/bench_config.py b/server/site_tests/android_Panorama/bench_config.py
+new file mode 100644
+index 000000000..075beec76
+--- /dev/null
++++ b/server/site_tests/android_Panorama/bench_config.py
+@@ -0,0 +1,19 @@
++#!/usr/bin/env python2
++import os
++
++home = os.environ["HOME"]
++
++android_home = os.getenv("ANDROID_HOME",
++                         default=os.path.join(home,
++                                 'android_source/master-googleplex/'))
++bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
++                            default=os.path.join(android_home,
++                                                 'benchtoolchain'))
++
++panorama_dir = 'perftests/panorama/'
++
++real_panorama_dir = os.path.join(android_home, panorama_dir)
++
++out_dir = os.path.join(android_home, 'out')
++
++product = os.getenv("PRODUCT", default="generic")
+diff --git a/server/site_tests/android_Panorama/control b/server/site_tests/android_Panorama/control
+new file mode 100644
+index 000000000..3cd589eed
+--- /dev/null
++++ b/server/site_tests/android_Panorama/control
+@@ -0,0 +1,19 @@
++#Control
++
++NAME = "Panorama"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "MEDIUM"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "application"
++TEST_TYPE = "server"
++
++DOC = """
++
++"""
++
++def run_panorama_test(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_Panorama", host=host)
++
++parallel_simple(run_panorama_test, machines)
+diff --git a/server/site_tests/android_SetDevice/android_SetDevice.py b/server/site_tests/android_SetDevice/android_SetDevice.py
+new file mode 100644
+index 000000000..7a7134d58
+--- /dev/null
++++ b/server/site_tests/android_SetDevice/android_SetDevice.py
+@@ -0,0 +1,77 @@
++# Set device modes such as cpu frequency
++from __future__ import print_function
++
++import logging
++import os
++import re
++import time
++
++from autotest_lib.server import test
++
++def _get_cat_value(result):
++    return result.stdout.split('\n')[0]
++
++class android_SetDevice(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        # Disable GPU
++        self.client.run('setprop debug.rs.default-GPU-driver 1')
++
++        # Freeze system
++        # Stop perfd, mpdecision and thermal-engine to ensure setting runs
++        # without unexpected errors.
++        self.client.run('stop thermal-engine')
++        self.client.run('stop mpdecision')
++        self.client.run('stop perfd')
++
++        # Set airplane mode on the device
++        self.client.run('settings put global airplane_mode_on 1')
++
++        print('Setting frequency on the device...')
++        frequency = os.getenv('FREQUENCY')
++
++        # Get number of cores on device
++        result = self.client.run('ls /sys/devices/system/cpu/ '
++                                 '| grep cpu[0-9].*')
++        cores = result.stdout.splitlines()
++        for core in cores:
++          if core.startswith('cpu'):
++            # First set all cores online
++            online = os.path.join('/sys/devices/system/cpu', core, 'online')
++            online_status = _get_cat_value(self.client.run('cat %s' % online))
++            if online_status == '0':
++              self.client.run('echo %s > %s' % ('1', online))
++
++            freq_path = os.path.join('/sys/devices/system/cpu', core,
++                                     'cpufreq')
++
++            # Check if the frequency user entered is legal or not.
++            available_freq = self.client.run('cat %s/'
++                                             'scaling_available_frequencies'
++                                             % (freq_path))
++            available_freq_list = _get_cat_value(available_freq).split()
++
++            if frequency not in available_freq_list:
++              raise ValueError('Wrong frequency input, '
++                               'please select from: \n%s'
++                               % (' '.join(available_freq_list)))
++
++            # Set frequency
++            self.client.run('echo %s > %s/scaling_min_freq'
++                            % (frequency, freq_path))
++            self.client.run('echo %s > %s/scaling_max_freq'
++                            % (frequency, freq_path))
++
++            # Sleep for 2 seconds, let device update the frequency.
++            time.sleep(2)
++
++            # Get current frequency
++            freq = self.client.run('cat %s/cpuinfo_cur_freq' % freq_path)
++            f = _get_cat_value(freq)
++            if f != frequency:
++              raise RuntimeError('Expected frequency for %s to be %s, '
++                                 'but is %s' % (core, frequency, f))
++            print('CPU frequency has been set to %s' % (frequency))
+diff --git a/server/site_tests/android_SetDevice/control b/server/site_tests/android_SetDevice/control
+new file mode 100644
+index 000000000..85163706d
+--- /dev/null
++++ b/server/site_tests/android_SetDevice/control
+@@ -0,0 +1,19 @@
++# Control
++
++NAME = "SetDevice"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "MEDIUM"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "application"
++TEST_TYPE = "server"
++
++DOC = """
++Set the core frequency and which core online for devices.
++"""
++
++def run_set_device_test(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_SetDevice", host=host)
++
++parallel_simple(run_set_device_test, machines)
+diff --git a/server/site_tests/android_Skia/android_Skia.py b/server/site_tests/android_Skia/android_Skia.py
+new file mode 100644
+index 000000000..fc8d09dab
+--- /dev/null
++++ b/server/site_tests/android_Skia/android_Skia.py
+@@ -0,0 +1,65 @@
++# Tests for android Skia
++from __future__ import print_function
++
++import bench_config
++import logging
++import os
++import re
++
++from autotest_lib.server import test
++
++class android_Skia(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        out_dir = os.path.join(bench_config.android_home,
++                              'out/target/product/' + bench_config.product)
++
++        # Set binary directories
++        lib_dir = os.path.join(out_dir, 'system/lib/libskia.so')
++        lib_dir_DUT = '/system/lib/libskia.so'
++        lib64_dir = os.path.join(out_dir, 'system/lib64/libskia.so')
++        lib64_dir_DUT = '/system/lib64/libskia.so'
++        bench_dir = os.path.join(out_dir,
++                                 'data/nativetest64/',
++                                 'skia_nanobench/skia_nanobench')
++        bench_dir_DUT = '/data/local/tmp/skia_nanobench'
++
++        # Push binary to the device
++        print('Pushing Skia benchmark onto device!')
++        host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
++        host.send_file(lib_dir, lib_dir_DUT, delete_dest=True)
++        host.send_file(lib64_dir, lib64_dir_DUT, delete_dest=True)
++
++        # Make sure the binary is executable
++        self.client.run('chmod u+x ' + bench_dir_DUT)
++
++        # Set resource directory
++        resource_dir = os.path.join(bench_config.real_skia_dir, 'resources')
++        resource_dir_DUT = '/data/local/tmp/skia_resources/'
++
++        # Push binary to the device
++        print('Pushing Skia resources onto device!')
++        host.send_file(resource_dir, resource_dir_DUT, delete_dest=True)
++
++        # Run tests
++        print('Running tests on the device...')
++        try:
++          self.client.run('taskset %s /data/local/tmp/skia_nanobench'
++                          ' --outResultsFile /data/local/tmp/bench_result'
++                          ' --samples 25'
++                          ' --config nonrendering'
++                          % os.getenv('TEST_MODE'))
++        except Exception:
++          # Ignore failure caused by the benchmark aborting
++          pass
++
++        # Pull result from the device
++        out_dir = bench_config.bench_suite_dir
++        result_dir_DUT = '/data/local/tmp/bench_result'
++
++        host.get_file(result_dir_DUT, out_dir, delete_dest=True)
++
++        print('Result has been pulled back to file bench_result!')
+diff --git a/server/site_tests/android_Skia/bench_config.py b/server/site_tests/android_Skia/bench_config.py
+new file mode 100644
+index 000000000..5d38d452f
+--- /dev/null
++++ b/server/site_tests/android_Skia/bench_config.py
+@@ -0,0 +1,19 @@
++#!/usr/bin/python
++import os
++
++home = os.environ["HOME"]
++
++android_home = os.getenv("ANDROID_HOME",
++                         default=os.path.join(home,
++                                 'android_source/master-googleplex/'))
++bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
++                            default=os.path.join(android_home,
++                                                 'benchtoolchain'))
++
++skia_dir = 'external/skia'
++
++real_skia_dir = os.path.join(android_home, skia_dir)
++
++out_dir = os.path.join(android_home, 'out')
++
++product = os.getenv("PRODUCT", default="generic")
+diff --git a/server/site_tests/android_Skia/control b/server/site_tests/android_Skia/control
+new file mode 100644
+index 000000000..e38195a8c
+--- /dev/null
++++ b/server/site_tests/android_Skia/control
+@@ -0,0 +1,19 @@
++#Control
++
++NAME = "Skia"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "MEDIUM"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "library"
++TEST_TYPE = "server"
++
++DOC = """
++
++"""
++
++def run_skia_test(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_Skia", host=host)
++
++parallel_simple(run_skia_test, machines)
+diff --git a/server/site_tests/android_Synthmark/android_Synthmark.py b/server/site_tests/android_Synthmark/android_Synthmark.py
+new file mode 100644
+index 000000000..b317bd0f3
+--- /dev/null
++++ b/server/site_tests/android_Synthmark/android_Synthmark.py
+@@ -0,0 +1,48 @@
++# Tests for android Synthmark
++from __future__ import print_function
++
++import bench_config
++import logging
++import os
++import re
++
++from autotest_lib.server import test
++
++class android_Synthmark(test.test):
++    version = 1
++
++    def run_once(self, host=None):
++        self.client = host
++
++        out_dir = os.path.join(bench_config.android_home,
++                              'out/target/product/' + bench_config.product)
++
++        # Set binary directories
++        bench_dir = os.path.join(out_dir,
++                                 'symbols/system/bin/synthmark')
++        bench_dir_DUT = '/data/local/tmp/synthmark'
++
++        # Push binary to the device
++        print('Pushing binaries of Synthmark benchmark onto device!')
++        host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
++
++        # Make sure the binary is executable
++        self.client.run('chmod u+x ' + bench_dir_DUT)
++
++        print('Running tests on the device...')
++        # First run creates bench_result
++        self.client.run('taskset %s /data/local/tmp/synthmark'
++                        ' > /data/local/tmp/bench_result'
++                        % os.getenv('TEST_MODE'))
++        # Next 4 runs add to bench_result
++        for i in xrange(4):
++          self.client.run('taskset %s /data/local/tmp/synthmark'
++                          ' >> /data/local/tmp/bench_result'
++                          % os.getenv('TEST_MODE'))
++
++        # Pull result from the device
++        out_dir = bench_config.bench_suite_dir
++        result_dir_DUT = '/data/local/tmp/bench_result'
++
++        host.get_file(result_dir_DUT, out_dir, delete_dest=True)
++        print('Result has been pulled back to file bench_result!')
+diff --git a/server/site_tests/android_Synthmark/bench_config.py b/server/site_tests/android_Synthmark/bench_config.py
+new file mode 100644
+index 000000000..7d7aacacd
+--- /dev/null
++++ b/server/site_tests/android_Synthmark/bench_config.py
+@@ -0,0 +1,19 @@
++#!/usr/bin/python
++import os
++
++home = os.environ["HOME"]
++
++android_home = os.getenv("ANDROID_HOME",
++                         default=os.path.join(home,
++                                 'android_source/master-googleplex/'))
++bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
++                            default=os.path.join(android_home,
++                                                 'benchtoolchain'))
++
++synthmark_dir = 'synthmark'
++
++real_synthmark_dir = os.path.join(android_home, synthmark_dir)
++
++out_dir = os.path.join(android_home, 'out')
++
++product = os.getenv("PRODUCT", default="generic")
+diff --git a/server/site_tests/android_Synthmark/control b/server/site_tests/android_Synthmark/control
+new file mode 100644
+index 000000000..144766351
+--- /dev/null
++++ b/server/site_tests/android_Synthmark/control
+@@ -0,0 +1,19 @@
++#Control
++
++NAME = "Synthmark"
++AUTHOR = "Zhizhou Yang"
++ATTRIBUTES = "suite:android_toolchain_benchmark"
++TIME = "MEDIUM"
++TEST_CATEGORY = "Functional"
++TEST_CLASS = "application"
++TEST_TYPE = "server"
++
++DOC = """
++
++"""
++
++def run_synthmark_test(machine):
++    host = hosts.create_host(machine)
++    job.run_test("android_Synthmark", host=host)
++
++parallel_simple(run_synthmark_test, machines)
+diff --git a/site_utils/set_device.py b/site_utils/set_device.py
+new file mode 100755
+index 000000000..abb8a8dcc
+--- /dev/null
++++ b/site_utils/set_device.py
+@@ -0,0 +1,110 @@
++#!/usr/bin/python
++from __future__ import print_function
++
++import argparse
++import common
++import logging
++import os
++import sys
++
++# Turn the logging level to INFO before importing other autotest code, to avoid
++# having failed import logging messages confuse the test_droid user.
++logging.basicConfig(level=logging.INFO)
++
++# Unfortunately, autotest depends on external packages for assorted
++# functionality regardless of whether or not it is needed in a particular
++# context.  Since we can't depend on people to import these utilities in any
++# principled way, we dynamically download code before any autotest imports.
++try:
++    import chromite.lib.terminal  # pylint: disable=unused-import
++    import django.http  # pylint: disable=unused-import
++except ImportError:
++    # Ensure the chromite site-package is installed.
++    import subprocess
++    build_externals_path = os.path.join(
++            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
++            'utils', 'build_externals.py')
++    subprocess.check_call([build_externals_path, '--names_to_check',
++                           'chromiterepo', 'django'])
++    # Restart the script so python now finds the autotest site-packages.
++    sys.exit(os.execv(__file__, sys.argv))
++
++from autotest_lib.client.common_lib import utils
++from autotest_lib.server.hosts import adb_host
++from autotest_lib.site_utils import test_runner_utils
++from autotest_lib.site_utils import tester_feedback
++
++def _parse_arguments_internal(argv):
++    """
++    Parse command line arguments
++
++    @param argv: argument list to parse
++
++    @returns:    tuple of parsed arguments and argv suitable for remote runs
++
++    @raises SystemExit if arguments are malformed, or required arguments
++            are not present.
++    """
++
++    parser = argparse.ArgumentParser(description='Set device cpu cores and '
++                                                 'frequency.')
++
++    parser.add_argument('-s', '--serials', metavar='SERIALS',
++                        help='Comma-separated list of device serials under '
++                             'test.')
++    parser.add_argument('-r', '--remote', metavar='REMOTE',
++                        default='localhost',
++                        help='hostname[:port] if the ADB device is connected '
++                             'to a remote machine. Ensure this workstation '
++                             'is configured for passwordless ssh access as '
++                             'users "root" or "adb"')
++    parser.add_argument('-q', '--frequency', type=int, default=960000,
++                        help='Specify the CPU frequency of the device, lower '
++                             'frequency will slow down the performance but '
++                             'reduce noise.')
++
++    return parser.parse_args(argv)
++
++def main(argv):
++    """
++    Entry point for set_device script.
++
++    @param argv: arguments list
++    """
++    arguments = _parse_arguments_internal(argv)
++
++    serials = arguments.serials
++    if serials is None:
++        result = utils.run(['adb', 'devices'])
++        devices = adb_host.ADBHost.parse_device_serials(result.stdout)
++        if len(devices) != 1:
++            logging.error('Could not detect exactly one device; please select '
++                          'one with -s: %s', devices)
++            return 1
++        serials = devices[0]
++
++    autotest_path = os.path.dirname(os.path.dirname(
++            os.path.realpath(__file__)))
++    site_utils_path = os.path.join(autotest_path, 'site_utils')
++    realpath = os.path.realpath(__file__)
++    site_utils_path = os.path.realpath(site_utils_path)
++    host_attributes = {'serials': serials,
++                       'os_type': 'android'}
++    results_directory = test_runner_utils.create_results_directory(None)
++
++    logging.info('Start setting CPU frequency on the device...')
++
++    os.environ['FREQUENCY'] = str(arguments.frequency)
++
++    set_device = ['SetDevice']
++    if test_runner_utils.perform_run_from_autotest_root(
++                      autotest_path, argv, set_device, arguments.remote,
++                      host_attributes=host_attributes,
++                      results_directory=results_directory):
++      logging.error('Error while setting device!')
++      return 1
++
++    return 0
++
++if __name__ == '__main__':
++    sys.exit(main(sys.argv[1:]))
+diff --git a/site_utils/test_bench.py b/site_utils/test_bench.py
+new file mode 100755
+index 000000000..4d0773ad9
+--- /dev/null
++++ b/site_utils/test_bench.py
+@@ -0,0 +1,133 @@
++#!/usr/bin/python
++from __future__ import print_function
++
++import argparse
++import common
++import logging
++import os
++import sys
++
++# Turn the logging level to INFO before importing other autotest
++# code, to avoid having failed import logging messages confuse the
++# test_droid user.
++logging.basicConfig(level=logging.INFO)
++
++# Unfortunately, autotest depends on external packages for assorted
++# functionality regardless of whether or not it is needed in a particular
++# context.
++# Since we can't depend on people to import these utilities in any principled
++# way, we dynamically download code before any autotest imports.
++try:
++    import chromite.lib.terminal  # pylint: disable=unused-import
++    import django.http  # pylint: disable=unused-import
++except ImportError:
++    # Ensure the chromite site-package is installed.
++    import subprocess
++    build_externals_path = os.path.join(
++            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
++            'utils', 'build_externals.py')
++    subprocess.check_call([build_externals_path, '--names_to_check',
++                           'chromiterepo', 'django'])
++    # Restart the script so python now finds the autotest site-packages.
++    sys.exit(os.execv(__file__, sys.argv))
++
++from autotest_lib.client.common_lib import utils
++from autotest_lib.server.hosts import adb_host
++from autotest_lib.site_utils import test_runner_utils
++from autotest_lib.site_utils import tester_feedback
++
++def _parse_arguments_internal(argv):
++    """
++    Parse command line arguments
++
++    @param argv: argument list to parse
++
++    @returns:    tuple of parsed arguments and argv suitable for remote runs
++
++    @raises SystemExit if arguments are malformed, or required arguments
++            are not present.
++    """
++
++    parser = argparse.ArgumentParser(description='Run remote tests.')
++
++    parser.add_argument('-b', '--bench', metavar='BENCH', required=True,
++                        help='Select the benchmark want to be run for '
++                             'test.')
++    parser.add_argument('-s', '--serials', metavar='SERIALS',
++                        help='Comma-separated list of device serials under '
++                             'test.')
++    parser.add_argument('-r', '--remote', metavar='REMOTE',
++                        default='localhost',
++                        help='hostname[:port] if the ADB device is connected '
++                             'to a remote machine. Ensure this workstation '
++                             'is configured for passwordless ssh access as '
++                             'users "root" or "adb"')
++    parser.add_argument('-m', '--mode', default='little',
++                        help='Two modes can be chosen, little mode runs on a '
++                             'single core of Cortex-A53, while big mode runs '
++                             'on single core of Cortex-A57.')
++
++    return parser.parse_args(argv)
++
++def main(argv):
++    """
++    Entry point for test_bench script.
++
++    @param argv: arguments list
++    """
++    arguments = _parse_arguments_internal(argv)
++
++    serials = arguments.serials
++    if serials is None:
++        result = utils.run(['adb', 'devices'])
++        devices = adb_host.ADBHost.parse_device_serials(result.stdout)
++        if len(devices) != 1:
++            logging.error('Could not detect exactly one device; please select '
++                          'one with -s: %s', devices)
++            return 1
++        serials = devices[0]
++
++    autotest_path = os.path.dirname(os.path.dirname(
++            os.path.realpath(__file__)))
++    site_utils_path = os.path.join(autotest_path, 'site_utils')
++    realpath = os.path.realpath(__file__)
++    site_utils_path = os.path.realpath(site_utils_path)
++    host_attributes = {'serials': serials,
++                       'os_type': 'android'}
++    results_directory = test_runner_utils.create_results_directory(None)
++
++    bench = arguments.bench
++
++    benchlist = ['Panorama', 'Skia', 'Dex2oat', 'Hwui', "Synthmark", "Binder"]
++
++    logging.info('Start testing benchmark on the device...')
++
++    if bench not in benchlist:
++        logging.error('Please select one benchmark from the list below: \n%s',
++                      '\n'.join(benchlist))
++        return 1
++
++    # Use taskset command to run benchmarks with different CPU core settings.
++    #
++    # TEST_MODE variable is set to either 7 or 56 for coremask in taskset.
++    #
++    # While Nexus 6P has 8 cores and 5X has 6 cores. CPU number 0-3 in both
++    # devices belongs to Cortex 53, which are slow. CPU number 4-5 in 5X and 4-7
++    # in 6P belongs to Cortex 57, which are fast.
++    #
++    # So we set 7(0x00000111) for little mode, that runs the benchmark on three
++    # slow cores; 56(0x00111000) for big mode, that runs the benchmark on two
++    # fast and one slow cores.
++    os.environ['TEST_MODE'] = '7' if arguments.mode == 'little' else '56'
++
++    tests = [bench]
++
++    if test_runner_utils.perform_run_from_autotest_root(
++                      autotest_path, argv, tests, arguments.remote,
++                      host_attributes=host_attributes,
++                      results_directory=results_directory):
++      logging.error('Error while testing on device.')
++      return 1
++
++if __name__ == '__main__':
++    sys.exit(main(sys.argv[1:]))
diff --git a/android_bench_suite/build_bench.py b/android_bench_suite/build_bench.py
new file mode 100755
index 0000000..44ad7a0
--- /dev/null
+++ b/android_bench_suite/build_bench.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# pylint: disable=cros-logging-import
+
+"""Script to build the benchmark locally with toolchain settings."""
+from __future__ import print_function
+
+import argparse
+import config
+import logging
+import os
+import subprocess
+import sys
+
+# Turn the logging level to INFO before importing other code, to avoid having
+# failed import logging messages confuse the user.
+logging.basicConfig(level=logging.INFO)
+
+
+def _parse_arguments_internal(argv):
+  parser = argparse.ArgumentParser(description='Build benchmarks with '
+                                   'specified toolchain settings')
+
+  parser.add_argument(
+      '-b', '--bench', required=True, help='Select the benchmark to be built.')
+
+  parser.add_argument(
+      '-c',
+      '--compiler_dir',
+      metavar='DIR',
+      help='Specify the path to the compiler bin '
+      'directory.')
+
+  parser.add_argument(
+      '-o', '--build_os', help='Specify the host OS to build benchmark.')
+
+  parser.add_argument(
+      '-l',
+      '--llvm_prebuilts_version',
+      help='Specify the version of prebuilt LLVM.')
+
+  parser.add_argument(
+      '-f',
+      '--cflags',
+      help='Specify the optimization cflags for '
+      'the toolchain.')
+
+  parser.add_argument(
+      '--ldflags', help='Specify linker flags for the toolchain.')
+
+  return parser.parse_args(argv)
+
+
+# Set flags for compiling benchmarks, by changing the local
+# CFLAGS/LDFLAGS in the android makefile of each benchmark
+def set_flags(bench, cflags, ldflags):
+  if not cflags:
+    logging.info('No CFLAGS specified, using default settings.')
+    cflags = ''
+  else:
+    logging.info('Cflags setting to "%s"...', cflags)
+
+  if not ldflags:
+    logging.info('No LDFLAGS specified, using default settings.')
+    ldflags = ''
+  else:
+    logging.info('Ldflags setting to "%s"...', ldflags)
+
+  add_flags = config.bench_flags_dict[bench]
+  add_flags(cflags, ldflags)
+  logging.info('Flags set successfully!')
+
+
+def set_build_os(build_os):
+  # Set $BUILD_OS variable for android makefile
+  if build_os:
+    os.environ['BUILD_OS'] = build_os
+    logging.info('BUILD_OS set to "%s"...', build_os)
+  else:
+    logging.info('No BUILD_OS specified, using linux as default...')
+
+
+def set_llvm_prebuilts_version(llvm_prebuilts_version):
+  # Set $LLVM_PREBUILTS_VERSION for android makefile
+  if llvm_prebuilts_version:
+    os.environ['LLVM_PREBUILTS_VERSION'] = llvm_prebuilts_version
+    logging.info('LLVM_PREBUILTS_VERSION set to "%s"...',
+                 llvm_prebuilts_version)
+  else:
+    logging.info('No LLVM_PREBUILTS_VERSION specified, using default one...')
+
+
+def set_compiler(compiler):
+  # If compiler_dir has been specified, copy the binaries to
+  # a temporary location, set BUILD_OS and LLVM_PREBUILTS_VERSION
+  # variables to the location
+  if compiler:
+    # Report error if path not exits
+    if not os.path.isdir(compiler):
+      logging.error('Error while setting compiler: '
+                    'Directory %s does not exist!', compiler)
+      raise OSError('Directory %s not exist.' % compiler)
+
+    # Specify temporary directory for compiler
+    tmp_dir = os.path.join(config.android_home,
+                           'prebuilts/clang/host/linux-x86', 'clang-tmp')
+
+    compiler_content = os.path.join(compiler, '.')
+
+    # Copy compiler to new directory
+    try:
+      subprocess.check_call(['cp', '-rf', compiler_content, tmp_dir])
+    except subprocess.CalledProcessError:
+      logging.error('Error while copying the compiler to '
+                    'temporary directory %s!', tmp_dir)
+      raise
+
+    # Set environment variable
+    os.environ['LLVM_PREBUILTS_VERSION'] = 'clang-tmp'
+
+    logging.info('Prebuilt Compiler set as %s.', os.path.abspath(compiler))
+
+
+def set_compiler_env(bench, compiler, build_os, llvm_prebuilts_version, cflags,
+                     ldflags):
+  logging.info('Setting compiler options for benchmark...')
+
+  # If no specific prebuilt compiler directory, use BUILD_OS and
+  # LLVM_PREBUILTS_VERSION to set the compiler version.
+  # Otherwise, use the new prebuilt compiler.
+  if not compiler:
+    set_build_os(build_os)
+    set_llvm_prebuilts_version(llvm_prebuilts_version)
+  else:
+    set_compiler(compiler)
+
+  set_flags(bench, cflags, ldflags)
+
+  return 0
+
+
+def remove_tmp_dir():
+  tmp_dir = os.path.join(config.android_home, 'prebuilts/clang/host/linux-x86',
+                         'clang-tmp')
+
+  try:
+    subprocess.check_call(['rm', '-r', tmp_dir])
+  except subprocess.CalledProcessError:
+    logging.error('Error while removing the temporary '
+                  'compiler directory %s!', tmp_dir)
+    raise
+
+
+# Recover the makefile/blueprint from our patch after building
+def restore_makefile(bench):
+  pwd = os.path.join(config.android_home, config.bench_dict[bench])
+  mk_file = os.path.join(pwd, 'Android.mk')
+  if not os.path.exists(mk_file):
+    mk_file = os.path.join(pwd, 'Android.bp')
+  subprocess.check_call(['mv', os.path.join(pwd, 'tmp_makefile'), mk_file])
+
+
+# Run script to build benchmark
+def build_bench(bench, source_dir):
+  logging.info('Start building benchmark...')
+
+  raw_cmd = ('cd {android_home} '
+             '&& source build/envsetup.sh '
+             '&& lunch {product_combo} '
+             '&& mmma {source_dir} -j48'.format(
+                 android_home=config.android_home,
+                 product_combo=config.product_combo,
+                 source_dir=source_dir))
+
+  log_file = os.path.join(config.bench_suite_dir, 'build_log')
+  with open(log_file, 'a') as logfile:
+    log_head = 'Log for building benchmark: %s\n' % (bench)
+    logfile.write(log_head)
+    try:
+      subprocess.check_call(
+          ['bash', '-c', raw_cmd], stdout=logfile, stderr=logfile)
+    except subprocess.CalledProcessError:
+      logging.error('Error while running %s, please check '
+                    '%s for more info.', raw_cmd, log_file)
+      restore_makefile(bench)
+      raise
+
+  logging.info('Logs for building benchmark %s are written to %s.', bench,
+               log_file)
+  logging.info('Benchmark built successfully!')
+
+
+def main(argv):
+  arguments = _parse_arguments_internal(argv)
+
+  bench = arguments.bench
+  compiler = arguments.compiler_dir
+  build_os = arguments.build_os
+  llvm_version = arguments.llvm_prebuilts_version
+  cflags = arguments.cflags
+  ldflags = arguments.ldflags
+
+  try:
+    source_dir = config.bench_dict[bench]
+  except KeyError:
+    logging.error('Please select one benchmark from the list below:\n\t' +
+                  '\n\t'.join(config.bench_list))
+    raise
+
+  set_compiler_env(bench, compiler, build_os, llvm_version, cflags, ldflags)
+
+  build_bench(bench, source_dir)
+
+  # If flags has been set, remember to restore the makefile/blueprint to
+  # original ones.
+  restore_makefile(bench)
+
+  # If a tmp directory is used for compiler path, remove it after building.
+  if compiler:
+    remove_tmp_dir()
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/android_bench_suite/config.py b/android_bench_suite/config.py
new file mode 100644
index 0000000..4cfd261
--- /dev/null
+++ b/android_bench_suite/config.py
@@ -0,0 +1,94 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for the benchmark suite."""
+from __future__ import print_function
+
+import ConfigParser
+import os
+
+from parse_result import parse_Panorama
+from parse_result import parse_Dex2oat
+from parse_result import parse_Hwui
+from parse_result import parse_Skia
+from parse_result import parse_Synthmark
+from parse_result import parse_Binder
+
+from set_flags import add_flags_Panorama
+from set_flags import add_flags_Dex2oat
+from set_flags import add_flags_Hwui
+from set_flags import add_flags_Skia
+from set_flags import add_flags_Synthmark
+from set_flags import add_flags_Binder
+
+home = os.environ['HOME']
+
+# Load user configurations for default environments
+env_config = ConfigParser.ConfigParser(allow_no_value=True)
+env_config.read('env_setting')
+
+def get_suite_env(name, path=False):
+  variable = env_config.get('Suite_Environment', name)
+  if variable:
+    if path and not os.path.isdir(variable):
+      raise ValueError('The path of %s does not exist.' % name)
+    return variable
+  else:
+    raise ValueError('Please specify %s in env_setting' % name)
+
+# Android source code type: internal or aosp
+android_type = get_suite_env('android_type')
+
+# Android home directory specified as android_home,
+android_home = get_suite_env('android_home', True)
+
+# The benchmark results will be saved in bench_suite_dir.
+# Please create a directory to store the results, default directory is
+# android_home/benchtoolchain
+bench_suite_dir = get_suite_env('bench_suite_dir', True)
+
+# Crosperf directory is used to generate crosperf report.
+toolchain_utils = get_suite_env('toolchain_utils', True)
+
+# Please change both product and architecture at the same time.
+# Product can be chosen from the lunch list of android building.
+product_combo = get_suite_env('product_combo')
+
+# Arch can be found from out/target/product
+product = get_suite_env('product')
+
+# The benchmark list is kept in the following variables; you can extend it
+# by adding new benchmarks.
+bench_dict = {
+    'Panorama': 'packages/apps/LegacyCamera/jni/',
+    'Dex2oat': 'art/compiler/',
+    'Hwui': 'frameworks/base/libs/hwui/',
+    'Skia': 'external/skia/',
+    'Synthmark': 'synthmark/',
+    'Binder': 'frameworks/native/libs/binder/',
+}
+
+bench_parser_dict = {
+    'Panorama': parse_Panorama,
+    'Dex2oat': parse_Dex2oat,
+    'Hwui': parse_Hwui,
+    'Skia': parse_Skia,
+    'Synthmark': parse_Synthmark,
+    'Binder': parse_Binder,
+}
+
+bench_flags_dict = {
+    'Panorama': add_flags_Panorama,
+    'Dex2oat': add_flags_Dex2oat,
+    'Hwui': add_flags_Hwui,
+    'Skia': add_flags_Skia,
+    'Synthmark': add_flags_Synthmark,
+    'Binder': add_flags_Binder,
+}
+
+bench_list = bench_dict.keys()
+
+# Directories used in the benchmark suite
+autotest_dir = 'external/autotest/'
+out_dir = os.path.join(android_home, 'out')
diff --git a/android_bench_suite/devrel/Android.mk b/android_bench_suite/devrel/Android.mk
new file mode 100644
index 0000000..3970857
--- /dev/null
+++ b/android_bench_suite/devrel/Android.mk
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := tests
+LOCAL_C_INCLUDES := $(LOCAL_PATH)/source
+LOCAL_SRC_FILES:= apps/synthmark.cpp
+LOCAL_CFLAGS += -g -std=c++11 -Ofast
+LOCAL_CFLAGS += $(CFLAGS_FOR_BENCH_SUITE)
+LOCAL_LDFLAGS += $(LDFLAGS_FOR_BENCH_SUITE)
+#LOCAL_SHARED_LIBRARIES := libcutils libutils
+LOCAL_MODULE := synthmark
+include $(BUILD_EXECUTABLE)
diff --git a/android_bench_suite/dex2oat_input/test1.apk b/android_bench_suite/dex2oat_input/test1.apk
new file mode 100644
index 0000000..16cc638
--- /dev/null
+++ b/android_bench_suite/dex2oat_input/test1.apk
Binary files differ
diff --git a/android_bench_suite/dex2oat_input/test2.apk b/android_bench_suite/dex2oat_input/test2.apk
new file mode 100644
index 0000000..4dc0aa0
--- /dev/null
+++ b/android_bench_suite/dex2oat_input/test2.apk
Binary files differ
diff --git a/android_bench_suite/discard_patches.py b/android_bench_suite/discard_patches.py
new file mode 100755
index 0000000..39d7572
--- /dev/null
+++ b/android_bench_suite/discard_patches.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script to discard all the patches added to Android for this suite"""
+
+from __future__ import print_function
+
+import config
+import os
+import subprocess
+
+
+def discard_git(path):
+  try:
+    subprocess.check_call(['git', '-C', path, 'reset'])
+    subprocess.check_call(['git', '-C', path, 'clean', '-fdx'])
+    subprocess.check_call(['git', '-C', path, 'stash'])
+    print('Patch in %s removed successfully!' % path)
+  except subprocess.CalledProcessError:
+    print('Error while removing patch in %s' % path)
+
+
+def dispatch_skia():
+  skia_dir = os.path.join(config.android_home, config.bench_dict['Skia'])
+  discard_git(skia_dir)
+
+
+def dispatch_autotest():
+  autotest_dir = os.path.join(config.android_home, config.autotest_dir)
+  discard_git(autotest_dir)
+
+
+def dispatch_panorama():
+  panorama_dir = os.path.join(config.android_home,
+                              config.bench_dict['Panorama'])
+  discard_git(panorama_dir)
+
+
+def dispatch_synthmark():
+  synthmark_dir = 'synthmark'
+  try:
+    subprocess.check_call(
+        ['rm', '-rf',
+         os.path.join(config.android_home, synthmark_dir)])
+    subprocess.check_call(['rm', '-rf', 'devrel/tools'])
+    subprocess.check_call(['rm', '-rf', 'devrel/.repo'])
+    print('Synthmark patch removed successfully!')
+  except subprocess.CalledProcessError:
+    print('Synthmark is not removed. Error occurred.')
+
+
+def main():
+  dispatch_skia()
+  dispatch_autotest()
+  dispatch_panorama()
+  dispatch_synthmark()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/android_bench_suite/env_setting b/android_bench_suite/env_setting
new file mode 100644
index 0000000..397888b
--- /dev/null
+++ b/android_bench_suite/env_setting
@@ -0,0 +1,31 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+[Suite_Environment]
+
+# Please indicate the android source type: internal or aosp
+
+android_type = aosp
+
+# This is basically the root of your android sources.
+
+android_home = /work/android/aosp
+
+# This is the directory where you want to put the logs and results to.
+# Please create the directory before specifying it here.
+
+bench_suite_dir = /work/android/bench_suite_dir
+
+# Crosperf is a toolchain utility that helps generate an HTML report of the test.
+# Please specify the path to it here.
+
+toolchain_utils = /chromium_source/src/third_party/toolchain-utils/crosperf
+
+# Product_combo and product are the types of image you want to build.
+# The product_combo list can be found with 'lunch' instruction.
+# The product is usually the middle part of the combo name, also can be found in
+# $ANDROID_HOME/out/target/product/
+
+product_combo = aosp_bullhead-userdebug
+product = bullhead
diff --git a/android_bench_suite/fix_json.py b/android_bench_suite/fix_json.py
new file mode 100755
index 0000000..cf94dd6
--- /dev/null
+++ b/android_bench_suite/fix_json.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# pylint: disable=cros-logging-import
+
+"""Script to re-format json result to one with branch_name and build_id"""
+from __future__ import print_function
+
+import argparse
+import config
+import json
+import logging
+import os
+import subprocess
+import sys
+
+# Turn the logging level to INFO before importing other autotest
+# code, to avoid having failed import logging messages confuse the
+# test_droid user.
+logging.basicConfig(level=logging.INFO)
+
+
+def _parse_arguments_internal(argv):
+  parser = argparse.ArgumentParser(description='Convert result to JSON'
+                                   'format')
+  parser.add_argument(
+      '-b', '--bench', help='Generate JSON format file for which benchmark.')
+  return parser.parse_args(argv)
+
+def fix_json(bench):
+  # Set environment variable for crosperf
+  os.environ['PYTHONPATH'] = os.path.dirname(config.toolchain_utils)
+
+  logging.info('Generating Crosperf Report...')
+  json_path = os.path.join(config.bench_suite_dir, bench + '_refined')
+  crosperf_cmd = [
+      os.path.join(config.toolchain_utils, 'generate_report.py'), '--json',
+      '-i=' + os.path.join(config.bench_suite_dir, bench + '.json'),
+      '-o=' + json_path, '-f'
+  ]
+
+  # Run crosperf generate_report.py
+  logging.info('Command: %s', crosperf_cmd)
+  subprocess.call(crosperf_cmd)
+
+  json_path += '.json'
+  with open(json_path) as fout:
+    objs = json.load(fout)
+  for obj in objs:
+    obj['branch_name'] = 'aosp/master'
+    obj['build_id'] = 0
+  with open(json_path, 'w') as fout:
+    json.dump(objs, fout)
+
+  logging.info('JSON file fixed successfully!')
+
+def main(argv):
+  arguments = _parse_arguments_internal(argv)
+
+  bench = arguments.bench
+
+  fix_json(bench)
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/android_bench_suite/fix_skia_results.py b/android_bench_suite/fix_skia_results.py
new file mode 100755
index 0000000..6eec6cc
--- /dev/null
+++ b/android_bench_suite/fix_skia_results.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# pylint: disable=cros-logging-import
+"""Transforms skia benchmark results to ones that crosperf can understand."""
+
+from __future__ import print_function
+
+import itertools
+import logging
+import json
+import sys
+
+# Turn the logging level to INFO before importing other autotest
+# code, to avoid having failed import logging messages confuse the
+# test_droid user.
+logging.basicConfig(level=logging.INFO)
+
+# All of the results we care about, by name.
+# Each of these *must* end in _ns, _us, _ms, or _s, since all the metrics we
+# collect (so far) are related to time, and we alter the results based on the
+# suffix of these strings (so we don't have 0.000421ms per sample, for example)
+_RESULT_RENAMES = {
+    'memset32_100000_640_480_nonrendering': 'memset_time_ms',
+    'path_equality_50%_640_480_nonrendering': 'path_equality_ns',
+    'sort_qsort_backward_640_480_nonrendering': 'qsort_us'
+}
+
+
+def _GetFamiliarName(name):
+  r = _RESULT_RENAMES[name]
+  return r if r else name
+
+
+def _IsResultInteresting(name):
+  return name in _RESULT_RENAMES
+
+
+def _GetTimeMultiplier(label_name):
+  """Given a time (in milliseconds), normalize it to what label_name expects.
+
+  "What label_name expects" meaning "we pattern match against the last few
+  non-space chars in label_name."
+
+  This expects the time unit to be separated from anything else by '_'.
+  """
+  ms_mul = 1000 * 1000.
+  endings = [('_ns', 1), ('_us', 1000), ('_ms', ms_mul), ('_s', ms_mul * 1000)]
+  for end, mul in endings:
+    if label_name.endswith(end):
+      return ms_mul / mul
+  raise ValueError('Unknown ending in "%s"; expecting one of %s' %
+                   (label_name, [end for end, _ in endings]))
+
+
+def _GetTimeDenom(ms):
+  """Given a list of times (in milliseconds), find a sane time unit for them.
+
+  Returns the unit name, and `ms` normalized to that time unit.
+
+  >>> _GetTimeDenom([1, 2, 3])
+  ('ms', [1.0, 2.0, 3.0])
+  >>> _GetTimeDenom([.1, .2, .3])
+  ('us', [100.0, 200.0, 300.0])
+  """
+
+  ms_mul = 1000 * 1000
+  units = [('us', 1000), ('ms', ms_mul), ('s', ms_mul * 1000)]
+  for name, mul in reversed(units):
+    normalized = [float(t) * ms_mul / mul for t in ms]
+    average = sum(normalized) / len(normalized)
+    if all(n > 0.1 for n in normalized) and average >= 1:
+      return name, normalized
+
+  normalized = [float(t) * ms_mul for t in ms]
+  return 'ns', normalized
+
+
+def _TransformBenchmarks(raw_benchmarks):
+  # We get {"results": {"bench_name": Results}}
+  # where
+  #   Results = {"config_name": {"samples": [float], etc.}}
+  #
+  # We want {"data": {"skia": [[BenchmarkData]]},
+  #          "platforms": ["platform1, ..."]}
+  # where
+  #   BenchmarkData = {"bench_name": bench_samples[N], ..., "retval": 0}
+  #
+  # Note that retval is awkward -- crosperf's JSON reporter reports the result
+  # as a failure if it's not there. Everything else treats it like a
+  # statistic...
+  benchmarks = raw_benchmarks['results']
+  results = []
+  for bench_name, bench_result in benchmarks.iteritems():
+    try:
+      for cfg_name, keyvals in bench_result.iteritems():
+        # Some benchmarks won't have timing data (either it won't exist at all,
+        # or it'll be empty); skip them.
+        samples = keyvals.get('samples')
+        if not samples:
+          continue
+
+        bench_name = '%s_%s' % (bench_name, cfg_name)
+        if not _IsResultInteresting(bench_name):
+          continue
+
+        friendly_name = _GetFamiliarName(bench_name)
+        if len(results) < len(samples):
+          results.extend({
+              'retval': 0
+          } for _ in xrange(len(samples) - len(results)))
+
+        time_mul = _GetTimeMultiplier(friendly_name)
+        for sample, app in itertools.izip(samples, results):
+          assert friendly_name not in app
+          app[friendly_name] = sample * time_mul
+    except (KeyError, ValueError) as e:
+      logging.error('While converting "%s" (key: %s): %s',
+                    bench_result, bench_name, e.message)
+      raise
+
+  # Realistically, [results] should be multiple results, where each entry in the
+  # list is the result for a different label. Because we only deal with one
+  # label at the moment, we need to wrap it in its own list.
+  return results
+
+
+if __name__ == '__main__':
+
+  def _GetUserFile(argv):
+    if not argv or argv[0] == '-':
+      return sys.stdin
+    return open(argv[0])
+
+  def _Main():
+    with _GetUserFile(sys.argv[1:]) as in_file:
+      obj = json.load(in_file)
+    output = _TransformBenchmarks(obj)
+    json.dump(output, sys.stdout)
+
+  _Main()
diff --git a/android_bench_suite/gen_json.py b/android_bench_suite/gen_json.py
new file mode 100755
index 0000000..ad617ff
--- /dev/null
+++ b/android_bench_suite/gen_json.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# pylint: disable=cros-logging-import
+
+"""Script to help generate json format report from raw data."""
+from __future__ import print_function
+
+import argparse
+import config
+import json
+import logging
+import sys
+
+# Turn the logging level to INFO before importing other autotest
+# code, to avoid having failed import logging messages confuse the
+# test_droid user.
+logging.basicConfig(level=logging.INFO)
+
+
+def _parse_arguments_internal(argv):
+  parser = argparse.ArgumentParser(description='Convert result to JSON'
+                                   'format')
+
+  parser.add_argument(
+      '-b', '--bench', help='Generate JSON format file for which benchmark.')
+
+  parser.add_argument(
+      '-i', '--input', help='Specify the input result file name.')
+
+  parser.add_argument(
+      '-o', '--output', help='Specify the output JSON format result file')
+
+  parser.add_argument(
+      '-p',
+      '--platform',
+      help='Indicate the platform(experiment or device) name '
+      'to be shown in JSON')
+
+  parser.add_argument(
+      '--iterations',
+      type=int,
+      help='How many iterations does the result include.')
+  return parser.parse_args(argv)
+
+# Collect data and generate JSON {} tuple from benchmark result
+def collect_data(infile, bench, it):
+  result_dict = {}
+  with open(infile + str(it)) as fin:
+    if bench not in config.bench_parser_dict:
+      logging.error('Please input the correct benchmark name.')
+      raise ValueError('Wrong benchmark name: %s' % bench)
+    parse = config.bench_parser_dict[bench]
+    result_dict = parse(bench, fin)
+  return result_dict
+
+# If there is no original output file, create a new one and init it.
+def create_outfile(outfile, bench):
+  with open(outfile, 'w') as fout:
+    obj_null = {'data': {bench.lower(): []}, 'platforms': []}
+    json.dump(obj_null, fout)
+
+# Seek the original output file and try to add new result into it.
+def get_outfile(outfile, bench):
+  try:
+    return open(outfile)
+  except IOError:
+    create_outfile(outfile, bench)
+    return open(outfile)
+
+def main(argv):
+  arguments = _parse_arguments_internal(argv)
+
+  bench = arguments.bench
+  infile = arguments.input
+  outfile = arguments.output
+  platform = arguments.platform
+  iteration = arguments.iterations
+
+  result = []
+  for i in xrange(iteration):
+    result += collect_data(infile, bench, i)
+
+  with get_outfile(outfile, bench) as fout:
+    obj = json.load(fout)
+  obj['platforms'].append(platform)
+  obj['data'][bench.lower()].append(result)
+  with open(outfile, 'w') as fout:
+    json.dump(obj, fout)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/android_bench_suite/panorama.diff b/android_bench_suite/panorama.diff
new file mode 100644
index 0000000..fcd214e
--- /dev/null
+++ b/android_bench_suite/panorama.diff
@@ -0,0 +1,763 @@
+diff --git a/jni/Android.mk b/jni/Android.mk
+index 8b816270..d48e0d8e 100755
+--- a/jni/Android.mk
++++ b/jni/Android.mk
+@@ -1,50 +1,53 @@
++local_target_dir := $(TARGET_OUT_DATA)/local/tmp
++
+ LOCAL_PATH:= $(call my-dir)
+ 
+ include $(CLEAR_VARS)
+ 
+ LOCAL_C_INCLUDES := \
+-        $(LOCAL_PATH)/feature_stab/db_vlvm \
+-        $(LOCAL_PATH)/feature_stab/src \
+-        $(LOCAL_PATH)/feature_stab/src/dbreg \
+-        $(LOCAL_PATH)/feature_mos/src \
+-        $(LOCAL_PATH)/feature_mos/src/mosaic
+-
+-LOCAL_CFLAGS := -O3 -DNDEBUG
+-LOCAL_CPPFLAGS := -std=gnu++98
+-
+-LOCAL_SRC_FILES := \
+-        feature_mos_jni.cpp \
+-        mosaic_renderer_jni.cpp \
+-        feature_mos/src/mosaic/trsMatrix.cpp \
+-        feature_mos/src/mosaic/AlignFeatures.cpp \
+-        feature_mos/src/mosaic/Blend.cpp \
+-        feature_mos/src/mosaic/Delaunay.cpp \
+-        feature_mos/src/mosaic/ImageUtils.cpp \
+-        feature_mos/src/mosaic/Mosaic.cpp \
+-        feature_mos/src/mosaic/Pyramid.cpp \
+-        feature_mos/src/mosaic_renderer/Renderer.cpp \
+-        feature_mos/src/mosaic_renderer/WarpRenderer.cpp \
+-        feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp \
+-        feature_mos/src/mosaic_renderer/YVURenderer.cpp \
+-        feature_mos/src/mosaic_renderer/FrameBuffer.cpp \
+-        feature_stab/db_vlvm/db_feature_detection.cpp \
+-        feature_stab/db_vlvm/db_feature_matching.cpp \
+-        feature_stab/db_vlvm/db_framestitching.cpp \
+-        feature_stab/db_vlvm/db_image_homography.cpp \
+-        feature_stab/db_vlvm/db_rob_image_homography.cpp \
+-        feature_stab/db_vlvm/db_utilities.cpp \
+-        feature_stab/db_vlvm/db_utilities_camera.cpp \
+-        feature_stab/db_vlvm/db_utilities_indexing.cpp \
+-        feature_stab/db_vlvm/db_utilities_linalg.cpp \
+-        feature_stab/db_vlvm/db_utilities_poly.cpp \
+-        feature_stab/src/dbreg/dbreg.cpp \
+-        feature_stab/src/dbreg/dbstabsmooth.cpp \
+-        feature_stab/src/dbreg/vp_motionmodel.c
++    $(LOCAL_PATH)/feature_mos/src \
++    $(LOCAL_PATH)/feature_stab/src \
++    $(LOCAL_PATH)/feature_stab/db_vlvm
+ 
+-LOCAL_SHARED_LIBRARIES := liblog libnativehelper libGLESv2
+-#LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -ldl -llog -lGLESv2 -L$(TARGET_OUT)
++LOCAL_SRC_FILES := benchmark.cpp \
++	feature_mos/src/mosaic/ImageUtils.cpp \
++    feature_mos/src/mosaic/Mosaic.cpp \
++    feature_mos/src/mosaic/AlignFeatures.cpp \
++    feature_mos/src/mosaic/Blend.cpp \
++    feature_mos/src/mosaic/Pyramid.cpp \
++    feature_mos/src/mosaic/trsMatrix.cpp \
++    feature_mos/src/mosaic/Delaunay.cpp \
++    feature_mos/src/mosaic_renderer/Renderer.cpp \
++    feature_mos/src/mosaic_renderer/WarpRenderer.cpp \
++    feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp \
++    feature_mos/src/mosaic_renderer/YVURenderer.cpp \
++    feature_mos/src/mosaic_renderer/FrameBuffer.cpp \
++    feature_stab/db_vlvm/db_rob_image_homography.cpp \
++    feature_stab/db_vlvm/db_feature_detection.cpp \
++    feature_stab/db_vlvm/db_image_homography.cpp \
++    feature_stab/db_vlvm/db_framestitching.cpp \
++    feature_stab/db_vlvm/db_feature_matching.cpp \
++    feature_stab/db_vlvm/db_utilities.cpp \
++    feature_stab/db_vlvm/db_utilities_camera.cpp \
++    feature_stab/db_vlvm/db_utilities_indexing.cpp \
++    feature_stab/db_vlvm/db_utilities_linalg.cpp \
++    feature_stab/db_vlvm/db_utilities_poly.cpp \
++    feature_stab/src/dbreg/dbstabsmooth.cpp \
++    feature_stab/src/dbreg/dbreg.cpp \
++    feature_stab/src/dbreg/vp_motionmodel.c
+ 
+-LOCAL_MODULE_TAGS := optional
++LOCAL_CFLAGS := -O3 -DNDEBUG -Wno-unused-parameter -Wno-maybe-uninitialized
++LOCAL_CFLAGS += $(CFLAGS_FOR_BENCH_SUITE)
++LOCAL_LDFLAGS := $(LDFLAGS_FOR_BENCH_SUITE)
++LOCAL_CPPFLAGS := -std=c++98
++LOCAL_MODULE_TAGS := tests
++LOCAL_MODULE := panorama_bench
++LOCAL_MODULE_STEM_32 := panorama_bench
++LOCAL_MODULE_STEM_64 := panorama_bench64
++LOCAL_MULTILIB := both
++LOCAL_MODULE_PATH := $(local_target_dir)
++LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
++LOCAL_FORCE_STATIC_EXECUTABLE := true
++LOCAL_STATIC_LIBRARIES := libc libm
+ 
+-LOCAL_MODULE    := libjni_legacymosaic
+-include $(BUILD_SHARED_LIBRARY)
++include $(BUILD_EXECUTABLE)
+diff --git a/jni/benchmark.cpp b/jni/benchmark.cpp
+new file mode 100755
+index 00000000..2a6440f4
+--- /dev/null
++++ b/jni/benchmark.cpp
+@@ -0,0 +1,131 @@
++/*
++ * Copyright (C) 2012 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#include <time.h>
++#include <sys/types.h>
++#include <sys/stat.h>
++#include <unistd.h>
++
++#include "mosaic/Mosaic.h"
++#include "mosaic/ImageUtils.h"
++
++#define MAX_FRAMES 200
++#define KERNEL_ITERATIONS 10
++
++const int blendingType = Blend::BLEND_TYPE_HORZ;
++const int stripType = Blend::STRIP_TYPE_WIDE;
++
++ImageType yvuFrames[MAX_FRAMES];
++
++int loadImages(const char* basename, int &width, int &height)
++{
++    char filename[512];
++    struct stat filestat;
++    int i;
++
++    for (i = 0; i < MAX_FRAMES; i++) {
++        sprintf(filename, "%s_%03d.ppm", basename, i + 1);
++        if (stat(filename, &filestat) != 0) break;
++        ImageType rgbFrame = ImageUtils::readBinaryPPM(filename, width, height);
++        yvuFrames[i] = ImageUtils::allocateImage(width, height,
++                                ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
++        ImageUtils::rgb2yvu(yvuFrames[i], rgbFrame, width, height);
++        ImageUtils::freeImage(rgbFrame);
++    }
++    return i;
++}
++
++int main(int argc, char **argv)
++{
++    struct timespec t1, t2, t3;
++
++    int width, height;
++    float totalElapsedTime = 0;
++
++    const char *basename;
++    const char *filename;
++
++    if (argc != 3) {
++        printf("Usage: %s input_dir output_filename\n", argv[0]);
++        return 0;
++    } else {
++        basename = argv[1];
++        filename = argv[2];
++    }
++
++    // Load the images outside the computational kernel
++    int totalFrames = loadImages(basename, width, height);
++
++    if (totalFrames == 0) {
++        printf("Image files not found. Make sure %s exists.\n",
++               basename);
++        return 1;
++    }
++
++    printf("%d frames loaded\n", totalFrames);
++
++
++    // Interesting stuff is here
++    for (int iteration = 0; iteration < KERNEL_ITERATIONS; iteration++)  {
++        Mosaic mosaic;
++
++        mosaic.initialize(blendingType, stripType, width, height, -1, false, 0);
++
++        clock_gettime(CLOCK_MONOTONIC, &t1);
++        for (int i = 0; i < totalFrames; i++) {
++            mosaic.addFrame(yvuFrames[i]);
++        }
++        clock_gettime(CLOCK_MONOTONIC, &t2);
++
++        float progress = 0.0;
++        bool cancelComputation = false;
++
++        mosaic.createMosaic(progress, cancelComputation);
++
++        int mosaicWidth, mosaicHeight;
++        ImageType resultYVU = mosaic.getMosaic(mosaicWidth, mosaicHeight);
++
++        ImageType imageRGB = ImageUtils::allocateImage(
++            mosaicWidth, mosaicHeight, ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
++
++        clock_gettime(CLOCK_MONOTONIC, &t3);
++
++        float elapsedTime =
++            (t3.tv_sec - t1.tv_sec) + (t3.tv_nsec - t1.tv_nsec)/1e9;
++        float addImageTime =
++            (t2.tv_sec - t1.tv_sec) + (t2.tv_nsec - t1.tv_nsec)/1e9;
++        float stitchImageTime =
++            (t3.tv_sec - t2.tv_sec) + (t3.tv_nsec - t2.tv_nsec)/1e9;
++
++        totalElapsedTime += elapsedTime;
++
++        printf("Iteration %d: %dx%d moasic created: "
++               "%.2f seconds (%.2f + %.2f)\n",
++               iteration, mosaicWidth, mosaicHeight,
++               elapsedTime, addImageTime, stitchImageTime);
++
++        // Write the output only once for correctness check
++        if (iteration == 0) {
++            ImageUtils::yvu2rgb(imageRGB, resultYVU, mosaicWidth,
++                                mosaicHeight);
++            ImageUtils::writeBinaryPPM(imageRGB, filename, mosaicWidth,
++                                       mosaicHeight);
++        }
++    }
++    printf("Total elapsed time: %.2f seconds\n", totalElapsedTime);
++
++    return 0;
++}
+diff --git a/jni/feature_mos/src/mosaic/AlignFeatures.cpp b/jni/feature_mos/src/mosaic/AlignFeatures.cpp
+index aeabf8f9..703a5ea5 100644
+--- a/jni/feature_mos/src/mosaic/AlignFeatures.cpp
++++ b/jni/feature_mos/src/mosaic/AlignFeatures.cpp
+@@ -30,6 +30,8 @@
+ 
+ #define LOG_TAG "AlignFeatures"
+ 
++const double Align::DEFAULT_MAX_DISPARITY = 0.1;
++
+ Align::Align()
+ {
+   width = height = 0;
+diff --git a/jni/feature_mos/src/mosaic/AlignFeatures.h b/jni/feature_mos/src/mosaic/AlignFeatures.h
+index 19f39051..9999f575 100644
+--- a/jni/feature_mos/src/mosaic/AlignFeatures.h
++++ b/jni/feature_mos/src/mosaic/AlignFeatures.h
+@@ -44,7 +44,7 @@ public:
+   ///// Settings for feature-based alignment
+   // Number of features to use from corner detection
+   static const int DEFAULT_NR_CORNERS=750;
+-  static const double DEFAULT_MAX_DISPARITY=0.1;//0.4;
++  static const double DEFAULT_MAX_DISPARITY;//0.4;
+   // Type of homography to model
+   static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_R_T;
+ // static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_PROJECTIVE;
+diff --git a/jni/feature_mos/src/mosaic/Blend.cpp b/jni/feature_mos/src/mosaic/Blend.cpp
+index e37755de..b6a843a2 100644
+--- a/jni/feature_mos/src/mosaic/Blend.cpp
++++ b/jni/feature_mos/src/mosaic/Blend.cpp
+@@ -26,8 +26,8 @@
+ #include "Geometry.h"
+ #include "trsMatrix.h"
+ 
+-#include "Log.h"
+-#define LOG_TAG "BLEND"
++const float Blend::LIMIT_SIZE_MULTIPLIER = 50.f * 2.0f;
++const float Blend::LIMIT_HEIGHT_MULTIPLIER = 2.5f;
+ 
+ Blend::Blend()
+ {
+@@ -67,7 +67,6 @@ int Blend::initialize(int blendingType, int stripType, int frame_width, int fram
+ 
+     if (!m_pFrameYPyr || !m_pFrameUPyr || !m_pFrameVPyr)
+     {
+-        LOGE("Error: Could not allocate pyramids for blending");
+         return BLEND_RET_ERROR_MEMORY;
+     }
+ 
+@@ -122,7 +121,6 @@ int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
+ 
+     if (numCenters == 0)
+     {
+-        LOGE("Error: No frames to blend");
+         return BLEND_RET_ERROR;
+     }
+ 
+@@ -228,9 +226,6 @@ int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
+ 
+     if (xRightMost <= xLeftMost || yBottomMost <= yTopMost)
+     {
+-        LOGE("RunBlend: aborting -consistency check failed,"
+-             "(xLeftMost, xRightMost, yTopMost, yBottomMost): (%d, %d, %d, %d)",
+-             xLeftMost, xRightMost, yTopMost, yBottomMost);
+         return BLEND_RET_ERROR;
+     }
+ 
+@@ -241,17 +236,12 @@ int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
+     ret = MosaicSizeCheck(LIMIT_SIZE_MULTIPLIER, LIMIT_HEIGHT_MULTIPLIER);
+     if (ret != BLEND_RET_OK)
+     {
+-       LOGE("RunBlend: aborting - mosaic size check failed, "
+-            "(frame_width, frame_height) vs (mosaic_width, mosaic_height): "
+-            "(%d, %d) vs (%d, %d)", width, height, Mwidth, Mheight);
+        return ret;
+     }
+ 
+-    LOGI("Allocate mosaic image for blending - size: %d x %d", Mwidth, Mheight);
+     YUVinfo *imgMos = YUVinfo::allocateImage(Mwidth, Mheight);
+     if (imgMos == NULL)
+     {
+-        LOGE("RunBlend: aborting - couldn't alloc %d x %d mosaic image", Mwidth, Mheight);
+         return BLEND_RET_ERROR_MEMORY;
+     }
+ 
+@@ -362,7 +352,6 @@ int Blend::FillFramePyramid(MosaicFrame *mb)
+             !PyramidShort::BorderReduce(m_pFrameUPyr, m_wb.nlevsC) || !PyramidShort::BorderExpand(m_pFrameUPyr, m_wb.nlevsC, -1) ||
+             !PyramidShort::BorderReduce(m_pFrameVPyr, m_wb.nlevsC) || !PyramidShort::BorderExpand(m_pFrameVPyr, m_wb.nlevsC, -1))
+     {
+-        LOGE("Error: Could not generate Laplacian pyramids");
+         return BLEND_RET_ERROR;
+     }
+     else
+@@ -384,7 +373,6 @@ int Blend::DoMergeAndBlend(MosaicFrame **frames, int nsite,
+     m_pMosaicVPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevsC,(unsigned short)rect.Width(),(unsigned short)rect.Height(),BORDER);
+     if (!m_pMosaicYPyr || !m_pMosaicUPyr || !m_pMosaicVPyr)
+     {
+-      LOGE("Error: Could not allocate pyramids for blending");
+       return BLEND_RET_ERROR_MEMORY;
+     }
+ 
+@@ -579,6 +567,11 @@ int Blend::DoMergeAndBlend(MosaicFrame **frames, int nsite,
+     // Blend
+     PerformFinalBlending(imgMos, cropping_rect);
+ 
++    if (cropping_rect.Width() <= 0 || cropping_rect.Height() <= 0)
++    {
++        return BLEND_RET_ERROR;
++    }
++
+     if (m_pMosaicVPyr) free(m_pMosaicVPyr);
+     if (m_pMosaicUPyr) free(m_pMosaicUPyr);
+     if (m_pMosaicYPyr) free(m_pMosaicYPyr);
+@@ -632,7 +625,6 @@ int Blend::PerformFinalBlending(YUVinfo &imgMos, MosaicRect &cropping_rect)
+     if (!PyramidShort::BorderExpand(m_pMosaicYPyr, m_wb.nlevs, 1) || !PyramidShort::BorderExpand(m_pMosaicUPyr, m_wb.nlevsC, 1) ||
+         !PyramidShort::BorderExpand(m_pMosaicVPyr, m_wb.nlevsC, 1))
+     {
+-      LOGE("Error: Could not BorderExpand!");
+       return BLEND_RET_ERROR;
+     }
+ 
+@@ -785,18 +777,31 @@ int Blend::PerformFinalBlending(YUVinfo &imgMos, MosaicRect &cropping_rect)
+                 break;
+             }
+         }
++
+     }
+ 
++    RoundingCroppingSizeToMultipleOf8(cropping_rect);
++
+     for(int j=0; j<imgMos.Y.height; j++)
+     {
+         delete b[j];
+     }
+ 
+-    delete b;
++    delete[] b;
+ 
+     return BLEND_RET_OK;
+ }
+ 
++void Blend::RoundingCroppingSizeToMultipleOf8(MosaicRect &rect) {
++    int height = rect.bottom - rect.top + 1;
++    int residue = height & 7;
++    rect.bottom -= residue;
++
++    int width = rect.right - rect.left + 1;
++    residue = width & 7;
++    rect.right -= residue;
++}
++
+ void Blend::ComputeMask(CSite *csite, BlendRect &vcrect, BlendRect &brect, MosaicRect &rect, YUVinfo &imgMos, int site_idx)
+ {
+     PyramidShort *dptr = m_pMosaicYPyr;
+diff --git a/jni/feature_mos/src/mosaic/Blend.h b/jni/feature_mos/src/mosaic/Blend.h
+index 6371fdeb..175eacd4 100644
+--- a/jni/feature_mos/src/mosaic/Blend.h
++++ b/jni/feature_mos/src/mosaic/Blend.h
+@@ -119,9 +119,10 @@ protected:
+   void CropFinalMosaic(YUVinfo &imgMos, MosaicRect &cropping_rect);
+ 
+ private:
+-   static const float LIMIT_SIZE_MULTIPLIER = 5.0f * 2.0f;
+-   static const float LIMIT_HEIGHT_MULTIPLIER = 2.5f;
++   static const float LIMIT_SIZE_MULTIPLIER;
++   static const float LIMIT_HEIGHT_MULTIPLIER;
+    int MosaicSizeCheck(float sizeMultiplier, float heightMultiplier);
++   void RoundingCroppingSizeToMultipleOf8(MosaicRect& rect);
+ };
+ 
+ #endif
+diff --git a/jni/feature_mos/src/mosaic/Delaunay.cpp b/jni/feature_mos/src/mosaic/Delaunay.cpp
+index 82f5d203..0ce09fc5 100644
+--- a/jni/feature_mos/src/mosaic/Delaunay.cpp
++++ b/jni/feature_mos/src/mosaic/Delaunay.cpp
+@@ -24,7 +24,7 @@
+ 
+ #define QQ 9   // Optimal value as determined by testing
+ #define DM 38  // 2^(1+DM/2) element sort capability. DM=38 for >10^6 elements
+-#define NYL (-1)
++#define NYL -1
+ #define valid(l) ccw(orig(basel), dest(l), dest(basel))
+ 
+ 
+diff --git a/jni/feature_mos/src/mosaic/ImageUtils.cpp b/jni/feature_mos/src/mosaic/ImageUtils.cpp
+index 6d0aac0c..daa86060 100644
+--- a/jni/feature_mos/src/mosaic/ImageUtils.cpp
++++ b/jni/feature_mos/src/mosaic/ImageUtils.cpp
+@@ -283,7 +283,7 @@ ImageType ImageUtils::readBinaryPPM(const char *filename, int &width, int &heigh
+ 
+   FILE *imgin = NULL;
+   int mval=0, format=0, eret;
+-  ImageType ret = IMAGE_TYPE_NOIMAGE;
++  ImageType ret = NULL;//IMAGE_TYPE_NOIMAGE;
+ 
+   imgin = fopen(filename, "r");
+   if (imgin == NULL) {
+diff --git a/jni/feature_mos/src/mosaic/ImageUtils.h b/jni/feature_mos/src/mosaic/ImageUtils.h
+index 87782383..92965ca8 100644
+--- a/jni/feature_mos/src/mosaic/ImageUtils.h
++++ b/jni/feature_mos/src/mosaic/ImageUtils.h
+@@ -47,7 +47,7 @@ public:
+   /**
+    *  Definition of an empty image.
+    */
+-  static const int IMAGE_TYPE_NOIMAGE = NULL;
++  static const int IMAGE_TYPE_NOIMAGE = 0;
+ 
+   /**
+    *  Convert image from BGR (interlaced) to YVU (non-interlaced)
+diff --git a/jni/feature_mos/src/mosaic/Log.h b/jni/feature_mos/src/mosaic/Log.h
+index cf6f14b1..2adfeda9 100644
+--- a/jni/feature_mos/src/mosaic/Log.h
++++ b/jni/feature_mos/src/mosaic/Log.h
+@@ -14,7 +14,7 @@
+  * limitations under the License.
+  */
+ #ifndef LOG_H_
+-#define LOG_H_
++#define LOG_H_
+ 
+ #include <android/log.h>
+ #define LOGV(...) __android_log_print(ANDROID_LOG_SILENT, LOG_TAG, __VA_ARGS__)
+diff --git a/jni/feature_mos/src/mosaic/Mosaic.cpp b/jni/feature_mos/src/mosaic/Mosaic.cpp
+index f17c030b..4abc6f68 100644
+--- a/jni/feature_mos/src/mosaic/Mosaic.cpp
++++ b/jni/feature_mos/src/mosaic/Mosaic.cpp
+@@ -26,9 +26,6 @@
+ #include "Mosaic.h"
+ #include "trsMatrix.h"
+ 
+-#include "Log.h"
+-#define LOG_TAG "MOSAIC"
+-
+ Mosaic::Mosaic()
+ {
+     initialized = false;
+@@ -47,6 +44,10 @@ Mosaic::~Mosaic()
+     delete frames;
+     delete rframes;
+ 
++    for (int j = 0; j < owned_size; j++)
++        delete owned_frames[j];
++    delete owned_frames;
++
+     if (aligner != NULL)
+         delete aligner;
+     if (blender != NULL)
+@@ -88,13 +89,10 @@ int Mosaic::initialize(int blendingType, int stripType, int width, int height, i
+         {
+             frames[i] = NULL;
+         }
+-
+-
+     }
+ 
+-    LOGV("Initialize %d %d", width, height);
+-    LOGV("Frame width %d,%d", width, height);
+-    LOGV("Max num frames %d", max_frames);
++    owned_frames = new ImageType[max_frames];
++    owned_size = 0;
+ 
+     aligner = new Align();
+     aligner->initialize(width, height,quarter_res,thresh_still);
+@@ -107,7 +105,6 @@ int Mosaic::initialize(int blendingType, int stripType, int width, int height, i
+         blender->initialize(blendingType, stripType, width, height);
+     } else {
+         blender = NULL;
+-        LOGE("Error: Unknown blending type %d",blendingType);
+         return MOSAIC_RET_ERROR;
+     }
+ 
+@@ -123,7 +120,15 @@ int Mosaic::addFrameRGB(ImageType imageRGB)
+     imageYVU = ImageUtils::allocateImage(this->width, this->height, ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
+     ImageUtils::rgb2yvu(imageYVU, imageRGB, width, height);
+ 
+-    return addFrame(imageYVU);
++    int existing_frames_size = frames_size;
++    int ret = addFrame(imageYVU);
++
++    if (frames_size > existing_frames_size)
++        owned_frames[owned_size++] = imageYVU;
++    else
++        ImageUtils::freeImage(imageYVU);
++
++    return ret;
+ }
+ 
+ int Mosaic::addFrame(ImageType imageYVU)
+@@ -146,8 +151,6 @@ int Mosaic::addFrame(ImageType imageYVU)
+ 
+         if (frames_size >= max_frames)
+         {
+-            LOGV("WARNING: More frames than preallocated, ignoring."
+-                 "Increase maximum number of frames (-f <max_frames>) to avoid this");
+             return MOSAIC_RET_ERROR;
+         }
+ 
+diff --git a/jni/feature_mos/src/mosaic/Mosaic.h b/jni/feature_mos/src/mosaic/Mosaic.h
+index fc6ecd90..9dea6642 100644
+--- a/jni/feature_mos/src/mosaic/Mosaic.h
++++ b/jni/feature_mos/src/mosaic/Mosaic.h
+@@ -181,6 +181,12 @@ protected:
+   int frames_size;
+   int max_frames;
+ 
++  /**
++    * Implicitly created frames, should be freed by Mosaic.
++    */
++  ImageType *owned_frames;
++  int owned_size;
++
+   /**
+    * Initialization state.
+    */
+diff --git a/jni/feature_mos/src/mosaic/Pyramid.cpp b/jni/feature_mos/src/mosaic/Pyramid.cpp
+index 945eafba..b022d73d 100644
+--- a/jni/feature_mos/src/mosaic/Pyramid.cpp
++++ b/jni/feature_mos/src/mosaic/Pyramid.cpp
+@@ -154,24 +154,30 @@ void PyramidShort::BorderExpandOdd(PyramidShort *in, PyramidShort *out, PyramidS
+     // Vertical Filter
+     for (j = -off; j < in->height + off; j++) {
+         int j2 = j * 2;
+-        for (i = -scr->border; i < scr->width + scr->border; i++) {
++        int limit = scr->width + scr->border;
++        for (i = -scr->border; i < limit; i++) {
++            int t1 = in->ptr[j][i];
++            int t2 = in->ptr[j+1][i];
+             scr->ptr[j2][i] = (short)
+-                ((6 * in->ptr[j][i] + (in->ptr[j-1][i] + in->ptr[j+1][i]) + 4) >> 3);
+-            scr->ptr[j2+1][i] = (short)((in->ptr[j][i] + in->ptr[j+1][i] + 1) >> 1);
++                ((6 * t1 + (in->ptr[j-1][i] + t2) + 4) >> 3);
++            scr->ptr[j2+1][i] = (short)((t1 + t2 + 1) >> 1);
+         }
+     }
+ 
+     BorderSpread(scr, 0, 0, 3, 3);
+ 
+     // Horizontal Filter
+-    for (i = -off; i < scr->width + off; i++) {
+-        int i2 = i * 2;
+-        for (j = -out->border; j < out->height + out->border; j++) {
++    int limit = out->height + out->border;
++    for (j = -out->border; j < limit; j++) {
++        for (i = -off; i < scr->width + off; i++) {
++            int i2 = i * 2;
++            int t1 = scr->ptr[j][i];
++            int t2 = scr->ptr[j][i+1];
+             out->ptr[j][i2] = (short) (out->ptr[j][i2] +
+-                    (mode * ((6 * scr->ptr[j][i] +
+-                              scr->ptr[j][i-1] + scr->ptr[j][i+1] + 4) >> 3)));
++                    (mode * ((6 * t1 +
++                              scr->ptr[j][i-1] + t2 + 4) >> 3)));
+             out->ptr[j][i2+1] = (short) (out->ptr[j][i2+1] +
+-                    (mode * ((scr->ptr[j][i] + scr->ptr[j][i+1] + 1) >> 1)));
++                    (mode * ((t1 + t2 + 1) >> 1)));
+         }
+     }
+ 
+diff --git a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
+index 9a07e496..a956f23b 100755
+--- a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
++++ b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
+@@ -55,6 +55,8 @@ bool FrameBuffer::Init(int width, int height, GLenum format) {
+                            GL_TEXTURE_2D,
+                            mTextureName,
+                            0);
++    checkFramebufferStatus("FrameBuffer.cpp");
++    checkGlError("framebuffertexture2d");
+ 
+     if (!checkGlError("texture setup")) {
+         return false;
+@@ -94,6 +96,3 @@ int FrameBuffer::GetWidth() const {
+ int FrameBuffer::GetHeight() const {
+     return mHeight;
+ }
+-
+-
+-
+diff --git a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h
+index b6a20ad1..314b1262 100755
+--- a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h
++++ b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h
+@@ -4,7 +4,10 @@
+ #include <GLES2/gl2.h>
+ #include <GLES2/gl2ext.h>
+ 
+-extern bool checkGlError(const char* op);
++#define checkGlError(op)  checkGLErrorDetail(__FILE__, __LINE__, (op))
++
++extern bool checkGLErrorDetail(const char* file, int line, const char* op);
++extern void checkFramebufferStatus(const char* name);
+ 
+ class FrameBuffer {
+   public:
+diff --git a/jni/feature_mos/src/mosaic_renderer/Renderer.cpp b/jni/feature_mos/src/mosaic_renderer/Renderer.cpp
+index c5c143f9..b9938eb6 100755
+--- a/jni/feature_mos/src/mosaic_renderer/Renderer.cpp
++++ b/jni/feature_mos/src/mosaic_renderer/Renderer.cpp
+@@ -87,7 +87,7 @@ GLuint Renderer::createProgram(const char* pVertexSource, const char* pFragmentS
+         GLint linkStatus = GL_FALSE;
+         glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ 
+-        LOGI("Program Linked!");
++        LOGI("Program Linked (%d)!", program);
+ 
+         if (linkStatus != GL_TRUE)
+         {
+diff --git a/jni/feature_stab/db_vlvm/db_utilities_constants.h b/jni/feature_stab/db_vlvm/db_utilities_constants.h
+index 612fc783..07565efd 100644
+--- a/jni/feature_stab/db_vlvm/db_utilities_constants.h
++++ b/jni/feature_stab/db_vlvm/db_utilities_constants.h
+@@ -64,7 +64,7 @@
+ #define DB_DEFAULT_ABS_CORNER_THRESHOLD 50000000.0
+ #define DB_DEFAULT_REL_CORNER_THRESHOLD 0.00005
+ #define DB_DEFAULT_MAX_DISPARITY 0.1
+-#define DB_DEFAULT_NO_DISPARITY (-1.0)
++#define DB_DEFAULT_NO_DISPARITY -1.0
+ #define DB_DEFAULT_MAX_TRACK_LENGTH 300
+ 
+ #define DB_DEFAULT_MAX_NR_CAMERAS 1000
+diff --git a/jni/feature_stab/src/dbreg/dbreg.cpp b/jni/feature_stab/src/dbreg/dbreg.cpp
+index dc7d58fe..da06aa2a 100644
+--- a/jni/feature_stab/src/dbreg/dbreg.cpp
++++ b/jni/feature_stab/src/dbreg/dbreg.cpp
+@@ -485,7 +485,8 @@ int db_FrameToReferenceRegistration::AddFrame(const unsigned char * const * im,
+   if(m_do_motion_smoothing)
+     SmoothMotion();
+ 
+-   db_PrintDoubleMatrix(m_H_ref_to_ins,3,3);
++   // Disable debug printing
++   // db_PrintDoubleMatrix(m_H_ref_to_ins,3,3);
+ 
+   db_Copy9(H, m_H_ref_to_ins);
+ 
+diff --git a/jni/feature_stab/src/dbreg/dbstabsmooth.cpp b/jni/feature_stab/src/dbreg/dbstabsmooth.cpp
+index dffff8ab..2bb5d2e5 100644
+--- a/jni/feature_stab/src/dbreg/dbstabsmooth.cpp
++++ b/jni/feature_stab/src/dbreg/dbstabsmooth.cpp
+@@ -136,7 +136,7 @@ bool db_StabilizationSmoother::smoothMotionAdaptive(/*VP_BIMG *bimg,*/int hsize,
+             smoothFactor = minSmoothFactor;
+ 
+         // Find the amount of motion that must be compensated so that no "border" pixels are seen in the stable video
+-        for (smoothFactor = smoothFactor; smoothFactor >= minSmoothFactor; smoothFactor -= 0.01) {
++        for (; smoothFactor >= minSmoothFactor; smoothFactor -= 0.01) {
+             // Compute the smoothed motion
+             if(!smoothMotion(inmot, &tmpMotion, smoothFactor))
+                 break;
+diff --git a/jni/feature_stab/src/dbreg/vp_motionmodel.h b/jni/feature_stab/src/dbreg/vp_motionmodel.h
+index 71a7f7e7..a63ac001 100644
+--- a/jni/feature_stab/src/dbreg/vp_motionmodel.h
++++ b/jni/feature_stab/src/dbreg/vp_motionmodel.h
+@@ -120,7 +120,7 @@ enum VP_MOTION_MODEL {
+   VP_VFE_AFFINE=120
+ };
+ 
+-#define VP_REFID (-1)   /* Default ID used for reference frame */
++#define VP_REFID -1   /* Default ID used for reference frame */
+ 
+ typedef struct {
+   VP_TRS par;            /* Contains the motion paramerers.
+@@ -205,16 +205,16 @@ typedef struct {
+ /* Warp a 2d point (assuming the z component is zero) */
+ #define VP_WARP_POINT_2D(inx,iny,m,outx,outy) do {\
+   VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWW(m); \
+-  (outx) = (MXX(m)*((VP_PAR)(inx))+MXY(m)*((VP_PAR)(iny))+MXW(m))/vpTmpWarpPnt___; \
+-  (outy) = (MYX(m)*((VP_PAR)(inx))+MYY(m)*((VP_PAR)(iny))+MYW(m))/vpTmpWarpPnt___; } while (0)
++  outx = (MXX(m)*((VP_PAR)inx)+MXY(m)*((VP_PAR)iny)+MXW(m))/vpTmpWarpPnt___; \
++  outy = (MYX(m)*((VP_PAR)inx)+MYY(m)*((VP_PAR)iny)+MYW(m))/vpTmpWarpPnt___; } while (0)
+ 
+ /* Warp a 3d point */
+ #define VP_WARP_POINT_3D(inx,iny,inz,m,outx,outy,outz) do {\
+-  VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWZ(m)*((VP_PAR)(inz))+MWW(m); \
+-  (outx) = (MXX(m)*((VP_PAR)(inx))+MXY(m)*((VP_PAR)(iny))+MXZ(m)*((VP_PAR)(inz))+MXW(m))/vpTmpWarpPnt___; \
+-  (outy) = (MYX(m)*((VP_PAR)(inx))+MYY(m)*((VP_PAR)(iny))+MYZ(m)*((VP_PAR)(inz))+MYW(m))/vpTmpWarpPnt___; \
+-  (outz) = MZX(m)*((VP_PAR)(inx))+MZY(m)*((VP_PAR)(iny))+MZZ(m)*((VP_PAR)(inz))+MZW(m); \
+-  if ((m).type==VP_MOTION_PROJ_3D) (outz)/=vpTmpWarpPnt___; } while (0)
++  VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWZ(m)*((VP_PAR)inz)+MWW(m); \
++  outx = (MXX(m)*((VP_PAR)inx)+MXY(m)*((VP_PAR)iny)+MXZ(m)*((VP_PAR)inz)+MXW(m))/vpTmpWarpPnt___; \
++  outy = (MYX(m)*((VP_PAR)inx)+MYY(m)*((VP_PAR)iny)+MYZ(m)*((VP_PAR)inz)+MYW(m))/vpTmpWarpPnt___; \
++  outz = MZX(m)*((VP_PAR)inx)+MZY(m)*((VP_PAR)iny)+MZZ(m)*((VP_PAR)inz)+MZW(m); \
++  if ((m).type==VP_MOTION_PROJ_3D) outz/=vpTmpWarpPnt___; } while (0)
+ 
+ /* Projections of each component */
+ #define VP_PROJW_3D(m,x,y,z,f)   ( MWX(m)*(x)+MWY(m)*(y)+MWZ(m)*(z)+MWW(m) )
+@@ -224,15 +224,15 @@ typedef struct {
+ 
+ /* Scale Down a matrix by Sfactor */
+ #define VP_SCALEDOWN(m,Sfactor) do { \
+-  MXW(m) /= (VP_PAR)(Sfactor); MWX(m) *= (VP_PAR)(Sfactor); \
+-  MYW(m) /= (VP_PAR)(Sfactor); MWY(m) *= (VP_PAR)(Sfactor); \
+-  MZW(m) /= (VP_PAR)(Sfactor); MWZ(m) *= (VP_PAR)(Sfactor); } while (0)
++  MXW(m) /= (VP_PAR)Sfactor; MWX(m) *= (VP_PAR)Sfactor; \
++  MYW(m) /= (VP_PAR)Sfactor; MWY(m) *= (VP_PAR)Sfactor; \
++  MZW(m) /= (VP_PAR)Sfactor; MWZ(m) *= (VP_PAR)Sfactor; } while (0)
+ 
+ /* Scale Up a matrix by Sfactor */
+ #define VP_SCALEUP(m,Sfactor) do { \
+-  MXW(m) *= (VP_PAR)(Sfactor); MWX(m) /= (VP_PAR)(Sfactor); \
+-  MYW(m) *= (VP_PAR)(Sfactor); MWY(m) /= (VP_PAR)(Sfactor); \
+-  MZW(m) *= (VP_PAR)(Sfactor); MWZ(m) /= (VP_PAR)(Sfactor); } while (0)
++  MXW(m) *= (VP_PAR)Sfactor; MWX(m) /= (VP_PAR)Sfactor; \
++  MYW(m) *= (VP_PAR)Sfactor; MWY(m) /= (VP_PAR)Sfactor; \
++  MZW(m) *= (VP_PAR)Sfactor; MWZ(m) /= (VP_PAR)Sfactor; } while (0)
+ 
+ /* Normalize the transformation matrix so that MWW is 1 */
+ #define VP_NORMALIZE(m) if (MWW(m)!=(VP_PAR)0.0) do { \
+@@ -253,15 +253,15 @@ typedef struct {
+ 
+ /* w' projection given a point x,y,0,f */
+ #define VP_PROJZ(m,x,y,f) ( \
+-    MWX(m)*((VP_PAR)(x))+MWY(m)*((VP_PAR)(y))+MWW(m)*((VP_PAR)(f)))
++    MWX(m)*((VP_PAR)x)+MWY(m)*((VP_PAR)y)+MWW(m)*((VP_PAR)f))
+ 
+ /* X Projection given a point x,y,0,f and w' */
+ #define VP_PROJX(m,x,y,w,f) (\
+-   (MXX(m)*((VP_PAR)(x))+MXY(m)*((VP_PAR)(y))+MXW(m)*((VP_PAR)(f)))/((VP_PAR)(w)))
++   (MXX(m)*((VP_PAR)x)+MXY(m)*((VP_PAR)y)+MXW(m)*((VP_PAR)f))/((VP_PAR)w))
+ 
+ /* Y Projection given a point x,y,0,f and the w' */
+ #define VP_PROJY(m,x,y,w,f) (\
+-  (MYX(m)*((VP_PAR)(x))+MYY(m)*((VP_PAR)(y))+MYW(m)*((VP_PAR)(f)))/((VP_PAR)(w)))
++  (MYX(m)*((VP_PAR)x)+MYY(m)*((VP_PAR)y)+MYW(m)*((VP_PAR)f))/((VP_PAR)w))
+ 
+ /* Set the reference id for a motion */
+ #define VP_SET_REFID(m,id) do { (m).refid=id; } while (0)
diff --git a/android_bench_suite/panorama_input/test_001.ppm b/android_bench_suite/panorama_input/test_001.ppm
new file mode 100644
index 0000000..e7218bf
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_001.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_002.ppm b/android_bench_suite/panorama_input/test_002.ppm
new file mode 100644
index 0000000..8975073
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_002.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_003.ppm b/android_bench_suite/panorama_input/test_003.ppm
new file mode 100644
index 0000000..58c9e34
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_003.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_004.ppm b/android_bench_suite/panorama_input/test_004.ppm
new file mode 100644
index 0000000..142c76b
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_004.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_005.ppm b/android_bench_suite/panorama_input/test_005.ppm
new file mode 100644
index 0000000..ff229d3
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_005.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_006.ppm b/android_bench_suite/panorama_input/test_006.ppm
new file mode 100644
index 0000000..2fc5c09
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_006.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_007.ppm b/android_bench_suite/panorama_input/test_007.ppm
new file mode 100644
index 0000000..d7f6a9a
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_007.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_008.ppm b/android_bench_suite/panorama_input/test_008.ppm
new file mode 100644
index 0000000..86d92b3
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_008.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_009.ppm b/android_bench_suite/panorama_input/test_009.ppm
new file mode 100644
index 0000000..72dd05f
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_009.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_010.ppm b/android_bench_suite/panorama_input/test_010.ppm
new file mode 100644
index 0000000..a09a054
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_010.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_011.ppm b/android_bench_suite/panorama_input/test_011.ppm
new file mode 100644
index 0000000..be7b61b
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_011.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_012.ppm b/android_bench_suite/panorama_input/test_012.ppm
new file mode 100644
index 0000000..67fad4a
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_012.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_013.ppm b/android_bench_suite/panorama_input/test_013.ppm
new file mode 100644
index 0000000..6d92fd1
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_013.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_014.ppm b/android_bench_suite/panorama_input/test_014.ppm
new file mode 100644
index 0000000..97aff41
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_014.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_015.ppm b/android_bench_suite/panorama_input/test_015.ppm
new file mode 100644
index 0000000..d1de251
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_015.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_016.ppm b/android_bench_suite/panorama_input/test_016.ppm
new file mode 100644
index 0000000..70ea1f5
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_016.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_017.ppm b/android_bench_suite/panorama_input/test_017.ppm
new file mode 100644
index 0000000..e075c9e
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_017.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_018.ppm b/android_bench_suite/panorama_input/test_018.ppm
new file mode 100644
index 0000000..adf023b
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_018.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_019.ppm b/android_bench_suite/panorama_input/test_019.ppm
new file mode 100644
index 0000000..1f27d1d
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_019.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_020.ppm b/android_bench_suite/panorama_input/test_020.ppm
new file mode 100644
index 0000000..fb95f52
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_020.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_021.ppm b/android_bench_suite/panorama_input/test_021.ppm
new file mode 100644
index 0000000..43baadf
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_021.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_022.ppm b/android_bench_suite/panorama_input/test_022.ppm
new file mode 100644
index 0000000..f928c83
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_022.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_023.ppm b/android_bench_suite/panorama_input/test_023.ppm
new file mode 100644
index 0000000..e21b275
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_023.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_024.ppm b/android_bench_suite/panorama_input/test_024.ppm
new file mode 100644
index 0000000..43ba0ba
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_024.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_025.ppm b/android_bench_suite/panorama_input/test_025.ppm
new file mode 100644
index 0000000..b9f8892
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_025.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_026.ppm b/android_bench_suite/panorama_input/test_026.ppm
new file mode 100644
index 0000000..201615f
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_026.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_027.ppm b/android_bench_suite/panorama_input/test_027.ppm
new file mode 100644
index 0000000..07cf426
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_027.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_028.ppm b/android_bench_suite/panorama_input/test_028.ppm
new file mode 100644
index 0000000..aedb023
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_028.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_029.ppm b/android_bench_suite/panorama_input/test_029.ppm
new file mode 100644
index 0000000..9a0d398
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_029.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_030.ppm b/android_bench_suite/panorama_input/test_030.ppm
new file mode 100644
index 0000000..26a8f53
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_030.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_031.ppm b/android_bench_suite/panorama_input/test_031.ppm
new file mode 100644
index 0000000..2300461
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_031.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_032.ppm b/android_bench_suite/panorama_input/test_032.ppm
new file mode 100644
index 0000000..f5e93f8
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_032.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_033.ppm b/android_bench_suite/panorama_input/test_033.ppm
new file mode 100644
index 0000000..c2f8ad9
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_033.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_034.ppm b/android_bench_suite/panorama_input/test_034.ppm
new file mode 100644
index 0000000..de93b23
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_034.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_035.ppm b/android_bench_suite/panorama_input/test_035.ppm
new file mode 100644
index 0000000..62198de
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_035.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_036.ppm b/android_bench_suite/panorama_input/test_036.ppm
new file mode 100644
index 0000000..bf252e4
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_036.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_037.ppm b/android_bench_suite/panorama_input/test_037.ppm
new file mode 100644
index 0000000..7cc7ace
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_037.ppm
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_038.ppm b/android_bench_suite/panorama_input/test_038.ppm
new file mode 100644
index 0000000..d44e1f1
--- /dev/null
+++ b/android_bench_suite/panorama_input/test_038.ppm
Binary files differ
diff --git a/android_bench_suite/parse_result.py b/android_bench_suite/parse_result.py
new file mode 100644
index 0000000..90b3c4d
--- /dev/null
+++ b/android_bench_suite/parse_result.py
@@ -0,0 +1,114 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper functions to parse result collected from device"""
+
+from __future__ import print_function
+from fix_skia_results import _TransformBenchmarks
+
+import json
+
+def normalize(bench, dict_list):
+  bench_base = {
+      'Panorama': 1,
+      'Dex2oat': 1,
+      'Hwui': 10000,
+      'Skia': 1,
+      'Synthmark': 1,
+      'Binder': 0.001
+  }
+  result_dict = dict_list[0]
+  for key in result_dict:
+    result_dict[key] = result_dict[key] / bench_base[bench]
+  return [result_dict]
+
+
+# Functions to parse benchmark result for data collection.
+def parse_Panorama(bench, fin):
+  result_dict = {}
+  for line in fin:
+    words = line.split()
+    if 'elapsed' in words:
+      #TODO: Need to restructure the embedded word counts.
+      result_dict['total_time_s'] = float(words[3])
+      result_dict['retval'] = 0
+      return normalize(bench, [result_dict])
+  raise ValueError('You passed the right type of thing, '
+                   'but it didn\'t have the expected contents.')
+
+
+def parse_Synthmark(bench, fin):
+  result_dict = {}
+  accum = 0
+  cnt = 0
+  for line in fin:
+    words = line.split()
+    if 'normalized' in words:
+      #TODO: Need to restructure the embedded word counts.
+      accum += float(words[-1])
+      cnt += 1
+  if accum != 0:
+    result_dict['total_voices'] = accum / cnt
+    result_dict['retval'] = 0
+    return normalize(bench, [result_dict])
+  raise ValueError('You passed the right type of thing, '
+                   'but it didn\'t have the expected contents.')
+
+
+def parse_Binder(bench, fin):
+  result_dict = {}
+  accum = 0
+  cnt = 0
+  for line in fin:
+    words = line.split()
+    for word in words:
+      if 'average' in word:
+        #TODO: Need to restructure the embedded word counts.
+        accum += float(word[8:-2])
+        cnt += 1
+  if accum != 0:
+    result_dict['avg_time_ms'] = accum / cnt
+    result_dict['retval'] = 0
+    return normalize(bench, [result_dict])
+  raise ValueError('You passed the right type of thing, '
+                   'but it didn\'t have the expected contents.')
+
+
+def parse_Dex2oat(bench, fin):
+  result_dict = {}
+  cnt = 0
+  for line in fin:
+    words = line.split()
+    if 'elapsed' in words:
+      cnt += 1
+      #TODO: Need to restructure the embedded word counts.
+      if cnt == 1:
+        # First 'elapsed' time is for microbench 'Chrome'
+        result_dict['chrome_s'] = float(words[3])
+      elif cnt == 2:
+        # Second 'elapsed' time is for microbench 'Camera'
+        result_dict['camera_s'] = float(words[3])
+
+        result_dict['retval'] = 0
+        # Two results found, return
+        return normalize(bench, [result_dict])
+  raise ValueError('You passed the right type of thing, '
+                   'but it didn\'t have the expected contents.')
+
+
+def parse_Hwui(bench, fin):
+  result_dict = {}
+  for line in fin:
+    words = line.split()
+    if 'elapsed' in words:
+      #TODO: Need to restructure the embedded word counts.
+      result_dict['total_time_s'] = float(words[3])
+      result_dict['retval'] = 0
+      return normalize(bench, [result_dict])
+  raise ValueError('You passed the right type of thing, '
+                   'but it didn\'t have the expected contents.')
+
+
+def parse_Skia(bench, fin):
+  obj = json.load(fin)
+  return normalize(bench, _TransformBenchmarks(obj))
diff --git a/android_bench_suite/run.py b/android_bench_suite/run.py
new file mode 100755
index 0000000..55acb66
--- /dev/null
+++ b/android_bench_suite/run.py
@@ -0,0 +1,481 @@
+#!/usr/bin/env python2
+#
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# pylint: disable=cros-logging-import
+
+# This is the script to run specified benchmark with different toolchain
+# settings. It includes the process of building benchmark locally and running
+# benchmark on DUT.
+
+"""Main script to run the benchmark suite from building to testing."""
+from __future__ import print_function
+
+import argparse
+import config
+import ConfigParser
+import logging
+import os
+import subprocess
+import sys
+
+logging.basicConfig(level=logging.INFO)
+
+def _parse_arguments(argv):
+  parser = argparse.ArgumentParser(description='Build and run specific '
+                                   'benchamrk')
+  parser.add_argument(
+      '-b',
+      '--bench',
+      action='append',
+      default=[],
+      help='Select which benchmark to run')
+
+  # Only one of compiler directory and llvm prebuilts version can be indicated
+  # at the beginning, so set -c and -l into a exclusive group.
+  group = parser.add_mutually_exclusive_group()
+
+  # The toolchain setting arguments has action of 'append', so that users
+  # could compare performance with several toolchain settings together.
+  group.add_argument(
+      '-c',
+      '--compiler_dir',
+      metavar='DIR',
+      action='append',
+      default=[],
+      help='Specify path to the compiler\'s bin directory. '
+      'You shall give several paths, each with a -c, to '
+      'compare performance differences in '
+      'each compiler.')
+
+  parser.add_argument(
+      '-o',
+      '--build_os',
+      action='append',
+      default=[],
+      help='Specify the host OS to build the benchmark.')
+
+  group.add_argument(
+      '-l',
+      '--llvm_prebuilts_version',
+      action='append',
+      default=[],
+      help='Specify the version of prebuilt LLVM. When '
+      'specific prebuilt version of LLVM already '
+      'exists, no need to pass the path to compiler '
+      'directory.')
+
+  parser.add_argument(
+      '-f',
+      '--cflags',
+      action='append',
+      default=[],
+      help='Specify the cflags options for the toolchain. '
+      'Be sure to quote all the cflags with quotation '
+      'mark("") or use equal(=).')
+  parser.add_argument(
+      '--ldflags',
+      action='append',
+      default=[],
+      help='Specify linker flags for the toolchain.')
+
+  parser.add_argument(
+      '-i',
+      '--iterations',
+      type=int,
+      default=1,
+      help='Specify how many iterations does the test '
+      'take.')
+
+  # Arguments -s and -r are for connecting to DUT.
+  parser.add_argument(
+      '-s',
+      '--serials',
+      help='Comma separate list of device serials under '
+      'test.')
+
+  parser.add_argument(
+      '-r',
+      '--remote',
+      default='localhost',
+      help='hostname[:port] if the ADB device is connected '
+      'to a remote machine. Ensure this workstation '
+      'is configured for passwordless ssh access as '
+      'users "root" or "adb"')
+
+  # Arguments -frequency and -m are for device settings
+  parser.add_argument(
+      '--frequency',
+      type=int,
+      default=960000,
+      help='Specify the CPU frequency of the device. The '
+      'unit is KHZ. The available value is defined in'
+      'cpufreq/scaling_available_frequency file in '
+      'device\'s each core directory. '
+      'The default value is 960000, which shows a '
+      'balance in noise and performance. Lower '
+      'frequency will slow down the performance but '
+      'reduce noise.')
+
+  parser.add_argument(
+      '-m',
+      '--mode',
+      default='little',
+      help='User can specify whether \'little\' or \'big\' '
+      'mode to use. The default one is little mode. '
+      'The little mode runs on a single core of '
+      'Cortex-A53, while big mode runs on single core '
+      'of Cortex-A57.')
+
+  # Configure file for benchmark test
+  parser.add_argument(
+      '-t',
+      '--test',
+      help='Specify the test settings with configuration '
+      'file.')
+
+  # Whether to keep old json result or not
+  parser.add_argument(
+      '-k',
+      '--keep',
+      default='False',
+      help='User can specify whether to keep the old json '
+      'results from last run. This can be useful if you '
+      'want to compare performance differences in two or '
+      'more different runs. Default is False(off).')
+
+  return parser.parse_args(argv)
+
+
+# Clear old log files in bench suite directory
+def clear_logs():
+  logging.info('Removing old logfiles...')
+  for f in ['build_log', 'device_log', 'test_log']:
+    logfile = os.path.join(config.bench_suite_dir, f)
+    try:
+      os.remove(logfile)
+    except OSError:
+      logging.info('No logfile %s need to be removed. Ignored.', f)
+  logging.info('Old logfiles been removed.')
+
+
+# Clear old json files in bench suite directory
+def clear_results():
+  logging.info('Clearing old json results...')
+  for bench in config.bench_list:
+    result = os.path.join(config.bench_suite_dir, bench + '.json')
+    try:
+      os.remove(result)
+    except OSError:
+      logging.info('no %s json file need to be removed. Ignored.', bench)
+  logging.info('Old json results been removed.')
+
+
+# Use subprocess.check_call to run other script, and put logs to files
+def check_call_with_log(cmd, log_file):
+  log_file = os.path.join(config.bench_suite_dir, log_file)
+  with open(log_file, 'a') as logfile:
+    log_header = 'Log for command: %s\n' % (cmd)
+    logfile.write(log_header)
+    try:
+      subprocess.check_call(cmd, stdout=logfile)
+    except subprocess.CalledProcessError:
+      logging.error('Error running %s, please check %s for more info.', cmd,
+                    log_file)
+      raise
+  logging.info('Logs for %s are written to %s.', cmd, log_file)
+
+
def set_device(serials, remote, frequency):
  """Invoke autotest's set_device.py to pin CPU mode/frequency on DUT(s)."""
  script = os.path.join(config.android_home, config.autotest_dir,
                        'site_utils/set_device.py')
  base_cmd = [script, '-r=' + remote, '-q=' + str(frequency)]

  # Deal with serials.
  # If no serials are specified, try to run on the only attached device;
  # otherwise run the setting script once per comma-separated serial.
  if serials:
    for serial in serials.split(','):
      check_call_with_log(base_cmd + ['-s=' + serial], 'device_log')
  else:
    check_call_with_log(base_cmd, 'device_log')

  logging.info('CPU mode and frequency set successfully!')
+
+
def log_ambiguous_args():
  """Log and raise when toolchain-setting argument counts disagree."""
  msg = 'The count of arguments does not match'
  logging.error(msg + '!')
  raise ValueError(msg + '.')
+
+
# Check that the counts of the building arguments are consistent.  The
# number of -c/-l, -f, --ldflags and -o values must be either 0 or all equal.
def check_count(compiler, llvm_version, build_os, cflags, ldflags):
  """Validate argument-list lengths and return the number of settings.

  Args:
    compiler: list of compiler bin directories (-c).
    llvm_version: list of prebuilt LLVM versions (-l).
    build_os: list of host OSes (-o).
    cflags: list of cflags strings (-f).
    ldflags: list of ldflags strings (--ldflags).

  Returns:
    The number of toolchain settings to build/test, at least 1.

  Raises:
    ValueError: (via log_ambiguous_args) when non-empty lists disagree.
  """
  # Count will be 0 if neither compiler nor llvm_version is specified;
  # -c and -l are mutually exclusive, so at most one list is non-empty.
  count = max(len(compiler), len(llvm_version))

  # Every other non-empty flag list must agree with the count seen so far.
  # (This replaces three copy-pasted stanzas in the original.)
  for flag_list in (cflags, ldflags, build_os):
    if flag_list:
      if count not in (0, len(flag_list)):
        log_ambiguous_args()
      count = len(flag_list)

  # If no settings are passed, only run default once.
  return max(1, count)
+
+
# Build benchmark binary with toolchain settings
def build_bench(setting_no, bench, compiler, llvm_version, build_os, cflags,
                ldflags):
  """Build the benchmark locally with the setting_no-th toolchain setting.

  Args:
    setting_no: index into the parallel settings lists below.
    bench: benchmark name to pass to build_bench.py.
    compiler/llvm_version/build_os/cflags/ldflags: parallel lists as parsed
      from the command line; only the entry at setting_no is used, and empty
      lists are skipped entirely.

  Raises:
    Whatever the build_bench.py invocation raised, after logging.
  """
  # Assemble the build_bench.py invocation for this toolchain setting.
  build_cmd = ['./build_bench.py', '-b=' + bench]
  if compiler:
    build_cmd.append('-c=' + compiler[setting_no])
  if llvm_version:
    build_cmd.append('-l=' + llvm_version[setting_no])
  if build_os:
    build_cmd.append('-o=' + build_os[setting_no])
  if cflags:
    build_cmd.append('-f=' + cflags[setting_no])
  if ldflags:
    build_cmd.append('--ldflags=' + ldflags[setting_no])

  logging.info('Building benchmark for toolchain setting No.%d...', setting_no)
  logging.info('Command: %s', build_cmd)

  try:
    subprocess.check_call(build_cmd)
  except Exception:
    # Was a bare 'except:', which would also swallow KeyboardInterrupt and
    # SystemExit; catch Exception instead, log, and re-raise.
    logging.error('Error while building benchmark!')
    raise
+
+
def run_and_collect_result(test_cmd, setting_no, i, bench, serial='default'):
  """Run one autotest invocation and archive its bench_result file.

  Args:
    test_cmd: fully assembled test_bench.py command list.
    setting_no: index of the toolchain setting being tested.
    i: iteration number (used in the archived file name).
    bench: benchmark name.
    serial: device serial; 'default' when only one device is attached.

  Raises:
    OSError: when autotest produced no bench_result file, or when the
      result file could not be renamed.
  """

  # Run autotest script for benchmark on DUT
  check_call_with_log(test_cmd, 'test_log')

  logging.info('Benchmark with setting No.%d, iter.%d finished testing on '
               'device %s.', setting_no, i, serial)

  # Rename results from the bench_result generated in autotest.  Renaming
  # also ensures a stale result cannot be mistaken for this run's output.
  bench_result = os.path.join(config.bench_suite_dir, 'bench_result')
  if not os.path.exists(bench_result):
    logging.error('No result found at %s, '
                  'please check test_log for details.', bench_result)
    raise OSError('Result file %s not found.' % bench_result)

  new_bench_result = 'bench_result_%s_%s_%d_%d' % (bench, serial, setting_no, i)
  new_bench_result_path = os.path.join(config.bench_suite_dir, new_bench_result)
  try:
    os.rename(bench_result, new_bench_result_path)
  except OSError:
    logging.error('Error while renaming raw result %s to %s', bench_result,
                  new_bench_result_path)
    raise

  logging.info('Benchmark result saved at %s.', new_bench_result_path)
+
+
def test_bench(bench, setting_no, iterations, serials, remote, mode):
  """Run the benchmark's autotest on the DUT(s) for the given iterations."""
  logging.info('Start running benchmark on device...')

  script = os.path.join(config.android_home, config.autotest_dir,
                        'site_utils/test_bench.py')
  for i in xrange(iterations):
    logging.info('Iteration No.%d:', i)
    base_cmd = [script, '-b=' + bench, '-r=' + remote, '-m=' + mode]

    # Deal with serials.
    # If no serials are specified, test the single attached device;
    # otherwise run the test once per comma-separated serial.
    if serials:
      for serial in serials.split(','):
        run_and_collect_result(base_cmd + ['-s=' + serial], setting_no, i,
                               bench, serial)
    else:
      run_and_collect_result(base_cmd, setting_no, i, bench)
+
+
def gen_json(bench, setting_no, iterations, serials):
  """Convert raw bench_result files into a Crosperf-readable JSON file.

  Args:
    bench: benchmark name.
    setting_no: toolchain setting index the results belong to.
    iterations: number of iterations that were run.
    serials: comma separated device serials, or falsy for a single device.
  """
  bench_result = os.path.join(config.bench_suite_dir, 'bench_result')

  logging.info('Generating JSON file for Crosperf...')

  if not serials:
    serials = 'default'

  for serial in serials.split(','):

    # Platform will be used as device lunch combo instead
    #experiment = '_'.join([serial, str(setting_no)])
    experiment = config.product_combo

    # Input format: bench_result_{bench}_{serial}_{setting_no}_
    input_file = '_'.join([bench_result, bench, serial, str(setting_no), ''])
    gen_json_cmd = [
        './gen_json.py', '--input=' + input_file,
        '--output=%s.json' % os.path.join(config.bench_suite_dir, bench),
        '--bench=' + bench, '--platform=' + experiment,
        '--iterations=' + str(iterations)
    ]

    logging.info('Command: %s', gen_json_cmd)
    if subprocess.call(gen_json_cmd):
      # Fixed missing space between adjacent literals, which previously
      # logged 'raw dataof the results'.
      logging.error('Error while generating JSON file, please check raw data '
                    'of the results at %s.', input_file)
+
+
def gen_crosperf(infile, outfile):
  """Generate an HTML Crosperf report from a benchmark JSON file.

  Args:
    infile: path to the benchmark's JSON results.
    outfile: report path prefix; generate_report.py appends '.html'.
  """
  # generate_report.py imports sibling modules from toolchain-utils, so its
  # parent directory must be on PYTHONPATH.
  os.environ['PYTHONPATH'] = os.path.dirname(config.toolchain_utils)

  logging.info('Generating Crosperf Report...')
  crosperf_cmd = [
      os.path.join(config.toolchain_utils, 'generate_report.py'),
      '-i=' + infile, '-o=' + outfile, '-f'
  ]

  # Run crosperf generate_report.py.
  # NOTE(review): the return code is not checked -- confirm whether a
  # failed report generation should abort the run.
  logging.info('Command: %s', crosperf_cmd)
  subprocess.call(crosperf_cmd)

  logging.info('Report generated successfully!')
  # Fixed missing space between adjacent literals, which previously logged
  # '... at benchsuite directory'; also use lazy % formatting.
  logging.info('Report Location: %s.html at bench suite directory.', outfile)
+
+
def main(argv):
  """Entry point: set up the environment, then build, test and report.

  Two modes: with -t, build/test settings come from an INI configuration
  file (one section per test); otherwise they come from the repeatable
  command line flags.

  Args:
    argv: command line arguments (without the program name).
  """
  # Set environment variable for the local location of benchmark suite.
  # This is for collecting testing results to benchmark suite directory.
  os.environ['BENCH_SUITE_DIR'] = config.bench_suite_dir

  # Set Android type, used for the difference part between aosp and internal.
  os.environ['ANDROID_TYPE'] = config.android_type

  # Set ANDROID_HOME for both building and testing.
  os.environ['ANDROID_HOME'] = config.android_home

  # Set environment variable for architecture, this will be used in
  # autotest.
  os.environ['PRODUCT'] = config.product

  arguments = _parse_arguments(argv)

  # Fall back to the full benchmark list when no -b flags were given.
  bench_list = arguments.bench
  if not bench_list:
    bench_list = config.bench_list

  compiler = arguments.compiler_dir
  build_os = arguments.build_os
  llvm_version = arguments.llvm_prebuilts_version
  cflags = arguments.cflags
  ldflags = arguments.ldflags
  iterations = arguments.iterations
  serials = arguments.serials
  remote = arguments.remote
  frequency = arguments.frequency
  mode = arguments.mode
  keep = arguments.keep

  # Clear old logs every time before run script
  clear_logs()

  # NOTE(review): --keep is compared as the string 'False', not a boolean;
  # any other spelling (e.g. 'false') keeps the old results.
  if keep == 'False':
    clear_results()

  # Set test mode and frequency of CPU on the DUT
  set_device(serials, remote, frequency)

  test = arguments.test
  # if test configuration file has been given, use the build settings
  # in the configuration file and run the test.
  if test:
    test_config = ConfigParser.ConfigParser(allow_no_value=True)
    if not test_config.read(test):
      logging.error('Error while reading from building '
                    'configuration file %s.', test)
      raise RuntimeError('Error while reading configuration file %s.' % test)

    for setting_no, section in enumerate(test_config.sections()):
      bench = test_config.get(section, 'bench')
      compiler = [test_config.get(section, 'compiler')]
      build_os = [test_config.get(section, 'build_os')]
      llvm_version = [test_config.get(section, 'llvm_version')]
      cflags = [test_config.get(section, 'cflags')]
      ldflags = [test_config.get(section, 'ldflags')]

      # Set iterations from test_config file, if not exist, use the one from
      # command line.
      it = test_config.get(section, 'iterations')
      if not it:
        it = iterations
      it = int(it)

      # Build benchmark for each single test configuration
      # (each section holds exactly one setting, hence setting index 0).
      build_bench(0, bench, compiler, llvm_version, build_os, cflags, ldflags)

      test_bench(bench, setting_no, it, serials, remote, mode)

      gen_json(bench, setting_no, it, serials)

    # Report every benchmark that produced a JSON result.
    for bench in config.bench_list:
      infile = os.path.join(config.bench_suite_dir, bench + '.json')
      if os.path.exists(infile):
        outfile = os.path.join(config.bench_suite_dir, bench + '_report')
        gen_crosperf(infile, outfile)

    # Stop script if there is only config file provided
    return 0

  # If no configuration file specified, continue running.
  # Check if the count of the setting arguments are log_ambiguous.
  setting_count = check_count(compiler, llvm_version, build_os, cflags, ldflags)

  for bench in bench_list:
    logging.info('Start building and running benchmark: [%s]', bench)
    # Run script for each toolchain settings
    for setting_no in xrange(setting_count):
      build_bench(setting_no, bench, compiler, llvm_version, build_os, cflags,
                  ldflags)

      # Run autotest script for benchmark test on device
      test_bench(bench, setting_no, iterations, serials, remote, mode)

      gen_json(bench, setting_no, iterations, serials)

    infile = os.path.join(config.bench_suite_dir, bench + '.json')
    outfile = os.path.join(config.bench_suite_dir, bench + '_report')
    gen_crosperf(infile, outfile)
diff --git a/android_bench_suite/set_flags.py b/android_bench_suite/set_flags.py
new file mode 100644
index 0000000..a243c7c
--- /dev/null
+++ b/android_bench_suite/set_flags.py
@@ -0,0 +1,128 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper functions to put user defined flags to mk/bp files"""
+
+from __future__ import print_function
+
+import config
+import os
+import subprocess
+
+
# Find the makefile/blueprint based on the benchmark, and make a copy of
# it for restoring later.
def backup_file(bench, file_type):
  """Copy the benchmark's Android.<file_type> aside as 'tmp_makefile'.

  Args:
    bench: benchmark name, a key of config.bench_dict.
    file_type: 'mk' or 'bp'.

  Raises:
    OSError: when the copy fails.
  """
  mk_file = os.path.join(config.android_home, config.bench_dict[bench],
                         'Android.' + file_type)
  try:
    # Make a copy of the makefile/blueprint so that we can recover it after
    # building the benchmark
    subprocess.check_call([
        'cp', mk_file,
        os.path.join(config.android_home, config.bench_dict[bench],
                     'tmp_makefile')
    ])
  # Fixed: was 'except subprocess.CalledProcessError():', which *calls* the
  # exception class instead of naming it, so the handler could never match.
  except subprocess.CalledProcessError:
    raise OSError('Cannot backup Android.%s file for %s' % (file_type, bench))
+
+
# Insert lines to add LOCAL_CFLAGS/LOCAL_LDFLAGS to the benchmarks
# makefile/blueprint
def replace_flags(bench, android_type, file_type, cflags, ldflags):
  """Substitute user flags into the benchmark's flag-injection diff.

  Reads <bench>_flags_{aosp,internal}.diff, replaces the
  CFLAGS_FOR_BENCH_SUITE / LDFLAGS_FOR_BENCH_SUITE markers with the given
  flags, and writes the result to 'modified.diff' in the working directory.
  """
  # Blueprint files list flags as ["Flag1", "Flag2"], so join the
  # whitespace-separated flags accordingly.
  if file_type == 'bp':
    if cflags:
      cflags = '", "'.join(cflags.split())
    if ldflags:
      ldflags = '", "'.join(ldflags.split())

  cflags = '"' + cflags + '",' if cflags else ''
  ldflags = '"' + ldflags + '",' if ldflags else ''

  # Two different diffs are used for aosp or internal android repo.
  if android_type == 'aosp':
    bench_diff = bench + '_flags_aosp.diff'
  else:
    bench_diff = bench + '_flags_internal.diff'

  # Replace the markers across the whole diff and write the result out.
  with open(bench_diff) as src:
    patched = src.read()
  patched = patched.replace('CFLAGS_FOR_BENCH_SUITE', cflags)
  patched = patched.replace('LDFLAGS_FOR_BENCH_SUITE', ldflags)

  with open('modified.diff', 'w') as dst:
    dst.write(patched)
+
+
def apply_patches(bench):
  """git-apply the generated 'modified.diff' onto the benchmark's repo."""
  target_dir = os.path.join(config.android_home, config.bench_dict[bench])
  # The diff lives next to this script, not in the current directory.
  patch_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            'modified.diff')
  try:
    subprocess.check_call(['git', '-C', target_dir, 'apply', patch_path])
  except subprocess.CalledProcessError:
    raise OSError('Patch for adding flags for %s does not succeed.' % (bench))
+
+
def replace_flags_in_dir(bench, cflags, ldflags):
  """Substitute user flags directly into the benchmark's Android.mk."""
  bench_mk = os.path.join(config.android_home, config.bench_dict[bench],
                          'Android.mk')

  # Treat missing flags as empty strings so the markers are simply removed.
  cflags = cflags or ''
  ldflags = ldflags or ''

  with open(bench_mk) as f:
    content = f.read()
  content = content.replace('$(CFLAGS_FOR_BENCH_SUITE)', cflags)
  content = content.replace('$(LDFLAGS_FOR_BENCH_SUITE)', ldflags)

  # Rewrite the makefile in place (backup_file() keeps the original copy).
  with open(bench_mk, 'w') as f:
    f.write(content)
+
+
def add_flags_Panorama(cflags, ldflags):
  """Back up Panorama's Android.mk and inject the user flags into it."""
  backup_file('Panorama', 'mk')
  replace_flags_in_dir('Panorama', cflags, ldflags)
+
+
def add_flags_Synthmark(cflags, ldflags):
  """Back up Synthmark's Android.mk and inject the user flags into it."""
  backup_file('Synthmark', 'mk')
  replace_flags_in_dir('Synthmark', cflags, ldflags)
+
+
def add_flags_Skia(cflags, ldflags):
  """Back up Skia's Android.bp and apply the flag-injection diff to it."""
  backup_file('Skia', 'bp')
  replace_flags('Skia', config.android_type, 'bp', cflags, ldflags)
  apply_patches('Skia')
+
+
def add_flags_Binder(cflags, ldflags):
  """Back up Binder's Android.bp and apply the flag-injection diff to it."""
  backup_file('Binder', 'bp')
  replace_flags('Binder', config.android_type, 'bp', cflags, ldflags)
  apply_patches('Binder')
+
+
def add_flags_Hwui(cflags, ldflags):
  """Back up Hwui's Android.bp and apply the flag-injection diff to it."""
  backup_file('Hwui', 'bp')
  replace_flags('Hwui', config.android_type, 'bp', cflags, ldflags)
  apply_patches('Hwui')
+
+
def add_flags_Dex2oat(cflags, ldflags):
  """Back up Dex2oat's Android.bp and apply the flag-injection diff to it."""
  backup_file('Dex2oat', 'bp')
  replace_flags('Dex2oat', config.android_type, 'bp', cflags, ldflags)
  apply_patches('Dex2oat')
diff --git a/android_bench_suite/skia_aosp.diff b/android_bench_suite/skia_aosp.diff
new file mode 100644
index 0000000..269e02a
--- /dev/null
+++ b/android_bench_suite/skia_aosp.diff
@@ -0,0 +1,62 @@
+diff --git a/bench/ResultsWriter.h b/bench/ResultsWriter.h
+index f56deae..69a84c7 100644
+--- a/bench/ResultsWriter.h
++++ b/bench/ResultsWriter.h
+@@ -46,6 +46,9 @@ public:
+     // Record a single test metric.
+     virtual void metric(const char name[], double ms) {}
+ 
++    // Record a list of test metrics.
++    virtual void metrics(const char name[], const SkTArray<double> &array) {}
++
+     // Flush to storage now please.
+     virtual void flush() {}
+ };
+@@ -113,6 +116,17 @@ public:
+         SkASSERT(fConfig);
+         (*fConfig)[name] = ms;
+     }
++    void metrics(const char name[], const SkTArray<double> &array) override {
++        // The user who wrote this feature prefers NaNs over not having results.
++        // Hence, this ignores whether we have NaNs.
++        SkASSERT(fConfig);
++        Json::Value value = Json::Value(Json::arrayValue);
++        value.resize(array.count());
++        for (unsigned i = 0, e = array.count(); i != e; ++i) {
++          value[i] = array[i];
++        }
++        (*fConfig)[name] = value;
++    }
+ 
+     // Flush to storage now please.
+     void flush() override {
+diff --git a/bench/nanobench.cpp b/bench/nanobench.cpp
+index ae415fa..22011cd 100644
+--- a/bench/nanobench.cpp
++++ b/bench/nanobench.cpp
+@@ -42,6 +42,7 @@
+ #include "SkSurface.h"
+ #include "SkTaskGroup.h"
+ #include "SkThreadUtils.h"
++#include "SkTypes.h"
+ #include "ThermalManager.h"
+ 
+ #include <stdlib.h>
+@@ -1173,7 +1174,7 @@ int nanobench_main() {
+             target->setup();
+             bench->perCanvasPreDraw(canvas);
+ 
+-            int maxFrameLag;
++            int maxFrameLag = 0;
+             int loops = target->needsFrameTiming(&maxFrameLag)
+                 ? setup_gpu_bench(target, bench.get(), maxFrameLag)
+                 : setup_cpu_bench(overhead, target, bench.get());
+@@ -1197,7 +1198,8 @@
+             benchStream.fillCurrentOptions(log.get());
+             target->fillOptions(log.get());
+             log->metric("min_ms",    stats.min);
+             log->metric("median_ms", stats.median);
++            log->metrics("samples",    samples);
+ #if SK_SUPPORT_GPU
+             if (gpuStatsDump) {
+                 // dump to json, only SKPBench currently returns valid keys / values
diff --git a/android_bench_suite/skia_internal.diff b/android_bench_suite/skia_internal.diff
new file mode 100644
index 0000000..f6b1961
--- /dev/null
+++ b/android_bench_suite/skia_internal.diff
@@ -0,0 +1,61 @@
+diff --git a/bench/ResultsWriter.h b/bench/ResultsWriter.h
+index f56deae..69a84c7 100644
+--- a/bench/ResultsWriter.h
++++ b/bench/ResultsWriter.h
+@@ -46,6 +46,9 @@ public:
+     // Record a single test metric.
+     virtual void metric(const char name[], double ms) {}
+ 
++    // Record a list of test metrics.
++    virtual void metrics(const char name[], const SkTArray<double> &array) {}
++
+     // Flush to storage now please.
+     virtual void flush() {}
+ };
+@@ -113,6 +116,17 @@ public:
+         SkASSERT(fConfig);
+         (*fConfig)[name] = ms;
+     }
++    void metrics(const char name[], const SkTArray<double> &array) override {
++        // The user who wrote this feature prefers NaNs over not having results.
++        // Hence, this ignores whether we have NaNs.
++        SkASSERT(fConfig);
++        Json::Value value = Json::Value(Json::arrayValue);
++        value.resize(array.count());
++        for (unsigned i = 0, e = array.count(); i != e; ++i) {
++          value[i] = array[i];
++        }
++        (*fConfig)[name] = value;
++    }
+ 
+     // Flush to storage now please.
+     void flush() override {
+diff --git a/bench/nanobench.cpp b/bench/nanobench.cpp
+index 0651302..0623d61 100644
+--- a/bench/nanobench.cpp
++++ b/bench/nanobench.cpp
+@@ -43,6 +43,7 @@
+ #include "SkSVGDOM.h"
+ #include "SkTaskGroup.h"
+ #include "SkThreadUtils.h"
++#include "SkTypes.h"
+ #include "ThermalManager.h"
+ #include "SkScan.h"
+ 
+@@ -1240,7 +1241,7 @@ int nanobench_main() {
+             target->setup();
+             bench->perCanvasPreDraw(canvas);
+ 
+-            int maxFrameLag;
++            int maxFrameLag = 0;
+             int loops = target->needsFrameTiming(&maxFrameLag)
+                 ? setup_gpu_bench(target, bench.get(), maxFrameLag)
+                 : setup_cpu_bench(overhead, target, bench.get());
+@@ -1290,6 +1291,7 @@ int nanobench_main() {
+             benchStream.fillCurrentOptions(log.get());
+             target->fillOptions(log.get());
+             log->metric("min_ms",    stats.min);
++            log->metrics("samples",  samples);
+ #if SK_SUPPORT_GPU
+             if (gpuStatsDump) {
+                 // dump to json, only SKPBench currently returns valid keys / values
diff --git a/android_bench_suite/test_config b/android_bench_suite/test_config
new file mode 100644
index 0000000..ae2cff2
--- /dev/null
+++ b/android_bench_suite/test_config
@@ -0,0 +1,57 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+[Hwui_Test]
+bench = Hwui
+compiler =
+build_os =
+llvm_version =
+cflags =
+ldflags =
+iterations = 1
+
+[Skia_Test]
+bench = Skia
+compiler =
+build_os =
+llvm_version =
+cflags =
+ldflags =
+iterations = 1
+
+[Synthmark_Test]
+bench = Synthmark
+compiler =
+build_os =
+llvm_version =
+cflags =
+ldflags =
+iterations = 1
+
+[Binder_Test]
+bench = Binder
+compiler =
+build_os =
+llvm_version =
+cflags =
+ldflags =
+iterations = 1
+
+[Panorama_Test]
+bench = Panorama
+compiler =
+build_os =
+llvm_version =
+cflags =
+ldflags =
+iterations = 1
+
+[Dex2oat_Test]
+bench = Dex2oat
+compiler =
+build_os =
+llvm_version =
+cflags =
+ldflags =
+iterations = 1