author     Alix <agespino@google.com>  2023-02-08 23:42:28 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2023-02-08 23:42:28 +0000
commit     ffb36b59c243267c70416f55a82aea9831f65adb (patch)
tree       86c1a927ef7e618f74d849c5615208e09bd380f2
parent     6763a6223b98438beddffc756e5e64e23b4bd2d3 (diff)
parent     01aa290fe3446514c26bf490db4ac7b2ac1b42b3 (diff)
download   bazelbuild-rules_android-ffb36b59c243267c70416f55a82aea9831f65adb.tar.gz
Merge remote-tracking branch 'aosp/upstream-pre-alpha' into aosp am: 24ee0a8a28 am: 01aa290fe3
Original change: https://android-review.googlesource.com/c/platform/external/bazelbuild-rules_android/+/2426734

Change-Id: Ica7c72363e0a38992add3ef340296fe16b5d1c70
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--  BUILD  37
-rw-r--r--  METADATA  2
-rw-r--r--  MODULE.bazel  48
-rw-r--r--  README.md  4
-rw-r--r--  WORKSPACE  98
-rw-r--r--  WORKSPACE.bzlmod  14
-rw-r--r--  defs.bzl  37
-rw-r--r--  examples/basicapp/WORKSPACE  2
-rw-r--r--  go.mod  9
-rw-r--r--  go.sum  10
-rw-r--r--  kokoro/presubmit/kokoro_presubmit.sh  2
-rw-r--r--  mobile_install/adapters/aar_import.bzl  113
-rw-r--r--  mobile_install/adapters/android_binary.bzl  149
-rw-r--r--  mobile_install/adapters/android_instrumentation_test.bzl  76
-rw-r--r--  mobile_install/adapters/android_library.bzl  117
-rw-r--r--  mobile_install/adapters/android_sdk.bzl  39
-rw-r--r--  mobile_install/adapters/apk_import.bzl  47
-rw-r--r--  mobile_install/adapters/base.bzl  33
-rw-r--r--  mobile_install/adapters/java_import.bzl  71
-rw-r--r--  mobile_install/adapters/java_library.bzl  70
-rw-r--r--  mobile_install/adapters/java_lite_grpc_library.bzl  59
-rw-r--r--  mobile_install/adapters/java_lite_proto_library.bzl  57
-rw-r--r--  mobile_install/adapters/java_rpc_toolchain.bzl  39
-rw-r--r--  mobile_install/adapters/proto_lang_toolchain.bzl  50
-rw-r--r--  mobile_install/adapters/proto_library.bzl  47
-rw-r--r--  prereqs.bzl  87
-rw-r--r--  rules/aar_import/impl.bzl  4
-rw-r--r--  rules/acls.bzl  13
-rw-r--r--  rules/acls/android_binary_starlark_split_transition.bzl  22
-rw-r--r--  rules/acls/android_library_use_aosp_aidl_compiler.bzl  18
-rw-r--r--  rules/android_application/android_application_rule.bzl  14
-rw-r--r--  rules/android_application/android_feature_module_rule.bzl  5
-rw-r--r--  rules/android_application/attrs.bzl  1
-rw-r--r--  rules/android_binary_internal/attrs.bzl  15
-rw-r--r--  rules/android_binary_internal/impl.bzl  19
-rw-r--r--  rules/android_binary_internal/rule.bzl  1
-rw-r--r--  rules/android_library/attrs.bzl  23
-rw-r--r--  rules/android_library/impl.bzl  33
-rw-r--r--  rules/idl.bzl  29
-rw-r--r--  rules/java.bzl  4
-rw-r--r--  rules/native_deps.bzl  310
-rw-r--r--  rules/proguard.bzl  41
-rw-r--r--  rules/resources.bzl  64
-rw-r--r--  src/java/com/example/sampleapp/SampleApp.java  4
-rw-r--r--  src/java/com/example/sampleapp/res/layout/basic_activity.xml  12
-rw-r--r--  src/java/com/example/sampleapp/res/values/strings.xml  1
-rw-r--r--  src/tools/ak/akhelper.go  27
-rw-r--r--  src/tools/ak/bucketize/BUILD  58
-rw-r--r--  src/tools/ak/bucketize/bucketize.go  451
-rw-r--r--  src/tools/ak/bucketize/bucketize_bin.go  29
-rw-r--r--  src/tools/ak/bucketize/bucketize_test.go  483
-rw-r--r--  src/tools/ak/bucketize/partitioner.go  319
-rw-r--r--  src/tools/ak/bucketize/partitioner_test.go  349
-rw-r--r--  src/tools/ak/bucketize/pipe.go  154
-rw-r--r--  src/tools/ak/bucketize/pipe_test.go  75
-rw-r--r--  src/tools/ak/extractaar/BUILD  44
-rw-r--r--  src/tools/ak/extractaar/buildozer.go  48
-rw-r--r--  src/tools/ak/extractaar/extractaar.go  286
-rw-r--r--  src/tools/ak/extractaar/extractaar_bin.go  29
-rw-r--r--  src/tools/ak/extractaar/extractaar_test.go  73
-rw-r--r--  src/tools/ak/extractaar/validator.go  77
-rw-r--r--  src/tools/ak/extractaar/validator_test.go  175
-rw-r--r--  src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor.py  92
-rw-r--r--  src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor_test.py  15
64 files changed, 4598 insertions(+), 136 deletions(-)
diff --git a/BUILD b/BUILD
index 4415141..c459979 100644
--- a/BUILD
+++ b/BUILD
@@ -1,4 +1,41 @@
load("@bazel_gazelle//:def.bzl", "gazelle")
+package(default_visibility = ["//visibility:public"])
+
# gazelle:prefix github.com/bazelbuild/rules_android
gazelle(name = "gazelle")
+
+# Common default platform definitions for use by Android projects.
+
+platform(
+ name = "x86",
+ constraint_values = [
+ "@platforms//os:android",
+ "@platforms//cpu:x86_32",
+ ],
+)
+
+platform(
+ name = "x86_64",
+ constraint_values = [
+ "@platforms//os:android",
+ "@platforms//cpu:x86_64",
+ ],
+)
+
+platform(
+ name = "armeabi-v7a",
+ constraint_values = [
+ "@platforms//os:android",
+ "@platforms//cpu:armv7",
+ ],
+)
+
+platform(
+ name = "arm64-v8a",
+ constraint_values =
+ [
+ "@platforms//cpu:arm64",
+ "@platforms//os:android",
+ ],
+)
diff --git a/METADATA b/METADATA
index 5ce332f..df20a64 100644
--- a/METADATA
+++ b/METADATA
@@ -13,6 +13,6 @@ third_party {
value: "https://github.com/bazelbuild/rules_android"
}
version: "ab13c86fafc79b965b7ad6e4d91c821760d869d3"
- last_upgrade_date { year: 2021 month: 2 day: 12 }
+ last_upgrade_date { year: 2023 month: 2 day: 08 }
license_type: NOTICE
}
diff --git a/MODULE.bazel b/MODULE.bazel
new file mode 100644
index 0000000..e5a32bd
--- /dev/null
+++ b/MODULE.bazel
@@ -0,0 +1,48 @@
+module(
+ name = "bazel_build_rules_android",
+ version = "0.2.0",
+)
+
+bazel_dep(name = "platforms", version = "0.0.5")
+bazel_dep(name = "rules_java", version = "5.3.5")
+rules_java_toolchains = use_extension("@rules_java//java:extensions.bzl", "toolchains")
+use_repo(rules_java_toolchains, "remote_java_tools")
+
+bazel_dep(name = "protobuf", version = "3.19.0", repo_name = "com_google_protobuf")
+bazel_dep(name = "rules_jvm_external", version = "4.5")
+bazel_dep(name = "bazel_skylib", version = "1.0.3")
+
+register_toolchains("//toolchains/android:all")
+register_toolchains("//toolchains/android_sdk:all")
+register_toolchains("//toolchains/emulator:all")
+
+# go-related dependency setup
+bazel_dep(name = "rules_go", version = "0.34.0", repo_name = "io_bazel_rules_go")
+bazel_dep(name = "gazelle", version = "0.28.0")
+go_sdk = use_extension("@io_bazel_rules_go//go:extensions.bzl", "go_sdk")
+go_deps = use_extension("@gazelle//:extensions.bzl", "go_deps")
+go_deps.from_file(go_mod = "//:go.mod")
+use_repo(
+ go_deps,
+ "org_golang_google_protobuf",
+ "com_github_google_go_cmp",
+ "org_golang_x_sync",
+)
+maven = use_extension("@rules_jvm_external//:extensions.bzl", "maven")
+maven.install(
+ name = "rules_android_maven",
+ artifacts = [
+ "com.android.tools.build:bundletool:1.6.1",
+ ],
+ repositories = [
+ "https://maven.google.com",
+ "https://repo1.maven.org/maven2",
+ ],
+)
+use_repo(
+ maven,
+ "rules_android_maven"
+)
+
+remote_android_extensions = use_extension("@bazel_tools//tools/android:android_extensions.bzl", "remote_android_tools_extensions")
+use_repo(remote_android_extensions, "android_tools", "android_gmaven_r8")
diff --git a/README.md b/README.md
index 1d3ffbf..606af21 100644
--- a/README.md
+++ b/README.md
@@ -32,9 +32,11 @@ To use the new Bazel Android rules, add the following to your WORKSPACE file:
urls = ["https://github.com/bazelbuild/rules_android/archive/refs/heads/pre-alpha.zip"],
strip_prefix = "rules_android-pre-alpha",
)
+ load("@build_bazel_rules_android//:prereqs.bzl", "rules_android_prereqs")
+ rules_android_prereqs()
load("@build_bazel_rules_android//:defs.bzl", "rules_android_workspace")
rules_android_workspace()
-
+
register_toolchains(
"@build_bazel_rules_android//toolchains/android:android_default_toolchain",
"@build_bazel_rules_android//toolchains/android_sdk:android_sdk_tools",
diff --git a/WORKSPACE b/WORKSPACE
index c691e57..fb3a90e 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -4,37 +4,6 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
maybe(
- http_archive,
- name = "remote_java_tools_for_rules_android",
- sha256 = "8fb4d3138bd92a9d3324dae29c9f70d91ca2db18cd0bf1997446eed4657d19b3",
- urls = [
- "https://mirror.bazel.build/bazel_java_tools/releases/java/v11.8/java_tools-v11.8.zip",
- "https://github.com/bazelbuild/java_tools/releases/download/java_v11.8/java_tools-v11.8.zip",
- ],
-)
-
-maybe(
- http_archive,
- name = "com_google_protobuf",
- sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422",
- strip_prefix = "protobuf-3.19.1",
- urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
-)
-
-load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
-protobuf_deps()
-
-maybe(
- http_archive,
- name = "rules_jvm_external",
- strip_prefix = "rules_jvm_external-fa73b1a8e4846cee88240d0019b8f80d39feb1c3",
- sha256 = "7e13e48b50f9505e8a99cc5a16c557cbe826e9b68d733050cd1e318d69f94bb5",
- url = "https://github.com/bazelbuild/rules_jvm_external/archive/fa73b1a8e4846cee88240d0019b8f80d39feb1c3.zip",
-)
-
-load("defs.bzl", "rules_android_workspace")
-
-maybe(
android_sdk_repository,
name = "androidsdk",
)
@@ -44,68 +13,13 @@ maybe(
name = "androidndk",
)
-maybe(
- http_archive,
- name = "bazel_skylib",
- sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
- urls = [
- "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
- "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
- ],
-)
-register_toolchains("//toolchains/android:all")
-register_toolchains("//toolchains/android_sdk:all")
-register_toolchains("//toolchains/emulator:all")
+load("prereqs.bzl", "rules_android_prereqs")
+rules_android_prereqs()
-maybe(
- http_archive,
- name = "io_bazel_rules_go",
- sha256 = "16e9fca53ed6bd4ff4ad76facc9b7b651a89db1689a2877d6fd7b82aa824e366",
- urls = [
- "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.34.0/rules_go-v0.34.0.zip",
- "https://github.com/bazelbuild/rules_go/releases/download/v0.34.0/rules_go-v0.34.0.zip",
- ],
-)
-
-maybe(
- http_archive,
- name = "bazel_gazelle",
- sha256 = "5982e5463f171da99e3bdaeff8c0f48283a7a5f396ec5282910b9e8a49c0dd7e",
- urls = [
- "https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.25.0/bazel-gazelle-v0.25.0.tar.gz",
- "https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.25.0/bazel-gazelle-v0.25.0.tar.gz",
- ],
-)
-
-load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
-load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository")
-go_rules_dependencies()
-
-go_register_toolchains(version = "1.18.3")
-
-gazelle_dependencies()
-# gazelle:repository go_repository name=org_golang_x_xerrors importpath=golang.org/x/xerrors
-
-go_repository(
- name = "org_golang_google_protobuf",
- importpath = "google.golang.org/protobuf",
- sum = "h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=",
- version = "v1.28.1",
-)
-
-go_repository(
- name = "com_github_google_go_cmp",
- importpath = "github.com/google/go-cmp",
- sum = "h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=",
- version = "v0.5.9",
-)
-
-go_repository(
- name = "org_golang_x_sync",
- importpath = "golang.org/x/sync",
- sum = "h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=",
- version = "v0.0.0-20210220032951-036812b2e83c",
-)
+load("defs.bzl", "rules_android_workspace")
rules_android_workspace()
+register_toolchains("//toolchains/android:all")
+register_toolchains("//toolchains/android_sdk:all")
+register_toolchains("//toolchains/emulator:all")
diff --git a/WORKSPACE.bzlmod b/WORKSPACE.bzlmod
new file mode 100644
index 0000000..0aca21e
--- /dev/null
+++ b/WORKSPACE.bzlmod
@@ -0,0 +1,14 @@
+workspace(name = "build_bazel_rules_android")
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+
+maybe(
+ android_sdk_repository,
+ name = "androidsdk",
+)
+
+maybe(
+ android_ndk_repository,
+ name = "androidndk",
+)
diff --git a/defs.bzl b/defs.bzl
index 4d08708..8abd100 100644
--- a/defs.bzl
+++ b/defs.bzl
@@ -14,10 +14,15 @@
"""Workspace setup macro for rules_android."""
+load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository")
+load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
+load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
+load("@robolectric//bazel:robolectric.bzl", "robolectric_repositories")
load("@rules_jvm_external//:defs.bzl", "maven_install")
def rules_android_workspace():
""" Sets up workspace dependencies for rules_android."""
+ protobuf_deps()
maven_install(
name = "rules_android_maven",
@@ -29,3 +34,35 @@ def rules_android_workspace():
"https://repo1.maven.org/maven2",
],
)
+
+ go_rules_dependencies()
+
+ go_register_toolchains(version = "1.18.3")
+
+ gazelle_dependencies()
+ # gazelle:repository go_repository name=org_golang_x_xerrors importpath=golang.org/x/xerrors
+
+ go_repository(
+ name = "org_golang_google_protobuf",
+ importpath = "google.golang.org/protobuf",
+ sum = "h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=",
+ version = "v1.28.1",
+ )
+
+ go_repository(
+ name = "com_github_google_go_cmp",
+ importpath = "github.com/google/go-cmp",
+ sum = "h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=",
+ version = "v0.5.9",
+ )
+
+ go_repository(
+ name = "org_golang_x_sync",
+ importpath = "golang.org/x/sync",
+ sum = "h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=",
+ version = "v0.0.0-20210220032951-036812b2e83c",
+ )
+
+ robolectric_repositories()
+
+
diff --git a/examples/basicapp/WORKSPACE b/examples/basicapp/WORKSPACE
index eebeac0..b7aa64a 100644
--- a/examples/basicapp/WORKSPACE
+++ b/examples/basicapp/WORKSPACE
@@ -33,6 +33,8 @@ local_repository(
name = "rules_android",
path = "../..", # rules_android's WORKSPACE relative to this inner workspace
)
+load("@rules_android//:prereqs.bzl", "rules_android_prereqs")
+rules_android_prereqs()
load("@rules_android//:defs.bzl", "rules_android_workspace")
rules_android_workspace()
register_toolchains(
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..68f0c90
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,9 @@
+module github.com/bazelbuild/rules_android
+
+go 1.18
+
+require (
+ github.com/google/go-cmp v0.5.9
+ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
+ google.golang.org/protobuf v1.28.1
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..23ba436
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,10 @@
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
+google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
diff --git a/kokoro/presubmit/kokoro_presubmit.sh b/kokoro/presubmit/kokoro_presubmit.sh
index ed6eb5f..cdd6642 100644
--- a/kokoro/presubmit/kokoro_presubmit.sh
+++ b/kokoro/presubmit/kokoro_presubmit.sh
@@ -66,7 +66,7 @@ COMMON_ARGS=(
# Go to rules_android workspace and run relevant tests.
cd "${KOKORO_ARTIFACTS_DIR}/git/rules_android"
"$bazel" test "${COMMON_ARGS[@]}" //src/common/golang/... \
- //src/tools/ak/{compile,dex,finalrjar,generatemanifest,link,manifest,mindex,res,rjar}/...
+ //src/tools/ak/{bucketize,compile,dex,extractaar,finalrjar,generatemanifest,link,manifest,mindex,res,rjar}/...
# Go to basic app workspace in the source tree
cd "${KOKORO_ARTIFACTS_DIR}/git/rules_android/examples/basicapp"
diff --git a/mobile_install/adapters/aar_import.bzl b/mobile_install/adapters/aar_import.bzl
new file mode 100644
index 0000000..f1e5db9
--- /dev/null
+++ b/mobile_install/adapters/aar_import.bzl
@@ -0,0 +1,113 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for aar_import."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(
+ ":providers.bzl",
+ "MIAndroidAarNativeLibsInfo",
+ "MIAndroidAssetsInfo",
+ "MIAndroidDexInfo",
+ "MIAndroidResourcesInfo",
+ "MIJavaResourcesInfo",
+ "providers",
+)
+load(":resources.bzl", "liteparse")
+load(":transform.bzl", "dex")
+load("//rules:java.bzl", _java = "java")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["deps", "exports"]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+
+ assets = depset()
+ assets_dir = None
+ if AndroidAssetsInfo in target:
+ assets = target[AndroidAssetsInfo].assets
+ assets_dir = target[AndroidAssetsInfo].local_asset_dir
+
+ label = None
+ resources = depset()
+ if AndroidResourcesInfo in target:
+ label = target[AndroidResourcesInfo].label
+ resources = target[AndroidResourcesInfo].direct_android_resources
+
+ return [
+ providers.make_mi_android_aar_native_libs_info(
+ native_libs = target[AndroidNativeLibsInfo].native_libs,
+ deps = providers.collect(
+ MIAndroidAarNativeLibsInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_android_assets_info(
+ assets = assets,
+ assets_dir = assets_dir,
+ deps = providers.collect(
+ MIAndroidAssetsInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ target[JavaInfo].transitive_deps,
+ ),
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_android_resources_info(
+ # TODO(b/124229660): The package for an aar should be retrieved from
+ # the AndroidManifest.xml in the aar. Using the package is a short
+ # term work-around.
+ package = _java.resolve_package_from_label(
+ ctx.label,
+ ctx.rule.attr.package,
+ ),
+ label = label,
+ r_pb = liteparse(ctx),
+ resources = resources,
+ deps = providers.collect(
+ MIAndroidResourcesInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ ]
+
+aar_import = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/android_binary.bzl b/mobile_install/adapters/android_binary.bzl
new file mode 100644
index 0000000..98641e1
--- /dev/null
+++ b/mobile_install/adapters/android_binary.bzl
@@ -0,0 +1,149 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for android_binary."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":launcher.bzl", "make_launcher")
+load(":launcher_direct.bzl", "make_direct_launcher")
+load(":process.bzl", "process")
+load(
+ ":providers.bzl",
+ "MIAndroidAarNativeLibsInfo",
+ "MIAndroidAssetsInfo",
+ "MIAndroidDexInfo",
+ "MIAndroidResourcesInfo",
+ "MIJavaResourcesInfo",
+ "providers",
+)
+load(":resources.bzl", "get_assets_dir")
+load(":transform.bzl", "dex", "filter_jars")
+load(":utils.bzl", "utils")
+load("//rules/flags:flags.bzl", "flags")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["_android_sdk", "deps", "resources", "instruments"]
+
+def extract(target, ctx):
+    # extract is made visible for testing
+ """extract the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ Input for process method
+ """
+ return dict(
+ debug_key = utils.only(ctx.rule.files.debug_key, allow_empty = True),
+ debug_signing_keys = ctx.rule.files.debug_signing_keys,
+ debug_signing_lineage_file = utils.only(ctx.rule.files.debug_signing_lineage_file, allow_empty = True),
+ key_rotation_min_sdk = ctx.rule.attr.key_rotation_min_sdk,
+ merged_manifest = target[AndroidIdeInfo].generated_manifest,
+ native_libs = target[AndroidIdeInfo].native_libs,
+ package = target[AndroidIdeInfo].java_package,
+ resource_apk = target[AndroidIdeInfo].resource_apk,
+ resource_src_jar = target[AndroidIdeInfo].resource_jar.source_jar, # This is the R with real ids.
+ aar_native_libs_info = providers.make_mi_android_aar_native_libs_info(
+ deps = providers.collect(
+ MIAndroidAarNativeLibsInfo,
+ ctx.rule.attr.deps,
+ ),
+ ),
+ android_assets_info = providers.make_mi_android_assets_info(
+ assets = depset(ctx.rule.files.assets),
+ assets_dir = get_assets_dir(
+ ctx.rule.files.assets[0],
+ ctx.rule.attr.assets_dir,
+ ) if ctx.rule.files.assets else None,
+ deps = providers.collect(
+ MIAndroidAssetsInfo,
+ ctx.rule.attr.deps,
+ ),
+ ),
+ android_dex_info = providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ filter_jars(
+ ctx.label.name + "_resources.jar",
+ target[JavaInfo].runtime_output_jars,
+ ) +
+ (
+ ),
+ target[JavaInfo].transitive_deps,
+ ),
+ deps = providers.collect(MIAndroidDexInfo, ctx.rule.attr.deps),
+ ),
+        # TODO(djwhang): It is wasteful to collect packages in
+        # android_resources_info; rather, we should pull them
+        # from the resources_v3_info.
+ android_resources_info = providers.make_mi_android_resources_info(
+ package = target[AndroidIdeInfo].java_package,
+ deps = providers.collect(
+ MIAndroidResourcesInfo,
+ ctx.rule.attr.deps,
+ ),
+ ),
+ java_resources_info = providers.make_mi_java_resources_info(
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ ),
+ ),
+ android_jar = ctx.rule.attr._android_sdk[AndroidSdkInfo].android_jar,
+ instrumented_app = ctx.rule.attr.instruments,
+ apk = target.android.apk,
+ )
+
+def adapt(target, ctx):
+    # adapt is made visible for testing
+ """Adapts the android rule
+
+ Args:
+ target: The target.
+ ctx: The context.
+ Returns:
+ A list of providers
+ """
+
+ # launcher is created here to be used as the sibling everywhere else.
+ launcher = utils.isolated_declare_file(ctx, ctx.label.name + "_mi/launcher")
+ mi_app_info = process(ctx, sibling = launcher, **extract(target, ctx))
+
+ if flags.get(ctx).use_direct_deploy:
+ mi_app_launch_info = make_direct_launcher(
+ ctx,
+ mi_app_info,
+ launcher,
+ use_adb_root = flags.get(ctx).use_adb_root,
+ )
+ else:
+ mi_app_launch_info = make_launcher(
+ ctx,
+ mi_app_info,
+ launcher,
+ use_adb_root = flags.get(ctx).use_adb_root,
+ )
+
+ return [
+ mi_app_info,
+ mi_app_launch_info,
+ OutputGroupInfo(
+ mobile_install_INTERNAL_ = depset(mi_app_launch_info.runfiles).to_list(),
+ mobile_install_launcher_INTERNAL_ = [mi_app_launch_info.launcher],
+ ),
+ ]
+
+android_binary = make_adapter(_aspect_attrs, adapt)
diff --git a/mobile_install/adapters/android_instrumentation_test.bzl b/mobile_install/adapters/android_instrumentation_test.bzl
new file mode 100644
index 0000000..ffd9c4d
--- /dev/null
+++ b/mobile_install/adapters/android_instrumentation_test.bzl
@@ -0,0 +1,76 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for android_instrumentation_test."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":launcher.bzl", "make_launcher")
+load(":launcher_direct.bzl", "make_direct_launcher")
+load(":providers.bzl", "MIAppInfo")
+load(":utils.bzl", "utils")
+load("//rules/flags:flags.bzl", "flags")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["test_app", "support_apps"]
+
+def _adapt(target, ctx):
+ if not hasattr(ctx.attr, "_android_test_runner"):
+ fail("mobile-install does not support running tests on mac, check b/134172473 for more details")
+
+    # TODO(b/): Tests have yet to be optimized, so this is an irrelevant error.
+ # if flags.get(ctx).enable_splits:
+ # fail("mobile-install does not support running tests for split apks, check b/139762843 for more details! To run tests with mobile-install without splits, pass --define=enable_splits=False")
+
+ launcher = utils.isolated_declare_file(ctx, ctx.label.name + "_mi/launcher")
+
+ test_app = ctx.rule.attr.test_app
+
+ # TODO(manalinandan): Re-enable direct deploy for test.
+ # if _flags.get(ctx).use_direct_deploy:
+ if False:
+ mi_app_launch_info = make_direct_launcher(
+ ctx,
+ test_app[MIAppInfo],
+ launcher,
+ test_args = ctx.rule.attr.args,
+ test_support_apps = ctx.rule.attr.support_apps,
+ use_adb_root = flags.get(ctx).use_adb_root,
+ is_test = True,
+ )
+ else:
+ googplayservices_container_app = None
+ test_support_apps = []
+ for support_app in ctx.rule.attr.support_apps:
+            # Checks whether the support_app is an android_binary rule and 'GoogPlayServices' is present in the label.
+            # This implies there is a GoogPlayServices container binary in the dependencies.
+ if MIAppInfo in support_app and "GoogPlayServices" in str(support_app.label):
+ googplayservices_container_app = support_app
+ elif MIAppInfo in support_app:
+ test_support_apps.append(support_app[MIAppInfo].apk)
+ mi_app_launch_info = make_launcher(
+ ctx,
+ test_app[MIAppInfo],
+ launcher,
+ test_args = ctx.rule.attr.args,
+ test_support_apks = test_support_apps,
+ googplayservices_container_app = googplayservices_container_app,
+ use_adb_root = flags.get(ctx).use_adb_root,
+ is_test = True,
+ )
+ return [OutputGroupInfo(
+ mobile_install_INTERNAL_ = depset(mi_app_launch_info.runfiles).to_list(),
+ mobile_install_launcher_INTERNAL_ = [mi_app_launch_info.launcher],
+ )]
+
+android_instrumentation_test = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/android_library.bzl b/mobile_install/adapters/android_library.bzl
new file mode 100644
index 0000000..698bf27
--- /dev/null
+++ b/mobile_install/adapters/android_library.bzl
@@ -0,0 +1,117 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for android_library."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(
+ ":providers.bzl",
+ "MIAndroidAarNativeLibsInfo",
+ "MIAndroidAssetsInfo",
+ "MIAndroidDexInfo",
+ "MIAndroidResourcesInfo",
+ "MIAndroidSdkInfo",
+ "MIJavaResourcesInfo",
+ "providers",
+)
+load(":resources.bzl", "get_assets_dir")
+load(":transform.bzl", "dex", "filter_jars")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return [
+ "_android_sdk",
+
+ # For the Google-internal kotlin rule to access the toolchain to
+ # get kotlin std and runtime libs.
+ "_toolchain",
+ "deps",
+ "exports",
+ ]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ kt_toolchain = [ctx.rule.attr._toolchain] if hasattr(ctx.rule.attr, "_toolchain") else []
+ if ctx.rule.attr.neverlink:
+ return []
+
+ if target[AndroidIdeInfo].idl_generated_java_files:
+ aidl_lib = [ctx.rule.attr._android_sdk[MIAndroidSdkInfo].aidl_lib]
+ else:
+ aidl_lib = []
+
+ return [
+ providers.make_mi_android_aar_native_libs_info(
+ deps = providers.collect(
+ MIAndroidAarNativeLibsInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_android_assets_info(
+ assets = depset(ctx.rule.files.assets),
+ assets_dir = get_assets_dir(
+ ctx.rule.files.assets[0],
+ ctx.rule.attr.assets_dir,
+ ) if ctx.rule.files.assets else None,
+ deps = providers.collect(
+ MIAndroidAssetsInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ filter_jars(
+ ctx.label.name + "_resources.jar",
+ target[JavaInfo].runtime_output_jars,
+ ),
+ target[JavaInfo].transitive_deps,
+ ),
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ aidl_lib,
+ kt_toolchain,
+ ),
+ ),
+ providers.make_mi_android_resources_info(
+ package = target[AndroidIdeInfo].java_package,
+ deps = providers.collect(
+ MIAndroidResourcesInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ aidl_lib,
+ kt_toolchain,
+ ),
+ ),
+ ]
+
+android_library = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/android_sdk.bzl b/mobile_install/adapters/android_sdk.bzl
new file mode 100644
index 0000000..44f8f19
--- /dev/null
+++ b/mobile_install/adapters/android_sdk.bzl
@@ -0,0 +1,39 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for android_sdk."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAndroidSdkInfo")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["aidl_lib"]
+
+def _adapt(unused_target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ unused_target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ return [
+ MIAndroidSdkInfo(
+ aidl_lib = ctx.rule.attr.aidl_lib,
+ ),
+ ]
+
+android_sdk = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/apk_import.bzl b/mobile_install/adapters/apk_import.bzl
new file mode 100644
index 0000000..425d230
--- /dev/null
+++ b/mobile_install/adapters/apk_import.bzl
@@ -0,0 +1,47 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for android_binary."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAppInfo")
+load(":utils.bzl", "utils")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["unsigned_apk"]
+
+def adapt(target, ctx):
+    # adapt is made visible for testing
+ """Adapts the android rule
+
+ Args:
+ target: The target.
+ ctx: The context.
+ Returns:
+ A list of providers
+ """
+ apk = ctx.rule.file.unsigned_apk
+
+ package_name_output_file = utils.isolated_declare_file(ctx, ctx.label.name + "/manifest_package_name.txt")
+
+ utils.extract_package_name(ctx, apk, package_name_output_file)
+
+ return [
+ MIAppInfo(
+ apk = apk,
+ manifest_package_name = package_name_output_file,
+ ),
+ ]
+
+apk_import = make_adapter(_aspect_attrs, adapt)
diff --git a/mobile_install/adapters/base.bzl b/mobile_install/adapters/base.bzl
new file mode 100644
index 0000000..c969d9f
--- /dev/null
+++ b/mobile_install/adapters/base.bzl
@@ -0,0 +1,33 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Provides the base adapter functions."""
+
+def make_adapter(aspect_attrs, adapt):
+ """Creates an Adapter.
+
+ Args:
+ aspect_attrs: A function that returns a list of attrs for the aspect.
+ adapt: A function that extracts and processes data from the target.
+
+ Returns:
+ A struct that represents an adapter.
+ """
+ if not aspect_attrs:
+ fail("aspect_attrs is None.")
+ if not adapt:
+ fail("adapt is None.")
+ return struct(
+ aspect_attrs = aspect_attrs,
+ adapt = adapt,
+ )
diff --git a/mobile_install/adapters/java_import.bzl b/mobile_install/adapters/java_import.bzl
new file mode 100644
index 0000000..d4b9c92
--- /dev/null
+++ b/mobile_install/adapters/java_import.bzl
@@ -0,0 +1,71 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for java_import."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(
+ ":providers.bzl",
+ "MIAndroidDexInfo",
+ "MIJavaResourcesInfo",
+ "providers",
+)
+load(":transform.bzl", "dex", "extract_jar_resources")
+load(":utils.bzl", "utils")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["deps", "exports"]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ if ctx.rule.attr.neverlink:
+ return []
+
+ return [
+ providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ target[JavaInfo].transitive_deps,
+ create_file = utils.declare_file,
+ ),
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ java_resources = extract_jar_resources(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ create_file = utils.declare_file,
+ ),
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ ]
+
+java_import = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/java_library.bzl b/mobile_install/adapters/java_library.bzl
new file mode 100644
index 0000000..afeee64
--- /dev/null
+++ b/mobile_install/adapters/java_library.bzl
@@ -0,0 +1,70 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for java_library."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(
+ ":providers.bzl",
+ "MIAndroidDexInfo",
+ "MIJavaResourcesInfo",
+ "providers",
+)
+load(":transform.bzl", "dex", "extract_jar_resources")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["deps", "exports", "runtime_deps"]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ if ctx.rule.attr.neverlink:
+ return []
+
+ return [
+ providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ target[JavaInfo].transitive_deps,
+ ),
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.runtime_deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ java_resources = extract_jar_resources(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ ),
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ ctx.rule.attr.runtime_deps,
+ ctx.rule.attr.exports,
+ ),
+ ),
+ ]
+
+java_library = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/java_lite_grpc_library.bzl b/mobile_install/adapters/java_lite_grpc_library.bzl
new file mode 100644
index 0000000..5eff4a8
--- /dev/null
+++ b/mobile_install/adapters/java_lite_grpc_library.bzl
@@ -0,0 +1,59 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for _java_lite_grpc_library."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAndroidDexInfo", "MIJavaResourcesInfo", "providers")
+load(":transform.bzl", "dex", "extract_jar_resources")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["deps", "_toolchain"]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ return [
+ providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ target[JavaInfo].transitive_deps,
+ ),
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ ctx.rule.attr.deps,
+ [ctx.rule.attr._toolchain],
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ java_resources = extract_jar_resources(
+ ctx,
+ target[JavaInfo].runtime_output_jars,
+ ),
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ ),
+ ),
+ ]
+
+java_lite_grpc_library = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/java_lite_proto_library.bzl b/mobile_install/adapters/java_lite_proto_library.bzl
new file mode 100644
index 0000000..b251b65
--- /dev/null
+++ b/mobile_install/adapters/java_lite_proto_library.bzl
@@ -0,0 +1,57 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for java_lite_proto_library.
+
+The java_lite_proto_library rule applies an aspect to its proto dependencies
+and creates a "lite.jar" for every proto traversed. This adapter only
+propagates the deps, i.e. the proto_library rules.
+"""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAndroidDexInfo", "MIJavaResourcesInfo", "providers")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["deps", "_aspect_proto_toolchain_for_javalite"]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ if not ctx.rule.attr.deps:
+ return []
+ return [
+ providers.make_mi_android_dex_info(
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ ctx.rule.attr.deps,
+ [ctx.rule.attr._aspect_proto_toolchain_for_javalite],
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ ctx.rule.attr.deps,
+ [ctx.rule.attr._aspect_proto_toolchain_for_javalite],
+ ),
+ ),
+ ]
+
+java_lite_proto_library = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/java_rpc_toolchain.bzl b/mobile_install/adapters/java_rpc_toolchain.bzl
new file mode 100644
index 0000000..76ee505
--- /dev/null
+++ b/mobile_install/adapters/java_rpc_toolchain.bzl
@@ -0,0 +1,39 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for java_rpc_toolchain.bzl."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAndroidDexInfo", "providers")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["runtime"] # all potential implicit runtime deps
+
+def _adapt(unused_target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ unused_target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ return [
+ providers.make_mi_android_dex_info(
+ deps = providers.collect(MIAndroidDexInfo, ctx.rule.attr.runtime),
+ ),
+ ]
+
+java_rpc_toolchain = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/proto_lang_toolchain.bzl b/mobile_install/adapters/proto_lang_toolchain.bzl
new file mode 100644
index 0000000..1e767e2
--- /dev/null
+++ b/mobile_install/adapters/proto_lang_toolchain.bzl
@@ -0,0 +1,50 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for proto_lang_toolchain."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAndroidDexInfo", "MIJavaResourcesInfo", "providers")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["runtime"]
+
+def _adapt(unused_target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ unused_target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ if not ctx.rule.attr.runtime:
+ return []
+ return [
+ providers.make_mi_android_dex_info(
+ deps = providers.collect(
+ MIAndroidDexInfo,
+ [ctx.rule.attr.runtime],
+ ),
+ ),
+ providers.make_mi_java_resources_info(
+ deps = providers.collect(
+ MIJavaResourcesInfo,
+ [ctx.rule.attr.runtime],
+ ),
+ ),
+ ]
+
+proto_lang_toolchain = make_adapter(_aspect_attrs, _adapt)
diff --git a/mobile_install/adapters/proto_library.bzl b/mobile_install/adapters/proto_library.bzl
new file mode 100644
index 0000000..9e5d8da
--- /dev/null
+++ b/mobile_install/adapters/proto_library.bzl
@@ -0,0 +1,47 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule adapter for proto_library."""
+
+load(":adapters/base.bzl", "make_adapter")
+load(":providers.bzl", "MIAndroidDexInfo", "providers")
+load(":transform.bzl", "dex")
+
+def _aspect_attrs():
+ """Attrs of the rule requiring traversal by the aspect."""
+ return ["deps"]
+
+def _adapt(target, ctx):
+ """Adapts the rule and target data.
+
+ Args:
+ target: The target.
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ if not JavaInfo in target:
+ return []
+ return [
+ providers.make_mi_android_dex_info(
+ dex_shards = dex(
+ ctx,
+ [j.class_jar for j in target[JavaInfo].outputs.jars],
+ target[JavaInfo].transitive_deps,
+ ),
+ deps = providers.collect(MIAndroidDexInfo, ctx.rule.attr.deps),
+ ),
+ ]
+
+proto_library = make_adapter(_aspect_attrs, _adapt)
diff --git a/prereqs.bzl b/prereqs.bzl
new file mode 100644
index 0000000..d106a88
--- /dev/null
+++ b/prereqs.bzl
@@ -0,0 +1,87 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Sets up prerequisites for rules_android."""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+
+
+def rules_android_prereqs():
+ """Downloads prerequisite repositories for rules_android."""
+ maybe(
+ http_archive,
+ name = "rules_jvm_external",
+ strip_prefix = "rules_jvm_external-fa73b1a8e4846cee88240d0019b8f80d39feb1c3",
+ sha256 = "7e13e48b50f9505e8a99cc5a16c557cbe826e9b68d733050cd1e318d69f94bb5",
+ url = "https://github.com/bazelbuild/rules_jvm_external/archive/fa73b1a8e4846cee88240d0019b8f80d39feb1c3.zip",
+ )
+
+ maybe(
+ http_archive,
+ name = "com_google_protobuf",
+ sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422",
+ strip_prefix = "protobuf-3.19.1",
+ urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
+ )
+
+ maybe(
+ http_archive,
+ name = "remote_java_tools_for_rules_android",
+ sha256 = "8fb4d3138bd92a9d3324dae29c9f70d91ca2db18cd0bf1997446eed4657d19b3",
+ urls = [
+ "https://mirror.bazel.build/bazel_java_tools/releases/java/v11.8/java_tools-v11.8.zip",
+ "https://github.com/bazelbuild/java_tools/releases/download/java_v11.8/java_tools-v11.8.zip",
+ ],
+ )
+
+ maybe(
+ http_archive,
+ name = "bazel_skylib",
+ sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
+ urls = [
+ "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
+ "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
+ ],
+ )
+
+ maybe(
+ http_archive,
+ name = "io_bazel_rules_go",
+ sha256 = "16e9fca53ed6bd4ff4ad76facc9b7b651a89db1689a2877d6fd7b82aa824e366",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.34.0/rules_go-v0.34.0.zip",
+ "https://github.com/bazelbuild/rules_go/releases/download/v0.34.0/rules_go-v0.34.0.zip",
+ ],
+ )
+
+ maybe(
+ http_archive,
+ name = "bazel_gazelle",
+ sha256 = "5982e5463f171da99e3bdaeff8c0f48283a7a5f396ec5282910b9e8a49c0dd7e",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.25.0/bazel-gazelle-v0.25.0.tar.gz",
+ "https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.25.0/bazel-gazelle-v0.25.0.tar.gz",
+ ],
+ )
+
+ maybe(
+ http_archive,
+ name = "robolectric",
+ urls = ["https://github.com/robolectric/robolectric-bazel/archive/4.9.2.tar.gz"],
+ strip_prefix = "robolectric-bazel-4.9.2",
+ sha256 = "7e007fcfdca7b7228cb4de72707e8b317026ea95000f963e91d5ae365be52d0d",
+ )
+
+
diff --git a/rules/aar_import/impl.bzl b/rules/aar_import/impl.bzl
index 894baba..46e1337 100644
--- a/rules/aar_import/impl.bzl
+++ b/rules/aar_import/impl.bzl
@@ -475,7 +475,7 @@ def impl(ctx):
ctx,
aar = aar,
package = package,
- manifest = manifest_ctx.min_sdk_bumped_manifest,
+ manifest = manifest_ctx.processed_manifest,
deps = ctx.attr.deps,
aar_resources_extractor_tool =
_get_android_toolchain(ctx).aar_resources_extractor.files_to_run,
@@ -555,7 +555,7 @@ def impl(ctx):
ctx,
aar = aar,
package = package,
- manifest = manifest_ctx.min_sdk_bumped_manifest,
+ manifest = manifest_ctx.processed_manifest,
checks = _get_android_toolchain(ctx).aar_import_checks.files_to_run,
))
diff --git a/rules/acls.bzl b/rules/acls.bzl
index b038c29..e17e136 100644
--- a/rules/acls.bzl
+++ b/rules/acls.bzl
@@ -41,9 +41,11 @@ load("//rules/acls:android_test_lockdown.bzl", "ANDROID_TEST_LOCKDOWN_GENERATOR_
load("//rules/acls:android_device_plugin_rollout.bzl", "ANDROID_DEVICE_PLUGIN_FALLBACK", "ANDROID_DEVICE_PLUGIN_ROLLOUT")
load("//rules/acls:android_instrumentation_binary_starlark_resources.bzl", "ANDROID_INSTRUMENTATION_BINARY_STARLARK_RESOURCES_FALLBACK", "ANDROID_INSTRUMENTATION_BINARY_STARLARK_RESOURCES_ROLLOUT")
load("//rules/acls:android_binary_starlark_javac.bzl", "ANDROID_BINARY_STARLARK_JAVAC_FALLBACK", "ANDROID_BINARY_STARLARK_JAVAC_ROLLOUT")
+load("//rules/acls:android_binary_starlark_split_transition.bzl", "ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_FALLBACK", "ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_ROLLOUT")
load("//rules/acls:android_feature_splits_dogfood.bzl", "ANDROID_FEATURE_SPLITS_DOGFOOD")
load("//rules/acls:android_library_resources_without_srcs.bzl", "ANDROID_LIBRARY_RESOURCES_WITHOUT_SRCS", "ANDROID_LIBRARY_RESOURCES_WITHOUT_SRCS_GENERATOR_FUNCTIONS")
load("//rules/acls:android_library_starlark_resource_outputs.bzl", "ANDROID_LIBRARY_STARLARK_RESOURCE_OUTPUTS_FALLBACK", "ANDROID_LIBRARY_STARLARK_RESOURCE_OUTPUTS_ROLLOUT")
+load("//rules/acls:android_library_use_aosp_aidl_compiler.bzl", "ANDROID_LIBRARY_USE_AOSP_AIDL_COMPILER_ALLOWLIST")
load("//rules/acls:android_lint_checks_rollout.bzl", "ANDROID_LINT_CHECKS_FALLBACK", "ANDROID_LINT_CHECKS_ROLLOUT")
load("//rules/acls:android_lint_rollout.bzl", "ANDROID_LINT_FALLBACK", "ANDROID_LINT_ROLLOUT")
load("//rules/acls:lint_registry_rollout.bzl", "LINT_REGISTRY_FALLBACK", "LINT_REGISTRY_ROLLOUT")
@@ -101,6 +103,9 @@ def _in_android_instrumentation_binary_starlark_resources(fqn):
def _in_android_binary_starlark_javac(fqn):
return not matches(fqn, ANDROID_BINARY_STARLARK_JAVAC_FALLBACK_DICT) and matches(fqn, ANDROID_BINARY_STARLARK_JAVAC_ROLLOUT_DICT)
+def _in_android_binary_starlark_split_transition(fqn):
+ return not matches(fqn, ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_FALLBACK_DICT) and matches(fqn, ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_ROLLOUT_DICT)
+
def _in_android_feature_splits_dogfood(fqn):
return matches(fqn, ANDROID_FEATURE_SPLITS_DOGFOOD_DICT)
@@ -136,6 +141,9 @@ def _in_android_library_resources_without_srcs_generator_functions(gfn):
def _in_android_library_starlark_resource_outputs_rollout(fqn):
return not matches(fqn, ANDROID_LIBRARY_STARLARK_RESOURCE_OUTPUTS_FALLBACK_DICT) and matches(fqn, ANDROID_LIBRARY_STARLARK_RESOURCE_OUTPUTS_ROLLOUT_DICT)
+def _in_android_library_use_aosp_aidl_compiler_allowlist(fqn):
+ return matches(fqn, ANDROID_LIBRARY_USE_AOSP_AIDL_COMPILER_ALLOWLIST_DICT)
+
def _in_app_installation_snapshot(fqn):
return not matches(fqn, APP_INSTALLATION_SNAPSHOT_FALLBACK_DICT) and matches(fqn, APP_INSTALLATION_SNAPSHOT_DICT)
@@ -223,6 +231,8 @@ ANDROID_INSTRUMENTATION_BINARY_STARLARK_RESOURCES_ROLLOUT_DICT = make_dict(ANDRO
ANDROID_INSTRUMENTATION_BINARY_STARLARK_RESOURCES_FALLBACK_DICT = make_dict(ANDROID_INSTRUMENTATION_BINARY_STARLARK_RESOURCES_FALLBACK)
ANDROID_BINARY_STARLARK_JAVAC_ROLLOUT_DICT = make_dict(ANDROID_BINARY_STARLARK_JAVAC_ROLLOUT)
ANDROID_BINARY_STARLARK_JAVAC_FALLBACK_DICT = make_dict(ANDROID_BINARY_STARLARK_JAVAC_FALLBACK)
+ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_ROLLOUT_DICT = make_dict(ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_ROLLOUT)
+ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_FALLBACK_DICT = make_dict(ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_FALLBACK)
ANDROID_FEATURE_SPLITS_DOGFOOD_DICT = make_dict(ANDROID_FEATURE_SPLITS_DOGFOOD)
ANDROID_LIBRARY_RESOURCES_WITHOUT_SRCS_DICT = make_dict(ANDROID_LIBRARY_RESOURCES_WITHOUT_SRCS)
ANDROID_LIBRARY_RESOURCES_WITHOUT_SRCS_GENERATOR_FUNCTIONS_DICT = make_dict(ANDROID_LIBRARY_RESOURCES_WITHOUT_SRCS_GENERATOR_FUNCTIONS)
@@ -276,6 +286,7 @@ BASELINE_PROFILES_ROLLOUT_DICT = make_dict(BASELINE_PROFILES_ROLLOUT)
ENFORCE_MIN_SDK_FLOOR_ROLLOUT_DICT = make_dict(ENFORCE_MIN_SDK_FLOOR_ROLLOUT)
ENFORCE_MIN_SDK_FLOOR_FALLBACK_DICT = make_dict(ENFORCE_MIN_SDK_FLOOR_FALLBACK)
ANDROID_APK_TO_BUNDLE_FEATURES_DICT = make_dict(ANDROID_APK_TO_BUNDLE_FEATURES)
+ANDROID_LIBRARY_USE_AOSP_AIDL_COMPILER_ALLOWLIST_DICT = make_dict(ANDROID_LIBRARY_USE_AOSP_AIDL_COMPILER_ALLOWLIST)
def matches(fqn, dct):
# Labels with workspace names ("@workspace//pkg:target") are not supported.
@@ -326,10 +337,12 @@ acls = struct(
in_android_device_plugin_rollout = _in_android_device_plugin_rollout,
in_android_instrumentation_binary_starlark_resources = _in_android_instrumentation_binary_starlark_resources,
in_android_binary_starlark_javac = _in_android_binary_starlark_javac,
+ in_android_binary_starlark_split_transition = _in_android_binary_starlark_split_transition,
in_android_feature_splits_dogfood = _in_android_feature_splits_dogfood,
in_android_library_starlark_resource_outputs_rollout = _in_android_library_starlark_resource_outputs_rollout,
in_android_library_resources_without_srcs = _in_android_library_resources_without_srcs,
in_android_library_resources_without_srcs_generator_functions = _in_android_library_resources_without_srcs_generator_functions,
+ in_android_library_use_aosp_aidl_compiler_allowlist = _in_android_library_use_aosp_aidl_compiler_allowlist,
in_android_lint_checks_rollout = _in_android_lint_checks_rollout,
in_android_lint_rollout = _in_android_lint_rollout,
in_lint_registry_rollout = _in_lint_registry_rollout,
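
The new _in_* helpers follow the existing rollout/fallback pattern: a label is opted in only when it matches the ROLLOUT dict and does not match the FALLBACK dict. A minimal sketch of how a rule implementation consumes the split-transition check (the helper name mirrors the android_binary_internal change later in this diff; the wrapper function itself is illustrative):

    def _use_starlark_split_transition(ctx):
        # True only for labels that match ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_ROLLOUT
        # and do not match ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_FALLBACK.
        return acls.in_android_binary_starlark_split_transition(str(ctx.label))
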
diff --git a/rules/acls/android_binary_starlark_split_transition.bzl b/rules/acls/android_binary_starlark_split_transition.bzl
new file mode 100644
index 0000000..0cc737b
--- /dev/null
+++ b/rules/acls/android_binary_starlark_split_transition.bzl
@@ -0,0 +1,22 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow and fallback lists for using the Starlark implementation of the android split transition"""
+
+ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_ROLLOUT = [
+ "//tools/build_defs/android/test/dev/android_binary_internal/java/com/nativelibs:__pkg__",
+]
+
+ANDROID_BINARY_STARLARK_SPLIT_TRANSITION_FALLBACK = [
+]
diff --git a/rules/acls/android_library_use_aosp_aidl_compiler.bzl b/rules/acls/android_library_use_aosp_aidl_compiler.bzl
new file mode 100644
index 0000000..3825458
--- /dev/null
+++ b/rules/acls/android_library_use_aosp_aidl_compiler.bzl
@@ -0,0 +1,18 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow list for the `idl_use_aosp_compiler` attribute in the `android_library` rule."""
+
+ANDROID_LIBRARY_USE_AOSP_AIDL_COMPILER_ALLOWLIST = [
+]
diff --git a/rules/android_application/android_application_rule.bzl b/rules/android_application/android_application_rule.bzl
index d539620..ce3f321 100644
--- a/rules/android_application/android_application_rule.bzl
+++ b/rules/android_application/android_application_rule.bzl
@@ -57,7 +57,7 @@ def _verify_attrs(attrs, fqn):
if hasattr(attrs, attr):
_log.error("Unsupported attr: %s in android_application" % attr)
- if not attrs.get("manifest_values", default = {}).get("applicationId"):
+ if not attrs.get("manifest_values", {}).get("applicationId"):
_log.error("%s missing required applicationId in manifest_values" % fqn)
for attr in ["deps"]:
@@ -140,7 +140,6 @@ def _process_feature_module(
ctx,
inputs = [filtered_res, native_libs],
output = out,
- exclude_build_data = True,
java_toolchain = _common.get_java_toolchain(ctx),
)
@@ -326,6 +325,7 @@ def _impl(ctx):
android_application = rule(
attrs = ANDROID_APPLICATION_ATTRS,
+ cfg = android_common.android_platforms_transition,
fragments = [
"android",
"java",
@@ -351,8 +351,8 @@ def android_application_macro(_android_binary, **attrs):
fqn = "//%s:%s" % (native.package_name(), attrs["name"])
# Must pop these because android_binary does not have these attributes.
- app_integrity_config = attrs.pop("app_integrity_config", default = None)
- rotation_config = attrs.pop("rotation_config", default = None)
+ app_integrity_config = attrs.pop("app_integrity_config", None)
+ rotation_config = attrs.pop("rotation_config", None)
# Simply fall back to android_binary if no feature splits or bundle_config
if not attrs.get("feature_modules", None) and not (attrs.get("bundle_config", None) or attrs.get("bundle_config_file", None)):
@@ -366,9 +366,9 @@ def android_application_macro(_android_binary, **attrs):
base_split_name = "%s_base" % name
# default to [] if feature_modules = None is passed
- feature_modules = attrs.pop("feature_modules", default = []) or []
- bundle_config = attrs.pop("bundle_config", default = None)
- bundle_config_file = attrs.pop("bundle_config_file", default = None)
+ feature_modules = attrs.pop("feature_modules", []) or []
+ bundle_config = attrs.pop("bundle_config", None)
+ bundle_config_file = attrs.pop("bundle_config_file", None)
# bundle_config is deprecated in favor of bundle_config_file
# In the future bundle_config will accept a build rule rather than a raw file.
diff --git a/rules/android_application/android_feature_module_rule.bzl b/rules/android_application/android_feature_module_rule.bzl
index ec051d5..a0f7d2b 100644
--- a/rules/android_application/android_feature_module_rule.bzl
+++ b/rules/android_application/android_feature_module_rule.bzl
@@ -24,6 +24,7 @@ load("//rules:acls.bzl", "acls")
load(
"//rules:utils.bzl",
"get_android_toolchain",
+ "utils",
)
def _impl(ctx):
@@ -39,7 +40,7 @@ def _impl(ctx):
args.add(ctx.attr.binary[ApkInfo].unsigned_apk.path)
args.add(ctx.configuration.coverage_enabled)
args.add(ctx.fragments.android.desugar_java8_libs)
- args.add(ctx.attr.library.label)
+ args.add(utils.dedupe_split_attr(ctx.split_attr.library).label)
args.add(get_android_toolchain(ctx).xmllint_tool.files_to_run.executable)
args.add(get_android_toolchain(ctx).unzip_tool.files_to_run.executable)
@@ -59,7 +60,7 @@ def _impl(ctx):
return [
AndroidFeatureModuleInfo(
binary = ctx.attr.binary,
- library = ctx.attr.library,
+ library = utils.dedupe_split_attr(ctx.split_attr.library),
title_id = ctx.attr.title_id,
title_lib = ctx.attr.title_lib,
feature_name = ctx.attr.feature_name,
diff --git a/rules/android_application/attrs.bzl b/rules/android_application/attrs.bzl
index 131fb46..55864c2 100644
--- a/rules/android_application/attrs.bzl
+++ b/rules/android_application/attrs.bzl
@@ -84,6 +84,7 @@ ANDROID_FEATURE_MODULE_ATTRS = dict(
feature_name = attr.string(),
library = attr.label(
allow_rules = ["android_library"],
+ cfg = android_common.multi_cpu_configuration,
mandatory = True,
doc = "android_library target to include as a feature split.",
),
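
With cfg = android_common.multi_cpu_configuration on the library attribute, ctx.attr.library becomes ctx.split_attr.library, a dict keyed by split key, which is why the rule implementation above now goes through utils.dedupe_split_attr. A rough sketch of the shape involved (the split keys are illustrative, and the dedupe behavior is an assumption about the helper, not something defined in this diff):

    # ctx.split_attr.library is a dict of split key -> configured target, e.g.
    #   {"armeabi-v7a": <library target>, "arm64-v8a": <library target>}
    # utils.dedupe_split_attr(...) is assumed to collapse that dict back to a
    # single configured target so the rule can read .label and providers from it.
    library = utils.dedupe_split_attr(ctx.split_attr.library)
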
diff --git a/rules/android_binary_internal/attrs.bzl b/rules/android_binary_internal/attrs.bzl
index d0d2197..2cc6a3d 100644
--- a/rules/android_binary_internal/attrs.bzl
+++ b/rules/android_binary_internal/attrs.bzl
@@ -18,6 +18,10 @@ load(
"//rules:attrs.bzl",
_attrs = "attrs",
)
+load(
+ "//rules:native_deps.bzl",
+ "split_config_aspect",
+)
ATTRS = _attrs.replace(
_attrs.add(
@@ -59,6 +63,17 @@ ATTRS = _attrs.replace(
),
_defined_resource_files = attr.bool(default = False),
_enable_manifest_merging = attr.bool(default = True),
+ _cc_toolchain_split = attr.label(
+ cfg = android_common.multi_cpu_configuration,
+ default = "@bazel_tools//tools/cpp:current_cc_toolchain",
+ aspects = [split_config_aspect],
+ ),
+ _grep_includes = attr.label(
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("@@bazel_tools//tools/cpp:grep-includes"),
+ ),
),
_attrs.COMPILATION,
_attrs.DATA_CONTEXT,
diff --git a/rules/android_binary_internal/impl.bzl b/rules/android_binary_internal/impl.bzl
index 847844a..3d89a7b 100644
--- a/rules/android_binary_internal/impl.bzl
+++ b/rules/android_binary_internal/impl.bzl
@@ -23,6 +23,10 @@ load(
)
load("//rules:resources.bzl", _resources = "resources")
load("//rules:utils.bzl", "compilation_mode", "get_android_toolchain", "utils")
+load(
+ "//rules:native_deps.bzl",
+ _process_native_deps = "process",
+)
def _process_manifest(ctx, **unused_ctxs):
manifest_ctx = _resources.bump_min_sdk(
@@ -43,7 +47,7 @@ def _process_resources(ctx, manifest_ctx, java_package, **unused_ctxs):
assets = ctx.files.assets,
assets_dir = ctx.attr.assets_dir,
resource_files = ctx.files.resource_files,
- manifest = manifest_ctx.min_sdk_bumped_manifest,
+ manifest = manifest_ctx.processed_manifest,
manifest_values = utils.expand_make_vars(ctx, ctx.attr.manifest_values),
resource_configs = ctx.attr.resource_configuration_filters,
densities = ctx.attr.densities,
@@ -85,6 +89,18 @@ def _validate_manifest(ctx, packaged_resources_ctx, **unused_ctxs):
value = manifest_validation_ctx,
)
+def _process_native_libs(ctx, **_unusued_ctxs):
+ providers = []
+ if acls.in_android_binary_starlark_split_transition(str(ctx.label)):
+ providers.append(_process_native_deps(
+ ctx,
+ filename = "nativedeps",
+ ))
+ return ProviderInfo(
+ name = "native_libs_ctx",
+ value = struct(providers = providers),
+ )
+
def use_legacy_manifest_merger(ctx):
"""Whether legacy manifest merging is enabled.
@@ -130,6 +146,7 @@ PROCESSORS = dict(
ManifestProcessor = _process_manifest,
ResourceProcessor = _process_resources,
ValidateManifestProcessor = _validate_manifest,
+ NativeLibsProcessor = _process_native_libs,
)
_PROCESSING_PIPELINE = processing_pipeline.make_processing_pipeline(
diff --git a/rules/android_binary_internal/rule.bzl b/rules/android_binary_internal/rule.bzl
index ec8abf3..b08a39e 100644
--- a/rules/android_binary_internal/rule.bzl
+++ b/rules/android_binary_internal/rule.bzl
@@ -48,6 +48,7 @@ def make_rule(
fragments = [
"android",
"java",
+ "cpp",
],
)
diff --git a/rules/android_library/attrs.bzl b/rules/android_library/attrs.bzl
index 8924e70..2bc3a0d 100644
--- a/rules/android_library/attrs.bzl
+++ b/rules/android_library/attrs.bzl
@@ -137,6 +137,29 @@ ATTRS = _attrs.add(
"for information about what this means."
),
),
+ idl_uses_aosp_compiler = attr.bool(
+ default = False,
+ doc = (
+                "Use the upstream AOSP compiler to generate Java files out of `idl_srcs`. " +
+ "The upstream AOSP compiler provides several new language features that the " +
+ "Google3-only compiler doesn't provide. For example: structured parcelables, " +
+ "unions, enums, nested type declarations, constant expressions, annotations, " +
+ "and more. " +
+ "See [AIDL Doc](https://source.android.com/docs/core/architecture/aidl/overview) " +
+ "for more details. " +
+ "Note: the use of the AOSP compiler in google3 is restricted due to performance " +
+ "considerations. This should not be broadly used unless these features are " +
+ "strictly required."
+ ),
+ ),
+ idlopts = attr.string_list(
+ mandatory = False,
+ allow_empty = True,
+ default = [],
+ doc = (
+ "Add these flags to the AIDL compiler command."
+ ),
+ ),
neverlink = attr.bool(
default = False,
doc = (
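
A hypothetical BUILD snippet showing the two new attributes together; the target, the file names, and the specific --structured flag are illustrative only, and idl_uses_aosp_compiler = True additionally requires the target to be in the AOSP AIDL compiler allowlist introduced above:

    android_library(
        name = "aidl_client",
        srcs = ["Client.java"],
        idl_srcs = ["IMyService.aidl"],
        # Only allowed for targets in ANDROID_LIBRARY_USE_AOSP_AIDL_COMPILER_ALLOWLIST.
        idl_uses_aosp_compiler = True,
        # Passed through to the AIDL compiler; rejected unless the attribute above is True.
        idlopts = ["--structured"],
    )
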
diff --git a/rules/android_library/impl.bzl b/rules/android_library/impl.bzl
index f508c1b..72429fd 100644
--- a/rules/android_library/impl.bzl
+++ b/rules/android_library/impl.bzl
@@ -52,6 +52,14 @@ _IDL_SRC_FROM_DIFFERENT_PACKAGE_ERROR = (
"package or depend on an appropriate rule there."
)
+_IDL_USES_AOSP_COMPILER_ERROR = (
+ "Use of `idl_uses_aosp_compiler` is not allowed for %s."
+)
+
+_IDL_IDLOPTS_UNSUPPORTED_ERROR = (
+ "`idlopts` is supported only if `idl_uses_aosp_compiler` is set to true."
+)
+
# Android library AAR context attributes.
_PROVIDERS = "providers"
_VALIDATION_OUTPUTS = "validation_outputs"
@@ -64,9 +72,11 @@ _AARContextInfo = provider(
},
)
+def _has_srcs(ctx):
+ return ctx.files.srcs or ctx.files.idl_srcs or getattr(ctx.files, "common_srcs", False)
+
def _uses_deprecated_implicit_export(ctx):
- return (ctx.attr.deps and not (ctx.files.srcs or
- ctx.files.idl_srcs or
+ return (ctx.attr.deps and not (_has_srcs(ctx) or
ctx.attr._defined_assets or
ctx.files.resource_files or
ctx.attr.manifest))
@@ -74,10 +84,10 @@ def _uses_deprecated_implicit_export(ctx):
def _uses_resources_and_deps_without_srcs(ctx):
return (ctx.attr.deps and
(ctx.attr._defined_assets or ctx.files.resource_files or ctx.attr.manifest) and
- not (ctx.files.srcs or ctx.files.idl_srcs))
+ not _has_srcs(ctx))
def _check_deps_without_java_srcs(ctx):
- if not ctx.attr.deps or ctx.files.srcs or ctx.files.idl_srcs:
+ if not ctx.attr.deps or _has_srcs(ctx):
return False
gfn = getattr(ctx.attr, "generator_function", "")
if _uses_deprecated_implicit_export(ctx):
@@ -99,6 +109,15 @@ def _validate_rule_context(ctx):
if ctx.label.package != idl_src.label.package:
log.error(_IDL_SRC_FROM_DIFFERENT_PACKAGE_ERROR % idl_src.label)
+ # Ensure that the AOSP AIDL compiler is used only in allowlisted packages
+ if (ctx.attr.idl_uses_aosp_compiler and
+ not acls.in_android_library_use_aosp_aidl_compiler_allowlist(str(ctx.label))):
+ log.error(_IDL_USES_AOSP_COMPILER_ERROR % ctx.label)
+
+    # Check that idlopts is only used together with idl_uses_aosp_compiler
+ if ctx.attr.idlopts and not ctx.attr.idl_uses_aosp_compiler:
+        log.error(_IDL_IDLOPTS_UNSUPPORTED_ERROR)
+
return struct(
enable_deps_without_srcs = _check_deps_without_java_srcs(ctx),
)
@@ -132,7 +151,7 @@ def _process_resources(ctx, java_package, manifest_ctx, **unused_ctxs):
# Process Android Resources
resources_ctx = _resources.process(
ctx,
- manifest = manifest_ctx.min_sdk_bumped_manifest,
+ manifest = manifest_ctx.processed_manifest,
resource_files = ctx.attr.resource_files,
defined_assets = ctx.attr._defined_assets,
assets = ctx.attr.assets,
@@ -201,6 +220,8 @@ def _process_idl(ctx, **unused_sub_ctxs):
aidl = get_android_sdk(ctx).aidl,
aidl_lib = get_android_sdk(ctx).aidl_lib,
aidl_framework = get_android_sdk(ctx).framework_aidl,
+ uses_aosp_compiler = ctx.attr.idl_uses_aosp_compiler,
+ idlopts = ctx.attr.idlopts,
),
)
@@ -385,7 +406,7 @@ def _process_intellij(ctx, java_package, manifest_ctx, resources_ctx, idl_ctx, j
android_ide_info = _intellij.make_android_ide_info(
ctx,
java_package = java_package,
- manifest = manifest_ctx.min_sdk_bumped_manifest,
+ manifest = manifest_ctx.processed_manifest,
defines_resources = resources_ctx.defines_resources,
merged_manifest = resources_ctx.merged_manifest,
resources_apk = resources_ctx.resources_apk,
diff --git a/rules/idl.bzl b/rules/idl.bzl
index 8dc52d3..42ea015 100644
--- a/rules/idl.bzl
+++ b/rules/idl.bzl
@@ -47,9 +47,20 @@ def _gen_java_from_idl(
transitive_idl_preprocessed = [],
aidl = None,
aidl_lib = None,
- aidl_framework = None):
+ aidl_framework = None,
+ uses_aosp_compiler = False,
+ idlopts = []):
args = ctx.actions.args()
- args.add("-b")
+
+    # Note: at the moment (2022/11/07), the flags that the AOSP compiler accepts are a superset of
+    # those accepted by the Google3 compiler, but that might not be true in the future.
+ if uses_aosp_compiler:
+ args.add("--use-aosp-compiler")
+
+ for opt in idlopts:
+ args.add(opt)
+
+ args.add("-b") # fail on parcelable
args.add_all(transitive_idl_import_roots, format_each = "-I%s")
args.add(aidl_framework, format = "-p%s")
args.add_all(transitive_idl_preprocessed, format_each = "-p%s")
@@ -129,7 +140,9 @@ def _process(
exports = [],
aidl = None,
aidl_lib = None,
- aidl_framework = None):
+ aidl_framework = None,
+ uses_aosp_compiler = False,
+ idlopts = []):
"""Processes Android IDL.
Args:
@@ -179,6 +192,14 @@ def _process(
Optional, unless idl_srcs are supplied.
aidl_framework: Target. A target pointing to the aidl framework. Optional,
unless idl_srcs are supplied.
+      uses_aosp_compiler: boolean. If True, the upstream AOSP AIDL compiler is
+        used instead of the Google3-only AIDL compiler. This allows a wider
+        range of AIDL language features, including structured parcelables,
+        enums, unions, and many more. On the other hand, using this may cause a
+        noticeable regression in code size and performance, as the compiler
+        doesn't implement several optimization techniques that the Google3
+        compiler has.
+ idlopts: list of string. Additional flags to add to the AOSP AIDL compiler
+ invocation.
Returns:
A IDLContextInfo provider.
@@ -224,6 +245,8 @@ def _process(
aidl = aidl,
aidl_lib = aidl_lib,
aidl_framework = aidl_framework,
+ uses_aosp_compiler = uses_aosp_compiler,
+ idlopts = idlopts,
)
return IDLContextInfo(
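
Given the argument handling above, a sketch of the resulting aidl flag order for a hypothetical call with uses_aosp_compiler = True and idlopts = ["--structured"]:

    # --use-aosp-compiler --structured -b -I<import root> ... -p<framework aidl> -p<preprocessed> ...
    #
    # i.e. the AOSP switch and any idlopts are emitted before the pre-existing
    # "-b", import-root, and preprocessed-file flags.
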
diff --git a/rules/java.bzl b/rules/java.bzl
index 2e2fc58..25c7090 100644
--- a/rules/java.bzl
+++ b/rules/java.bzl
@@ -359,14 +359,14 @@ def _singlejar(
output,
mnemonic = "SingleJar",
progress_message = "Merge into a single jar.",
- exclude_build_data = False,
+ include_build_data = False,
java_toolchain = None):
args = ctx.actions.args()
args.add("--output")
args.add(output)
args.add("--compression")
args.add("--normalize")
- if exclude_build_data:
+ if not include_build_data:
args.add("--exclude_build_data")
args.add("--warn_duplicate_resources")
if inputs:
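
With the flag inverted to include_build_data, build data is now excluded by default, which is why the exclude_build_data = True argument could simply be dropped from the android_application call site earlier in this diff. A minimal sketch, assuming the helper is exposed as _java.singlejar:

    _java.singlejar(
        ctx,
        inputs = [filtered_res, native_libs],
        output = out,
        # exclude_build_data = True  # no longer needed; exclusion is the default
        java_toolchain = _common.get_java_toolchain(ctx),
    )
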
diff --git a/rules/native_deps.bzl b/rules/native_deps.bzl
new file mode 100644
index 0000000..7702e38
--- /dev/null
+++ b/rules/native_deps.bzl
@@ -0,0 +1,310 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Defines the native libs processing and an aspect to collect build configuration
+of split deps
+"""
+
+load("//rules:common.bzl", "common")
+
+SplitConfigInfo = provider(
+ doc = "Provides information about configuration for a split config dep",
+ fields = dict(
+ build_config = "The build configuration of the dep.",
+ ),
+)
+
+def _split_config_aspect_impl(__, ctx):
+ return SplitConfigInfo(build_config = ctx.configuration)
+
+split_config_aspect = aspect(
+ implementation = _split_config_aspect_impl,
+)
+
+def process(ctx, filename):
+ """ Links native deps into a shared library
+
+ Args:
+ ctx: The context.
+ filename: String. The name of the artifact containing the name of the
+ linked shared library
+
+ Returns:
+        An AndroidBinaryNativeLibsInfo provider built from libs (a dict mapping
+        each split key to a depset of native deps) and libs_name (a File
+        containing the basename of the linked shared library).
+ """
+ actual_target_name = ctx.label.name.removesuffix(common.PACKAGED_RESOURCES_SUFFIX)
+ native_libs_basename = None
+ libs_name = None
+ libs = dict()
+ for key, deps in ctx.split_attr.deps.items():
+ cc_toolchain_dep = ctx.split_attr._cc_toolchain_split[key]
+ cc_toolchain = cc_toolchain_dep[cc_common.CcToolchainInfo]
+ build_config = cc_toolchain_dep[SplitConfigInfo].build_config
+ linker_input = cc_common.create_linker_input(
+ owner = ctx.label,
+ user_link_flags = ["-Wl,-soname=lib" + actual_target_name],
+ )
+ cc_info = cc_common.merge_cc_infos(
+ cc_infos = _concat(
+ [CcInfo(linking_context = cc_common.create_linking_context(
+ linker_inputs = depset([linker_input]),
+ ))],
+ [dep[JavaInfo].cc_link_params_info for dep in deps if JavaInfo in dep],
+ [dep[AndroidCcLinkParamsInfo].link_params for dep in deps if AndroidCcLinkParamsInfo in dep],
+ [dep[CcInfo] for dep in deps if CcInfo in dep],
+ ),
+ )
+ libraries = []
+
+ native_deps_lib = _link_native_deps_if_present(ctx, cc_info, cc_toolchain, build_config, actual_target_name)
+ if native_deps_lib:
+ libraries.append(native_deps_lib)
+ native_libs_basename = native_deps_lib.basename
+
+ libraries.extend(_filter_unique_shared_libs(native_deps_lib, cc_info))
+
+ if libraries:
+ libs[key] = depset(libraries)
+
+ if libs and native_libs_basename:
+ libs_name = ctx.actions.declare_file("nativedeps_filename/" + actual_target_name + "/" + filename)
+ ctx.actions.write(output = libs_name, content = native_libs_basename)
+
+ return AndroidBinaryNativeLibsInfo(libs, libs_name)
+
+def _all_inputs(cc_info):
+ return [
+ lib
+ for input in cc_info.linking_context.linker_inputs.to_list()
+ for lib in input.libraries
+ ]
+
+def _filter_unique_shared_libs(linked_lib, cc_info):
+ basenames = {}
+ artifacts = {}
+ if linked_lib:
+ basenames[linked_lib.basename] = linked_lib
+ for input in _all_inputs(cc_info):
+ if input.pic_static_library or input.static_library:
+ # This is not a shared library and will not be loaded by Android, so skip it.
+ continue
+
+ artifact = None
+ if input.interface_library:
+ if input.resolved_symlink_interface_library:
+ artifact = input.resolved_symlink_interface_library
+ else:
+ artifact = input.interface_library
+ elif input.resolved_symlink_dynamic_library:
+ artifact = input.resolved_symlink_dynamic_library
+ else:
+ artifact = input.dynamic_library
+
+ if not artifact:
+ fail("Should never happen: did not find artifact for link!")
+
+ if artifact in artifacts:
+ # We have already reached this library, e.g., through a different solib symlink.
+ continue
+ artifacts[artifact] = None
+ basename = artifact.basename
+ if basename in basenames:
+ old_artifact = basenames[basename]
+ fail(
+ "Each library in the transitive closure must have a " +
+ "unique basename to avoid name collisions when packaged into " +
+ "an apk, but two libraries have the basename '" + basename +
+ "': " + artifact + " and " + old_artifact + (
+                "': " + str(artifact) + " and " + str(old_artifact) + (
+ ),
+ )
+ else:
+ basenames[basename] = artifact
+
+ return artifacts.keys()
+
+def _contains_code_to_link(input):
+ if not input.static_library and not input.pic_static_library:
+ # this is a shared library so we're going to have to copy it
+ return False
+ if input.objects:
+ object_files = input.objects
+ elif input.pic_objects:
+ object_files = input.pic_objects
+ else:
+ # this is an opaque library so we're going to have to link it
+ return True
+ for obj in object_files:
+ if not _is_shared_library(obj):
+ # this library was built with a non-shared-library object so we should link it
+ return True
+ return False
+
+def _is_shared_library(lib_artifact):
+ if (lib_artifact.extension in ["so", "dll", "dylib"]):
+ return True
+
+ lib_name = lib_artifact.basename
+
+ # validate against the regex "^.+\\.((so)|(dylib))(\\.\\d\\w*)+$",
+ # must match VERSIONED_SHARED_LIBRARY.
+ for ext in (".so.", ".dylib."):
+ name, _, version = lib_name.rpartition(ext)
+ if name and version:
+ version_parts = version.split(".")
+ for part in version_parts:
+ if not part[0].isdigit():
+ return False
+ for c in part[1:].elems():
+ if not (c.isalnum() or c == "_"):
+ return False
+ return True
+ return False
+
+def _get_build_info(ctx):
+ return cc_common.get_build_info(ctx)
+
+def _get_shared_native_deps_path(
+ linker_inputs,
+ link_opts,
+ linkstamps,
+ build_info_artifacts,
+ features,
+ is_test_target_partially_disabled_thin_lto):
+ fp = []
+ for artifact in linker_inputs:
+ fp.append(artifact.short_path)
+ fp.append(str(len(link_opts)))
+ for opt in link_opts:
+ fp.append(opt)
+ for artifact in linkstamps:
+ fp.append(artifact.short_path)
+ for artifact in build_info_artifacts:
+ fp.append(artifact.short_path)
+ for feature in features:
+ fp.append(feature)
+
+ fp.append("1" if is_test_target_partially_disabled_thin_lto else "0")
+
+ fingerprint = "%x" % hash("".join(fp))
+ return "_nativedeps/" + fingerprint
+
+def _get_static_mode_params_for_dynamic_library_libraries(libs):
+ linker_inputs = []
+ for lib in libs:
+ if lib.pic_static_library:
+ linker_inputs.append(lib.pic_static_library)
+ elif lib.static_library:
+ linker_inputs.append(lib.static_library)
+ elif lib.interface_library:
+ linker_inputs.append(lib.interface_library)
+ else:
+ linker_inputs.append(lib.dynamic_library)
+ return linker_inputs
+
+def _link_native_deps_if_present(ctx, cc_info, cc_toolchain, build_config, actual_target_name, is_test_rule_class = False):
+ needs_linking = False
+ for input in _all_inputs(cc_info):
+ needs_linking = needs_linking or _contains_code_to_link(input)
+
+ if not needs_linking:
+ return None
+
+ # This does not need to be shareable, but we use this API to specify the
+ # custom file root (matching the configuration)
+ output_lib = ctx.actions.declare_shareable_artifact(
+ ctx.label.package + "/nativedeps/" + actual_target_name + "/lib" + actual_target_name + ".so",
+ build_config.bin_dir,
+ )
+
+ link_opts = cc_info.linking_context.user_link_flags
+
+ linkstamps = []
+ for input in cc_info.linking_context.linker_inputs.to_list():
+ linkstamps.extend(input.linkstamps)
+ linkstamps_dict = {linkstamp: None for linkstamp in linkstamps}
+
+ build_info_artifacts = _get_build_info(ctx) if linkstamps_dict else []
+ requested_features = ["static_linking_mode", "native_deps_link"]
+ requested_features.extend(ctx.features)
+ if not "legacy_whole_archive" in ctx.disabled_features:
+ requested_features.append("legacy_whole_archive")
+ requested_features = sorted(requested_features)
+ feature_config = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ requested_features = requested_features,
+ unsupported_features = ctx.disabled_features,
+ )
+ partially_disabled_thin_lto = (
+ cc_common.is_enabled(
+ feature_name = "thin_lto_linkstatic_tests_use_shared_nonlto_backends",
+ feature_configuration = feature_config,
+ ) and not cc_common.is_enabled(
+ feature_name = "thin_lto_all_linkstatic_use_shared_nonlto_backends",
+ feature_configuration = feature_config,
+ )
+ )
+ test_only_target = ctx.attr.testonly or is_test_rule_class
+ share_native_deps = ctx.fragments.cpp.share_native_deps()
+
+ linker_inputs = _get_static_mode_params_for_dynamic_library_libraries(cc_info.linking_context.libraries_to_link)
+
+ if share_native_deps:
+ shared_path = _get_shared_native_deps_path(
+ linker_inputs,
+ link_opts,
+ [linkstamp.file() for linkstamp in linkstamps_dict],
+ build_info_artifacts,
+ requested_features,
+ test_only_target and partially_disabled_thin_lto,
+ )
+ linked_lib = ctx.actions.declare_shareable_artifact(shared_path + ".so", build_config.bin_dir)
+ else:
+ linked_lib = output_lib
+
+ cc_common.link(
+ name = ctx.label.name,
+ actions = ctx.actions,
+ linking_contexts = [cc_info.linking_context],
+ output_type = "dynamic_library",
+ never_link = True,
+ native_deps = True,
+ feature_configuration = feature_config,
+ cc_toolchain = cc_toolchain,
+ test_only_target = test_only_target,
+ stamp = ctx.attr.stamp,
+ grep_includes = ctx.file._grep_includes,
+ main_output = linked_lib,
+ use_shareable_artifact_factory = True,
+ build_config = build_config,
+ )
+
+ if (share_native_deps):
+ ctx.actions.symlink(
+ output = output_lib,
+ target_file = linked_lib,
+ )
+ return output_lib
+ else:
+ return linked_lib
+
+def _concat(*list_of_lists):
+ res = []
+    for lst in list_of_lists:
+        res.extend(lst)
+ return res
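
A sketch of the artifact naming this produces for a hypothetical target //app:hello (after any common.PACKAGED_RESOURCES_SUFFIX is stripped from the label name):

    # user_link_flags: ["-Wl,-soname=libhello"]
    # output_lib:      app/nativedeps/hello/libhello.so
    # libs_name:       a file under nativedeps_filename/hello/ whose content is the
    #                  basename of the linked library, e.g. "libhello.so"
    #
    # When ctx.fragments.cpp.share_native_deps() is on, the real link output lands
    # under a fingerprinted _nativedeps/<hash>.so path and output_lib is a symlink to it.
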
diff --git a/rules/proguard.bzl b/rules/proguard.bzl
index 837d622..5b1a16c 100644
--- a/rules/proguard.bzl
+++ b/rules/proguard.bzl
@@ -14,6 +14,8 @@
"""Bazel Android Proguard library for the Android rules."""
+load(":utils.bzl", "utils")
+
_ProguardContextInfo = provider(
doc = "Contains data from processing Proguard specs.",
fields = dict(
@@ -100,11 +102,50 @@ def _process(
],
)
+def _collect_transitive_proguard_specs(
+ specs_to_include,
+ local_proguard_specs,
+ proguard_deps):
+ if len(local_proguard_specs) == 0:
+ return []
+
+ proguard_specs = local_proguard_specs + specs_to_include
+ for dep in proguard_deps:
+ proguard_specs.extend(dep.specs.to_list())
+
+ return sorted(proguard_specs)
+
+def _get_proguard_specs(
+ ctx,
+ resource_proguard_config,
+ proguard_specs_for_manifest = []):
+ proguard_deps = utils.collect_providers(ProguardSpecProvider, utils.dedupe_split_attr(ctx.split_attr.deps))
+ if ctx.configuration.coverage_enabled and hasattr(ctx.attr, "_jacoco_runtime"):
+ proguard_deps.append(ctx.attr._jacoco_runtime[ProguardSpecProvider])
+
+ local_proguard_specs = []
+ if ctx.files.proguard_specs:
+ local_proguard_specs = ctx.files.proguard_specs
+ proguard_specs = _collect_transitive_proguard_specs(
+ [resource_proguard_config],
+ local_proguard_specs,
+ proguard_deps,
+ )
+
+ if len(proguard_specs) > 0 and ctx.fragments.android.assume_min_sdk_version:
+ # NB: Order here is important. We're including generated Proguard specs before the user's
+        # NB: Order here is important. We're including the generated Proguard specs before the
+        # user's specs so that the user's specs can override values.
+
+ return proguard_specs
+
proguard = struct(
process = _process,
+ get_proguard_specs = _get_proguard_specs,
)
testing = struct(
validate_proguard_spec = _validate_proguard_spec,
+ collect_transitive_proguard_specs = _collect_transitive_proguard_specs,
ProguardContextInfo = _ProguardContextInfo,
)
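
A sketch of the ordering _get_proguard_specs produces for a hypothetical binary with one local spec file and one spec inherited from a dep:

    # transitive = sorted([local.pgcfg, resource_proguard_config, dep_provided.pgcfg])
    # result     = proguard_specs_for_manifest + transitive
    #              (the prepend only happens when assume_min_sdk_version is set,
    #               so generated specs come first and the user's specs can override them)
    #
    # If there are no local proguard_specs at all, the collected list is empty.
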
diff --git a/rules/resources.bzl b/rules/resources.bzl
index 86e82e0..93a66a5 100644
--- a/rules/resources.bzl
+++ b/rules/resources.bzl
@@ -116,6 +116,7 @@ _PACKAGED_FINAL_MANIFEST = "processed_manifest"
_PACKAGED_RESOURCE_APK = "resources_apk"
_PACKAGED_CLASS_JAR = "class_jar"
_PACKAGED_VALIDATION_RESULT = "validation_result"
+_RESOURCE_PROGUARD_CONFIG = "resource_proguard_config"
_ResourcesPackageContextInfo = provider(
"Packaged resources context object",
@@ -126,17 +127,18 @@ _ResourcesPackageContextInfo = provider(
_PACKAGED_VALIDATION_RESULT: "Validation result.",
_R_JAVA: "JavaInfo for R.jar",
_DATA_BINDING_LAYOUT_INFO: "Databinding layout info file.",
+        _RESOURCE_PROGUARD_CONFIG: "Resource proguard config.",
_PROVIDERS: "The list of all providers to propagate.",
},
)
# Manifest context attributes
-_MIN_SDK_BUMPED_MANIFEST = "min_sdk_bumped_manifest"
+_PROCESSED_MANIFEST = "processed_manifest"
_ManifestContextInfo = provider(
"Manifest context object",
fields = {
- _MIN_SDK_BUMPED_MANIFEST: "The manifest with the min SDK bumped to the floor.",
+ _PROCESSED_MANIFEST: "The manifest after the min SDK has been changed as necessary.",
},
)
@@ -688,6 +690,7 @@ def _package(
packaged_resources_ctx[_PACKAGED_FINAL_MANIFEST] = processed_manifest
packaged_resources_ctx[_PACKAGED_RESOURCE_APK] = resource_apk
packaged_resources_ctx[_PACKAGED_VALIDATION_RESULT] = resource_files_zip
+ packaged_resources_ctx[_RESOURCE_PROGUARD_CONFIG] = proguard_cfg
# Fix class jar name because some tests depend on {label_name}_resources.jar being the suffix of
# the path, with _common.PACKAGED_RESOURCES_SUFFIX removed from the label name.
@@ -1014,7 +1017,7 @@ def _bump_min_sdk(
"""
manifest_ctx = {}
if not manifest or floor <= 0:
- manifest_ctx[_MIN_SDK_BUMPED_MANIFEST] = manifest
+ manifest_ctx[_PROCESSED_MANIFEST] = manifest
return _ManifestContextInfo(**manifest_ctx)
args = ctx.actions.args()
@@ -1040,7 +1043,57 @@ def _bump_min_sdk(
mnemonic = "BumpMinSdkFloor",
progress_message = "Bumping up AndroidManifest min SDK %s" % str(ctx.label),
)
- manifest_ctx[_MIN_SDK_BUMPED_MANIFEST] = out_manifest
+ manifest_ctx[_PROCESSED_MANIFEST] = out_manifest
+
+ return _ManifestContextInfo(**manifest_ctx)
+
+def _set_default_min_sdk(
+ ctx,
+ manifest,
+ default,
+ enforce_min_sdk_floor_tool):
+ """ Sets the min SDK attribute of AndroidManifest to default if it is not already set.
+
+ Args:
+ ctx: The rules context.
+ manifest: File. The AndroidManifest.xml file.
+ default: string. The default value for min SDK. The manifest is unchanged if it already
+ specifies a min SDK.
+      enforce_min_sdk_floor_tool: FilesToRunProvider. The enforce_min_sdk_floor tool
+        executable or FilesToRunProvider.
+
+ Returns:
+      A _ManifestContextInfo provider.
+ """
+ manifest_ctx = {}
+ if not manifest or not default:
+ manifest_ctx[_PROCESSED_MANIFEST] = manifest
+ return _ManifestContextInfo(**manifest_ctx)
+
+ args = ctx.actions.args()
+ args.add("-action", "set_default")
+ args.add("-manifest", manifest)
+ args.add("-default_min_sdk", default)
+
+ out_dir = "_migrated/_min_sdk_default_set/" + ctx.label.name + "/"
+ log = ctx.actions.declare_file(
+ out_dir + "log.txt",
+ )
+ args.add("-log", log.path)
+
+ out_manifest = ctx.actions.declare_file(
+ out_dir + "AndroidManifest.xml",
+ )
+ args.add("-output", out_manifest.path)
+ ctx.actions.run(
+ executable = enforce_min_sdk_floor_tool,
+ inputs = [manifest],
+ outputs = [out_manifest, log],
+ arguments = [args],
+ mnemonic = "SetDefaultMinSdkFloor",
+ progress_message = "Setting AndroidManifest min SDK to default %s" % str(ctx.label),
+ )
+ manifest_ctx[_PROCESSED_MANIFEST] = out_manifest
return _ManifestContextInfo(**manifest_ctx)
@@ -1791,6 +1844,9 @@ resources = struct(
# Exposed for android_library, aar_import, and android_binary
bump_min_sdk = _bump_min_sdk,
+ # Exposed for use in AOSP
+ set_default_sdk = _set_default_min_sdk,
+
# Exposed for android_binary
validate_min_sdk = _validate_min_sdk,
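
A minimal sketch of how the newly exposed helper might be called from an AOSP-side rule, assuming the struct is loaded as _resources (as in android_binary_internal/impl.bzl above); the toolchain accessor for the tool is an assumption, not something defined in this diff:

    manifest_ctx = _resources.set_default_sdk(
        ctx,
        manifest = ctx.file.manifest,
        default = "21",  # hypothetical default min SDK
        # Assumed accessor; any FilesToRunProvider for the enforce_min_sdk_floor tool works.
        enforce_min_sdk_floor_tool = get_android_toolchain(ctx).enforce_min_sdk_floor_tool.files_to_run,
    )
    # manifest_ctx.processed_manifest then feeds resource processing, just like the
    # bump_min_sdk path above.
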
diff --git a/src/java/com/example/sampleapp/SampleApp.java b/src/java/com/example/sampleapp/SampleApp.java
index 9564755..fe5f6fe 100644
--- a/src/java/com/example/sampleapp/SampleApp.java
+++ b/src/java/com/example/sampleapp/SampleApp.java
@@ -28,9 +28,7 @@ public class SampleApp extends Activity {
public void onCreate(Bundle state) {
super.onCreate(state);
- TextView view = new TextView(this);
- view.setText(getString());
- setContentView(view);
+ setContentView(R.layout.basic_activity);
}
public native String getString();
diff --git a/src/java/com/example/sampleapp/res/layout/basic_activity.xml b/src/java/com/example/sampleapp/res/layout/basic_activity.xml
new file mode 100644
index 0000000..61fb73e
--- /dev/null
+++ b/src/java/com/example/sampleapp/res/layout/basic_activity.xml
@@ -0,0 +1,12 @@
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:orientation="vertical" >
+
+ <TextView
+ android:id="@+id/text_hello"
+ android:text="@string/hello_world"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content" />
+
+</LinearLayout>
diff --git a/src/java/com/example/sampleapp/res/values/strings.xml b/src/java/com/example/sampleapp/res/values/strings.xml
index 28193f2..2065009 100644
--- a/src/java/com/example/sampleapp/res/values/strings.xml
+++ b/src/java/com/example/sampleapp/res/values/strings.xml
@@ -1,4 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">SampleApp</string>
+ <string name="hello_world" translatable="false">Hello world!</string>
</resources>
diff --git a/src/tools/ak/akhelper.go b/src/tools/ak/akhelper.go
new file mode 100644
index 0000000..5a91098
--- /dev/null
+++ b/src/tools/ak/akhelper.go
@@ -0,0 +1,27 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package akhelper provides globally used functions.
+package akhelper
+
+import "strings"
+
+const (
+ lnBreak = "\n "
+)
+
+// FormatDesc returns an indented string with line breaks for each element in the given string slice.
+func FormatDesc(desc []string) string {
+ return strings.Join(desc, lnBreak)
+}
diff --git a/src/tools/ak/bucketize/BUILD b/src/tools/ak/bucketize/BUILD
new file mode 100644
index 0000000..750400a
--- /dev/null
+++ b/src/tools/ak/bucketize/BUILD
@@ -0,0 +1,58 @@
+load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
+load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test")
+
+# Description:
+# Package for bucketize module
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+go_binary(
+ name = "bucketize_bin",
+ srcs = ["bucketize_bin.go"],
+ deps = [
+ ":bucketize",
+ "//src/common/golang:flagfile",
+ ],
+)
+
+go_library(
+ name = "bucketize",
+ srcs = [
+ "bucketize.go",
+ "partitioner.go",
+ "pipe.go",
+ ],
+ importpath = "src/tools/ak/bucketize/bucketize",
+ deps = [
+ "//src/common/golang:flags",
+ "//src/common/golang:shard",
+ "//src/common/golang:walk",
+ "//src/common/golang:xml2",
+ "//src/tools/ak:akhelper",
+ "//src/tools/ak:types",
+ "//src/tools/ak/res",
+ ],
+)
+
+go_test(
+ name = "bucketize_test",
+ size = "small",
+ srcs = [
+ "bucketize_test.go",
+ "partitioner_test.go",
+ ],
+ embed = [":bucketize"],
+ deps = [
+ "//src/common/golang:shard",
+ "//src/common/golang:walk",
+ "//src/tools/ak/res",
+ ],
+)
+
+go_test(
+ name = "pipe_test",
+ size = "small",
+ srcs = ["pipe_test.go"],
+ embed = [":bucketize"],
+)
diff --git a/src/tools/ak/bucketize/bucketize.go b/src/tools/ak/bucketize/bucketize.go
new file mode 100644
index 0000000..4e72b0f
--- /dev/null
+++ b/src/tools/ak/bucketize/bucketize.go
@@ -0,0 +1,451 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package bucketize provides functionality to bucketize Android resources.
+package bucketize
+
+import (
+ "bytes"
+ "context"
+ "encoding/xml"
+ "flag"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path"
+ "strings"
+ "sync"
+
+ "src/common/golang/flags"
+ "src/common/golang/shard"
+ "src/common/golang/walk"
+ "src/common/golang/xml2"
+ "src/tools/ak/akhelper"
+ "src/tools/ak/res/res"
+ "src/tools/ak/types"
+)
+
+const (
+ numParsers = 25
+)
+
+// Archiver processes the provided resource files and directories and stores the data.
+type Archiver struct {
+ ResFiles []*res.PathInfo
+ Partitioner Partitioner
+}
+
+// ResourcesAttribute correlates the attribute of a resources xml tag and the file where it originates
+type ResourcesAttribute struct {
+ Attribute xml.Attr
+ ResFile *res.PathInfo
+}
+
+var (
+	// Cmd defines the command to run bucketize
+ Cmd = types.Command{
+ Init: Init,
+ Run: Run,
+ Desc: desc,
+ Flags: []string{
+ "res_paths",
+ "typed_outputs",
+ },
+ }
+
+ resPaths flags.StringList
+ typedOutputs flags.StringList
+
+ initOnce sync.Once
+)
+
+// Init initializes bucketize.
+func Init() {
+ initOnce.Do(func() {
+ flag.Var(&resPaths, "res_paths", "List of res paths (a file or directory).")
+ flag.Var(&typedOutputs, "typed_outputs", akhelper.FormatDesc([]string{
+ "A list of output file paths, each path prefixed with the res type it supports.",
+			"<res_type>:<file_path> e.g. string:/foo/bar/res-string-0.zip,string:/foo/bar/res-string-1.zip,...",
+			"The number of files per res type determines the number of shards."}))
+ })
+}
+
+func desc() string {
+ return "Bucketize Android resources."
+}
+
+// makeArchiver creates an Archiver
+func makeArchiver(resFiles []string, p Partitioner) (*Archiver, error) {
+ pis, err := res.MakePathInfos(resFiles)
+ if err != nil {
+ return nil, fmt.Errorf("converting res path failed: %v", err)
+ }
+ return &Archiver{ResFiles: pis, Partitioner: p}, nil
+}
+
+// Archive processes the res directories and files of the archiver.
+func (a *Archiver) Archive(ctx context.Context) error {
+ ctx, cancel := context.WithCancel(prefixErr(ctx, "archive: "))
+ defer cancel()
+ vPIC, nvPIC := separatePathInfosByValues(ctx, a.ResFiles)
+ vrCs := make([]<-chan *res.ValuesResource, 0, numParsers)
+ raCs := make([]<-chan *ResourcesAttribute, 0, numParsers)
+ errCs := make([]<-chan error, 0, numParsers)
+ for i := 0; i < numParsers; i++ {
+ vrC, raC, vErrC := handleValuesPathInfos(ctx, vPIC)
+ vrCs = append(vrCs, vrC)
+ raCs = append(raCs, raC)
+ errCs = append(errCs, vErrC)
+ }
+ mVRC := mergeValuesResourceStreams(ctx, vrCs)
+ mRAC := mergeResourcesAttributeStreams(ctx, raCs)
+ mErrC := mergeErrStreams(ctx, errCs)
+ return a.archive(ctx, nvPIC, mVRC, mRAC, mErrC)
+}
+
+// archive takes PathInfo, ValuesResource, ResourcesAttribute, and error channels and processes the values given.
+func (a *Archiver) archive(ctx context.Context, piC <-chan *res.PathInfo, vrC <-chan *res.ValuesResource, raC <-chan *ResourcesAttribute, errC <-chan error) error {
+ var errs []error
+Loop:
+ for piC != nil || vrC != nil || errC != nil || raC != nil {
+ select {
+ case e, ok := <-errC:
+ if !ok {
+ errC = nil
+ continue
+ }
+ errs = append(errs, e)
+ break Loop
+ case ra, ok := <-raC:
+ if !ok {
+ raC = nil
+ continue
+ }
+ a.Partitioner.CollectResourcesAttribute(ra)
+ case pi, ok := <-piC:
+ if !ok {
+ piC = nil
+ continue
+ }
+ a.Partitioner.CollectPathResource(*pi)
+ case vr, ok := <-vrC:
+ if !ok {
+ vrC = nil
+ continue
+ }
+ if err := a.Partitioner.CollectValues(vr); err != nil {
+ return fmt.Errorf("got error collecting values: %v", err)
+ }
+ }
+ }
+
+ if len(errs) != 0 {
+ return errorf(ctx, "errors encountered: %v", errs)
+ }
+ if err := a.Partitioner.Close(); err != nil {
+ return fmt.Errorf("got error closing partitioner: %v", err)
+ }
+ return nil
+}
+
+func handleValuesPathInfos(ctx context.Context, piC <-chan *res.PathInfo) (<-chan *res.ValuesResource, <-chan *ResourcesAttribute, <-chan error) {
+ vrC := make(chan *res.ValuesResource)
+ raC := make(chan *ResourcesAttribute)
+ errC := make(chan error)
+ go func() {
+ defer close(vrC)
+ defer close(raC)
+ defer close(errC)
+ for pi := range piC {
+ if !syncParse(prefixErr(ctx, fmt.Sprintf("%s values-parse: ", pi.Path)), pi, vrC, raC, errC) {
+ return
+ }
+ }
+ }()
+ return vrC, raC, errC
+}
+
+func syncParse(ctx context.Context, pi *res.PathInfo, vrC chan<- *res.ValuesResource, raC chan<- *ResourcesAttribute, errC chan<- error) bool {
+ f, err := os.Open(pi.Path)
+ if err != nil {
+ return sendErr(ctx, errC, errorf(ctx, "open failed: %v", err))
+ }
+ defer f.Close()
+ return syncParseReader(ctx, pi, xml.NewDecoder(f), vrC, raC, errC)
+}
+
+func syncParseReader(ctx context.Context, pi *res.PathInfo, dec *xml.Decoder, vrC chan<- *res.ValuesResource, raC chan<- *ResourcesAttribute, errC chan<- error) bool {
+ // Shadow Encoder is used to track xml state, such as namespaces. The state will be inherited by child encoders.
+ parentEnc := xml2.NewEncoder(ioutil.Discard)
+ for {
+ t, err := dec.Token()
+ if err == io.EOF {
+ return true
+ }
+ if err != nil {
+ return sendErr(ctx, errC, errorf(ctx, "token failed: %v", err))
+ }
+ if err := parentEnc.EncodeToken(t); err != nil {
+			return sendErr(ctx, errC, errorf(ctx, "encoding token %s failed: %v", t, err))
+ }
+ if se, ok := t.(xml.StartElement); ok && se.Name == res.ResourcesTagName {
+ for _, xmlAttr := range se.Attr {
+ raC <- &ResourcesAttribute{ResFile: pi, Attribute: xmlAttr}
+ }
+			// AAPT2 does not support multiple resources sections in a single file and silently ignores
+ // subsequent resources sections. The parser will only parse the first resources tag and exit.
+ return parseRes(ctx, parentEnc, pi, dec, vrC, errC)
+ }
+ }
+}
+
+func skipTag(se xml.StartElement) bool {
+ _, ok := res.ResourcesChildToSkip[se.Name]
+ return ok
+}
+
+func parseRes(ctx context.Context, parentEnc *xml2.Encoder, pi *res.PathInfo, dec *xml.Decoder, vrC chan<- *res.ValuesResource, errC chan<- error) bool {
+ for {
+ t, err := dec.Token()
+ if err != nil {
+ return sendErr(ctx, errC, errorf(ctx, "extract token failed: %v", err))
+ }
+ // Encode all tokens to the shadow Encoder at the top-level loop to keep track of any required xml state.
+ if err := parentEnc.EncodeToken(t); err != nil {
+			return sendErr(ctx, errC, errorf(ctx, "encoding token %s failed: %v", t, err))
+ }
+ switch t.(type) {
+ case xml.StartElement:
+ se := t.(xml.StartElement)
+ if skipTag(se) {
+ dec.Skip()
+ break
+ }
+
+ fqn, err := extractFQN(se)
+ if err != nil {
+ return sendErr(ctx, errC, errorf(ctx, "extract name and type failed: %v", err))
+ }
+
+ b, err := extractElement(parentEnc, dec, se)
+ if err != nil {
+ return sendErr(ctx, errC, errorf(ctx, "extracting element failed: %v", err))
+ }
+
+ if !sendVR(ctx, vrC, &res.ValuesResource{pi, fqn, b.Bytes()}) {
+ return false
+ }
+
+ if fqn.Type == res.Styleable {
+				// With a declare-styleable tag, parse its children and treat them as direct children of resources.
+ dsDec := xml.NewDecoder(b)
+ dsDec.Token() // we've already processed the first token (the declare-styleable start element)
+ if !parseRes(ctx, parentEnc, pi, dsDec, vrC, errC) {
+ return false
+ }
+ }
+ case xml.EndElement:
+ return true
+ }
+ }
+}
+
+func extractFQN(se xml.StartElement) (res.FullyQualifiedName, error) {
+ if matches(se.Name, res.ItemTagName) {
+ nameAttr, resType, err := extractNameAndType(se)
+ if err != nil {
+ return res.FullyQualifiedName{}, err
+ }
+ return res.ParseName(nameAttr, resType)
+ }
+
+ nameAttr, err := extractName(se)
+ if err != nil {
+ return res.FullyQualifiedName{}, err
+ }
+ if resType, ok := res.ResourcesTagToType[se.Name.Local]; ok {
+ return res.ParseName(nameAttr, resType)
+ }
+ return res.FullyQualifiedName{}, fmt.Errorf("%s: is an unhandled tag", se.Name.Local)
+
+}
+
+func extractName(se xml.StartElement) (nameAttr string, err error) {
+ for _, a := range se.Attr {
+ if matches(res.NameAttrName, a.Name) {
+ nameAttr = a.Value
+ break
+ }
+ }
+ if nameAttr == "" {
+ err = fmt.Errorf("%s: tag is missing %q attribute or is empty", se.Name.Local, res.NameAttrName.Local)
+ }
+ return
+}
+
+func extractNameAndType(se xml.StartElement) (nameAttr string, resType res.Type, err error) {
+ var typeAttr string
+ for _, a := range se.Attr {
+ if matches(res.NameAttrName, a.Name) {
+ nameAttr = a.Value
+ }
+ if matches(res.TypeAttrName, a.Name) {
+ typeAttr = a.Value
+ }
+ }
+ if nameAttr == "" {
+ err = fmt.Errorf("%s: tag is missing %q attribute or is empty", se.Name.Local, res.NameAttrName.Local)
+ return
+ }
+ if typeAttr == "" {
+ err = fmt.Errorf("%s: tag is missing %q attribute or is empty", se.Name.Local, res.TypeAttrName.Local)
+ return
+ }
+ resType, err = res.ParseType(typeAttr)
+ return
+}
+
+func matches(n1, n2 xml.Name) bool {
+ // Ignores xml.Name Space attributes unless both names specify Space.
+ if n1.Space == "" || n2.Space == "" {
+ return n1.Local == n2.Local
+ }
+ return n1.Local == n2.Local && n1.Space == n2.Space
+}
+
+func extractElement(parentEnc *xml2.Encoder, dec *xml.Decoder, se xml.Token) (*bytes.Buffer, error) {
+ // copy tag contents to a buffer
+ b := &bytes.Buffer{}
+ enc := xml2.ChildEncoder(b, parentEnc)
+ if err := enc.EncodeToken(se); err != nil {
+ return nil, fmt.Errorf("encoding start element failed: %v", err)
+ }
+ if err := copyTag(enc, dec); err != nil {
+ return nil, fmt.Errorf("copyTag failed: %s", err)
+ }
+ enc.Flush()
+ return b, nil
+}
+
+func copyTag(enc *xml2.Encoder, dec *xml.Decoder) error {
+ for {
+ t, err := dec.Token()
+ if err != nil {
+ return fmt.Errorf("extract token failed: %v", err)
+ }
+ if err := enc.EncodeToken(t); err != nil {
+ return fmt.Errorf("encoding token %v failed: %v", t, err)
+ }
+ switch t.(type) {
+ case xml.StartElement:
+ if err := copyTag(enc, dec); err != nil {
+ return err
+ }
+ case xml.EndElement:
+ return nil
+ }
+ }
+}
+
+func sendVR(ctx context.Context, vrC chan<- *res.ValuesResource, vr *res.ValuesResource) bool {
+ select {
+ case vrC <- vr:
+ case <-ctx.Done():
+ return false
+ }
+ return true
+}
+
+func hasChildType(dec *xml.Decoder, lookup map[xml.Name]res.Type, want res.Type) (bool, error) {
+ for {
+ t, err := dec.Token()
+ if err != nil {
+ return false, fmt.Errorf("extract token failed: %v", err)
+ }
+ switch t.(type) {
+ case xml.StartElement:
+ if rt, ok := lookup[t.(xml.StartElement).Name]; ok {
+ if rt == want {
+ return true, nil
+ }
+ }
+			// When the tag is not in the lookup, or its type is not the wanted type, skip it.
+ dec.Skip()
+ case xml.EndElement:
+ return false, nil
+ }
+ }
+}
+
+func createPartitions(typedOutputs []string) (map[res.Type][]io.Writer, error) {
+ partitions := make(map[res.Type][]io.Writer)
+ for _, tAndOP := range typedOutputs {
+ tOP := strings.SplitN(tAndOP, ":", 2)
+		// a missing ':' separator means no res type prefix was given, so the typed output path is malformed
+ if len(tOP) == 1 {
+ return nil, fmt.Errorf("got malformed typed output path %q wanted the following format \"<type>:<file path>\"", tAndOP)
+ }
+ t, err := res.ParseType(tOP[0])
+ if err != nil {
+ return nil, fmt.Errorf("got err while trying to parse %s to a res type: %v", tOP[0], err)
+ }
+ op := tOP[1]
+ if err := os.MkdirAll(path.Dir(op), 0744); err != nil {
+ return nil, fmt.Errorf("%s: mkdir failed: %v", op, err)
+ }
+ f, err := os.OpenFile(op, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0644)
+ if err != nil {
+ return nil, fmt.Errorf("open/create failed: %v", err)
+ }
+ partitions[t] = append(partitions[t], f)
+ }
+ return partitions, nil
+}
+
+// Run is the entry point for bucketize.
+func Run() {
+ if resPaths == nil || typedOutputs == nil {
+ log.Fatal("Flags -res_paths and -typed_outputs must be specified.")
+ }
+
+ resFiles, err := walk.Files(resPaths)
+ if err != nil {
+ log.Fatalf("Got error getting the resource paths: %v", err)
+ }
+ resFileIdxs := make(map[string]int)
+ for i, resFile := range resFiles {
+ resFileIdxs[resFile] = i
+ }
+
+ p, err := createPartitions(typedOutputs)
+ if err != nil {
+ log.Fatalf("Got error creating partitions: %v", err)
+ }
+
+ ps, err := makePartitionSession(p, shard.FNV, resFileIdxs)
+ if err != nil {
+ log.Fatalf("Got error making partition session: %v", err)
+ }
+
+ m, err := makeArchiver(resFiles, ps)
+ if err != nil {
+ log.Fatalf("Got error making archiver: %v", err)
+ }
+
+ if err := m.Archive(context.Background()); err != nil {
+ log.Fatalf("Got error archiving: %v", err)
+ }
+}
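
An illustrative invocation of the resulting binary (paths are hypothetical, and the comma-separated list syntax follows the typed_outputs example from the flag description above); each -typed_outputs entry uses the <res_type>:<file_path> format, and the number of files listed per type determines the shard count:

    bucketize_bin -res_paths res/values,res/layout/main.xml \
        -typed_outputs string:/tmp/out/res-string-0.zip,string:/tmp/out/res-string-1.zip
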
diff --git a/src/tools/ak/bucketize/bucketize_bin.go b/src/tools/ak/bucketize/bucketize_bin.go
new file mode 100644
index 0000000..f9ea517
--- /dev/null
+++ b/src/tools/ak/bucketize/bucketize_bin.go
@@ -0,0 +1,29 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The bucketize_bin is a command line tool to bucketize Android resources.
+package main
+
+import (
+ "flag"
+
+ _ "src/common/golang/flagfile"
+ "src/tools/ak/bucketize/bucketize"
+)
+
+func main() {
+ bucketize.Init()
+ flag.Parse()
+ bucketize.Run()
+}
diff --git a/src/tools/ak/bucketize/bucketize_test.go b/src/tools/ak/bucketize/bucketize_test.go
new file mode 100644
index 0000000..3864164
--- /dev/null
+++ b/src/tools/ak/bucketize/bucketize_test.go
@@ -0,0 +1,483 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bucketize
+
+import (
+ "bytes"
+ "context"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "reflect"
+ "strings"
+ "testing"
+
+ "src/common/golang/shard"
+ "src/common/golang/walk"
+ "src/tools/ak/res/res"
+)
+
+func TestNormalizeResPaths(t *testing.T) {
+ // Create a temporary directory to house the fake workspace.
+ tmp, err := ioutil.TempDir("", "")
+ if err != nil {
+ t.Fatalf("Can't make temp directory: %v", err)
+ }
+ defer os.RemoveAll(tmp)
+
+ var resPaths []string
+ fp1 := path.Join(tmp, "foo")
+ _, err = os.Create(fp1)
+ if err != nil {
+ t.Fatalf("Got error while trying to create %s: %v", fp1, err)
+ }
+ resPaths = append(resPaths, fp1)
+
+ dp1 := path.Join(tmp, "bar", "baz", "qux")
+	if err = os.MkdirAll(dp1, 0777); err != nil {
+ t.Fatalf("Got error while trying to create %s: %v", dp1, err)
+ }
+ resPaths = append(resPaths, dp1)
+
+ // Create a file nested in the directory that is passed in as a resPath. This file will get
+ // injected between fp1 and fp3 because the directory is defined in the middle. Hence,
+ // files added to the directory will appear between fp1 and fp3. This behavior is intended.
+ fInDP1 := path.Join(dp1, "quux")
+ _, err = os.Create(fInDP1)
+ if err != nil {
+ t.Fatalf("Got error while trying to create %s: %v", fInDP1, err)
+ }
+
+ fp3 := path.Join(tmp, "bar", "corge")
+ _, err = os.Create(fp3)
+ if err != nil {
+ t.Fatalf("Got error while trying to create %s: %v", fp3, err)
+ }
+ resPaths = append(resPaths, fp3)
+
+ gotFiles, err := walk.Files(resPaths)
+ if err != nil {
+ t.Fatalf("Got error getting the resource paths: %v", err)
+ }
+ gotFileIdxs := make(map[string]int)
+ for i, gotFile := range gotFiles {
+ gotFileIdxs[gotFile] = i
+ }
+
+ wantFiles := []string{fp1, fInDP1, fp3}
+ if !reflect.DeepEqual(gotFiles, wantFiles) {
+ t.Errorf("DeepEqual(\n%#v\n,\n%#v\n): returned false", gotFiles, wantFiles)
+ }
+
+ wantFileIdxs := map[string]int{fp1: 0, fInDP1: 1, fp3: 2}
+ if !reflect.DeepEqual(gotFileIdxs, wantFileIdxs) {
+ t.Errorf("DeepEqual(\n%#v\n,\n%#v\n): returned false", gotFileIdxs, wantFileIdxs)
+ }
+}
+
+func TestArchiverWithPartitionSession(t *testing.T) {
+ order := make(map[string]int)
+ ps, err := makePartitionSession(map[res.Type][]io.Writer{}, shard.FNV, order)
+ if err != nil {
+		t.Fatalf("makePartitionSession got err: %v", err)
+ }
+ if _, err := makeArchiver([]string{}, ps); err != nil {
+ t.Errorf("MakeArchiver got err: %v", err)
+ }
+}
+
+func TestArchiveNoValues(t *testing.T) {
+ ctx, cxlFn := context.WithCancel(context.Background())
+ defer cxlFn()
+ a, err := makeArchiver([]string{}, &mockPartitioner{})
+ if err != nil {
+ t.Fatalf("MakeArchiver got error: %v", err)
+ }
+ a.Archive(ctx)
+}
+
+func TestInternalArchive(t *testing.T) {
+ tcs := []struct {
+ name string
+ p Partitioner
+ pis []*res.PathInfo
+ vrs []*res.ValuesResource
+ ras []ResourcesAttribute
+ errs []error
+ wantErr bool
+ }{
+ {
+ name: "MultipleResPathInfosAndValuesResources",
+ p: &mockPartitioner{},
+ pis: []*res.PathInfo{{Path: "foo"}},
+ vrs: []*res.ValuesResource{
+ {Src: &res.PathInfo{Path: "bar"}},
+ {Src: &res.PathInfo{Path: "baz"}},
+ },
+ errs: []error{},
+ },
+ {
+ name: "NoValues",
+ p: &mockPartitioner{},
+ pis: []*res.PathInfo{},
+ vrs: []*res.ValuesResource{},
+ errs: []error{},
+ },
+ {
+ name: "ErrorOccurred",
+ p: &mockPartitioner{},
+ pis: []*res.PathInfo{{Path: "foo"}},
+ vrs: []*res.ValuesResource{},
+ errs: []error{fmt.Errorf("failure")},
+ wantErr: true,
+ },
+ }
+
+ for _, tc := range tcs {
+ t.Run(tc.name, func(t *testing.T) {
+ piC := make(chan *res.PathInfo)
+ go func() {
+ defer close(piC)
+ for _, pi := range tc.pis {
+ piC <- pi
+ }
+ }()
+ vrC := make(chan *res.ValuesResource)
+ go func() {
+ defer close(vrC)
+ for _, vr := range tc.vrs {
+ vrC <- vr
+ }
+ }()
+ raC := make(chan *ResourcesAttribute)
+ go func() {
+ defer close(raC)
+ for _, ra := range tc.ras {
+ nra := new(ResourcesAttribute)
+ *nra = ra
+ raC <- nra
+ }
+ }()
+ errC := make(chan error)
+ go func() {
+ defer close(errC)
+ for _, err := range tc.errs {
+ errC <- err
+ }
+ }()
+ a, err := makeArchiver([]string{}, tc.p)
+ if err != nil {
+ t.Errorf("MakeArchiver got error: %v", err)
+ return
+ }
+ ctx, cxlFn := context.WithCancel(context.Background())
+ defer cxlFn()
+ if err := a.archive(ctx, piC, vrC, raC, errC); err != nil {
+ if !tc.wantErr {
+ t.Errorf("archive got unexpected error: %v", err)
+ }
+ return
+ }
+ })
+ }
+}
+
+func TestSyncParseReader(t *testing.T) {
+ tcs := []struct {
+ name string
+ pi *res.PathInfo
+ content *bytes.Buffer
+ want map[string]string
+ wantErr bool
+ }{
+ {
+ name: "SingleResourcesBlock",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>
+ <string name="introduction">hello world</string>
+ <string name="foo">bar</string>
+ <attr name="baz" format="reference|color"></attr>
+ </resources>`),
+ want: map[string]string{
+ "introduction-string": "<string name=\"introduction\">hello world</string>",
+ "foo-string": "<string name=\"foo\">bar</string>",
+ "baz-attr": "<attr name=\"baz\" format=\"reference|color\"></attr>",
+ },
+ },
+ {
+ name: "MultipleResourcesBlocks",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>
+ <string name="introduction">hello world</string>
+ <string name="foo">bar</string>
+ </resources>
+ <!--
+ Subsequent resources sections are ignored, hence the "qux" item will not
+ materialize in the parsed values.
+ -->
+ <resources>
+ <item name="qux" type="integer">23</item>
+ </resources>`),
+ want: map[string]string{
+ "introduction-string": "<string name=\"introduction\">hello world</string>",
+ "foo-string": "<string name=\"foo\">bar</string>",
+ },
+ },
+ {
+ name: "NamespacedResourcesBlock",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources xmlns:foo="bar">
+ <string name="namespaced"><foo:bar>hello</foo:bar> world</string>
+ </resources>`),
+ want: map[string]string{
+ "resource_attribute-xmlns:foo": "bar",
+ "namespaced-string": "<string name=\"namespaced\"><foo:bar>hello</foo:bar> world</string>",
+ },
+ },
+ {
+ name: "DeclareStyleable",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString("<resources><declare-styleable name=\"foo\"><attr name=\"bar\">baz</attr></declare-styleable></resources>"),
+ want: map[string]string{
+ "foo-styleable": "<declare-styleable name=\"foo\"><attr name=\"bar\">baz</attr></declare-styleable>",
+ "bar-attr": "<attr name=\"bar\">baz</attr>",
+ },
+ },
+ {
+ name: "NamespacedStyleableBlock",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString("<resources xmlns:zoo=\"zoo\"><declare-styleable name=\"foo\"><attr name=\"bar\" zoo:qux=\"rux\">baz</attr></declare-styleable></resources>"),
+ want: map[string]string{
+ "resource_attribute-xmlns:zoo": "zoo",
+ "foo-styleable": "<declare-styleable name=\"foo\"><attr name=\"bar\" zoo:qux=\"rux\">baz</attr></declare-styleable>",
+ "bar-attr": "<attr name=\"bar\" zoo:qux=\"rux\">baz</attr>",
+ },
+ },
+ {
+ name: "PluralsStringArrayOutputToStringToo",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>
+ <string-array name="foo"><item>bar</item><item>baz</item></string-array>
+ <plurals name="corge"><item quantity="one">qux</item><item quantity="other">quux</item></plurals>
+ </resources>`),
+ want: map[string]string{
+ "foo-array": "<string-array name=\"foo\"><item>bar</item><item>baz</item></string-array>",
+ "corge-plurals": "<plurals name=\"corge\"><item quantity=\"one\">qux</item><item quantity=\"other\">quux</item></plurals>",
+ },
+ },
+ {
+ name: "AttrWithFlagOrEnumChildren",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>
+ <attr name="foo"><enum name="bar" value="0" /><enum name="baz" value="10" /></attr>
+ <attr name="qux"><flag name="quux" value="0x4" /></attr>
+ </resources>`),
+ want: map[string]string{
+ "foo-attr": "<attr name=\"foo\"><enum name=\"bar\" value=\"0\"></enum><enum name=\"baz\" value=\"10\"></enum></attr>",
+ "qux-attr": "<attr name=\"qux\"><flag name=\"quux\" value=\"0x4\"></flag></attr>",
+ },
+ },
+ {
+ name: "Style",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>
+ <style name="foo"><item>bar</item><item>baz</item></style>
+ </resources>`),
+ want: map[string]string{
+ "foo-style": "<style name=\"foo\"><item>bar</item><item>baz</item></style>",
+ },
+ },
+ {
+ name: "ArraysGoToStringAndInteger",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>
+ <array name="foo"><item>bar</item><item>1</item></array>
+ </resources>`),
+ want: map[string]string{
+ "foo-array": "<array name=\"foo\"><item>bar</item><item>1</item></array>",
+ },
+ },
+ {
+ name: "NoContent",
+ pi: &res.PathInfo{},
+ content: &bytes.Buffer{},
+ want: map[string]string{},
+ },
+ {
+ name: "EmptyResources",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString("<resources></resources>"),
+ want: map[string]string{},
+ },
+ {
+ name: "IgnoredContent",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`
+ <!--ignore my comment-->
+ <ignore_tag />
+ ignore random string.
+ <resources>
+ <!--ignore this comment too-->
+ <attr name="foo">bar<baz>qux</baz></attr>
+ ignore this random string too.
+ <!-- following are a list of ignored tags -->
+ <eat-comment />
+ <skip />
+ </resources>`),
+ want: map[string]string{
+ "foo-attr": "<attr name=\"foo\">bar<baz>qux</baz></attr>",
+ },
+ },
+ {
+ name: "TagMissingNameAttribute",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources><string>MissingNameAttr</string></resources>`),
+ wantErr: true,
+ },
+ {
+ name: "ItemTagMissingTypeAttribute",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources><item name="MissingTypeAttr">bar</item></resources>`),
+ wantErr: true,
+ },
+ {
+ name: "ItemTagUnknownTypeAttribute",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources><item name="UnknownType" type="foo" /></resources>`),
+ wantErr: true,
+ },
+ {
+ name: "UnhandledTag",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources><foo name="bar"/></resources>`),
+ wantErr: true,
+ },
+ {
+ name: "MalFormedXml_OpenResourcesTag",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources>`),
+ wantErr: true,
+ },
+ {
+ name: "MalFormedXml_Unbalanced",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources><attr name="unbalanced"><b></attr></resources>`),
+ wantErr: true,
+ },
+ {
+ name: "NamespaceUsedWithoutNamespaceDefinition",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`<resources><string name="ohno"><bad:b>Oh no!</bad:b></string></resources>`),
+ wantErr: true,
+ },
+ {
+ // Verify parent Encoder is properly shadowing the xml file.
+ name: "NamespaceUsedOutsideOfDefinition",
+ pi: &res.PathInfo{},
+ content: bytes.NewBufferString(`
+ <resources>
+ <string name="foo" xmlns:bar="baz">qux</string>
+ <string name="ohno"><foo:b>Oh no!</foo:b></string>
+ </resources>`),
+ wantErr: true,
+ },
+ }
+ for _, tc := range tcs {
+ t.Run(tc.name, func(t *testing.T) {
+ ctx, cxlFn := context.WithCancel(context.Background())
+ defer cxlFn()
+ vrC := make(chan *res.ValuesResource)
+ raC := make(chan *ResourcesAttribute)
+ errC := make(chan error)
+ go func() {
+ defer close(vrC)
+ defer close(raC)
+ defer close(errC)
+ syncParseReader(ctx, tc.pi, xml.NewDecoder(tc.content), vrC, raC, errC)
+ }()
+ got := make(map[string]string)
+ errMs := make([]string, 0)
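+ // Drain the result channels; each channel is set to nil once closed so its select case is never chosen again.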
+ for errC != nil || vrC != nil {
+ select {
+ case e, ok := <-errC:
+ if !ok {
+ errC = nil
+ }
+ if e != nil {
+ errMs = append(errMs, e.Error())
+ }
+ case ra, ok := <-raC:
+ if !ok {
+ raC = nil
+ }
+ if ra != nil {
+ a := ra.Attribute
+ got[fmt.Sprintf("resource_attribute-%s:%s", a.Name.Space, a.Name.Local)] = a.Value
+ }
+ case vr, ok := <-vrC:
+ if !ok {
+ vrC = nil
+ }
+ if vr != nil {
+ got[fmt.Sprintf("%s-%s", vr.N.Name, vr.N.Type.String())] = string(vr.Payload)
+ }
+ }
+ }
+
+ // error handling
+ if tc.wantErr {
+ if len(errMs) == 0 {
+ t.Errorf("syncParseReader expected an error.")
+ }
+ return
+ }
+ if len(errMs) > 0 {
+ t.Errorf("syncParserReader got unexpected error(s): \n%s", strings.Join(errMs, "\n"))
+ return
+ }
+
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf("DeepEqual(\n%#v\n,\n%#v\n): returned false", got, tc.want)
+ }
+ })
+ }
+}
+
+// mockPartitioner is a Partitioner mock used for testing.
+type mockPartitioner struct {
+ strPI []res.PathInfo
+ cvVR []res.ValuesResource
+ ra []*ResourcesAttribute
+}
+
+func (mp *mockPartitioner) Close() error {
+ return nil
+}
+
+func (mp *mockPartitioner) CollectPathResource(src res.PathInfo) {
+ mp.strPI = append(mp.strPI, src)
+}
+
+func (mp *mockPartitioner) CollectValues(vr *res.ValuesResource) error {
+ mp.cvVR = append(mp.cvVR, res.ValuesResource{Src: vr.Src, N: vr.N, Payload: vr.Payload})
+ return nil
+}
+
+func (mp *mockPartitioner) CollectResourcesAttribute(ra *ResourcesAttribute) {
+ mp.ra = append(mp.ra, ra)
+}
diff --git a/src/tools/ak/bucketize/partitioner.go b/src/tools/ak/bucketize/partitioner.go
new file mode 100644
index 0000000..97a328d
--- /dev/null
+++ b/src/tools/ak/bucketize/partitioner.go
@@ -0,0 +1,319 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bucketize
+
+import (
+ "archive/zip"
+ "bytes"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "src/common/golang/shard"
+ "src/common/golang/xml2"
+ "src/tools/ak/res/res"
+)
+
+// Helper struct to sort paths by index
+type indexedPaths struct {
+ order map[string]int
+ ps []string
+}
+
+type byPathIndex indexedPaths
+
+func (b byPathIndex) Len() int { return len(b.ps) }
+func (b byPathIndex) Swap(i, j int) { b.ps[i], b.ps[j] = b.ps[j], b.ps[i] }
+func (b byPathIndex) Less(i, j int) bool {
+ iIdx := pathIdx(b.ps[i], b.order)
+ jIdx := pathIdx(b.ps[j], b.order)
+ // Files exist in the same directory
+ if iIdx == jIdx {
+ return b.ps[i] < b.ps[j]
+ }
+ return iIdx < jIdx
+}
+
+// Helper struct to sort valuesKeys by index
+type indexedValuesKeys struct {
+ order map[string]int
+ ks []valuesKey
+}
+
+type byValueKeyIndex indexedValuesKeys
+
+func (b byValueKeyIndex) Len() int { return len(b.ks) }
+func (b byValueKeyIndex) Swap(i, j int) { b.ks[i], b.ks[j] = b.ks[j], b.ks[i] }
+func (b byValueKeyIndex) Less(i, j int) bool {
+ iIdx := pathIdx(b.ks[i].sourcePath.Path, b.order)
+ jIdx := pathIdx(b.ks[j].sourcePath.Path, b.order)
+ // Files exist in the same directory
+ if iIdx == jIdx {
+ return b.ks[i].sourcePath.Path < b.ks[j].sourcePath.Path
+ }
+ return iIdx < jIdx
+}
+
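+// valuesKey identifies the values resources collected from a single source file for a single resource type.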
+type valuesKey struct {
+ sourcePath res.PathInfo
+ resType res.Type
+}
+
+// PartitionSession consumes resources and partitions them into archives by the resource type.
+// The type-wise partitions can be further sharded by the provided shardFn.
+type PartitionSession struct {
+ typedOutput map[res.Type][]*zip.Writer
+ sharder shard.Func
+ collectedVals map[valuesKey]map[string][]byte
+ collectedPaths map[string]res.PathInfo
+ collectedRAs map[string][]xml.Attr
+ resourceOrder map[string]int
+}
+
+// Partitioner takes the provided resource values and paths and stores the data in sharded, per-type outputs.
+type Partitioner interface {
+ Close() error
+ CollectValues(vr *res.ValuesResource) error
+ CollectPathResource(src res.PathInfo)
+ CollectResourcesAttribute(attr *ResourcesAttribute)
+}
+
+// makePartitionSession creates a PartitionSession that writes to the given outputs.
+func makePartitionSession(outputs map[res.Type][]io.Writer, sharder shard.Func, resourceOrder map[string]int) (*PartitionSession, error) {
+ typeToArchs := make(map[res.Type][]*zip.Writer)
+ for t, ws := range outputs {
+ archs := make([]*zip.Writer, 0, len(ws))
+ for _, w := range ws {
+ archs = append(archs, zip.NewWriter(w))
+ }
+ typeToArchs[t] = archs
+ }
+ return &PartitionSession{
+ typeToArchs,
+ sharder,
+ make(map[valuesKey]map[string][]byte),
+ make(map[string]res.PathInfo),
+ make(map[string][]xml.Attr),
+ resourceOrder,
+ }, nil
+}
+
+// Close finalizes all archives in this partition session.
+func (ps *PartitionSession) Close() error {
+ if err := ps.flushCollectedPaths(); err != nil {
+ return fmt.Errorf("got error flushing collected paths: %v", err)
+ }
+ if err := ps.flushCollectedVals(); err != nil {
+ return fmt.Errorf("got error flushing collected values: %v", err)
+ }
+ // close archives.
+ for _, as := range ps.typedOutput {
+ for _, a := range as {
+ if err := a.Close(); err != nil {
+ return fmt.Errorf("%s: could not close: %v", a, err)
+ }
+ }
+ }
+ return nil
+}
+
+// CollectPathResource takes a file system resource and tracks it so that it can be stored in an output partition and shard.
+func (ps *PartitionSession) CollectPathResource(src res.PathInfo) {
+ // store the path only if the type is accepted by the underlying partitions.
+ if ps.isTypeAccepted(src.Type) {
+ ps.collectedPaths[src.Path] = src
+ }
+}
+
+// CollectValues stores the xml representation of a particular resource from a particular file.
+func (ps *PartitionSession) CollectValues(vr *res.ValuesResource) error {
+ // store the value only if the type is accepted by the underlying partitions.
+ if ps.isTypeAccepted(vr.N.Type) {
+ // Don't store style attr's from other packages
+ if !(vr.N.Type == res.Attr && vr.N.Package != "res-auto") {
+ k := valuesKey{*vr.Src, vr.N.Type}
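+ // If the same resource name was already collected for this file and type, keep the longer payload; equal-length payloads with different content are reported as conflicting definitions.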
+ if tv, ok := ps.collectedVals[k]; !ok {
+ ps.collectedVals[k] = make(map[string][]byte)
+ ps.collectedVals[k][vr.N.String()] = vr.Payload
+ } else {
+ if p, ok := tv[vr.N.String()]; !ok {
+ ps.collectedVals[k][vr.N.String()] = vr.Payload
+ } else if len(p) < len(vr.Payload) {
+ ps.collectedVals[k][vr.N.String()] = vr.Payload
+ } else if len(p) == len(vr.Payload) && bytes.Compare(p, vr.Payload) != 0 {
+ return fmt.Errorf("different values for resource %q", vr.N.String())
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// CollectResourcesAttribute stores the xml attributes of the resources tag from a particular file.
+func (ps *PartitionSession) CollectResourcesAttribute(ra *ResourcesAttribute) {
+ ps.collectedRAs[ra.ResFile.Path] = append(ps.collectedRAs[ra.ResFile.Path], ra.Attribute)
+}
+
+func (ps *PartitionSession) isTypeAccepted(t res.Type) bool {
+ _, ok := ps.typedOutput[t]
+ return ok
+}
+
+func (ps *PartitionSession) flushCollectedPaths() error {
+ // sort keys so that data is written to the archives in a deterministic order
+ // specifically the same order in which they were declared
+ ks := make([]string, 0, len(ps.collectedPaths))
+ for k := range ps.collectedPaths {
+ ks = append(ks, k)
+ }
+ sort.Sort(byPathIndex(indexedPaths{order: ps.resourceOrder, ps: ks}))
+ for _, k := range ks {
+ v := ps.collectedPaths[k]
+ f, err := os.Open(v.Path)
+ if err != nil {
+ return fmt.Errorf("%s: could not be opened for reading: %v", v.Path, err)
+ }
+ if err := ps.storePathResource(v, f); err != nil {
+ return fmt.Errorf("%s: got error storing path resource: %v", v.Path, err)
+ }
+ f.Close()
+ }
+ return nil
+}
+
+func (ps *PartitionSession) storePathResource(src res.PathInfo, r io.Reader) error {
+ p := path.Base(src.Path)
+ if dot := strings.Index(p, "."); dot == 0 {
+ // skip files where the name starts with a ".", these are already ignored by aapt
+ return nil
+ } else if dot > 0 {
+ p = p[:dot]
+ }
+ fqn, err := res.ParseName(p, src.Type)
+ if err != nil {
+ return fmt.Errorf("%s: %q could not be parsed into a res name: %v", src.Path, p, err)
+ }
+ arch, err := ps.archiveFor(fqn)
+ if err != nil {
+ return fmt.Errorf("%s: could not get partitioned archive: %v", src.Path, err)
+ }
+ w, err := arch.Create(pathResSuffix(src.Path))
+ if err != nil {
+ return fmt.Errorf("%s: could not create writer: %v", src.Path, err)
+ }
+ if _, err = io.Copy(w, r); err != nil {
+ return fmt.Errorf("%s: could not copy into archive: %v", src.Path, err)
+ }
+ return nil
+}
+
+func (ps *PartitionSession) archiveFor(fqn res.FullyQualifiedName) (*zip.Writer, error) {
+ archs, ok := ps.typedOutput[fqn.Type]
+ if !ok {
+ return nil, fmt.Errorf("%s: do not have output stream for this res type", fqn.Type)
+ }
+ shard := ps.sharder(fqn.String(), len(archs))
+ if shard >= len(archs) || shard < 0 {
+ return nil, fmt.Errorf("%v: bad sharder f(%v, %d) -> %d must be [0,%d)", ps.sharder, fqn, len(archs), shard, len(archs))
+ }
+ return archs[shard], nil
+}
+
+var (
+ resXMLHeader = []byte("<?xml version='1.0' encoding='utf-8'?>")
+ resXMLFooter = []byte("</resources>")
+)
+
+func (ps *PartitionSession) flushCollectedVals() error {
+ // sort keys so that data is written to the archives in a deterministic order
+ // specifically the same order in which Bazel provides them
+ ks := make([]valuesKey, 0, len(ps.collectedVals))
+ for k := range ps.collectedVals {
+ ks = append(ks, k)
+ }
+ sort.Sort(byValueKeyIndex(indexedValuesKeys{order: ps.resourceOrder, ks: ks}))
+ for _, k := range ks {
+ as, ok := ps.typedOutput[k.resType]
+ if !ok {
+ return fmt.Errorf("%s: no output for res type", k.resType)
+ }
+ ws := make([]io.Writer, 0, len(as))
+ // For each given source file, create a corresponding file in each of the shards.
+ // A file in a particular shard may be empty if none of the resources defined in the source file ended up in that shard.
+ for _, a := range as {
+ w, err := a.Create(pathResSuffix(k.sourcePath.Path))
+ if err != nil {
+ return fmt.Errorf("%s: could not create entry: %v", k.sourcePath.Path, err)
+ }
+ if _, err = w.Write(resXMLHeader); err != nil {
+ return fmt.Errorf("%s: could not write xml header: %v", k.sourcePath.Path, err)
+ }
+ // Write the resources open tag, with the attributes collected.
+ b := bytes.Buffer{}
+ xml2.NewEncoder(&b).EncodeToken(xml.StartElement{
+ Name: res.ResourcesTagName,
+ Attr: ps.collectedRAs[k.sourcePath.Path],
+ })
+ if _, err = w.Write(b.Bytes()); err != nil {
+ return fmt.Errorf("%s: could not write resources tag %q: %v", k.sourcePath.Path, b.String(), err)
+ }
+ ws = append(ws, w)
+ }
+ v := ps.collectedVals[k]
+ var keys []string
+ for k := range v {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ for _, fqn := range keys {
+ p := v[fqn]
+ shard := ps.sharder(fqn, len(ws))
+ if shard < 0 || shard >= len(ws) {
+ return fmt.Errorf("%v: bad sharder f(%s, %d) -> %d must be [0,%d)", ps.sharder, fqn, len(ws), shard, len(ws))
+ }
+ if _, err := ws[shard].Write(p); err != nil {
+ return fmt.Errorf("%s: writing resource %s failed: %v", k.sourcePath.Path, fqn, err)
+ }
+ }
+ for _, w := range ws {
+ if _, err := w.Write(resXMLFooter); err != nil {
+ return fmt.Errorf("%s: could not write xml footer: %v", k.sourcePath.Path, err)
+ }
+ }
+ }
+ return nil
+}
+
+func pathIdx(path string, order map[string]int) int {
+ if idx, ok := order[path]; ok {
+ return idx
+ }
+ // TODO(mauriciogg): maybe replace with prefix search
+ // list of resources might contain directories so exact match might not exist
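+ // Fall back to the index of the path's .../res directory; paths with no recorded index sort as 0.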
+ dirPos := strings.LastIndex(path, "/res/")
+ idx, _ := order[path[0:dirPos+4]]
+ return idx
+}
+
+func pathResSuffix(path string) string {
+ // returns the relative resource path from the full path
+ // e.g. /foo/bar/res/values/strings.xml -> res/values/strings.xml
+ parentDir := filepath.Dir(filepath.Dir(filepath.Dir(path)))
+ return strings.TrimPrefix(path, parentDir+string(filepath.Separator))
+}
diff --git a/src/tools/ak/bucketize/partitioner_test.go b/src/tools/ak/bucketize/partitioner_test.go
new file mode 100644
index 0000000..846718e
--- /dev/null
+++ b/src/tools/ak/bucketize/partitioner_test.go
@@ -0,0 +1,349 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bucketize
+
+import (
+ "archive/zip"
+ "bytes"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "testing"
+
+ "src/common/golang/shard"
+ "src/tools/ak/res/res"
+)
+
+func TestInternalStorePathResource(t *testing.T) {
+ // test internal storePathResource and skip the creation of real files.
+ tcs := []struct {
+ name string
+ inFiles map[string]string
+ partitions map[res.Type][]io.Writer
+ shardFn shard.Func
+ want map[res.Type][][]string
+ wantErr bool
+ }{
+ {
+ name: "MultipleResTypeFilesWithShardsOfDifferentSizes",
+ inFiles: map[string]string{
+ "res/drawable/2-foo.xml": "all",
+ "res/layout/0-bar.xml": "your",
+ "res/color/0-baz.xml": "base",
+ "res/layout/1-qux.xml": "are",
+ "res/drawable/0-quux.xml": "belong",
+ "res/color/0-corge.xml": "to",
+ "res/color/0-grault.xml": "us",
+ "res/layout/0-garply.xml": "!",
+ },
+ shardFn: shard.Func(func(fqn string, shardCount int) int {
+ // sharding strategy is built into the file name as "<shard num>-foo.bar" (e.g. 8-baz.xml)
+ name := strings.Split(fqn, "/")[1]
+ ai := strings.SplitN(name, "-", 2)[0]
+ shard, err := strconv.Atoi(ai)
+ if err != nil {
+ t.Fatalf("Atoi(%s) got err: %v", ai, err)
+ }
+ return shard
+ }),
+ partitions: map[res.Type][]io.Writer{
+ res.Drawable: {&bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}},
+ res.Color: {&bytes.Buffer{}},
+ res.Layout: {&bytes.Buffer{}, &bytes.Buffer{}},
+ },
+ want: map[res.Type][][]string{
+ res.Drawable: {{"res/drawable/0-quux.xml"}, {}, {"res/drawable/2-foo.xml"}},
+ res.Color: {{"res/color/0-baz.xml", "res/color/0-corge.xml", "res/color/0-grault.xml"}},
+ res.Layout: {{"res/layout/0-bar.xml", "res/layout/0-garply.xml"}, {"res/layout/1-qux.xml"}},
+ },
+ },
+ {
+ name: "IgnoredFilePatterns",
+ inFiles: map[string]string{
+ "res/drawable/.ignore": "me",
+ },
+ shardFn: shard.FNV,
+ partitions: map[res.Type][]io.Writer{res.Drawable: {&bytes.Buffer{}}},
+ wantErr: true,
+ },
+ {
+ name: "NoFiles",
+ inFiles: map[string]string{},
+ shardFn: shard.FNV,
+ partitions: map[res.Type][]io.Writer{res.Drawable: {&bytes.Buffer{}}},
+ want: map[res.Type][][]string{res.Drawable: {{}}},
+ },
+ }
+
+ order := make(map[string]int)
+ for _, tc := range tcs {
+ t.Run(tc.name, func(t *testing.T) {
+ ps, err := makePartitionSession(tc.partitions, tc.shardFn, order)
+ if err != nil {
+ t.Errorf("MakePartitionSession(%v, %v, %d) got err: %v", tc.partitions, tc.shardFn, 0, err)
+ return
+ }
+
+ for k, v := range tc.inFiles {
+ pi, err := res.ParsePath(k)
+ if err != nil {
+ if !tc.wantErr {
+ t.Fatalf("ParsePath(%s) got err: %v", k, err)
+ }
+ return
+ }
+ if err := ps.storePathResource(pi, strings.NewReader(v)); err != nil {
+ t.Fatalf("storePathResource got unexpected err: %v", err)
+ }
+ }
+
+ if err := ps.Close(); err != nil {
+ t.Errorf("partition Close() got err: %v", err)
+ return
+ }
+
+ // validate data written to the partitions
+ got := make(map[res.Type][][]string)
+ for rt, shards := range tc.partitions {
+ shardPaths := make([][]string, 0, len(shards))
+ for _, shard := range shards {
+ br := bytes.NewReader(shard.(*bytes.Buffer).Bytes())
+ rr, err := zip.NewReader(br, br.Size())
+ if err != nil {
+ t.Errorf("NewReader(%v, %d) got err: %v", br, br.Size(), err)
+ return
+ }
+ paths := make([]string, 0, len(rr.File))
+ for _, f := range rr.File {
+ paths = append(paths, f.Name)
+ c, err := readAll(f)
+ if err != nil {
+ t.Errorf("readAll got err: %v", err)
+ return
+ }
+ if tc.inFiles[f.Name] != c {
+ t.Errorf("error copying data for %s got %q but wanted %q", f.Name, c, tc.inFiles[f.Name])
+ return
+ }
+ }
+ sort.Strings(paths)
+ shardPaths = append(shardPaths, paths)
+ }
+ got[rt] = shardPaths
+ }
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf("DeepEqual(\n%#v\n,\n%#v\n): returned false", got, tc.want)
+ }
+ })
+ }
+}
+
+func TestCollectValues(t *testing.T) {
+ tcs := []struct {
+ name string
+ pathVPsMap map[string]map[res.FullyQualifiedName][]byte
+ pathRAMap map[string][]xml.Attr
+ partitions map[res.Type][]io.Writer
+ want map[res.Type][][]string
+ wantErr bool
+ }{
+ {
+ name: "MultipleResTypesShardsResources",
+ partitions: map[res.Type][]io.Writer{
+ res.Attr: {&bytes.Buffer{}, &bytes.Buffer{}},
+ res.String: {&bytes.Buffer{}, &bytes.Buffer{}},
+ res.Color: {&bytes.Buffer{}, &bytes.Buffer{}},
+ },
+ pathVPsMap: map[string]map[res.FullyQualifiedName][]byte{
+ "res/values/strings.xml": {
+ res.FullyQualifiedName{Package: "res-auto", Type: res.String, Name: "foo"}: []byte("<string name='foo'>bar</string>"),
+ res.FullyQualifiedName{Package: "android", Type: res.String, Name: "baz"}: []byte("<string name='baz'>qux</string>"),
+ res.FullyQualifiedName{Package: "res-auto", Type: res.Attr, Name: "quux"}: []byte("<attr name='quux'>corge</attr>"),
+ },
+ "res/values/attr.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.Attr, Name: "foo"}: []byte("<attr name='android:foo'>bar</attr>"),
+ },
+ "baz/res/values/attr.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.Attr, Name: "bazfoo"}: []byte("<attr name='android:bazfoo'>qix</attr>"),
+ },
+ "baz/res/values/strings.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.String, Name: "baz"}: []byte("<string name='baz'>qux</string>"),
+ },
+ "foo/res/values/attr.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.Attr, Name: "foofoo"}: []byte("<attr name='android:foofoo'>qex</attr>"),
+ },
+ "foo/res/values/color.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.Color, Name: "foobar"}: []byte("<color name='foobar'>#FFFFFFFF</color>"),
+ },
+ "dir/res/values/strings.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.String, Name: "dirbaz"}: []byte("<string name='dirbaz'>qux</string>"),
+ },
+ "dir/res/values/color.xml": {
+ res.FullyQualifiedName{Package: "android", Type: res.Color, Name: "dirfoobar"}: []byte("<color name='dirfoobar'>#FFFFFFFF</color>"),
+ },
+ },
+ pathRAMap: map[string][]xml.Attr{
+ "res/values/strings.xml": {
+ xml.Attr{Name: xml.Name{Space: "xmlns", Local: "ns1"}, Value: "path1"},
+ xml.Attr{Name: xml.Name{Space: "xmlns", Local: "ns2"}, Value: "path2"},
+ },
+ },
+ want: map[res.Type][][]string{
+ res.Attr: {
+ {
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources xmlns:ns1=\"path1\" xmlns:ns2=\"path2\"><attr name='quux'>corge</attr></resources>",
+ },
+ {
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources xmlns:ns1=\"path1\" xmlns:ns2=\"path2\"></resources>",
+ },
+ },
+ res.String: {
+ {
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources xmlns:ns1=\"path1\" xmlns:ns2=\"path2\"><string name='baz'>qux</string><string name='foo'>bar</string></resources>",
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources><string name='dirbaz'>qux</string></resources>",
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources><string name='baz'>qux</string></resources>",
+ },
+ {
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources xmlns:ns1=\"path1\" xmlns:ns2=\"path2\"></resources>",
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources></resources>",
+ "res/values/strings.xml", "<?xml version='1.0' encoding='utf-8'?><resources></resources>",
+ },
+ },
+ res.Color: {
+ {
+ "res/values/color.xml", "<?xml version='1.0' encoding='utf-8'?><resources><color name='foobar'>#FFFFFFFF</color></resources>",
+ "res/values/color.xml", "<?xml version='1.0' encoding='utf-8'?><resources><color name='dirfoobar'>#FFFFFFFF</color></resources>",
+ },
+ {
+ "res/values/color.xml", "<?xml version='1.0' encoding='utf-8'?><resources></resources>",
+ "res/values/color.xml", "<?xml version='1.0' encoding='utf-8'?><resources></resources>",
+ },
+ },
+ },
+ },
+ {
+ name: "NoValuesPayloads",
+ pathVPsMap: map[string]map[res.FullyQualifiedName][]byte{
+ "res/values/strings.xml": {},
+ },
+ partitions: map[res.Type][]io.Writer{res.String: {&bytes.Buffer{}}},
+ want: map[res.Type][][]string{res.String: {{}}},
+ },
+ {
+ name: "ResTypeValuesResTypeMismatch",
+ pathVPsMap: map[string]map[res.FullyQualifiedName][]byte{
+ "res/values/strings.xml": {
+ res.FullyQualifiedName{
+ Package: "res-auto",
+ Type: res.String,
+ Name: "foo",
+ }: []byte("<string name='foo'>bar</string>"),
+ },
+ },
+ partitions: map[res.Type][]io.Writer{res.Attr: {&bytes.Buffer{}}},
+ want: map[res.Type][][]string{res.Attr: {{}}},
+ },
+ }
+
+ shardFn := func(name string, shardCount int) int { return 0 }
+ order := map[string]int{
+ "foo/res/values/attr.xml": 0,
+ "foo/res/values/color.xml": 1,
+ "res/values/attr.xml": 2,
+ "res/values/strings.xml": 3,
+ "dir/res": 4,
+ "baz/res/values/attr.xml": 5,
+ "baz/res/values/strings.xml": 6,
+ }
+ for _, tc := range tcs {
+ t.Run(tc.name, func(t *testing.T) {
+ ps, err := makePartitionSession(tc.partitions, shardFn, order)
+ if err != nil {
+ t.Errorf("makePartitionSession(%v, %v, %d) got err: %v", tc.partitions, shard.FNV, 0, err)
+ return
+ }
+ for p, vps := range tc.pathVPsMap {
+ pi, err := res.ParsePath(p)
+ if err != nil {
+ t.Errorf("ParsePath(%s) got err: %v", p, err)
+ return
+ }
+ for fqn, p := range vps {
+ ps.CollectValues(&res.ValuesResource{Src: &pi, N: fqn, Payload: p})
+ }
+ }
+ for p, as := range tc.pathRAMap {
+ pi, err := res.ParsePath(p)
+ if err != nil {
+ t.Errorf("ParsePath(%s) got err: %v", p, err)
+ return
+ }
+ for _, a := range as {
+ ps.CollectResourcesAttribute(&ResourcesAttribute{ResFile: &pi, Attribute: a})
+ }
+ }
+ if err := ps.Close(); err != nil {
+ t.Errorf("partition Close() got err: %v", err)
+ return
+ }
+
+ // validate data written to the partitions.
+ got := make(map[res.Type][][]string)
+ for rt, shards := range tc.partitions {
+ shardPaths := make([][]string, 0, len(shards))
+ for _, shard := range shards {
+ br := bytes.NewReader(shard.(*bytes.Buffer).Bytes())
+ rr, err := zip.NewReader(br, br.Size())
+ if err != nil {
+ t.Errorf("NewReader(%v, %d) got err: %v", br, br.Size(), err)
+ return
+ }
+ paths := make([]string, 0, len(rr.File))
+ for _, f := range rr.File {
+ c, err := readAll(f)
+ if err != nil {
+ t.Errorf("readAll got err: %v", err)
+ return
+ }
+ paths = append(paths, f.Name, c)
+ }
+ shardPaths = append(shardPaths, paths)
+ }
+ got[rt] = shardPaths
+ }
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf("DeepEqual(\n%#v\n,\n%#v\n): returned false", got, tc.want)
+ }
+ })
+ }
+}
+
+func readAll(f *zip.File) (string, error) {
+ rc, err := f.Open()
+ if err != nil {
+ return "", fmt.Errorf("%q: Open got err: %v", f.Name, err)
+ }
+ defer rc.Close()
+ body, err := ioutil.ReadAll(rc)
+ if err != nil {
+ return "", fmt.Errorf("%q: ReadAll got err: %v", f.Name, err)
+ }
+ return string(body), nil
+}
diff --git a/src/tools/ak/bucketize/pipe.go b/src/tools/ak/bucketize/pipe.go
new file mode 100644
index 0000000..7162232
--- /dev/null
+++ b/src/tools/ak/bucketize/pipe.go
@@ -0,0 +1,154 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bucketize
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "sync"
+
+ "src/tools/ak/res/res"
+)
+
+type contextKey int
+
+const (
+ ctxErr contextKey = 0
+)
+
+// errorf returns a formatted error with any context-sensitive information prefixed to the error.
+func errorf(ctx context.Context, fmts string, a ...interface{}) error {
+ if s, ok := ctx.Value(ctxErr).(string); ok {
+ return fmt.Errorf(strings.Join([]string{s, fmts}, ""), a...)
+ }
+ return fmt.Errorf(fmts, a...)
+}
+
+// prefixErr returns a context which adds a prefix to error messages.
+func prefixErr(ctx context.Context, add string) context.Context {
+ if s, ok := ctx.Value(ctxErr).(string); ok {
+ return context.WithValue(ctx, ctxErr, strings.Join([]string{s, add}, ""))
+ }
+ return context.WithValue(ctx, ctxErr, add)
+}
+
+func separatePathInfosByValues(ctx context.Context, pis []*res.PathInfo) (<-chan *res.PathInfo, <-chan *res.PathInfo) {
+ valuesPIC := make(chan *res.PathInfo)
+ nonValuesPIC := make(chan *res.PathInfo)
+ go func() {
+ defer close(valuesPIC)
+ defer close(nonValuesPIC)
+ for _, pi := range pis {
+ if pi.Type.Kind() == res.Value || (pi.Type.Kind() == res.Both && strings.HasPrefix(pi.TypeDir, "values")) {
+ select {
+ case valuesPIC <- pi:
+ case <-ctx.Done():
+ return
+ }
+ } else {
+ select {
+ case nonValuesPIC <- pi:
+ case <-ctx.Done():
+ return
+ }
+ }
+ }
+ }()
+ return valuesPIC, nonValuesPIC
+}
+
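+// mergeValuesResourceStreams fans in multiple ValuesResource streams into a single stream.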
+func mergeValuesResourceStreams(ctx context.Context, vrCs []<-chan *res.ValuesResource) <-chan *res.ValuesResource {
+ vrC := make(chan *res.ValuesResource)
+ var wg sync.WaitGroup
+ wg.Add(len(vrCs))
+ output := func(c <-chan *res.ValuesResource) {
+ defer wg.Done()
+ for vr := range c {
+ select {
+ case vrC <- vr:
+ case <-ctx.Done():
+ return
+ }
+ }
+ }
+ for _, c := range vrCs {
+ go output(c)
+ }
+ go func() {
+ wg.Wait()
+ close(vrC)
+ }()
+ return vrC
+}
+
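+// mergeResourcesAttributeStreams fans in multiple ResourcesAttribute streams into a single stream.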
+func mergeResourcesAttributeStreams(ctx context.Context, raCs []<-chan *ResourcesAttribute) <-chan *ResourcesAttribute {
+ raC := make(chan *ResourcesAttribute)
+ var wg sync.WaitGroup
+ wg.Add(len(raCs))
+ output := func(c <-chan *ResourcesAttribute) {
+ defer wg.Done()
+ for ra := range c {
+ select {
+ case raC <- ra:
+ case <-ctx.Done():
+ return
+ }
+ }
+ }
+ for _, c := range raCs {
+ go output(c)
+ }
+ go func() {
+ wg.Wait()
+ close(raC)
+ }()
+ return raC
+}
+
+// mergeErrStreams fans in multiple error streams into a single stream.
+func mergeErrStreams(ctx context.Context, errCs []<-chan error) <-chan error {
+ errC := make(chan error)
+ var wg sync.WaitGroup
+ wg.Add(len(errCs))
+ output := func(c <-chan error) {
+ defer wg.Done()
+ for e := range c {
+ select {
+ case errC <- e:
+ case <-ctx.Done():
+ return
+ }
+ }
+ }
+ for _, rc := range errCs {
+ go output(rc)
+ }
+ go func() {
+ wg.Wait()
+ close(errC)
+ }()
+ return errC
+}
+
+// sendErr attempts to send the provided error to the provided chan; however, if the context is canceled, it returns false.
+func sendErr(ctx context.Context, errC chan<- error, err error) bool {
+ select {
+ case <-ctx.Done():
+ return false
+ case errC <- err:
+ return true
+ }
+}
diff --git a/src/tools/ak/bucketize/pipe_test.go b/src/tools/ak/bucketize/pipe_test.go
new file mode 100644
index 0000000..81456ce
--- /dev/null
+++ b/src/tools/ak/bucketize/pipe_test.go
@@ -0,0 +1,75 @@
+// Copyright 2018 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bucketize
+
+import (
+ "context"
+ "errors"
+ "reflect"
+ "testing"
+)
+
+func TestPrefixErr(t *testing.T) {
+ tests := []struct {
+ ctx context.Context
+ fmts string
+ args []interface{}
+ want error
+ }{
+ {
+ ctx: context.Background(),
+ fmts: "Hello world",
+ want: errors.New("Hello world"),
+ },
+ {
+ ctx: prefixErr(context.Background(), "file: foo: "),
+ fmts: "Hello world: %d",
+ args: []interface{}{1},
+ want: errors.New("file: foo: Hello world: 1"),
+ },
+ {
+ ctx: prefixErr(prefixErr(context.Background(), "file: foo: "), "tag: <resources>: "),
+ fmts: "Hello world: %d",
+ args: []interface{}{1},
+ want: errors.New("file: foo: tag: <resources>: Hello world: 1"),
+ },
+ }
+ for _, tc := range tests {
+ got := errorf(tc.ctx, tc.fmts, tc.args...)
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf("Errorf(%v, %v, %v): %v wanted %v", tc.ctx, tc.fmts, tc.args, got, tc.want)
+ }
+ }
+}
+
+func TestMergeErrStreams(t *testing.T) {
+ ctx := context.Background()
+ sendClose := func(e error, eC chan<- error) {
+ defer close(eC)
+ eC <- e
+ }
+ in1 := make(chan error)
+ in2 := make(chan error)
+ go sendClose(errors.New("hi"), in1)
+ go sendClose(errors.New("hello"), in2)
+ merged := mergeErrStreams(ctx, []<-chan error{in1, in2})
+ var rcv []error
+ for r := range merged {
+ rcv = append(rcv, r)
+ }
+ if len(rcv) != 2 {
+ t.Errorf("got: %v on merged stream, wanted only 2 elements", rcv)
+ }
+}
diff --git a/src/tools/ak/extractaar/BUILD b/src/tools/ak/extractaar/BUILD
new file mode 100644
index 0000000..a4da9ce
--- /dev/null
+++ b/src/tools/ak/extractaar/BUILD
@@ -0,0 +1,44 @@
+load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
+load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test")
+
+# Description:
+# Package for extractaar module
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+go_library(
+ name = "extractaar",
+ srcs = [
+ "buildozer.go",
+ "extractaar.go",
+ "validator.go",
+ ],
+ importpath = "src/tools/ak/extractaar/extractaar",
+ deps = [
+ "//src/tools/ak:types",
+ ],
+)
+
+go_binary(
+ name = "extractaar_bin",
+ srcs = ["extractaar_bin.go"],
+ deps = [
+ ":extractaar",
+ "//src/common/golang:flagfile",
+ ],
+)
+
+go_test(
+ name = "extractaar_test",
+ size = "small",
+ srcs = [
+ "extractaar_test.go",
+ "validator_test.go",
+ ],
+ embed = [":extractaar"],
+ deps = [
+ "@com_github_google_go_cmp//cmp:go_default_library",
+ "@com_github_google_go_cmp//cmp/cmpopts:go_default_library",
+ ],
+)
diff --git a/src/tools/ak/extractaar/buildozer.go b/src/tools/ak/extractaar/buildozer.go
new file mode 100644
index 0000000..f7f52a5
--- /dev/null
+++ b/src/tools/ak/extractaar/buildozer.go
@@ -0,0 +1,48 @@
+// Copyright 2022 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package extractaar
+
+import (
+ "fmt"
+ "strings"
+)
+
+// BuildozerError represents a rule configuration error fixable with a buildozer command.
+type BuildozerError struct {
+ Msg string
+ RuleAttr string
+ NewValue string
+}
+
+func mergeBuildozerErrors(label string, errs []*BuildozerError) string {
+ var msg strings.Builder
+ msg.WriteString(fmt.Sprintf("error(s) found while processing aar '%s':\n", label))
+ var buildozerCommand strings.Builder
+ buildozerCommand.WriteString("Use the following command to fix the target:\nbuildozer ")
+ useBuildozer := false
+ for _, err := range errs {
+ msg.WriteString(fmt.Sprintf("\t- %s\n", err.Msg))
+ if err.NewValue != "" {
+ useBuildozer = true
+ buildozerCommand.WriteString(fmt.Sprintf("'set %s %s' ", err.RuleAttr, err.NewValue))
+ }
+ }
+ buildozerCommand.WriteString(label)
+
+ if useBuildozer {
+ msg.WriteString(buildozerCommand.String())
+ }
+ return msg.String()
+}
diff --git a/src/tools/ak/extractaar/extractaar.go b/src/tools/ak/extractaar/extractaar.go
new file mode 100644
index 0000000..2a431fa
--- /dev/null
+++ b/src/tools/ak/extractaar/extractaar.go
@@ -0,0 +1,286 @@
+// Copyright 2021 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package extractaar extracts files from an aar.
+package extractaar
+
+import (
+ "archive/zip"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "src/tools/ak/types"
+)
+
+// A tristate may be true, false, or unset
+type tristate int
+
+func (t tristate) isSet() bool {
+ return t == tsTrue || t == tsFalse
+}
+
+func (t tristate) value() bool {
+ return t == tsTrue
+}
+
+const (
+ tsTrue = 1
+ tsFalse = -1
+
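+ // aar file categories. iota counts const specs from the top of this block, so manifest starts at 2; only the distinctness of these values matters.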
+ manifest = iota
+ res
+ assets
+)
+
+var (
+ // Cmd defines the command to run the extractor.
+ Cmd = types.Command{
+ Init: Init,
+ Run: Run,
+ Desc: desc,
+ Flags: []string{
+ "aar", "label",
+ "out_manifest", "out_res_dir", "out_assets_dir",
+ "has_res", "has_assets",
+ },
+ }
+
+ aar string
+ label string
+ outputManifest string
+ outputResDir string
+ outputAssetsDir string
+ hasRes int
+ hasAssets int
+
+ initOnce sync.Once
+)
+
+// Init initializes the extractor.
+func Init() {
+ initOnce.Do(func() {
+ flag.StringVar(&aar, "aar", "", "Path to the aar")
+ flag.StringVar(&label, "label", "", "Target's label")
+ flag.StringVar(&outputManifest, "out_manifest", "", "Output manifest")
+ flag.StringVar(&outputResDir, "out_res_dir", "", "Output resources directory")
+ flag.StringVar(&outputAssetsDir, "out_assets_dir", "", "Output assets directory")
+ flag.IntVar(&hasRes, "has_res", 0, "Whether the aar has resources")
+ flag.IntVar(&hasAssets, "has_assets", 0, "Whether the aar has assets")
+ })
+}
+
+func desc() string {
+ return "Extracts files from an AAR"
+}
+
+type aarFile struct {
+ path string
+ relPath string
+}
+
+func (file *aarFile) String() string {
+ return fmt.Sprintf("%s:%s", file.path, file.relPath)
+}
+
+type toCopy struct {
+ src string
+ dest string
+}
+
+// Run runs the extractor
+func Run() {
+ if err := doWork(aar, label, outputManifest, outputResDir, outputAssetsDir, hasRes, hasAssets); err != nil {
+ log.Fatal(err)
+ }
+}
+
+func doWork(aar, label, outputManifest, outputResDir, outputAssetsDir string, hasRes, hasAssets int) error {
+ tmpDir, err := os.MkdirTemp("", "extractaar_")
+ if err != nil {
+ return err
+ }
+ defer os.RemoveAll(tmpDir)
+
+ files, err := extractAAR(aar, tmpDir)
+ if err != nil {
+ return err
+ }
+
+ validators := map[int]validator{
+ manifest: manifestValidator{dest: outputManifest},
+ res: resourceValidator{dest: outputResDir, hasRes: tristate(hasRes), ruleAttr: "has_res"},
+ assets: resourceValidator{dest: outputAssetsDir, hasRes: tristate(hasAssets), ruleAttr: "has_assets"},
+ }
+
+ var filesToCopy []*toCopy
+ var validationErrs []*BuildozerError
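+ // Validate each file group (manifest, res, assets) and gather every buildozer-fixable error so they can be reported together.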
+ for fileType, files := range groupAARFiles(files) {
+ validatedFiles, err := validators[fileType].validate(files)
+ if err != nil {
+ validationErrs = append(validationErrs, err)
+ continue
+ }
+ filesToCopy = append(filesToCopy, validatedFiles...)
+ }
+
+ if len(validationErrs) != 0 {
+ return errors.New(mergeBuildozerErrors(label, validationErrs))
+ }
+
+ for _, file := range filesToCopy {
+ if err := copyFile(file.src, file.dest); err != nil {
+ return err
+ }
+ }
+
+ // TODO(ostonge): Add has_res/has_assets attr to avoid having to do this
+ // We need to create at least one file so that Bazel does not complain
+ // that the output tree artifact was not created.
+ if err := createIfEmpty(outputResDir, "res/values/empty.xml", "<resources/>"); err != nil {
+ return err
+ }
+ // aapt will ignore this file and not print an error message, because it
+ // thinks that it is a swap file
+ if err := createIfEmpty(outputAssetsDir, "assets/empty_asset_generated_by_bazel~", ""); err != nil {
+ return err
+ }
+ return nil
+}
+
+func groupAARFiles(aarFiles []*aarFile) map[int][]*aarFile {
+ // Map of file type to list of aarFiles
+ filesMap := make(map[int][]*aarFile)
+ for _, fileType := range []int{manifest, res, assets} {
+ filesMap[fileType] = make([]*aarFile, 0)
+ }
+
+ for _, file := range aarFiles {
+ if file.relPath == "AndroidManifest.xml" {
+ filesMap[manifest] = append(filesMap[manifest], file)
+ } else if strings.HasPrefix(file.relPath, "res"+string(os.PathSeparator)) {
+ filesMap[res] = append(filesMap[res], file)
+ } else if strings.HasPrefix(file.relPath, "assets"+string(os.PathSeparator)) {
+ filesMap[assets] = append(filesMap[assets], file)
+ }
+ // TODO(ostonge): support jar and aidl files
+ }
+ return filesMap
+}
+
+func extractAAR(aar string, dest string) ([]*aarFile, error) {
+ reader, err := zip.OpenReader(aar)
+ if err != nil {
+ return nil, err
+ }
+ defer reader.Close()
+
+ var files []*aarFile
+ for _, f := range reader.File {
+ if f.FileInfo().IsDir() {
+ continue
+ }
+ extractedPath := filepath.Join(dest, f.Name)
+ if err := extractFile(f, extractedPath); err != nil {
+ return nil, err
+ }
+ files = append(files, &aarFile{path: extractedPath, relPath: f.Name})
+ }
+ return files, nil
+}
+
+func extractFile(file *zip.File, dest string) error {
+ if err := os.MkdirAll(filepath.Dir(dest), os.ModePerm); err != nil {
+ return err
+ }
+ outFile, err := os.OpenFile(dest, os.O_WRONLY|os.O_CREATE, file.Mode())
+ if err != nil {
+ return err
+ }
+ defer outFile.Close()
+
+ rc, err := file.Open()
+ if err != nil {
+ return err
+ }
+ defer rc.Close()
+
+ _, err = io.Copy(outFile, rc)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func copyFile(name, dest string) error {
+ in, err := os.Open(name)
+ if err != nil {
+ return err
+ }
+ defer in.Close()
+
+ if err := os.MkdirAll(filepath.Dir(dest), os.ModePerm); err != nil {
+ return err
+ }
+ out, err := os.Create(dest)
+ if err != nil {
+ return err
+ }
+ defer out.Close()
+
+ _, err = io.Copy(out, in)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func dirIsEmpty(dir string) (bool, error) {
+ f, err := os.Open(dir)
+ if os.IsNotExist(err) {
+ return true, nil
+ }
+ if err != nil {
+ return false, err
+ }
+ defer f.Close()
+
+ _, err = f.Readdirnames(1)
+ if err == io.EOF {
+ return true, nil
+ }
+ return false, err
+}
+
+// createIfEmpty creates the file with the given content if the directory is empty or does not exist.
+func createIfEmpty(dir, filename, content string) error {
+ isEmpty, err := dirIsEmpty(dir)
+ if err != nil {
+ return err
+ }
+ if isEmpty {
+ dest := filepath.Join(dir, filename)
+ if err := os.MkdirAll(filepath.Dir(dest), os.ModePerm); err != nil {
+ return err
+ }
+ return os.WriteFile(dest, []byte(content), 0644)
+ }
+ return nil
+}
diff --git a/src/tools/ak/extractaar/extractaar_bin.go b/src/tools/ak/extractaar/extractaar_bin.go
new file mode 100644
index 0000000..bc7488b
--- /dev/null
+++ b/src/tools/ak/extractaar/extractaar_bin.go
@@ -0,0 +1,29 @@
+// Copyright 2021 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// extractaar_bin is a command line tool that extracts files from an aar.
+package main
+
+import (
+ "flag"
+
+ _ "src/common/golang/flagfile"
+ "src/tools/ak/extractaar/extractaar"
+)
+
+func main() {
+ extractaar.Init()
+ flag.Parse()
+ extractaar.Run()
+}
diff --git a/src/tools/ak/extractaar/extractaar_test.go b/src/tools/ak/extractaar/extractaar_test.go
new file mode 100644
index 0000000..e0595c2
--- /dev/null
+++ b/src/tools/ak/extractaar/extractaar_test.go
@@ -0,0 +1,73 @@
+// Copyright 2022 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package extractaar
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+)
+
+func TestGroupAARFiles(t *testing.T) {
+ tests := []struct {
+ name string
+ files []*aarFile
+ expectedMap map[int][]*aarFile
+ }{
+ {
+ name: "empty aar",
+ files: []*aarFile{},
+ expectedMap: map[int][]*aarFile{
+ manifest: []*aarFile{},
+ res: []*aarFile{},
+ assets: []*aarFile{},
+ },
+ },
+ {
+ name: "simple aar",
+ files: []*aarFile{
+ &aarFile{relPath: "AndroidManifest.xml"},
+ &aarFile{relPath: "res/values/strings.xml"},
+ &aarFile{relPath: "lint.jar"},
+ &aarFile{relPath: "proguard.txt"},
+ &aarFile{relPath: "classes.jar"},
+ &aarFile{relPath: "assetsdir/values.txt"},
+ &aarFile{relPath: "libs/foo.jar"},
+ &aarFile{relPath: "resource/some/file.txt"},
+ &aarFile{relPath: "assets/some/asset.png"},
+ },
+ expectedMap: map[int][]*aarFile{
+ manifest: []*aarFile{
+ &aarFile{relPath: "AndroidManifest.xml"},
+ },
+ res: []*aarFile{
+ &aarFile{relPath: "res/values/strings.xml"},
+ },
+ assets: []*aarFile{
+ &aarFile{relPath: "assets/some/asset.png"},
+ },
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ filesMap := groupAARFiles(tc.files)
+ if diff := cmp.Diff(tc.expectedMap, filesMap, cmp.AllowUnexported(aarFile{})); diff != "" {
+ t.Errorf("groupAARFiles(%v) returned diff (-want, +got):\n%v", tc.files, diff)
+ }
+ })
+ }
+}
diff --git a/src/tools/ak/extractaar/validator.go b/src/tools/ak/extractaar/validator.go
new file mode 100644
index 0000000..2a0d845
--- /dev/null
+++ b/src/tools/ak/extractaar/validator.go
@@ -0,0 +1,77 @@
+// Copyright 2022 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package extractaar
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+)
+
+func boolToString(b bool) string {
+ return strings.Title(fmt.Sprintf("%t", b))
+}
+
+type validator interface {
+ validate(files []*aarFile) ([]*toCopy, *BuildozerError)
+}
+
+type manifestValidator struct {
+ dest string
+}
+
+func (v manifestValidator) validate(files []*aarFile) ([]*toCopy, *BuildozerError) {
+ var filesToCopy []*toCopy
+ seen := false
+ for _, file := range files {
+ if seen {
+ return nil, &BuildozerError{Msg: "More than one manifest was found"}
+ }
+ seen = true
+ filesToCopy = append(filesToCopy, &toCopy{src: file.path, dest: v.dest})
+ }
+ if !seen {
+ return nil, &BuildozerError{Msg: "No manifest was found"}
+ }
+ return filesToCopy, nil
+}
+
+type resourceValidator struct {
+ dest string
+ ruleAttr string
+ hasRes tristate
+}
+
+func (v resourceValidator) validate(files []*aarFile) ([]*toCopy, *BuildozerError) {
+ var filesToCopy []*toCopy
+ seen := false
+ for _, file := range files {
+ seen = true
+ filesToCopy = append(filesToCopy,
+ &toCopy{src: file.path, dest: filepath.Join(v.dest, file.relPath)},
+ )
+ }
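+ // If has_res/has_assets was set explicitly, it must match whether files were actually found; otherwise report the corrected value as a buildozer fix.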
+ if v.hasRes.isSet() {
+ if seen != v.hasRes.value() {
+ var not string
+ if !seen {
+ not = "not "
+ }
+ msg := fmt.Sprintf("%s attribute is %s, but files were %sfound", v.ruleAttr, boolToString(v.hasRes.value()), not)
+ return nil, &BuildozerError{Msg: msg, RuleAttr: v.ruleAttr, NewValue: boolToString(seen)}
+ }
+ }
+ return filesToCopy, nil
+}
diff --git a/src/tools/ak/extractaar/validator_test.go b/src/tools/ak/extractaar/validator_test.go
new file mode 100644
index 0000000..24b7003
--- /dev/null
+++ b/src/tools/ak/extractaar/validator_test.go
@@ -0,0 +1,175 @@
+// Copyright 2022 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package extractaar
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/google/go-cmp/cmp/cmpopts"
+)
+
+func TestValidateManifest(t *testing.T) {
+ tests := []struct {
+ name string
+ files []*aarFile
+ dest string
+ expectedFiles []*toCopy
+ }{
+ {
+ name: "one manifest",
+ files: []*aarFile{
+ &aarFile{path: "/tmp/aar/AndroidManifest.xml"},
+ },
+ dest: "/dest/outputManifest.xml",
+ expectedFiles: []*toCopy{
+ &toCopy{src: "/tmp/aar/AndroidManifest.xml", dest: "/dest/outputManifest.xml"},
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ validator := manifestValidator{dest: tc.dest}
+ files, err := validator.validate(tc.files)
+ if err != nil {
+ t.Fatalf("manifestValidator.validate(%s) unexpected error: %v", tc.files, err)
+ }
+ if diff := cmp.Diff(tc.expectedFiles, files, cmp.AllowUnexported(toCopy{})); diff != "" {
+ t.Errorf("manifestValidator.validate(%s) returned diff (-want, +got):\n%v", tc.files, diff)
+ }
+ })
+ }
+}
+
+func TestValidateManifestError(t *testing.T) {
+ tests := []struct {
+ name string
+ files []*aarFile
+ }{
+ {
+ name: "no manifest",
+ files: []*aarFile{},
+ },
+ {
+ name: "multiple manifests",
+ files: []*aarFile{
+ &aarFile{path: "/tmp/aar/AndroidManifest.xml"},
+ &aarFile{path: "/tmp/aar/SecondAndroidManifest.xml"},
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ validator := manifestValidator{}
+ if _, err := validator.validate(tc.files); err == nil {
+ t.Errorf("manifestValidator.validate(%s) expected error but test succeeded: %v", tc.files, err)
+ }
+ })
+ }
+}
+
+func TestValidateResources(t *testing.T) {
+ tests := []struct {
+ name string
+ files []*aarFile
+ dest string
+ hasRes tristate
+ expectedFiles []*toCopy
+ }{
+ {
+ name: "has resources with valid hasRes attribute",
+ files: []*aarFile{
+ &aarFile{path: "/tmp/aar/res/values/strings.xml", relPath: "res/values/strings.xml"},
+ &aarFile{path: "/tmp/aar/res/layout/activity.xml", relPath: "res/layout/activity.xml"},
+ },
+ hasRes: tristate(1),
+ dest: "/dest/outputres",
+ expectedFiles: []*toCopy{
+ &toCopy{src: "/tmp/aar/res/values/strings.xml", dest: "/dest/outputres/res/values/strings.xml"},
+ &toCopy{src: "/tmp/aar/res/layout/activity.xml", dest: "/dest/outputres/res/layout/activity.xml"},
+ },
+ },
+ {
+ name: "does not have resources with valid hasRes attribute",
+ files: []*aarFile{},
+ hasRes: tristate(0),
+ dest: "/dest/outputres",
+ expectedFiles: nil,
+ },
+ {
+ name: "no resources and checks disabled",
+ files: []*aarFile{},
+ hasRes: tristate(-1),
+ dest: "/dest/outputres",
+ expectedFiles: nil,
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ validator := resourceValidator{dest: tc.dest, hasRes: tc.hasRes}
+ files, err := validator.validate(tc.files)
+ if err != nil {
+ t.Fatalf("resourceValidator.validate(%s) unexpected error: %v", tc.files, err)
+ }
+ if diff := cmp.Diff(tc.expectedFiles, files, cmp.AllowUnexported(toCopy{})); diff != "" {
+ t.Errorf("resourceValidator.validate(%s) returned diff (-want, +got):\n%v", tc.files, diff)
+ }
+ })
+ }
+}
+
+func TestValidateResourcesError(t *testing.T) {
+ tests := []struct {
+ name string
+ files []*aarFile
+ hasRes tristate
+ ruleAttr string
+ expectedError *BuildozerError
+ }{
+ {
+ name: "has resources with invalid hasRes attribute",
+ files: []*aarFile{
+ &aarFile{path: "/tmp/aar/res/values/strings.xml", relPath: "res/values/strings.xml"},
+ &aarFile{path: "/tmp/aar/res/layout/activity.xml", relPath: "res/layout/activity.xml"},
+ },
+ hasRes: tristate(-1),
+ ruleAttr: "test",
+ expectedError: &BuildozerError{RuleAttr: "test", NewValue: "True"},
+ },
+ {
+ name: "no resources with invalid hasRes attribute",
+ files: []*aarFile{},
+ hasRes: tristate(1),
+ ruleAttr: "test",
+ expectedError: &BuildozerError{RuleAttr: "test", NewValue: "False"},
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ validator := resourceValidator{ruleAttr: tc.ruleAttr, hasRes: tc.hasRes}
+ _, err := validator.validate(tc.files)
+ if err == nil {
+ t.Fatalf("resourceValidator.validate(%s) expected an error, got none", tc.files)
+ }
+ if diff := cmp.Diff(tc.expectedError, err, cmpopts.IgnoreFields(BuildozerError{}, "Msg")); diff != "" {
+ t.Errorf("resourceValidator.validate(%s) returned diff (-want, +got):\n%v", tc.files, diff)
+ }
+ })
+ }
+}
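
The error cases above pin down the cross-check resourceValidator performs between the files actually found in the AAR and the rule's has-resources attribute: resources present while the attribute is left unset should yield a BuildozerError suggesting "True", and an attribute of true with no resources should suggest "False". A rough sketch of that decision logic, written in Python purely for illustration (the Tristate and check_resources names are hypothetical, and the -1/0/1 mapping to unset/false/true is inferred from the test inputs, not taken from the Go sources):

from enum import IntEnum


class Tristate(IntEnum):
  """Hypothetical mirror of the Go tristate: -1 unset, 0 false, 1 true."""
  UNSET = -1
  FALSE = 0
  TRUE = 1


def check_resources(res_files, has_res, rule_attr):
  """Returns a suggested attribute fix, or None when has_res matches reality."""
  if res_files and has_res != Tristate.TRUE:
    # Resources exist in the AAR but the rule does not declare them
    # (the unset case is what TestValidateResourcesError exercises).
    return {"rule_attr": rule_attr, "new_value": "True"}
  if not res_files and has_res == Tristate.TRUE:
    # The rule claims resources the AAR does not actually contain.
    return {"rule_attr": rule_attr, "new_value": "False"}
  return None

# check_resources(["res/values/strings.xml"], Tristate.UNSET, "test")
#   -> {"rule_attr": "test", "new_value": "True"}
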
diff --git a/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor.py b/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor.py
index b7192f7..d45921a 100644
--- a/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor.py
+++ b/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor.py
@@ -28,6 +28,7 @@ from absl import flags
BUMP = "bump"
VALIDATE = "validate"
+SET_DEFAULT = "set_default"
USES_SDK = "uses-sdk"
MIN_SDK_ATTRIB = "{http://schemas.android.com/apk/res/android}minSdkVersion"
@@ -37,8 +38,8 @@ FLAGS = flags.FLAGS
flags.DEFINE_enum(
"action",
None,
- [BUMP, VALIDATE],
- f"Action to perform, either {BUMP} or {VALIDATE}")
+ [BUMP, VALIDATE, SET_DEFAULT],
+ f"Action to perform: one of {BUMP}, {VALIDATE}, or {SET_DEFAULT}")
flags.DEFINE_string(
"manifest",
None,
@@ -48,6 +49,12 @@ flags.DEFINE_integer(
0,
"Min SDK floor",
lower_bound=0)
+# Needed for SET_DEFAULT
+flags.DEFINE_string(
+ "default_min_sdk",
+ None,
+ "Default min SDK")
+# Needed for BUMP and SET_DEFAULT
flags.DEFINE_string(
"output",
None,
@@ -59,6 +66,22 @@ class MinSdkError(Exception):
"""Raised when there is a problem with the min SDK attribute in AndroidManifest.xml."""
+def ParseNamespaces(xml_content):
+ """Registers namespaces from xml_content to keep their prefixes on output.
+
+ Args:
+ xml_content: str, the contents of the AndroidManifest.xml file
+ """
+ ns_parser = ET.XMLPullParser(events=["start-ns"])
+ ns_parser.feed(xml_content)
+ ns_parser.close()
+ for _, ns_tuple in ns_parser.read_events():
+ try:
+ ET.register_namespace(ns_tuple[0], ns_tuple[1])
+ except ValueError:
+ pass
+
+
def _BumpMinSdk(xml_content, min_sdk_floor):
"""Checks the min SDK in xml_content and replaces with min_sdk_floor if needed.
@@ -76,15 +99,7 @@ def _BumpMinSdk(xml_content, min_sdk_floor):
if min_sdk_floor == 0:
return xml_content, "No min SDK floor specified. Manifest unchanged."
- # Parse namespaces first to keep the prefix.
- ns_parser = ET.XMLPullParser(events=["start-ns"])
- ns_parser.feed(xml_content)
- ns_parser.close()
- for _, ns_tuple in ns_parser.read_events():
- try:
- ET.register_namespace(ns_tuple[0], ns_tuple[1])
- except ValueError:
- pass
+ ParseNamespaces(xml_content)
root = ET.fromstring(xml_content)
uses_sdk = root.find(USES_SDK)
@@ -163,6 +178,49 @@ def _ValidateMinSdk(xml_content, min_sdk_floor):
return f"minSdkVersion = {min_sdk}\n min SDK floor = {min_sdk_floor}"
+def _SetDefaultMinSdk(xml_content, default_min_sdk):
+ """Sets the min SDK in xml_content to default_min_sdk if it is not already set.
+
+ Args:
+ xml_content: str, the contents of the AndroidManifest.xml file
+ default_min_sdk: str, either a number or the full name of an unreleased
+ version
+
+ Returns:
+ A tuple with the following elements:
+ - str: The xml contents of the manifest with the default min SDK set.
+ This string is equal to the input if the min SDK is already set.
+ - str: log message of action taken
+ """
+ if default_min_sdk is None:
+ return xml_content, ("No default min SDK specified. Manifest "
+ "unchanged.")
+
+ ParseNamespaces(xml_content)
+
+ root = ET.fromstring(xml_content)
+ uses_sdk = root.find(USES_SDK)
+ if uses_sdk is None:
+ ET.SubElement(root, USES_SDK, {MIN_SDK_ATTRIB: default_min_sdk})
+ return (
+ ET.tostring(root, encoding="utf-8", xml_declaration=True),
+ "No uses-sdk element found while default is specified. "
+ + f"Min SDK ({default_min_sdk}) added.")
+
+ min_sdk = uses_sdk.get(MIN_SDK_ATTRIB)
+ if min_sdk is None:
+ uses_sdk.set(MIN_SDK_ATTRIB, str(default_min_sdk))
+ return (
+ ET.tostring(root, encoding="utf-8", xml_declaration=True),
+ "No minSdkVersion attribute found while default is specified"
+ + f" ({default_min_sdk}). Min SDK set to default.")
+
+ return (
+ xml_content,
+ f"minSdkVersion attribute already specified in the manifest ({min_sdk})."
+ + " Manifest unchanged.")
+
+
def main(unused_argv):
manifest_path = FLAGS.manifest
with open(manifest_path, "rb") as f:
@@ -178,6 +236,18 @@ def main(unused_argv):
with open(output_path, "wb") as f:
f.write(out_contents)
+ elif FLAGS.action == SET_DEFAULT:
+ output_path = FLAGS.output
+ dirname = os.path.dirname(output_path)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ out_contents, log_message = _SetDefaultMinSdk(
+ manifest, FLAGS.default_min_sdk
+ )
+ with open(output_path, "wb") as f:
+ f.write(out_contents)
+
elif FLAGS.action == VALIDATE:
try:
log_message = _ValidateMinSdk(manifest, FLAGS.min_sdk_floor)
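
The SET_DEFAULT branch only adds information to the manifest. A minimal sketch of the expected behavior, assuming the hypothetical manifest below (not a fixture from this repository); because ParseNamespaces registers the android prefix before the tree is rebuilt, the injected attribute keeps the android: prefix instead of an auto-generated one:

manifest_without_uses_sdk = b"""<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.app">
  <application/>
</manifest>"""

# Adds <uses-sdk android:minSdkVersion="21"/> and reports the action taken.
out, log = _SetDefaultMinSdk(manifest_without_uses_sdk, "21")

# A manifest that already carries minSdkVersion is returned unchanged.
unchanged, log = _SetDefaultMinSdk(out, "19")
assert unchanged == out
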
diff --git a/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor_test.py b/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor_test.py
index e49d2ac..87f9a8d 100644
--- a/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor_test.py
+++ b/src/tools/enforce_min_sdk_floor/enforce_min_sdk_floor_test.py
@@ -17,6 +17,7 @@ import unittest
import xml.etree.ElementTree as ET
from google3.third_party.bazel_rules.rules_android.src.tools.enforce_min_sdk_floor.enforce_min_sdk_floor import _BumpMinSdk
+from google3.third_party.bazel_rules.rules_android.src.tools.enforce_min_sdk_floor.enforce_min_sdk_floor import _SetDefaultMinSdk
from google3.third_party.bazel_rules.rules_android.src.tools.enforce_min_sdk_floor.enforce_min_sdk_floor import _ValidateMinSdk
from google3.third_party.bazel_rules.rules_android.src.tools.enforce_min_sdk_floor.enforce_min_sdk_floor import MIN_SDK_ATTRIB
@@ -80,6 +81,20 @@ class EnforceMinSdkFloorTest(unittest.TestCase):
min_sdk = ET.fromstring(out).find(USES_SDK).get(MIN_SDK_ATTRIB)
self.assertEqual(min_sdk, "14")
+ def test_set_default_no_uses(self):
+ out, _ = _SetDefaultMinSdk(MANIFEST_NO_USES_SDK, "11")
+ min_sdk = ET.fromstring(out).find(USES_SDK).get(MIN_SDK_ATTRIB)
+ self.assertEqual(min_sdk, "11")
+
+ def test_set_default_no_min_sdk(self):
+ out, _ = _SetDefaultMinSdk(MANIFEST_NO_USES_SDK, "current")
+ min_sdk = ET.fromstring(out).find(USES_SDK).get(MIN_SDK_ATTRIB)
+ self.assertEqual(min_sdk, "current")
+
+ def test_set_default_min_sdk_already_specified(self):
+ out, _ = _SetDefaultMinSdk(MANIFEST_MIN_SDK, "14")
+ self.assertEqual(out, MANIFEST_MIN_SDK)
+
def test_validate_no_min_sdk_floor(self):
_ = _ValidateMinSdk(MANIFEST_NO_USES_SDK, 0)