author     Vinh Tran <vinhdaitran@google.com>  2023-07-21 19:38:23 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2023-07-21 19:38:23 +0000
commit     f0df148dbeb9b9ed3816aad328ebe7c65efaaa24 (patch)
tree       c75dabb560288e11786211bdc61ba40dde4b8674
parent     3544b5a539d9e51161befd2ac3fdc04525bced91 (diff)
parent     9a4853f0327e0266818c8d6b4967e2e8f36b1a88 (diff)
download   bazelbuild-rules_cc-android14-qpr2-s1-release.tar.gz

Original change: https://android-review.googlesource.com/c/platform/external/bazelbuild-rules_cc/+/2663436

Change-Id: I450b9f32024fa1b0844cc21a825c26589feb3977
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--  .bazelci/presubmit.yml  53
-rw-r--r--  .bcr/metadata.template.json  6
-rw-r--r--  .bcr/presubmit.yml  8
-rw-r--r--  .bcr/source.template.json  5
-rw-r--r--  .gitignore  1
-rw-r--r--  AUTHORS  9
-rw-r--r--  BUILD  25
-rw-r--r--  CODEOWNERS  1
-rw-r--r--  CONTRIBUTING.md  28
-rw-r--r--  ISSUE_TEMPLATE.md  56
-rw-r--r--  LICENSE  202
-rw-r--r--  METADATA  14
-rw-r--r--  MODULE.bazel  14
-rw-r--r--  MODULE_LICENSE_APACHE2  0
-rw-r--r--  OWNERS  2
-rw-r--r--  README.md  81
-rw-r--r--  WORKSPACE  92
-rw-r--r--  cc/BUILD  85
-rw-r--r--  cc/action_names.bzl  180
-rw-r--r--  cc/cc_toolchain_config_lib.bzl  617
-rw-r--r--  cc/compiler/BUILD  71
-rw-r--r--  cc/defs.bzl  203
-rw-r--r--  cc/extensions.bzl  24
-rw-r--r--  cc/find_cc_toolchain.bzl  117
-rw-r--r--  cc/private/rules_impl/BUILD  18
-rw-r--r--  cc/private/rules_impl/cc_flags_supplier.bzl  35
-rw-r--r--  cc/private/rules_impl/cc_flags_supplier_lib.bzl  79
-rw-r--r--  cc/private/rules_impl/compiler_flag.bzl  29
-rw-r--r--  cc/private/rules_impl/native.bzl  34
-rw-r--r--  cc/private/toolchain/BUILD  95
-rw-r--r--  cc/private/toolchain/BUILD.empty  52
-rw-r--r--  cc/private/toolchain/BUILD.static.freebsd  112
-rw-r--r--  cc/private/toolchain/BUILD.toolchains.tpl  20
-rw-r--r--  cc/private/toolchain/BUILD.tpl  113
-rw-r--r--  cc/private/toolchain/BUILD.windows.tpl  316
-rw-r--r--  cc/private/toolchain/armeabi_cc_toolchain_config.bzl  82
-rw-r--r--  cc/private/toolchain/build_interface_so  8
-rw-r--r--  cc/private/toolchain/cc_configure.bzl  150
-rw-r--r--  cc/private/toolchain/cc_toolchain_config.bzl  1491
-rw-r--r--  cc/private/toolchain/clang_installation_error.bat.tpl  24
-rw-r--r--  cc/private/toolchain/empty.cc  15
-rw-r--r--  cc/private/toolchain/empty_cc_toolchain_config.bzl  42
-rw-r--r--  cc/private/toolchain/freebsd_cc_toolchain_config.bzl  307
-rwxr-xr-x  cc/private/toolchain/grep-includes.sh  19
-rw-r--r--  cc/private/toolchain/lib_cc_configure.bzl  286
-rwxr-xr-x  cc/private/toolchain/link_dynamic_library.sh  113
-rw-r--r--  cc/private/toolchain/linux_cc_wrapper.sh.tpl  25
-rw-r--r--  cc/private/toolchain/msys_gcc_installation_error.bat  23
-rw-r--r--  cc/private/toolchain/osx_cc_wrapper.sh.tpl  119
-rw-r--r--  cc/private/toolchain/unix_cc_configure.bzl  587
-rw-r--r--  cc/private/toolchain/unix_cc_toolchain_config.bzl  1200
-rw-r--r--  cc/private/toolchain/vc_installation_error.bat.tpl  24
-rw-r--r--  cc/private/toolchain/windows_cc_configure.bzl  703
-rw-r--r--  cc/private/toolchain/windows_cc_toolchain_config.bzl  1339
-rw-r--r--  cc/repositories.bzl  10
-rw-r--r--  cc/runfiles/BUILD  7
-rw-r--r--  cc/system_library.bzl  475
-rw-r--r--  cc/toolchain_utils.bzl  31
-rw-r--r--  examples/BUILD  43
-rw-r--r--  examples/custom_toolchain/BUILD  118
-rw-r--r--  examples/custom_toolchain/README.md  78
-rw-r--r--  examples/custom_toolchain/buildme.cc  4
-rwxr-xr-x  examples/custom_toolchain/sample_compiler  21
-rwxr-xr-x  examples/custom_toolchain/sample_linker  23
-rw-r--r--  examples/custom_toolchain/toolchain_config.bzl  77
-rw-r--r--  examples/experimental_cc_shared_library.bzl  48
-rw-r--r--  examples/my_c_archive/BUILD  50
-rw-r--r--  examples/my_c_archive/bar.c  1
-rw-r--r--  examples/my_c_archive/foo.c  15
-rw-r--r--  examples/my_c_archive/main.c  3
-rw-r--r--  examples/my_c_archive/my_c_archive.bzl  99
-rw-r--r--  examples/my_c_compile/BUILD  30
-rw-r--r--  examples/my_c_compile/foo.c  15
-rw-r--r--  examples/my_c_compile/my_c_compile.bzl  81
-rw-r--r--  examples/write_cc_toolchain_cpu/BUILD  27
-rw-r--r--  examples/write_cc_toolchain_cpu/write_cc_toolchain_cpu.bzl  32
-rw-r--r--  renovate.json  5
-rw-r--r--  tests/compiler_settings/BUILD  33
-rw-r--r--  tests/compiler_settings/main.cc  22
-rw-r--r--  tests/load_from_macro/BUILD  31
-rw-r--r--  tests/load_from_macro/foo.cc  13
-rw-r--r--  tests/load_from_macro/tags.bzl  17
-rw-r--r--  tests/simple_binary/BUILD  28
-rw-r--r--  tests/simple_binary/foo.cc  15
-rw-r--r--  tests/system_library/BUILD  13
-rwxr-xr-x  tests/system_library/system_library_test.sh  213
-rw-r--r--  tests/system_library/unittest.bash  801
-rw-r--r--  third_party/BUILD  1
-rw-r--r--  third_party/com/github/bazelbuild/bazel/src/main/protobuf/BUILD  30
-rw-r--r--  third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config.proto  548
-rw-r--r--  third_party/six.BUILD  16
-rw-r--r--  tools/migration/BUILD  150
-rw-r--r--  tools/migration/cc_toolchain_config_comparator.bzl  53
-rw-r--r--  tools/migration/convert_crosstool_to_starlark.go  101
-rw-r--r--  tools/migration/crosstool_query.py  53
-rw-r--r--  tools/migration/crosstool_to_starlark_lib.go  1419
-rw-r--r--  tools/migration/crosstool_to_starlark_lib_test.go  1756
-rw-r--r--  tools/migration/ctoolchain_comparator.py  127
-rw-r--r--  tools/migration/ctoolchain_comparator_lib.py  523
-rw-r--r--  tools/migration/ctoolchain_comparator_lib_test.py  1709
-rw-r--r--  tools/migration/ctoolchain_compare.bzl  49
-rw-r--r--  tools/migration/legacy_fields_migration_lib.py  564
-rw-r--r--  tools/migration/legacy_fields_migration_lib_test.py  1240
-rw-r--r--  tools/migration/legacy_fields_migrator.py  69
104 files changed, 20241 insertions, 0 deletions
diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml
new file mode 100644
index 0000000..8d7899d
--- /dev/null
+++ b/.bazelci/presubmit.yml
@@ -0,0 +1,53 @@
+---
+x_defaults:
+ # YAML has a feature for "repeated nodes"; BazelCI is fine with extra nodes
+ # it doesn't know about, so that is used to avoid repeating common subparts.
+ common: &common
+ # We have to list every package because, even with the exclusion notation -//foo,
+ # Bazel will still load the excluded package, and that fails because at the
+ # release Bazel version the cc_libraries do not have all the attributes.
+ build_targets:
+ - "//:all"
+ - "//cc:all"
+ - "//cc/private/rules_impl:all"
+ - "//cc/private/toolchain:all"
+ - "//cc/runfiles:all"
+ - "//examples:all"
+ - "//examples/my_c_archive:all"
+ - "//examples/my_c_compile:all"
+ - "//examples/write_cc_toolchain_cpu:all"
+ - "//tools/migration:all"
+ - "//tests/..."
+ test_flags:
+ - "--test_timeout=120"
+ test_targets:
+ - "//:all"
+ - "//cc:all"
+ - "//cc/private/rules_impl:all"
+ - "//cc/private/toolchain:all"
+ - "//examples:all"
+ - "//examples/my_c_archive:all"
+ - "//examples/my_c_compile:all"
+ - "//examples/write_cc_toolchain_cpu:all"
+ - "//tools/migration:all"
+ - "//tests/..."
+
+buildifier:
+ version: latest
+ warnings: "all"
+
+tasks:
+ ubuntu1804:
+ <<: *common
+ macos:
+ <<: *common
+ windows:
+ <<: *common
+ ubuntu_bzlmod:
+ name: Bzlmod
+ platform: ubuntu1804
+ build_flags:
+ - "--enable_bzlmod"
+ - "--ignore_dev_dependency"
+ build_targets:
+ - "//cc/..."
diff --git a/.bcr/metadata.template.json b/.bcr/metadata.template.json
new file mode 100644
index 0000000..9f0e465
--- /dev/null
+++ b/.bcr/metadata.template.json
@@ -0,0 +1,6 @@
+{
+ "homepage": "https://github.com/bazelbuild/rules_cc",
+ "maintainers": [],
+ "versions": [],
+ "yanked_versions": {}
+}
diff --git a/.bcr/presubmit.yml b/.bcr/presubmit.yml
new file mode 100644
index 0000000..52869b1
--- /dev/null
+++ b/.bcr/presubmit.yml
@@ -0,0 +1,8 @@
+matrix:
+ platform: ["centos7", "debian10", "macos", "ubuntu2004", "windows"]
+tasks:
+ verify_targets:
+ name: "Verify build targets"
+ platform: ${{ platform }}
+ build_targets:
+ - "@rules_cc//cc/..."
diff --git a/.bcr/source.template.json b/.bcr/source.template.json
new file mode 100644
index 0000000..4f14819
--- /dev/null
+++ b/.bcr/source.template.json
@@ -0,0 +1,5 @@
+{
+ "integrity": "",
+ "strip_prefix": "{REPO}-{VERSION}",
+ "url": "https://github.com/{OWNER}/{REPO}/archive/refs/tags/{TAG}.tar.gz"
+}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..65e8edc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/bazel-*
\ No newline at end of file
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..cb5854b
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,9 @@
+# This is the official list of authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+
+# Names should be added to this file as:
+# Name or Organization <email address>
+# The email address is not required for organizations.
+
+Google Inc.
diff --git a/BUILD b/BUILD
new file mode 100644
index 0000000..1ed7987
--- /dev/null
+++ b/BUILD
@@ -0,0 +1,25 @@
+load("//cc:defs.bzl", "cc_library")
+
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+exports_files(["LICENSE"])
+
+cc_library(name = "empty_lib")
+
+# Label flag for extra libraries to be linked into every binary.
+# TODO(bazel-team): Support passing flag multiple times to build a list.
+label_flag(
+ name = "link_extra_libs",
+ build_setting_default = ":empty_lib",
+)
+
+# The final extra library to be linked into every binary target. This collects
+# the above flag, but may also include more libraries depending on config.
+cc_library(
+ name = "link_extra_lib",
+ deps = [
+ ":link_extra_libs",
+ ],
+)
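The `link_extra_libs` label flag above is a Bazel build setting, so it can be repointed from the command line. A minimal sketch of a library a consuming workspace might supply (the target name, file name, and the exact flag spelling in the comment are illustrative, not part of this change):

```starlark
load("@rules_cc//cc:defs.bzl", "cc_library")

# Hypothetical extra library to be linked into every binary, selected with
# something like:
#   bazel build //... --@rules_cc//:link_extra_libs=//:leak_check_runtime
cc_library(
    name = "leak_check_runtime",
    srcs = ["leak_check_runtime.cc"],
)
```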
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000..85a388b
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1 @@
+* @oquenchil @c-mita @comius @buildbreaker2021
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..939e534
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..af2e549
--- /dev/null
+++ b/ISSUE_TEMPLATE.md
@@ -0,0 +1,56 @@
+
+> ATTENTION! Please read and follow:
+> - if this is a _question_ about how to build / test / query / deploy using Bazel, ask it on StackOverflow instead: https://stackoverflow.com/questions/tagged/bazel
+> - if this is a _discussion starter_, send it to bazel-discuss@googlegroups.com or cc-bazel-discuss@googlegroups.com
+> - if this is a _bug_ or _feature request_, fill the form below as best as you can.
+
+### Description of the problem / feature request:
+
+> Replace this line with your answer.
+
+### Feature requests: what underlying problem are you trying to solve with this feature?
+
+> Replace this line with your answer.
+
+### Bugs: what's the simplest, easiest way to reproduce this bug? Please provide a minimal example if possible.
+
+> Replace this line with your answer.
+
+### What operating system are you running Bazel on?
+
+> Replace this line with your answer.
+
+### What's the output of `bazel info release`?
+
+> Replace this line with your answer.
+
+### If `bazel info release` returns "development version" or "(@non-git)", tell us how you built Bazel.
+
+> Replace this line with your answer.
+
+### What version of rules_cc do you use? Can you paste the workspace rule used to fetch rules_cc? What other relevant dependencies does your project have?
+
+> Replace this line with your answer.
+
+### What Bazel options do you use to trigger the issue? What C++ toolchain do you use?
+
+> Replace this line with your answer.
+
+### Have you found anything relevant by searching the web?
+
+> Replace these lines with your answer.
+>
+> Places to look:
+> * StackOverflow: http://stackoverflow.com/questions/tagged/bazel
+> * GitHub issues:
+> * https://github.com/bazelbuild/rules_cc/issues
+> * https://github.com/bazelbuild/bazel/issues
+> * email threads:
+> * https://groups.google.com/forum/#!forum/bazel-discuss
+> * https://groups.google.com/forum/#!forum/cc-bazel-discuss
+
+### Any other information, logs, or outputs that you want to share?
+
+> Replace these lines with your answer.
+>
+> If the files are large, upload as attachment or provide link.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..7a4a3ea
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
\ No newline at end of file
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..921edac
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,14 @@
+name: "bazelbuild-rules_cc"
+description:
+ "A repository of Starlark implementation of C++ rules in Bazel"
+
+third_party {
+ url {
+ type: GIT
+ value: "https://github.com/bazelbuild/rules_cc"
+ }
+ version: "34bcaf6223a39ec002efcf06e110871a6f562f44"
+ last_upgrade_date { year: 2023 month: 7 day: 14 }
+ license_type: NOTICE
+}
+
diff --git a/MODULE.bazel b/MODULE.bazel
new file mode 100644
index 0000000..3848c66
--- /dev/null
+++ b/MODULE.bazel
@@ -0,0 +1,14 @@
+module(
+ name = "rules_cc",
+ version = "0.0.4",
+ compatibility_level = 1,
+)
+
+bazel_dep(name = "platforms", version = "0.0.6")
+
+cc_configure = use_extension("@rules_cc//cc:extensions.bzl", "cc_configure")
+use_repo(cc_configure, "local_config_cc_toolchains")
+
+register_toolchains("@local_config_cc_toolchains//:all")
+
+bazel_dep(name = "bazel_skylib", version = "1.3.0", dev_dependency = True)
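For comparison with the WORKSPACE setup below, a downstream project consuming this module through bzlmod would declare a `bazel_dep` on it; a minimal sketch (the consumer's module name and version are hypothetical):

```starlark
# MODULE.bazel of a hypothetical downstream project.
module(name = "my_project", version = "0.1.0")

bazel_dep(name = "rules_cc", version = "0.0.4")
```

Targets in that project can then load `cc_library` and friends from `@rules_cc//cc:defs.bzl` as usual.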
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..1ee860c
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,2 @@
+include platform/build/soong:/OWNERS
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2cc1973
--- /dev/null
+++ b/README.md
@@ -0,0 +1,81 @@
+# C++ rules for Bazel
+
+* Postsubmit [![Build status](https://badge.buildkite.com/f03592ae2d7d25a2abc2a2ba776e704823fa17fd3e061f5103.svg?branch=main)](https://buildkite.com/bazel/rules-cc)
+* Postsubmit + Current Bazel Incompatible flags [![Build status](https://badge.buildkite.com/5ba709cc33e5855078a1f8570adcf8e0a78ea93591bc0b4e81.svg?branch=master)](https://buildkite.com/bazel/rules-cc-plus-bazelisk-migrate)
+
+This repository contains the Starlark implementation of C++ rules in Bazel.
+
+The rules are being incrementally converted from their native implementations in the [Bazel source tree](https://source.bazel.build/bazel/+/master:src/main/java/com/google/devtools/build/lib/rules/cpp/).
+
+For the list of C++ rules, see the Bazel
+[documentation](https://docs.bazel.build/versions/main/be/overview.html).
+
+# Getting Started
+
+There is no need to use rules from this repository just yet. If you want to use
+`rules_cc` anyway, add the following to your `WORKSPACE` file:
+
+```starlark
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+http_archive(
+ name = "rules_cc",
+ urls = ["https://github.com/bazelbuild/rules_cc/archive/refs/tags/<VERSION>.tar.gz"],
+ sha256 = "...",
+)
+```
+
+Then, in your `BUILD` files, import and use the rules:
+
+```starlark
+load("@rules_cc//cc:defs.bzl", "cc_library")
+
+cc_library(
+ ...
+)
+```
+
+# Using the rules_cc toolchain
+
+If you'd like to use the cc toolchain defined in this repo, add this to
+your WORKSPACE after you include rules_cc:
+
+```bzl
+load("@rules_cc//cc:repositories.bzl", "rules_cc_dependencies", "rules_cc_toolchains")
+
+rules_cc_dependencies()
+
+rules_cc_toolchains()
+```
+
+# Migration Tools
+
+This repository also contains migration tools that can be used to migrate your
+project for Bazel incompatible changes.
+
+## Legacy fields migrator
+
+Script that migrates legacy crosstool fields into features
+([incompatible flag](https://github.com/bazelbuild/bazel/issues/6861),
+[tracking issue](https://github.com/bazelbuild/bazel/issues/5883)).
+
+TLDR:
+
+```
+bazel run @rules_cc//tools/migration:legacy_fields_migrator -- \
+ --input=my_toolchain/CROSSTOOL \
+ --inline
+```
+
+# Contributing
+
+Bazel and `rules_cc` are the work of many contributors. We appreciate your help!
+
+To contribute, please read the contribution guidelines: [CONTRIBUTING.md](https://github.com/bazelbuild/rules_cc/blob/main/CONTRIBUTING.md).
+
+Note that `rules_cc` uses the GitHub issue tracker for bug reports and feature requests only.
+For asking questions see:
+
+* [Stack Overflow](https://stackoverflow.com/questions/tagged/bazel)
+* [`rules_cc` mailing list](https://groups.google.com/forum/#!forum/cc-bazel-discuss)
+* Slack channel `#cc` on [slack.bazel.build](https://slack.bazel.build)
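The `cc_library(...)` snippet in the README above is deliberately elided; a minimal sketch of what concrete targets might look like (target and file names are hypothetical):

```starlark
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")

cc_library(
    name = "hello_lib",
    srcs = ["hello_lib.cc"],
    hdrs = ["hello_lib.h"],
)

cc_binary(
    name = "hello",
    srcs = ["main.cc"],
    deps = [":hello_lib"],
)
```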
diff --git a/WORKSPACE b/WORKSPACE
new file mode 100644
index 0000000..8550fce
--- /dev/null
+++ b/WORKSPACE
@@ -0,0 +1,92 @@
+workspace(name = "rules_cc")
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+http_archive(
+ name = "bazel_skylib",
+ sha256 = "b8a1527901774180afc798aeb28c4634bdccf19c4d98e7bdd1ce79d1fe9aaad7",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.4.1/bazel-skylib-1.4.1.tar.gz",
+ "https://github.com/bazelbuild/bazel-skylib/releases/download/1.4.1/bazel-skylib-1.4.1.tar.gz",
+ ],
+)
+
+http_archive(
+ name = "com_google_googletest",
+ sha256 = "81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2",
+ strip_prefix = "googletest-release-1.12.1",
+ urls = [
+ "https://mirror.bazel.build/github.com/google/googletest/archive/refs/tags/release-1.12.1.tar.gz",
+ "https://github.com/google/googletest/archive/refs/tags/release-1.12.1.tar.gz",
+ ],
+)
+
+http_archive(
+ name = "io_abseil_py",
+ sha256 = "0fb3a4916a157eb48124ef309231cecdfdd96ff54adf1660b39c0d4a9790a2c0",
+ strip_prefix = "abseil-py-1.4.0",
+ urls = [
+ "https://github.com/abseil/abseil-py/archive/refs/tags/v1.4.0.tar.gz",
+ ],
+)
+
+http_archive(
+ name = "io_bazel_rules_go",
+ sha256 = "dd926a88a564a9246713a9c00b35315f54cbd46b31a26d5d8fb264c07045f05d",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.38.1/rules_go-v0.38.1.zip",
+ "https://github.com/bazelbuild/rules_go/releases/download/v0.38.1/rules_go-v0.38.1.zip",
+ ],
+)
+
+http_archive(
+ name = "platforms",
+ sha256 = "5308fc1d8865406a49427ba24a9ab53087f17f5266a7aabbfc28823f3916e1ca",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz",
+ "https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz",
+ ],
+)
+
+http_archive(
+ name = "py_mock",
+ patch_cmds = [
+ "mkdir -p py/mock",
+ "mv mock.py py/mock/__init__.py",
+ """echo 'licenses(["notice"])' > BUILD""",
+ "touch py/BUILD",
+ """echo 'py_library(name = "mock", srcs = ["__init__.py"], visibility = ["//visibility:public"],)' > py/mock/BUILD""",
+ ],
+ sha256 = "b839dd2d9c117c701430c149956918a423a9863b48b09c90e30a6013e7d2f44f",
+ strip_prefix = "mock-1.0.1",
+ urls = [
+ "https://mirror.bazel.build/pypi.python.org/packages/source/m/mock/mock-1.0.1.tar.gz",
+ "https://pypi.python.org/packages/source/m/mock/mock-1.0.1.tar.gz",
+ ],
+)
+
+http_archive(
+ name = "rules_proto",
+ sha256 = "9a0503631679e9ab4e27d891ea60fee3e86a85654ea2048cae25516171dd260e",
+ strip_prefix = "rules_proto-e51f588e5932966ab9e63e0b0f6de6f740cf04c4",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/e51f588e5932966ab9e63e0b0f6de6f740cf04c4.tar.gz",
+ "https://github.com/bazelbuild/rules_proto/archive/e51f588e5932966ab9e63e0b0f6de6f740cf04c4.tar.gz",
+ ],
+)
+
+load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
+
+bazel_skylib_workspace()
+
+load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
+
+go_rules_dependencies()
+
+go_register_toolchains(version = "1.19.4")
+
+load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains")
+
+rules_proto_dependencies()
+
+rules_proto_toolchains()
diff --git a/cc/BUILD b/cc/BUILD
new file mode 100644
index 0000000..aeb5beb
--- /dev/null
+++ b/cc/BUILD
@@ -0,0 +1,85 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"]) # Apache 2.0
+
+filegroup(
+ name = "all_files_for_testing",
+ srcs = glob(["**"]) + [
+ "//cc/private/rules_impl:srcs",
+ "//cc/private/toolchain:srcs",
+ ],
+)
+
+exports_files([
+ "defs.bzl",
+ "action_names.bzl",
+ "system_library.bzl",
+])
+
+# The toolchain type used to distinguish cc toolchains.
+alias(
+ name = "toolchain_type",
+ actual = "@bazel_tools//tools/cpp:toolchain_type",
+)
+
+filegroup(
+ name = "action_names_test_files",
+ testonly = True,
+ srcs = [
+ "BUILD",
+ "action_names.bzl",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "bzl_srcs",
+ srcs = glob([
+ "**/*.bzl",
+ ]) + [
+ "//cc/private/rules_impl:bzl_srcs",
+ "//cc/private/toolchain:bzl_srcs",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "srcs",
+ srcs = glob([
+ "**/*.bzl",
+ "**/BUILD",
+ ]) + [
+ "//cc/private/rules_impl:srcs",
+ "//cc/private/toolchain:srcs",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+# TODO(aiuto): Find a way to strip this rule from the distribution tarball.
+filegroup(
+ name = "distribution",
+ srcs = glob([
+ "**",
+ ]),
+ visibility = [
+ "//distro:__pkg__",
+ ],
+)
+
+cc_toolchain_alias(name = "current_cc_toolchain")
+
+cc_libc_top_alias(name = "current_libc_top")
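The `:toolchain_type` alias and `:current_cc_toolchain` targets above are what custom rules hook into to resolve the C++ toolchain. A sketch of such a rule, mirroring the `examples/write_cc_toolchain_cpu` pattern listed in the diffstat and assuming the `find_cc_toolchain` helper from `cc/find_cc_toolchain.bzl` (not shown in this hunk); the rule name and output are illustrative:

```starlark
load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cc_toolchain")

def _show_cc_toolchain_cpu_impl(ctx):
    # Resolve the C++ toolchain via @rules_cc//cc:toolchain_type (with a
    # fallback to the _cc_toolchain attribute) and write out its CPU.
    cc_toolchain = find_cc_toolchain(ctx)
    out = ctx.actions.declare_file(ctx.label.name + ".txt")
    ctx.actions.write(out, cc_toolchain.cpu)
    return [DefaultInfo(files = depset([out]))]

show_cc_toolchain_cpu = rule(
    implementation = _show_cc_toolchain_cpu_impl,
    attrs = {
        "_cc_toolchain": attr.label(
            default = Label("@rules_cc//cc:current_cc_toolchain"),
        ),
    },
    toolchains = ["@rules_cc//cc:toolchain_type"],
    fragments = ["cpp"],
)
```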
diff --git a/cc/action_names.bzl b/cc/action_names.bzl
new file mode 100644
index 0000000..82325d1
--- /dev/null
+++ b/cc/action_names.bzl
@@ -0,0 +1,180 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Constants for action names used for C++ rules."""
+
+# Name for the C compilation action.
+C_COMPILE_ACTION_NAME = "c-compile"
+
+# Name of the C++ compilation action.
+CPP_COMPILE_ACTION_NAME = "c++-compile"
+
+# Name of the linkstamp-compile action.
+LINKSTAMP_COMPILE_ACTION_NAME = "linkstamp-compile"
+
+# Name of the action used to compute CC_FLAGS make variable.
+CC_FLAGS_MAKE_VARIABLE_ACTION_NAME = "cc-flags-make-variable"
+
+# Name of the C++ module codegen action.
+CPP_MODULE_CODEGEN_ACTION_NAME = "c++-module-codegen"
+
+# Name of the C++ header parsing action.
+CPP_HEADER_PARSING_ACTION_NAME = "c++-header-parsing"
+
+# Name of the C++ module compile action.
+CPP_MODULE_COMPILE_ACTION_NAME = "c++-module-compile"
+
+# Name of the assembler action.
+ASSEMBLE_ACTION_NAME = "assemble"
+
+# Name of the assembly preprocessing action.
+PREPROCESS_ASSEMBLE_ACTION_NAME = "preprocess-assemble"
+
+# Name of the action producing ThinLto index.
+LTO_INDEXING_ACTION_NAME = "lto-indexing"
+
+# Name of the action producing ThinLto index for executable.
+LTO_INDEX_FOR_EXECUTABLE_ACTION_NAME = "lto-index-for-executable"
+
+# Name of the action producing ThinLto index for dynamic library.
+LTO_INDEX_FOR_DYNAMIC_LIBRARY_ACTION_NAME = "lto-index-for-dynamic-library"
+
+# Name of the action producing ThinLto index for nodeps dynamic library.
+LTO_INDEX_FOR_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME = "lto-index-for-nodeps-dynamic-library"
+
+# Name of the action compiling lto bitcodes into native objects.
+LTO_BACKEND_ACTION_NAME = "lto-backend"
+
+# Name of the link action producing executable binary.
+CPP_LINK_EXECUTABLE_ACTION_NAME = "c++-link-executable"
+
+# Name of the link action producing dynamic library.
+CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME = "c++-link-dynamic-library"
+
+# Name of the link action producing dynamic library that doesn't include its
+# transitive dependencies.
+CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME = "c++-link-nodeps-dynamic-library"
+
+# Name of the archiving action producing static library.
+CPP_LINK_STATIC_LIBRARY_ACTION_NAME = "c++-link-static-library"
+
+# Name of the action stripping the binary.
+STRIP_ACTION_NAME = "strip"
+
+# A string constant for the objc compilation action.
+OBJC_COMPILE_ACTION_NAME = "objc-compile"
+
+# A string constant for the objc++ compile action.
+OBJCPP_COMPILE_ACTION_NAME = "objc++-compile"
+
+# A string constant for the objc executable link action.
+OBJC_EXECUTABLE_ACTION_NAME = "objc-executable"
+
+# A string constant for the objc fully-link link action.
+OBJC_FULLY_LINK_ACTION_NAME = "objc-fully-link"
+
+# A string constant for the clif action.
+CLIF_MATCH_ACTION_NAME = "clif-match"
+
+ACTION_NAMES = struct(
+ c_compile = C_COMPILE_ACTION_NAME,
+ cpp_compile = CPP_COMPILE_ACTION_NAME,
+ linkstamp_compile = LINKSTAMP_COMPILE_ACTION_NAME,
+ cc_flags_make_variable = CC_FLAGS_MAKE_VARIABLE_ACTION_NAME,
+ cpp_module_codegen = CPP_MODULE_CODEGEN_ACTION_NAME,
+ cpp_header_parsing = CPP_HEADER_PARSING_ACTION_NAME,
+ cpp_module_compile = CPP_MODULE_COMPILE_ACTION_NAME,
+ assemble = ASSEMBLE_ACTION_NAME,
+ preprocess_assemble = PREPROCESS_ASSEMBLE_ACTION_NAME,
+ lto_indexing = LTO_INDEXING_ACTION_NAME,
+ lto_backend = LTO_BACKEND_ACTION_NAME,
+ lto_index_for_executable = LTO_INDEX_FOR_EXECUTABLE_ACTION_NAME,
+ lto_index_for_dynamic_library = LTO_INDEX_FOR_DYNAMIC_LIBRARY_ACTION_NAME,
+ lto_index_for_nodeps_dynamic_library = LTO_INDEX_FOR_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME,
+ cpp_link_executable = CPP_LINK_EXECUTABLE_ACTION_NAME,
+ cpp_link_dynamic_library = CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME,
+ cpp_link_nodeps_dynamic_library = CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME,
+ cpp_link_static_library = CPP_LINK_STATIC_LIBRARY_ACTION_NAME,
+ strip = STRIP_ACTION_NAME,
+ objc_compile = OBJC_COMPILE_ACTION_NAME,
+ objc_executable = OBJC_EXECUTABLE_ACTION_NAME,
+ objc_fully_link = OBJC_FULLY_LINK_ACTION_NAME,
+ objcpp_compile = OBJCPP_COMPILE_ACTION_NAME,
+ clif_match = CLIF_MATCH_ACTION_NAME,
+)
+
+# Names of actions that parse or compile C++ code.
+ALL_CPP_COMPILE_ACTION_NAMES = [
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ACTION_NAMES.clif_match,
+]
+
+# Names of actions that parse or compile C, C++ and assembly code.
+ALL_CC_COMPILE_ACTION_NAMES = ALL_CPP_COMPILE_ACTION_NAMES + [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.assemble,
+]
+
+# Names of actions that link C, C++ and assembly code.
+ALL_CC_LINK_ACTION_NAMES = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.lto_index_for_executable,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+]
+
+# Names of actions that link entire programs.
+CC_LINK_EXECUTABLE_ACTION_NAMES = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.lto_index_for_executable,
+]
+
+# Names of actions that link dynamic libraries.
+DYNAMIC_LIBRARY_LINK_ACTION_NAMES = [
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+]
+
+# Names of actions that link nodeps dynamic libraries.
+NODEPS_DYNAMIC_LIBRARY_LINK_ACTION_NAMES = [
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+]
+
+# Names of actions that link transitive dependencies.
+TRANSITIVE_LINK_ACTION_NAMES = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.lto_index_for_executable,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+]
+
+ACTION_NAME_GROUPS = struct(
+ all_cc_compile_actions = ALL_CC_COMPILE_ACTION_NAMES,
+ all_cc_link_actions = ALL_CC_LINK_ACTION_NAMES,
+ all_cpp_compile_actions = ALL_CPP_COMPILE_ACTION_NAMES,
+ cc_link_executable_actions = CC_LINK_EXECUTABLE_ACTION_NAMES,
+ dynamic_library_link_actions = DYNAMIC_LIBRARY_LINK_ACTION_NAMES,
+ nodeps_dynamic_library_link_actions = NODEPS_DYNAMIC_LIBRARY_LINK_ACTION_NAMES,
+ transitive_link_actions = TRANSITIVE_LINK_ACTION_NAMES,
+)
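A minimal sketch of how these action-name constants are typically combined with the `feature`, `flag_set`, and `flag_group` helpers from `cc/cc_toolchain_config_lib.bzl` (the next file in this change); the feature name and the `-Wall` flag are illustrative:

```starlark
load("@rules_cc//cc:action_names.bzl", "ACTION_NAMES")
load(
    "@rules_cc//cc:cc_toolchain_config_lib.bzl",
    "feature",
    "flag_group",
    "flag_set",
)

# Illustrative feature: pass -Wall to C and C++ compile actions.
default_warnings_feature = feature(
    name = "default_warnings",
    enabled = True,
    flag_sets = [
        flag_set(
            actions = [
                ACTION_NAMES.c_compile,
                ACTION_NAMES.cpp_compile,
            ],
            flag_groups = [flag_group(flags = ["-Wall"])],
        ),
    ],
)
```

A toolchain config rule would then pass such features to `cc_common.create_cc_toolchain_config_info`.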
diff --git a/cc/cc_toolchain_config_lib.bzl b/cc/cc_toolchain_config_lib.bzl
new file mode 100644
index 0000000..3a259de
--- /dev/null
+++ b/cc/cc_toolchain_config_lib.bzl
@@ -0,0 +1,617 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" A library of functions creating structs for CcToolchainConfigInfo."""
+
+def _check_is_none(obj, parameter_name, method_name):
+ if obj != None:
+ fail("{} parameter of {} should be None, found {}."
+ .format(parameter_name, method_name, type(obj)))
+
+def _check_is_none_or_right_type(obj, obj_of_right_type, parameter_name, method_name):
+ if obj != None:
+ _check_same_type(obj, obj_of_right_type, parameter_name, method_name)
+
+def _check_right_type(obj, expected_type, parameter_name, method_name):
+ if type(obj) != expected_type:
+ fail("{} parameter of {} should be a {}, found {}."
+ .format(parameter_name, method_name, expected_type, type(obj)))
+
+def _check_same_type(obj, obj_of_right_type, parameter_name, method_name):
+ _check_right_type(obj, type(obj_of_right_type), parameter_name, method_name)
+
+def _check_is_nonempty_string(obj, parameter_name, method_name):
+ _check_same_type(obj, "", parameter_name, method_name)
+ if obj == "":
+ fail("{} parameter of {} must be a nonempty string."
+ .format(parameter_name, method_name))
+
+def _check_is_nonempty_list(obj, parameter_name, method_name):
+ _check_same_type(obj, [], parameter_name, method_name)
+ if len(obj) == 0:
+ fail("{} parameter of {} must be a nonempty list."
+ .format(parameter_name, method_name))
+
+EnvEntryInfo = provider(
+ "A key/value pair to be added as an environment variable.",
+ fields = ["key", "value", "expand_if_available", "type_name"],
+)
+
+def env_entry(key, value, expand_if_available = None):
+ """ A key/value pair to be added as an environment variable.
+
+ The returned EnvEntry provider finds its use in EnvSet creation through
+ the env_entries parameter of env_set(); EnvSet groups environment variables
+ that need to be expanded for specific actions.
+ The value of this pair is expanded in the same way as is described in
+ flag_group. The key remains an unexpanded string literal.
+
+ Args:
+ key: a string literal representing the name of the variable.
+ value: the value to be expanded.
+ expand_if_available: A build variable that needs to be available
+ in order to expand the env_entry.
+
+ Returns:
+ An EnvEntryInfo provider.
+ """
+ _check_is_nonempty_string(key, "key", "env_entry")
+ _check_is_nonempty_string(value, "value", "env_entry")
+ _check_is_none_or_right_type(expand_if_available, "string", "expand_if_available", "env_entry")
+ return EnvEntryInfo(
+ key = key,
+ value = value,
+ expand_if_available = expand_if_available,
+ type_name = "env_entry",
+ )
+
+VariableWithValueInfo = provider(
+ "Represents equality check between a variable and a certain value.",
+ fields = ["name", "value", "type_name"],
+)
+
+def variable_with_value(name, value):
+ """ Represents equality check between a variable and a certain value.
+
+ The returned provider finds its use through flag_group.expand_if_equal,
+ making the expansion of the flag_group conditional on the value of the
+ variable.
+
+ Args:
+ name: name of the variable.
+ value: the value the variable should be compared against.
+
+ Returns:
+ A VariableWithValueInfo provider.
+ """
+ _check_is_nonempty_string(name, "name", "variable_with_value")
+ _check_is_nonempty_string(value, "value", "variable_with_value")
+ return VariableWithValueInfo(
+ name = name,
+ value = value,
+ type_name = "variable_with_value",
+ )
+
+MakeVariableInfo = provider(
+ "A make variable that is made accessible to rules.",
+ fields = ["name", "value", "type_name"],
+)
+
+def make_variable(name, value):
+ """ A make variable that is made accessible to rules."""
+ _check_is_nonempty_string(name, "name", "make_variable")
+ _check_is_nonempty_string(value, "value", "make_variable")
+ return MakeVariableInfo(
+ name = name,
+ value = value,
+ type_name = "make_variable",
+ )
+
+FeatureSetInfo = provider(
+ "A set of features.",
+ fields = ["features", "type_name"],
+)
+
+def feature_set(features = []):
+ """ A set of features.
+
+ Used to support logical 'and' when specifying feature requirements in a
+ feature.
+
+ Args:
+ features: A list of unordered feature names.
+
+ Returns:
+ A FeatureSetInfo provider.
+ """
+ _check_same_type(features, [], "features", "feature_set")
+ return FeatureSetInfo(features = features, type_name = "feature_set")
+
+WithFeatureSetInfo = provider(
+ "A set of positive and negative features.",
+ fields = ["features", "not_features", "type_name"],
+)
+
+def with_feature_set(features = [], not_features = []):
+ """ A set of positive and negative features.
+
+ This stanza will evaluate to true when every 'feature' is enabled, and
+ every 'not_feature' is not enabled.
+
+ Args:
+ features: A list of feature names that need to be enabled.
+ not_features: A list of feature names that need to not be enabled.
+
+ Returns:
+ A WithFeatureSetInfo provider.
+ """
+ _check_same_type(features, [], "features", "with_feature_set")
+ _check_same_type(not_features, [], "not_features", "with_feature_set")
+ return WithFeatureSetInfo(
+ features = features,
+ not_features = not_features,
+ type_name = "with_feature_set",
+ )
+
+EnvSetInfo = provider(
+ "Groups a set of environment variables to apply for certain actions.",
+ fields = ["actions", "env_entries", "with_features", "type_name"],
+)
+
+def env_set(actions, env_entries = [], with_features = []):
+ """ Groups a set of environment variables to apply for certain actions.
+
+ EnvSet providers are passed to feature() and action_config(), to be applied to
+ the actions they are specified for.
+
+ Args:
+ actions: A list of actions this env set applies to; each env set must
+ specify at least one action.
+ env_entries: A list of EnvEntry - the environment variables applied
+ via this env set.
+ with_features: A list of feature sets defining when this env set gets
+ applied. The env set will be applied when any one of the feature
+ sets evaluates to true. (That is, when every 'feature' is
+ enabled, and every 'not_feature' is not enabled.)
+ If 'with_features' is omitted, the env set will be applied
+ unconditionally for every action specified.
+
+ Returns:
+ An EnvSetInfo provider.
+ """
+ _check_is_nonempty_list(actions, "actions", "env_set")
+ _check_same_type(env_entries, [], "env_entries", "env_set")
+ _check_same_type(with_features, [], "with_features", "env_set")
+ return EnvSetInfo(
+ actions = actions,
+ env_entries = env_entries,
+ with_features = with_features,
+ type_name = "env_set",
+ )
+
+FlagGroupInfo = provider(
+ "A group of flags. Supports parametrization via variable expansion.",
+ fields = [
+ "flags",
+ "flag_groups",
+ "iterate_over",
+ "expand_if_available",
+ "expand_if_not_available",
+ "expand_if_true",
+ "expand_if_false",
+ "expand_if_equal",
+ "type_name",
+ ],
+)
+
+def flag_group(
+ flags = [],
+ flag_groups = [],
+ iterate_over = None,
+ expand_if_available = None,
+ expand_if_not_available = None,
+ expand_if_true = None,
+ expand_if_false = None,
+ expand_if_equal = None):
+ """ A group of flags. Supports parametrization via variable expansion.
+
+ To expand a variable of list type, flag_group has to be annotated with
+ `iterate_over` message. Then all nested flags or flag_groups will be
+ expanded repeatedly for each element of the list.
+ For example:
+ flag_group(
+ iterate_over = 'include_path',
+ flags = ['-I', '%{include_path}'],
+ )
+ ... will get expanded to -I /to/path1 -I /to/path2 ... for each
+ include_path /to/pathN.
+
+ To expand a variable of structure type, use dot-notation, e.g.:
+ flag_group(
+ iterate_over = "libraries_to_link",
+ flag_groups = [
+ flag_group(
+ iterate_over = "libraries_to_link.libraries",
+ flags = ["-L%{libraries_to_link.libraries.directory}"],
+ )
+ ]
+ )
+
+ Flag groups can be nested; if they are, the flag group must only contain
+ other flag groups (no flags) so the order is unambiguously specified.
+ In order to expand a variable of nested lists, 'iterate_over' can be used.
+ For example:
+ flag_group (
+ iterate_over = 'object_files',
+ flag_groups = [
+ flag_group (
+ flags = ['--start-lib'],
+ ),
+ flag_group (
+ iterate_over = 'object_files',
+ flags = ['%{object_files}'],
+ ),
+ flag_group (
+ flags = ['--end-lib'],
+ )
+ ]
+ )
+ ... will get expanded to
+ --start-lib a1.o a2.o ... --end-lib --start-lib b1.o b2.o .. --end-lib
+ with %{object_files} being a variable of nested list type
+ [['a1.o', 'a2.o', ...], ['b1.o', 'b2.o', ...], ...].
+
+ Args:
+ flags: a string list, representing flags. Only one of flags and
+ flag_groups can be set, so as to avoid ambiguity.
+ flag_groups: a list of FlagGroup entries. Only one of flags and
+ flag_groups can be set, so as to avoid ambiguity.
+ iterate_over: a string, representing a variable of list type.
+ expand_if_available: A build variable that needs to be available
+ in order to expand the flag_group.
+ expand_if_not_available: A build variable that needs to be
+ unavailable in order for this flag_group to be expanded.
+ expand_if_true: if set, this variable needs to evaluate to True in
+ order for the flag_group to be expanded.
+ expand_if_false: if set, this variable needs to evaluate to False in
+ order for the flag_group to be expanded.
+ expand_if_equal: a VariableWithValue, the flag_group is expanded in
+ case of equality.
+
+ Returns:
+ A FlagGroupInfo provider.
+ """
+
+ _check_same_type(flags, [], "flags", "flag_group")
+ _check_same_type(flag_groups, [], "flag_groups", "flag_group")
+ if len(flags) > 0 and len(flag_groups) > 0:
+ fail("flag_group must not contain both a flag and another flag_group.")
+ if len(flags) == 0 and len(flag_groups) == 0:
+ fail("flag_group must contain either a list of flags or a list of flag_groups.")
+ _check_is_none_or_right_type(expand_if_true, "string", "expand_if_true", "flag_group")
+ _check_is_none_or_right_type(expand_if_false, "string", "expand_if_false", "flag_group")
+ _check_is_none_or_right_type(expand_if_available, "string", "expand_if_available", "flag_group")
+ _check_is_none_or_right_type(
+ expand_if_not_available,
+ "string",
+ "expand_if_not_available",
+ "flag_group",
+ )
+ _check_is_none_or_right_type(iterate_over, "string", "iterate_over", "flag_group")
+
+ return FlagGroupInfo(
+ flags = flags,
+ flag_groups = flag_groups,
+ iterate_over = iterate_over,
+ expand_if_available = expand_if_available,
+ expand_if_not_available = expand_if_not_available,
+ expand_if_true = expand_if_true,
+ expand_if_false = expand_if_false,
+ expand_if_equal = expand_if_equal,
+ type_name = "flag_group",
+ )
+
+FlagSetInfo = provider(
+ "A set of flags to be expanded in the command line for specific actions.",
+ fields = [
+ "actions",
+ "with_features",
+ "flag_groups",
+ "type_name",
+ ],
+)
+
+def flag_set(
+ actions = [],
+ with_features = [],
+ flag_groups = []):
+ """ A set of flags to be expanded in the command line for specific actions.
+
+ Args:
+ actions: The actions this flag set applies to; each flag set must
+ specify at least one action.
+ with_features: A list of feature sets defining when this flag set gets
+ applied. The flag set will be applied when any one of the feature
+ sets evaluates to true. (That is, when every 'feature' is
+ enabled, and every 'not_feature' is not enabled.)
+ If 'with_features' is omitted, the flag set will be applied
+ unconditionally for every action specified.
+ flag_groups: A FlagGroup list - the flags applied via this flag set.
+
+ Returns:
+ A FlagSetInfo provider.
+ """
+ _check_same_type(actions, [], "actions", "flag_set")
+ _check_same_type(with_features, [], "with_features", "flag_set")
+ _check_same_type(flag_groups, [], "flag_groups", "flag_set")
+ return FlagSetInfo(
+ actions = actions,
+ with_features = with_features,
+ flag_groups = flag_groups,
+ type_name = "flag_set",
+ )
+
+FeatureInfo = provider(
+ "Contains all flag specifications for one feature.",
+ fields = [
+ "name",
+ "enabled",
+ "flag_sets",
+ "env_sets",
+ "requires",
+ "implies",
+ "provides",
+ "type_name",
+ ],
+)
+
+def feature(
+ name,
+ enabled = False,
+ flag_sets = [],
+ env_sets = [],
+ requires = [],
+ implies = [],
+ provides = []):
+ """ Contains all flag specifications for one feature.
+
+ Args:
+ name: The feature's name. It is possible to introduce a feature without
+ a change to Bazel by adding a 'feature' section to the toolchain
+ and adding the corresponding string as a feature in the BUILD file.
+ enabled: If 'True', this feature is enabled unless a rule type
+ explicitly marks it as unsupported.
+ flag_sets: A FlagSet list - If the given feature is enabled, the flag
+ sets will be applied for the actions they are specified for.
+ env_sets: an EnvSet list - If the given feature is enabled, the env
+ sets will be applied for the actions they are specified for.
+ requires: A list of feature sets defining when this feature is
+ supported by the toolchain. The feature is supported if any of the
+ feature sets fully apply, that is, when all features of a feature
+ set are enabled.
+ If 'requires' is omitted, the feature is supported independently of
+ which other features are enabled.
+ Use this for example to filter flags depending on the build mode
+ enabled (opt / fastbuild / dbg).
+ implies: A string list of features or action configs that are
+ automatically enabled when this feature is enabled. If any of the
+ implied features or action configs cannot be enabled, this feature
+ will (silently) not be enabled either.
+ provides: A list of names this feature conflicts with.
+ A feature cannot be enabled if:
+ - 'provides' contains the name of a different feature or action
+ config that we want to enable.
+ - 'provides' contains the same value as a 'provides' in a
+ different feature or action config that we want to enable.
+ Use this in order to ensure that incompatible features cannot be
+ accidentally activated at the same time, leading to hard to
+ diagnose compiler errors.
+
+ Returns:
+ A FeatureInfo provider.
+ """
+ _check_same_type(enabled, True, "enabled", "feature")
+ _check_same_type(flag_sets, [], "flag_sets", "feature")
+ _check_same_type(env_sets, [], "env_sets", "feature")
+ _check_same_type(requires, [], "requires", "feature")
+ _check_same_type(provides, [], "provides", "feature")
+ _check_same_type(implies, [], "implies", "feature")
+ return FeatureInfo(
+ name = name,
+ enabled = enabled,
+ flag_sets = flag_sets,
+ env_sets = env_sets,
+ requires = requires,
+ implies = implies,
+ provides = provides,
+ type_name = "feature",
+ )
+
+ToolPathInfo = provider(
+ "Tool locations.",
+ fields = ["name", "path", "type_name"],
+)
+
+def tool_path(name, path):
+ """ Tool locations.
+
+ Args:
+ name: Name of the tool.
+ path: Location of the tool; can be an absolute path (in the case of a non-hermetic
+ toolchain), or a path relative to the cc_toolchain's package.
+
+ Returns:
+ A ToolPathInfo provider.
+
+ Deprecated:
+ Prefer specifying an ActionConfig for the action that needs the tool.
+ TODO(b/27903698) migrate to ActionConfig.
+ """
+ _check_is_nonempty_string(name, "name", "tool_path")
+ _check_is_nonempty_string(path, "path", "tool_path")
+ return ToolPathInfo(name = name, path = path, type_name = "tool_path")
+
+ToolInfo = provider(
+ doc = "Tool information. This differs from ToolPathInfo as it is intended to be used\
+ in action_configs and can accept labels.",
+ fields = [
+ "path",
+ "tool",
+ "with_features",
+ "execution_requirements",
+ "type_name",
+ ],
+)
+
+def tool(path = None, with_features = [], execution_requirements = [], tool = None):
+ """ Describes a tool associated with a crosstool action config.
+
+ Args:
+ path: Location of the tool; can be an absolute path (in the case of a non-hermetic
+ toolchain), or a path relative to the cc_toolchain's package. If this
+ parameter is set, tool must not be set.
+ tool: The built-artifact that should be used as this tool. If this is
+ set, path must not be set.
+ with_features: A list of feature sets defining when this tool is
+ applicable. The tool will used when any one of the feature sets
+ evaluate to true. (That is, when when every 'feature' is enabled,
+ and every 'not_feature' is not enabled.)
+ If 'with_feature' is omitted, the tool will apply for any feature
+ configuration.
+ execution_requirements: Requirements on the execution environment for
+ the execution of this tool, to be passed as out-of-band "hints" to
+ the execution backend.
+ Ex. "requires-darwin"
+
+ Returns:
+ A ToolInfo provider.
+ """
+ if path == None and tool == None:
+ fail("Parameter path or parameter tool of tool should not be None.")
+
+ if path != None:
+ _check_is_nonempty_string(path, "path", "tool")
+ _check_is_none(tool, "tool", "tool")
+ if tool != None:
+ _check_is_none(path, "path", "tool")
+ _check_right_type(tool, "File", "tool", "tool")
+
+ _check_same_type(with_features, [], "with_features", "tool")
+ _check_same_type(execution_requirements, [], "execution_requirements", "tool")
+ return ToolInfo(
+ path = path,
+ tool = tool,
+ with_features = with_features,
+ execution_requirements = execution_requirements,
+ type_name = "tool",
+ )
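+
+# Illustrative sketch (not part of the original file): a tool that is only
+# selected when a hypothetical "use_lld" feature is enabled. The path and
+# feature name are assumptions; with_feature_set() is defined earlier in
+# this file.
+#
+#     lld_tool = tool(
+#         path = "bin/ld.lld",
+#         with_features = [with_feature_set(features = ["use_lld"])],
+#     )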
+
+ActionConfigInfo = provider(
+ "Configuration of a Bazel action.",
+ fields = [
+ "config_name",
+ "action_name",
+ "enabled",
+ "tools",
+ "flag_sets",
+ "implies",
+ "type_name",
+ ],
+)
+
+def action_config(
+ action_name,
+ enabled = False,
+ tools = [],
+ flag_sets = [],
+ implies = []):
+ """ Configuration of a Bazel action.
+
+ An action config corresponds to a Bazel action, and allows selection of
+ a tool based on activated features.
+ Action config activation occurs by the same semantics as features: a
+ feature can 'require' or 'imply' an action config in the same way that it
+ would another feature.
+
+ Args:
+ action_name: The name of the Bazel action that this config applies to,
+ ex. 'c-compile' or 'c-module-compile'.
+ enabled: If 'True', this action is enabled unless a rule type
+ explicitly marks it as unsupported.
+ tools: The tool applied to the action will be the first Tool with a
+ feature set that matches the feature configuration. An error will
+ be thrown if no tool matches a provided feature configuration - for
+ that reason, it's a good idea to provide a default tool with an
+ empty feature set.
+ flag_sets: If the given action config is enabled, the flag sets will be
+ applied to the corresponding action.
+ implies: A list of features or action configs that are automatically
+ enabled when this action config is enabled. If any of the implied
+ features or action configs cannot be enabled, this action config
+ will (silently) not be enabled either.
+
+ Returns:
+ An ActionConfigInfo provider.
+ """
+ _check_is_nonempty_string(action_name, "name", "action_config")
+ _check_same_type(enabled, True, "enabled", "action_config")
+ _check_same_type(tools, [], "tools", "action_config")
+ _check_same_type(flag_sets, [], "flag_sets", "action_config")
+ _check_same_type(implies, [], "implies", "action_config")
+ return ActionConfigInfo(
+ action_name = action_name,
+ enabled = enabled,
+ tools = tools,
+ flag_sets = flag_sets,
+ implies = implies,
+ type_name = "action_config",
+ )
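+
+# Illustrative sketch (not part of the original file): an action_config that
+# binds the C compile action to a specific tool. The tool path is an
+# assumption for demonstration; tool() is defined above.
+#
+#     c_compile_config = action_config(
+#         action_name = "c-compile",
+#         enabled = True,
+#         tools = [tool(path = "bin/my-clang")],
+#     )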
+
+ArtifactNamePatternInfo = provider(
+ "The name for an artifact of a given category of input or output artifacts to an action.",
+ fields = [
+ "category_name",
+ "prefix",
+ "extension",
+ "type_name",
+ ],
+)
+
+def artifact_name_pattern(category_name, prefix, extension):
+ """ The name for an artifact of a given category of input or output artifacts to an action.
+
+ Args:
+ category_name: The category of artifacts that this selection applies
+            to. This field is compared against a list of categories defined
+            in Bazel. Example categories include "linked_output" or
+ "debug_symbols". An error is thrown if no category is matched.
+ prefix: The prefix for creating the artifact for this selection.
+ Together with the extension it is used to create an artifact name
+ based on the target name.
+ extension: The extension for creating the artifact for this selection.
+ Together with the prefix it is used to create an artifact name
+ based on the target name.
+
+ Returns:
+      An ArtifactNamePatternInfo provider.
+ """
+ _check_is_nonempty_string(category_name, "category_name", "artifact_name_pattern")
+ _check_is_none_or_right_type(prefix, "", "prefix", "artifact_name_pattern")
+ _check_is_none_or_right_type(extension, "", "extension", "artifact_name_pattern")
+ return ArtifactNamePatternInfo(
+ category_name = category_name,
+ prefix = prefix,
+ extension = extension,
+ type_name = "artifact_name_pattern",
+ )
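+
+# Illustrative sketch (not part of the original file): name dynamic libraries
+# "lib<target>.dylib". "dynamic_library" is one of the categories defined in
+# Bazel; the prefix/extension choice here is an assumption for demonstration.
+#
+#     dylib_pattern = artifact_name_pattern(
+#         category_name = "dynamic_library",
+#         prefix = "lib",
+#         extension = ".dylib",
+#     )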
diff --git a/cc/compiler/BUILD b/cc/compiler/BUILD
new file mode 100644
index 0000000..41f00e4
--- /dev/null
+++ b/cc/compiler/BUILD
@@ -0,0 +1,71 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Config settings for compilers identified by Bazel.
+
+Targets that require compiler-specific flags can use the config_settings defined
+in this package in their select() statements.
+
+*Note*: Before Bazel 6, gcc on Linux and clang on macOS would not match their
+specific config_setting, but only the fallback case of a select expression.
+
+Toolchains not shipped with Bazel are encouraged to use the same names to
+identify compilers as used below, but this is not enforced.
+
+Example:
+
+ cc_binary(
+ name = "foo",
+ srcs = ["foo.cc"],
+ copts = select({
+ "@rules_cc//cc/compiler:gcc": [...],
+ "@rules_cc//cc/compiler:clang": [...],
+ "@rules_cc//cc/compiler:msvc-cl": [...],
+ # Fallback case for an undetected compiler.
+ "//conditions:default": [...],
+ }),
+ )
+
+If multiple targets use the same set of conditionally enabled flags, this can be
+simplified by extracting the select expression into a Starlark constant.
+"""
+
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+config_setting(
+ name = "clang",
+ flag_values = {"@bazel_tools//tools/cpp:compiler": "clang"},
+)
+
+config_setting(
+ name = "clang-cl",
+ flag_values = {"@bazel_tools//tools/cpp:compiler": "clang-cl"},
+)
+
+config_setting(
+ name = "gcc",
+ flag_values = {"@bazel_tools//tools/cpp:compiler": "gcc"},
+)
+
+config_setting(
+ name = "mingw-gcc",
+ flag_values = {"@bazel_tools//tools/cpp:compiler": "mingw-gcc"},
+)
+
+config_setting(
+ name = "msvc-cl",
+ flag_values = {"@bazel_tools//tools/cpp:compiler": "msvc-cl"},
+)
diff --git a/cc/defs.bzl b/cc/defs.bzl
new file mode 100644
index 0000000..a3acac7
--- /dev/null
+++ b/cc/defs.bzl
@@ -0,0 +1,203 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Starlark rules for building C++ projects."""
+
+load("//cc/private/rules_impl:cc_flags_supplier.bzl", _cc_flags_supplier = "cc_flags_supplier")
+load("//cc/private/rules_impl:compiler_flag.bzl", _compiler_flag = "compiler_flag")
+load("//cc/private/rules_impl:native.bzl", "NativeCcInfo", "NativeCcToolchainConfigInfo", "NativeDebugPackageInfo", "native_cc_common")
+
+_MIGRATION_TAG = "__CC_RULES_MIGRATION_DO_NOT_USE_WILL_BREAK__"
+
+# TODO(bazel-team): To avoid breaking changes, if the below are no longer
+# forwarding to native rules, the flag @bazel_tools//tools/cpp:link_extra_libs
+# should either: (a) alias the flag @rules_cc//:link_extra_libs, or (b) be
+# added as a dependency to @rules_cc//:link_extra_lib. The intermediate library
+# @bazel_tools//tools/cpp:link_extra_lib should either be added as a dependency
+# to @rules_cc//:link_extra_lib, or removed entirely (if possible).
+_LINK_EXTRA_LIB = "@rules_cc//:link_extra_lib" # copybara-use-repo-external-label
+
+def _add_tags(attrs, is_binary = False):
+ if "tags" in attrs and attrs["tags"] != None:
+ attrs["tags"] = attrs["tags"] + [_MIGRATION_TAG]
+ else:
+ attrs["tags"] = [_MIGRATION_TAG]
+
+ if is_binary:
+ is_library = "linkshared" in attrs and attrs["linkshared"]
+
+ # Executable builds also include the "link_extra_lib" library.
+ if not is_library:
+ if "deps" in attrs and attrs["deps"] != None:
+ attrs["deps"] = attrs["deps"] + [_LINK_EXTRA_LIB]
+ else:
+ attrs["deps"] = [_LINK_EXTRA_LIB]
+
+ return attrs
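+
+# Worked example (hypothetical input) of what _add_tags produces: given
+# {"name": "app", "tags": ["manual"]} and is_binary = True (with no
+# "linkshared" and no "deps"), the result is
+# {"name": "app",
+#  "tags": ["manual", "__CC_RULES_MIGRATION_DO_NOT_USE_WILL_BREAK__"],
+#  "deps": ["@rules_cc//:link_extra_lib"]}.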
+
+def cc_binary(**attrs):
+ """Bazel cc_binary rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_binary
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_binary(**_add_tags(attrs, True))
+
+def cc_test(**attrs):
+ """Bazel cc_test rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_test
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_test(**_add_tags(attrs, True))
+
+def cc_library(**attrs):
+ """Bazel cc_library rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_library
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_library(**_add_tags(attrs))
+
+def cc_import(**attrs):
+ """Bazel cc_import rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_import
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_import(**_add_tags(attrs))
+
+def cc_proto_library(**attrs):
+ """Bazel cc_proto_library rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_proto_library
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_proto_library(**_add_tags(attrs))
+
+def fdo_prefetch_hints(**attrs):
+ """Bazel fdo_prefetch_hints rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#fdo_prefetch_hints
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.fdo_prefetch_hints(**_add_tags(attrs))
+
+def fdo_profile(**attrs):
+ """Bazel fdo_profile rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#fdo_profile
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.fdo_profile(**_add_tags(attrs))
+
+def cc_toolchain(**attrs):
+ """Bazel cc_toolchain rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_toolchain
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_toolchain(**_add_tags(attrs))
+
+def cc_toolchain_suite(**attrs):
+ """Bazel cc_toolchain_suite rule.
+
+ https://docs.bazel.build/versions/main/be/c-cpp.html#cc_toolchain_suite
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.cc_toolchain_suite(**_add_tags(attrs))
+
+def objc_library(**attrs):
+ """Bazel objc_library rule.
+
+ https://docs.bazel.build/versions/main/be/objective-c.html#objc_library
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.objc_library(**_add_tags(attrs))
+
+def objc_import(**attrs):
+ """Bazel objc_import rule.
+
+ https://docs.bazel.build/versions/main/be/objective-c.html#objc_import
+
+ Args:
+ **attrs: Rule attributes
+ """
+
+ # buildifier: disable=native-cc
+ native.objc_import(**_add_tags(attrs))
+
+def cc_flags_supplier(**attrs):
+ """Bazel cc_flags_supplier rule.
+
+ Args:
+ **attrs: Rule attributes
+ """
+ _cc_flags_supplier(**_add_tags(attrs))
+
+def compiler_flag(**attrs):
+ """Bazel compiler_flag rule.
+
+ Args:
+ **attrs: Rule attributes
+ """
+ _compiler_flag(**_add_tags(attrs))
+
+cc_common = native_cc_common
+
+CcInfo = NativeCcInfo
+
+CcToolchainConfigInfo = NativeCcToolchainConfigInfo
+
+DebugPackageInfo = NativeDebugPackageInfo
diff --git a/cc/extensions.bzl b/cc/extensions.bzl
new file mode 100644
index 0000000..72b2dca
--- /dev/null
+++ b/cc/extensions.bzl
@@ -0,0 +1,24 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module extension for cc auto configuration."""
+
+load("@bazel_tools//tools/osx:xcode_configure.bzl", "xcode_configure")
+load("//cc/private/toolchain:cc_configure.bzl", "cc_autoconf", "cc_autoconf_toolchains")
+
+def _cc_configure_impl(_):
+ cc_autoconf_toolchains(name = "local_config_cc_toolchains")
+ cc_autoconf(name = "local_config_cc")
+ xcode_configure("@bazel_tools//tools/osx:xcode_locator.m")
+
+cc_configure = module_extension(implementation = _cc_configure_impl)
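+
+# Illustrative sketch (assumption, not prescribed by this file): how a
+# MODULE.bazel might use this extension. The repo names mirror those created
+# in _cc_configure_impl above.
+#
+#     cc_configure = use_extension("@rules_cc//cc:extensions.bzl", "cc_configure")
+#     use_repo(cc_configure, "local_config_cc", "local_config_cc_toolchains")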
diff --git a/cc/find_cc_toolchain.bzl b/cc/find_cc_toolchain.bzl
new file mode 100644
index 0000000..d2f2d9f
--- /dev/null
+++ b/cc/find_cc_toolchain.bzl
@@ -0,0 +1,117 @@
+# pylint: disable=g-bad-file-header
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Helpers for CC Toolchains.
+
+Rules that require a CC toolchain should call `use_cc_toolchain` and `find_cc_toolchain`
+to depend on and find a cc toolchain.
+
+* When https://github.com/bazelbuild/bazel/issues/7260 is **not** flipped, the
+  current C++ toolchain is selected using the legacy mechanism (`--crosstool_top`,
+ `--cpu`, `--compiler`). For that to work the rule needs to declare an
+ `_cc_toolchain` attribute, e.g.
+
+ foo = rule(
+ implementation = _foo_impl,
+ attrs = {
+ "_cc_toolchain": attr.label(
+ default = Label(
+ "@rules_cc//cc:current_cc_toolchain", # copybara-use-repo-external-label
+ ),
+ ),
+ },
+ )
+
+* When https://github.com/bazelbuild/bazel/issues/7260 **is** flipped, the
+  current C++ toolchain is selected using the toolchain resolution mechanism
+ (`--platforms`). For that to work the rule needs to declare a dependency on
+ C++ toolchain type:
+
+    load(":find_cc_toolchain.bzl", "use_cc_toolchain")
+
+ foo = rule(
+ implementation = _foo_impl,
+ toolchains = use_cc_toolchain(),
+ )
+
+We advise depending on both the `_cc_toolchain` attr and the toolchain type for
+the duration of the migration. After
+https://github.com/bazelbuild/bazel/issues/7260 is flipped (and support for old
+Bazel versions is no longer needed), it's enough to keep only the toolchain type.
+"""
+
+CC_TOOLCHAIN_TYPE = "@bazel_tools//tools/cpp:toolchain_type" # copybara-use-repo-external-label
+
+def find_cc_toolchain(ctx):
+    """Returns the current `CcToolchainInfo`.
+
+ Args:
+ ctx: The rule context for which to find a toolchain.
+
+ Returns:
+ A CcToolchainInfo.
+ """
+
+ # Check the incompatible flag for toolchain resolution.
+ if hasattr(cc_common, "is_cc_toolchain_resolution_enabled_do_not_use") and cc_common.is_cc_toolchain_resolution_enabled_do_not_use(ctx = ctx):
+ if not CC_TOOLCHAIN_TYPE in ctx.toolchains:
+ fail("In order to use find_cc_toolchain, your rule has to depend on C++ toolchain. See find_cc_toolchain.bzl docs for details.")
+ toolchain_info = ctx.toolchains[CC_TOOLCHAIN_TYPE]
+ if toolchain_info == None:
+ # No cpp toolchain was found, so report an error.
+ fail("Unable to find a CC toolchain using toolchain resolution. Target: %s, Platform: %s, Exec platform: %s" %
+ (ctx.label, ctx.fragments.platform.platform, ctx.fragments.platform.host_platform))
+ if hasattr(toolchain_info, "cc_provider_in_toolchain") and hasattr(toolchain_info, "cc"):
+ return toolchain_info.cc
+ return toolchain_info
+
+ # Fall back to the legacy implicit attribute lookup.
+ if hasattr(ctx.attr, "_cc_toolchain"):
+ return ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
+
+ # We didn't find anything.
+ fail("In order to use find_cc_toolchain, your rule has to depend on C++ toolchain. See find_cc_toolchain.bzl docs for details.")
+
+def find_cpp_toolchain(ctx):
+ """Deprecated, use `find_cc_toolchain` instead.
+
+ Args:
+ ctx: See `find_cc_toolchain`.
+
+ Returns:
+ A CcToolchainInfo.
+ """
+ return find_cc_toolchain(ctx)
+
+def use_cc_toolchain(mandatory = False):
+ """
+ Helper to depend on the cc toolchain.
+
+ Usage:
+ ```
+ my_rule = rule(
+ toolchains = [other toolchain types] + use_cc_toolchain(),
+ )
+ ```
+
+ Args:
+ mandatory: Whether or not it should be an error if the toolchain cannot be resolved.
+
+ Returns:
+ A list that can be used as the value for `rule.toolchains`.
+ """
+ return [config_common.toolchain_type(CC_TOOLCHAIN_TYPE, mandatory = mandatory)]
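+
+# Illustrative sketch (not part of this file): a rule that declares both the
+# legacy attribute and the toolchain type, as advised in the module docstring,
+# and resolves the toolchain in its implementation. Names starting with "my_"
+# are hypothetical.
+#
+#     def _my_rule_impl(ctx):
+#         cc_toolchain = find_cc_toolchain(ctx)
+#         # ... use fields such as cc_toolchain.compiler or cc_toolchain.all_files
+#
+#     my_rule = rule(
+#         implementation = _my_rule_impl,
+#         attrs = {
+#             "_cc_toolchain": attr.label(
+#                 default = Label("@rules_cc//cc:current_cc_toolchain"),
+#             ),
+#         },
+#         toolchains = use_cc_toolchain(),
+#         fragments = ["cpp"],
+#     )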
diff --git a/cc/private/rules_impl/BUILD b/cc/private/rules_impl/BUILD
new file mode 100644
index 0000000..dc74dfe
--- /dev/null
+++ b/cc/private/rules_impl/BUILD
@@ -0,0 +1,18 @@
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"]) # Apache 2.0
+
+filegroup(
+ name = "bzl_srcs",
+ srcs = glob([
+ "**/*.bzl",
+ ]),
+)
+
+filegroup(
+ name = "srcs",
+ srcs = glob([
+ "**/*.bzl",
+ "**/BUILD",
+ ]),
+)
diff --git a/cc/private/rules_impl/cc_flags_supplier.bzl b/cc/private/rules_impl/cc_flags_supplier.bzl
new file mode 100644
index 0000000..474c7ce
--- /dev/null
+++ b/cc/private/rules_impl/cc_flags_supplier.bzl
@@ -0,0 +1,35 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rule that provides the CC_FLAGS Make variable."""
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
+load("//cc:action_names.bzl", "CC_FLAGS_MAKE_VARIABLE_ACTION_NAME")
+load("//cc/private/rules_impl:cc_flags_supplier_lib.bzl", "build_cc_flags")
+
+def _cc_flags_supplier_impl(ctx):
+ cc_toolchain = find_cpp_toolchain(ctx)
+ cc_flags = build_cc_flags(ctx, cc_toolchain, CC_FLAGS_MAKE_VARIABLE_ACTION_NAME)
+ variables = platform_common.TemplateVariableInfo({
+ "CC_FLAGS": cc_flags,
+ })
+ return [variables]
+
+cc_flags_supplier = rule(
+ implementation = _cc_flags_supplier_impl,
+ attrs = {
+ "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
+ },
+ toolchains = use_cpp_toolchain(),
+ fragments = ["cpp"],
+)
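+
+# Illustrative sketch (assumption): a genrule can expand the CC_FLAGS make
+# variable by listing a cc_flags_supplier target in its toolchains attribute,
+# e.g. the ":cc_flags" target defined in cc/private/toolchain/BUILD:
+#
+#     genrule(
+#         name = "print_cc_flags",
+#         outs = ["cc_flags.txt"],
+#         cmd = "echo '$(CC_FLAGS)' > $@",
+#         toolchains = ["@rules_cc//cc/private/toolchain:cc_flags"],
+#     )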
diff --git a/cc/private/rules_impl/cc_flags_supplier_lib.bzl b/cc/private/rules_impl/cc_flags_supplier_lib.bzl
new file mode 100644
index 0000000..4b0782a
--- /dev/null
+++ b/cc/private/rules_impl/cc_flags_supplier_lib.bzl
@@ -0,0 +1,79 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Library of functions that provide the CC_FLAGS Make variable."""
+
+# This should match the logic in CcCommon.computeCcFlags:
+def build_cc_flags(ctx, cc_toolchain, action_name):
+ """Determine the value for CC_FLAGS based on the given toolchain.
+
+ Args:
+ ctx: The rule context.
+ cc_toolchain: CcToolchainInfo instance.
+ action_name: Name of the action.
+ Returns:
+ string containing flags separated by a space.
+ """
+
+ # Get default cc flags from toolchain's make_variables.
+ legacy_cc_flags = cc_common.legacy_cc_flags_make_variable_do_not_use(
+ cc_toolchain = cc_toolchain,
+ )
+
+ # Determine the sysroot flag.
+ sysroot_cc_flags = _from_sysroot(cc_toolchain)
+
+ # Flags from feature config.
+ feature_config_cc_flags = _from_features(ctx, cc_toolchain, action_name)
+
+ # Combine the different sources, but only add the sysroot flag if nothing
+ # else adds sysroot.
+ # If added, it must appear before the feature config flags.
+ cc_flags = []
+ if legacy_cc_flags:
+ cc_flags.append(legacy_cc_flags)
+ if sysroot_cc_flags and not _contains_sysroot(feature_config_cc_flags):
+ cc_flags.append(sysroot_cc_flags)
+ cc_flags.extend(feature_config_cc_flags)
+
+ return " ".join(cc_flags)
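+
+# Worked example (hypothetical values): with legacy_cc_flags = "-fuse-ld=gold",
+# sysroot_cc_flags = "--sysroot=/usr/local/foo", and feature_config_cc_flags =
+# ["-O2", "-g0"], this returns "-fuse-ld=gold --sysroot=/usr/local/foo -O2 -g0".
+# If the feature flags already contained a "--sysroot=" flag, the toolchain's
+# sysroot flag would be dropped instead.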
+
+def _contains_sysroot(flags):
+ for flag in flags:
+ if "--sysroot=" in flag:
+ return True
+ return False
+
+def _from_sysroot(cc_toolchain):
+ sysroot = cc_toolchain.sysroot
+ if sysroot:
+ return "--sysroot=%s" % sysroot
+ else:
+ return None
+
+def _from_features(ctx, cc_toolchain, action_name):
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ requested_features = ctx.features,
+ unsupported_features = ctx.disabled_features,
+ )
+
+ variables = cc_common.empty_variables()
+
+ cc_flags = cc_common.get_memory_inefficient_command_line(
+ feature_configuration = feature_configuration,
+ action_name = action_name,
+ variables = variables,
+ )
+ return cc_flags
diff --git a/cc/private/rules_impl/compiler_flag.bzl b/cc/private/rules_impl/compiler_flag.bzl
new file mode 100644
index 0000000..ebbac94
--- /dev/null
+++ b/cc/private/rules_impl/compiler_flag.bzl
@@ -0,0 +1,29 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Rule that allows select() to differentiate between compilers."""
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
+
+def _compiler_flag_impl(ctx):
+ toolchain = find_cpp_toolchain(ctx)
+ return [config_common.FeatureFlagInfo(value = toolchain.compiler)]
+
+compiler_flag = rule(
+ implementation = _compiler_flag_impl,
+ attrs = {
+ "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
+ },
+ toolchains = use_cpp_toolchain(),
+)
diff --git a/cc/private/rules_impl/native.bzl b/cc/private/rules_impl/native.bzl
new file mode 100644
index 0000000..cce8c7f
--- /dev/null
+++ b/cc/private/rules_impl/native.bzl
@@ -0,0 +1,34 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Redefine native symbols with a new name as a workaround for
+# exporting them in `//third_party/bazel_rules/rules_proto/proto:defs.bzl` with their original name.
+#
+# While we cannot force users to load these symbols due to the lack of an
+# allowlisting mechanism, we can still export them and tell users to
+# load them to make a future migration to pure Starlark easier.
+
+"""Lovely workaround to be able to expose native constants pretending to be Starlark."""
+
+# buildifier: disable=native-cc
+NativeCcInfo = CcInfo
+
+# buildifier: disable=native-cc
+NativeDebugPackageInfo = DebugPackageInfo
+
+# buildifier: disable=native-cc
+NativeCcToolchainConfigInfo = CcToolchainConfigInfo
+
+# buildifier: disable=native-cc
+native_cc_common = cc_common
diff --git a/cc/private/toolchain/BUILD b/cc/private/toolchain/BUILD
new file mode 100644
index 0000000..557a6a7
--- /dev/null
+++ b/cc/private/toolchain/BUILD
@@ -0,0 +1,95 @@
+load("//cc:defs.bzl", "cc_flags_supplier", "cc_library", "compiler_flag")
+
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"]) # Apache 2.0
+
+# It is frequently necessary to constrain platforms based on the cc compiler type.
+constraint_setting(name = "cc_compiler")
+
+constraint_value(
+ name = "clang",
+ constraint_setting = ":cc_compiler",
+)
+
+constraint_value(
+ name = "gcc",
+ constraint_setting = ":cc_compiler",
+)
+
+constraint_value(
+ name = "msvc",
+ constraint_setting = ":cc_compiler",
+)
+
+constraint_value(
+ name = "clang-cl",
+ constraint_setting = ":cc_compiler",
+)
+
+constraint_value(
+ name = "mingw",
+ constraint_setting = ":cc_compiler",
+)
+
+constraint_value(
+ name = "msys",
+ constraint_setting = ":cc_compiler",
+)
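+
+# Illustrative sketch (assumption): a platform that requires clang via the
+# constraint values above.
+#
+#     platform(
+#         name = "linux_clang",
+#         constraint_values = [
+#             "@platforms//os:linux",
+#             "@rules_cc//cc/private/toolchain:clang",
+#         ],
+#     )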
+
+cc_library(
+ name = "malloc",
+)
+
+filegroup(
+ name = "grep-includes",
+ srcs = ["grep-includes.sh"],
+)
+
+filegroup(
+ name = "empty",
+ srcs = [],
+)
+
+filegroup(
+ name = "bzl_srcs",
+ srcs = glob(["**/*.bzl"]),
+)
+
+filegroup(
+ name = "srcs",
+ srcs = glob(["**"]),
+)
+
+filegroup(
+ name = "interface_library_builder",
+ srcs = ["build_interface_so"],
+)
+
+filegroup(
+ name = "link_dynamic_library",
+ srcs = ["link_dynamic_library.sh"],
+)
+
+filegroup(
+ name = "lib_cc_configure",
+ srcs = ["lib_cc_configure.bzl"],
+)
+
+compiler_flag(name = "compiler")
+
+cc_flags_supplier(name = "cc_flags")
diff --git a/cc/private/toolchain/BUILD.empty b/cc/private/toolchain/BUILD.empty
new file mode 100644
index 0000000..a873d0c
--- /dev/null
+++ b/cc/private/toolchain/BUILD.empty
@@ -0,0 +1,52 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@rules_cc//cc:defs.bzl", "cc_library", "cc_toolchain", "cc_toolchain_suite")
+
+package(default_visibility = ["//visibility:public"])
+
+load(":cc_toolchain_config.bzl", "cc_toolchain_config")
+
+cc_library(
+ name = "malloc",
+)
+
+filegroup(
+ name = "empty",
+ srcs = [],
+)
+
+cc_toolchain_suite(
+ name = "toolchain",
+ toolchains = {
+ "local": ":local",
+ "local|local": ":local",
+ },
+)
+
+cc_toolchain(
+ name = "local",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":empty",
+ compiler_files = ":empty",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ toolchain_config = ":local_config",
+ toolchain_identifier = "local",
+)
+
+cc_toolchain_config(name = "local_config")
diff --git a/cc/private/toolchain/BUILD.static.freebsd b/cc/private/toolchain/BUILD.static.freebsd
new file mode 100644
index 0000000..d8a7b2d
--- /dev/null
+++ b/cc/private/toolchain/BUILD.static.freebsd
@@ -0,0 +1,112 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This becomes the BUILD file for @local_config_cc// under FreeBSD.
+
+package(default_visibility = ["//visibility:public"])
+
+load("@rules_cc//cc:defs.bzl", "cc_library", "cc_toolchain", "cc_toolchain_suite")
+load(":cc_toolchain_config.bzl", "cc_toolchain_config")
+
+cc_library(
+ name = "malloc",
+)
+
+filegroup(
+ name = "empty",
+ srcs = [],
+)
+
+# Hardcoded toolchain, legacy behaviour.
+cc_toolchain_suite(
+ name = "toolchain",
+ toolchains = {
+ "armeabi-v7a": ":cc-compiler-armeabi-v7a",
+ "armeabi-v7a|compiler": ":cc-compiler-armeabi-v7a",
+ "freebsd": ":cc-compiler-freebsd",
+ "freebsd|compiler": ":cc-compiler-freebsd",
+ },
+)
+
+cc_toolchain(
+ name = "cc-compiler-freebsd",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":empty",
+ compiler_files = ":empty",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 0,
+ toolchain_config = ":local_freebsd",
+ toolchain_identifier = "local_freebsd",
+)
+
+cc_toolchain_config(
+ name = "local_freebsd",
+ cpu = "freebsd",
+)
+
+toolchain(
+ name = "cc-toolchain-freebsd",
+ exec_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:freebsd",
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:freebsd",
+ ],
+ toolchain = ":cc-compiler-freebsd",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+cc_toolchain(
+ name = "cc-compiler-armeabi-v7a",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":empty",
+ compiler_files = ":empty",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 0,
+ toolchain_config = ":stub_armeabi-v7a",
+ toolchain_identifier = "stub_armeabi-v7a",
+)
+
+cc_toolchain_config(
+ name = "stub_armeabi-v7a",
+ cpu = "armeabi-v7a",
+)
+
+toolchain(
+ name = "cc-toolchain-armeabi-v7a",
+ exec_compatible_with = [
+ "@platforms//cpu:arm",
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:arm",
+ "@platforms//os:android",
+ ],
+ toolchain = ":cc-compiler-armeabi-v7a",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+filegroup(
+ name = "link_dynamic_library",
+ srcs = ["link_dynamic_library.sh"],
+)
diff --git a/cc/private/toolchain/BUILD.toolchains.tpl b/cc/private/toolchain/BUILD.toolchains.tpl
new file mode 100644
index 0000000..3fee112
--- /dev/null
+++ b/cc/private/toolchain/BUILD.toolchains.tpl
@@ -0,0 +1,20 @@
+load("@local_config_platform//:constraints.bzl", "HOST_CONSTRAINTS")
+
+toolchain(
+ name = "cc-toolchain-%{name}",
+ exec_compatible_with = HOST_CONSTRAINTS,
+ target_compatible_with = HOST_CONSTRAINTS,
+ toolchain = "@local_config_cc//:cc-compiler-%{name}",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+toolchain(
+ name = "cc-toolchain-armeabi-v7a",
+ exec_compatible_with = HOST_CONSTRAINTS,
+ target_compatible_with = [
+ "@platforms//cpu:arm",
+ "@platforms//os:android",
+ ],
+ toolchain = "@local_config_cc//:cc-compiler-armeabi-v7a",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
diff --git a/cc/private/toolchain/BUILD.tpl b/cc/private/toolchain/BUILD.tpl
new file mode 100644
index 0000000..9241326
--- /dev/null
+++ b/cc/private/toolchain/BUILD.tpl
@@ -0,0 +1,113 @@
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This becomes the BUILD file for @local_config_cc// under non-FreeBSD unixes.
+
+package(default_visibility = ["//visibility:public"])
+
+load(":cc_toolchain_config.bzl", "cc_toolchain_config")
+load(":armeabi_cc_toolchain_config.bzl", "armeabi_cc_toolchain_config")
+load("@rules_cc//cc:defs.bzl", "cc_toolchain", "cc_toolchain_suite")
+
+licenses(["notice"]) # Apache 2.0
+
+cc_library(
+ name = "malloc",
+)
+
+filegroup(
+ name = "empty",
+ srcs = [],
+)
+
+filegroup(
+ name = "cc_wrapper",
+ srcs = ["cc_wrapper.sh"],
+)
+
+filegroup(
+ name = "compiler_deps",
+ srcs = glob(["extra_tools/**"], allow_empty = True) + [%{cc_compiler_deps}],
+)
+
+# This is the entry point for --crosstool_top. Toolchains are found
+# by lopping off the name of --crosstool_top and searching for
+# the "${CPU}" entry in the toolchains attribute.
+cc_toolchain_suite(
+ name = "toolchain",
+ toolchains = {
+ "%{name}|%{compiler}": ":cc-compiler-%{name}",
+ "%{name}": ":cc-compiler-%{name}",
+ "armeabi-v7a|compiler": ":cc-compiler-armeabi-v7a",
+ "armeabi-v7a": ":cc-compiler-armeabi-v7a",
+ },
+)
+
+cc_toolchain(
+ name = "cc-compiler-%{name}",
+ toolchain_identifier = "%{cc_toolchain_identifier}",
+ toolchain_config = ":%{cc_toolchain_identifier}",
+ all_files = ":compiler_deps",
+ ar_files = ":compiler_deps",
+ as_files = ":compiler_deps",
+ compiler_files = ":compiler_deps",
+ dwp_files = ":empty",
+ linker_files = ":compiler_deps",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = %{supports_param_files},
+)
+
+cc_toolchain_config(
+ name = "%{cc_toolchain_identifier}",
+ cpu = "%{target_cpu}",
+ compiler = "%{compiler}",
+ toolchain_identifier = "%{cc_toolchain_identifier}",
+ host_system_name = "%{host_system_name}",
+ target_system_name = "%{target_system_name}",
+ target_libc = "%{target_libc}",
+ abi_version = "%{abi_version}",
+ abi_libc_version = "%{abi_libc_version}",
+ cxx_builtin_include_directories = [%{cxx_builtin_include_directories}],
+ tool_paths = {%{tool_paths}},
+ compile_flags = [%{compile_flags}],
+ opt_compile_flags = [%{opt_compile_flags}],
+ dbg_compile_flags = [%{dbg_compile_flags}],
+ cxx_flags = [%{cxx_flags}],
+ link_flags = [%{link_flags}],
+ link_libs = [%{link_libs}],
+ opt_link_flags = [%{opt_link_flags}],
+ unfiltered_compile_flags = [%{unfiltered_compile_flags}],
+ coverage_compile_flags = [%{coverage_compile_flags}],
+ coverage_link_flags = [%{coverage_link_flags}],
+ supports_start_end_lib = %{supports_start_end_lib},
+)
+
+# Android tooling requires a default toolchain for the armeabi-v7a cpu.
+cc_toolchain(
+ name = "cc-compiler-armeabi-v7a",
+ toolchain_identifier = "stub_armeabi-v7a",
+ toolchain_config = ":stub_armeabi-v7a",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":empty",
+ compiler_files = ":empty",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 1,
+)
+
+armeabi_cc_toolchain_config(name = "stub_armeabi-v7a")
diff --git a/cc/private/toolchain/BUILD.windows.tpl b/cc/private/toolchain/BUILD.windows.tpl
new file mode 100644
index 0000000..66dbafd
--- /dev/null
+++ b/cc/private/toolchain/BUILD.windows.tpl
@@ -0,0 +1,316 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This becomes the BUILD file for @local_config_cc// under Windows.
+
+package(default_visibility = ["//visibility:public"])
+
+load("@rules_cc//cc:defs.bzl", "cc_toolchain", "cc_toolchain_suite", "cc_library")
+load(":windows_cc_toolchain_config.bzl", "cc_toolchain_config")
+load(":armeabi_cc_toolchain_config.bzl", "armeabi_cc_toolchain_config")
+cc_library(
+ name = "malloc",
+)
+
+filegroup(
+ name = "empty",
+ srcs = [],
+)
+
+filegroup(
+ name = "mingw_compiler_files",
+ srcs = [":builtin_include_directory_paths_mingw"]
+)
+
+filegroup(
+ name = "clangcl_compiler_files",
+ srcs = [":builtin_include_directory_paths_clangcl"]
+)
+
+filegroup(
+ name = "msvc_compiler_files",
+ srcs = [":builtin_include_directory_paths_msvc"]
+)
+
+# Hardcoded toolchain, legacy behaviour.
+cc_toolchain_suite(
+ name = "toolchain",
+ toolchains = {
+ "armeabi-v7a|compiler": ":cc-compiler-armeabi-v7a",
+ "x64_windows|msvc-cl": ":cc-compiler-x64_windows",
+ "x64_windows|msys-gcc": ":cc-compiler-x64_windows_msys",
+ "x64_windows|mingw-gcc": ":cc-compiler-x64_windows_mingw",
+ "x64_windows|clang-cl": ":cc-compiler-x64_windows-clang-cl",
+ "x64_windows_msys": ":cc-compiler-x64_windows_msys",
+ "x64_windows": ":cc-compiler-x64_windows",
+ "armeabi-v7a": ":cc-compiler-armeabi-v7a",
+ },
+)
+
+cc_toolchain(
+ name = "cc-compiler-x64_windows_msys",
+ toolchain_identifier = "msys_x64",
+ toolchain_config = ":msys_x64",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":mingw_compiler_files",
+ compiler_files = ":mingw_compiler_files",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 1,
+)
+
+cc_toolchain_config(
+ name = "msys_x64",
+ cpu = "x64_windows",
+ compiler = "msys-gcc",
+ host_system_name = "local",
+ target_system_name = "local",
+ target_libc = "msys",
+ abi_version = "local",
+ abi_libc_version = "local",
+ cxx_builtin_include_directories = [%{cxx_builtin_include_directories}],
+ tool_paths = {%{tool_paths}},
+ tool_bin_path = "%{tool_bin_path}",
+ dbg_mode_debug_flag = "%{dbg_mode_debug_flag}",
+ fastbuild_mode_debug_flag = "%{fastbuild_mode_debug_flag}",
+)
+
+toolchain(
+ name = "cc-toolchain-x64_windows_msys",
+ exec_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ "@rules_cc//cc/private/toolchain:msys",
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ toolchain = ":cc-compiler-x64_windows_msys",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+cc_toolchain(
+ name = "cc-compiler-x64_windows_mingw",
+ toolchain_identifier = "msys_x64_mingw",
+ toolchain_config = ":msys_x64_mingw",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":mingw_compiler_files",
+ compiler_files = ":mingw_compiler_files",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 0,
+)
+
+cc_toolchain_config(
+ name = "msys_x64_mingw",
+ cpu = "x64_windows",
+ compiler = "mingw-gcc",
+ host_system_name = "local",
+ target_system_name = "local",
+ target_libc = "mingw",
+ abi_version = "local",
+ abi_libc_version = "local",
+ tool_bin_path = "%{mingw_tool_bin_path}",
+ cxx_builtin_include_directories = [%{mingw_cxx_builtin_include_directories}],
+ tool_paths = {%{mingw_tool_paths}},
+ dbg_mode_debug_flag = "%{dbg_mode_debug_flag}",
+ fastbuild_mode_debug_flag = "%{fastbuild_mode_debug_flag}",
+)
+
+toolchain(
+ name = "cc-toolchain-x64_windows_mingw",
+ exec_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ "@rules_cc//cc/private/toolchain:mingw",
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ toolchain = ":cc-compiler-x64_windows_mingw",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+cc_toolchain(
+ name = "cc-compiler-x64_windows",
+ toolchain_identifier = "msvc_x64",
+ toolchain_config = ":msvc_x64",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":msvc_compiler_files",
+ compiler_files = ":msvc_compiler_files",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 1,
+)
+
+cc_toolchain_config(
+ name = "msvc_x64",
+ cpu = "x64_windows",
+ compiler = "msvc-cl",
+ host_system_name = "local",
+ target_system_name = "local",
+ target_libc = "msvcrt",
+ abi_version = "local",
+ abi_libc_version = "local",
+ toolchain_identifier = "msvc_x64",
+ msvc_env_tmp = "%{msvc_env_tmp}",
+ msvc_env_path = "%{msvc_env_path}",
+ msvc_env_include = "%{msvc_env_include}",
+ msvc_env_lib = "%{msvc_env_lib}",
+ msvc_cl_path = "%{msvc_cl_path}",
+ msvc_ml_path = "%{msvc_ml_path}",
+ msvc_link_path = "%{msvc_link_path}",
+ msvc_lib_path = "%{msvc_lib_path}",
+ cxx_builtin_include_directories = [%{msvc_cxx_builtin_include_directories}],
+ tool_paths = {
+ "ar": "%{msvc_lib_path}",
+ "ml": "%{msvc_ml_path}",
+ "cpp": "%{msvc_cl_path}",
+ "gcc": "%{msvc_cl_path}",
+ "gcov": "wrapper/bin/msvc_nop.bat",
+ "ld": "%{msvc_link_path}",
+ "nm": "wrapper/bin/msvc_nop.bat",
+ "objcopy": "wrapper/bin/msvc_nop.bat",
+ "objdump": "wrapper/bin/msvc_nop.bat",
+ "strip": "wrapper/bin/msvc_nop.bat",
+ },
+ default_link_flags = ["/MACHINE:X64"],
+ dbg_mode_debug_flag = "%{dbg_mode_debug_flag}",
+ fastbuild_mode_debug_flag = "%{fastbuild_mode_debug_flag}",
+)
+
+toolchain(
+ name = "cc-toolchain-x64_windows",
+ exec_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ toolchain = ":cc-compiler-x64_windows",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+cc_toolchain(
+ name = "cc-compiler-x64_windows-clang-cl",
+ toolchain_identifier = "clang_cl_x64",
+ toolchain_config = ":clang_cl_x64",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":clangcl_compiler_files",
+ compiler_files = ":clangcl_compiler_files",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 1,
+)
+
+cc_toolchain_config(
+ name = "clang_cl_x64",
+ cpu = "x64_windows",
+ compiler = "clang-cl",
+ host_system_name = "local",
+ target_system_name = "local",
+ target_libc = "msvcrt",
+ abi_version = "local",
+ abi_libc_version = "local",
+ toolchain_identifier = "clang_cl_x64",
+ msvc_env_tmp = "%{clang_cl_env_tmp}",
+ msvc_env_path = "%{clang_cl_env_path}",
+ msvc_env_include = "%{clang_cl_env_include}",
+ msvc_env_lib = "%{clang_cl_env_lib}",
+ msvc_cl_path = "%{clang_cl_cl_path}",
+ msvc_ml_path = "%{clang_cl_ml_path}",
+ msvc_link_path = "%{clang_cl_link_path}",
+ msvc_lib_path = "%{clang_cl_lib_path}",
+ cxx_builtin_include_directories = [%{clang_cl_cxx_builtin_include_directories}],
+ tool_paths = {
+ "ar": "%{clang_cl_lib_path}",
+ "ml": "%{clang_cl_ml_path}",
+ "cpp": "%{clang_cl_cl_path}",
+ "gcc": "%{clang_cl_cl_path}",
+ "gcov": "wrapper/bin/msvc_nop.bat",
+ "ld": "%{clang_cl_link_path}",
+ "nm": "wrapper/bin/msvc_nop.bat",
+ "objcopy": "wrapper/bin/msvc_nop.bat",
+ "objdump": "wrapper/bin/msvc_nop.bat",
+ "strip": "wrapper/bin/msvc_nop.bat",
+ },
+ default_link_flags = ["/MACHINE:X64", "/DEFAULTLIB:clang_rt.builtins-x86_64.lib"],
+ dbg_mode_debug_flag = "%{clang_cl_dbg_mode_debug_flag}",
+ fastbuild_mode_debug_flag = "%{clang_cl_fastbuild_mode_debug_flag}",
+)
+
+toolchain(
+ name = "cc-toolchain-x64_windows-clang-cl",
+ exec_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ "@rules_cc//cc/private/toolchain:clang-cl",
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:x86_64",
+ "@platforms//os:windows",
+ ],
+ toolchain = ":cc-compiler-x64_windows-clang-cl",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+cc_toolchain(
+ name = "cc-compiler-armeabi-v7a",
+ toolchain_identifier = "stub_armeabi-v7a",
+ toolchain_config = ":stub_armeabi-v7a",
+ all_files = ":empty",
+ ar_files = ":empty",
+ as_files = ":empty",
+ compiler_files = ":empty",
+ dwp_files = ":empty",
+ linker_files = ":empty",
+ objcopy_files = ":empty",
+ strip_files = ":empty",
+ supports_param_files = 1,
+)
+
+armeabi_cc_toolchain_config(name = "stub_armeabi-v7a")
+
+toolchain(
+ name = "cc-toolchain-armeabi-v7a",
+ exec_compatible_with = [
+ ],
+ target_compatible_with = [
+ "@platforms//cpu:arm",
+ "@platforms//os:android",
+ ],
+ toolchain = ":cc-compiler-armeabi-v7a",
+ toolchain_type = "@rules_cc//cc:toolchain_type",
+)
+
+filegroup(
+ name = "link_dynamic_library",
+ srcs = ["link_dynamic_library.sh"],
+)
diff --git a/cc/private/toolchain/armeabi_cc_toolchain_config.bzl b/cc/private/toolchain/armeabi_cc_toolchain_config.bzl
new file mode 100644
index 0000000..66c5752
--- /dev/null
+++ b/cc/private/toolchain/armeabi_cc_toolchain_config.bzl
@@ -0,0 +1,82 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A Starlark cc_toolchain configuration rule"""
+
+load(
+ "@rules_cc//cc:cc_toolchain_config_lib.bzl",
+ "feature",
+ "tool_path",
+)
+
+def _impl(ctx):
+ toolchain_identifier = "stub_armeabi-v7a"
+ host_system_name = "armeabi-v7a"
+ target_system_name = "armeabi-v7a"
+ target_cpu = "armeabi-v7a"
+ target_libc = "armeabi-v7a"
+ compiler = "compiler"
+ abi_version = "armeabi-v7a"
+ abi_libc_version = "armeabi-v7a"
+ cc_target_os = None
+ builtin_sysroot = None
+ action_configs = []
+
+ supports_pic_feature = feature(name = "supports_pic", enabled = True)
+ supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
+ features = [supports_dynamic_linker_feature, supports_pic_feature]
+
+ cxx_builtin_include_directories = []
+ artifact_name_patterns = []
+ make_variables = []
+
+ tool_paths = [
+ tool_path(name = "ar", path = "/bin/false"),
+ tool_path(name = "compat-ld", path = "/bin/false"),
+ tool_path(name = "cpp", path = "/bin/false"),
+ tool_path(name = "dwp", path = "/bin/false"),
+ tool_path(name = "gcc", path = "/bin/false"),
+ tool_path(name = "gcov", path = "/bin/false"),
+ tool_path(name = "ld", path = "/bin/false"),
+ tool_path(name = "nm", path = "/bin/false"),
+ tool_path(name = "objcopy", path = "/bin/false"),
+ tool_path(name = "objdump", path = "/bin/false"),
+ tool_path(name = "strip", path = "/bin/false"),
+ ]
+
+ return cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ artifact_name_patterns = artifact_name_patterns,
+ cxx_builtin_include_directories = cxx_builtin_include_directories,
+ toolchain_identifier = toolchain_identifier,
+ host_system_name = host_system_name,
+ target_system_name = target_system_name,
+ target_cpu = target_cpu,
+ target_libc = target_libc,
+ compiler = compiler,
+ abi_version = abi_version,
+ abi_libc_version = abi_libc_version,
+ tool_paths = tool_paths,
+ make_variables = make_variables,
+ builtin_sysroot = builtin_sysroot,
+ cc_target_os = cc_target_os,
+ )
+
+armeabi_cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {},
+ provides = [CcToolchainConfigInfo],
+)
diff --git a/cc/private/toolchain/build_interface_so b/cc/private/toolchain/build_interface_so
new file mode 100644
index 0000000..626e707
--- /dev/null
+++ b/cc/private/toolchain/build_interface_so
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+if [[ $# != 2 ]]; then
+ echo "Usage: $0 <so> <interface so>" 1>&2
+ exit 1
+fi
+
+exec cp $1 $2
diff --git a/cc/private/toolchain/cc_configure.bzl b/cc/private/toolchain/cc_configure.bzl
new file mode 100644
index 0000000..c7b19de
--- /dev/null
+++ b/cc/private/toolchain/cc_configure.bzl
@@ -0,0 +1,150 @@
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Rules for configuring the C++ toolchain (experimental)."""
+
+load(
+ ":lib_cc_configure.bzl",
+ "get_cpu_value",
+ "resolve_labels",
+)
+load(":unix_cc_configure.bzl", "configure_unix_toolchain")
+load(":windows_cc_configure.bzl", "configure_windows_toolchain")
+
+def cc_autoconf_toolchains_impl(repository_ctx):
+ """Generate BUILD file with 'toolchain' targets for the local host C++ toolchain.
+
+ Args:
+ repository_ctx: repository context
+ """
+ env = repository_ctx.os.environ
+
+    # Should we try to find a C++ toolchain at all? If not, we don't have to generate any C++ toolchain targets.
+ should_detect_cpp_toolchain = "BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN" not in env or env["BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN"] != "1"
+
+ if should_detect_cpp_toolchain:
+ paths = resolve_labels(repository_ctx, [
+ "@rules_cc//cc/private/toolchain:BUILD.toolchains.tpl",
+ ])
+ repository_ctx.template(
+ "BUILD",
+ paths["@rules_cc//cc/private/toolchain:BUILD.toolchains.tpl"],
+ {"%{name}": get_cpu_value(repository_ctx)},
+ )
+ else:
+ repository_ctx.file("BUILD", "# C++ toolchain autoconfiguration was disabled by BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN env variable.")
+
+cc_autoconf_toolchains = repository_rule(
+ environ = [
+ "BAZEL_USE_CPP_ONLY_TOOLCHAIN",
+ "BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN",
+ ],
+ implementation = cc_autoconf_toolchains_impl,
+ configure = True,
+)
+
+def cc_autoconf_impl(repository_ctx, overriden_tools = dict()):
+ """Generate BUILD file with 'cc_toolchain' targets for the local host C++ toolchain.
+
+ Args:
+ repository_ctx: repository context
+ overriden_tools: dict of tool paths to use instead of autoconfigured tools
+ """
+
+ env = repository_ctx.os.environ
+ cpu_value = get_cpu_value(repository_ctx)
+ if "BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN" in env and env["BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN"] == "1":
+ paths = resolve_labels(repository_ctx, [
+ "@rules_cc//cc/private/toolchain:BUILD.empty",
+ "@rules_cc//cc/private/toolchain:empty_cc_toolchain_config.bzl",
+ ])
+ repository_ctx.symlink(paths["@rules_cc//cc/private/toolchain:empty_cc_toolchain_config.bzl"], "cc_toolchain_config.bzl")
+ repository_ctx.symlink(paths["@rules_cc//cc/private/toolchain:BUILD.empty"], "BUILD")
+ elif cpu_value == "freebsd":
+ paths = resolve_labels(repository_ctx, [
+ "@rules_cc//cc/private/toolchain:BUILD.static.freebsd",
+ "@rules_cc//cc/private/toolchain:freebsd_cc_toolchain_config.bzl",
+ ])
+
+        # This defaults to a static crosstool; we should eventually
+        # autoconfigure this platform too. Theoretically, FreeBSD should be
+        # straightforward to add, but we cannot run it in a Docker container,
+        # so this is skipped until we have proper tests for FreeBSD.
+ repository_ctx.symlink(paths["@rules_cc//cc/private/toolchain:freebsd_cc_toolchain_config.bzl"], "cc_toolchain_config.bzl")
+ repository_ctx.symlink(paths["@rules_cc//cc/private/toolchain:BUILD.static.freebsd"], "BUILD")
+ elif cpu_value == "x64_windows":
+ # TODO(ibiryukov): overriden_tools are only supported in configure_unix_toolchain.
+        # We might want to add that to Windows too (at least for the msys toolchain).
+ configure_windows_toolchain(repository_ctx)
+ else:
+ configure_unix_toolchain(repository_ctx, cpu_value, overriden_tools)
+
+MSVC_ENVVARS = [
+ "BAZEL_VC",
+ "BAZEL_VC_FULL_VERSION",
+ "BAZEL_VS",
+ "BAZEL_WINSDK_FULL_VERSION",
+ "VS90COMNTOOLS",
+ "VS100COMNTOOLS",
+ "VS110COMNTOOLS",
+ "VS120COMNTOOLS",
+ "VS140COMNTOOLS",
+ "VS150COMNTOOLS",
+ "VS160COMNTOOLS",
+ "TMP",
+ "TEMP",
+]
+
+cc_autoconf = repository_rule(
+ environ = [
+ "ABI_LIBC_VERSION",
+ "ABI_VERSION",
+ "BAZEL_COMPILER",
+ "BAZEL_HOST_SYSTEM",
+ "BAZEL_CXXOPTS",
+ "BAZEL_LINKOPTS",
+ "BAZEL_LINKLIBS",
+ "BAZEL_PYTHON",
+ "BAZEL_SH",
+ "BAZEL_TARGET_CPU",
+ "BAZEL_TARGET_LIBC",
+ "BAZEL_TARGET_SYSTEM",
+ "BAZEL_USE_CPP_ONLY_TOOLCHAIN",
+ "BAZEL_USE_XCODE_TOOLCHAIN",
+ "BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN",
+ "BAZEL_USE_LLVM_NATIVE_COVERAGE",
+ "BAZEL_LLVM",
+ "BAZEL_IGNORE_SYSTEM_HEADERS_VERSIONS",
+ "USE_CLANG_CL",
+ "CC",
+ "CC_CONFIGURE_DEBUG",
+ "CC_TOOLCHAIN_NAME",
+ "CPLUS_INCLUDE_PATH",
+ "GCOV",
+ "HOMEBREW_RUBY_PATH",
+ "SYSTEMROOT",
+ ] + MSVC_ENVVARS,
+ implementation = cc_autoconf_impl,
+ configure = True,
+)
+
+# buildifier: disable=unnamed-macro
+def cc_configure():
+    """A C++ configuration rule that generates the crosstool file."""
+ cc_autoconf_toolchains(name = "local_config_cc_toolchains")
+ cc_autoconf(name = "local_config_cc")
+ native.bind(name = "cc_toolchain", actual = "@local_config_cc//:toolchain")
+ native.register_toolchains(
+        # Use register_toolchains' target pattern expansion to register all toolchains in the package.
+ "@local_config_cc_toolchains//:all",
+ )
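+
+# Illustrative sketch (assumption; the load label may differ depending on how
+# rules_cc is consumed): calling this macro from a WORKSPACE file registers
+# the autoconfigured toolchains.
+#
+#     load("@rules_cc//cc/private/toolchain:cc_configure.bzl", "cc_configure")
+#     cc_configure()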
diff --git a/cc/private/toolchain/cc_toolchain_config.bzl b/cc/private/toolchain/cc_toolchain_config.bzl
new file mode 100644
index 0000000..265fce6
--- /dev/null
+++ b/cc/private/toolchain/cc_toolchain_config.bzl
@@ -0,0 +1,1491 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A Starlark cc_toolchain configuration rule"""
+
+load(
+ "@rules_cc//cc:action_names.bzl",
+ _ASSEMBLE_ACTION_NAME = "ASSEMBLE_ACTION_NAME",
+ _CLIF_MATCH_ACTION_NAME = "CLIF_MATCH_ACTION_NAME",
+ _CPP_COMPILE_ACTION_NAME = "CPP_COMPILE_ACTION_NAME",
+ _CPP_HEADER_PARSING_ACTION_NAME = "CPP_HEADER_PARSING_ACTION_NAME",
+ _CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME = "CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME",
+ _CPP_LINK_EXECUTABLE_ACTION_NAME = "CPP_LINK_EXECUTABLE_ACTION_NAME",
+ _CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME = "CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME",
+ _CPP_MODULE_CODEGEN_ACTION_NAME = "CPP_MODULE_CODEGEN_ACTION_NAME",
+ _CPP_MODULE_COMPILE_ACTION_NAME = "CPP_MODULE_COMPILE_ACTION_NAME",
+ _C_COMPILE_ACTION_NAME = "C_COMPILE_ACTION_NAME",
+ _LINKSTAMP_COMPILE_ACTION_NAME = "LINKSTAMP_COMPILE_ACTION_NAME",
+ _LTO_BACKEND_ACTION_NAME = "LTO_BACKEND_ACTION_NAME",
+ _PREPROCESS_ASSEMBLE_ACTION_NAME = "PREPROCESS_ASSEMBLE_ACTION_NAME",
+)
+load(
+ "@rules_cc//cc:cc_toolchain_config_lib.bzl",
+ "action_config",
+ "feature",
+ "flag_group",
+ "flag_set",
+ "tool",
+ "tool_path",
+ "with_feature_set",
+)
+
+all_compile_actions = [
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+]
+
+all_cpp_compile_actions = [
+ _CPP_COMPILE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+]
+
+preprocessor_compile_actions = [
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+]
+
+codegen_compile_actions = [
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+]
+
+all_link_actions = [
+ _CPP_LINK_EXECUTABLE_ACTION_NAME,
+ _CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME,
+ _CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME,
+]
+
+def _impl(ctx):
+ if ctx.attr.disable_static_cc_toolchains:
+ fail("@rules_cc//cc/private/toolchain:default-toolchain, as well as the cc_toolchains it points " +
+ "to have been removed. See https://github.com/bazelbuild/bazel/issues/8546.")
+
+ if (ctx.attr.cpu == "darwin"):
+ toolchain_identifier = "local_darwin"
+ elif (ctx.attr.cpu == "freebsd"):
+ toolchain_identifier = "local_freebsd"
+ elif (ctx.attr.cpu == "local"):
+ toolchain_identifier = "local_linux"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang"):
+ toolchain_identifier = "local_windows_clang"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw"):
+ toolchain_identifier = "local_windows_mingw"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ toolchain_identifier = "local_windows_msys64"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ toolchain_identifier = "local_windows_msys64_mingw64"
+ elif (ctx.attr.cpu == "armeabi-v7a"):
+ toolchain_identifier = "stub_armeabi-v7a"
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ toolchain_identifier = "vc_14_0_x64"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ host_system_name = "armeabi-v7a"
+ elif (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows" or
+ ctx.attr.cpu == "x64_windows_msvc"):
+ host_system_name = "local"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ target_system_name = "armeabi-v7a"
+ elif (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows" or
+ ctx.attr.cpu == "x64_windows_msvc"):
+ target_system_name = "local"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ target_cpu = "armeabi-v7a"
+ elif (ctx.attr.cpu == "darwin"):
+ target_cpu = "darwin"
+ elif (ctx.attr.cpu == "freebsd"):
+ target_cpu = "freebsd"
+ elif (ctx.attr.cpu == "local"):
+ target_cpu = "local"
+ elif (ctx.attr.cpu == "x64_windows"):
+ target_cpu = "x64_windows"
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ target_cpu = "x64_windows_msvc"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ target_libc = "armeabi-v7a"
+ elif (ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows"):
+ target_libc = "local"
+ elif (ctx.attr.cpu == "darwin"):
+ target_libc = "macosx"
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ target_libc = "msvcrt140"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "x64_windows_msvc"):
+ compiler = "cl"
+ elif (ctx.attr.cpu == "armeabi-v7a" or
+ ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local"):
+ compiler = "compiler"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang"):
+ compiler = "windows_clang"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw"):
+ compiler = "windows_mingw"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ compiler = "windows_msys64"
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ compiler = "windows_msys64_mingw64"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ abi_version = "armeabi-v7a"
+ elif (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows" or
+ ctx.attr.cpu == "x64_windows_msvc"):
+ abi_version = "local"
+ else:
+ fail("Unreachable")
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ abi_libc_version = "armeabi-v7a"
+ elif (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows" or
+ ctx.attr.cpu == "x64_windows_msvc"):
+ abi_libc_version = "local"
+ else:
+ fail("Unreachable")
+
+ cc_target_os = None
+
+ builtin_sysroot = None
+
+ objcopy_embed_data_action = None
+ if (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local"):
+ objcopy_embed_data_action = action_config(
+ action_name = "objcopy_embed_data",
+ enabled = True,
+ tools = [tool(path = "/usr/bin/objcopy")],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang"):
+ objcopy_embed_data_action = action_config(
+ action_name = "objcopy_embed_data",
+ enabled = True,
+ tools = [tool(path = "C:/Program Files (x86)/LLVM/bin/objcopy")],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw"):
+ objcopy_embed_data_action = action_config(
+ action_name = "objcopy_embed_data",
+ enabled = True,
+ tools = [tool(path = "C:/mingw/bin/objcopy")],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ objcopy_embed_data_action = action_config(
+ action_name = "objcopy_embed_data",
+ enabled = True,
+ tools = [tool(path = "C:/tools/msys64/mingw64/bin/objcopy")],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ objcopy_embed_data_action = action_config(
+ action_name = "objcopy_embed_data",
+ enabled = True,
+ tools = [tool(path = "C:/tools/msys64/usr/bin/objcopy")],
+ )
+
+ c_compile_action = action_config(
+ action_name = _C_COMPILE_ACTION_NAME,
+ implies = [
+ "compiler_input_flags",
+ "compiler_output_flags",
+ "default_compile_flags",
+ "user_compile_flags",
+ "sysroot",
+ "unfiltered_compile_flags",
+ ],
+ tools = [tool(path = "wrapper/bin/msvc_cl.bat")],
+ )
+
+ cpp_compile_action = action_config(
+ action_name = _CPP_COMPILE_ACTION_NAME,
+ implies = [
+ "compiler_input_flags",
+ "compiler_output_flags",
+ "default_compile_flags",
+ "user_compile_flags",
+ "sysroot",
+ "unfiltered_compile_flags",
+ ],
+ tools = [tool(path = "wrapper/bin/msvc_cl.bat")],
+ )
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ action_configs = []
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ action_configs = [c_compile_action, cpp_compile_action]
+ elif (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows"):
+ action_configs = [objcopy_embed_data_action]
+ else:
+ fail("Unreachable")
+
+ random_seed_feature = feature(name = "random_seed", enabled = True)
+
+ compiler_output_flags_feature = feature(
+ name = "compiler_output_flags",
+ flag_sets = [
+ flag_set(
+ actions = [_ASSEMBLE_ACTION_NAME],
+ flag_groups = [
+ flag_group(
+ flags = ["/Fo%{output_file}", "/Zi"],
+ expand_if_available = "output_file",
+ expand_if_not_available = "output_assembly_file",
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/Fo%{output_file}"],
+ expand_if_available = "output_file",
+ expand_if_not_available = "output_assembly_file",
+ ),
+ flag_group(
+ flags = ["/Fa%{output_file}"],
+ expand_if_available = "output_file",
+ ),
+ flag_group(
+ flags = ["/P", "/Fi%{output_file}"],
+ expand_if_available = "output_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ default_link_flags_feature = None
+ if (ctx.attr.cpu == "local"):
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-lstdc++",
+ "-Wl,-z,relro,-z,now",
+ "-no-canonical-prefixes",
+ "-pass-exit-codes",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["-Wl,--gc-sections"])],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "freebsd"):
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-lstdc++",
+ "-Wl,-z,relro,-z,now",
+ "-no-canonical-prefixes",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["-Wl,--gc-sections"])],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "darwin"):
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-lstdc++",
+ "-undefined",
+ "dynamic_lookup",
+ "-headerpad_max_install_names",
+ "-no-canonical-prefixes",
+ ],
+ ),
+ ],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["-lstdc++"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["-m64"])],
+ ),
+ ],
+ )
+
+ unfiltered_compile_flags_feature = None
+ if (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd"):
+ unfiltered_compile_flags_feature = feature(
+ name = "unfiltered_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-no-canonical-prefixes",
+ "-Wno-builtin-macro-redefined",
+ "-D__DATE__=\"redacted\"",
+ "-D__TIMESTAMP__=\"redacted\"",
+ "-D__TIME__=\"redacted\"",
+ ],
+ ),
+ ],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "local"):
+ unfiltered_compile_flags_feature = feature(
+ name = "unfiltered_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-no-canonical-prefixes",
+ "-fno-canonical-system-headers",
+ "-Wno-builtin-macro-redefined",
+ "-D__DATE__=\"redacted\"",
+ "-D__TIMESTAMP__=\"redacted\"",
+ "-D__TIME__=\"redacted\"",
+ ],
+ ),
+ ],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ unfiltered_compile_flags_feature = feature(
+ name = "unfiltered_compile_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["%{unfiltered_compile_flags}"],
+ iterate_over = "unfiltered_compile_flags",
+ expand_if_available = "unfiltered_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ supports_pic_feature = feature(name = "supports_pic", enabled = True)
+
+ default_compile_flags_feature = None
+ if (ctx.attr.cpu == "darwin"):
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-D_FORTIFY_SOURCE=1",
+ "-fstack-protector",
+ "-fcolor-diagnostics",
+ "-Wall",
+ "-Wthread-safety",
+ "-Wself-assign",
+ "-fno-omit-frame-pointer",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-g"])],
+ with_features = [with_feature_set(features = ["dbg"])],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-g0",
+ "-O2",
+ "-DNDEBUG",
+ "-ffunction-sections",
+ "-fdata-sections",
+ ],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ flag_set(
+ actions = [
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-std=c++0x"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "local"):
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-U_FORTIFY_SOURCE",
+ "-D_FORTIFY_SOURCE=1",
+ "-fstack-protector",
+ "-Wall",
+ "-Wunused-but-set-parameter",
+ "-Wno-free-nonheap-object",
+ "-fno-omit-frame-pointer",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-g"])],
+ with_features = [with_feature_set(features = ["dbg"])],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-g0",
+ "-O2",
+ "-DNDEBUG",
+ "-ffunction-sections",
+ "-fdata-sections",
+ ],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ flag_set(
+ actions = [
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-std=c++0x"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "freebsd"):
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-U_FORTIFY_SOURCE",
+ "-D_FORTIFY_SOURCE=1",
+ "-fstack-protector",
+ "-Wall",
+ "-fno-omit-frame-pointer",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-g"])],
+ with_features = [with_feature_set(features = ["dbg"])],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-g0",
+ "-O2",
+ "-DNDEBUG",
+ "-ffunction-sections",
+ "-fdata-sections",
+ ],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ flag_set(
+ actions = [
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-std=c++0x"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-m64",
+ "/D__inline__=__inline",
+ "/DCOMPILER_MSVC",
+ "/DNOGDI",
+ "/DNOMINMAX",
+ "/DPRAGMA_SUPPORTED",
+ "/D_WIN32_WINNT=0x0601",
+ "/D_CRT_SECURE_NO_DEPRECATE",
+ "/D_CRT_SECURE_NO_WARNINGS",
+ "/D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS",
+ "/D_USE_MATH_DEFINES",
+ "/nologo",
+ "/bigobj",
+ "/Zm500",
+ "/J",
+ "/Gy",
+ "/GF",
+ "/W3",
+ "/EHsc",
+ "/wd4351",
+ "/wd4291",
+ "/wd4250",
+ "/wd4996",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/DDEBUG=1", "-g", "/Od", "-Xcompilation-mode=dbg"],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["dbg"])],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/DNDEBUG", "/Od", "-Xcompilation-mode=fastbuild"],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["fastbuild"])],
+ ),
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/DNDEBUG", "/O2", "-Xcompilation-mode=opt"],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang" or
+ ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw" or
+ ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-std=c++0x"])],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [flag_group(flags = ["-std=gnu++0x"])],
+ ),
+ ],
+ )
+
+ opt_feature = feature(name = "opt")
+
+ supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
+
+ objcopy_embed_flags_feature = feature(
+ name = "objcopy_embed_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = ["objcopy_embed_data"],
+ flag_groups = [flag_group(flags = ["-I", "binary"])],
+ ),
+ ],
+ )
+
+ dbg_feature = feature(name = "dbg")
+
+ user_compile_flags_feature = None
+ if (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local"):
+ user_compile_flags_feature = feature(
+ name = "user_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_compile_flags}"],
+ iterate_over = "user_compile_flags",
+ expand_if_available = "user_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ user_compile_flags_feature = feature(
+ name = "user_compile_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_compile_flags}"],
+ iterate_over = "user_compile_flags",
+ expand_if_available = "user_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ sysroot_feature = None
+ if (ctx.attr.cpu == "darwin" or
+ ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local"):
+ sysroot_feature = feature(
+ name = "sysroot",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _LINKSTAMP_COMPILE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _LTO_BACKEND_ACTION_NAME,
+ _CLIF_MATCH_ACTION_NAME,
+ _CPP_LINK_EXECUTABLE_ACTION_NAME,
+ _CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME,
+ _CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["--sysroot=%{sysroot}"],
+ expand_if_available = "sysroot",
+ ),
+ ],
+ ),
+ ],
+ )
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ sysroot_feature = feature(
+ name = "sysroot",
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ _CPP_LINK_EXECUTABLE_ACTION_NAME,
+ _CPP_LINK_DYNAMIC_LIBRARY_ACTION_NAME,
+ _CPP_LINK_NODEPS_DYNAMIC_LIBRARY_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["--sysroot=%{sysroot}"],
+ iterate_over = "sysroot",
+ expand_if_available = "sysroot",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ include_paths_feature = feature(
+ name = "include_paths",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/I%{quote_include_paths}"],
+ iterate_over = "quote_include_paths",
+ ),
+ flag_group(
+ flags = ["/I%{include_paths}"],
+ iterate_over = "include_paths",
+ ),
+ flag_group(
+ flags = ["/I%{system_include_paths}"],
+ iterate_over = "system_include_paths",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ dependency_file_feature = feature(
+ name = "dependency_file",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/DEPENDENCY_FILE", "%{dependency_file}"],
+ expand_if_available = "dependency_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ compiler_input_flags_feature = feature(
+ name = "compiler_input_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ _ASSEMBLE_ACTION_NAME,
+ _PREPROCESS_ASSEMBLE_ACTION_NAME,
+ _C_COMPILE_ACTION_NAME,
+ _CPP_COMPILE_ACTION_NAME,
+ _CPP_HEADER_PARSING_ACTION_NAME,
+ _CPP_MODULE_COMPILE_ACTION_NAME,
+ _CPP_MODULE_CODEGEN_ACTION_NAME,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/c", "%{source_file}"],
+ expand_if_available = "source_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ fastbuild_feature = feature(name = "fastbuild")
+
+ features = None
+ if (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ features = [
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ supports_dynamic_linker_feature,
+ objcopy_embed_flags_feature,
+ ]
+ elif (ctx.attr.cpu == "darwin"):
+ features = [
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ supports_dynamic_linker_feature,
+ supports_pic_feature,
+ objcopy_embed_flags_feature,
+ dbg_feature,
+ opt_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ ]
+ elif (ctx.attr.cpu == "freebsd" or
+ ctx.attr.cpu == "local"):
+ features = [
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ supports_dynamic_linker_feature,
+ supports_pic_feature,
+ objcopy_embed_flags_feature,
+ opt_feature,
+ dbg_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ ]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang" or
+ ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw" or
+ ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ features = [
+ default_compile_flags_feature,
+ supports_dynamic_linker_feature,
+ objcopy_embed_flags_feature,
+ ]
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ features = [
+ default_link_flags_feature,
+ random_seed_feature,
+ default_compile_flags_feature,
+ include_paths_feature,
+ dependency_file_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ compiler_output_flags_feature,
+ compiler_input_flags_feature,
+ dbg_feature,
+ fastbuild_feature,
+ opt_feature,
+ ]
+ elif (ctx.attr.cpu == "armeabi-v7a"):
+ features = [supports_dynamic_linker_feature, supports_pic_feature]
+
+ if (ctx.attr.cpu == "armeabi-v7a"):
+ cxx_builtin_include_directories = []
+ elif (ctx.attr.cpu == "darwin"):
+ cxx_builtin_include_directories = ["/"]
+ elif (ctx.attr.cpu == "freebsd"):
+ cxx_builtin_include_directories = ["/usr/lib/clang", "/usr/local/include", "/usr/include"]
+ elif (ctx.attr.cpu == "local" or
+ ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang"):
+ cxx_builtin_include_directories = ["/usr/lib/gcc/", "/usr/local/include", "/usr/include"]
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ cxx_builtin_include_directories = [
+ "C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/INCLUDE",
+ "C:/Program Files (x86)/Windows Kits/10/include/",
+ "C:/Program Files (x86)/Windows Kits/8.1/include/",
+ "C:/Program Files (x86)/GnuWin32/include/",
+ "C:/python_27_amd64/files/include",
+ ]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw"):
+ cxx_builtin_include_directories = ["C:/mingw/include", "C:/mingw/lib/gcc"]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ cxx_builtin_include_directories = ["C:/tools/msys64/mingw64/x86_64-w64-mingw32/include"]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ cxx_builtin_include_directories = ["C:/tools/msys64/", "/usr/"]
+ else:
+ fail("Unreachable")
+
+ artifact_name_patterns = []
+
+ make_variables = []
+
+ if (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64_mingw64"):
+ tool_paths = [
+ tool_path(
+ name = "ar",
+ path = "C:/tools/msys64/mingw64/bin/ar",
+ ),
+ tool_path(
+ name = "compat-ld",
+ path = "C:/tools/msys64/mingw64/bin/ld",
+ ),
+ tool_path(
+ name = "cpp",
+ path = "C:/tools/msys64/mingw64/bin/cpp",
+ ),
+ tool_path(
+ name = "dwp",
+ path = "C:/tools/msys64/mingw64/bin/dwp",
+ ),
+ tool_path(
+ name = "gcc",
+ path = "C:/tools/msys64/mingw64/bin/gcc",
+ ),
+ tool_path(
+ name = "gcov",
+ path = "C:/tools/msys64/mingw64/bin/gcov",
+ ),
+ tool_path(
+ name = "ld",
+ path = "C:/tools/msys64/mingw64/bin/ld",
+ ),
+ tool_path(
+ name = "nm",
+ path = "C:/tools/msys64/mingw64/bin/nm",
+ ),
+ tool_path(
+ name = "objcopy",
+ path = "C:/tools/msys64/mingw64/bin/objcopy",
+ ),
+ tool_path(
+ name = "objdump",
+ path = "C:/tools/msys64/mingw64/bin/objdump",
+ ),
+ tool_path(
+ name = "strip",
+ path = "C:/tools/msys64/mingw64/bin/strip",
+ ),
+ ]
+ elif (ctx.attr.cpu == "armeabi-v7a"):
+ tool_paths = [
+ tool_path(name = "ar", path = "/bin/false"),
+ tool_path(name = "compat-ld", path = "/bin/false"),
+ tool_path(name = "cpp", path = "/bin/false"),
+ tool_path(name = "dwp", path = "/bin/false"),
+ tool_path(name = "gcc", path = "/bin/false"),
+ tool_path(name = "gcov", path = "/bin/false"),
+ tool_path(name = "ld", path = "/bin/false"),
+ tool_path(name = "nm", path = "/bin/false"),
+ tool_path(name = "objcopy", path = "/bin/false"),
+ tool_path(name = "objdump", path = "/bin/false"),
+ tool_path(name = "strip", path = "/bin/false"),
+ ]
+ elif (ctx.attr.cpu == "freebsd"):
+ tool_paths = [
+ tool_path(name = "ar", path = "/usr/bin/ar"),
+ tool_path(name = "compat-ld", path = "/usr/bin/ld"),
+ tool_path(name = "cpp", path = "/usr/bin/cpp"),
+ tool_path(name = "dwp", path = "/usr/bin/dwp"),
+ tool_path(name = "gcc", path = "/usr/bin/clang"),
+ tool_path(name = "gcov", path = "/usr/bin/gcov"),
+ tool_path(name = "ld", path = "/usr/bin/ld"),
+ tool_path(name = "nm", path = "/usr/bin/nm"),
+ tool_path(name = "objcopy", path = "/usr/bin/objcopy"),
+ tool_path(name = "objdump", path = "/usr/bin/objdump"),
+ tool_path(name = "strip", path = "/usr/bin/strip"),
+ ]
+ elif (ctx.attr.cpu == "local"):
+ tool_paths = [
+ tool_path(name = "ar", path = "/usr/bin/ar"),
+ tool_path(name = "compat-ld", path = "/usr/bin/ld"),
+ tool_path(name = "cpp", path = "/usr/bin/cpp"),
+ tool_path(name = "dwp", path = "/usr/bin/dwp"),
+ tool_path(name = "gcc", path = "/usr/bin/gcc"),
+ tool_path(name = "gcov", path = "/usr/bin/gcov"),
+ tool_path(name = "ld", path = "/usr/bin/ld"),
+ tool_path(name = "nm", path = "/usr/bin/nm"),
+ tool_path(name = "objcopy", path = "/usr/bin/objcopy"),
+ tool_path(name = "objdump", path = "/usr/bin/objdump"),
+ tool_path(name = "strip", path = "/usr/bin/strip"),
+ ]
+ elif (ctx.attr.cpu == "darwin"):
+ tool_paths = [
+ tool_path(name = "ar", path = "/usr/bin/libtool"),
+ tool_path(name = "compat-ld", path = "/usr/bin/ld"),
+ tool_path(name = "cpp", path = "/usr/bin/cpp"),
+ tool_path(name = "dwp", path = "/usr/bin/dwp"),
+ tool_path(name = "gcc", path = "osx_cc_wrapper.sh"),
+ tool_path(name = "gcov", path = "/usr/bin/gcov"),
+ tool_path(name = "ld", path = "/usr/bin/ld"),
+ tool_path(name = "nm", path = "/usr/bin/nm"),
+ tool_path(name = "objcopy", path = "/usr/bin/objcopy"),
+ tool_path(name = "objdump", path = "/usr/bin/objdump"),
+ tool_path(name = "strip", path = "/usr/bin/strip"),
+ ]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_clang"):
+ tool_paths = [
+ tool_path(name = "ar", path = "C:/mingw/bin/ar"),
+ tool_path(
+ name = "compat-ld",
+ path = "C:/Program Files (x86)/LLVM/bin/ld",
+ ),
+ tool_path(
+ name = "cpp",
+ path = "C:/Program Files (x86)/LLVM/bin/cpp",
+ ),
+ tool_path(
+ name = "dwp",
+ path = "C:/Program Files (x86)/LLVM/bin/dwp",
+ ),
+ tool_path(
+ name = "gcc",
+ path = "C:/Program Files (x86)/LLVM/bin/clang",
+ ),
+ tool_path(
+ name = "gcov",
+ path = "C:/Program Files (x86)/LLVM/bin/gcov",
+ ),
+ tool_path(
+ name = "ld",
+ path = "C:/Program Files (x86)/LLVM/bin/ld",
+ ),
+ tool_path(
+ name = "nm",
+ path = "C:/Program Files (x86)/LLVM/bin/nm",
+ ),
+ tool_path(
+ name = "objcopy",
+ path = "C:/Program Files (x86)/LLVM/bin/objcopy",
+ ),
+ tool_path(
+ name = "objdump",
+ path = "C:/Program Files (x86)/LLVM/bin/objdump",
+ ),
+ tool_path(
+ name = "strip",
+ path = "C:/Program Files (x86)/LLVM/bin/strip",
+ ),
+ ]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_mingw"):
+ tool_paths = [
+ tool_path(name = "ar", path = "C:/mingw/bin/ar"),
+ tool_path(name = "compat-ld", path = "C:/mingw/bin/ld"),
+ tool_path(name = "cpp", path = "C:/mingw/bin/cpp"),
+ tool_path(name = "dwp", path = "C:/mingw/bin/dwp"),
+ tool_path(name = "gcc", path = "C:/mingw/bin/gcc"),
+ tool_path(name = "gcov", path = "C:/mingw/bin/gcov"),
+ tool_path(name = "ld", path = "C:/mingw/bin/ld"),
+ tool_path(name = "nm", path = "C:/mingw/bin/nm"),
+ tool_path(name = "objcopy", path = "C:/mingw/bin/objcopy"),
+ tool_path(name = "objdump", path = "C:/mingw/bin/objdump"),
+ tool_path(name = "strip", path = "C:/mingw/bin/strip"),
+ ]
+ elif (ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "windows_msys64"):
+ tool_paths = [
+ tool_path(name = "ar", path = "C:/tools/msys64/usr/bin/ar"),
+ tool_path(
+ name = "compat-ld",
+ path = "C:/tools/msys64/usr/bin/ld",
+ ),
+ tool_path(
+ name = "cpp",
+ path = "C:/tools/msys64/usr/bin/cpp",
+ ),
+ tool_path(
+ name = "dwp",
+ path = "C:/tools/msys64/usr/bin/dwp",
+ ),
+ tool_path(
+ name = "gcc",
+ path = "C:/tools/msys64/usr/bin/gcc",
+ ),
+ tool_path(
+ name = "gcov",
+ path = "C:/tools/msys64/usr/bin/gcov",
+ ),
+ tool_path(name = "ld", path = "C:/tools/msys64/usr/bin/ld"),
+ tool_path(name = "nm", path = "C:/tools/msys64/usr/bin/nm"),
+ tool_path(
+ name = "objcopy",
+ path = "C:/tools/msys64/usr/bin/objcopy",
+ ),
+ tool_path(
+ name = "objdump",
+ path = "C:/tools/msys64/usr/bin/objdump",
+ ),
+ tool_path(
+ name = "strip",
+ path = "C:/tools/msys64/usr/bin/strip",
+ ),
+ ]
+ elif (ctx.attr.cpu == "x64_windows_msvc"):
+ tool_paths = [
+ tool_path(name = "ar", path = "wrapper/bin/msvc_link.bat"),
+ tool_path(name = "cpp", path = "wrapper/bin/msvc_cl.bat"),
+ tool_path(name = "gcc", path = "wrapper/bin/msvc_cl.bat"),
+ tool_path(name = "gcov", path = "wrapper/bin/msvc_nop.bat"),
+ tool_path(name = "ld", path = "wrapper/bin/msvc_link.bat"),
+ tool_path(name = "nm", path = "wrapper/bin/msvc_nop.bat"),
+ tool_path(
+ name = "objcopy",
+ path = "wrapper/bin/msvc_nop.bat",
+ ),
+ tool_path(
+ name = "objdump",
+ path = "wrapper/bin/msvc_nop.bat",
+ ),
+ tool_path(
+ name = "strip",
+ path = "wrapper/bin/msvc_nop.bat",
+ ),
+ ]
+ else:
+ fail("Unreachable")
+
+ out = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(out, "Fake executable")
+ return [
+ cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ artifact_name_patterns = artifact_name_patterns,
+ cxx_builtin_include_directories = cxx_builtin_include_directories,
+ toolchain_identifier = toolchain_identifier,
+ host_system_name = host_system_name,
+ target_system_name = target_system_name,
+ target_cpu = target_cpu,
+ target_libc = target_libc,
+ compiler = compiler,
+ abi_version = abi_version,
+ abi_libc_version = abi_libc_version,
+ tool_paths = tool_paths,
+ make_variables = make_variables,
+ builtin_sysroot = builtin_sysroot,
+ cc_target_os = cc_target_os,
+ ),
+ DefaultInfo(
+ executable = out,
+ ),
+ ]
+
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "compiler": attr.string(),
+ "cpu": attr.string(mandatory = True),
+ "disable_static_cc_toolchains": attr.bool(),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
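+
+# Illustrative BUILD usage sketch (the target name is an assumption; the cpu and
+# compiler values are taken from the branches handled in _impl above):
+#
+#   load(":cc_toolchain_config.bzl", "cc_toolchain_config")
+#
+#   cc_toolchain_config(
+#       name = "local_linux_config",
+#       cpu = "local",
+#       compiler = "compiler",
+#   )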
diff --git a/cc/private/toolchain/clang_installation_error.bat.tpl b/cc/private/toolchain/clang_installation_error.bat.tpl
new file mode 100644
index 0000000..e3a61a4
--- /dev/null
+++ b/cc/private/toolchain/clang_installation_error.bat.tpl
@@ -0,0 +1,24 @@
+:: Copyright 2019 The Bazel Authors. All rights reserved.
+::
+:: Licensed under the Apache License, Version 2.0 (the "License");
+:: you may not use this file except in compliance with the License.
+:: You may obtain a copy of the License at
+::
+:: http://www.apache.org/licenses/LICENSE-2.0
+::
+:: Unless required by applicable law or agreed to in writing, software
+:: distributed under the License is distributed on an "AS IS" BASIS,
+:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+:: See the License for the specific language governing permissions and
+:: limitations under the License.
+
+@echo OFF
+
+echo. 1>&2
+echo The target you are compiling requires the Clang compiler. 1>&2
+echo Bazel couldn't find a valid Clang installation on your machine. 1>&2
+%{clang_error_message}
+echo Please check your installation following https://docs.bazel.build/versions/main/windows.html#using 1>&2
+echo. 1>&2
+
+exit /b 1
diff --git a/cc/private/toolchain/empty.cc b/cc/private/toolchain/empty.cc
new file mode 100644
index 0000000..4cda5c6
--- /dev/null
+++ b/cc/private/toolchain/empty.cc
@@ -0,0 +1,15 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+int main() {}
diff --git a/cc/private/toolchain/empty_cc_toolchain_config.bzl b/cc/private/toolchain/empty_cc_toolchain_config.bzl
new file mode 100644
index 0000000..5d42d06
--- /dev/null
+++ b/cc/private/toolchain/empty_cc_toolchain_config.bzl
@@ -0,0 +1,42 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A fake C++ toolchain configuration rule"""
+
+def _impl(ctx):
+ out = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(out, "Fake executable")
+ return [
+ cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ toolchain_identifier = "local_linux",
+ host_system_name = "local",
+ target_system_name = "local",
+ target_cpu = "local",
+ target_libc = "local",
+ compiler = "compiler",
+ abi_version = "local",
+ abi_libc_version = "local",
+ ),
+ DefaultInfo(
+ executable = out,
+ ),
+ ]
+
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {},
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
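+
+# Illustrative usage sketch (assumed target name): the rule takes no attributes
+# and always produces the stub "local_linux" configuration above.
+#
+#   load(":empty_cc_toolchain_config.bzl", "cc_toolchain_config")
+#
+#   cc_toolchain_config(name = "empty_cc_toolchain_config")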
diff --git a/cc/private/toolchain/freebsd_cc_toolchain_config.bzl b/cc/private/toolchain/freebsd_cc_toolchain_config.bzl
new file mode 100644
index 0000000..3521d92
--- /dev/null
+++ b/cc/private/toolchain/freebsd_cc_toolchain_config.bzl
@@ -0,0 +1,307 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A Starlark cc_toolchain configuration rule for FreeBSD."""
+
+load("@rules_cc//cc:action_names.bzl", "ACTION_NAMES")
+load(
+ "@rules_cc//cc:cc_toolchain_config_lib.bzl",
+ "action_config",
+ "feature",
+ "flag_group",
+ "flag_set",
+ "tool",
+ "tool_path",
+ "with_feature_set",
+)
+
+all_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.lto_backend,
+]
+
+all_cpp_compile_actions = [
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+]
+
+all_link_actions = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+]
+
+def _impl(ctx):
+ cpu = ctx.attr.cpu
+ compiler = "compiler"
+ toolchain_identifier = "local_freebsd" if cpu == "freebsd" else "stub_armeabi-v7a"
+ host_system_name = "local" if cpu == "freebsd" else "armeabi-v7a"
+ target_system_name = "local" if cpu == "freebsd" else "armeabi-v7a"
+ target_libc = "local" if cpu == "freebsd" else "armeabi-v7a"
+ abi_version = "local" if cpu == "freebsd" else "armeabi-v7a"
+ abi_libc_version = "local" if cpu == "freebsd" else "armeabi-v7a"
+
+ objcopy_embed_data_action = action_config(
+ action_name = "objcopy_embed_data",
+ enabled = True,
+ tools = [tool(path = "/usr/bin/objcopy")],
+ )
+
+ action_configs = [objcopy_embed_data_action] if cpu == "freebsd" else []
+
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-lstdc++",
+ "-Wl,-z,relro,-z,now",
+ "-no-canonical-prefixes",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["-Wl,--gc-sections"])],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+
+ unfiltered_compile_flags_feature = feature(
+ name = "unfiltered_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-no-canonical-prefixes",
+ "-Wno-builtin-macro-redefined",
+ "-D__DATE__=\"redacted\"",
+ "-D__TIMESTAMP__=\"redacted\"",
+ "-D__TIME__=\"redacted\"",
+ ],
+ ),
+ ],
+ ),
+ ],
+ )
+
+ supports_pic_feature = feature(name = "supports_pic", enabled = True)
+
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-U_FORTIFY_SOURCE",
+ "-D_FORTIFY_SOURCE=1",
+ "-fstack-protector",
+ "-Wall",
+ "-fno-omit-frame-pointer",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = [flag_group(flags = ["-g"])],
+ with_features = [with_feature_set(features = ["dbg"])],
+ ),
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-g0",
+ "-O2",
+ "-DNDEBUG",
+ "-ffunction-sections",
+ "-fdata-sections",
+ ],
+ ),
+ ],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ flag_set(
+ actions = all_cpp_compile_actions + [ACTION_NAMES.lto_backend],
+ flag_groups = [flag_group(flags = ["-std=c++0x"])],
+ ),
+ ],
+ )
+
+ opt_feature = feature(name = "opt")
+
+ supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
+
+ objcopy_embed_flags_feature = feature(
+ name = "objcopy_embed_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = ["objcopy_embed_data"],
+ flag_groups = [flag_group(flags = ["-I", "binary"])],
+ ),
+ ],
+ )
+
+ dbg_feature = feature(name = "dbg")
+
+ user_compile_flags_feature = feature(
+ name = "user_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_compile_flags}"],
+ iterate_over = "user_compile_flags",
+ expand_if_available = "user_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ sysroot_feature = feature(
+ name = "sysroot",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.lto_backend,
+ ] + all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["--sysroot=%{sysroot}"],
+ expand_if_available = "sysroot",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ if cpu == "freebsd":
+ features = [
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ supports_dynamic_linker_feature,
+ supports_pic_feature,
+ objcopy_embed_flags_feature,
+ opt_feature,
+ dbg_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ ]
+ else:
+ features = [supports_dynamic_linker_feature, supports_pic_feature]
+
+ if (cpu == "freebsd"):
+ cxx_builtin_include_directories = ["/usr/lib/clang", "/usr/local/include", "/usr/include"]
+ else:
+ cxx_builtin_include_directories = []
+
+ if cpu == "freebsd":
+ tool_paths = [
+ tool_path(name = "ar", path = "/usr/bin/ar"),
+ tool_path(name = "compat-ld", path = "/usr/bin/ld"),
+ tool_path(name = "cpp", path = "/usr/bin/cpp"),
+ tool_path(name = "dwp", path = "/usr/bin/dwp"),
+ tool_path(name = "gcc", path = "/usr/bin/clang"),
+ tool_path(name = "gcov", path = "/usr/bin/gcov"),
+ tool_path(name = "ld", path = "/usr/bin/ld"),
+ tool_path(name = "nm", path = "/usr/bin/nm"),
+ tool_path(name = "objcopy", path = "/usr/bin/objcopy"),
+ tool_path(name = "objdump", path = "/usr/bin/objdump"),
+ tool_path(name = "strip", path = "/usr/bin/strip"),
+ ]
+ else:
+ tool_paths = [
+ tool_path(name = "ar", path = "/bin/false"),
+ tool_path(name = "compat-ld", path = "/bin/false"),
+ tool_path(name = "cpp", path = "/bin/false"),
+ tool_path(name = "dwp", path = "/bin/false"),
+ tool_path(name = "gcc", path = "/bin/false"),
+ tool_path(name = "gcov", path = "/bin/false"),
+ tool_path(name = "ld", path = "/bin/false"),
+ tool_path(name = "nm", path = "/bin/false"),
+ tool_path(name = "objcopy", path = "/bin/false"),
+ tool_path(name = "objdump", path = "/bin/false"),
+ tool_path(name = "strip", path = "/bin/false"),
+ ]
+
+ out = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(out, "Fake executable")
+ return [
+ cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ cxx_builtin_include_directories = cxx_builtin_include_directories,
+ toolchain_identifier = toolchain_identifier,
+ host_system_name = host_system_name,
+ target_system_name = target_system_name,
+ target_cpu = cpu,
+ target_libc = target_libc,
+ compiler = compiler,
+ abi_version = abi_version,
+ abi_libc_version = abi_libc_version,
+ tool_paths = tool_paths,
+ ),
+ DefaultInfo(
+ executable = out,
+ ),
+ ]
+
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "cpu": attr.string(mandatory = True),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
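+
+# Illustrative BUILD usage sketch (assumed target name): cpu = "freebsd" selects
+# the real FreeBSD configuration, any other value falls back to the
+# armeabi-v7a stub.
+#
+#   load(":freebsd_cc_toolchain_config.bzl", "cc_toolchain_config")
+#
+#   cc_toolchain_config(
+#       name = "local_freebsd_config",
+#       cpu = "freebsd",
+#   )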
diff --git a/cc/private/toolchain/grep-includes.sh b/cc/private/toolchain/grep-includes.sh
new file mode 100755
index 0000000..ee51361
--- /dev/null
+++ b/cc/private/toolchain/grep-includes.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+#
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# TODO(bazel-team): Support include scanning and grep-includes in Bazel
+echo "grep-includes is not supported by Bazel"
+exit 1
diff --git a/cc/private/toolchain/lib_cc_configure.bzl b/cc/private/toolchain/lib_cc_configure.bzl
new file mode 100644
index 0000000..bcd9013
--- /dev/null
+++ b/cc/private/toolchain/lib_cc_configure.bzl
@@ -0,0 +1,286 @@
+# pylint: disable=g-bad-file-header
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Base library for configuring the C++ toolchain."""
+
+def resolve_labels(repository_ctx, labels):
+ """Resolves a collection of labels to their paths.
+
+ Label resolution can cause the evaluation of Starlark functions to restart.
+ For functions with side-effects (like the auto-configuration functions, which
+ inspect the system and touch the file system), such restarts are costly.
+ We cannot avoid the restarts, but we can minimize their penalty by resolving
+ all labels upfront.
+
+ Among other things, doing less work on restarts can cut analysis times by
+ several seconds and may also prevent tickling kernel conditions that cause
+ build failures. See https://github.com/bazelbuild/bazel/issues/5196 for
+ more details.
+
+ Args:
+ repository_ctx: The context with which to resolve the labels.
+ labels: Labels to be resolved expressed as a list of strings.
+
+ Returns:
+ A dictionary with the labels as keys and their paths as values.
+ """
+ return dict([(label, repository_ctx.path(Label(label))) for label in labels])
+
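+# Illustrative usage sketch (the label exists in this repository; the call shape
+# mirrors cc_autoconf_impl earlier in this change): resolve every label once up
+# front, then reuse the returned paths so restarts do not repeat the lookups.
+#
+#   paths = resolve_labels(repository_ctx, [
+#       "@rules_cc//cc/private/toolchain:BUILD.static.freebsd",
+#   ])
+#   repository_ctx.symlink(paths["@rules_cc//cc/private/toolchain:BUILD.static.freebsd"], "BUILD")
+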
+def escape_string(arg):
+ """Escape percent sign (%) in the string so it can appear in the Crosstool."""
+ if arg != None:
+ return str(arg).replace("%", "%%")
+ else:
+ return None
+
+def split_escaped(string, delimiter):
+ """Split string on the delimiter unless %-escaped.
+
+ Examples:
+ Basic usage:
+ split_escaped("a:b:c", ":") -> [ "a", "b", "c" ]
+
+      A delimiter that is not supposed to be split on has to be %-escaped:
+ split_escaped("a%:b", ":") -> [ "a:b" ]
+
+ Literal % can be represented by escaping it as %%:
+ split_escaped("a%%b", ":") -> [ "a%b" ]
+
+ Consecutive delimiters produce empty strings:
+        split_escaped("a::b", ":") -> [ "a", "", "b" ]
+
+ Args:
+ string: The string to be split.
+ delimiter: Non-empty string not containing %-sign to be used as a
+ delimiter.
+
+ Returns:
+ A list of substrings.
+ """
+ if delimiter == "":
+ fail("Delimiter cannot be empty")
+ if delimiter.find("%") != -1:
+ fail("Delimiter cannot contain %-sign")
+
+ i = 0
+ result = []
+ accumulator = []
+ length = len(string)
+ delimiter_length = len(delimiter)
+
+ if not string:
+ return []
+
+ # Iterate over the length of string since Starlark doesn't have while loops
+ for _ in range(length):
+ if i >= length:
+ break
+ if i + 2 <= length and string[i:i + 2] == "%%":
+ accumulator.append("%")
+ i += 2
+ elif (i + 1 + delimiter_length <= length and
+ string[i:i + 1 + delimiter_length] == "%" + delimiter):
+ accumulator.append(delimiter)
+ i += 1 + delimiter_length
+ elif i + delimiter_length <= length and string[i:i + delimiter_length] == delimiter:
+ result.append("".join(accumulator))
+ accumulator = []
+ i += delimiter_length
+ else:
+ accumulator.append(string[i])
+ i += 1
+
+ # Append the last group still in accumulator
+ result.append("".join(accumulator))
+ return result
+
+def auto_configure_fail(msg):
+ """Output failure message when auto configuration fails."""
+ red = "\033[0;31m"
+ no_color = "\033[0m"
+ fail("\n%sAuto-Configuration Error:%s %s\n" % (red, no_color, msg))
+
+def auto_configure_warning(msg):
+ """Output warning message during auto configuration."""
+ yellow = "\033[1;33m"
+ no_color = "\033[0m"
+
+ # buildifier: disable=print
+ print("\n%sAuto-Configuration Warning:%s %s\n" % (yellow, no_color, msg))
+
+def get_env_var(repository_ctx, name, default = None, enable_warning = True):
+    """Find an environment variable in the system environment. Doesn't %-escape the value!
+
+ Args:
+ repository_ctx: The repository context.
+ name: Name of the environment variable.
+ default: Default value to be used when such environment variable is not present.
+ enable_warning: Show warning if the variable is not present.
+ Returns:
+ value of the environment variable or default.
+ """
+
+ if name in repository_ctx.os.environ:
+ return repository_ctx.os.environ[name]
+ if default != None:
+ if enable_warning:
+ auto_configure_warning("'%s' environment variable is not set, using '%s' as default" % (name, default))
+ return default
+ return auto_configure_fail("'%s' environment variable is not set" % name)
+
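+# Illustrative call sketch (the "CC" variable name and default are assumptions):
+#
+#   cc_name = get_env_var(repository_ctx, "CC", default = "gcc", enable_warning = False)
+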
+def which(repository_ctx, cmd, default = None):
+ """A wrapper around repository_ctx.which() to provide a fallback value. Doesn't %-escape the value!
+
+ Args:
+ repository_ctx: The repository context.
+ cmd: name of the executable to resolve.
+ default: Value to be returned when such executable couldn't be found.
+ Returns:
+ absolute path to the cmd or default when not found.
+ """
+ result = repository_ctx.which(cmd)
+ return default if result == None else str(result)
+
+def which_cmd(repository_ctx, cmd, default = None):
+ """Find cmd in PATH using repository_ctx.which() and fail if cannot find it. Doesn't %-escape the cmd!
+
+ Args:
+ repository_ctx: The repository context.
+ cmd: name of the executable to resolve.
+ default: Value to be returned when such executable couldn't be found.
+ Returns:
+ absolute path to the cmd or default when not found.
+ """
+ result = repository_ctx.which(cmd)
+ if result != None:
+ return str(result)
+ path = get_env_var(repository_ctx, "PATH")
+ if default != None:
+ auto_configure_warning("Cannot find %s in PATH, using '%s' as default.\nPATH=%s" % (cmd, default, path))
+ return default
+ auto_configure_fail("Cannot find %s in PATH, please make sure %s is installed and add its directory in PATH.\nPATH=%s" % (cmd, cmd, path))
+ return str(result)
+
+def execute(
+ repository_ctx,
+ command,
+ environment = None,
+ expect_failure = False):
+ """Execute a command, return stdout if succeed and throw an error if it fails. Doesn't %-escape the result!
+
+ Args:
+ repository_ctx: The repository context.
+ command: command to execute.
+ environment: dictionary with environment variables to set for the command.
+ expect_failure: True if the command is expected to fail.
+ Returns:
+ stdout of the executed command.
+ """
+ if environment:
+ result = repository_ctx.execute(command, environment = environment)
+ else:
+ result = repository_ctx.execute(command)
+ if expect_failure != (result.return_code != 0):
+ if expect_failure:
+ auto_configure_fail(
+ "expected failure, command %s, stderr: (%s)" % (
+ command,
+ result.stderr,
+ ),
+ )
+ else:
+ auto_configure_fail(
+ "non-zero exit code: %d, command %s, stderr: (%s)" % (
+ result.return_code,
+ command,
+ result.stderr,
+ ),
+ )
+ stripped_stdout = result.stdout.strip()
+ if not stripped_stdout:
+ auto_configure_fail(
+ "empty output from command %s, stderr: (%s)" % (command, result.stderr),
+ )
+ return stripped_stdout
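+
+# Hypothetical example (no such call is made in this file): something like
+#   execute(repository_ctx, ["/usr/bin/gcc", "--version"])
+# would return the stripped stdout of the command, and would call
+# auto_configure_fail if it exited non-zero or produced empty output.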
+
+def get_cpu_value(repository_ctx):
+ """Compute the cpu_value based on the OS name. Doesn't %-escape the result!
+
+ Args:
+ repository_ctx: The repository context.
+ Returns:
+ One of (darwin, freebsd, x64_windows, ppc, s390x, arm, aarch64, k8, piii)
+ """
+ os_name = repository_ctx.os.name.lower()
+ if os_name.startswith("mac os"):
+ return "darwin"
+ if os_name.find("freebsd") != -1:
+ return "freebsd"
+ if os_name.find("windows") != -1:
+ return "x64_windows"
+
+ # Use uname to figure out the CPU architecture (ppc, s390x, arm, aarch64, or x86)
+ result = repository_ctx.execute(["uname", "-m"])
+ if result.stdout.strip() in ["power", "ppc64le", "ppc", "ppc64"]:
+ return "ppc"
+ if result.stdout.strip() in ["s390x"]:
+ return "s390x"
+ if result.stdout.strip() in ["arm", "armv7l"]:
+ return "arm"
+ if result.stdout.strip() in ["aarch64"]:
+ return "aarch64"
+ return "k8" if result.stdout.strip() in ["amd64", "x86_64", "x64"] else "piii"
+
+def is_cc_configure_debug(repository_ctx):
+ """Returns True if CC_CONFIGURE_DEBUG is set to 1."""
+ env = repository_ctx.os.environ
+ return "CC_CONFIGURE_DEBUG" in env and env["CC_CONFIGURE_DEBUG"] == "1"
+
+def build_flags(flags):
+ """Convert `flags` to a string of flag fields."""
+ return "\n".join([" flag: '" + flag + "'" for flag in flags])
+
+def get_starlark_list(values):
+ """Convert a list of string into a string that can be passed to a rule attribute."""
+ if not values:
+ return ""
+ return "\"" + "\",\n \"".join(values) + "\""
+
+def auto_configure_warning_maybe(repository_ctx, msg):
+ """Output warning message when CC_CONFIGURE_DEBUG is enabled."""
+ if is_cc_configure_debug(repository_ctx):
+ auto_configure_warning(msg)
+
+def write_builtin_include_directory_paths(repository_ctx, cc, directories, file_suffix = ""):
+ """Generate output file named 'builtin_include_directory_paths' in the root of the repository."""
+ if get_env_var(repository_ctx, "BAZEL_IGNORE_SYSTEM_HEADERS_VERSIONS", "0", False) == "1":
+ repository_ctx.file(
+ "builtin_include_directory_paths" + file_suffix,
+ """This file is generated by cc_configure and normally contains builtin include directories
+that the C++ compiler reported. But because BAZEL_IGNORE_SYSTEM_HEADERS_VERSIONS was set to 1,
+header include directory paths are intentionally not put there.
+""",
+ )
+ else:
+ repository_ctx.file(
+ "builtin_include_directory_paths" + file_suffix,
+ """This file is generated by cc_configure and contains builtin include directories
+that %s reported. This file is a dependency of every compilation action and
+changes to it will be reflected in the action cache key. When some of these
+paths change, Bazel will make sure to rerun the action, even though none of
+the declared action inputs or the action command line changes.
+
+%s
+""" % (cc, "\n".join(directories)),
+ )
diff --git a/cc/private/toolchain/link_dynamic_library.sh b/cc/private/toolchain/link_dynamic_library.sh
new file mode 100755
index 0000000..c71d498
--- /dev/null
+++ b/cc/private/toolchain/link_dynamic_library.sh
@@ -0,0 +1,113 @@
+#!/bin/bash
+#
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script handles interface library generation for the dynamic library
+# link action.
+#
+# Bazel can be configured to use an external interface library builder
+# script to generate interface libraries in CppLinkAction for dynamic
+# libraries. This is not needed on Windows (where "interface" libraries are
+# generated by default). This script therefore handles both the case when an
+# external builder script is provided and the case when no script should be
+# used.
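+#
+# Illustrative parameter file (hypothetical paths), matching the parsing logic
+# below: the first five lines are fixed, everything after them is forwarded to
+# the linker command unchanged.
+#
+#   yes
+#   bazel-out/host/bin/build_interface_so
+#   bazel-out/k8-fastbuild/bin/libfoo.so
+#   bazel-out/k8-fastbuild/bin/libfoo.ifso
+#   /usr/bin/gcc
+#   -shared
+#   -o
+#   bazel-out/k8-fastbuild/bin/libfoo.so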
+
+set -eu
+
+E_LINKER_COMMAND_NOT_FOUND=12
+E_INTERFACE_BUILDER_NOT_FOUND=13
+
+
+SUFFIX=".rewritten"
+
+other_args=""
+
+if [[ "$#" -eq 1 ]]; then
+ if [[ "$1" != @* ]]; then
+ echo "Parameter file must start with @" 1>&2;
+ exit "$E_LINKER_COMMAND_NOT_FOUND"
+ fi
+
+ filename=$(echo "$1" | cut -c2-)
+ first_five_lines=$(head -n 5 $filename)
+
+ # Switch controlling whether to generate an interface library (<yes|no>); if
+ # the value is "no", the following 3 args are ignored (but must be present)
+ GENERATE_INTERFACE_LIBRARY=$(echo "$first_five_lines" | head -n1 | tail -n1)
+ # Tool which can generate interface library from dynamic library file
+ INTERFACE_LIBRARY_BUILDER=$(echo "$first_five_lines" | head -n2 | tail -n1)
+ # Dynamic library from which we want to generate interface library
+ DYNAMIC_LIBRARY=$(echo "$first_five_lines" | head -n3 | tail -n1)
+ # Resulting interface library
+ INTERFACE_LIBRARY=$(echo "$first_five_lines" | head -n4 | tail -n1)
+ # The command used to generate the dynamic library
+ LINKER_COMMAND=$(echo "$first_five_lines" | head -n5 | tail -n1)
+
+ rest_of_lines=$(tail -n +6 $filename)
+ new_param_file="${filename}${SUFFIX}"
+ echo "$rest_of_lines" > $new_param_file
+ other_args="@$new_param_file"
+
+ if [[ ! -e "$LINKER_COMMAND" ]]; then
+ echo "Linker command ($LINKER_COMMAND) not found." 1>&2;
+ exit "$E_LINKER_COMMAND_NOT_FOUND"
+ fi
+
+ if [[ "no" == "$GENERATE_INTERFACE_LIBRARY" ]]; then
+ INTERFACE_GENERATION=:
+ else
+ if [[ ! -e "$INTERFACE_LIBRARY_BUILDER" ]]; then
+ echo "Interface library builder ($INTERFACE_LIBRARY_BUILDER)
+ not found." 1>&2;
+ exit "$E_INTERFACE_BUILDER_NOT_FOUND"
+ fi
+ INTERFACE_GENERATION="${INTERFACE_LIBRARY_BUILDER} ${DYNAMIC_LIBRARY}
+ ${INTERFACE_LIBRARY}"
+ fi
+
+ ${LINKER_COMMAND} "$other_args" && ${INTERFACE_GENERATION}
+else
+ # TODO(b/113358321): Remove this branch once projects are migrated to not
+ # splitting the linking command line.
+ # Switch controlling whether to generate an interface library (<yes|no>); if
+ # the value is "no", the following 3 args are ignored (but must be present)
+ GENERATE_INTERFACE_LIBRARY="$1"
+ # Tool which can generate interface library from dynamic library file
+ INTERFACE_LIBRARY_BUILDER="$2"
+ # Dynamic library from which we want to generate interface library
+ DYNAMIC_LIBRARY="$3"
+ # Resulting interface library
+ INTERFACE_LIBRARY="$4"
+ # The command used to generate the dynamic library
+ LINKER_COMMAND="$5"
+ shift 5
+ if [[ ! -e "$LINKER_COMMAND" ]]; then
+ echo "Linker command ($LINKER_COMMAND) not found." 1>&2;
+ exit "$E_LINKER_COMMAND_NOT_FOUND"
+ fi
+
+ if [[ "no" == "$GENERATE_INTERFACE_LIBRARY" ]]; then
+ INTERFACE_GENERATION=:
+ else
+ if [[ ! -e "$INTERFACE_LIBRARY_BUILDER" ]]; then
+ echo "Interface library builder ($INTERFACE_LIBRARY_BUILDER)
+ not found." 1>&2;
+ exit "$E_INTERFACE_BUILDER_NOT_FOUND"
+ fi
+ INTERFACE_GENERATION="${INTERFACE_LIBRARY_BUILDER} ${DYNAMIC_LIBRARY}
+ ${INTERFACE_LIBRARY}"
+ fi
+
+ ${LINKER_COMMAND} "$@" && ${INTERFACE_GENERATION}
+fi
diff --git a/cc/private/toolchain/linux_cc_wrapper.sh.tpl b/cc/private/toolchain/linux_cc_wrapper.sh.tpl
new file mode 100644
index 0000000..a83be50
--- /dev/null
+++ b/cc/private/toolchain/linux_cc_wrapper.sh.tpl
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Copyright 2015 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Ship the environment to the C++ action
+#
+set -eu
+
+# Set-up the environment
+%{env}
+
+# Call the C++ compiler
+%{cc} "$@"
diff --git a/cc/private/toolchain/msys_gcc_installation_error.bat b/cc/private/toolchain/msys_gcc_installation_error.bat
new file mode 100644
index 0000000..25c3553
--- /dev/null
+++ b/cc/private/toolchain/msys_gcc_installation_error.bat
@@ -0,0 +1,23 @@
+:: Copyright 2018 The Bazel Authors. All rights reserved.
+::
+:: Licensed under the Apache License, Version 2.0 (the "License");
+:: you may not use this file except in compliance with the License.
+:: You may obtain a copy of the License at
+::
+:: http://www.apache.org/licenses/LICENSE-2.0
+::
+:: Unless required by applicable law or agreed to in writing, software
+:: distributed under the License is distributed on an "AS IS" BASIS,
+:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+:: See the License for the specific language governing permissions and
+:: limitations under the License.
+
+@echo OFF
+
+echo. 1>&2
+echo The target you are compiling requires MSYS gcc / MINGW gcc. 1>&2
+echo Bazel couldn't find a gcc installation on your machine. 1>&2
+echo Please install MSYS gcc / MINGW gcc and set the BAZEL_SH environment variable. 1>&2
+echo. 1>&2
+
+exit /b 1
diff --git a/cc/private/toolchain/osx_cc_wrapper.sh.tpl b/cc/private/toolchain/osx_cc_wrapper.sh.tpl
new file mode 100644
index 0000000..28bd47b
--- /dev/null
+++ b/cc/private/toolchain/osx_cc_wrapper.sh.tpl
@@ -0,0 +1,119 @@
+#!/bin/bash
+#
+# Copyright 2015 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# relpath does not really work on OS X. This is a wrapper script around gcc
+# that simulates relpath behavior.
+#
+# This wrapper uses install_name_tool to replace all paths in the binary
+# (bazel-out/.../path/to/original/library.so) with paths relative to
+# the binary. It parses the command line so that rpath behaves the way it
+# is supposed to.
+#
+# See https://blogs.oracle.com/dipol/entry/dynamic_libraries_rpath_and_mac
+# on how to set those paths for Mach-O binaries.
+#
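+# Illustrative effect (hypothetical paths): for a link command containing
+# "-Lbazel-out/darwin-fastbuild/bin/foo -lbar -Wl,-rpath,@loader_path/foo",
+# the loop at the bottom of this script would run roughly
+#   install_name_tool -change bazel-out/darwin-fastbuild/bin/foo/libbar.dylib \
+#     "@loader_path/foo/libbar.dylib" <output binary>
+#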
+set -eu
+
+INSTALL_NAME_TOOL="/usr/bin/install_name_tool"
+
+LIBS=
+LIB_DIRS=
+RPATHS=
+OUTPUT=
+
+function parse_option() {
+ local -r opt="$1"
+ if [[ "${OUTPUT}" = "1" ]]; then
+ OUTPUT=$opt
+ elif [[ "$opt" =~ ^-l(.*)$ ]]; then
+ LIBS="${BASH_REMATCH[1]} $LIBS"
+ elif [[ "$opt" =~ ^-L(.*)$ ]]; then
+ LIB_DIRS="${BASH_REMATCH[1]} $LIB_DIRS"
+ elif [[ "$opt" =~ ^-Wl,-rpath,\@loader_path/(.*)$ ]]; then
+ RPATHS="${BASH_REMATCH[1]} ${RPATHS}"
+ elif [[ "$opt" = "-o" ]]; then
+ # output is coming
+ OUTPUT=1
+ fi
+}
+
+# Parse the option list
+for i in "$@"; do
+ if [[ "$i" = @* ]]; then
+ while IFS= read -r opt
+ do
+ parse_option "$opt"
+ done < "${i:1}" || exit 1
+ else
+ parse_option "$i"
+ fi
+done
+
+# Set-up the environment
+%{env}
+
+# Call the C++ compiler
+%{cc} "$@"
+
+function get_library_path() {
+ for libdir in ${LIB_DIRS}; do
+ if [ -f ${libdir}/lib$1.so ]; then
+ echo "${libdir}/lib$1.so"
+ elif [ -f ${libdir}/lib$1.dylib ]; then
+ echo "${libdir}/lib$1.dylib"
+ fi
+ done
+}
+
+# A convenient function to return the actual path, handling both non-symlinks
+# and multi-level symlinks.
+function get_realpath() {
+ local previous="$1"
+ local next=$(readlink "${previous}")
+ while [ -n "${next}" ]; do
+ previous="${next}"
+ next=$(readlink "${previous}")
+ done
+ echo "${previous}"
+}
+
+# Get the path of a lib inside a tool
+function get_otool_path() {
+ # the lib path is the path of the original lib relative to the workspace
+ get_realpath $1 | sed 's|^.*/bazel-out/|bazel-out/|'
+}
+
+# Do replacements in the output
+for rpath in ${RPATHS}; do
+ for lib in ${LIBS}; do
+ unset libname
+ if [ -f "$(dirname ${OUTPUT})/${rpath}/lib${lib}.so" ]; then
+ libname="lib${lib}.so"
+ elif [ -f "$(dirname ${OUTPUT})/${rpath}/lib${lib}.dylib" ]; then
+ libname="lib${lib}.dylib"
+ fi
+ # ${libname-} --> expands to $libname if defined, or to the empty string
+ # otherwise. This keeps the check friendly to "set -eu".
+ if [[ -n "${libname-}" ]]; then
+ libpath=$(get_library_path ${lib})
+ if [ -n "${libpath}" ]; then
+ ${INSTALL_NAME_TOOL} -change $(get_otool_path "${libpath}") \
+ "@loader_path/${rpath}/${libname}" "${OUTPUT}"
+ fi
+ fi
+ done
+done
+
diff --git a/cc/private/toolchain/unix_cc_configure.bzl b/cc/private/toolchain/unix_cc_configure.bzl
new file mode 100644
index 0000000..0c936de
--- /dev/null
+++ b/cc/private/toolchain/unix_cc_configure.bzl
@@ -0,0 +1,587 @@
+# pylint: disable=g-bad-file-header
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Configuring the C++ toolchain on Unix platforms."""
+
+load(
+ ":lib_cc_configure.bzl",
+ "auto_configure_fail",
+ "auto_configure_warning",
+ "auto_configure_warning_maybe",
+ "escape_string",
+ "get_env_var",
+ "get_starlark_list",
+ "resolve_labels",
+ "split_escaped",
+ "which",
+ "write_builtin_include_directory_paths",
+)
+
+def _uniq(iterable):
+ """Remove duplicates from a list."""
+
+ unique_elements = {element: None for element in iterable}
+ return unique_elements.keys()
+
+def _prepare_include_path(repo_ctx, path):
+ """Resolve and sanitize include path before outputting it into the crosstool.
+
+ Args:
+ repo_ctx: repository_ctx object.
+ path: an include path to be sanitized.
+
+ Returns:
+ Sanitized include path that can be written to the crosstool. The resulting
+ path is absolute if it is outside the repository and relative otherwise.
+ """
+
+ repo_root = str(repo_ctx.path("."))
+
+ # We're on UNIX, so the path delimiter is '/'.
+ repo_root += "/"
+ path = str(repo_ctx.path(path))
+ if path.startswith(repo_root):
+ return escape_string(path[len(repo_root):])
+ return escape_string(path)
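+
+# Illustrative behavior of _prepare_include_path (hypothetical paths): an
+# include directory such as /usr/lib/gcc/x86_64-linux-gnu/9/include is kept
+# absolute, while a directory that lives under the repository root is emitted
+# relative to it; any literal "%" in the path is escaped as "%%" either way.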
+
+def _find_tool(repository_ctx, tool, overriden_tools):
+ """Find a tool for repository, taking overriden tools into account."""
+ if tool in overriden_tools:
+ return overriden_tools[tool]
+ return which(repository_ctx, tool, "/usr/bin/" + tool)
+
+def _get_tool_paths(repository_ctx, overriden_tools):
+ """Compute the %-escaped path to the various tools"""
+ return dict({
+ k: escape_string(_find_tool(repository_ctx, k, overriden_tools))
+ for k in [
+ "ar",
+ "ld",
+ "cpp",
+ "gcc",
+ "dwp",
+ "gcov",
+ "nm",
+ "objcopy",
+ "objdump",
+ "strip",
+ ]
+ }.items())
+
+def _escaped_cplus_include_paths(repository_ctx):
+ """Use ${CPLUS_INCLUDE_PATH} to compute the %-escaped list of flags for cxxflag."""
+ if "CPLUS_INCLUDE_PATH" in repository_ctx.os.environ:
+ result = []
+ for p in repository_ctx.os.environ["CPLUS_INCLUDE_PATH"].split(":"):
+ p = escape_string(str(repository_ctx.path(p))) # Normalize the path
+ result.append("-I" + p)
+ return result
+ else:
+ return []
+
+_INC_DIR_MARKER_BEGIN = "#include <...>"
+
+# OSX add " (framework directory)" at the end of line, strip it.
+_OSX_FRAMEWORK_SUFFIX = " (framework directory)"
+_OSX_FRAMEWORK_SUFFIX_LEN = len(_OSX_FRAMEWORK_SUFFIX)
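+
+# For reference, get_escaped_cxx_inc_directories below parses the block that
+# compilers typically print on stderr for `cc -E -xc++ - -v` (directory names
+# here are illustrative):
+#   #include <...> search starts here:
+#    /usr/include/c++/9
+#    /usr/lib/gcc/x86_64-linux-gnu/9/include
+#    /usr/include
+#   End of search list.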
+
+def _cxx_inc_convert(path):
+ """Convert path returned by cc -E xc++ in a complete path. Doesn't %-escape the path!"""
+ path = path.strip()
+ if path.endswith(_OSX_FRAMEWORK_SUFFIX):
+ path = path[:-_OSX_FRAMEWORK_SUFFIX_LEN].strip()
+ return path
+
+def get_escaped_cxx_inc_directories(repository_ctx, cc, lang_flag, additional_flags = []):
+ """Compute the list of default %-escaped C++ include directories.
+
+ Args:
+ repository_ctx: The repository context.
+ cc: path to the C compiler.
+ lang_flag: value for the language flag (c, c++).
+ additional_flags: additional flags to pass to cc.
+ Returns:
+ a list of escaped system include directories.
+ """
+ result = repository_ctx.execute([cc, "-E", lang_flag, "-", "-v"] + additional_flags)
+ index1 = result.stderr.find(_INC_DIR_MARKER_BEGIN)
+ if index1 == -1:
+ return []
+ index1 = result.stderr.find("\n", index1)
+ if index1 == -1:
+ return []
+ index2 = result.stderr.rfind("\n ")
+ if index2 == -1 or index2 < index1:
+ return []
+ index2 = result.stderr.find("\n", index2 + 1)
+ if index2 == -1:
+ inc_dirs = result.stderr[index1 + 1:]
+ else:
+ inc_dirs = result.stderr[index1 + 1:index2].strip()
+
+ inc_directories = [
+ _prepare_include_path(repository_ctx, _cxx_inc_convert(p))
+ for p in inc_dirs.split("\n")
+ ]
+
+ if _is_compiler_option_supported(repository_ctx, cc, "-print-resource-dir"):
+ resource_dir = repository_ctx.execute(
+ [cc, "-print-resource-dir"],
+ ).stdout.strip() + "/share"
+ inc_directories.append(_prepare_include_path(repository_ctx, resource_dir))
+
+ return inc_directories
+
+def _is_compiler_option_supported(repository_ctx, cc, option):
+ """Checks that `option` is supported by the C compiler. Doesn't %-escape the option."""
+ result = repository_ctx.execute([
+ cc,
+ option,
+ "-o",
+ "/dev/null",
+ "-c",
+ str(repository_ctx.path("tools/cpp/empty.cc")),
+ ])
+ return result.stderr.find(option) == -1
+
+def _is_linker_option_supported(repository_ctx, cc, option, pattern):
+ """Checks that `option` is supported by the C linker. Doesn't %-escape the option."""
+ result = repository_ctx.execute([
+ cc,
+ option,
+ "-o",
+ "/dev/null",
+ str(repository_ctx.path("tools/cpp/empty.cc")),
+ ])
+ return result.stderr.find(pattern) == -1
+
+def _find_gold_linker_path(repository_ctx, cc):
+ """Checks if `gold` is supported by the C compiler.
+
+ Args:
+ repository_ctx: repository_ctx.
+ cc: path to the C compiler.
+
+ Returns:
+ String to use as the value of the -fuse-ld= flag, or None if gold couldn't be found.
+ """
+ result = repository_ctx.execute([
+ cc,
+ str(repository_ctx.path("tools/cpp/empty.cc")),
+ "-o",
+ "/dev/null",
+ # Some macOS clang versions don't fail when setting -fuse-ld=gold, so we add
+ # these flags to force a failure. This also means that we will not detect
+ # gold when only a very old version (from 2010 or earlier) is present.
+ "-Wl,--start-lib",
+ "-Wl,--end-lib",
+ "-fuse-ld=gold",
+ "-v",
+ ])
+ if result.return_code != 0:
+ return None
+
+ for line in result.stderr.splitlines():
+ if line.find("gold") == -1:
+ continue
+ for flag in line.split(" "):
+ if flag.find("gold") == -1:
+ continue
+ if flag.find("--enable-gold") > -1 or flag.find("--with-plugin-ld") > -1:
+ # skip build configuration options of gcc itself
+ # TODO(hlopko): Add redhat-like worker on the CI (#9392)
+ continue
+
+ # flag is '-fuse-ld=gold' for GCC or "/usr/lib/ld.gold" for Clang
+ # strip space, single quote, and double quotes
+ flag = flag.strip(" \"'")
+
+ # remove -fuse-ld= from GCC output so we have only the flag value part
+ flag = flag.replace("-fuse-ld=", "")
+ return flag
+ auto_configure_warning(
+ "CC with -fuse-ld=gold returned 0, but its -v output " +
+ "didn't contain 'gold', falling back to the default linker.",
+ )
+ return None
+
+def _add_compiler_option_if_supported(repository_ctx, cc, option):
+ """Returns `[option]` if supported, `[]` otherwise. Doesn't %-escape the option."""
+ return [option] if _is_compiler_option_supported(repository_ctx, cc, option) else []
+
+def _add_linker_option_if_supported(repository_ctx, cc, option, pattern):
+ """Returns `[option]` if supported, `[]` otherwise. Doesn't %-escape the option."""
+ return [option] if _is_linker_option_supported(repository_ctx, cc, option, pattern) else []
+
+def _get_no_canonical_prefixes_opt(repository_ctx, cc):
+ # If the compiler sometimes rewrites paths in the .d files without symlinks
+ # (i.e., when they're shorter), it confuses Bazel's logic for verifying that all
+ # #included header files are listed as inputs to the action.
+
+ # The '-fno-canonical-system-headers' should be enough, but clang does not
+ # support it, so we also try '-no-canonical-prefixes' if the first option
+ # does not work.
+ opt = _add_compiler_option_if_supported(
+ repository_ctx,
+ cc,
+ "-fno-canonical-system-headers",
+ )
+ if len(opt) == 0:
+ return _add_compiler_option_if_supported(
+ repository_ctx,
+ cc,
+ "-no-canonical-prefixes",
+ )
+ return opt
+
+def get_env(repository_ctx):
+ """Convert the environment in a list of export if in Homebrew. Doesn't %-escape the result!
+
+ Args:
+ repository_ctx: The repository context.
+ Returns:
+ An empty string, or a list of exports when we're running under Homebrew. Don't ask me why.
+ """
+ env = repository_ctx.os.environ
+ if "HOMEBREW_RUBY_PATH" in env:
+ return "\n".join([
+ "export %s='%s'" % (k, env[k].replace("'", "'\\''"))
+ for k in env
+ if k != "_" and k.find(".") == -1
+ ])
+ else:
+ return ""
+
+def _coverage_flags(repository_ctx, darwin):
+ use_llvm_cov = "1" == get_env_var(
+ repository_ctx,
+ "BAZEL_USE_LLVM_NATIVE_COVERAGE",
+ default = "0",
+ enable_warning = False,
+ )
+ if darwin or use_llvm_cov:
+ compile_flags = '"-fprofile-instr-generate", "-fcoverage-mapping"'
+ link_flags = '"-fprofile-instr-generate"'
+ else:
+ # gcc requires --coverage being passed for compilation and linking
+ # https://gcc.gnu.org/onlinedocs/gcc/Instrumentation-Options.html#Instrumentation-Options
+ compile_flags = '"--coverage"'
+ link_flags = '"--coverage"'
+ return compile_flags, link_flags
+
+def _find_generic(repository_ctx, name, env_name, overriden_tools, warn = False, silent = False):
+ """Find a generic C++ toolchain tool. Doesn't %-escape the result."""
+
+ if name in overriden_tools:
+ return overriden_tools[name]
+
+ result = name
+ env_value = repository_ctx.os.environ.get(env_name)
+ env_value_with_paren = ""
+ if env_value != None:
+ env_value = env_value.strip()
+ if env_value:
+ result = env_value
+ env_value_with_paren = " (%s)" % env_value
+ if result.startswith("/"):
+ # Absolute path, maybe we should make this supported by our which function.
+ return result
+ result = repository_ctx.which(result)
+ if result == None:
+ msg = ("Cannot find %s or %s%s; either correct your path or set the %s" +
+ " environment variable") % (name, env_name, env_value_with_paren, env_name)
+ if warn:
+ if not silent:
+ auto_configure_warning(msg)
+ else:
+ auto_configure_fail(msg)
+ return result
+
+def find_cc(repository_ctx, overriden_tools):
+ return _find_generic(repository_ctx, "gcc", "CC", overriden_tools)
+
+def configure_unix_toolchain(repository_ctx, cpu_value, overriden_tools):
+ """Configure C++ toolchain on Unix platforms.
+
+ Args:
+ repository_ctx: The repository context.
+ cpu_value: current cpu name.
+ overriden_tools: overriden tools.
+ """
+ paths = resolve_labels(repository_ctx, [
+ "@rules_cc//cc/private/toolchain:BUILD.tpl",
+ "@rules_cc//cc/private/toolchain:armeabi_cc_toolchain_config.bzl",
+ "@rules_cc//cc/private/toolchain:unix_cc_toolchain_config.bzl",
+ "@rules_cc//cc/private/toolchain:linux_cc_wrapper.sh.tpl",
+ "@rules_cc//cc/private/toolchain:osx_cc_wrapper.sh.tpl",
+ ])
+
+ repository_ctx.symlink(
+ paths["@rules_cc//cc/private/toolchain:unix_cc_toolchain_config.bzl"],
+ "cc_toolchain_config.bzl",
+ )
+
+ repository_ctx.symlink(
+ paths["@rules_cc//cc/private/toolchain:armeabi_cc_toolchain_config.bzl"],
+ "armeabi_cc_toolchain_config.bzl",
+ )
+
+ repository_ctx.file("tools/cpp/empty.cc", "int main() {}")
+ darwin = cpu_value == "darwin"
+
+ cc = _find_generic(repository_ctx, "gcc", "CC", overriden_tools)
+ overriden_tools = dict(overriden_tools)
+ overriden_tools["gcc"] = cc
+ overriden_tools["gcov"] = _find_generic(
+ repository_ctx,
+ "gcov",
+ "GCOV",
+ overriden_tools,
+ warn = True,
+ silent = True,
+ )
+ if darwin:
+ overriden_tools["gcc"] = "cc_wrapper.sh"
+ overriden_tools["ar"] = "/usr/bin/libtool"
+ auto_configure_warning_maybe(repository_ctx, "CC used: " + str(cc))
+ tool_paths = _get_tool_paths(repository_ctx, overriden_tools)
+ cc_toolchain_identifier = escape_string(get_env_var(
+ repository_ctx,
+ "CC_TOOLCHAIN_NAME",
+ "local",
+ False,
+ ))
+
+ cc_wrapper_src = (
+ "@rules_cc//cc/private/toolchain:osx_cc_wrapper.sh.tpl" if darwin else "@rules_cc//cc/private/toolchain:linux_cc_wrapper.sh.tpl"
+ )
+ repository_ctx.template(
+ "cc_wrapper.sh",
+ paths[cc_wrapper_src],
+ {
+ "%{cc}": escape_string(str(cc)),
+ "%{env}": escape_string(get_env(repository_ctx)),
+ },
+ )
+
+ cxx_opts = split_escaped(get_env_var(
+ repository_ctx,
+ "BAZEL_CXXOPTS",
+ "-std=c++0x",
+ False,
+ ), ":")
+
+ bazel_linklibs = "-lstdc++:-lm"
+ bazel_linkopts = ""
+ link_opts = split_escaped(get_env_var(
+ repository_ctx,
+ "BAZEL_LINKOPTS",
+ bazel_linkopts,
+ False,
+ ), ":")
+ link_libs = split_escaped(get_env_var(
+ repository_ctx,
+ "BAZEL_LINKLIBS",
+ bazel_linklibs,
+ False,
+ ), ":")
+ gold_linker_path = _find_gold_linker_path(repository_ctx, cc)
+ cc_path = repository_ctx.path(cc)
+ if not str(cc_path).startswith(str(repository_ctx.path(".")) + "/"):
+ # cc is outside the repository, set -B
+ bin_search_flag = ["-B" + escape_string(str(cc_path.dirname))]
+ else:
+ # cc is inside the repository, don't set -B.
+ bin_search_flag = []
+
+ coverage_compile_flags, coverage_link_flags = _coverage_flags(repository_ctx, darwin)
+ builtin_include_directories = _uniq(
+ get_escaped_cxx_inc_directories(repository_ctx, cc, "-xc") +
+ get_escaped_cxx_inc_directories(repository_ctx, cc, "-xc++", cxx_opts) +
+ get_escaped_cxx_inc_directories(
+ repository_ctx,
+ cc,
+ "-xc",
+ _get_no_canonical_prefixes_opt(repository_ctx, cc),
+ ) +
+ get_escaped_cxx_inc_directories(
+ repository_ctx,
+ cc,
+ "-xc++",
+ cxx_opts + _get_no_canonical_prefixes_opt(repository_ctx, cc),
+ ),
+ )
+
+ write_builtin_include_directory_paths(repository_ctx, cc, builtin_include_directories)
+ repository_ctx.template(
+ "BUILD",
+ paths["@rules_cc//cc/private/toolchain:BUILD.tpl"],
+ {
+ "%{abi_libc_version}": escape_string(get_env_var(
+ repository_ctx,
+ "ABI_LIBC_VERSION",
+ "local",
+ False,
+ )),
+ "%{abi_version}": escape_string(get_env_var(
+ repository_ctx,
+ "ABI_VERSION",
+ "local",
+ False,
+ )),
+ "%{cc_compiler_deps}": get_starlark_list([":builtin_include_directory_paths"] + (
+ [":cc_wrapper"] if darwin else []
+ )),
+ "%{cc_toolchain_identifier}": cc_toolchain_identifier,
+ "%{compile_flags}": get_starlark_list(
+ [
+ # Security hardening requires optimization.
+ # We need to undef it as some distributions now have it enabled by default.
+ "-U_FORTIFY_SOURCE",
+ "-fstack-protector",
+ # All warnings are enabled. Maybe enable -Werror as well?
+ "-Wall",
+ # Enable a few more warnings that aren't part of -Wall.
+ ] + (
+ _add_compiler_option_if_supported(repository_ctx, cc, "-Wthread-safety") +
+ _add_compiler_option_if_supported(repository_ctx, cc, "-Wself-assign")
+ ) + (
+ # Disable problematic warnings.
+ _add_compiler_option_if_supported(repository_ctx, cc, "-Wunused-but-set-parameter") +
+ # has false positives
+ _add_compiler_option_if_supported(repository_ctx, cc, "-Wno-free-nonheap-object") +
+ # Enable coloring even if there's no attached terminal. Bazel removes the
+ # escape sequences if --nocolor is specified.
+ _add_compiler_option_if_supported(repository_ctx, cc, "-fcolor-diagnostics")
+ ) + [
+ # Keep stack frames for debugging, even in opt mode.
+ "-fno-omit-frame-pointer",
+ ],
+ ),
+ "%{compiler}": escape_string(get_env_var(
+ repository_ctx,
+ "BAZEL_COMPILER",
+ "compiler",
+ False,
+ )),
+ "%{coverage_compile_flags}": coverage_compile_flags,
+ "%{coverage_link_flags}": coverage_link_flags,
+ "%{cxx_builtin_include_directories}": get_starlark_list(builtin_include_directories),
+ "%{cxx_flags}": get_starlark_list(cxx_opts + _escaped_cplus_include_paths(repository_ctx)),
+ "%{dbg_compile_flags}": get_starlark_list(["-g"]),
+ "%{host_system_name}": escape_string(get_env_var(
+ repository_ctx,
+ "BAZEL_HOST_SYSTEM",
+ "local",
+ False,
+ )),
+ "%{link_flags}": get_starlark_list((
+ ["-fuse-ld=" + gold_linker_path] if gold_linker_path else []
+ ) + _add_linker_option_if_supported(
+ repository_ctx,
+ cc,
+ "-Wl,-no-as-needed",
+ "-no-as-needed",
+ ) + _add_linker_option_if_supported(
+ repository_ctx,
+ cc,
+ "-Wl,-z,relro,-z,now",
+ "-z",
+ ) + (
+ [
+ "-undefined",
+ "dynamic_lookup",
+ "-headerpad_max_install_names",
+ ] if darwin else bin_search_flag + [
+ # Gold linker only? Can we enable this by default?
+ # "-Wl,--warn-execstack",
+ # "-Wl,--detect-odr-violations"
+ ] + _add_compiler_option_if_supported(
+ # Have gcc return the exit code from ld.
+ repository_ctx,
+ cc,
+ "-pass-exit-codes",
+ )
+ ) + link_opts),
+ "%{link_libs}": get_starlark_list(link_libs),
+ "%{name}": cpu_value,
+ "%{opt_compile_flags}": get_starlark_list(
+ [
+ # No debug symbols.
+ # Maybe we should enable https://gcc.gnu.org/wiki/DebugFission for opt or
+ # even generally? However, that can't happen here, as it requires special
+ # handling in Bazel.
+ "-g0",
+
+ # Conservative choice for -O
+ # -O3 can increase binary size and even slow down the resulting binaries.
+ # Profile first and / or use FDO if you need better performance than this.
+ "-O2",
+
+ # Security hardening on by default.
+ # Conservative choice; -D_FORTIFY_SOURCE=2 may be unsafe in some cases.
+ "-D_FORTIFY_SOURCE=1",
+
+ # Disable assertions
+ "-DNDEBUG",
+
+ # Removal of unused code and data at link time (can this increase binary
+ # size in some cases?).
+ "-ffunction-sections",
+ "-fdata-sections",
+ ],
+ ),
+ "%{opt_link_flags}": get_starlark_list(
+ [] if darwin else _add_linker_option_if_supported(
+ repository_ctx,
+ cc,
+ "-Wl,--gc-sections",
+ "-gc-sections",
+ ),
+ ),
+ "%{supports_param_files}": "0" if darwin else "1",
+ "%{supports_start_end_lib}": "True" if gold_linker_path else "False",
+ "%{target_cpu}": escape_string(get_env_var(
+ repository_ctx,
+ "BAZEL_TARGET_CPU",
+ cpu_value,
+ False,
+ )),
+ "%{target_libc}": "macosx" if darwin else escape_string(get_env_var(
+ repository_ctx,
+ "BAZEL_TARGET_LIBC",
+ "local",
+ False,
+ )),
+ "%{target_system_name}": escape_string(get_env_var(
+ repository_ctx,
+ "BAZEL_TARGET_SYSTEM",
+ "local",
+ False,
+ )),
+ "%{tool_paths}": ",\n ".join(
+ ['"%s": "%s"' % (k, v) for k, v in tool_paths.items()],
+ ),
+ "%{unfiltered_compile_flags}": get_starlark_list(
+ _get_no_canonical_prefixes_opt(repository_ctx, cc) + [
+ # Make C++ compilation deterministic. Use linkstamping instead of these
+ # compiler symbols.
+ "-Wno-builtin-macro-redefined",
+ "-D__DATE__=\\\"redacted\\\"",
+ "-D__TIMESTAMP__=\\\"redacted\\\"",
+ "-D__TIME__=\\\"redacted\\\"",
+ ],
+ ),
+ },
+ )
diff --git a/cc/private/toolchain/unix_cc_toolchain_config.bzl b/cc/private/toolchain/unix_cc_toolchain_config.bzl
new file mode 100644
index 0000000..4325a68
--- /dev/null
+++ b/cc/private/toolchain/unix_cc_toolchain_config.bzl
@@ -0,0 +1,1200 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A Starlark cc_toolchain configuration rule"""
+
+load("@rules_cc//cc:action_names.bzl", "ACTION_NAMES")
+load(
+ "@rules_cc//cc:cc_toolchain_config_lib.bzl",
+ "feature",
+ "feature_set",
+ "flag_group",
+ "flag_set",
+ "tool_path",
+ "variable_with_value",
+ "with_feature_set",
+)
+
+all_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.lto_backend,
+]
+
+all_cpp_compile_actions = [
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+]
+
+preprocessor_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.clif_match,
+]
+
+codegen_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+]
+
+all_link_actions = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+]
+
+lto_index_actions = [
+ ACTION_NAMES.lto_index_for_executable,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+]
+
+def _impl(ctx):
+ tool_paths = [
+ tool_path(name = name, path = path)
+ for name, path in ctx.attr.tool_paths.items()
+ ]
+ action_configs = []
+
+ supports_pic_feature = feature(
+ name = "supports_pic",
+ enabled = True,
+ )
+ supports_start_end_lib_feature = feature(
+ name = "supports_start_end_lib",
+ enabled = True,
+ )
+
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.compile_flags,
+ ),
+ ] if ctx.attr.compile_flags else []),
+ ),
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.dbg_compile_flags,
+ ),
+ ] if ctx.attr.dbg_compile_flags else []),
+ with_features = [with_feature_set(features = ["dbg"])],
+ ),
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.opt_compile_flags,
+ ),
+ ] if ctx.attr.opt_compile_flags else []),
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ flag_set(
+ actions = all_cpp_compile_actions + [ACTION_NAMES.lto_backend],
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.cxx_flags,
+ ),
+ ] if ctx.attr.cxx_flags else []),
+ ),
+ ],
+ )
+
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.link_flags,
+ ),
+ ] if ctx.attr.link_flags else []),
+ ),
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.opt_link_flags,
+ ),
+ ] if ctx.attr.opt_link_flags else []),
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+
+ dbg_feature = feature(name = "dbg")
+
+ opt_feature = feature(name = "opt")
+
+ sysroot_feature = feature(
+ name = "sysroot",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ACTION_NAMES.clif_match,
+ ] + all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["--sysroot=%{sysroot}"],
+ expand_if_available = "sysroot",
+ ),
+ ],
+ ),
+ ],
+ )
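+
+ # Illustrative expansion of the sysroot feature above (hypothetical value):
+ # when the build provides sysroot = "/opt/my-sysroot", the flag group emits
+ # "--sysroot=/opt/my-sysroot"; when the variable is absent,
+ # expand_if_available suppresses the flag entirely.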
+
+ fdo_optimize_feature = feature(
+ name = "fdo_optimize",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fprofile-use=%{fdo_profile_path}",
+ "-fprofile-correction",
+ ],
+ expand_if_available = "fdo_profile_path",
+ ),
+ ],
+ ),
+ ],
+ provides = ["profile"],
+ )
+
+ supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
+
+ user_compile_flags_feature = feature(
+ name = "user_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_compile_flags}"],
+ iterate_over = "user_compile_flags",
+ expand_if_available = "user_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ unfiltered_compile_flags_feature = feature(
+ name = "unfiltered_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_compile_actions,
+ flag_groups = ([
+ flag_group(
+ flags = ctx.attr.unfiltered_compile_flags,
+ ),
+ ] if ctx.attr.unfiltered_compile_flags else []),
+ ),
+ ],
+ )
+
+ library_search_directories_feature = feature(
+ name = "library_search_directories",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["-L%{library_search_directories}"],
+ iterate_over = "library_search_directories",
+ expand_if_available = "library_search_directories",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ static_libgcc_feature = feature(
+ name = "static_libgcc",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.lto_index_for_executable,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ],
+ flag_groups = [flag_group(flags = ["-static-libgcc"])],
+ with_features = [
+ with_feature_set(features = ["static_link_cpp_runtimes"]),
+ ],
+ ),
+ ],
+ )
+
+ pic_feature = feature(
+ name = "pic",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_module_compile,
+ ],
+ flag_groups = [
+ flag_group(flags = ["-fPIC"], expand_if_available = "pic"),
+ ],
+ ),
+ ],
+ )
+
+ per_object_debug_info_feature = feature(
+ name = "per_object_debug_info",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-gsplit-dwarf", "-g"],
+ expand_if_available = "per_object_debug_info_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ preprocessor_defines_feature = feature(
+ name = "preprocessor_defines",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.clif_match,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-D%{preprocessor_defines}"],
+ iterate_over = "preprocessor_defines",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ cs_fdo_optimize_feature = feature(
+ name = "cs_fdo_optimize",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.lto_backend],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fprofile-use=%{fdo_profile_path}",
+ "-Wno-profile-instr-unprofiled",
+ "-Wno-profile-instr-out-of-date",
+ "-fprofile-correction",
+ ],
+ expand_if_available = "fdo_profile_path",
+ ),
+ ],
+ ),
+ ],
+ provides = ["csprofile"],
+ )
+
+ autofdo_feature = feature(
+ name = "autofdo",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fauto-profile=%{fdo_profile_path}",
+ "-fprofile-correction",
+ ],
+ expand_if_available = "fdo_profile_path",
+ ),
+ ],
+ ),
+ ],
+ provides = ["profile"],
+ )
+
+ runtime_library_search_directories_feature = feature(
+ name = "runtime_library_search_directories",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ iterate_over = "runtime_library_search_directories",
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-Wl,-rpath,$EXEC_ORIGIN/%{runtime_library_search_directories}",
+ ],
+ expand_if_true = "is_cc_test",
+ ),
+ flag_group(
+ flags = [
+ "-Wl,-rpath,$ORIGIN/%{runtime_library_search_directories}",
+ ],
+ expand_if_false = "is_cc_test",
+ ),
+ ],
+ expand_if_available =
+ "runtime_library_search_directories",
+ ),
+ ],
+ with_features = [
+ with_feature_set(features = ["static_link_cpp_runtimes"]),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ iterate_over = "runtime_library_search_directories",
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-Wl,-rpath,$ORIGIN/%{runtime_library_search_directories}",
+ ],
+ ),
+ ],
+ expand_if_available =
+ "runtime_library_search_directories",
+ ),
+ ],
+ with_features = [
+ with_feature_set(
+ not_features = ["static_link_cpp_runtimes"],
+ ),
+ ],
+ ),
+ ],
+ )
+
+ fission_support_feature = feature(
+ name = "fission_support",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["-Wl,--gdb-index"],
+ expand_if_available = "is_using_fission",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ shared_flag_feature = feature(
+ name = "shared_flag",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+ ],
+ flag_groups = [flag_group(flags = ["-shared"])],
+ ),
+ ],
+ )
+
+ random_seed_feature = feature(
+ name = "random_seed",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_module_compile,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-frandom-seed=%{output_file}"],
+ expand_if_available = "output_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ includes_feature = feature(
+ name = "includes",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.objc_compile,
+ ACTION_NAMES.objcpp_compile,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-include", "%{includes}"],
+ iterate_over = "includes",
+ expand_if_available = "includes",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ fdo_instrument_feature = feature(
+ name = "fdo_instrument",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ] + all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fprofile-generate=%{fdo_instrument_path}",
+ "-fno-data-sections",
+ ],
+ expand_if_available = "fdo_instrument_path",
+ ),
+ ],
+ ),
+ ],
+ provides = ["profile"],
+ )
+
+ cs_fdo_instrument_feature = feature(
+ name = "cs_fdo_instrument",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.lto_backend,
+ ] + all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fcs-profile-generate=%{cs_fdo_instrument_path}",
+ ],
+ expand_if_available = "cs_fdo_instrument_path",
+ ),
+ ],
+ ),
+ ],
+ provides = ["csprofile"],
+ )
+
+ include_paths_feature = feature(
+ name = "include_paths",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.objc_compile,
+ ACTION_NAMES.objcpp_compile,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-iquote", "%{quote_include_paths}"],
+ iterate_over = "quote_include_paths",
+ ),
+ flag_group(
+ flags = ["-I%{include_paths}"],
+ iterate_over = "include_paths",
+ ),
+ flag_group(
+ flags = ["-isystem", "%{system_include_paths}"],
+ iterate_over = "system_include_paths",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ symbol_counts_feature = feature(
+ name = "symbol_counts",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-Wl,--print-symbol-counts=%{symbol_counts_output}",
+ ],
+ expand_if_available = "symbol_counts_output",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ llvm_coverage_map_format_feature = feature(
+ name = "llvm_coverage_map_format",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.objc_compile,
+ ACTION_NAMES.objcpp_compile,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fprofile-instr-generate",
+ "-fcoverage-mapping",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions + lto_index_actions + [
+ "objc-executable",
+ "objc++-executable",
+ ],
+ flag_groups = [
+ flag_group(flags = ["-fprofile-instr-generate"]),
+ ],
+ ),
+ ],
+ requires = [feature_set(features = ["coverage"])],
+ provides = ["profile"],
+ )
+
+ strip_debug_symbols_feature = feature(
+ name = "strip_debug_symbols",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["-Wl,-S"],
+ expand_if_available = "strip_debug_symbols",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ build_interface_libraries_feature = feature(
+ name = "build_interface_libraries",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "%{generate_interface_library}",
+ "%{interface_library_builder_path}",
+ "%{interface_library_input_path}",
+ "%{interface_library_output_path}",
+ ],
+ expand_if_available = "generate_interface_library",
+ ),
+ ],
+ with_features = [
+ with_feature_set(
+ features = ["supports_interface_shared_libraries"],
+ ),
+ ],
+ ),
+ ],
+ )
+
+ libraries_to_link_feature = feature(
+ name = "libraries_to_link",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ iterate_over = "libraries_to_link",
+ flag_groups = [
+ flag_group(
+ flags = ["-Wl,--start-lib"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file_group",
+ ),
+ ),
+ flag_group(
+ flags = ["-Wl,-whole-archive"],
+ expand_if_true =
+ "libraries_to_link.is_whole_archive",
+ ),
+ flag_group(
+ flags = ["%{libraries_to_link.object_files}"],
+ iterate_over = "libraries_to_link.object_files",
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file_group",
+ ),
+ ),
+ flag_group(
+ flags = ["%{libraries_to_link.name}"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file",
+ ),
+ ),
+ flag_group(
+ flags = ["%{libraries_to_link.name}"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "interface_library",
+ ),
+ ),
+ flag_group(
+ flags = ["%{libraries_to_link.name}"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "static_library",
+ ),
+ ),
+ flag_group(
+ flags = ["-l%{libraries_to_link.name}"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "dynamic_library",
+ ),
+ ),
+ flag_group(
+ flags = ["-l:%{libraries_to_link.name}"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "versioned_dynamic_library",
+ ),
+ ),
+ flag_group(
+ flags = ["-Wl,-no-whole-archive"],
+ expand_if_true = "libraries_to_link.is_whole_archive",
+ ),
+ flag_group(
+ flags = ["-Wl,--end-lib"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file_group",
+ ),
+ ),
+ ],
+ expand_if_available = "libraries_to_link",
+ ),
+ flag_group(
+ flags = ["-Wl,@%{thinlto_param_file}"],
+ expand_if_true = "thinlto_param_file",
+ ),
+ ],
+ ),
+ ],
+ )
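+
+ # Illustrative expansion of libraries_to_link (hypothetical inputs): an entry
+ # of type static_library named "bazel-out/k8-fastbuild/bin/libdep.a" expands
+ # to that path verbatim, a dynamic_library named "dep" expands to "-ldep",
+ # and an object_file_group expands to its object files wrapped between
+ # "-Wl,--start-lib" and "-Wl,--end-lib".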
+
+ user_link_flags_feature = feature(
+ name = "user_link_flags",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_link_flags}"],
+ iterate_over = "user_link_flags",
+ expand_if_available = "user_link_flags",
+ ),
+ ] + ([flag_group(flags = ctx.attr.link_libs)] if ctx.attr.link_libs else []),
+ ),
+ ],
+ )
+
+ fdo_prefetch_hints_feature = feature(
+ name = "fdo_prefetch_hints",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.lto_backend,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-mllvm",
+ "-prefetch-hints-file=%{fdo_prefetch_hints_path}",
+ ],
+ expand_if_available = "fdo_prefetch_hints_path",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ linkstamps_feature = feature(
+ name = "linkstamps",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{linkstamp_paths}"],
+ iterate_over = "linkstamp_paths",
+ expand_if_available = "linkstamp_paths",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ gcc_coverage_map_format_feature = feature(
+ name = "gcc_coverage_map_format",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.objc_compile,
+ ACTION_NAMES.objcpp_compile,
+ "objc-executable",
+ "objc++-executable",
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-fprofile-arcs", "-ftest-coverage"],
+ expand_if_available = "gcov_gcno_file",
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [flag_group(flags = ["--coverage"])],
+ ),
+ ],
+ requires = [feature_set(features = ["coverage"])],
+ provides = ["profile"],
+ )
+
+ archiver_flags_feature = feature(
+ name = "archiver_flags",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.cpp_link_static_library],
+ flag_groups = [
+ flag_group(flags = ["rcsD"]),
+ flag_group(
+ flags = ["%{output_execpath}"],
+ expand_if_available = "output_execpath",
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [ACTION_NAMES.cpp_link_static_library],
+ flag_groups = [
+ flag_group(
+ iterate_over = "libraries_to_link",
+ flag_groups = [
+ flag_group(
+ flags = ["%{libraries_to_link.name}"],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file",
+ ),
+ ),
+ flag_group(
+ flags = ["%{libraries_to_link.object_files}"],
+ iterate_over = "libraries_to_link.object_files",
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file_group",
+ ),
+ ),
+ ],
+ expand_if_available = "libraries_to_link",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ force_pic_flags_feature = feature(
+ name = "force_pic_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.lto_index_for_executable,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-pie"],
+ expand_if_available = "force_pic",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ dependency_file_feature = feature(
+ name = "dependency_file",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.objc_compile,
+ ACTION_NAMES.objcpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.clif_match,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["-MD", "-MF", "%{dependency_file}"],
+ expand_if_available = "dependency_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ dynamic_library_linker_tool_feature = feature(
+ name = "dynamic_library_linker_tool",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.lto_index_for_dynamic_library,
+ ACTION_NAMES.lto_index_for_nodeps_dynamic_library,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [" + cppLinkDynamicLibraryToolPath + "],
+ expand_if_available = "generate_interface_library",
+ ),
+ ],
+ with_features = [
+ with_feature_set(
+ features = ["supports_interface_shared_libraries"],
+ ),
+ ],
+ ),
+ ],
+ )
+
+ output_execpath_flags_feature = feature(
+ name = "output_execpath_flags",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["-o", "%{output_execpath}"],
+ expand_if_available = "output_execpath",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ # Note that we also set --coverage for c++-link-nodeps-dynamic-library. The
+ # generated code contains references to gcov symbols, and the dynamic linker
+ # can't resolve them unless the library is linked against gcov.
+ coverage_feature = feature(
+ name = "coverage",
+ provides = ["profile"],
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ],
+ flag_groups = ([
+ flag_group(flags = ctx.attr.coverage_compile_flags),
+ ] if ctx.attr.coverage_compile_flags else []),
+ ),
+ flag_set(
+ actions = all_link_actions + lto_index_actions,
+ flag_groups = ([
+ flag_group(flags = ctx.attr.coverage_link_flags),
+ ] if ctx.attr.coverage_link_flags else []),
+ ),
+ ],
+ )
+
+ thinlto_feature = feature(
+ name = "thin_lto",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ] + all_link_actions + lto_index_actions,
+ flag_groups = [
+ flag_group(flags = ["-flto=thin"]),
+ flag_group(
+ expand_if_available = "lto_indexing_bitcode_file",
+ flags = [
+ "-Xclang",
+ "-fthin-link-bitcode=%{lto_indexing_bitcode_file}",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [ACTION_NAMES.linkstamp_compile],
+ flag_groups = [flag_group(flags = ["-DBUILD_LTO_TYPE=thin"])],
+ ),
+ flag_set(
+ actions = lto_index_actions,
+ flag_groups = [
+ flag_group(flags = [
+ "-flto=thin",
+ "-Wl,-plugin-opt,thinlto-index-only%{thinlto_optional_params_file}",
+ "-Wl,-plugin-opt,thinlto-emit-imports-files",
+ "-Wl,-plugin-opt,thinlto-prefix-replace=%{thinlto_prefix_replace}",
+ ]),
+ flag_group(
+ expand_if_available = "thinlto_object_suffix_replace",
+ flags = [
+ "-Wl,-plugin-opt,thinlto-object-suffix-replace=%{thinlto_object_suffix_replace}",
+ ],
+ ),
+ flag_group(
+ expand_if_available = "thinlto_merged_object_file",
+ flags = [
+ "-Wl,-plugin-opt,obj-path=%{thinlto_merged_object_file}",
+ ],
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [ACTION_NAMES.lto_backend],
+ flag_groups = [
+ flag_group(flags = [
+ "-c",
+ "-fthinlto-index=%{thinlto_index}",
+ "-o",
+ "%{thinlto_output_object_file}",
+ "-x",
+ "ir",
+ "%{thinlto_input_bitcode_file}",
+ ]),
+ ],
+ ),
+ ],
+ )
+
+ is_linux = ctx.attr.target_libc != "macosx"
+
+ # TODO(#8303): Mac crosstool should also declare every feature.
+ if is_linux:
+ features = [
+ dependency_file_feature,
+ random_seed_feature,
+ pic_feature,
+ per_object_debug_info_feature,
+ preprocessor_defines_feature,
+ includes_feature,
+ include_paths_feature,
+ fdo_instrument_feature,
+ cs_fdo_instrument_feature,
+ cs_fdo_optimize_feature,
+ thinlto_feature,
+ fdo_prefetch_hints_feature,
+ autofdo_feature,
+ build_interface_libraries_feature,
+ dynamic_library_linker_tool_feature,
+ symbol_counts_feature,
+ shared_flag_feature,
+ linkstamps_feature,
+ output_execpath_flags_feature,
+ runtime_library_search_directories_feature,
+ library_search_directories_feature,
+ archiver_flags_feature,
+ force_pic_flags_feature,
+ fission_support_feature,
+ strip_debug_symbols_feature,
+ coverage_feature,
+ supports_pic_feature,
+ gcc_coverage_map_format_feature,
+ llvm_coverage_map_format_feature,
+ ] + (
+ [
+ supports_start_end_lib_feature,
+ ] if ctx.attr.supports_start_end_lib else []
+ ) + [
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ libraries_to_link_feature,
+ user_link_flags_feature,
+ static_libgcc_feature,
+ fdo_optimize_feature,
+ supports_dynamic_linker_feature,
+ dbg_feature,
+ opt_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ ]
+ else:
+ features = [
+ supports_pic_feature,
+ ] + (
+ [
+ supports_start_end_lib_feature,
+ ] if ctx.attr.supports_start_end_lib else []
+ ) + [
+ coverage_feature,
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ fdo_optimize_feature,
+ supports_dynamic_linker_feature,
+ dbg_feature,
+ opt_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ gcc_coverage_map_format_feature,
+ llvm_coverage_map_format_feature,
+ ]
+
+ return cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ cxx_builtin_include_directories = ctx.attr.cxx_builtin_include_directories,
+ toolchain_identifier = ctx.attr.toolchain_identifier,
+ host_system_name = ctx.attr.host_system_name,
+ target_system_name = ctx.attr.target_system_name,
+ target_cpu = ctx.attr.cpu,
+ target_libc = ctx.attr.target_libc,
+ compiler = ctx.attr.compiler,
+ abi_version = ctx.attr.abi_version,
+ abi_libc_version = ctx.attr.abi_libc_version,
+ tool_paths = tool_paths,
+ )
+
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "abi_libc_version": attr.string(mandatory = True),
+ "abi_version": attr.string(mandatory = True),
+ "compile_flags": attr.string_list(),
+ "compiler": attr.string(mandatory = True),
+ "coverage_compile_flags": attr.string_list(),
+ "coverage_link_flags": attr.string_list(),
+ "cpu": attr.string(mandatory = True),
+ "cxx_builtin_include_directories": attr.string_list(),
+ "cxx_flags": attr.string_list(),
+ "dbg_compile_flags": attr.string_list(),
+ "host_system_name": attr.string(mandatory = True),
+ "link_flags": attr.string_list(),
+ "link_libs": attr.string_list(),
+ "opt_compile_flags": attr.string_list(),
+ "opt_link_flags": attr.string_list(),
+ "supports_start_end_lib": attr.bool(),
+ "target_libc": attr.string(mandatory = True),
+ "target_system_name": attr.string(mandatory = True),
+ "tool_paths": attr.string_dict(),
+ "toolchain_identifier": attr.string(mandatory = True),
+ "unfiltered_compile_flags": attr.string_list(),
+ },
+ provides = [CcToolchainConfigInfo],
+)
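+
+# Illustrative usage sketch (hypothetical values, not part of this change): a
+# generated BUILD file might instantiate the rule above roughly like this for a
+# local Linux toolchain; every attribute name is real, every value is an example.
+#
+# cc_toolchain_config(
+#     name = "local_linux",
+#     cpu = "k8",
+#     compiler = "gcc",
+#     toolchain_identifier = "local_linux",
+#     host_system_name = "local",
+#     target_system_name = "local",
+#     target_libc = "glibc_2.31",
+#     abi_version = "gcc",
+#     abi_libc_version = "glibc_2.31",
+#     tool_paths = {"gcc": "/usr/bin/gcc", "ar": "/usr/bin/ar", "ld": "/usr/bin/ld"},
+#     supports_start_end_lib = True,
+# )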
diff --git a/cc/private/toolchain/vc_installation_error.bat.tpl b/cc/private/toolchain/vc_installation_error.bat.tpl
new file mode 100644
index 0000000..9cdd658
--- /dev/null
+++ b/cc/private/toolchain/vc_installation_error.bat.tpl
@@ -0,0 +1,24 @@
+:: Copyright 2017 The Bazel Authors. All rights reserved.
+::
+:: Licensed under the Apache License, Version 2.0 (the "License");
+:: you may not use this file except in compliance with the License.
+:: You may obtain a copy of the License at
+::
+:: http://www.apache.org/licenses/LICENSE-2.0
+::
+:: Unless required by applicable law or agreed to in writing, software
+:: distributed under the License is distributed on an "AS IS" BASIS,
+:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+:: See the License for the specific language governing permissions and
+:: limitations under the License.
+
+@echo OFF
+
+echo. 1>&2
+echo The target you are compiling requires Visual C++ build tools. 1>&2
+echo Bazel couldn't find a valid Visual C++ build tools installation on your machine. 1>&2
+%{vc_error_message}
+echo Please check your installation following https://docs.bazel.build/versions/main/windows.html#using 1>&2
+echo. 1>&2
+
+exit /b 1
diff --git a/cc/private/toolchain/windows_cc_configure.bzl b/cc/private/toolchain/windows_cc_configure.bzl
new file mode 100644
index 0000000..598d4b2
--- /dev/null
+++ b/cc/private/toolchain/windows_cc_configure.bzl
@@ -0,0 +1,703 @@
+# pylint: disable=g-bad-file-header
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Configuring the C++ toolchain on Windows."""
+
+load(
+ ":lib_cc_configure.bzl",
+ "auto_configure_fail",
+ "auto_configure_warning",
+ "auto_configure_warning_maybe",
+ "escape_string",
+ "execute",
+ "resolve_labels",
+ "write_builtin_include_directory_paths",
+)
+
+def _get_path_env_var(repository_ctx, name):
+ """Returns a path from an environment variable.
+
+ Removes quotes, replaces '/' with '\', and strips trailing '\'s."""
+ if name in repository_ctx.os.environ:
+ value = repository_ctx.os.environ[name]
+ if value[0] == "\"":
+ if len(value) == 1 or value[-1] != "\"":
+ auto_configure_fail("'%s' environment variable has no trailing quote" % name)
+ value = value[1:-1]
+ if "/" in value:
+ value = value.replace("/", "\\")
+ if value[-1] == "\\":
+ value = value.rstrip("\\")
+ return value
+ else:
+ return None
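+
+# A minimal sketch of the normalization above, using a hypothetical value: an
+# environment variable set to "C:/VS/BuildTools/VC/" (quoted, forward slashes,
+# trailing slash) would be returned as C:\VS\BuildTools\VC.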
+
+def _get_temp_env(repository_ctx):
+ """Returns the value of TMP, or TEMP, or if both undefined then C:\\Windows."""
+ tmp = _get_path_env_var(repository_ctx, "TMP")
+ if not tmp:
+ tmp = _get_path_env_var(repository_ctx, "TEMP")
+ if not tmp:
+ tmp = "C:\\Windows\\Temp"
+ auto_configure_warning(
+ "neither 'TMP' nor 'TEMP' environment variables are set, using '%s' as default" % tmp,
+ )
+ return tmp
+
+def _get_escaped_windows_msys_starlark_content(repository_ctx, use_mingw = False):
+ """Return the content of msys cc toolchain rule."""
+ msys_root = ""
+ bazel_sh = _get_path_env_var(repository_ctx, "BAZEL_SH")
+ if bazel_sh:
+ bazel_sh = bazel_sh.replace("\\", "/").lower()
+ tokens = bazel_sh.rsplit("/", 1)
+ if tokens[0].endswith("/usr/bin"):
+ msys_root = tokens[0][:len(tokens[0]) - len("usr/bin")]
+ elif tokens[0].endswith("/bin"):
+ msys_root = tokens[0][:len(tokens[0]) - len("bin")]
+
+ prefix = "mingw64" if use_mingw else "usr"
+ tool_path_prefix = escape_string(msys_root) + prefix
+ tool_bin_path = tool_path_prefix + "/bin"
+ tool_path = {}
+
+ for tool in ["ar", "compat-ld", "cpp", "dwp", "gcc", "gcov", "ld", "nm", "objcopy", "objdump", "strip"]:
+ if msys_root:
+ tool_path[tool] = tool_bin_path + "/" + tool
+ else:
+ tool_path[tool] = "msys_gcc_installation_error.bat"
+ tool_paths = ",\n ".join(['"%s": "%s"' % (k, v) for k, v in tool_path.items()])
+ include_directories = (' "%s/",\n ' % tool_path_prefix) if msys_root else ""
+ return tool_paths, tool_bin_path, include_directories
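+
+# Illustrative result, assuming a hypothetical BAZEL_SH of c:/msys64/usr/bin/bash.exe
+# (so msys_root is "c:/msys64/"): tool_bin_path becomes "c:/msys64/usr/bin" and
+# tool_path maps e.g. "gcc" to "c:/msys64/usr/bin/gcc"; with use_mingw = True the
+# prefix is "mingw64" instead of "usr". If no msys root can be derived, every tool
+# maps to msys_gcc_installation_error.bat.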
+
+def _get_system_root(repository_ctx):
+ """Get System root path on Windows, default is C:\\Windows. Doesn't %-escape the result."""
+ systemroot = _get_path_env_var(repository_ctx, "SYSTEMROOT")
+ if not systemroot:
+ systemroot = "C:\\Windows"
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "SYSTEMROOT is not set, using default SYSTEMROOT=C:\\Windows",
+ )
+ return escape_string(systemroot)
+
+def _add_system_root(repository_ctx, env):
+ """Running VCVARSALL.BAT and VCVARSQUERYREGISTRY.BAT need %SYSTEMROOT%\\\\system32 in PATH."""
+ if "PATH" not in env:
+ env["PATH"] = ""
+ env["PATH"] = env["PATH"] + ";" + _get_system_root(repository_ctx) + "\\system32"
+ return env
+
+def _find_vc_path(repository_ctx):
+ """Find Visual C++ build tools install path. Doesn't %-escape the result."""
+
+ # 1. Check if BAZEL_VC or BAZEL_VS is already set by user.
+ bazel_vc = _get_path_env_var(repository_ctx, "BAZEL_VC")
+ if bazel_vc:
+ if repository_ctx.path(bazel_vc).exists:
+ return bazel_vc
+ else:
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "%BAZEL_VC% is set to non-existent path, ignoring.",
+ )
+
+ bazel_vs = _get_path_env_var(repository_ctx, "BAZEL_VS")
+ if bazel_vs:
+ if repository_ctx.path(bazel_vs).exists:
+ bazel_vc = bazel_vs + "\\VC"
+ if repository_ctx.path(bazel_vc).exists:
+ return bazel_vc
+ else:
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "No 'VC' directory found under %BAZEL_VS%, ignoring.",
+ )
+ else:
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "%BAZEL_VS% is set to non-existent path, ignoring.",
+ )
+
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "Neither %BAZEL_VC% nor %BAZEL_VS% are set, start looking for the latest Visual C++" +
+ " installed.",
+ )
+
+ # 2. Check if VS%VS_VERSION%COMNTOOLS is set, if true then try to find and use
+ # vcvarsqueryregistry.bat / VsDevCmd.bat to detect VC++.
+ auto_configure_warning_maybe(repository_ctx, "Looking for VS%VERSION%COMNTOOLS environment variables, " +
+ "eg. VS140COMNTOOLS")
+ for vscommontools_env, script in [
+ ("VS160COMNTOOLS", "VsDevCmd.bat"),
+ ("VS150COMNTOOLS", "VsDevCmd.bat"),
+ ("VS140COMNTOOLS", "vcvarsqueryregistry.bat"),
+ ("VS120COMNTOOLS", "vcvarsqueryregistry.bat"),
+ ("VS110COMNTOOLS", "vcvarsqueryregistry.bat"),
+ ("VS100COMNTOOLS", "vcvarsqueryregistry.bat"),
+ ("VS90COMNTOOLS", "vcvarsqueryregistry.bat"),
+ ]:
+ if vscommontools_env not in repository_ctx.os.environ:
+ continue
+ script = _get_path_env_var(repository_ctx, vscommontools_env) + "\\" + script
+ if not repository_ctx.path(script).exists:
+ continue
+ repository_ctx.file(
+ "get_vc_dir.bat",
+ "@echo off\n" +
+ "call \"" + script + "\"\n" +
+ "echo %VCINSTALLDIR%",
+ True,
+ )
+ env = _add_system_root(repository_ctx, repository_ctx.os.environ)
+ vc_dir = execute(repository_ctx, ["./get_vc_dir.bat"], environment = env)
+
+ auto_configure_warning_maybe(repository_ctx, "Visual C++ build tools found at %s" % vc_dir)
+ return vc_dir
+
+ # 3. User might have purged all environment variables. If so, look for Visual C++ in registry.
+ # Works for Visual Studio 2017 and older. (Does not work for Visual Studio 2019 Preview.)
+ # TODO(laszlocsomor): check if "16.0" also has this registry key, after VS 2019 is released.
+ auto_configure_warning_maybe(repository_ctx, "Looking for Visual C++ through registry")
+ reg_binary = _get_system_root(repository_ctx) + "\\system32\\reg.exe"
+ vc_dir = None
+ for key, suffix in (("VC7", ""), ("VS7", "\\VC")):
+ for version in ["15.0", "14.0", "12.0", "11.0", "10.0", "9.0", "8.0"]:
+ if vc_dir:
+ break
+ result = repository_ctx.execute([reg_binary, "query", "HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\" + key, "/v", version])
+ auto_configure_warning_maybe(repository_ctx, "registry query result for VC %s:\n\nSTDOUT(start)\n%s\nSTDOUT(end)\nSTDERR(start):\n%s\nSTDERR(end)\n" %
+ (version, result.stdout, result.stderr))
+ if not result.stderr:
+ for line in result.stdout.split("\n"):
+ line = line.strip()
+ if line.startswith(version) and line.find("REG_SZ") != -1:
+ vc_dir = line[line.find("REG_SZ") + len("REG_SZ"):].strip() + suffix
+ if vc_dir:
+ auto_configure_warning_maybe(repository_ctx, "Visual C++ build tools found at %s" % vc_dir)
+ return vc_dir
+
+ # 4. Check default directories for VC installation
+ auto_configure_warning_maybe(repository_ctx, "Looking for default Visual C++ installation directory")
+ program_files_dir = _get_path_env_var(repository_ctx, "PROGRAMFILES(X86)")
+ if not program_files_dir:
+ program_files_dir = "C:\\Program Files (x86)"
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "'PROGRAMFILES(X86)' environment variable is not set, using '%s' as default" % program_files_dir,
+ )
+ for path in [
+ "Microsoft Visual Studio\\2019\\Preview\\VC",
+ "Microsoft Visual Studio\\2019\\BuildTools\\VC",
+ "Microsoft Visual Studio\\2019\\Community\\VC",
+ "Microsoft Visual Studio\\2019\\Professional\\VC",
+ "Microsoft Visual Studio\\2019\\Enterprise\\VC",
+ "Microsoft Visual Studio\\2017\\BuildTools\\VC",
+ "Microsoft Visual Studio\\2017\\Community\\VC",
+ "Microsoft Visual Studio\\2017\\Professional\\VC",
+ "Microsoft Visual Studio\\2017\\Enterprise\\VC",
+ "Microsoft Visual Studio 14.0\\VC",
+ ]:
+ path = program_files_dir + "\\" + path
+ if repository_ctx.path(path).exists:
+ vc_dir = path
+ break
+
+ if not vc_dir:
+ auto_configure_warning_maybe(repository_ctx, "Visual C++ build tools not found.")
+ return None
+ auto_configure_warning_maybe(repository_ctx, "Visual C++ build tools found at %s" % vc_dir)
+ return vc_dir
+
+def _is_vs_2017_or_2019(vc_path):
+ """Check if the installed VS version is Visual Studio 2017."""
+
+ # In VS 2017 and 2019, the location of VC is like:
+ # C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\
+ # In VS 2015 or older version, it is like:
+ # C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\
+ return vc_path.find("2017") != -1 or vc_path.find("2019") != -1
+
+def _find_vcvars_bat_script(repository_ctx, vc_path):
+ """Find batch script to set up environment variables for VC. Doesn't %-escape the result."""
+ if _is_vs_2017_or_2019(vc_path):
+ vcvars_script = vc_path + "\\Auxiliary\\Build\\VCVARSALL.BAT"
+ else:
+ vcvars_script = vc_path + "\\VCVARSALL.BAT"
+
+ if not repository_ctx.path(vcvars_script).exists:
+ return None
+
+ return vcvars_script
+
+def _is_support_vcvars_ver(vc_full_version):
+ """-vcvars_ver option is supported from version 14.11.25503 (VS 2017 version 15.3)."""
+ version = [int(i) for i in vc_full_version.split(".")]
+ min_version = [14, 11, 25503]
+ return version >= min_version
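+
+# The comparison above is element-wise on the parsed integer lists. With hypothetical
+# inputs: "14.16.27023" -> [14, 16, 27023] >= [14, 11, 25503] is True, while
+# "14.10.25017" -> [14, 10, 25017] is False.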
+
+def _is_support_winsdk_selection(repository_ctx, vc_path):
+ """Windows SDK selection is supported with VC 2017 / 2019 or with full VS 2015 installation."""
+ if _is_vs_2017_or_2019(vc_path):
+ return True
+
+ # By checking the source code of VCVARSALL.BAT in VC 2015, we know that
+ # when devenv.exe or wdexpress.exe exists, VCVARSALL.BAT supports Windows SDK selection.
+ vc_common_ide = repository_ctx.path(vc_path).dirname.get_child("Common7").get_child("IDE")
+ for tool in ["devenv.exe", "wdexpress.exe"]:
+ if vc_common_ide.get_child(tool).exists:
+ return True
+ return False
+
+def setup_vc_env_vars(repository_ctx, vc_path, envvars = [], allow_empty = False, escape = True):
+ """Get environment variables set by VCVARSALL.BAT script. Doesn't %-escape the result!
+
+ Args:
+ repository_ctx: the repository_ctx object
+ vc_path: Visual C++ root directory
+ envvars: list of envvars to retrieve; default is ["PATH", "INCLUDE", "LIB", "WINDOWSSDKDIR"]
+ allow_empty: allow unset envvars; if False then report errors for those
+ escape: if True, escape "\" as "\\" and "%" as "%%" in the envvar values
+
+ Returns:
+ dictionary of the envvars
+ """
+ if not envvars:
+ envvars = ["PATH", "INCLUDE", "LIB", "WINDOWSSDKDIR"]
+
+ vcvars_script = _find_vcvars_bat_script(repository_ctx, vc_path)
+ if not vcvars_script:
+ auto_configure_fail("Cannot find VCVARSALL.BAT script under %s" % vc_path)
+
+ # Getting Windows SDK version set by user.
+ # Only supports VC 2017 & 2019 and VC 2015 with full VS installation.
+ winsdk_version = _get_winsdk_full_version(repository_ctx)
+ if winsdk_version and not _is_support_winsdk_selection(repository_ctx, vc_path):
+ auto_configure_warning(("BAZEL_WINSDK_FULL_VERSION=%s is ignored, " +
+ "because standalone Visual C++ Build Tools 2015 doesn't support specifying Windows " +
+ "SDK version, please install the full VS 2015 or use VC 2017/2019.") % winsdk_version)
+ winsdk_version = ""
+
+ # Get VC version set by user. Only supports VC 2017 & 2019.
+ vcvars_ver = ""
+ if _is_vs_2017_or_2019(vc_path):
+ full_version = _get_vc_full_version(repository_ctx, vc_path)
+
+ # VCVARSALL.BAT comes from the latest VC installed, so check whether that latest
+ # version supports -vcvars_ver.
+ if _is_support_vcvars_ver(_get_latest_subversion(repository_ctx, vc_path)):
+ vcvars_ver = "-vcvars_ver=" + full_version
+
+ cmd = "\"%s\" amd64 %s %s" % (vcvars_script, winsdk_version, vcvars_ver)
+ print_envvars = ",".join(["{k}=%{k}%".format(k = k) for k in envvars])
+ repository_ctx.file(
+ "get_env.bat",
+ "@echo off\n" +
+ ("call %s > NUL \n" % cmd) + ("echo %s \n" % print_envvars),
+ True,
+ )
+ env = _add_system_root(repository_ctx, {k: "" for k in envvars})
+ envs = execute(repository_ctx, ["./get_env.bat"], environment = env).split(",")
+ env_map = {}
+ for env in envs:
+ key, value = env.split("=", 1)
+ env_map[key] = escape_string(value.replace("\\", "\\\\")) if escape else value
+ if not allow_empty:
+ _check_env_vars(env_map, cmd, expected = envvars)
+ return env_map
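+
+# Sketch of a typical call (the path is hypothetical):
+# setup_vc_env_vars(repository_ctx, "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC")
+# returns a dict like {"PATH": ..., "INCLUDE": ..., "LIB": ..., "WINDOWSSDKDIR": ...},
+# with backslashes doubled and "%" escaped because escape defaults to True.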
+
+def _check_env_vars(env_map, cmd, expected):
+ for env in expected:
+ if not env_map.get(env):
+ auto_configure_fail(
+ "Setting up VC environment variables failed, %s is not set by the following command:\n %s" % (env, cmd),
+ )
+
+def _get_latest_subversion(repository_ctx, vc_path):
+ """Get the latest subversion of a VS 2017/2019 installation.
+
+ For VS 2017 & 2019, there could be multiple versions of VC build tools.
+ The directories are like:
+ <vc_path>\\Tools\\MSVC\\14.10.24930\\bin\\HostX64\\x64
+ <vc_path>\\Tools\\MSVC\\14.16.27023\\bin\\HostX64\\x64
+ This function should return 14.16.27023 in this case."""
+ versions = [path.basename for path in repository_ctx.path(vc_path + "\\Tools\\MSVC").readdir()]
+ if len(versions) < 1:
+ auto_configure_warning_maybe(repository_ctx, "Cannot find any VC installation under BAZEL_VC(%s)" % vc_path)
+ return None
+
+ # Parse the version string into integers, then sort the integers to prevent textual sorting.
+ version_list = []
+ for version in versions:
+ parts = [int(i) for i in version.split(".")]
+ version_list.append((parts, version))
+
+ version_list = sorted(version_list)
+ latest_version = version_list[-1][1]
+
+ auto_configure_warning_maybe(repository_ctx, "Found the following VC verisons:\n%s\n\nChoosing the latest version = %s" % ("\n".join(versions), latest_version))
+ return latest_version
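+
+# The numeric parse above matters because a plain string sort would mis-order
+# versions. With hypothetical directories 14.9.1 and 14.16.27023, textual sorting
+# would pick "14.9.1" as the latest, while [14, 16, 27023] > [14, 9, 1] correctly
+# selects 14.16.27023.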
+
+def _get_vc_full_version(repository_ctx, vc_path):
+ """Return the value of BAZEL_VC_FULL_VERSION if defined, otherwise the latest version."""
+ if "BAZEL_VC_FULL_VERSION" in repository_ctx.os.environ:
+ return repository_ctx.os.environ["BAZEL_VC_FULL_VERSION"]
+ return _get_latest_subversion(repository_ctx, vc_path)
+
+def _get_winsdk_full_version(repository_ctx):
+ """Return the value of BAZEL_WINSDK_FULL_VERSION if defined, otherwise an empty string."""
+ return repository_ctx.os.environ.get("BAZEL_WINSDK_FULL_VERSION", default = "")
+
+def _find_msvc_tool(repository_ctx, vc_path, tool):
+ """Find the exact path of a specific build tool in MSVC. Doesn't %-escape the result."""
+ tool_path = None
+ if _is_vs_2017_or_2019(vc_path):
+ full_version = _get_vc_full_version(repository_ctx, vc_path)
+ if full_version:
+ tool_path = "%s\\Tools\\MSVC\\%s\\bin\\HostX64\\x64\\%s" % (vc_path, full_version, tool)
+ else:
+ # For VS 2015 and older versions, the tools are under:
+ # C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64
+ tool_path = vc_path + "\\bin\\amd64\\" + tool
+
+ if not tool_path or not repository_ctx.path(tool_path).exists:
+ return None
+
+ return tool_path.replace("\\", "/")
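+
+# Illustrative result (hypothetical version): for a VS 2019 install with full_version
+# 14.29.30133, looking up "cl.exe" yields
+# <vc_path>/Tools/MSVC/14.29.30133/bin/HostX64/x64/cl.exe, with backslashes converted
+# to forward slashes by the replace() above.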
+
+def _find_missing_vc_tools(repository_ctx, vc_path):
+ """Check if any required tool is missing under given VC path."""
+ missing_tools = []
+ if not _find_vcvars_bat_script(repository_ctx, vc_path):
+ missing_tools.append("VCVARSALL.BAT")
+
+ for tool in ["cl.exe", "link.exe", "lib.exe", "ml64.exe"]:
+ if not _find_msvc_tool(repository_ctx, vc_path, tool):
+ missing_tools.append(tool)
+
+ return missing_tools
+
+def _is_support_debug_fastlink(repository_ctx, linker):
+ """Run linker alone to see if it supports /DEBUG:FASTLINK."""
+ if _use_clang_cl(repository_ctx):
+ # LLVM's lld-link.exe doesn't support /DEBUG:FASTLINK.
+ return False
+ result = execute(repository_ctx, [linker], expect_failure = True)
+ return result.find("/DEBUG[:{FASTLINK|FULL|NONE}]") != -1
+
+def _find_llvm_path(repository_ctx):
+ """Find LLVM install path."""
+
+ # 1. Check if BAZEL_LLVM is already set by user.
+ bazel_llvm = _get_path_env_var(repository_ctx, "BAZEL_LLVM")
+ if bazel_llvm:
+ return bazel_llvm
+
+ auto_configure_warning_maybe(repository_ctx, "'BAZEL_LLVM' is not set, " +
+ "start looking for LLVM installation on machine.")
+
+ # 2. Look for LLVM installation through registry.
+ auto_configure_warning_maybe(repository_ctx, "Looking for LLVM installation through registry")
+ reg_binary = _get_system_root(repository_ctx) + "\\system32\\reg.exe"
+ llvm_dir = None
+ result = repository_ctx.execute([reg_binary, "query", "HKEY_LOCAL_MACHINE\\SOFTWARE\\WOW6432Node\\LLVM\\LLVM"])
+ auto_configure_warning_maybe(repository_ctx, "registry query result for LLVM:\n\nSTDOUT(start)\n%s\nSTDOUT(end)\nSTDERR(start):\n%s\nSTDERR(end)\n" %
+ (result.stdout, result.stderr))
+ if not result.stderr:
+ for line in result.stdout.split("\n"):
+ line = line.strip()
+ if line.startswith("(Default)") and line.find("REG_SZ") != -1:
+ llvm_dir = line[line.find("REG_SZ") + len("REG_SZ"):].strip()
+ if llvm_dir:
+ auto_configure_warning_maybe(repository_ctx, "LLVM installation found at %s" % llvm_dir)
+ return llvm_dir
+
+ # 3. Check default directories for LLVM installation
+ auto_configure_warning_maybe(repository_ctx, "Looking for default LLVM installation directory")
+ program_files_dir = _get_path_env_var(repository_ctx, "PROGRAMFILES")
+ if not program_files_dir:
+ program_files_dir = "C:\\Program Files"
+ auto_configure_warning_maybe(
+ repository_ctx,
+ "'PROGRAMFILES' environment variable is not set, using '%s' as default" % program_files_dir,
+ )
+ path = program_files_dir + "\\LLVM"
+ if repository_ctx.path(path).exists:
+ llvm_dir = path
+
+ if not llvm_dir:
+ auto_configure_warning_maybe(repository_ctx, "LLVM installation not found.")
+ return None
+ auto_configure_warning_maybe(repository_ctx, "LLVM installation found at %s" % llvm_dir)
+ return llvm_dir
+
+def _find_llvm_tool(repository_ctx, llvm_path, tool):
+ """Find the exact path of a specific build tool in LLVM. Doesn't %-escape the result."""
+ tool_path = llvm_path + "\\bin\\" + tool
+
+ if not repository_ctx.path(tool_path).exists:
+ return None
+
+ return tool_path.replace("\\", "/")
+
+def _use_clang_cl(repository_ctx):
+ """Returns True if USE_CLANG_CL is set to 1."""
+ return repository_ctx.os.environ.get("USE_CLANG_CL", default = "0") == "1"
+
+def _find_missing_llvm_tools(repository_ctx, llvm_path):
+ """Check if any required tool is missing under given LLVM path."""
+ missing_tools = []
+ for tool in ["clang-cl.exe", "lld-link.exe", "llvm-lib.exe"]:
+ if not _find_llvm_tool(repository_ctx, llvm_path, tool):
+ missing_tools.append(tool)
+
+ return missing_tools
+
+def _get_clang_version(repository_ctx, clang_cl):
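+ """Return the clang version string parsed from the stderr of "<clang_cl> -v" (an illustrative value would be "16.0.0")."""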
+ result = repository_ctx.execute([clang_cl, "-v"])
+ if result.return_code != 0:
+ auto_configure_fail("Failed to get clang version by running \"%s -v\"" % clang_cl)
+
+ # Stderr should look like "clang version X.X.X ..."
+ return result.stderr.splitlines()[0].split(" ")[2]
+
+def _get_msys_mingw_vars(repository_ctx):
+ """Get the variables we need to populate the msys/mingw toolchains."""
+ tool_paths, tool_bin_path, inc_dir_msys = _get_escaped_windows_msys_starlark_content(repository_ctx)
+ tool_paths_mingw, tool_bin_path_mingw, inc_dir_mingw = _get_escaped_windows_msys_starlark_content(repository_ctx, use_mingw = True)
+ write_builtin_include_directory_paths(repository_ctx, "mingw", [inc_dir_mingw], file_suffix = "_mingw")
+ msys_mingw_vars = {
+ "%{cxx_builtin_include_directories}": inc_dir_msys,
+ "%{mingw_cxx_builtin_include_directories}": inc_dir_mingw,
+ "%{mingw_tool_bin_path}": tool_bin_path_mingw,
+ "%{mingw_tool_paths}": tool_paths_mingw,
+ "%{tool_bin_path}": tool_bin_path,
+ "%{tool_paths}": tool_paths,
+ }
+ return msys_mingw_vars
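+
+# The "%{...}" keys above correspond to placeholders in BUILD.windows.tpl; the dict
+# is merged into template_vars and rendered by repository_ctx.template() in
+# configure_windows_toolchain() below.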
+
+def _get_msvc_vars(repository_ctx, paths):
+ """Get the variables we need to populate the MSVC toolchains."""
+ msvc_vars = dict()
+ vc_path = _find_vc_path(repository_ctx)
+ missing_tools = None
+ if not vc_path:
+ repository_ctx.template(
+ "vc_installation_error.bat",
+ paths["@rules_cc//cc/private/toolchain:vc_installation_error.bat.tpl"],
+ {"%{vc_error_message}": ""},
+ )
+ else:
+ missing_tools = _find_missing_vc_tools(repository_ctx, vc_path)
+ if missing_tools:
+ message = "\r\n".join([
+ "echo. 1>&2",
+ "echo Visual C++ build tools seems to be installed at %s 1>&2" % vc_path,
+ "echo But Bazel can't find the following tools: 1>&2",
+ "echo %s 1>&2" % ", ".join(missing_tools),
+ "echo. 1>&2",
+ ])
+ repository_ctx.template(
+ "vc_installation_error.bat",
+ paths["@rules_cc//cc/private/toolchain:vc_installation_error.bat.tpl"],
+ {"%{vc_error_message}": message},
+ )
+
+ if not vc_path or missing_tools:
+ write_builtin_include_directory_paths(repository_ctx, "msvc", [], file_suffix = "_msvc")
+ msvc_vars = {
+ "%{dbg_mode_debug_flag}": "/DEBUG",
+ "%{fastbuild_mode_debug_flag}": "/DEBUG",
+ "%{msvc_cl_path}": "vc_installation_error.bat",
+ "%{msvc_cxx_builtin_include_directories}": "",
+ "%{msvc_env_include}": "msvc_not_found",
+ "%{msvc_env_lib}": "msvc_not_found",
+ "%{msvc_env_path}": "msvc_not_found",
+ "%{msvc_env_tmp}": "msvc_not_found",
+ "%{msvc_lib_path}": "vc_installation_error.bat",
+ "%{msvc_link_path}": "vc_installation_error.bat",
+ "%{msvc_ml_path}": "vc_installation_error.bat",
+ }
+ return msvc_vars
+
+ env = setup_vc_env_vars(repository_ctx, vc_path)
+ escaped_paths = escape_string(env["PATH"])
+ escaped_include_paths = escape_string(env["INCLUDE"])
+ escaped_lib_paths = escape_string(env["LIB"])
+ escaped_tmp_dir = escape_string(_get_temp_env(repository_ctx).replace("\\", "\\\\"))
+
+ llvm_path = ""
+ if _use_clang_cl(repository_ctx):
+ llvm_path = _find_llvm_path(repository_ctx)
+ if not llvm_path:
+ auto_configure_fail("\nUSE_CLANG_CL is set to 1, but Bazel cannot find Clang installation on your system.\n" +
+ "Please install Clang via http://releases.llvm.org/download.html\n")
+ cl_path = _find_llvm_tool(repository_ctx, llvm_path, "clang-cl.exe")
+ link_path = _find_llvm_tool(repository_ctx, llvm_path, "lld-link.exe")
+ if not link_path:
+ link_path = _find_msvc_tool(repository_ctx, vc_path, "link.exe")
+ lib_path = _find_llvm_tool(repository_ctx, llvm_path, "llvm-lib.exe")
+ if not lib_path:
+ lib_path = _find_msvc_tool(repository_ctx, vc_path, "lib.exe")
+ else:
+ cl_path = _find_msvc_tool(repository_ctx, vc_path, "cl.exe")
+ link_path = _find_msvc_tool(repository_ctx, vc_path, "link.exe")
+ lib_path = _find_msvc_tool(repository_ctx, vc_path, "lib.exe")
+
+ msvc_ml_path = _find_msvc_tool(repository_ctx, vc_path, "ml64.exe")
+ escaped_cxx_include_directories = []
+
+ for path in escaped_include_paths.split(";"):
+ if path:
+ escaped_cxx_include_directories.append("\"%s\"" % path)
+ if llvm_path:
+ clang_version = _get_clang_version(repository_ctx, cl_path)
+ clang_dir = llvm_path + "\\lib\\clang\\" + clang_version
+ clang_include_path = (clang_dir + "\\include").replace("\\", "\\\\")
+ escaped_cxx_include_directories.append("\"%s\"" % clang_include_path)
+ clang_lib_path = (clang_dir + "\\lib\\windows").replace("\\", "\\\\")
+ escaped_lib_paths = escaped_lib_paths + ";" + clang_lib_path
+
+ support_debug_fastlink = _is_support_debug_fastlink(repository_ctx, link_path)
+
+ write_builtin_include_directory_paths(repository_ctx, "msvc", escaped_cxx_include_directories, file_suffix = "_msvc")
+ msvc_vars = {
+ "%{dbg_mode_debug_flag}": "/DEBUG:FULL" if support_debug_fastlink else "/DEBUG",
+ "%{fastbuild_mode_debug_flag}": "/DEBUG:FASTLINK" if support_debug_fastlink else "/DEBUG",
+ "%{msvc_cl_path}": cl_path,
+ "%{msvc_cxx_builtin_include_directories}": " " + ",\n ".join(escaped_cxx_include_directories),
+ "%{msvc_env_include}": escaped_include_paths,
+ "%{msvc_env_lib}": escaped_lib_paths,
+ "%{msvc_env_path}": escaped_paths,
+ "%{msvc_env_tmp}": escaped_tmp_dir,
+ "%{msvc_lib_path}": lib_path,
+ "%{msvc_link_path}": link_path,
+ "%{msvc_ml_path}": msvc_ml_path,
+ }
+ return msvc_vars
+
+def _get_clang_cl_vars(repository_ctx, paths, msvc_vars):
+ """Get the variables we need to populate the clang-cl toolchains."""
+ llvm_path = _find_llvm_path(repository_ctx)
+ error_script = None
+ if msvc_vars["%{msvc_cl_path}"] == "vc_installation_error.bat":
+ error_script = "vc_installation_error.bat"
+ elif not llvm_path:
+ repository_ctx.template(
+ "clang_installation_error.bat",
+ paths["@rules_cc//cc/private/toolchain:clang_installation_error.bat.tpl"],
+ {"%{clang_error_message}": ""},
+ )
+ error_script = "clang_installation_error.bat"
+ else:
+ missing_tools = _find_missing_llvm_tools(repository_ctx, llvm_path)
+ if missing_tools:
+ message = "\r\n".join([
+ "echo. 1>&2",
+ "echo LLVM/Clang seems to be installed at %s 1>&2" % llvm_path,
+ "echo But Bazel can't find the following tools: 1>&2",
+ "echo %s 1>&2" % ", ".join(missing_tools),
+ "echo. 1>&2",
+ ])
+ repository_ctx.template(
+ "clang_installation_error.bat",
+ paths["@rules_cc//cc/private/toolchain:clang_installation_error.bat.tpl"],
+ {"%{clang_error_message}": message},
+ )
+ error_script = "clang_installation_error.bat"
+
+ if error_script:
+ write_builtin_include_directory_paths(repository_ctx, "clang-cl", [], file_suffix = "_clangcl")
+ clang_cl_vars = {
+ "%{clang_cl_cl_path}": error_script,
+ "%{clang_cl_cxx_builtin_include_directories}": "",
+ "%{clang_cl_dbg_mode_debug_flag}": "/DEBUG",
+ "%{clang_cl_env_include}": "clang_cl_not_found",
+ "%{clang_cl_env_lib}": "clang_cl_not_found",
+ "%{clang_cl_env_path}": "clang_cl_not_found",
+ "%{clang_cl_env_tmp}": "clang_cl_not_found",
+ "%{clang_cl_fastbuild_mode_debug_flag}": "/DEBUG",
+ "%{clang_cl_lib_path}": error_script,
+ "%{clang_cl_link_path}": error_script,
+ "%{clang_cl_ml_path}": error_script,
+ }
+ return clang_cl_vars
+
+ clang_cl_path = _find_llvm_tool(repository_ctx, llvm_path, "clang-cl.exe")
+ lld_link_path = _find_llvm_tool(repository_ctx, llvm_path, "lld-link.exe")
+ llvm_lib_path = _find_llvm_tool(repository_ctx, llvm_path, "llvm-lib.exe")
+
+ clang_version = _get_clang_version(repository_ctx, clang_cl_path)
+ clang_dir = llvm_path + "\\lib\\clang\\" + clang_version
+ clang_include_path = (clang_dir + "\\include").replace("\\", "\\\\")
+ clang_lib_path = (clang_dir + "\\lib\\windows").replace("\\", "\\\\")
+
+ clang_cl_include_directories = msvc_vars["%{msvc_cxx_builtin_include_directories}"] + (",\n \"%s\"" % clang_include_path)
+ write_builtin_include_directory_paths(repository_ctx, "clang-cl", [clang_cl_include_directories], file_suffix = "_clangcl")
+ clang_cl_vars = {
+ "%{clang_cl_cl_path}": clang_cl_path,
+ "%{clang_cl_cxx_builtin_include_directories}": clang_cl_include_directories,
+ # LLVM's lld-link.exe doesn't support /DEBUG:FASTLINK.
+ "%{clang_cl_dbg_mode_debug_flag}": "/DEBUG",
+ "%{clang_cl_env_include}": msvc_vars["%{msvc_env_include}"] + ";" + clang_include_path,
+ "%{clang_cl_env_lib}": msvc_vars["%{msvc_env_lib}"] + ";" + clang_lib_path,
+ "%{clang_cl_env_path}": msvc_vars["%{msvc_env_path}"],
+ "%{clang_cl_env_tmp}": msvc_vars["%{msvc_env_tmp}"],
+ "%{clang_cl_fastbuild_mode_debug_flag}": "/DEBUG",
+ "%{clang_cl_lib_path}": llvm_lib_path,
+ "%{clang_cl_link_path}": lld_link_path,
+ "%{clang_cl_ml_path}": msvc_vars["%{msvc_ml_path}"],
+ }
+ return clang_cl_vars
+
+def configure_windows_toolchain(repository_ctx):
+ """Configure C++ toolchain on Windows.
+
+ Args:
+ repository_ctx: The repository context.
+ """
+ paths = resolve_labels(repository_ctx, [
+ "@rules_cc//cc/private/toolchain:BUILD.windows.tpl",
+ "@rules_cc//cc/private/toolchain:windows_cc_toolchain_config.bzl",
+ "@rules_cc//cc/private/toolchain:armeabi_cc_toolchain_config.bzl",
+ "@rules_cc//cc/private/toolchain:vc_installation_error.bat.tpl",
+ "@rules_cc//cc/private/toolchain:msys_gcc_installation_error.bat",
+ "@rules_cc//cc/private/toolchain:clang_installation_error.bat.tpl",
+ ])
+
+ repository_ctx.symlink(
+ paths["@rules_cc//cc/private/toolchain:windows_cc_toolchain_config.bzl"],
+ "windows_cc_toolchain_config.bzl",
+ )
+ repository_ctx.symlink(
+ paths["@rules_cc//cc/private/toolchain:armeabi_cc_toolchain_config.bzl"],
+ "armeabi_cc_toolchain_config.bzl",
+ )
+ repository_ctx.symlink(
+ paths["@rules_cc//cc/private/toolchain:msys_gcc_installation_error.bat"],
+ "msys_gcc_installation_error.bat",
+ )
+
+ template_vars = dict()
+ msvc_vars = _get_msvc_vars(repository_ctx, paths)
+ template_vars.update(msvc_vars)
+ template_vars.update(_get_clang_cl_vars(repository_ctx, paths, msvc_vars))
+ template_vars.update(_get_msys_mingw_vars(repository_ctx))
+
+ repository_ctx.template(
+ "BUILD",
+ paths["@rules_cc//cc/private/toolchain:BUILD.windows.tpl"],
+ template_vars,
+ )
diff --git a/cc/private/toolchain/windows_cc_toolchain_config.bzl b/cc/private/toolchain/windows_cc_toolchain_config.bzl
new file mode 100644
index 0000000..7fa2978
--- /dev/null
+++ b/cc/private/toolchain/windows_cc_toolchain_config.bzl
@@ -0,0 +1,1339 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A Starlark cc_toolchain configuration rule for Windows"""
+
+load("@rules_cc//cc:action_names.bzl", "ACTION_NAMES")
+load(
+ "@rules_cc//cc:cc_toolchain_config_lib.bzl",
+ "action_config",
+ "artifact_name_pattern",
+ "env_entry",
+ "env_set",
+ "feature",
+ "feature_set",
+ "flag_group",
+ "flag_set",
+ "tool",
+ "tool_path",
+ "variable_with_value",
+ "with_feature_set",
+)
+
+all_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.lto_backend,
+]
+
+all_cpp_compile_actions = [
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+]
+
+preprocessor_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.clif_match,
+]
+
+codegen_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+]
+
+all_link_actions = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+]
+
+def _use_msvc_toolchain(ctx):
+ return ctx.attr.cpu == "x64_windows" and (ctx.attr.compiler == "msvc-cl" or ctx.attr.compiler == "clang-cl")
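+
+# In other words: on x64_windows with the "msvc-cl" or "clang-cl" compiler, the
+# MSVC-style action configs and features below are used; any other compiler value
+# (e.g. the msys/mingw gcc toolchains) falls through to the gcc-style branch.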
+
+def _impl(ctx):
+ if _use_msvc_toolchain(ctx):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "object_file",
+ prefix = "",
+ extension = ".obj",
+ ),
+ artifact_name_pattern(
+ category_name = "static_library",
+ prefix = "",
+ extension = ".lib",
+ ),
+ artifact_name_pattern(
+ category_name = "alwayslink_static_library",
+ prefix = "",
+ extension = ".lo.lib",
+ ),
+ artifact_name_pattern(
+ category_name = "executable",
+ prefix = "",
+ extension = ".exe",
+ ),
+ artifact_name_pattern(
+ category_name = "dynamic_library",
+ prefix = "",
+ extension = ".dll",
+ ),
+ artifact_name_pattern(
+ category_name = "interface_library",
+ prefix = "",
+ extension = ".if.lib",
+ ),
+ ]
+ else:
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "executable",
+ prefix = "",
+ extension = ".exe",
+ ),
+ ]
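+
+# Effect of the patterns above (illustrative target names): on the MSVC branch a
+# library "foo" is written as foo.lib (foo.lo.lib if alwayslink, foo.if.lib for its
+# interface library, foo.dll for the dynamic library) and a binary "bar" as bar.exe;
+# the non-MSVC branch only overrides the executable extension to .exe.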
+
+ if _use_msvc_toolchain(ctx):
+ cpp_link_nodeps_dynamic_library_action = action_config(
+ action_name = ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ implies = [
+ "nologo",
+ "shared_flag",
+ "linkstamps",
+ "output_execpath_flags",
+ "input_param_flags",
+ "user_link_flags",
+ "default_link_flags",
+ "linker_subsystem_flag",
+ "linker_param_file",
+ "msvc_env",
+ "no_stripping",
+ "has_configured_linker_path",
+ "def_file",
+ ],
+ tools = [tool(path = ctx.attr.msvc_link_path)],
+ )
+
+ cpp_link_static_library_action = action_config(
+ action_name = ACTION_NAMES.cpp_link_static_library,
+ implies = [
+ "nologo",
+ "archiver_flags",
+ "input_param_flags",
+ "linker_param_file",
+ "msvc_env",
+ ],
+ tools = [tool(path = ctx.attr.msvc_lib_path)],
+ )
+
+ assemble_action = action_config(
+ action_name = ACTION_NAMES.assemble,
+ implies = [
+ "compiler_input_flags",
+ "compiler_output_flags",
+ "nologo",
+ "msvc_env",
+ "sysroot",
+ ],
+ tools = [tool(path = ctx.attr.msvc_ml_path)],
+ )
+
+ preprocess_assemble_action = action_config(
+ action_name = ACTION_NAMES.preprocess_assemble,
+ implies = [
+ "compiler_input_flags",
+ "compiler_output_flags",
+ "nologo",
+ "msvc_env",
+ "sysroot",
+ ],
+ tools = [tool(path = ctx.attr.msvc_ml_path)],
+ )
+
+ c_compile_action = action_config(
+ action_name = ACTION_NAMES.c_compile,
+ implies = [
+ "compiler_input_flags",
+ "compiler_output_flags",
+ "default_compile_flags",
+ "nologo",
+ "msvc_env",
+ "parse_showincludes",
+ "user_compile_flags",
+ "sysroot",
+ "unfiltered_compile_flags",
+ ],
+ tools = [tool(path = ctx.attr.msvc_cl_path)],
+ )
+
+ cpp_compile_action = action_config(
+ action_name = ACTION_NAMES.cpp_compile,
+ implies = [
+ "compiler_input_flags",
+ "compiler_output_flags",
+ "default_compile_flags",
+ "nologo",
+ "msvc_env",
+ "parse_showincludes",
+ "user_compile_flags",
+ "sysroot",
+ "unfiltered_compile_flags",
+ ],
+ tools = [tool(path = ctx.attr.msvc_cl_path)],
+ )
+
+ cpp_link_executable_action = action_config(
+ action_name = ACTION_NAMES.cpp_link_executable,
+ implies = [
+ "nologo",
+ "linkstamps",
+ "output_execpath_flags",
+ "input_param_flags",
+ "user_link_flags",
+ "default_link_flags",
+ "linker_subsystem_flag",
+ "linker_param_file",
+ "msvc_env",
+ "no_stripping",
+ ],
+ tools = [tool(path = ctx.attr.msvc_link_path)],
+ )
+
+ cpp_link_dynamic_library_action = action_config(
+ action_name = ACTION_NAMES.cpp_link_dynamic_library,
+ implies = [
+ "nologo",
+ "shared_flag",
+ "linkstamps",
+ "output_execpath_flags",
+ "input_param_flags",
+ "user_link_flags",
+ "default_link_flags",
+ "linker_subsystem_flag",
+ "linker_param_file",
+ "msvc_env",
+ "no_stripping",
+ "has_configured_linker_path",
+ "def_file",
+ ],
+ tools = [tool(path = ctx.attr.msvc_link_path)],
+ )
+
+ action_configs = [
+ assemble_action,
+ preprocess_assemble_action,
+ c_compile_action,
+ cpp_compile_action,
+ cpp_link_executable_action,
+ cpp_link_dynamic_library_action,
+ cpp_link_nodeps_dynamic_library_action,
+ cpp_link_static_library_action,
+ ]
+ else:
+ action_configs = []
+
+ if _use_msvc_toolchain(ctx):
+ msvc_link_env_feature = feature(
+ name = "msvc_link_env",
+ env_sets = [
+ env_set(
+ actions = all_link_actions +
+ [ACTION_NAMES.cpp_link_static_library],
+ env_entries = [env_entry(key = "LIB", value = ctx.attr.msvc_env_lib)],
+ ),
+ ],
+ )
+
+ shared_flag_feature = feature(
+ name = "shared_flag",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ],
+ flag_groups = [flag_group(flags = ["/DLL"])],
+ ),
+ ],
+ )
+
+ determinism_feature = feature(
+ name = "determinism",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "/wd4117",
+ "-D__DATE__=\"redacted\"",
+ "-D__TIMESTAMP__=\"redacted\"",
+ "-D__TIME__=\"redacted\"",
+ ] + (["-Wno-builtin-macro-redefined"] if ctx.attr.compiler == "clang-cl" else []),
+ ),
+ ],
+ ),
+ ],
+ )
+
+ sysroot_feature = feature(
+ name = "sysroot",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["--sysroot=%{sysroot}"],
+ iterate_over = "sysroot",
+ expand_if_available = "sysroot",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ unfiltered_compile_flags_feature = feature(
+ name = "unfiltered_compile_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["%{unfiltered_compile_flags}"],
+ iterate_over = "unfiltered_compile_flags",
+ expand_if_available = "unfiltered_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ compiler_param_file_feature = feature(
+ name = "compiler_param_file",
+ )
+
+ copy_dynamic_libraries_to_binary_feature = feature(
+ name = "copy_dynamic_libraries_to_binary",
+ )
+
+ input_param_flags_feature = feature(
+ name = "input_param_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/IMPLIB:%{interface_library_output_path}"],
+ expand_if_available = "interface_library_output_path",
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{libopts}"],
+ iterate_over = "libopts",
+ expand_if_available = "libopts",
+ ),
+ ],
+ ),
+ flag_set(
+ actions = all_link_actions +
+ [ACTION_NAMES.cpp_link_static_library],
+ flag_groups = [
+ flag_group(
+ iterate_over = "libraries_to_link",
+ flag_groups = [
+ flag_group(
+ iterate_over = "libraries_to_link.object_files",
+ flag_groups = [flag_group(flags = ["%{libraries_to_link.object_files}"])],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file_group",
+ ),
+ ),
+ flag_group(
+ flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "object_file",
+ ),
+ ),
+ flag_group(
+ flag_groups = [flag_group(flags = ["%{libraries_to_link.name}"])],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "interface_library",
+ ),
+ ),
+ flag_group(
+ flag_groups = [
+ flag_group(
+ flags = ["%{libraries_to_link.name}"],
+ expand_if_false = "libraries_to_link.is_whole_archive",
+ ),
+ flag_group(
+ flags = ["/WHOLEARCHIVE:%{libraries_to_link.name}"],
+ expand_if_true = "libraries_to_link.is_whole_archive",
+ ),
+ ],
+ expand_if_equal = variable_with_value(
+ name = "libraries_to_link.type",
+ value = "static_library",
+ ),
+ ),
+ ],
+ expand_if_available = "libraries_to_link",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ fastbuild_feature = feature(
+ name = "fastbuild",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/Od", "/Z7"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = [ctx.attr.fastbuild_mode_debug_flag, "/INCREMENTAL:NO"],
+ ),
+ ],
+ ),
+ ],
+ implies = ["generate_pdb_file"],
+ )
+
+ user_compile_flags_feature = feature(
+ name = "user_compile_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_compile_flags}"],
+ iterate_over = "user_compile_flags",
+ expand_if_available = "user_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ archiver_flags_feature = feature(
+ name = "archiver_flags",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.cpp_link_static_library],
+ flag_groups = [
+ flag_group(
+ flags = ["/OUT:%{output_execpath}"],
+ expand_if_available = "output_execpath",
+ ),
+ flag_group(
+ flags = ["/MACHINE:X64"],
+ ),
+ ],
+ ),
+ ],
+ )
+
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ctx.attr.default_link_flags)],
+ ),
+ ],
+ )
+
+ static_link_msvcrt_feature = feature(name = "static_link_msvcrt")
+
+ dynamic_link_msvcrt_debug_feature = feature(
+ name = "dynamic_link_msvcrt_debug",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/MDd"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrtd.lib"])],
+ ),
+ ],
+ requires = [feature_set(features = ["dbg"])],
+ )
+
+ dbg_feature = feature(
+ name = "dbg",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/Od", "/Z7"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = [ctx.attr.dbg_mode_debug_flag, "/INCREMENTAL:NO"],
+ ),
+ ],
+ ),
+ ],
+ implies = ["generate_pdb_file"],
+ )
+
+ opt_feature = feature(
+ name = "opt",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/O2"])],
+ ),
+ ],
+ implies = ["frame_pointer"],
+ )
+
+ supports_interface_shared_libraries_feature = feature(
+ name = "supports_interface_shared_libraries",
+ enabled = True,
+ )
+
+ user_link_flags_feature = feature(
+ name = "user_link_flags",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_link_flags}"],
+ iterate_over = "user_link_flags",
+ expand_if_available = "user_link_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ACTION_NAMES.clif_match,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "/DCOMPILER_MSVC",
+ "/DNOMINMAX",
+ "/D_WIN32_WINNT=0x0601",
+ "/D_CRT_SECURE_NO_DEPRECATE",
+ "/D_CRT_SECURE_NO_WARNINGS",
+ "/bigobj",
+ "/Zm500",
+ "/EHsc",
+ "/wd4351",
+ "/wd4291",
+ "/wd4250",
+ "/wd4996",
+ ],
+ ),
+ ],
+ ),
+ ],
+ )
+
+ msvc_compile_env_feature = feature(
+ name = "msvc_compile_env",
+ env_sets = [
+ env_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ],
+ env_entries = [env_entry(key = "INCLUDE", value = ctx.attr.msvc_env_include)],
+ ),
+ ],
+ )
+
+ preprocessor_defines_feature = feature(
+ name = "preprocessor_defines",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/D%{preprocessor_defines}"],
+ iterate_over = "preprocessor_defines",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ generate_pdb_file_feature = feature(
+ name = "generate_pdb_file",
+ )
+
+ output_execpath_flags_feature = feature(
+ name = "output_execpath_flags",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["/OUT:%{output_execpath}"],
+ expand_if_available = "output_execpath",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ dynamic_link_msvcrt_no_debug_feature = feature(
+ name = "dynamic_link_msvcrt_no_debug",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/MD"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["/DEFAULTLIB:msvcrt.lib"])],
+ ),
+ ],
+ requires = [
+ feature_set(features = ["fastbuild"]),
+ feature_set(features = ["opt"]),
+ ],
+ )
+
+ disable_assertions_feature = feature(
+ name = "disable_assertions",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/DNDEBUG"])],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+
+ has_configured_linker_path_feature = feature(name = "has_configured_linker_path")
+
+ supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
+
+ no_stripping_feature = feature(name = "no_stripping")
+
+ linker_param_file_feature = feature(
+ name = "linker_param_file",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions +
+ [ACTION_NAMES.cpp_link_static_library],
+ flag_groups = [
+ flag_group(
+ flags = ["@%{linker_param_file}"],
+ expand_if_available = "linker_param_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ ignore_noisy_warnings_feature = feature(
+ name = "ignore_noisy_warnings",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.cpp_link_static_library],
+ flag_groups = [flag_group(flags = ["/ignore:4221"])],
+ ),
+ ],
+ )
+
+ no_legacy_features_feature = feature(name = "no_legacy_features")
+
+ parse_showincludes_feature = feature(
+ name = "parse_showincludes",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ],
+ flag_groups = [flag_group(flags = ["/showIncludes"])],
+ ),
+ ],
+ )
+
+ static_link_msvcrt_no_debug_feature = feature(
+ name = "static_link_msvcrt_no_debug",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/MT"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmt.lib"])],
+ ),
+ ],
+ requires = [
+ feature_set(features = ["fastbuild"]),
+ feature_set(features = ["opt"]),
+ ],
+ )
+
+ treat_warnings_as_errors_feature = feature(
+ name = "treat_warnings_as_errors",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/WX"])],
+ ),
+ ],
+ )
+
+ windows_export_all_symbols_feature = feature(name = "windows_export_all_symbols")
+
+ no_windows_export_all_symbols_feature = feature(name = "no_windows_export_all_symbols")
+
+ include_paths_feature = feature(
+ name = "include_paths",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/I%{quote_include_paths}"],
+ iterate_over = "quote_include_paths",
+ ),
+ flag_group(
+ flags = ["/I%{include_paths}"],
+ iterate_over = "include_paths",
+ ),
+ flag_group(
+ flags = ["/I%{system_include_paths}"],
+ iterate_over = "system_include_paths",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ linkstamps_feature = feature(
+ name = "linkstamps",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["%{linkstamp_paths}"],
+ iterate_over = "linkstamp_paths",
+ expand_if_available = "linkstamp_paths",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ targets_windows_feature = feature(
+ name = "targets_windows",
+ enabled = True,
+ implies = ["copy_dynamic_libraries_to_binary"],
+ )
+
+ linker_subsystem_flag_feature = feature(
+ name = "linker_subsystem_flag",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["/SUBSYSTEM:CONSOLE"])],
+ ),
+ ],
+ )
+
+ static_link_msvcrt_debug_feature = feature(
+ name = "static_link_msvcrt_debug",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/MTd"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["/DEFAULTLIB:libcmtd.lib"])],
+ ),
+ ],
+ requires = [feature_set(features = ["dbg"])],
+ )
+
+ frame_pointer_feature = feature(
+ name = "frame_pointer",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/Oy-"])],
+ ),
+ ],
+ )
+
+ compiler_output_flags_feature = feature(
+ name = "compiler_output_flags",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.assemble],
+ flag_groups = [
+ flag_group(
+ flag_groups = [
+ flag_group(
+ flags = ["/Fo%{output_file}", "/Zi"],
+ expand_if_available = "output_file",
+ expand_if_not_available = "output_assembly_file",
+ ),
+ ],
+ expand_if_not_available = "output_preprocess_file",
+ ),
+ ],
+ ),
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ],
+ flag_groups = [
+ flag_group(
+ flag_groups = [
+ flag_group(
+ flags = ["/Fo%{output_file}"],
+ expand_if_not_available = "output_preprocess_file",
+ ),
+ ],
+ expand_if_available = "output_file",
+ expand_if_not_available = "output_assembly_file",
+ ),
+ flag_group(
+ flag_groups = [
+ flag_group(
+ flags = ["/Fa%{output_file}"],
+ expand_if_available = "output_assembly_file",
+ ),
+ ],
+ expand_if_available = "output_file",
+ ),
+ flag_group(
+ flag_groups = [
+ flag_group(
+ flags = ["/P", "/Fi%{output_file}"],
+ expand_if_available = "output_preprocess_file",
+ ),
+ ],
+ expand_if_available = "output_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ nologo_feature = feature(
+ name = "nologo",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.cpp_link_static_library,
+ ],
+ flag_groups = [flag_group(flags = ["/nologo"])],
+ ),
+ ],
+ )
+
+ smaller_binary_feature = feature(
+ name = "smaller_binary",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [flag_group(flags = ["/Gy", "/Gw"])],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["/OPT:ICF", "/OPT:REF"])],
+ with_features = [with_feature_set(features = ["opt"])],
+ ),
+ ],
+ )
+
+ compiler_input_flags_feature = feature(
+ name = "compiler_input_flags",
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["/c", "%{source_file}"],
+ expand_if_available = "source_file",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ def_file_feature = feature(
+ name = "def_file",
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [
+ flag_group(
+ flags = ["/DEF:%{def_file_path}", "/ignore:4070"],
+ expand_if_available = "def_file_path",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ msvc_env_feature = feature(
+ name = "msvc_env",
+ env_sets = [
+ env_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.cpp_link_static_library,
+ ],
+ env_entries = [
+ env_entry(key = "PATH", value = ctx.attr.msvc_env_path),
+ env_entry(key = "TMP", value = ctx.attr.msvc_env_tmp),
+ env_entry(key = "TEMP", value = ctx.attr.msvc_env_tmp),
+ ],
+ ),
+ ],
+ implies = ["msvc_compile_env", "msvc_link_env"],
+ )
+ features = [
+ no_legacy_features_feature,
+ nologo_feature,
+ has_configured_linker_path_feature,
+ no_stripping_feature,
+ targets_windows_feature,
+ copy_dynamic_libraries_to_binary_feature,
+ default_compile_flags_feature,
+ msvc_env_feature,
+ msvc_compile_env_feature,
+ msvc_link_env_feature,
+ include_paths_feature,
+ preprocessor_defines_feature,
+ parse_showincludes_feature,
+ generate_pdb_file_feature,
+ shared_flag_feature,
+ linkstamps_feature,
+ output_execpath_flags_feature,
+ archiver_flags_feature,
+ input_param_flags_feature,
+ linker_subsystem_flag_feature,
+ user_link_flags_feature,
+ default_link_flags_feature,
+ linker_param_file_feature,
+ static_link_msvcrt_feature,
+ static_link_msvcrt_no_debug_feature,
+ dynamic_link_msvcrt_no_debug_feature,
+ static_link_msvcrt_debug_feature,
+ dynamic_link_msvcrt_debug_feature,
+ dbg_feature,
+ fastbuild_feature,
+ opt_feature,
+ frame_pointer_feature,
+ disable_assertions_feature,
+ determinism_feature,
+ treat_warnings_as_errors_feature,
+ smaller_binary_feature,
+ ignore_noisy_warnings_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ unfiltered_compile_flags_feature,
+ compiler_param_file_feature,
+ compiler_output_flags_feature,
+ compiler_input_flags_feature,
+ def_file_feature,
+ windows_export_all_symbols_feature,
+ no_windows_export_all_symbols_feature,
+ supports_dynamic_linker_feature,
+ supports_interface_shared_libraries_feature,
+ ]
+ else:
+ targets_windows_feature = feature(
+ name = "targets_windows",
+ implies = ["copy_dynamic_libraries_to_binary"],
+ enabled = True,
+ )
+
+ copy_dynamic_libraries_to_binary_feature = feature(name = "copy_dynamic_libraries_to_binary")
+
+ gcc_env_feature = feature(
+ name = "gcc_env",
+ enabled = True,
+ env_sets = [
+ env_set(
+ actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ACTION_NAMES.cpp_link_static_library,
+ ],
+ env_entries = [
+ env_entry(key = "PATH", value = ctx.attr.tool_bin_path),
+ ],
+ ),
+ ],
+ )
+
+ default_compile_flags_feature = feature(
+ name = "default_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ACTION_NAMES.clif_match,
+ ],
+ flag_groups = [flag_group(flags = ["-std=gnu++0x"])],
+ ),
+ ],
+ )
+
+ default_link_flags_feature = feature(
+ name = "default_link_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = all_link_actions,
+ flag_groups = [flag_group(flags = ["-lstdc++"])],
+ ),
+ ],
+ )
+
+ supports_dynamic_linker_feature = feature(
+ name = "supports_dynamic_linker",
+ enabled = True,
+ )
+
+ if ctx.attr.cpu == "x64_windows" and ctx.attr.compiler == "mingw-gcc":
+ compiler_param_file_feature = feature(
+ name = "compiler_param_file",
+ )
+
+ features = [
+ targets_windows_feature,
+ copy_dynamic_libraries_to_binary_feature,
+ gcc_env_feature,
+ default_compile_flags_feature,
+ compiler_param_file_feature,
+ default_link_flags_feature,
+ supports_dynamic_linker_feature,
+ ]
+ else:
+ supports_pic_feature = feature(
+ name = "supports_pic",
+ enabled = True,
+ )
+ supports_start_end_lib_feature = feature(
+ name = "supports_start_end_lib",
+ enabled = True,
+ )
+
+ dbg_feature = feature(name = "dbg")
+
+ opt_feature = feature(name = "opt")
+
+ sysroot_feature = feature(
+ name = "sysroot",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["--sysroot=%{sysroot}"],
+ expand_if_available = "sysroot",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ fdo_optimize_feature = feature(
+ name = "fdo_optimize",
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile],
+ flag_groups = [
+ flag_group(
+ flags = [
+ "-fprofile-use=%{fdo_profile_path}",
+ "-fprofile-correction",
+ ],
+ expand_if_available = "fdo_profile_path",
+ ),
+ ],
+ ),
+ ],
+ provides = ["profile"],
+ )
+
+ user_compile_flags_feature = feature(
+ name = "user_compile_flags",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ACTION_NAMES.clif_match,
+ ],
+ flag_groups = [
+ flag_group(
+ flags = ["%{user_compile_flags}"],
+ iterate_over = "user_compile_flags",
+ expand_if_available = "user_compile_flags",
+ ),
+ ],
+ ),
+ ],
+ )
+
+ features = [
+ targets_windows_feature,
+ copy_dynamic_libraries_to_binary_feature,
+ gcc_env_feature,
+ supports_pic_feature,
+ supports_start_end_lib_feature,
+ default_compile_flags_feature,
+ default_link_flags_feature,
+ fdo_optimize_feature,
+ supports_dynamic_linker_feature,
+ dbg_feature,
+ opt_feature,
+ user_compile_flags_feature,
+ sysroot_feature,
+ ]
+
+ tool_paths = [
+ tool_path(name = name, path = path)
+ for name, path in ctx.attr.tool_paths.items()
+ ]
+
+ return cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ artifact_name_patterns = artifact_name_patterns,
+ cxx_builtin_include_directories = ctx.attr.cxx_builtin_include_directories,
+ toolchain_identifier = ctx.attr.toolchain_identifier,
+ host_system_name = ctx.attr.host_system_name,
+ target_system_name = ctx.attr.target_system_name,
+ target_cpu = ctx.attr.cpu,
+ target_libc = ctx.attr.target_libc,
+ compiler = ctx.attr.compiler,
+ abi_version = ctx.attr.abi_version,
+ abi_libc_version = ctx.attr.abi_libc_version,
+ tool_paths = tool_paths,
+ )
+
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "abi_libc_version": attr.string(),
+ "abi_version": attr.string(),
+ "compiler": attr.string(),
+ "cpu": attr.string(mandatory = True),
+ "cxx_builtin_include_directories": attr.string_list(),
+ "dbg_mode_debug_flag": attr.string(),
+ "default_link_flags": attr.string_list(default = []),
+ "fastbuild_mode_debug_flag": attr.string(),
+ "host_system_name": attr.string(),
+ "msvc_cl_path": attr.string(default = "vc_installation_error.bat"),
+ "msvc_env_include": attr.string(default = "msvc_not_found"),
+ "msvc_env_lib": attr.string(default = "msvc_not_found"),
+ "msvc_env_path": attr.string(default = "msvc_not_found"),
+ "msvc_env_tmp": attr.string(default = "msvc_not_found"),
+ "msvc_lib_path": attr.string(default = "vc_installation_error.bat"),
+ "msvc_link_path": attr.string(default = "vc_installation_error.bat"),
+ "msvc_ml_path": attr.string(default = "vc_installation_error.bat"),
+ "target_libc": attr.string(),
+ "target_system_name": attr.string(),
+ "tool_bin_path": attr.string(default = "not_found"),
+ "tool_paths": attr.string_dict(),
+ "toolchain_identifier": attr.string(),
+ },
+ provides = [CcToolchainConfigInfo],
+)
diff --git a/cc/repositories.bzl b/cc/repositories.bzl
new file mode 100644
index 0000000..3ff7dbc
--- /dev/null
+++ b/cc/repositories.bzl
@@ -0,0 +1,10 @@
+"""Repository rules entry point module for rules_cc."""
+
+load("//cc/private/toolchain:cc_configure.bzl", "cc_configure")
+
+def rules_cc_dependencies():
+ pass
+
+# buildifier: disable=unnamed-macro
+def rules_cc_toolchains(*args):
+ cc_configure(*args)
diff --git a/cc/runfiles/BUILD b/cc/runfiles/BUILD
new file mode 100644
index 0000000..887e1f2
--- /dev/null
+++ b/cc/runfiles/BUILD
@@ -0,0 +1,7 @@
+licenses(["notice"])
+
+alias(
+ name = "runfiles",
+ actual = "@bazel_tools//tools/cpp/runfiles",
+ visibility = ["//visibility:public"],
+)
diff --git a/cc/system_library.bzl b/cc/system_library.bzl
new file mode 100644
index 0000000..f87bb35
--- /dev/null
+++ b/cc/system_library.bzl
@@ -0,0 +1,475 @@
+"""system_library is a repository rule for importing system libraries"""
+
+BAZEL_LIB_ADDITIONAL_PATHS_ENV_VAR = "BAZEL_LIB_ADDITIONAL_PATHS"
+BAZEL_LIB_OVERRIDE_PATHS_ENV_VAR = "BAZEL_LIB_OVERRIDE_PATHS"
+BAZEL_INCLUDE_ADDITIONAL_PATHS_ENV_VAR = "BAZEL_INCLUDE_ADDITIONAL_PATHS"
+BAZEL_INCLUDE_OVERRIDE_PATHS_ENV_VAR = "BAZEL_INCLUDE_OVERRIDE_PATHS"
+ENV_VAR_SEPARATOR = ","
+ENV_VAR_ASSIGNMENT = "="
+
+def _make_flags(flag_values, flag):
+ flags = []
+ if flag_values:
+ for s in flag_values:
+ flags.append(flag + s)
+ return " ".join(flags)
+
+def _split_env_var(repo_ctx, var_name):
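+    # The variable has the form "name1=value1,name2=value2,..."; repeated names
+    # accumulate multiple values under one key.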
+ value = repo_ctx.os.environ.get(var_name)
+ if value:
+ assignments = value.split(ENV_VAR_SEPARATOR)
+ dict = {}
+ for assignment in assignments:
+ pair = assignment.split(ENV_VAR_ASSIGNMENT)
+ if len(pair) != 2:
+ fail(
+ "Assignments should have form 'name=value', " +
+ "but encountered {} in env variable {}"
+ .format(assignment, var_name),
+ )
+ key, value = pair[0], pair[1]
+ if not dict.get(key):
+ dict[key] = []
+ dict[key].append(value)
+ return dict
+ else:
+ return {}
+
+def _get_list_from_env_var(repo_ctx, var_name, key):
+ return _split_env_var(repo_ctx, var_name).get(key, default = [])
+
+def _execute_bash(repo_ctx, cmd):
+ return repo_ctx.execute(["/bin/bash", "-c", cmd]).stdout.strip("\n")
+
+def _find_linker(repo_ctx):
+ ld = _execute_bash(repo_ctx, "which ld")
+ lld = _execute_bash(repo_ctx, "which lld")
+ if ld:
+ return ld
+ elif lld:
+ return lld
+ else:
+ fail("No linker found")
+
+def _find_compiler(repo_ctx):
+ gcc = _execute_bash(repo_ctx, "which g++")
+ clang = _execute_bash(repo_ctx, "which clang++")
+ if gcc:
+ return gcc
+ elif clang:
+ return clang
+ else:
+ fail("No compiler found")
+
+def _find_lib_path(repo_ctx, lib_name, archive_names, lib_path_hints):
+ override_paths = _get_list_from_env_var(
+ repo_ctx,
+ BAZEL_LIB_OVERRIDE_PATHS_ENV_VAR,
+ lib_name,
+ )
+ additional_paths = _get_list_from_env_var(
+ repo_ctx,
+ BAZEL_LIB_ADDITIONAL_PATHS_ENV_VAR,
+ lib_name,
+ )
+
+ # Directories will be searched in order
+ path_flags = _make_flags(
+ override_paths + lib_path_hints + additional_paths,
+ "-L",
+ )
+ linker = _find_linker(repo_ctx)
+ for archive_name in archive_names:
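+        # With -verbose the linker prints every path it attempts to open; keep
+        # the first "succeeded" line and strip the surrounding text to recover
+        # the archive's full path.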
+ cmd = """
+ {} -verbose -l:{} {} 2>/dev/null | \\
+ grep succeeded | \\
+ head -1 | \\
+ sed -e 's/^\\s*attempt to open //' -e 's/ succeeded\\s*$//'
+ """.format(
+ linker,
+ archive_name,
+ path_flags,
+ )
+ path = _execute_bash(repo_ctx, cmd)
+ if path:
+ return (archive_name, path)
+ return (None, None)
+
+def _find_header_path(repo_ctx, lib_name, header_name, includes):
+ override_paths = _get_list_from_env_var(
+ repo_ctx,
+ BAZEL_INCLUDE_OVERRIDE_PATHS_ENV_VAR,
+ lib_name,
+ )
+ additional_paths = _get_list_from_env_var(
+ repo_ctx,
+ BAZEL_INCLUDE_ADDITIONAL_PATHS_ENV_VAR,
+ lib_name,
+ )
+
+ compiler = _find_compiler(repo_ctx)
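+    # -Wp,-v makes the preprocessor print its default include search paths;
+    # the sed calls keep only the indented path lines and strip the leading
+    # whitespace.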
+ cmd = """
+ print | \\
+ {} -Wp,-v -x c++ - -fsyntax-only 2>&1 | \\
+ sed -n -e '/^\\s\\+/p' | \\
+ sed -e 's/^[ \t]*//'
+ """.format(compiler)
+ system_includes = _execute_bash(repo_ctx, cmd).split("\n")
+ all_includes = (override_paths + includes +
+ system_includes + additional_paths)
+
+ for directory in all_includes:
+ cmd = """
+ test -f "{dir}/{hdr}" && echo "{dir}/{hdr}"
+ """.format(dir = directory, hdr = header_name)
+ result = _execute_bash(repo_ctx, cmd)
+ if result:
+ return result
+ return None
+
+def _system_library_impl(repo_ctx):
+ repo_name = repo_ctx.attr.name
+ includes = repo_ctx.attr.includes
+ hdrs = repo_ctx.attr.hdrs
+ optional_hdrs = repo_ctx.attr.optional_hdrs
+ deps = repo_ctx.attr.deps
+ lib_path_hints = repo_ctx.attr.lib_path_hints
+ static_lib_names = repo_ctx.attr.static_lib_names
+ shared_lib_names = repo_ctx.attr.shared_lib_names
+
+ static_lib_name, static_lib_path = _find_lib_path(
+ repo_ctx,
+ repo_name,
+ static_lib_names,
+ lib_path_hints,
+ )
+ shared_lib_name, shared_lib_path = _find_lib_path(
+ repo_ctx,
+ repo_name,
+ shared_lib_names,
+ lib_path_hints,
+ )
+
+ if not static_lib_path and not shared_lib_path:
+ fail("Library {} could not be found".format(repo_name))
+
+ hdr_names = []
+ hdr_paths = []
+ for hdr in hdrs:
+ hdr_path = _find_header_path(repo_ctx, repo_name, hdr, includes)
+ if hdr_path:
+ repo_ctx.symlink(hdr_path, hdr)
+ hdr_names.append(hdr)
+ hdr_paths.append(hdr_path)
+ else:
+ fail("Could not find required header {}".format(hdr))
+
+ for hdr in optional_hdrs:
+ hdr_path = _find_header_path(repo_ctx, repo_name, hdr, includes)
+ if hdr_path:
+ repo_ctx.symlink(hdr_path, hdr)
+ hdr_names.append(hdr)
+ hdr_paths.append(hdr_path)
+
+ hdrs_param = "hdrs = {},".format(str(hdr_names))
+
+ # This is needed for the case when quote-includes and system-includes
+ # alternate in the include chain, i.e.
+ # #include <SDL2/SDL.h> -> #include "SDL_main.h"
+ # -> #include <SDL2/_real_SDL_config.h> -> #include "SDL_platform.h"
+ # The problem is that the quote-includes are assumed to be
+ # in the same directory as the header they are included from -
+ # they have no subdir prefix ("SDL2/") in their paths
+ include_subdirs = {}
+ for hdr in hdr_names:
+ path_segments = hdr.split("/")
+ path_segments.pop()
+ current_path_segments = ["external", repo_name]
+ for segment in path_segments:
+ current_path_segments.append(segment)
+ current_path = "/".join(current_path_segments)
+ include_subdirs.update({current_path: None})
+
+ includes_param = "includes = {},".format(str(include_subdirs.keys()))
+
+ deps_names = []
+ for dep in deps:
+ dep_name = repr("@" + dep)
+ deps_names.append(dep_name)
+ deps_param = "deps = [{}],".format(",".join(deps_names))
+
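+    # The genrules below copy headers and libraries into a "remote/" directory;
+    # the generated BUILD file selects those copies when building with remote
+    # execution (@bazel_tools//src/conditions:remote) and the local symlinks
+    # otherwise.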
+ link_hdrs_command = "mkdir -p $(RULEDIR)/remote \n"
+ remote_hdrs = []
+ for path, hdr in zip(hdr_paths, hdr_names):
+ remote_hdr = "remote/" + hdr
+ remote_hdrs.append(remote_hdr)
+ link_hdrs_command += "cp {path} $(RULEDIR)/{hdr}\n ".format(
+ path = path,
+ hdr = remote_hdr,
+ )
+
+ link_remote_static_lib_genrule = ""
+ link_remote_shared_lib_genrule = ""
+ remote_static_library_param = ""
+ remote_shared_library_param = ""
+ static_library_param = ""
+ shared_library_param = ""
+
+ if static_lib_path:
+ repo_ctx.symlink(static_lib_path, static_lib_name)
+ static_library_param = "static_library = \"{}\",".format(
+ static_lib_name,
+ )
+ remote_static_library = "remote/" + static_lib_name
+ link_library_command = """
+mkdir -p $(RULEDIR)/remote && cp {path} $(RULEDIR)/{lib}""".format(
+ path = static_lib_path,
+ lib = remote_static_library,
+ )
+ remote_static_library_param = """
+static_library = "remote_link_static_library","""
+ link_remote_static_lib_genrule = """
+genrule(
+ name = "remote_link_static_library",
+ outs = ["{remote_static_library}"],
+ cmd = {link_library_command}
+)
+""".format(
+ link_library_command = repr(link_library_command),
+ remote_static_library = remote_static_library,
+ )
+
+ if shared_lib_path:
+ repo_ctx.symlink(shared_lib_path, shared_lib_name)
+ shared_library_param = "shared_library = \"{}\",".format(
+ shared_lib_name,
+ )
+ remote_shared_library = "remote/" + shared_lib_name
+ link_library_command = """
+mkdir -p $(RULEDIR)/remote && cp {path} $(RULEDIR)/{lib}""".format(
+ path = shared_lib_path,
+ lib = remote_shared_library,
+ )
+ remote_shared_library_param = """
+shared_library = "remote_link_shared_library","""
+ link_remote_shared_lib_genrule = """
+genrule(
+ name = "remote_link_shared_library",
+ outs = ["{remote_shared_library}"],
+ cmd = {link_library_command}
+)
+""".format(
+ link_library_command = repr(link_library_command),
+ remote_shared_library = remote_shared_library,
+ )
+
+ repo_ctx.file(
+ "BUILD",
+ executable = False,
+ content =
+ """
+load("@bazel_tools//tools/build_defs/cc:cc_import.bzl", "cc_import")
+cc_import(
+ name = "local_includes",
+ {static_library}
+ {shared_library}
+ {hdrs}
+ {deps}
+ {includes}
+)
+
+genrule(
+ name = "remote_link_headers",
+ outs = {remote_hdrs},
+ cmd = {link_hdrs_command}
+)
+
+{link_remote_static_lib_genrule}
+
+{link_remote_shared_lib_genrule}
+
+cc_import(
+ name = "remote_includes",
+ hdrs = [":remote_link_headers"],
+ {remote_static_library}
+ {remote_shared_library}
+ {deps}
+ {includes}
+)
+
+alias(
+ name = "{name}",
+ actual = select({{
+ "@bazel_tools//src/conditions:remote": "remote_includes",
+ "//conditions:default": "local_includes",
+ }}),
+ visibility = ["//visibility:public"],
+)
+""".format(
+ static_library = static_library_param,
+ shared_library = shared_library_param,
+ hdrs = hdrs_param,
+ deps = deps_param,
+ hdr_names = str(hdr_names),
+ link_hdrs_command = repr(link_hdrs_command),
+ name = repo_name,
+ includes = includes_param,
+ remote_hdrs = remote_hdrs,
+ link_remote_static_lib_genrule = link_remote_static_lib_genrule,
+ link_remote_shared_lib_genrule = link_remote_shared_lib_genrule,
+ remote_static_library = remote_static_library_param,
+ remote_shared_library = remote_shared_library_param,
+ ),
+ )
+
+system_library = repository_rule(
+ implementation = _system_library_impl,
+ local = True,
+ remotable = True,
+ environ = [
+ BAZEL_INCLUDE_ADDITIONAL_PATHS_ENV_VAR,
+ BAZEL_INCLUDE_OVERRIDE_PATHS_ENV_VAR,
+ BAZEL_LIB_ADDITIONAL_PATHS_ENV_VAR,
+ BAZEL_LIB_OVERRIDE_PATHS_ENV_VAR,
+ ],
+ attrs = {
+ "deps": attr.string_list(doc = """
+List of names of system libraries this target depends upon.
+"""),
+ "hdrs": attr.string_list(
+ mandatory = True,
+ allow_empty = False,
+ doc = """
+List of the library's public headers which must be imported.
+""",
+ ),
+ "includes": attr.string_list(doc = """
+List of directories that should be browsed when looking for headers.
+"""),
+ "lib_path_hints": attr.string_list(doc = """
+List of directories that should be browsed when looking for library archives.
+"""),
+ "optional_hdrs": attr.string_list(doc = """
+List of library's private headers.
+"""),
+ "shared_lib_names": attr.string_list(doc = """
+List of possible shared library names in order of preference.
+"""),
+ "static_lib_names": attr.string_list(doc = """
+List of possible static library names in order of preference.
+"""),
+ },
+ doc =
+ """system_library is a repository rule for importing system libraries
+
+`system_library` is a repository rule for safely depending on system-provided
+libraries on Linux. It can be used with remote caching and remote execution.
+Under the hood it uses gcc/clang for finding the library files and headers
+and symlinks them into the build directory. Symlinking allows Bazel to take
+these files into account when it calculates a checksum of the project.
+This prevents cache poisoning from happening.
+
+Currently `system_library` requires two experimental flags:
+--experimental_starlark_cc_import
+--experimental_repo_remote_exec
+
+A typical usage looks like this:
+WORKSPACE
+```
+system_library(
+ name = "jpeg",
+ hdrs = ["jpeglib.h"],
+ shared_lib_names = ["libjpeg.so, libjpeg.so.62"],
+ static_lib_names = ["libjpeg.a"],
+ includes = ["/usr/additional_includes"],
+ lib_path_hints = ["/usr/additional_libs", "/usr/some/other_path"]
+ optional_hdrs = [
+ "jconfig.h",
+ "jmorecfg.h",
+ ],
+)
+
+system_library(
+ name = "bar",
+ hdrs = ["bar.h"],
+ shared_lib_names = ["libbar.so"],
+ deps = ["jpeg"]
+
+)
+```
+
+BUILD
+```
+cc_binary(
+ name = "foo",
+ srcs = ["foo.cc"],
+ deps = ["@bar"]
+)
+```
+
+foo.cc
+```
+#include "jpeglib.h"
+#include "bar.h"
+
+[code using symbols from jpeglib and bar]
+```
+
+`system_library` requires users to specify at least one header
+(as it makes no sense to import a library without headers).
+Public headers of a library (i.e. those included in the user-written code,
+like `jpeglib.h` in the example above) should be put in the `hdrs` param, as they
+are required for the library to work. However, some libraries may use more
+"private" headers. They should be imported as well, but their names may differ
+from system to system. They should be specified in the `optional_hdrs` param.
+The build will not fail if some of them are not found, so it's safe to put a
+superset there, containing all possible combinations of names for different
+versions/distributions. It's up to the user to determine which headers are
+required for the library to work.
+
+One `system_library` target always imports exactly one library.
+Users can specify many potential names for the library file,
+as these names can differ from system to system. The order of names establishes
+the order of preference. As some libraries can be linked both statically
+and dynamically, the names of files of each kind can be specified separately.
+The `system_library` rule will try to find library archives of both kinds, but it's
+up to the top-level target (for example, `cc_binary`) to decide which kind of
+linking will be used.
+
+The `system_library` rule depends on gcc/clang (whichever is installed) for
+finding the actual locations of library archives and headers.
+Libraries installed in a standard way by a package manager
+(`sudo apt install libjpeg-dev`) are usually placed in one of directories
+searched by the compiler/linker by default - on Ubuntu, most library archives
+are stored in `/usr/lib/x86_64-linux-gnu/` and their headers in
+`/usr/include/`. If the maintainer of a project expects the files
+to be installed in a non-standard location, they can use the `includes`
+parameter to add directories to the search path for headers
+and `lib_path_hints` to add directories to the search path for library
+archives.
+
+Users building the project can override or extend these search paths by
+providing these environment variables to the build:
+BAZEL_INCLUDE_ADDITIONAL_PATHS, BAZEL_INCLUDE_OVERRIDE_PATHS,
+BAZEL_LIB_ADDITIONAL_PATHS, BAZEL_LIB_OVERRIDE_PATHS.
+The syntax for setting the env variables is:
+`<library>=<path>,<library>=<path2>`.
+Users can provide multiple paths for one library by repeating this segment:
+`<library>=<path>`.
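+
+For example, to add two extra search directories for the jpeg library
+(hypothetical paths):
+
+```
+BAZEL_LIB_ADDITIONAL_PATHS=jpeg=/opt/jpeg/lib,jpeg=/usr/local/jpeg/lib
+```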
+
+So in order to build the example presented above but with custom paths for the
+jpeg lib, one would use the following command:
+
+```
+bazel build //:foo \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ --action_env=BAZEL_LIB_OVERRIDE_PATHS=jpeg=/custom/libraries/path \
+ --action_env=BAZEL_INCLUDE_OVERRIDE_PATHS=jpeg=/custom/include/path,jpeg=/inc
+```
+
+Some libraries can depend on other libraries. The `system_library` rule provides
+a `deps` parameter for specifying such relationships. `system_library` targets
+can depend only on other system libraries.
+""",
+)
diff --git a/cc/toolchain_utils.bzl b/cc/toolchain_utils.bzl
new file mode 100644
index 0000000..bec575e
--- /dev/null
+++ b/cc/toolchain_utils.bzl
@@ -0,0 +1,31 @@
+# pylint: disable=g-bad-file-header
+# Copyright 2016 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Deprecated, use find_cc_toolchain.bzl
+"""
+
+load(":find_cc_toolchain.bzl", "find_cc_toolchain")
+
+def find_cpp_toolchain(ctx):
+ """Deprecated, use `find_cc_toolchain` instead.
+
+ Args:
+ ctx: See `find_cc_toolchain`.
+
+ Returns:
+ A CcToolchainInfo.
+ """
+ return find_cc_toolchain(ctx)
diff --git a/examples/BUILD b/examples/BUILD
new file mode 100644
index 0000000..c7da75d
--- /dev/null
+++ b/examples/BUILD
@@ -0,0 +1,43 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
+
+# A collection of examples showing the usage of rules_cc
+licenses(["notice"])
+
+bool_flag(
+ name = "incompatible_link_once",
+ build_setting_default = False,
+ visibility = ["//visibility:public"],
+)
+
+bool_flag(
+ name = "enable_permissions_check",
+ build_setting_default = False,
+ visibility = ["//visibility:public"],
+)
+
+bool_flag(
+ name = "experimental_debug",
+ build_setting_default = False,
+ visibility = ["//visibility:public"],
+)
+
+bzl_library(
+ name = "experimental_cc_shared_library_bzl",
+ srcs = ["experimental_cc_shared_library.bzl"],
+ visibility = ["//visibility:private"],
+)
diff --git a/examples/custom_toolchain/BUILD b/examples/custom_toolchain/BUILD
new file mode 100644
index 0000000..371fdfd
--- /dev/null
+++ b/examples/custom_toolchain/BUILD
@@ -0,0 +1,118 @@
+# Copyright 2021 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Proof-of-concept example showing how to write a custom C++ toolchain.
+#
+# Important documentation:
+#
+# - https://docs.bazel.build/versions/master/platforms-intro.html#c
+# - https://docs.bazel.build/versions/master/tutorial/cc-toolchain-config.html
+# - https://docs.bazel.build/versions/master/be/c-cpp.html#cc_toolchain
+#
+# There are two ways to select C++ toolchains:
+#
+# - NEW (USE IF POSSIBLE): with the --platforms flag
+# - LEGACY: with the --crosstool_top and --cpu flags
+#
+# See https://docs.bazel.build/versions/master/platforms-intro.html#c for details.
+#
+# This example demonstrates both approaches.
+
+load("@rules_cc//cc:defs.bzl", "cc_library", "cc_toolchain", "cc_toolchain_suite")
+
+# Load the Starlark logic defining the toolchain's behavior. For example: what
+# program runs to compile a source file and how its command line is
+# constructed. See toolchain_config.bzl for details.
+load(":toolchain_config.bzl", "cc_toolchain_config")
+
+# The library we want to build. Building this calls two C++ actions: compile (.cc ->
+# .o) and archive (.o -> .a).
+cc_library(
+ name = "buildme",
+ srcs = ["buildme.cc"],
+)
+
+# This example intentionally makes the cc_toolchain_config definition
+# simple. You could alternatively add attributes to support multiple
+# cc_toolchain_config targets with finer customization.
+cc_toolchain_config(
+ name = "toolchain_semantics",
+)
+
+# Register the toolchain with Bazel. Most of these attributes just tell Bazel
+# where to find the files needed to run C++ commands. The toolchain_config
+# attribute registers the behavior specification declared above.
+cc_toolchain(
+ name = "my_custom_toolchain",
+ all_files = ":toolchain_files",
+ ar_files = ":toolchain_files",
+ compiler_files = ":toolchain_files",
+ dwp_files = ":toolchain_files",
+ linker_files = ":toolchain_files",
+ objcopy_files = ":toolchain_files",
+ strip_files = ":toolchain_files",
+ toolchain_config = ":toolchain_semantics",
+)
+
+filegroup(
+ name = "toolchain_files",
+ srcs = [
+ "sample_compiler",
+ "sample_linker",
+ ],
+)
+
+# Implements legacy toolchain selection.
+#
+# Setting --crosstool_top here registers the set of available
+# toolchains. Setting --cpu to one of the toolchains attribute's keys selects a
+# toolchain.
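+#
+# Example invocation (see README.md):
+#   bazel build //examples/custom_toolchain:buildme \
+#     --crosstool_top=//examples/custom_toolchain:legacy_selector --cpu=x86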
+cc_toolchain_suite(
+ name = "legacy_selector",
+ toolchains = {
+ "x86": ":my_custom_toolchain",
+ },
+)
+
+# Implements platform-based (recommended) toolchain selection.
+#
+# See https://docs.bazel.build/versions/master/platforms-intro.html. The main
+# differences are:
+#
+# 1. --cpu / --crosstool_top are replaced by a platform() definition with
+# much more customizable properties. For example, a platform can specify
+# OS, device type (server, phone, tablet) or custom hardware extensions.
+# 2. All languages can support platform-based toolchains. A single --platforms
+# value can choose C++, Python, Scala, and all other toolchains in your
+# build. This is especially useful for multi-language builds.
+# 3. Platforms support features like incompatible target skipping:
+# https://docs.bazel.build/versions/master/platforms.html#skipping-incompatible-targets.
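+#
+# Example invocation (see README.md):
+#   bazel build //examples/custom_toolchain:buildme \
+#     --platforms=//examples/custom_toolchain:x86_platform \
+#     --extra_toolchains=//examples/custom_toolchain:platform_based_toolchain \
+#     --incompatible_enable_cc_toolchain_resolution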
+toolchain(
+ name = "platform_based_toolchain",
+ # Trigger this toolchain for x86-compatible platforms.
+ # See https://github.com/bazelbuild/platforms.
+ target_compatible_with = ["@platforms//cpu:x86_64"],
+ # Register this toolchain with platforms.
+ toolchain = ":my_custom_toolchain",
+ # The public interface for all C++ toolchains. Starlark rules that use C++
+ # access the toolchain through this interface.
+ toolchain_type = "@bazel_tools//tools/cpp:toolchain_type",
+)
+
+# Define a platform matching any x86-compatible toolchain. See
+# https://docs.bazel.build/versions/master/platforms.html.
+platform(
+ name = "x86_platform",
+ constraint_values = ["@platforms//cpu:x86_64"],
+)
diff --git a/examples/custom_toolchain/README.md b/examples/custom_toolchain/README.md
new file mode 100644
index 0000000..df7f486
--- /dev/null
+++ b/examples/custom_toolchain/README.md
@@ -0,0 +1,78 @@
+# Writing a custom C++ toolchain
+
+This example shows how to define and use a simple custom C++ toolchain.
+
+Output is non-functional: simple scripts stand in for the compiler and linker
+and just write placeholder output (see `sample_compiler` and `sample_linker`).
+
+[BUILD](BUILD) provides a detailed implementation walkthrough. The fundamental
+sequence is:
+
+1. Define the toolchain.
+1. Define how to invoke the toolchain.
+
+`1` is C++-specific: the logic and structure depend specifically on C++'s
+language model. Other languages have their own models.
+
+`2` supports two variations. `--crosstool_top` / `--cpu`, the legacy version,
+is C++-specific. `--platforms`, the modern version, is much more generic and
+supports all languages and features like [incompatible target
+skipping](https://docs.bazel.build/versions/master/platforms.html#skipping-incompatible-targets). See
+[Building with
+Platforms](https://docs.bazel.build/versions/master/platforms-intro.html) and
+its [C++
+notes](https://docs.bazel.build/versions/master/platforms-intro.html#c) for
+a full overview.
+
+## Building with the default toolchain
+
+```
+$ bazel clean
+$ bazel build //examples/custom_toolchain:buildme
+$ file bazel-bin/examples/custom_toolchain/libbuildme.a
+bazel-bin/examples/custom_toolchain/libbuildme.a: current ar archive
+```
+
+## Custom toolchain with platforms
+
+This mode requires `--incompatible_enable_cc_toolchain_resolution`. Without this
+flag, `--platforms` and `--extra_toolchains` are ignored and the default
+toolchain is used instead.
+
+```
+$ bazel clean
+$ bazel build //examples/custom_toolchain:buildme --platforms=//examples/custom_toolchain:x86_platform --extra_toolchains=//examples/custom_toolchain:platform_based_toolchain --incompatible_enable_cc_toolchain_resolution
+DEBUG: /usr/local/google/home/gregce/bazel/rules_cc/examples/custom_toolchain/toolchain_config.bzl:17:10: Invoking my custom toolchain!
+INFO: From Compiling examples/custom_toolchain/buildme.cc:
+examples/custom_toolchain/sample_compiler: running sample cc_library compiler (produces .o output).
+INFO: From Linking examples/custom_toolchain/libbuildme.a:
+examples/custom_toolchain/sample_linker: running sample cc_library linker (produces .a output).
+
+$ cat bazel-bin/examples/custom_toolchain/libbuildme.a
+examples/custom_toolchain/sample_linker: sample output
+```
+
+This example uses a long command line for demonstration purposes. A real project
+would [register toolchains](https://docs.bazel.build/versions/master/toolchains.html#registering-and-building-with-toolchains)
+in `WORKSPACE` and auto-set
+`--incompatible_enable_cc_toolchain_resolution`. That reduces the command to:
+
+```
+$ bazel build //examples/custom_toolchain:buildme --platforms=//examples/custom_toolchain:x86_platform
+```
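+
+A minimal sketch of that registration, reusing this example's labels (a real
+project would substitute its own toolchain label):
+
+```
+# WORKSPACE
+register_toolchains("//examples/custom_toolchain:platform_based_toolchain")
+```
+
+```
+# .bazelrc
+build --incompatible_enable_cc_toolchain_resolution
+```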
+
+## Custom toolchain with legacy selection
+
+```
+$ bazel clean
+$ bazel build //examples/custom_toolchain:buildme --crosstool_top=//examples/custom_toolchain:legacy_selector --cpu=x86
+DEBUG: /usr/local/google/home/gregce/bazel/rules_cc/examples/custom_toolchain/toolchain_config.bzl:17:10: Invoking my custom toolchain!
+INFO: From Compiling examples/custom_toolchain/buildme.cc:
+examples/custom_toolchain/sample_compiler: running sample cc_library compiler (produces .o output).
+INFO: From Linking examples/custom_toolchain/libbuildme.a:
+examples/custom_toolchain/sample_linker: running sample cc_library linker (produces .a output).
+
+$ cat bazel-bin/examples/custom_toolchain/libbuildme.a
+examples/custom_toolchain/sample_linker: sample output
+```
+
diff --git a/examples/custom_toolchain/buildme.cc b/examples/custom_toolchain/buildme.cc
new file mode 100644
index 0000000..459ade0
--- /dev/null
+++ b/examples/custom_toolchain/buildme.cc
@@ -0,0 +1,4 @@
+
+int some_function() {
+ return 0;
+}
diff --git a/examples/custom_toolchain/sample_compiler b/examples/custom_toolchain/sample_compiler
new file mode 100755
index 0000000..a1a1458
--- /dev/null
+++ b/examples/custom_toolchain/sample_compiler
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Sample script demonstrating custom C++ toolchain selection: handles
+# the command that translates a cc_library's .cc (source file) into .o (object
+# file).
+
+echo "$0: running sample cc_library compiler (produces .o output)."
+
+# https://docs.bazel.build/versions/master/cc-toolchain-config-reference.html
+# defines fancier ways to generate custom command lines. This script just shows
+# the default, which looks like:
+#
+# examples/custom_toolchain/sample_compiler <various compiler flags> -o bazel-out/x86-fastbuild/bin/examples/custom_toolchain/_objs/buildme/buildme.o.
+
+# The .o is the last parameter.
+OBJECT_FILE=${@: -1}
+# Swap out .o for .d to get expected .d (source dependency output).
+DOTD_FILE=${OBJECT_FILE%?}d
+
+echo "$0: sample .o output" > $OBJECT_FILE
+echo "sample .d output ($0)" > $DOTD_FILE
diff --git a/examples/custom_toolchain/sample_linker b/examples/custom_toolchain/sample_linker
new file mode 100755
index 0000000..69ef204
--- /dev/null
+++ b/examples/custom_toolchain/sample_linker
@@ -0,0 +1,23 @@
+#!/bin/bash
+#
+# Sample script demonstrating custom C++ toolchain selection: handles
+# the command that translates a cc_library's .o (object file) into
+# .a (archive).
+
+echo "$0: running sample cc_library linker (produces .a output)."
+
+# https://docs.bazel.build/versions/master/cc-toolchain-config-reference.html
+# defines fancier ways to generate custom command lines. This script just shows
+# the default, which looks like:
+#
+# examples/custom_toolchain/sample_linker @bazel-out/x86-fastbuild/bin/examples/custom_toolchain/libbuildme.a-2.params.
+
+# Get "@bazel-out/.../libbuildme.a-2.params".
+PARAMS_FILE=${@: -1}
+# Remove the "@" prefix.
+OUTFILE=${PARAMS_FILE#?}
+# Replace "libbuildme.a-2.params" with "libbuildme.a".
+OUTFILE=${OUTFILE%-*}
+
+echo "$0: sample output" > $OUTFILE
+
diff --git a/examples/custom_toolchain/toolchain_config.bzl b/examples/custom_toolchain/toolchain_config.bzl
new file mode 100644
index 0000000..e83162b
--- /dev/null
+++ b/examples/custom_toolchain/toolchain_config.bzl
@@ -0,0 +1,77 @@
+"""Sample Starlark definition defining a C++ toolchain's behavior.
+
+When you build a cc_* rule, this logic defines what programs run for what
+build steps (e.g. compile / link / archive) and how their command lines are
+structured.
+
+This is a proof-of-concept simple implementation. It doesn't construct fancy
+command lines and uses mock shell scripts to compile and link
+("sample_compiler" and "sample_linker"). See
+https://docs.bazel.build/versions/main/cc-toolchain-config-reference.html and
+https://docs.bazel.build/versions/main/tutorial/cc-toolchain-config.html for
+advanced usage.
+"""
+
+load("@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl", "tool_path")
+
+def _impl(ctx):
+ tool_paths = [
+ tool_path(
+ name = "ar",
+ path = "sample_linker",
+ ),
+ tool_path(
+ name = "cpp",
+ path = "not_used_in_this_example",
+ ),
+ tool_path(
+ name = "gcc",
+ path = "sample_compiler",
+ ),
+ tool_path(
+ name = "gcov",
+ path = "not_used_in_this_example",
+ ),
+ tool_path(
+ name = "ld",
+ path = "sample_linker",
+ ),
+ tool_path(
+ name = "nm",
+ path = "not_used_in_this_example",
+ ),
+ tool_path(
+ name = "objdump",
+ path = "not_used_in_this_example",
+ ),
+ tool_path(
+ name = "strip",
+ path = "not_used_in_this_example",
+ ),
+ ]
+
+ # Documented at
+ # https://docs.bazel.build/versions/main/skylark/lib/cc_common.html#create_cc_toolchain_config_info.
+ #
+ # create_cc_toolchain_config_info is the public interface for registering
+ # C++ toolchain behavior.
+ return cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ toolchain_identifier = "custom-toolchain-identifier",
+ host_system_name = "local",
+ target_system_name = "local",
+ target_cpu = "sample_cpu",
+ target_libc = "unknown",
+ compiler = "gcc",
+ abi_version = "unknown",
+ abi_libc_version = "unknown",
+ tool_paths = tool_paths,
+ )
+
+cc_toolchain_config = rule(
+ implementation = _impl,
+ # You can alternatively define attributes here that make it possible to
+ # instantiate different cc_toolchain_config targets with different behavior.
+ attrs = {},
+ provides = [CcToolchainConfigInfo],
+)
diff --git a/examples/experimental_cc_shared_library.bzl b/examples/experimental_cc_shared_library.bzl
new file mode 100644
index 0000000..e327bb8
--- /dev/null
+++ b/examples/experimental_cc_shared_library.bzl
@@ -0,0 +1,48 @@
+"""This is an experimental implementation of cc_shared_library.
+
+We may change the implementation at any moment or even delete this file. Do not
+rely on this. It requires bazel >1.2 and passing the flag
+--experimental_cc_shared_library
+"""
+
+# Add this as a tag to any target that can be linked by more than one
+# cc_shared_library because it doesn't have static initializers or anything
+# else that may cause issues when being linked more than once. This should be
+# used sparingly after making sure it's safe to use.
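+#
+# For example, a library that is safe to duplicate might be tagged like this
+# (hypothetical target):
+#
+#   cc_library(
+#       name = "common_utils",
+#       srcs = ["common_utils.cc"],
+#       tags = ["LINKABLE_MORE_THAN_ONCE"],
+#   )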
+LINKABLE_MORE_THAN_ONCE = "LINKABLE_MORE_THAN_ONCE"
+
+CcSharedLibraryPermissionsInfo = provider(
+ "Permissions for a cc shared library.",
+ fields = {
+ "targets": "Matches targets that can be exported.",
+ },
+)
+GraphNodeInfo = provider(
+ "Nodes in the graph of shared libraries.",
+ fields = {
+ "children": "Other GraphNodeInfo from dependencies of this target",
+ "label": "Label of the target visited",
+ "linkable_more_than_once": "Linkable into more than a single cc_shared_library",
+ },
+)
+CcSharedLibraryInfo = provider(
+ "Information about a cc shared library.",
+ fields = {
+ "dynamic_deps": "All shared libraries depended on transitively",
+ "exports": "cc_libraries that are linked statically and exported",
+ "link_once_static_libs": "All libraries linked statically into this library that should " +
+ "only be linked once, e.g. because they have static " +
+ "initializers. If we try to link them more than once, " +
+ "we will throw an error",
+ "linker_input": "the resulting linker input artifact for the shared library",
+ "preloaded_deps": "cc_libraries needed by this cc_shared_library that should" +
+ " be linked the binary. If this is set, this cc_shared_library has to " +
+ " be a direct dependency of the cc_binary",
+ },
+)
+
+def cc_shared_library_permissions(**kwargs):
+ native.cc_shared_library_permissions(**kwargs)
+
+def cc_shared_library(**kwargs):
+ native.cc_shared_library(**kwargs)
diff --git a/examples/my_c_archive/BUILD b/examples/my_c_archive/BUILD
new file mode 100644
index 0000000..4484684
--- /dev/null
+++ b/examples/my_c_archive/BUILD
@@ -0,0 +1,50 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Example showing how to create a custom Starlark rule that rules_cc can depend on
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")
+load("//examples/my_c_archive:my_c_archive.bzl", "my_c_archive")
+load("//examples/my_c_compile:my_c_compile.bzl", "my_c_compile")
+
+licenses(["notice"])
+
+cc_binary(
+ name = "main",
+ srcs = ["main.c"],
+ deps = [":archive"],
+)
+
+my_c_archive(
+ name = "archive",
+ object = ":object",
+ deps = [":bar"],
+)
+
+my_c_compile(
+ name = "object",
+ src = "foo.c",
+)
+
+cc_library(
+ name = "bar",
+ srcs = ["bar.c"],
+)
+
+bzl_library(
+ name = "my_c_archive_bzl",
+ srcs = ["my_c_archive.bzl"],
+ visibility = ["//visibility:private"],
+)
diff --git a/examples/my_c_archive/bar.c b/examples/my_c_archive/bar.c
new file mode 100644
index 0000000..8c9de53
--- /dev/null
+++ b/examples/my_c_archive/bar.c
@@ -0,0 +1 @@
+int bar() { return -42; }
diff --git a/examples/my_c_archive/foo.c b/examples/my_c_archive/foo.c
new file mode 100644
index 0000000..6718fbd
--- /dev/null
+++ b/examples/my_c_archive/foo.c
@@ -0,0 +1,15 @@
+// Copyright 2019 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+int foo() { return 42; }
diff --git a/examples/my_c_archive/main.c b/examples/my_c_archive/main.c
new file mode 100644
index 0000000..33ca256
--- /dev/null
+++ b/examples/my_c_archive/main.c
@@ -0,0 +1,3 @@
+int foo();
+int bar();
+int main() { return foo() + bar(); }
diff --git a/examples/my_c_archive/my_c_archive.bzl b/examples/my_c_archive/my_c_archive.bzl
new file mode 100644
index 0000000..314564f
--- /dev/null
+++ b/examples/my_c_archive/my_c_archive.bzl
@@ -0,0 +1,99 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Example showing how to create a rule that rules_cc can depend on."""
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
+load("@rules_cc//cc:action_names.bzl", "CPP_LINK_STATIC_LIBRARY_ACTION_NAME")
+load("//examples/my_c_compile:my_c_compile.bzl", "MyCCompileInfo")
+
+def _my_c_archive_impl(ctx):
+ cc_toolchain = find_cpp_toolchain(ctx)
+ object_file = ctx.attr.object[MyCCompileInfo].object
+ output_file = ctx.actions.declare_file(ctx.label.name + ".a")
+
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ requested_features = ctx.features,
+ unsupported_features = ctx.disabled_features,
+ )
+
+ linker_input = cc_common.create_linker_input(
+ owner = ctx.label,
+ libraries = depset(direct = [
+ cc_common.create_library_to_link(
+ actions = ctx.actions,
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ static_library = output_file,
+ ),
+ ]),
+ )
+ compilation_context = cc_common.create_compilation_context()
+ linking_context = cc_common.create_linking_context(linker_inputs = depset(direct = [linker_input]))
+
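+    # Resolve which archiver binary the toolchain uses for static-library links
+    # and expand the toolchain's flag templates into a concrete command line.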
+ archiver_path = cc_common.get_tool_for_action(
+ feature_configuration = feature_configuration,
+ action_name = CPP_LINK_STATIC_LIBRARY_ACTION_NAME,
+ )
+ archiver_variables = cc_common.create_link_variables(
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ output_file = output_file.path,
+ is_using_linker = False,
+ )
+ command_line = cc_common.get_memory_inefficient_command_line(
+ feature_configuration = feature_configuration,
+ action_name = CPP_LINK_STATIC_LIBRARY_ACTION_NAME,
+ variables = archiver_variables,
+ )
+ args = ctx.actions.args()
+ args.add_all(command_line)
+ args.add(object_file)
+
+ env = cc_common.get_environment_variables(
+ feature_configuration = feature_configuration,
+ action_name = CPP_LINK_STATIC_LIBRARY_ACTION_NAME,
+ variables = archiver_variables,
+ )
+
+ ctx.actions.run(
+ executable = archiver_path,
+ arguments = [args],
+ env = env,
+ inputs = depset(
+ direct = [object_file],
+ transitive = [
+ cc_toolchain.all_files,
+ ],
+ ),
+ outputs = [output_file],
+ )
+
+ cc_info = cc_common.merge_cc_infos(cc_infos = [
+ CcInfo(compilation_context = compilation_context, linking_context = linking_context),
+ ] + [dep[CcInfo] for dep in ctx.attr.deps])
+ return [cc_info]
+
+my_c_archive = rule(
+ implementation = _my_c_archive_impl,
+ attrs = {
+ "deps": attr.label_list(providers = [CcInfo]),
+ "object": attr.label(mandatory = True, providers = [MyCCompileInfo]),
+ "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
+ },
+ fragments = ["cpp"],
+ toolchains = use_cpp_toolchain(),
+)
diff --git a/examples/my_c_compile/BUILD b/examples/my_c_compile/BUILD
new file mode 100644
index 0000000..b045509
--- /dev/null
+++ b/examples/my_c_compile/BUILD
@@ -0,0 +1,30 @@
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//examples/my_c_compile:my_c_compile.bzl", "my_c_compile")
+
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Example showing how to create a custom Starlark rule that just compiles C sources
+licenses(["notice"])
+
+my_c_compile(
+ name = "foo",
+ src = "foo.c",
+)
+
+bzl_library(
+ name = "my_c_compile_bzl",
+ srcs = ["my_c_compile.bzl"],
+ visibility = ["//visibility:private"],
+)
diff --git a/examples/my_c_compile/foo.c b/examples/my_c_compile/foo.c
new file mode 100644
index 0000000..6718fbd
--- /dev/null
+++ b/examples/my_c_compile/foo.c
@@ -0,0 +1,15 @@
+// Copyright 2019 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+int foo() { return 42; }
diff --git a/examples/my_c_compile/my_c_compile.bzl b/examples/my_c_compile/my_c_compile.bzl
new file mode 100644
index 0000000..d232f91
--- /dev/null
+++ b/examples/my_c_compile/my_c_compile.bzl
@@ -0,0 +1,81 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Example showing how to create a rule that just compiles C sources."""
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
+load("@rules_cc//cc:action_names.bzl", "C_COMPILE_ACTION_NAME")
+
+MyCCompileInfo = provider(doc = "", fields = ["object"])
+
+DISABLED_FEATURES = [
+ "module_maps", # # copybara-comment-this-out-please
+]
+
+def _my_c_compile_impl(ctx):
+ cc_toolchain = find_cpp_toolchain(ctx)
+ source_file = ctx.file.src
+ output_file = ctx.actions.declare_file(ctx.label.name + ".o")
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ requested_features = ctx.features,
+ unsupported_features = DISABLED_FEATURES + ctx.disabled_features,
+ )
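+    # Ask the feature configuration for the C compiler and derive its command
+    # line from toolchain-provided compile variables rather than hard-coded flags.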
+ c_compiler_path = cc_common.get_tool_for_action(
+ feature_configuration = feature_configuration,
+ action_name = C_COMPILE_ACTION_NAME,
+ )
+ c_compile_variables = cc_common.create_compile_variables(
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ user_compile_flags = ctx.fragments.cpp.copts + ctx.fragments.cpp.conlyopts,
+ source_file = source_file.path,
+ output_file = output_file.path,
+ )
+ command_line = cc_common.get_memory_inefficient_command_line(
+ feature_configuration = feature_configuration,
+ action_name = C_COMPILE_ACTION_NAME,
+ variables = c_compile_variables,
+ )
+ env = cc_common.get_environment_variables(
+ feature_configuration = feature_configuration,
+ action_name = C_COMPILE_ACTION_NAME,
+ variables = c_compile_variables,
+ )
+
+ ctx.actions.run(
+ executable = c_compiler_path,
+ arguments = command_line,
+ env = env,
+ inputs = depset(
+ [source_file],
+ transitive = [cc_toolchain.all_files],
+ ),
+ outputs = [output_file],
+ )
+ return [
+ DefaultInfo(files = depset([output_file])),
+ MyCCompileInfo(object = output_file),
+ ]
+
+my_c_compile = rule(
+ implementation = _my_c_compile_impl,
+ attrs = {
+ "src": attr.label(mandatory = True, allow_single_file = True),
+ "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
+ },
+ toolchains = use_cpp_toolchain(),
+ fragments = ["cpp"],
+)
diff --git a/examples/write_cc_toolchain_cpu/BUILD b/examples/write_cc_toolchain_cpu/BUILD
new file mode 100644
index 0000000..c9ee72f
--- /dev/null
+++ b/examples/write_cc_toolchain_cpu/BUILD
@@ -0,0 +1,27 @@
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//examples/write_cc_toolchain_cpu:write_cc_toolchain_cpu.bzl", "write_cc_toolchain_cpu")
+
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Example showing how to get CcToolchainInfo in a custom starlark rule
+licenses(["notice"])
+
+write_cc_toolchain_cpu(name = "write_me_the_cpu")
+
+bzl_library(
+ name = "write_cc_toolchain_cpu_bzl",
+ srcs = ["write_cc_toolchain_cpu.bzl"],
+ visibility = ["//visibility:private"],
+)
diff --git a/examples/write_cc_toolchain_cpu/write_cc_toolchain_cpu.bzl b/examples/write_cc_toolchain_cpu/write_cc_toolchain_cpu.bzl
new file mode 100644
index 0000000..3e93b42
--- /dev/null
+++ b/examples/write_cc_toolchain_cpu/write_cc_toolchain_cpu.bzl
@@ -0,0 +1,32 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Example showing how to get CcToolchainInfo in a custom rule."""
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
+
+def _write_cc_toolchain_cpu_impl(ctx):
+ cc_toolchain = find_cpp_toolchain(ctx)
+ output = ctx.actions.declare_file(ctx.label.name + "_cpu")
+ ctx.actions.write(output, cc_toolchain.cpu)
+ return [DefaultInfo(files = depset([output]))]
+
+# This rule does nothing, just writes the target_cpu from the cc_toolchain used for this build.
+write_cc_toolchain_cpu = rule(
+ implementation = _write_cc_toolchain_cpu_impl,
+ attrs = {
+ "_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
+ },
+ toolchains = use_cpp_toolchain(),
+)
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 0000000..ee8c906
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,5 @@
+{
+ "extends": [
+ "config:base"
+ ]
+}
diff --git a/tests/compiler_settings/BUILD b/tests/compiler_settings/BUILD
new file mode 100644
index 0000000..33c8206
--- /dev/null
+++ b/tests/compiler_settings/BUILD
@@ -0,0 +1,33 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//cc:defs.bzl", "cc_binary")
+
+licenses(["notice"])
+
+cc_binary(
+ name = "main",
+ srcs = ["main.cc"],
+ local_defines = select(
+ {
+ "//cc/compiler:clang-cl": ["COMPILER=clang-cl"],
+ "//cc/compiler:clang": ["COMPILER=clang"],
+ "//cc/compiler:gcc": ["COMPILER=gcc"],
+ "//cc/compiler:mingw-gcc": ["COMPILER=mingw-gcc"],
+ "//cc/compiler:msvc-cl": ["COMPILER=msvc-cl"],
+ "//conditions:default": [],
+ },
+ no_match_error = "Compiler not detected by Bazel",
+ ),
+)
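+
+# For illustration (not an additional target): when Bazel detects clang, the
+# select() above resolves to ["COMPILER=clang"], so main.cc is built with the
+# COMPILER=clang define (e.g. -DCOMPILER=clang) and prints "Hello, clang!".
+# The other branches behave analogously for their compilers.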
diff --git a/tests/compiler_settings/main.cc b/tests/compiler_settings/main.cc
new file mode 100644
index 0000000..35b088c
--- /dev/null
+++ b/tests/compiler_settings/main.cc
@@ -0,0 +1,22 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <iostream>
+
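+// Two macro levels are needed so that COMPILER is expanded to its value
+// (e.g. clang) before being stringified; a single STRINGIFY(COMPILER) would
+// produce the literal string "COMPILER".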
+#define STRINGIFY(x) #x
+#define TO_STRING(x) STRINGIFY(x)
+
+int main() {
+ std::cout << "Hello, " << TO_STRING(COMPILER) << "!" << std::endl;
+}
diff --git a/tests/load_from_macro/BUILD b/tests/load_from_macro/BUILD
new file mode 100644
index 0000000..93b902a
--- /dev/null
+++ b/tests/load_from_macro/BUILD
@@ -0,0 +1,31 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//cc:defs.bzl", "cc_library")
+load(":tags.bzl", "TAGS")
+
+licenses(["notice"])
+
+cc_library(
+ name = "foo",
+ srcs = ["foo.cc"],
+ tags = TAGS,
+)
+
+bzl_library(
+ name = "tags_bzl",
+ srcs = ["tags.bzl"],
+ visibility = ["//visibility:private"],
+)
diff --git a/tests/load_from_macro/foo.cc b/tests/load_from_macro/foo.cc
new file mode 100644
index 0000000..c19005a
--- /dev/null
+++ b/tests/load_from_macro/foo.cc
@@ -0,0 +1,13 @@
+// Copyright 2019 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
diff --git a/tests/load_from_macro/tags.bzl b/tests/load_from_macro/tags.bzl
new file mode 100644
index 0000000..aa604c3
--- /dev/null
+++ b/tests/load_from_macro/tags.bzl
@@ -0,0 +1,17 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Example tags defined in a separate file.
+"""
+TAGS = ["first_tag", "second_tag"]
diff --git a/tests/simple_binary/BUILD b/tests/simple_binary/BUILD
new file mode 100644
index 0000000..c8d78a6
--- /dev/null
+++ b/tests/simple_binary/BUILD
@@ -0,0 +1,28 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//cc:defs.bzl", "cc_binary")
+
+licenses(["notice"])
+
+cc_binary(
+ name = "foo",
+ srcs = ["foo.cc"],
+)
+
+cc_binary(
+ name = "libfoo.so",
+ srcs = ["foo.cc"],
+ linkshared = 1,
+)
diff --git a/tests/simple_binary/foo.cc b/tests/simple_binary/foo.cc
new file mode 100644
index 0000000..cc38c10
--- /dev/null
+++ b/tests/simple_binary/foo.cc
@@ -0,0 +1,15 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+int main(int argc, char *argv[]) { return 0; }
diff --git a/tests/system_library/BUILD b/tests/system_library/BUILD
new file mode 100644
index 0000000..abc1392
--- /dev/null
+++ b/tests/system_library/BUILD
@@ -0,0 +1,13 @@
+sh_test(
+ name = "system_library_test",
+ size = "small",
+ srcs = ["system_library_test.sh"],
+ data = [
+ ":unittest.bash",
+ "//cc:system_library.bzl",
+ "@bazel_tools//tools/bash/runfiles",
+ ],
+ target_compatible_with = [
+ "@platforms//os:linux",
+ ],
+)
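+
+# Illustrative invocation (an assumption, not part of this package): on a
+# Linux host the suite can be run with
+#   bazel test //tests/system_library:system_library_test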
diff --git a/tests/system_library/system_library_test.sh b/tests/system_library/system_library_test.sh
new file mode 100755
index 0000000..b8d52b3
--- /dev/null
+++ b/tests/system_library/system_library_test.sh
@@ -0,0 +1,213 @@
+# --- begin runfiles.bash initialization ---
+set -euo pipefail
+if [[ ! -d "${RUNFILES_DIR:-/dev/null}" && ! -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
+ if [[ -f "$0.runfiles_manifest" ]]; then
+ export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
+ elif [[ -f "$0.runfiles/MANIFEST" ]]; then
+ export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
+ elif [[ -f "$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
+ export RUNFILES_DIR="$0.runfiles"
+ fi
+fi
+if [[ -f "${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
+ source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
+elif [[ -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
+ source "$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \
+ "$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)"
+else
+ echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash"
+ exit 1
+fi
+# --- end runfiles.bash initialization ---
+
+source "$(rlocation rules_cc/tests/system_library/unittest.bash)" \
+    || { echo "Could not source rules_cc/tests/system_library/unittest.bash" >&2; exit 1; }
+
+
+function setup_system_library() {
+ mkdir -p systemlib
+
+ cat << EOF > systemlib/foo.cc
+int bar() {
+ return 42;
+}
+EOF
+
+ cat << EOF > systemlib/foo.h
+int bar();
+EOF
+
+ cd systemlib
+
+ g++ -c -fpic foo.cc || fail "Expected foo.o to build successfully"
+ g++ -shared -o libfoo.so foo.o || fail "Expected foo.so to build successfully"
+ g++ -c foo.cc || fail "Expected foo.o to build successfully"
+ ar rvs foo.a foo.o || fail "Expected foo.a to build successfully"
+
+ cd ..
+
+ cat << EOF > WORKSPACE
+load("//:cc/system_library.bzl", "system_library")
+system_library(
+ name = "foo",
+ hdrs = [
+ "foo.h",
+ ],
+ static_lib_names = ["libfoo.a"],
+ shared_lib_names = ["libfoo.so"]
+)
+
+system_library(
+ name = "foo_hardcoded_path",
+ hdrs = [
+ "foo.h",
+ ],
+ static_lib_names = ["libfoo.a"],
+ shared_lib_names = ["libfoo.so"],
+ lib_path_hints = ["${PWD}/systemlib"],
+ includes = ["${PWD}/systemlib"]
+)
+EOF
+
+ cat << EOF > BUILD
+cc_binary(
+ name = "test",
+ srcs = ["test.cc"],
+ deps = ["@foo"]
+)
+
+cc_binary(
+ name = "test_static",
+ srcs = ["test.cc"],
+ deps = ["@foo"],
+ linkstatic = True
+)
+
+cc_binary(
+ name = "test_hardcoded_path",
+ srcs = ["test.cc"],
+ deps = ["@foo_hardcoded_path"]
+)
+
+cc_binary(
+ name = "test_static_hardcoded_path",
+ srcs = ["test.cc"],
+ deps = ["@foo_hardcoded_path"],
+ linkstatic = True
+)
+
+cc_binary(
+ name = "fake_rbe",
+ srcs = ["test.cc"],
+ deps = ["@foo_hardcoded_path"]
+)
+EOF
+
+ cat << EOF > test.cc
+#include "foo.h"
+
+int main() {
+ return 42 - bar();
+}
+EOF
+}
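+
+# (Reader's note: setup_system_library builds a shared libfoo.so and a static
+# archive under systemlib/, then writes a WORKSPACE defining the @foo and
+# @foo_hardcoded_path system_library repos plus the BUILD targets exercised
+# by the tests below.)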
+#### TESTS #############################################################
+
+# Make sure it fails with a correct message when no library is found
+function test_system_library_not_found() {
+ setup_system_library
+
+ bazel run //:test \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ &> $TEST_log \
+ || true
+ expect_log "Library foo could not be found"
+
+ bazel run //:test_static \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ &> $TEST_log \
+ || true
+ expect_log "Library foo could not be found"
+}
+
+function test_override_paths() {
+ setup_system_library
+
+ bazel run //:test \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ --action_env=BAZEL_LIB_OVERRIDE_PATHS=foo="${PWD}"/systemlib \
+ --action_env=BAZEL_INCLUDE_OVERRIDE_PATHS=foo="${PWD}"/systemlib \
+ || fail "Expected test to run successfully"
+
+ bazel run //:test_static \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ --action_env=BAZEL_LIB_OVERRIDE_PATHS=foo="${PWD}"/systemlib \
+ --action_env=BAZEL_INCLUDE_OVERRIDE_PATHS=foo="${PWD}"/systemlib \
+ || fail "Expected test_static to run successfully"
+}
+
+function test_additional_paths() {
+ setup_system_library
+
+ bazel run //:test \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ --action_env=BAZEL_LIB_ADDITIONAL_PATHS=foo="${PWD}"/systemlib \
+ --action_env=BAZEL_INCLUDE_ADDITIONAL_PATHS=foo="${PWD}"/systemlib \
+ || fail "Expected test to run successfully"
+
+ bazel run //:test_static \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ --action_env=BAZEL_LIB_ADDITIONAL_PATHS=foo="${PWD}"/systemlib \
+ --action_env=BAZEL_INCLUDE_ADDITIONAL_PATHS=foo="${PWD}"/systemlib \
+ || fail "Expected test_static to run successfully"
+}
+
+function test_hardcoded_paths() {
+ setup_system_library
+
+ bazel run //:test_hardcoded_path \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ || fail "Expected test_hardcoded_path to run successfully"
+
+ bazel run //:test_static_hardcoded_path \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ || fail "Expected test_static_hardcoded_path to run successfully"
+}
+
+function test_system_library_no_lib_names() {
+ cat << EOF > WORKSPACE
+load("//:cc/system_library.bzl", "system_library")
+system_library(
+ name = "foo",
+ hdrs = [
+ "foo.h",
+ ]
+)
+EOF
+
+ cat << EOF > BUILD
+cc_binary(
+ name = "test",
+ srcs = ["test.cc"],
+ deps = ["@foo"]
+)
+EOF
+
+  # It should fail when neither static_lib_names nor shared_lib_names are given
+ bazel run //:test \
+ --experimental_starlark_cc_import \
+ --experimental_repo_remote_exec \
+ &> $TEST_log \
+ || true
+ expect_log "Library foo could not be found"
+}
+
+run_suite "Integration tests for system_library."
\ No newline at end of file
diff --git a/tests/system_library/unittest.bash b/tests/system_library/unittest.bash
new file mode 100644
index 0000000..3bd07c7
--- /dev/null
+++ b/tests/system_library/unittest.bash
@@ -0,0 +1,801 @@
+#!/bin/bash
+#
+# Copyright 2015 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Common utility file for Bazel shell tests
+#
+# unittest.bash: a unit test framework in Bash.
+#
+# A typical test suite looks like so:
+#
+# ------------------------------------------------------------------------
+# #!/bin/bash
+#
+# source path/to/unittest.bash || exit 1
+#
+# # Test that foo works.
+# function test_foo() {
+# foo >$TEST_log || fail "foo failed";
+# expect_log "blah" "Expected to see 'blah' in output of 'foo'."
+# }
+#
+# # Test that bar works.
+# function test_bar() {
+# bar 2>$TEST_log || fail "bar failed";
+# expect_not_log "ERROR" "Unexpected error from 'bar'."
+# ...
+# assert_equals $x $y
+# }
+#
+# run_suite "Test suite for blah"
+# ------------------------------------------------------------------------
+#
+# Each test function is considered to pass iff fail() is not called
+# while it is active. fail() may be called directly, or indirectly
+# via other assertions such as expect_log(). run_suite must be called
+# at the very end.
+#
+# A test function may redefine functions "set_up" and/or "tear_down";
+# these functions are executed before and after each test function,
+# respectively. Similarly, "cleanup" and "timeout" may be redefined,
+# and these function are called upon exit (of any kind) or a timeout.
+#
+# The user can pass --test_arg to bazel test to select specific tests
+# to run. Specifying --test_arg multiple times allows selecting several
+# tests to be run in the given order. Additionally the user may define
+# TESTS=(test_foo test_bar ...) to specify a subset of test functions to
+# execute, for example, a working set during debugging. By default, all
+# functions called test_* will be executed.
+#
+# This file provides utilities for assertions over the output of a
+# command. The output of the command under test is directed to the
+# file $TEST_log, and then the expect_log* assertions can be used to
+# test for the presence of certain regular expressions in that file.
+#
+# The test framework is responsible for restoring the original working
+# directory before each test.
+#
+# The order in which test functions are run is not defined, so it is
+# important that tests clean up after themselves.
+#
+# Each test will be run in a new subshell.
+#
+# Functions named __* are not intended for use by clients.
+#
+# This framework implements the "test sharding protocol".
+#
+
+[ -n "$BASH_VERSION" ] ||
+ { echo "unittest.bash only works with bash!" >&2; exit 1; }
+
+DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+
+#### Configuration variables (may be overridden by testenv.sh or the suite):
+
+# This function may be called by testenv.sh or a test suite to enable errexit
+# in a way that enables us to print pretty stack traces when something fails.
+function enable_errexit() {
+ set -o errtrace
+ set -eu
+ trap __test_terminated_err ERR
+}
+
+function disable_errexit() {
+ set +o errtrace
+ set +eu
+ trap - ERR
+}
+
+#### Set up the test environment, branched from the old shell/testenv.sh
+
+# Enable errexit with pretty stack traces.
+enable_errexit
+
+# Print message in "$1" then exit with status "$2"
+die () {
+ # second argument is optional, defaulting to 1
+ local status_code=${2:-1}
+ # Stop capturing stdout/stderr, and dump captured output
+ if [ "$CAPTURED_STD_ERR" -ne 0 -o "$CAPTURED_STD_OUT" -ne 0 ]; then
+ restore_outputs
+ if [ "$CAPTURED_STD_OUT" -ne 0 ]; then
+ cat "${TEST_TMPDIR}/captured.out"
+ CAPTURED_STD_OUT=0
+ fi
+ if [ "$CAPTURED_STD_ERR" -ne 0 ]; then
+ cat "${TEST_TMPDIR}/captured.err" 1>&2
+ CAPTURED_STD_ERR=0
+ fi
+ fi
+
+ if [ -n "${1-}" ] ; then
+ echo "$1" 1>&2
+ fi
+ if [ -n "${BASH-}" ]; then
+ local caller_n=0
+ while [ $caller_n -lt 4 ] && caller_out=$(caller $caller_n 2>/dev/null); do
+ test $caller_n -eq 0 && echo "CALLER stack (max 4):"
+ echo " $caller_out"
+ let caller_n=caller_n+1
+ done 1>&2
+ fi
+ if [ x"$status_code" != x -a x"$status_code" != x"0" ]; then
+ exit "$status_code"
+ else
+ exit 1
+ fi
+}
+
+# Print message in "$1" then record that a non-fatal error occurred in ERROR_COUNT
+ERROR_COUNT="${ERROR_COUNT:-0}"
+error () {
+ if [ -n "$1" ] ; then
+ echo "$1" 1>&2
+ fi
+ ERROR_COUNT=$(($ERROR_COUNT + 1))
+}
+
+# Die if "$1" != "$2", print $3 as death reason
+check_eq () {
+ [ "$1" = "$2" ] || die "Check failed: '$1' == '$2' ${3:+ ($3)}"
+}
+
+# Die if "$1" == "$2", print $3 as death reason
+check_ne () {
+ [ "$1" != "$2" ] || die "Check failed: '$1' != '$2' ${3:+ ($3)}"
+}
+
+# The structure of the following if statements is such that if '[' fails
+# (e.g., a non-number was passed in) then the check will fail.
+
+# Die if "$1" > "$2", print $3 as death reason
+check_le () {
+ [ "$1" -gt "$2" ] || die "Check failed: '$1' <= '$2' ${3:+ ($3)}"
+}
+
+# Die if "$1" >= "$2", print $3 as death reason
+check_lt () {
+ [ "$1" -lt "$2" ] || die "Check failed: '$1' < '$2' ${3:+ ($3)}"
+}
+
+# Die if "$1" < "$2", print $3 as death reason
+check_ge () {
+ [ "$1" -ge "$2" ] || die "Check failed: '$1' >= '$2' ${3:+ ($3)}"
+}
+
+# Die if "$1" <= "$2", print $3 as death reason
+check_gt () {
+ [ "$1" -gt "$2" ] || die "Check failed: '$1' > '$2' ${3:+ ($3)}"
+}
+
+# Die if $2 !~ $1; print $3 as death reason
+check_match ()
+{
+ expr match "$2" "$1" >/dev/null || \
+ die "Check failed: '$2' does not match regex '$1' ${3:+ ($3)}"
+}
+
+# Run command "$1" at exit. Like "trap" but multiple atexits don't
+# overwrite each other. Will break if someone does call trap
+# directly. So, don't do that.
+ATEXIT="${ATEXIT-}"
+atexit () {
+ if [ -z "$ATEXIT" ]; then
+ ATEXIT="$1"
+ else
+ ATEXIT="$1 ; $ATEXIT"
+ fi
+ trap "$ATEXIT" EXIT
+}
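+
+# For example (illustrative only):
+#   atexit "rm -f /tmp/scratch"
+#   atexit "echo done"
+# results in: trap "echo done ; rm -f /tmp/scratch" EXIT
+# i.e. later-registered commands run first.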
+
+## TEST_TMPDIR
+if [ -z "${TEST_TMPDIR:-}" ]; then
+ export TEST_TMPDIR="$(mktemp -d ${TMPDIR:-/tmp}/bazel-test.XXXXXXXX)"
+fi
+if [ ! -e "${TEST_TMPDIR}" ]; then
+ mkdir -p -m 0700 "${TEST_TMPDIR}"
+ # Clean TEST_TMPDIR on exit
+ atexit "rm -fr ${TEST_TMPDIR}"
+fi
+
+# Functions to compare the actual output of a test to the expected
+# (golden) output.
+#
+# Usage:
+# capture_test_stdout
+# ... do something ...
+# diff_test_stdout "$TEST_SRCDIR/path/to/golden.out"
+
+# Redirect a file descriptor to a file.
+CAPTURED_STD_OUT="${CAPTURED_STD_OUT:-0}"
+CAPTURED_STD_ERR="${CAPTURED_STD_ERR:-0}"
+
+capture_test_stdout () {
+ exec 3>&1 # Save stdout as fd 3
+ exec 4>"${TEST_TMPDIR}/captured.out"
+ exec 1>&4
+ CAPTURED_STD_OUT=1
+}
+
+capture_test_stderr () {
+ exec 6>&2 # Save stderr as fd 6
+ exec 7>"${TEST_TMPDIR}/captured.err"
+ exec 2>&7
+ CAPTURED_STD_ERR=1
+}
+
+# Force XML_OUTPUT_FILE to an existing path
+if [ -z "${XML_OUTPUT_FILE:-}" ]; then
+  XML_OUTPUT_FILE=${TEST_TMPDIR}/output.xml
+fi
+
+#### Global variables:
+
+TEST_name="" # The name of the current test.
+
+TEST_log=$TEST_TMPDIR/log # The log file over which the
+ # expect_log* assertions work. Must
+ # be absolute to be robust against
+ # tests invoking 'cd'!
+
+TEST_passed="true" # The result of the current test;
+ # failed assertions cause this to
+ # become false.
+
+# These variables may be overridden by the test suite:
+
+TESTS=() # A subset or "working set" of test
+ # functions that should be run. By
+ # default, all tests called test_* are
+ # run.
+if [ $# -gt 0 ]; then
+ # Legacy behavior is to ignore missing regexp, but with errexit
+ # the following line fails without || true.
+ # TODO(dmarting): maybe we should revisit the way of selecting
+ # test with that framework (use Bazel's environment variable instead).
+ TESTS=($(for i in $@; do echo $i; done | grep ^test_ || true))
+ if (( ${#TESTS[@]} == 0 )); then
+    echo "WARNING: Arguments do not specify any tests!" >&2
+ fi
+fi
+
+TEST_verbose="true" # Whether or not to be verbose. A
+ # command; "true" or "false" are
+ # acceptable. The default is: true.
+
+TEST_script="$(pwd)/$0" # Full path to test script
+
+#### Internal functions
+
+function __show_log() {
+ echo "-- Test log: -----------------------------------------------------------"
+ [[ -e $TEST_log ]] && cat $TEST_log || echo "(Log file did not exist.)"
+ echo "------------------------------------------------------------------------"
+}
+
+# Usage: __pad <title> <pad-char>
+# Print $title padded to 80 columns with $pad_char.
+function __pad() {
+ local title=$1
+ local pad=$2
+ {
+ echo -n "$pad$pad $title "
+ printf "%80s" " " | tr ' ' "$pad"
+ } | head -c 80
+ echo
+}
+
+#### Exported functions
+
+# Usage: init_test ...
+# Deprecated. Has no effect.
+function init_test() {
+ :
+}
+
+
+# Usage: set_up
+# Called before every test function. May be redefined by the test suite.
+function set_up() {
+ :
+}
+
+# Usage: tear_down
+# Called after every test function. May be redefined by the test suite.
+function tear_down() {
+ :
+}
+
+# Usage: cleanup
+# Called upon eventual exit of the test suite. May be redefined by
+# the test suite.
+function cleanup() {
+ :
+}
+
+# Usage: timeout
+# Called upon early exit from a test due to timeout.
+function timeout() {
+ :
+}
+
+# Usage: fail <message> [<message> ...]
+# Print failure message with context information, and mark the test as
+# a failure. The context includes a stacktrace including the longest sequence
+# of calls outside this module. (We exclude the top and bottom portions of
+# the stack because they just add noise.) Also prints the contents of
+# $TEST_log.
+function fail() {
+ __show_log >&2
+ echo "$TEST_name FAILED:" "$@" "." >&2
+ echo "$@" >$TEST_TMPDIR/__fail
+ TEST_passed="false"
+ __show_stack
+ # Cleanup as we are leaving the subshell now
+ tear_down
+ exit 1
+}
+
+# Usage: warn <message>
+# Print a test warning with context information.
+# The context includes a stacktrace including the longest sequence
+# of calls outside this module. (We exclude the top and bottom portions of
+# the stack because they just add noise.)
+function warn() {
+ __show_log >&2
+ echo "$TEST_name WARNING: $1." >&2
+ __show_stack
+
+ if [ -n "${TEST_WARNINGS_OUTPUT_FILE:-}" ]; then
+ echo "$TEST_name WARNING: $1." >> "$TEST_WARNINGS_OUTPUT_FILE"
+ fi
+}
+
+# Usage: show_stack
+# Prints the portion of the stack that does not belong to this module,
+# i.e. the user's code that called a failing assertion. Stack may not
+# be available if Bash is reading commands from stdin; an error is
+# printed in that case.
+__show_stack() {
+ local i=0
+ local trace_found=0
+
+ # Skip over active calls within this module:
+ while (( i < ${#FUNCNAME[@]} )) && [[ ${BASH_SOURCE[i]:-} == ${BASH_SOURCE[0]} ]]; do
+ (( ++i ))
+ done
+
+ # Show all calls until the next one within this module (typically run_suite):
+ while (( i < ${#FUNCNAME[@]} )) && [[ ${BASH_SOURCE[i]:-} != ${BASH_SOURCE[0]} ]]; do
+ # Read online docs for BASH_LINENO to understand the strange offset.
+    # Entries in the BASH_SOURCE stack can be undefined, apparently when exiting from a subshell
+ echo "${BASH_SOURCE[i]:-"Unknown"}:${BASH_LINENO[i - 1]:-"Unknown"}: in call to ${FUNCNAME[i]:-"Unknown"}" >&2
+ (( ++i ))
+ trace_found=1
+ done
+
+ [ $trace_found = 1 ] || echo "[Stack trace not available]" >&2
+}
+
+# Usage: expect_log <regexp> [error-message]
+# Asserts that $TEST_log matches regexp. Prints the contents of
+# $TEST_log and the specified (optional) error message otherwise, and
+# returns non-zero.
+function expect_log() {
+ local pattern=$1
+ local message=${2:-Expected regexp "$pattern" not found}
+ grep -sq -- "$pattern" $TEST_log && return 0
+
+ fail "$message"
+ return 1
+}
+
+# Usage: expect_log_warn <regexp> [error-message]
+# Warns if $TEST_log does not match regexp. Prints the contents of
+# $TEST_log and the specified (optional) error message on mismatch.
+function expect_log_warn() {
+ local pattern=$1
+ local message=${2:-Expected regexp "$pattern" not found}
+ grep -sq -- "$pattern" $TEST_log && return 0
+
+ warn "$message"
+ return 1
+}
+
+# Usage: expect_log_once <regexp> [error-message]
+# Asserts that $TEST_log contains one line matching <regexp>.
+# Prints the contents of $TEST_log and the specified (optional)
+# error message otherwise, and returns non-zero.
+function expect_log_once() {
+ local pattern=$1
+ local message=${2:-Expected regexp "$pattern" not found exactly once}
+ expect_log_n "$pattern" 1 "$message"
+}
+
+# Usage: expect_log_n <regexp> <count> [error-message]
+# Asserts that $TEST_log contains <count> lines matching <regexp>.
+# Prints the contents of $TEST_log and the specified (optional)
+# error message otherwise, and returns non-zero.
+function expect_log_n() {
+ local pattern=$1
+ local expectednum=${2:-1}
+ local message=${3:-Expected regexp "$pattern" not found exactly $expectednum times}
+ local count=$(grep -sc -- "$pattern" $TEST_log)
+ [[ $count = $expectednum ]] && return 0
+ fail "$message"
+ return 1
+}
+
+# Usage: expect_not_log <regexp> [error-message]
+# Asserts that $TEST_log does not match regexp. Prints the contents
+# of $TEST_log and the specified (optional) error message otherwise, and
+# returns non-zero.
+function expect_not_log() {
+ local pattern=$1
+ local message=${2:-Unexpected regexp "$pattern" found}
+ grep -sq -- "$pattern" $TEST_log || return 0
+
+ fail "$message"
+ return 1
+}
+
+# Usage: expect_log_with_timeout <regexp> <timeout> [error-message]
+# Waits for the given regexp in the $TEST_log for up to timeout seconds.
+# Prints the contents of $TEST_log and the specified (optional)
+# error message otherwise, and returns non-zero.
+function expect_log_with_timeout() {
+ local pattern=$1
+ local timeout=$2
+ local message=${3:-Regexp "$pattern" not found in "$timeout" seconds}
+ local count=0
+ while [ $count -lt $timeout ]; do
+ grep -sq -- "$pattern" $TEST_log && return 0
+ let count=count+1
+ sleep 1
+ done
+
+ grep -sq -- "$pattern" $TEST_log && return 0
+ fail "$message"
+ return 1
+}
+
+# Usage: expect_cmd_with_timeout <expected> <cmd> [timeout]
+# Repeats the command once a second for up to timeout seconds (10s by default),
+# until the output matches the expected value. Fails and returns 1 if
+# the command does not return the expected value in the end.
+function expect_cmd_with_timeout() {
+ local expected="$1"
+ local cmd="$2"
+ local timeout=${3:-10}
+ local count=0
+ while [ $count -lt $timeout ]; do
+ local actual="$($cmd)"
+ [ "$expected" = "$actual" ] && return 0
+ let count=count+1
+ sleep 1
+ done
+
+ [ "$expected" = "$actual" ] && return 0
+ fail "Expected '$expected' within ${timeout}s, was '$actual'"
+ return 1
+}
+
+# Usage: assert_one_of <expected_list>... <actual>
+# Asserts that actual is one of the items in expected_list
+# Example: assert_one_of ( "foo", "bar", "baz" ) actualval
+function assert_one_of() {
+ local args=("$@")
+ local last_arg_index=$((${#args[@]} - 1))
+ local actual=${args[last_arg_index]}
+ unset args[last_arg_index]
+ for expected_item in "${args[@]}"; do
+ [ "$expected_item" = "$actual" ] && return 0
+ done;
+
+ fail "Expected one of '${args[@]}', was '$actual'"
+ return 1
+}
+
+# Usage: assert_equals <expected> <actual>
+# Asserts [ expected = actual ].
+function assert_equals() {
+ local expected=$1 actual=$2
+ [ "$expected" = "$actual" ] && return 0
+
+ fail "Expected '$expected', was '$actual'"
+ return 1
+}
+
+# Usage: assert_not_equals <unexpected> <actual>
+# Asserts [ unexpected != actual ].
+function assert_not_equals() {
+ local unexpected=$1 actual=$2
+ [ "$unexpected" != "$actual" ] && return 0;
+
+ fail "Expected not '$unexpected', was '$actual'"
+ return 1
+}
+
+# Usage: assert_contains <regexp> <file> [error-message]
+# Asserts that file matches regexp. Prints the contents of
+# file and the specified (optional) error message otherwise, and
+# returns non-zero.
+function assert_contains() {
+ local pattern=$1
+ local file=$2
+ local message=${3:-Expected regexp "$pattern" not found in "$file"}
+ grep -sq -- "$pattern" "$file" && return 0
+
+ cat "$file" >&2
+ fail "$message"
+ return 1
+}
+
+# Usage: assert_not_contains <regexp> <file> [error-message]
+# Asserts that file does not match regexp. Prints the contents of
+# file and the specified (optional) error message otherwise, and
+# returns non-zero.
+function assert_not_contains() {
+ local pattern=$1
+ local file=$2
+ local message=${3:-Expected regexp "$pattern" found in "$file"}
+ grep -sq -- "$pattern" "$file" || return 0
+
+ cat "$file" >&2
+ fail "$message"
+ return 1
+}
+
+# Updates the global variables TESTS if
+# sharding is enabled, i.e. ($TEST_TOTAL_SHARDS > 0).
+function __update_shards() {
+ [ -z "${TEST_TOTAL_SHARDS-}" ] && return 0
+
+ [ "$TEST_TOTAL_SHARDS" -gt 0 ] ||
+ { echo "Invalid total shards $TEST_TOTAL_SHARDS" >&2; exit 1; }
+
+ [ "$TEST_SHARD_INDEX" -lt 0 -o "$TEST_SHARD_INDEX" -ge "$TEST_TOTAL_SHARDS" ] &&
+    { echo "Invalid shard $TEST_SHARD_INDEX" >&2; exit 1; }
+
+ TESTS=$(for test in "${TESTS[@]}"; do echo "$test"; done |
+ awk "NR % $TEST_TOTAL_SHARDS == $TEST_SHARD_INDEX")
+
+ [ -z "${TEST_SHARD_STATUS_FILE-}" ] || touch "$TEST_SHARD_STATUS_FILE"
+}
+
+# Usage: __test_terminated <signal-number>
+# Handler that is called when the test terminated unexpectedly
+function __test_terminated() {
+ __show_log >&2
+ echo "$TEST_name FAILED: terminated by signal $1." >&2
+ TEST_passed="false"
+ __show_stack
+ timeout
+ exit 1
+}
+
+# Usage: __test_terminated_err
+# Handler that is called when the test terminated unexpectedly due to "errexit".
+function __test_terminated_err() {
+ # When a subshell exits due to signal ERR, its parent shell also exits,
+ # thus the signal handler is called recursively and we print out the
+ # error message and stack trace multiple times. We're only interested
+ # in the first one though, as it contains the most information, so ignore
+ # all following.
+ if [[ -f $TEST_TMPDIR/__err_handled ]]; then
+ exit 1
+ fi
+ __show_log >&2
+ if [[ ! -z "$TEST_name" ]]; then
+ echo -n "$TEST_name "
+ fi
+ echo "FAILED: terminated because this command returned a non-zero status:" >&2
+ touch $TEST_TMPDIR/__err_handled
+ TEST_passed="false"
+ __show_stack
+ # If $TEST_name is still empty, the test suite failed before we even started
+ # to run tests, so we shouldn't call tear_down.
+ if [[ ! -z "$TEST_name" ]]; then
+ tear_down
+ fi
+ exit 1
+}
+
+# Usage: __trap_with_arg <handler> <signals ...>
+# Helper to install a trap handler for several signals preserving the signal
+# number, so that the signal number is available to the trap handler.
+function __trap_with_arg() {
+ func="$1" ; shift
+ for sig ; do
+ trap "$func $sig" "$sig"
+ done
+}
+
+# Usage: <node> <block>
+# Adds the block to the given node in the report file. Quotes in the
+# arguments need to be escaped.
+function __log_to_test_report() {
+ local node="$1"
+ local block="$2"
+ if [[ ! -e "$XML_OUTPUT_FILE" ]]; then
+ local xml_header='<?xml version="1.0" encoding="UTF-8"?>'
+ echo "$xml_header<testsuites></testsuites>" > $XML_OUTPUT_FILE
+ fi
+
+ # replace match on node with block and match
+ # replacement expression only needs escaping for quotes
+ perl -e "\
+\$input = @ARGV[0]; \
+\$/=undef; \
+open FILE, '+<$XML_OUTPUT_FILE'; \
+\$content = <FILE>; \
+if (\$content =~ /($node.*)\$/) { \
+ seek FILE, 0, 0; \
+ print FILE \$\` . \$input . \$1; \
+}; \
+close FILE" "$block"
+}
+
+# Usage: <total> <passed>
+# Adds the test summaries to the xml nodes.
+function __finish_test_report() {
+ local total=$1
+ local passed=$2
+ local failed=$((total - passed))
+
+ cat $XML_OUTPUT_FILE | \
+ sed \
+ "s/<testsuites>/<testsuites tests=\"$total\" failures=\"0\" errors=\"$failed\">/" | \
+ sed \
+ "s/<testsuite>/<testsuite tests=\"$total\" failures=\"0\" errors=\"$failed\">/" \
+ > $XML_OUTPUT_FILE.bak
+
+ rm -f $XML_OUTPUT_FILE
+ mv $XML_OUTPUT_FILE.bak $XML_OUTPUT_FILE
+}
+
+# Multi-platform timestamp function
+UNAME=$(uname -s | tr 'A-Z' 'a-z')
+if [ "$UNAME" = "linux" ] || [[ "$UNAME" =~ msys_nt* ]]; then
+ function timestamp() {
+ echo $(($(date +%s%N)/1000000))
+ }
+else
+ function timestamp() {
+ # OS X and FreeBSD do not have %N so python is the best we can do
+    python -c 'import time; print(int(round(time.time() * 1000)))'
+ }
+fi
+
+function get_run_time() {
+ local ts_start=$1
+ local ts_end=$2
+ run_time_ms=$((${ts_end}-${ts_start}))
+ echo $(($run_time_ms/1000)).${run_time_ms: -3}
+}
+
+# Usage: run_tests <suite-comment>
+# Must be called from the end of the user's test suite.
+# Calls exit with zero on success, non-zero otherwise.
+function run_suite() {
+ echo >&2
+ echo "$1" >&2
+ echo >&2
+
+ __log_to_test_report "<\/testsuites>" "<testsuite></testsuite>"
+
+ local total=0
+ local passed=0
+
+ atexit "cleanup"
+
+ # If the user didn't specify an explicit list of tests (e.g. a
+ # working set), use them all.
+ if [ ${#TESTS[@]} = 0 ]; then
+ TESTS=$(declare -F | awk '{print $3}' | grep ^test_)
+ elif [ -n "${TEST_WARNINGS_OUTPUT_FILE:-}" ]; then
+ if grep -q "TESTS=" "$TEST_script" ; then
+ echo "TESTS variable overridden in Bazel sh_test. Please remove before submitting" \
+ >> "$TEST_WARNINGS_OUTPUT_FILE"
+ fi
+ fi
+
+ __update_shards
+
+ for TEST_name in ${TESTS[@]}; do
+ >$TEST_log # Reset the log.
+ TEST_passed="true"
+
+ total=$(($total + 1))
+ if [[ "$TEST_verbose" == "true" ]]; then
+ __pad $TEST_name '*' >&2
+ fi
+
+ local run_time="0.0"
+ rm -f $TEST_TMPDIR/{__ts_start,__ts_end}
+
+ if [ "$(type -t $TEST_name)" = function ]; then
+ # Save exit handlers eventually set.
+ local SAVED_ATEXIT="$ATEXIT";
+ ATEXIT=
+
+ # Run test in a subshell.
+ rm -f $TEST_TMPDIR/__err_handled
+ __trap_with_arg __test_terminated INT KILL PIPE TERM ABRT FPE ILL QUIT SEGV
+ (
+ timestamp >$TEST_TMPDIR/__ts_start
+ set_up
+ eval $TEST_name
+ tear_down
+ timestamp >$TEST_TMPDIR/__ts_end
+ test $TEST_passed == "true"
+ ) 2>&1 | tee $TEST_TMPDIR/__log
+      # Note that tee will prevent the control flow from continuing if the test
+ # spawned any processes which are still running and have not closed
+ # their stdout.
+
+ test_subshell_status=${PIPESTATUS[0]}
+ if [ "$test_subshell_status" != 0 ]; then
+ TEST_passed="false"
+ # Ensure that an end time is recorded in case the test subshell
+ # terminated prematurely.
+ [ -f $TEST_TMPDIR/__ts_end ] || timestamp >$TEST_TMPDIR/__ts_end
+ fi
+
+ # Calculate run time for the testcase.
+ local ts_start=$(cat $TEST_TMPDIR/__ts_start)
+ local ts_end=$(cat $TEST_TMPDIR/__ts_end)
+ run_time=$(get_run_time $ts_start $ts_end)
+
+ # Eventually restore exit handlers.
+ if [ -n "$SAVED_ATEXIT" ]; then
+ ATEXIT="$SAVED_ATEXIT"
+ trap "$ATEXIT" EXIT
+ fi
+ else # Bad test explicitly specified in $TESTS.
+ fail "Not a function: '$TEST_name'"
+ fi
+
+ local testcase_tag=""
+
+ if [[ "$TEST_passed" == "true" ]]; then
+ if [[ "$TEST_verbose" == "true" ]]; then
+ echo "PASSED: $TEST_name" >&2
+ fi
+ passed=$(($passed + 1))
+ testcase_tag="<testcase name=\"$TEST_name\" status=\"run\" time=\"$run_time\" classname=\"\"></testcase>"
+ else
+ echo "FAILED: $TEST_name" >&2
+ # end marker in CDATA cannot be escaped, we need to split the CDATA sections
+ log=$(cat $TEST_TMPDIR/__log | sed 's/]]>/]]>]]&gt;<![CDATA[/g')
+ fail_msg=$(cat $TEST_TMPDIR/__fail 2> /dev/null || echo "No failure message")
+ testcase_tag="<testcase name=\"$TEST_name\" status=\"run\" time=\"$run_time\" classname=\"\"><error message=\"$fail_msg\"><![CDATA[$log]]></error></testcase>"
+ fi
+
+ if [[ "$TEST_verbose" == "true" ]]; then
+ echo >&2
+ fi
+ __log_to_test_report "<\/testsuite>" "$testcase_tag"
+ done
+
+ __finish_test_report $total $passed
+ __pad "$passed / $total tests passed." '*' >&2
+ [ $total = $passed ] || {
+ __pad "There were errors." '*'
+ exit 1
+ } >&2
+
+ exit 0
+}
diff --git a/third_party/BUILD b/third_party/BUILD
new file mode 100644
index 0000000..0c41157
--- /dev/null
+++ b/third_party/BUILD
@@ -0,0 +1 @@
+# Intentionally empty, only there to make //third_party a package.
diff --git a/third_party/com/github/bazelbuild/bazel/src/main/protobuf/BUILD b/third_party/com/github/bazelbuild/bazel/src/main/protobuf/BUILD
new file mode 100644
index 0000000..c08e13b
--- /dev/null
+++ b/third_party/com/github/bazelbuild/bazel/src/main/protobuf/BUILD
@@ -0,0 +1,30 @@
+load("@com_google_protobuf//:protobuf.bzl", "py_proto_library")
+load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
+load("@rules_proto//proto:defs.bzl", "proto_library")
+
+licenses(["notice"]) # Apache 2.0
+
+py_proto_library(
+ name = "crosstool_config_py_pb2",
+ srcs = ["crosstool_config.proto"],
+ visibility = [
+ "//tools/migration:__pkg__",
+ ],
+)
+
+proto_library(
+ name = "crosstool_config_pb2",
+ srcs = ["crosstool_config.proto"],
+ visibility = [
+ "//tools/migration:__pkg__",
+ ],
+)
+
+go_proto_library(
+ name = "crosstool_config_go_proto",
+ importpath = "third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto",
+ proto = ":crosstool_config_pb2",
+ visibility = [
+ "//tools/migration:__pkg__",
+ ],
+)
diff --git a/third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config.proto b/third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config.proto
new file mode 100644
index 0000000..45ad1e5
--- /dev/null
+++ b/third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config.proto
@@ -0,0 +1,548 @@
+// Copyright 2014 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// File format for Blaze to configure Crosstool releases.
+
+syntax = "proto2";
+
+package com.google.devtools.build.lib.view.config.crosstool;
+
+// option java_api_version = 2; // copybara-comment-this-out-please
+option java_package = "com.google.devtools.build.lib.view.config.crosstool";
+
+// A description of a toolchain, which includes all the tools generally expected
+// to be available for building C/C++ targets, based on the GNU C compiler.
+//
+// System and cpu names are two overlapping concepts, which need to be both
+// supported at this time. The cpu name is the blaze command-line name for the
+// target system. The most common values are 'k8' and 'piii'. The system name is
+// a more generic identification of the executable system, based on the names
+// used by the GNU C compiler.
+//
+// Typically, the system name contains an identifier for the cpu (e.g. x86_64 or
+// alpha), an identifier for the machine (e.g. pc, or unknown), and an
+// identifier for the operating system (e.g. cygwin or linux-gnu). Typical
+// examples are 'x86_64-unknown-linux-gnu' and 'i686-unknown-cygwin'.
+//
+// The system name is used to determine if a given machine can execute a given
+// executable. In particular, it is used to check if the compilation products of
+// a toolchain can run on the host machine.
+message CToolchain {
+ // A group of correlated flags. Supports parametrization via variable
+ // expansion.
+ //
+ // To expand a variable of list type, flag_group has to be annotated with
+ // `iterate_over` message. Then all nested flags or flag_groups will be
+ // expanded repeatedly for each element of the list.
+ //
+ // For example:
+ // flag_group {
+ // iterate_over: 'include_path'
+ // flag: '-I'
+ // flag: '%{include_path}'
+ // }
+ // ... will get expanded to -I /to/path1 -I /to/path2 ... for each
+ // include_path /to/pathN.
+ //
+ // To expand a variable of structure type, use dot-notation, e.g.:
+ // flag_group {
+ // iterate_over: "libraries_to_link"
+ // flag_group {
+ // iterate_over: "libraries_to_link.libraries"
+ // flag: "-L%{libraries_to_link.libraries.directory}"
+ // }
+ // }
+ //
+ // Flag groups can be nested; if they are, the flag group must only contain
+ // other flag groups (no flags) so the order is unambiguously specified.
+ // In order to expand a variable of nested lists, 'iterate_over' can be used.
+ //
+ // For example:
+ // flag_group {
+ // iterate_over: 'object_files'
+ // flag_group { flag: '--start-lib' }
+ // flag_group {
+ // iterate_over: 'object_files'
+ // flag: '%{object_files}'
+ // }
+ // flag_group { flag: '--end-lib' }
+ // }
+ // ... will get expanded to
+ // --start-lib a1.o a2.o ... --end-lib --start-lib b1.o b2.o .. --end-lib
+ // with %{object_files} being a variable of nested list type
+ // [['a1.o', 'a2.o', ...], ['b1.o', 'b2.o', ...], ...].
+ //
+ // TODO(bazel-team): Write more elaborate documentation and add a link to it.
+ message FlagGroup {
+ repeated string flag = 1;
+
+ repeated FlagGroup flag_group = 2;
+
+ optional string iterate_over = 3;
+
+ repeated string expand_if_all_available = 4;
+
+ repeated string expand_if_none_available = 5;
+
+ optional string expand_if_true = 6;
+
+ optional string expand_if_false = 7;
+
+ optional VariableWithValue expand_if_equal = 8;
+ }
+
+ message VariableWithValue {
+ required string variable = 1;
+
+ required string value = 2;
+ }
+
+ // A key/value pair to be added as an environment variable. The value of
+ // this pair is expanded in the same way as is described in FlagGroup.
+ // The key remains an unexpanded string literal.
+ message EnvEntry {
+ required string key = 1;
+ required string value = 2;
+ repeated string expand_if_all_available = 3;
+ }
+
+ // A set of features; used to support logical 'and' when specifying feature
+ // requirements in Feature.
+ message FeatureSet {
+ repeated string feature = 1;
+ }
+
+ // A set of positive and negative features. This stanza will
+ // evaluate to true when every 'feature' is enabled, and every
+ // 'not_feature' is not enabled.
+ message WithFeatureSet {
+ repeated string feature = 1;
+ repeated string not_feature = 2;
+ }
+
+ // A set of flags that are expanded in the command line for specific actions.
+ message FlagSet {
+ // The actions this flag set applies to; each flag set must specify at
+ // least one action.
+ repeated string action = 1;
+
+ // The flags applied via this flag set.
+ repeated FlagGroup flag_group = 2;
+
+ // A list of feature sets defining when this flag set gets applied. The
+ // flag set will be applied when any one of the feature sets evaluate to
+    // true. (That is, when every 'feature' is enabled, and every
+ // 'not_feature' is not enabled.)
+ //
+ // If 'with_feature' is omitted, the flag set will be applied
+ // unconditionally for every action specified.
+ repeated WithFeatureSet with_feature = 3;
+
+ // Deprecated (https://github.com/bazelbuild/bazel/issues/7008) - use
+ // expand_if_all_available in flag_group
+ //
+ // A list of build variables that this feature set needs, but which are
+ // allowed to not be set. If any of the build variables listed is not
+ // set, the feature set will not be expanded.
+ //
+ // NOTE: Consider alternatives before using this; usually tools should
+ // consistently create the same set of files, even if empty; use this
+ // only for backwards compatibility with already existing behavior in tools
+ // that are currently not worth changing.
+ repeated string expand_if_all_available = 4;
+ }
+
+ // A set of environment variables that are expanded in the command line for
+ // specific actions.
+ message EnvSet {
+ // The actions this env set applies to; each env set must specify at
+ // least one action.
+ repeated string action = 1;
+
+ // The environment variables applied via this env set.
+ repeated EnvEntry env_entry = 2;
+
+ // A list of feature sets defining when this env set gets applied. The
+ // env set will be applied when any one of the feature sets evaluate to
+    // true. (That is, when every 'feature' is enabled, and every
+ // 'not_feature' is not enabled.)
+ //
+ // If 'with_feature' is omitted, the env set will be applied
+ // unconditionally for every action specified.
+ repeated WithFeatureSet with_feature = 3;
+ }
+
+ // Contains all flag specifications for one feature.
+ // Next ID: 8
+ message Feature {
+ // The feature's name. Feature names are generally defined by Bazel; it is
+ // possible to introduce a feature without a change to Bazel by adding a
+ // 'feature' section to the toolchain and adding the corresponding string as
+ // feature in the BUILD file.
+ optional string name = 1;
+
+ // If 'true', this feature is enabled unless a rule type explicitly marks it
+ // as unsupported. Such features cannot be turned off from within a BUILD
+ // file or the command line.
+ optional bool enabled = 7;
+
+ // If the given feature is enabled, the flag sets will be applied for the
+ // actions in the modes that they are specified for.
+ repeated FlagSet flag_set = 2;
+
+ // If the given feature is enabled, the env sets will be applied for the
+ // actions in the modes that they are specified for.
+ repeated EnvSet env_set = 6;
+
+ // A list of feature sets defining when this feature is supported by the
+ // toolchain. The feature is supported if any of the feature sets fully
+ // apply, that is, when all features of a feature set are enabled.
+ //
+ // If 'requires' is omitted, the feature is supported independently of which
+ // other features are enabled.
+ //
+ // Use this for example to filter flags depending on the build mode
+ // enabled (opt / fastbuild / dbg).
+ repeated FeatureSet requires = 3;
+
+ // A list of features or action configs that are automatically enabled when
+ // this feature is enabled. If any of the implied features or action configs
+ // cannot be enabled, this feature will (silently) not be enabled either.
+ repeated string implies = 4;
+
+ // A list of names this feature conflicts with.
+ // A feature cannot be enabled if:
+ // - 'provides' contains the name of a different feature or action config
+ // that we want to enable.
+ // - 'provides' contains the same value as a 'provides' in a different
+ // feature or action config that we want to enable.
+ //
+ // Use this in order to ensure that incompatible features cannot be
+ // accidentally activated at the same time, leading to hard to diagnose
+ // compiler errors.
+ repeated string provides = 5;
+ }
+
+ // Describes a tool associated with a crosstool action config.
+ message Tool {
+ // Describes the origin of a path.
+ enum PathOrigin {
+ // Indicates that `tool_path` is relative to the location of the
+      // crosstool. For legacy reasons, absolute paths are also allowed here.
+ CROSSTOOL_PACKAGE = 0;
+
+ // Indicates that `tool_path` is an absolute path.
+ // This is enforced by Bazel.
+ FILESYSTEM_ROOT = 1;
+
+ // Indicates that `tool_path` is relative to the current workspace's
+ // exec root.
+ WORKSPACE_ROOT = 2;
+ }
+
+ // Path to the tool, relative to the location of the crosstool.
+ required string tool_path = 1;
+
+ // Origin of `tool_path`.
+ // Optional only for legacy reasons. New crosstools should set this value!
+ optional PathOrigin tool_path_origin = 4 [default = CROSSTOOL_PACKAGE];
+
+ // A list of feature sets defining when this tool is applicable. The tool
+    // will be used when any one of the feature sets evaluate to true. (That is,
+    // when every 'feature' is enabled, and every 'not_feature' is not
+ // enabled.)
+ //
+ // If 'with_feature' is omitted, the tool will apply for any feature
+ // configuration.
+ repeated WithFeatureSet with_feature = 2;
+
+ // Requirements on the execution environment for the execution of this tool,
+ // to be passed as out-of-band "hints" to the execution backend.
+ // Ex. "requires-darwin"
+ repeated string execution_requirement = 3;
+ }
+
+ // The name for an artifact of a given category of input or output artifacts
+ // to an action.
+ message ArtifactNamePattern {
+ // The category of artifacts that this selection applies to. This field
+ // is compared against a list of categories defined in bazel. Example
+ // categories include "linked_output" or "debug_symbols". An error is thrown
+ // if no category is matched.
+ required string category_name = 1;
+ // The prefix and extension for creating the artifact for this selection.
+ // They are used to create an artifact name based on the target name.
+ required string prefix = 2;
+ required string extension = 3;
+ }
+
+ // An action config corresponds to a blaze action, and allows selection of
+ // a tool based on activated features. Action configs come in two varieties:
+ // automatic (the blaze action will exist whether or not the action config
+ // is activated) and attachable (the blaze action will be added to the
+ // action graph only if the action config is activated).
+ //
+ // Action config activation occurs by the same semantics as features: a
+ // feature can 'require' or 'imply' an action config in the same way that it
+ // would another feature.
+ // Next ID: 9
+ message ActionConfig {
+ // The name other features will use to activate this action config. Can
+ // be the same as action_name.
+ required string config_name = 1;
+
+ // The name of the blaze action that this config applies to, ex. 'c-compile'
+ // or 'c-module-compile'.
+ required string action_name = 2;
+
+ // If 'true', this feature is enabled unless a rule type explicitly marks it
+ // as unsupported. Such action_configs cannot be turned off from within a
+ // BUILD file or the command line.
+ optional bool enabled = 8;
+
+ // The tool applied to the action will be the first Tool with a feature
+ // set that matches the feature configuration. An error will be thrown
+ // if no tool matches a provided feature configuration - for that reason,
+ // it's a good idea to provide a default tool with an empty feature set.
+ repeated Tool tool = 3;
+
+ // If the given action config is enabled, the flag sets will be applied
+ // to the corresponding action.
+ repeated FlagSet flag_set = 4;
+
+ // If the given action config is enabled, the env sets will be applied
+ // to the corresponding action.
+ repeated EnvSet env_set = 5;
+
+ // A list of feature sets defining when this action config
+ // is supported by the toolchain. The action config is supported if any of
+ // the feature sets fully apply, that is, when all features of a
+ // feature set are enabled.
+ //
+ // If 'requires' is omitted, the action config is supported independently
+ // of which other features are enabled.
+ //
+ // Use this for example to filter actions depending on the build
+ // mode enabled (opt / fastbuild / dbg).
+ repeated FeatureSet requires = 6;
+
+ // A list of features or action configs that are automatically enabled when
+ // this action config is enabled. If any of the implied features or action
+ // configs cannot be enabled, this action config will (silently)
+ // not be enabled either.
+ repeated string implies = 7;
+ }
+
+ repeated Feature feature = 50;
+ repeated ActionConfig action_config = 53;
+ repeated ArtifactNamePattern artifact_name_pattern = 54;
+
+ // The unique identifier of the toolchain within the crosstool release. It
+ // must be possible to use this as a directory name in a path.
+ // It has to match the following regex: [a-zA-Z_][\.\- \w]*
+ required string toolchain_identifier = 1;
+
+ // A basic toolchain description.
+ required string host_system_name = 2;
+ required string target_system_name = 3;
+ required string target_cpu = 4;
+ required string target_libc = 5;
+ required string compiler = 6;
+
+ required string abi_version = 7;
+ required string abi_libc_version = 8;
+
+ // Tool locations. Relative paths are resolved relative to the configuration
+ // file directory.
+ // NOTE: DEPRECATED. Prefer specifying an ActionConfig for the action that
+ // needs the tool.
+ // TODO(b/27903698) migrate to ActionConfig.
+ repeated ToolPath tool_path = 9;
+
+ // Feature flags.
+ // TODO(bazel-team): Sink those into 'Feature' instances.
+ // Legacy field, ignored by Bazel.
+ optional bool supports_gold_linker = 10 [default = false];
+ // Legacy field, ignored by Bazel.
+ optional bool supports_thin_archives = 11 [default = false];
+ // Legacy field, use 'supports_start_end_lib' feature instead.
+ optional bool supports_start_end_lib = 28 [default = false];
+ // Legacy field, use 'supports_interface_shared_libraries' instead.
+ optional bool supports_interface_shared_objects = 32 [default = false];
+ // Legacy field, use 'static_link_cpp_runtimes' feature instead.
+ optional bool supports_embedded_runtimes = 40 [default = false];
+ // If specified, Blaze finds statically linked / dynamically linked runtime
+ // libraries in the declared crosstool filegroup. Otherwise, Blaze
+ // looks in "[static|dynamic]-runtime-libs-$TARGET_CPU".
+ // Deprecated, see https://github.com/bazelbuild/bazel/issues/6942
+ optional string static_runtimes_filegroup = 45;
+ // Deprecated, see https://github.com/bazelbuild/bazel/issues/6942
+ optional string dynamic_runtimes_filegroup = 46;
+ // Legacy field, ignored by Bazel.
+ optional bool supports_incremental_linker = 41 [default = false];
+ // Legacy field, ignored by Bazel.
+ optional bool supports_normalizing_ar = 26 [default = false];
+ // Legacy field, use 'per_object_debug_info' feature instead.
+ optional bool supports_fission = 43 [default = false];
+ // Legacy field, ignored by Bazel.
+ optional bool supports_dsym = 51 [default = false];
+ // Legacy field, use 'supports_pic' feature instead
+ optional bool needsPic = 12 [default = false];
+
+ // Compiler flags for C/C++/Asm compilation.
+ repeated string compiler_flag = 13;
+ // Additional compiler flags for C++ compilation.
+ repeated string cxx_flag = 14;
+ // Additional unfiltered compiler flags for C/C++/Asm compilation.
+ // These are not subject to nocopt filtering in cc_* rules.
+ // Note: These flags are *not* applied to objc/objc++ compiles.
+ repeated string unfiltered_cxx_flag = 25;
+ // Linker flags.
+ repeated string linker_flag = 15;
+ // Additional linker flags when linking dynamic libraries.
+ repeated string dynamic_library_linker_flag = 27;
+ // Additional test-only linker flags.
+ repeated string test_only_linker_flag = 49;
+ // Objcopy flags for embedding files into binaries.
+ repeated string objcopy_embed_flag = 16;
+ // Ld flags for embedding files into binaries. This is used by filewrapper
+ // since it calls ld directly and needs to know what -m flag to pass.
+ repeated string ld_embed_flag = 23;
+ // Ar flags for combining object files into archives. If this is not set, it
+ // defaults to "rcsD".
+ // TODO(b/37271982): Remove after blaze with ar action_config release
+ repeated string ar_flag = 47;
+ // Legacy field, ignored by Bazel.
+ repeated string ar_thin_archives_flag = 48;
+ // Legacy field, ignored by Bazel.
+ repeated string gcc_plugin_compiler_flag = 34;
+
+ // Additional compiler and linker flags depending on the compilation mode.
+ repeated CompilationModeFlags compilation_mode_flags = 17;
+
+ // Additional linker flags depending on the linking mode.
+ repeated LinkingModeFlags linking_mode_flags = 18;
+
+ // Legacy field, ignored by Bazel.
+ repeated string gcc_plugin_header_directory = 19;
+ // Legacy field, ignored by Bazel.
+ repeated string mao_plugin_header_directory = 20;
+
+ // Make variables that are made accessible to rules.
+ repeated MakeVariable make_variable = 21;
+
+ // Built-in include directories for C++ compilation. These should be the exact
+ // paths used by the compiler, and are generally relative to the exec root.
+ // The paths used by the compiler can be determined by 'gcc -Wp,-v some.c'.
+ // We currently use the C++ paths also for C compilation, which is safe as
+ // long as there are no name clashes between C++ and C header files.
+ //
+ // Relative paths are resolved relative to the configuration file directory.
+ //
+ // If the compiler has --sysroot support, then these paths should use
+ // %sysroot% rather than the include path, and specify the sysroot attribute
+ // in order to give blaze the information necessary to make the correct
+ // replacements.
+ repeated string cxx_builtin_include_directory = 22;
+
+ // The built-in sysroot. If this attribute is not present, blaze does not
+ // allow using a different sysroot, i.e. through the --grte_top option. Also
+ // see the documentation above.
+ optional string builtin_sysroot = 24;
+
+ // Legacy field, ignored by Bazel.
+ optional string default_python_top = 29;
+ // Legacy field, ignored by Bazel.
+ optional string default_python_version = 30;
+ // Legacy field, ignored by Bazel.
+ optional bool python_preload_swigdeps = 42;
+
+ // The default GRTE to use. This should be a label, and gets the same
+ // treatment from Blaze as the --grte_top option. This setting is only used in
+ // the absence of an explicit --grte_top option. If unset, Blaze will not pass
+ // -sysroot by default. The local part must be 'everything', i.e.,
+ // '//some/label:everything'. There can only be one GRTE library per package,
+ // because the compiler expects the directory as a parameter of the -sysroot
+ // option.
+ // This may only be set to a non-empty value if builtin_sysroot is also set!
+ optional string default_grte_top = 31;
+
+ // Legacy field, ignored by Bazel.
+ repeated string debian_extra_requires = 33;
+
+ // Legacy field, ignored by Bazel. Only there for compatibility with
+ // things internal to Google.
+ optional string cc_target_os = 55;
+
+ // Next free id: 56
+}
+
+message ToolPath {
+ required string name = 1;
+ required string path = 2;
+}
+
+enum CompilationMode {
+ FASTBUILD = 1;
+ DBG = 2;
+ OPT = 3;
+ // This value is ignored and should not be used in new files.
+ COVERAGE = 4;
+}
+
+message CompilationModeFlags {
+ required CompilationMode mode = 1;
+ repeated string compiler_flag = 2;
+ repeated string cxx_flag = 3;
+ // Linker flags that are added when compiling in a certain mode.
+ repeated string linker_flag = 4;
+}
+
+enum LinkingMode {
+ FULLY_STATIC = 1;
+ MOSTLY_STATIC = 2;
+ DYNAMIC = 3;
+ MOSTLY_STATIC_LIBRARIES = 4;
+}
+
+message LinkingModeFlags {
+ required LinkingMode mode = 1;
+ repeated string linker_flag = 2;
+}
+
+message MakeVariable {
+ required string name = 1;
+ required string value = 2;
+}
+
+message DefaultCpuToolchain {
+ required string cpu = 1;
+ required string toolchain_identifier = 2;
+}
+
+// An entire crosstool release, containing the version number, and a set of
+// toolchains.
+message CrosstoolRelease {
+ // The major and minor version of the crosstool release.
+ required string major_version = 1;
+ required string minor_version = 2;
+
+ // Legacy field, ignored by Bazel.
+ optional string default_target_cpu = 3;
+ // Legacy field, ignored by Bazel.
+ repeated DefaultCpuToolchain default_toolchain = 4;
+
+ // All the toolchains in this release.
+ repeated CToolchain toolchain = 5;
+}
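+
+// Illustrative sketch only, not part of the original proto: a minimal
+// CrosstoolRelease in text proto form might look roughly like the following
+// (all identifiers and paths here are hypothetical).
+//
+//   major_version: "1"
+//   minor_version: "0"
+//   toolchain {
+//     toolchain_identifier: "linux-x86_64-clang"
+//     host_system_name: "x86_64-unknown-linux-gnu"
+//     target_system_name: "x86_64-unknown-linux-gnu"
+//     target_cpu: "k8"
+//     target_libc: "glibc"
+//     compiler: "clang"
+//     abi_version: "local"
+//     abi_libc_version: "local"
+//     tool_path { name: "gcc" path: "/usr/bin/clang" }
+//     cxx_builtin_include_directory: "/usr/include"
+//   }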
diff --git a/third_party/six.BUILD b/third_party/six.BUILD
new file mode 100644
index 0000000..19433c2
--- /dev/null
+++ b/third_party/six.BUILD
@@ -0,0 +1,16 @@
+# Description:
+# Six provides simple utilities for wrapping over differences between Python 2
+# and Python 3.
+
+load("@rules_python//python:defs.bzl", "py_library")
+
+licenses(["notice"]) # MIT
+
+exports_files(["LICENSE"])
+
+py_library(
+ name = "six",
+ srcs = ["six.py"],
+ srcs_version = "PY2AND3",
+ visibility = ["//visibility:public"],
+)
diff --git a/tools/migration/BUILD b/tools/migration/BUILD
new file mode 100644
index 0000000..b1dfafb
--- /dev/null
+++ b/tools/migration/BUILD
@@ -0,0 +1,150 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+
+# Go rules
+load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test")
+
+# Python rules
+load("@rules_python//python:defs.bzl", "py_binary", "py_library", "py_test")
+
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+py_binary(
+ name = "legacy_fields_migrator",
+ srcs = ["legacy_fields_migrator.py"],
+ python_version = "PY3",
+ deps = [
+ ":legacy_fields_migration_lib",
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ "@io_abseil_py//absl:app",
+ "@io_abseil_py//absl/flags",
+ ],
+)
+
+py_library(
+ name = "legacy_fields_migration_lib",
+ srcs = ["legacy_fields_migration_lib.py"],
+ deps = [
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ ],
+)
+
+py_test(
+ name = "legacy_fields_migration_lib_test",
+ srcs = ["legacy_fields_migration_lib_test.py"],
+ python_version = "PY3",
+ deps = [
+ ":legacy_fields_migration_lib",
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ ],
+)
+
+py_binary(
+ name = "crosstool_query",
+ srcs = ["crosstool_query.py"],
+ python_version = "PY3",
+ deps = [
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ "@io_abseil_py//absl:app",
+ "@io_abseil_py//absl/flags",
+ ],
+)
+
+py_binary(
+ name = "ctoolchain_comparator",
+ srcs = ["ctoolchain_comparator.py"],
+ python_version = "PY3",
+ deps = [
+ ":ctoolchain_comparator_lib",
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ "@io_abseil_py//absl:app",
+ "@io_abseil_py//absl/flags",
+ ],
+)
+
+py_library(
+ name = "ctoolchain_comparator_lib",
+ srcs = ["ctoolchain_comparator_lib.py"],
+ deps = [
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ ],
+)
+
+py_test(
+ name = "ctoolchain_comparator_lib_test",
+ srcs = ["ctoolchain_comparator_lib_test.py"],
+ python_version = "PY3",
+ deps = [
+ ":ctoolchain_comparator_lib",
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_py_pb2",
+ "@py_mock//py/mock",
+ ],
+)
+
+go_binary(
+ name = "convert_crosstool_to_starlark",
+ srcs = ["convert_crosstool_to_starlark.go"],
+ deps = [
+ ":crosstooltostarlarklib",
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_go_proto",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ ],
+)
+
+go_library(
+ name = "crosstooltostarlarklib",
+ srcs = ["crosstool_to_starlark_lib.go"],
+ importpath = "tools/migration/crosstooltostarlarklib",
+ deps = ["//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_go_proto"],
+)
+
+go_test(
+ name = "crosstooltostarlarklib_test",
+ size = "small",
+ srcs = ["crosstool_to_starlark_lib_test.go"],
+ embed = [":crosstooltostarlarklib"],
+ deps = [
+ "//third_party/com/github/bazelbuild/bazel/src/main/protobuf:crosstool_config_go_proto",
+ "@com_github_golang_protobuf//proto:go_default_library",
+ ],
+)
+
+filegroup(
+ name = "bazel_osx_p4deps",
+ srcs = [
+ "BUILD",
+ "ctoolchain_compare.bzl",
+ ],
+)
+
+exports_files([
+ "cc_toolchain_config_comparator.bzl",
+ "ctoolchain_compare.bzl",
+])
+
+bzl_library(
+ name = "ctoolchain_compare_bzl",
+ srcs = ["ctoolchain_compare.bzl"],
+ visibility = ["//visibility:private"],
+)
+
+bzl_library(
+ name = "cc_toolchain_config_comparator_bzl",
+ srcs = ["cc_toolchain_config_comparator.bzl"],
+ visibility = ["//visibility:private"],
+)
diff --git a/tools/migration/cc_toolchain_config_comparator.bzl b/tools/migration/cc_toolchain_config_comparator.bzl
new file mode 100644
index 0000000..66746b3
--- /dev/null
+++ b/tools/migration/cc_toolchain_config_comparator.bzl
@@ -0,0 +1,53 @@
+"""A test rule that compares two C++ toolchain configuration rules in proto format."""
+
+def _impl(ctx):
+ first_toolchain_config_proto = ctx.actions.declare_file(
+ ctx.label.name + "_first_toolchain_config.proto",
+ )
+ ctx.actions.write(
+ first_toolchain_config_proto,
+ ctx.attr.first[CcToolchainConfigInfo].proto,
+ )
+
+ second_toolchain_config_proto = ctx.actions.declare_file(
+ ctx.label.name + "_second_toolchain_config.proto",
+ )
+ ctx.actions.write(
+ second_toolchain_config_proto,
+ ctx.attr.second[CcToolchainConfigInfo].proto,
+ )
+
+ script = ("%s --before='%s' --after='%s'" % (
+ ctx.executable._comparator.short_path,
+ first_toolchain_config_proto.short_path,
+ second_toolchain_config_proto.short_path,
+ ))
+ test_executable = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(test_executable, script, is_executable = True)
+
+ runfiles = ctx.runfiles(files = [first_toolchain_config_proto, second_toolchain_config_proto])
+ runfiles = runfiles.merge(ctx.attr._comparator[DefaultInfo].default_runfiles)
+
+ return DefaultInfo(runfiles = runfiles, executable = test_executable)
+
+cc_toolchain_config_compare_test = rule(
+ implementation = _impl,
+ attrs = {
+ "first": attr.label(
+ mandatory = True,
+ providers = [CcToolchainConfigInfo],
+ doc = "A C++ toolchain config rule",
+ ),
+ "second": attr.label(
+ mandatory = True,
+ providers = [CcToolchainConfigInfo],
+ doc = "A C++ toolchain config rule",
+ ),
+ "_comparator": attr.label(
+ default = ":ctoolchain_comparator",
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+ test = True,
+)
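+
+# Illustrative usage only (target names are hypothetical, not part of this change):
+#
+#     load("//tools/migration:cc_toolchain_config_comparator.bzl", "cc_toolchain_config_compare_test")
+#
+#     cc_toolchain_config_compare_test(
+#         name = "legacy_vs_starlark_config_test",
+#         first = ":legacy_cc_toolchain_config",
+#         second = ":starlark_cc_toolchain_config",
+#     )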
diff --git a/tools/migration/convert_crosstool_to_starlark.go b/tools/migration/convert_crosstool_to_starlark.go
new file mode 100644
index 0000000..2c31456
--- /dev/null
+++ b/tools/migration/convert_crosstool_to_starlark.go
@@ -0,0 +1,101 @@
+/*
+The convert_crosstool_to_starlark script takes in a CROSSTOOL file and
+generates a Starlark rule.
+
+See https://github.com/bazelbuild/bazel/issues/5380
+
+Example usage:
+bazel run \
+@rules_cc//tools/migration:convert_crosstool_to_starlark -- \
+--crosstool=/path/to/CROSSTOOL \
+--output_location=/path/to/cc_config.bzl
+*/
+package main
+
+import (
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/user"
+ "path"
+ "strings"
+
+ // Google internal base/go package, commented out by copybara
+ "log"
+ crosstoolpb "third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto"
+ "github.com/golang/protobuf/proto"
+
+ "tools/migration/crosstooltostarlarklib"
+)
+
+var (
+ crosstoolLocation = flag.String(
+ "crosstool", "", "Location of the CROSSTOOL file")
+ outputLocation = flag.String(
+ "output_location", "", "Location of the output .bzl file")
+)
+
+func toAbsolutePath(pathString string) (string, error) {
+ usr, err := user.Current()
+ if err != nil {
+ return "", err
+ }
+ homeDir := usr.HomeDir
+
+ if strings.HasPrefix(pathString, "~") {
+ return path.Join(homeDir, pathString[1:]), nil
+ }
+
+ if path.IsAbs(pathString) {
+ return pathString, nil
+ }
+
+ workingDirectory := os.Getenv("BUILD_WORKING_DIRECTORY")
+ return path.Join(workingDirectory, pathString), nil
+}
+
+func main() {
+ flag.Parse()
+
+ if *crosstoolLocation == "" {
+ log.Fatalf("Missing mandatory argument 'crosstool'")
+ }
+ crosstoolPath, err := toAbsolutePath(*crosstoolLocation)
+ if err != nil {
+		log.Fatalf("Error while resolving CROSSTOOL location: %v", err)
+ }
+
+ if *outputLocation == "" {
+ log.Fatalf("Missing mandatory argument 'output_location'")
+ }
+ outputPath, err := toAbsolutePath(*outputLocation)
+ if err != nil {
+		log.Fatalf("Error resolving output location: %v", err)
+ }
+
+ in, err := ioutil.ReadFile(crosstoolPath)
+ if err != nil {
+		log.Fatalf("Error reading CROSSTOOL file: %v", err)
+ }
+ crosstool := &crosstoolpb.CrosstoolRelease{}
+ if err := proto.UnmarshalText(string(in), crosstool); err != nil {
+		log.Fatalf("Failed to parse CROSSTOOL: %v", err)
+ }
+
+ file, err := os.Create(outputPath)
+ if err != nil {
+		log.Fatalf("Error creating output file: %v", err)
+ }
+ defer file.Close()
+
+ rule, err := crosstooltostarlarklib.Transform(crosstool)
+ if err != nil {
+		log.Fatalf("Error converting CROSSTOOL to a Starlark rule: %v", err)
+ }
+
+ if _, err := file.WriteString(rule); err != nil {
+		log.Fatalf("Error writing to output file: %v", err)
+ }
+ fmt.Println("Success!")
+}
diff --git a/tools/migration/crosstool_query.py b/tools/migration/crosstool_query.py
new file mode 100644
index 0000000..af3f7fa
--- /dev/null
+++ b/tools/migration/crosstool_query.py
@@ -0,0 +1,53 @@
+"""Script to make automated CROSSTOOL refactorings easier.
+
+This script reads the CROSSTOOL file and allows for querying of its fields.
+"""
+
+from absl import app
+from absl import flags
+from google.protobuf import text_format
+from third_party.com.github.bazelbuild.bazel.src.main.protobuf import crosstool_config_pb2
+
+flags.DEFINE_string("crosstool", None, "CROSSTOOL file path to be queried")
+flags.DEFINE_string("identifier", None,
+ "Toolchain identifier to specify toolchain.")
+flags.DEFINE_string("print_field", None, "Field to be printed to stdout.")
+
+
+def main(unused_argv):
+ crosstool = crosstool_config_pb2.CrosstoolRelease()
+
+ crosstool_filename = flags.FLAGS.crosstool
+ identifier = flags.FLAGS.identifier
+ print_field = flags.FLAGS.print_field
+
+ if not crosstool_filename:
+ raise app.UsageError("ERROR crosstool unspecified")
+ if not identifier:
+ raise app.UsageError("ERROR identifier unspecified")
+
+ if not print_field:
+ raise app.UsageError("ERROR print_field unspecified")
+
+ with open(crosstool_filename, "r") as f:
+ text = f.read()
+ text_format.Merge(text, crosstool)
+
+ toolchain_found = False
+ for toolchain in crosstool.toolchain:
+ if toolchain.toolchain_identifier == identifier:
+ toolchain_found = True
+ if not print_field:
+ continue
+ for field, value in toolchain.ListFields():
+ if print_field == field.name:
+          print(value)
+
+ if not toolchain_found:
+    print("toolchain_identifier %s not found, valid values are:" % identifier)
+ for toolchain in crosstool.toolchain:
+      print(" " + toolchain.toolchain_identifier)
+
+
+if __name__ == "__main__":
+ app.run(main)
diff --git a/tools/migration/crosstool_to_starlark_lib.go b/tools/migration/crosstool_to_starlark_lib.go
new file mode 100644
index 0000000..4403a4b
--- /dev/null
+++ b/tools/migration/crosstool_to_starlark_lib.go
@@ -0,0 +1,1419 @@
+/*
+Package crosstooltostarlarklib provides the Transform method
+for conversion of a CROSSTOOL file to a Starlark rule.
+
+https://github.com/bazelbuild/bazel/issues/5380
+*/
+package crosstooltostarlarklib
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "sort"
+ "strings"
+
+ crosstoolpb "third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto"
+)
+
+// CToolchainIdentifier is what we'll use to distinguish between CToolchains.
+// If a CToolchain can be distinguished from the other CToolchains by only one
+// of the fields (e.g. if cpu is different for each CToolchain), then only that
+// field will be set.
+type CToolchainIdentifier struct {
+ cpu string
+ compiler string
+}
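+
+// Illustrative example only (toolchain identifiers and field values below are
+// hypothetical, not part of this change): given CToolchains with
+// (cpu: "k8", compiler: "gcc"), (cpu: "k8", compiler: "clang") and
+// (cpu: "darwin", compiler: "clang"), toolchainToCToolchainIdentifier would
+// produce roughly:
+//
+//	{
+//		"k8-gcc":       CToolchainIdentifier{cpu: "k8", compiler: "gcc"},
+//		"k8-clang":     CToolchainIdentifier{cpu: "k8", compiler: "clang"},
+//		"darwin-clang": CToolchainIdentifier{cpu: "darwin", compiler: ""},
+//	}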
+
+// Writes the load statement for the cc_toolchain_config_lib
+func getCcToolchainConfigHeader() string {
+ return `load("@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl",
+ "action_config",
+ "artifact_name_pattern",
+ "env_entry",
+ "env_set",
+ "feature",
+ "feature_set",
+ "flag_group",
+ "flag_set",
+ "make_variable",
+ "tool",
+ "tool_path",
+ "variable_with_value",
+ "with_feature_set",
+)
+`
+}
+
+var allCompileActions = []string{
+ "c-compile",
+ "c++-compile",
+ "linkstamp-compile",
+ "assemble",
+ "preprocess-assemble",
+ "c++-header-parsing",
+ "c++-module-compile",
+ "c++-module-codegen",
+ "clif-match",
+ "lto-backend",
+}
+
+var allCppCompileActions = []string{
+ "c++-compile",
+ "linkstamp-compile",
+ "c++-header-parsing",
+ "c++-module-compile",
+ "c++-module-codegen",
+ "clif-match",
+}
+
+var preprocessorCompileActions = []string{
+ "c-compile",
+ "c++-compile",
+ "linkstamp-compile",
+ "preprocess-assemble",
+ "c++-header-parsing",
+ "c++-module-compile",
+ "clif-match",
+}
+
+var codegenCompileActions = []string{
+ "c-compile",
+ "c++-compile",
+ "linkstamp-compile",
+ "assemble",
+ "preprocess-assemble",
+ "c++-module-codegen",
+ "lto-backend",
+}
+
+var allLinkActions = []string{
+ "c++-link-executable",
+ "c++-link-dynamic-library",
+ "c++-link-nodeps-dynamic-library",
+}
+
+var actionNames = map[string]string{
+ "c-compile": "ACTION_NAMES.c_compile",
+ "c++-compile": "ACTION_NAMES.cpp_compile",
+ "linkstamp-compile": "ACTION_NAMES.linkstamp_compile",
+ "cc-flags-make-variable": "ACTION_NAMES.cc_flags_make_variable",
+ "c++-module-codegen": "ACTION_NAMES.cpp_module_codegen",
+ "c++-header-parsing": "ACTION_NAMES.cpp_header_parsing",
+ "c++-module-compile": "ACTION_NAMES.cpp_module_compile",
+ "assemble": "ACTION_NAMES.assemble",
+ "preprocess-assemble": "ACTION_NAMES.preprocess_assemble",
+ "lto-indexing": "ACTION_NAMES.lto_indexing",
+ "lto-backend": "ACTION_NAMES.lto_backend",
+ "c++-link-executable": "ACTION_NAMES.cpp_link_executable",
+ "c++-link-dynamic-library": "ACTION_NAMES.cpp_link_dynamic_library",
+ "c++-link-nodeps-dynamic-library": "ACTION_NAMES.cpp_link_nodeps_dynamic_library",
+ "c++-link-static-library": "ACTION_NAMES.cpp_link_static_library",
+ "strip": "ACTION_NAMES.strip",
+ "objc-compile": "ACTION_NAMES.objc_compile",
+ "objc++-compile": "ACTION_NAMES.objcpp_compile",
+ "clif-match": "ACTION_NAMES.clif_match",
+// "objcopy_embed_data": "ACTION_NAMES.objcopy_embed_data", // copybara-comment-this-out-please
+// "ld_embed_data": "ACTION_NAMES.ld_embed_data", // copybara-comment-this-out-please
+}
+
+func getLoadActionsStmt() string {
+ return "load(\"@bazel_tools//tools/build_defs/cc:action_names.bzl\", \"ACTION_NAMES\")\n\n"
+}
+
+// Returns a map {toolchain_identifier : CToolchainIdentifier}
+func toolchainToCToolchainIdentifier(
+ crosstool *crosstoolpb.CrosstoolRelease) map[string]CToolchainIdentifier {
+ cpuToCompiler := make(map[string][]string)
+ compilerToCPU := make(map[string][]string)
+ var cpus []string
+ var compilers []string
+ var identifiers []string
+ res := make(map[string]CToolchainIdentifier)
+ for _, cToolchain := range crosstool.GetToolchain() {
+ cpu := cToolchain.GetTargetCpu()
+ compiler := cToolchain.GetCompiler()
+
+ cpuToCompiler[cpu] = append(cpuToCompiler[cpu], compiler)
+ compilerToCPU[compiler] = append(compilerToCPU[compiler], cpu)
+
+ cpus = append(cpus, cToolchain.GetTargetCpu())
+ compilers = append(compilers, cToolchain.GetCompiler())
+ identifiers = append(identifiers, cToolchain.GetToolchainIdentifier())
+ }
+
+ for i := range cpus {
+ if len(cpuToCompiler[cpus[i]]) == 1 {
+ // if cpu is unique among CToolchains, we don't need the compiler field
+ res[identifiers[i]] = CToolchainIdentifier{cpu: cpus[i], compiler: ""}
+ } else {
+ res[identifiers[i]] = CToolchainIdentifier{
+ cpu: cpus[i],
+ compiler: compilers[i],
+ }
+ }
+ }
+ return res
+}
+
+func getConditionStatementForCToolchainIdentifier(identifier CToolchainIdentifier) string {
+ if identifier.compiler != "" {
+ return fmt.Sprintf(
+ "ctx.attr.cpu == \"%s\" and ctx.attr.compiler == \"%s\"",
+ identifier.cpu,
+ identifier.compiler)
+ }
+ return fmt.Sprintf("ctx.attr.cpu == \"%s\"", identifier.cpu)
+}
+
+func isArrayPrefix(prefix []string, arr []string) bool {
+ if len(prefix) > len(arr) {
+ return false
+ }
+ for i := 0; i < len(prefix); i++ {
+ if arr[i] != prefix[i] {
+ return false
+ }
+ }
+ return true
+}
+
+func isAllCompileActions(actions []string) (bool, []string) {
+ if isArrayPrefix(allCompileActions, actions) {
+ return true, actions[len(allCompileActions):]
+ }
+ return false, actions
+}
+
+func isAllCppCompileActions(actions []string) (bool, []string) {
+ if isArrayPrefix(allCppCompileActions, actions) {
+ return true, actions[len(allCppCompileActions):]
+ }
+ return false, actions
+}
+
+func isPreprocessorCompileActions(actions []string) (bool, []string) {
+ if isArrayPrefix(preprocessorCompileActions, actions) {
+ return true, actions[len(preprocessorCompileActions):]
+ }
+ return false, actions
+}
+
+func isCodegenCompileActions(actions []string) (bool, []string) {
+ if isArrayPrefix(codegenCompileActions, actions) {
+ return true, actions[len(codegenCompileActions):]
+ }
+ return false, actions
+}
+
+func isAllLinkActions(actions []string) (bool, []string) {
+ if isArrayPrefix(allLinkActions, actions) {
+ return true, actions[len(allLinkActions):]
+ }
+ return false, actions
+}
+
+func getActionNames(actions []string) []string {
+ var res []string
+ for _, el := range actions {
+ if name, ok := actionNames[el]; ok {
+ res = append(res, name)
+ } else {
+ res = append(res, "\""+el+"\"")
+ }
+ }
+ return res
+}
+
+func getListOfActions(name string, depth int) string {
+ var res []string
+ if name == "all_compile_actions" {
+ res = getActionNames(allCompileActions)
+ } else if name == "all_cpp_compile_actions" {
+ res = getActionNames(allCppCompileActions)
+ } else if name == "preprocessor_compile_actions" {
+ res = getActionNames(preprocessorCompileActions)
+ } else if name == "codegen_compile_actions" {
+ res = getActionNames(codegenCompileActions)
+ } else if name == "all_link_actions" {
+ res = getActionNames(allLinkActions)
+ }
+ stmt := fmt.Sprintf("%s%s = %s\n\n", getTabs(depth),
+ name, makeStringArr(res, depth /* isPlainString= */, false))
+ return stmt
+}
+
+func processActions(actions []string, depth int) []string {
+ var res []string
+ var ok bool
+ initLen := len(actions)
+ if ok, actions = isAllCompileActions(actions); ok {
+ res = append(res, "all_compile_actions")
+ }
+ if ok, actions = isAllCppCompileActions(actions); ok {
+ res = append(res, "all_cpp_compile_actions")
+ }
+ if ok, actions = isPreprocessorCompileActions(actions); ok {
+ res = append(res, "preprocessor_compile_actions")
+ }
+ if ok, actions = isCodegenCompileActions(actions); ok {
+		res = append(res, "codegen_compile_actions")
+ }
+ if ok, actions = isAllLinkActions(actions); ok {
+ res = append(res, "all_link_actions")
+ }
+ if len(actions) != 0 {
+ actions = getActionNames(actions)
+ newDepth := depth + 1
+ if len(actions) != initLen {
+ newDepth++
+ }
+ res = append(res, makeStringArr(actions, newDepth /* isPlainString= */, false))
+ }
+ return res
+}
+
+func getUniqueValues(arr []string) []string {
+ valuesSet := make(map[string]bool)
+ for _, val := range arr {
+ valuesSet[val] = true
+ }
+ var uniques []string
+	for val := range valuesSet {
+ uniques = append(uniques, val)
+ }
+ sort.Strings(uniques)
+ return uniques
+}
+
+func getRule(cToolchainIdentifiers map[string]CToolchainIdentifier,
+ allowedCompilers []string) string {
+ cpus := make(map[string]bool)
+ shouldUseCompilerAttribute := false
+ for _, val := range cToolchainIdentifiers {
+ cpus[val.cpu] = true
+ if val.compiler != "" {
+ shouldUseCompilerAttribute = true
+ }
+ }
+
+ var cpuValues []string
+ for cpu := range cpus {
+ cpuValues = append(cpuValues, cpu)
+ }
+
+ var args []string
+ sort.Strings(cpuValues)
+ args = append(args,
+ fmt.Sprintf(
+ `"cpu": attr.string(mandatory=True, values=["%s"]),`,
+ strings.Join(cpuValues, "\", \"")))
+ if shouldUseCompilerAttribute {
+ // If there are two CToolchains that share the cpu we need the compiler attribute
+ // for our cc_toolchain_config rule.
+ allowedCompilers = getUniqueValues(allowedCompilers)
+ args = append(args,
+ fmt.Sprintf(`"compiler": attr.string(mandatory=True, values=["%s"]),`,
+ strings.Join(allowedCompilers, "\", \"")))
+ }
+ return fmt.Sprintf(`cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ %s
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
+`, strings.Join(args, "\n "))
+}
+
+func getImplHeader() string {
+ return "def _impl(ctx):\n"
+}
+
+func getStringStatement(crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier, field string,
+ depth int) string {
+
+ identifiers := getToolchainIdentifiers(crosstool)
+ var fieldValues []string
+ if field == "toolchain_identifier" {
+ fieldValues = getToolchainIdentifiers(crosstool)
+ } else if field == "host_system_name" {
+ fieldValues = getHostSystemNames(crosstool)
+ } else if field == "target_system_name" {
+ fieldValues = getTargetSystemNames(crosstool)
+ } else if field == "target_cpu" {
+ fieldValues = getTargetCpus(crosstool)
+ } else if field == "target_libc" {
+ fieldValues = getTargetLibcs(crosstool)
+ } else if field == "compiler" {
+ fieldValues = getCompilers(crosstool)
+ } else if field == "abi_version" {
+ fieldValues = getAbiVersions(crosstool)
+ } else if field == "abi_libc_version" {
+ fieldValues = getAbiLibcVersions(crosstool)
+ } else if field == "cc_target_os" {
+ fieldValues = getCcTargetOss(crosstool)
+ } else if field == "builtin_sysroot" {
+ fieldValues = getBuiltinSysroots(crosstool)
+ }
+
+ mappedValuesToIds := getMappedStringValuesToIdentifiers(identifiers, fieldValues)
+ return getAssignmentStatement(field, mappedValuesToIds, crosstool,
+ cToolchainIdentifiers, depth /* isPlainString= */, true /* shouldFail= */, true)
+}
+
+func getFeatures(crosstool *crosstoolpb.CrosstoolRelease) (
+ map[string][]string, map[string]map[string][]string, error) {
+ featureNameToFeature := make(map[string]map[string][]string)
+ toolchainToFeatures := make(map[string][]string)
+ for _, toolchain := range crosstool.GetToolchain() {
+ id := toolchain.GetToolchainIdentifier()
+ if len(toolchain.GetFeature()) == 0 {
+ toolchainToFeatures[id] = []string{}
+ }
+ for _, feature := range toolchain.GetFeature() {
+ featureName := strings.ToLower(feature.GetName()) + "_feature"
+ featureName = strings.Replace(featureName, "+", "p", -1)
+ featureName = strings.Replace(featureName, ".", "_", -1)
+ featureName = strings.Replace(featureName, "-", "_", -1)
+ stringFeature, err := parseFeature(feature, 1)
+ if err != nil {
+ return nil, nil, fmt.Errorf(
+ "Error in feature '%s': %v", feature.GetName(), err)
+ }
+ if _, ok := featureNameToFeature[featureName]; !ok {
+ featureNameToFeature[featureName] = make(map[string][]string)
+ }
+ featureNameToFeature[featureName][stringFeature] = append(
+ featureNameToFeature[featureName][stringFeature], id)
+ toolchainToFeatures[id] = append(toolchainToFeatures[id], featureName)
+ }
+ }
+ return toolchainToFeatures, featureNameToFeature, nil
+}
+
+func getFeaturesDeclaration(crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier,
+ featureNameToFeature map[string]map[string][]string, depth int) string {
+ var res []string
+ for featureName, featureStringToID := range featureNameToFeature {
+ res = append(res,
+ getAssignmentStatement(
+ featureName,
+ featureStringToID,
+ crosstool,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false,
+ /* shouldFail= */ false))
+ }
+ return strings.Join(res, "")
+}
+
+func getFeaturesStmt(cToolchainIdentifiers map[string]CToolchainIdentifier,
+ toolchainToFeatures map[string][]string, depth int) string {
+ var res []string
+ arrToIdentifier := make(map[string][]string)
+ for id, features := range toolchainToFeatures {
+ arrayString := strings.Join(features, "{arrayFieldDelimiter}")
+ arrToIdentifier[arrayString] = append(arrToIdentifier[arrayString], id)
+ }
+ res = append(res,
+ getStringArrStatement(
+ "features",
+ arrToIdentifier,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false))
+ return strings.Join(res, "\n")
+}
+
+func getActions(crosstool *crosstoolpb.CrosstoolRelease) (
+ map[string][]string, map[string]map[string][]string, error) {
+ actionNameToAction := make(map[string]map[string][]string)
+ toolchainToActions := make(map[string][]string)
+ for _, toolchain := range crosstool.GetToolchain() {
+ id := toolchain.GetToolchainIdentifier()
+ var actionName string
+ if len(toolchain.GetActionConfig()) == 0 {
+ toolchainToActions[id] = []string{}
+ }
+ for _, action := range toolchain.GetActionConfig() {
+ if aName, ok := actionNames[action.GetActionName()]; ok {
+ actionName = aName
+ } else {
+ actionName = strings.ToLower(action.GetActionName())
+ actionName = strings.Replace(actionName, "+", "p", -1)
+ actionName = strings.Replace(actionName, ".", "_", -1)
+ actionName = strings.Replace(actionName, "-", "_", -1)
+ }
+ stringAction, err := parseAction(action, 1)
+ if err != nil {
+ return nil, nil, fmt.Errorf(
+ "Error in action_config '%s': %v", action.GetActionName(), err)
+ }
+ if _, ok := actionNameToAction[actionName]; !ok {
+ actionNameToAction[actionName] = make(map[string][]string)
+ }
+ actionNameToAction[actionName][stringAction] = append(
+ actionNameToAction[actionName][stringAction], id)
+ toolchainToActions[id] = append(
+ toolchainToActions[id],
+ strings.TrimPrefix(strings.ToLower(actionName), "action_names.")+"_action")
+ }
+ }
+ return toolchainToActions, actionNameToAction, nil
+}
+
+func getActionConfigsDeclaration(
+ crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier,
+ actionNameToAction map[string]map[string][]string, depth int) string {
+ var res []string
+ for actionName, actionStringToID := range actionNameToAction {
+ variableName := strings.TrimPrefix(strings.ToLower(actionName), "action_names.") + "_action"
+ res = append(res,
+ getAssignmentStatement(
+ variableName,
+ actionStringToID,
+ crosstool,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false,
+ /* shouldFail= */ false))
+ }
+ return strings.Join(res, "")
+}
+
+func getActionConfigsStmt(
+ cToolchainIdentifiers map[string]CToolchainIdentifier,
+ toolchainToActions map[string][]string, depth int) string {
+ var res []string
+ arrToIdentifier := make(map[string][]string)
+ for id, actions := range toolchainToActions {
+		arrayString := strings.Join(actions, "{arrayFieldDelimiter}")
+ arrToIdentifier[arrayString] = append(arrToIdentifier[arrayString], id)
+ }
+ res = append(res,
+ getStringArrStatement(
+ "action_configs",
+ arrToIdentifier,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false))
+ return strings.Join(res, "\n")
+}
+
+func parseAction(action *crosstoolpb.CToolchain_ActionConfig, depth int) (string, error) {
+ actionName := action.GetActionName()
+ aName := ""
+ if val, ok := actionNames[actionName]; ok {
+ aName = val
+ } else {
+ aName = "\"" + action.GetActionName() + "\""
+ }
+ name := fmt.Sprintf("action_name = %s", aName)
+ fields := []string{name}
+ if action.GetEnabled() {
+ fields = append(fields, "enabled = True")
+ }
+ if len(action.GetFlagSet()) != 0 {
+ flagSets, err := parseFlagSets(action.GetFlagSet(), depth+1)
+ if err != nil {
+ return "", err
+ }
+ fields = append(fields, "flag_sets = "+flagSets)
+ }
+ if len(action.GetImplies()) != 0 {
+ implies := "implies = " +
+ makeStringArr(action.GetImplies(), depth+1 /* isPlainString= */, true)
+ fields = append(fields, implies)
+ }
+ if len(action.GetTool()) != 0 {
+ tools := "tools = " + parseTools(action.GetTool(), depth+1)
+ fields = append(fields, tools)
+ }
+ return createObject("action_config", fields, depth), nil
+}
+
+func getStringArrStatement(attr string, arrValToIds map[string][]string,
+ cToolchainIdentifiers map[string]CToolchainIdentifier, depth int, plainString bool) string {
+ var b bytes.Buffer
+ if len(arrValToIds) == 0 {
+ b.WriteString(fmt.Sprintf("%s%s = []\n", getTabs(depth), attr))
+ } else if len(arrValToIds) == 1 {
+ for value := range arrValToIds {
+ var arr []string
+ if value == "" {
+ arr = []string{}
+ } else if value == "None" {
+ b.WriteString(fmt.Sprintf("%s%s = None\n", getTabs(depth), attr))
+ break
+ } else {
+ arr = strings.Split(value, "{arrayFieldDelimiter}")
+ }
+ b.WriteString(
+ fmt.Sprintf(
+ "%s%s = %s\n",
+ getTabs(depth),
+ attr,
+ makeStringArr(arr, depth+1, plainString)))
+ break
+ }
+ } else {
+ first := true
+ var keys []string
+ for k := range arrValToIds {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ for _, value := range keys {
+ ids := arrValToIds[value]
+ branch := "elif"
+ if first {
+ branch = "if"
+ }
+ first = false
+ var arr []string
+ if value == "" {
+ arr = []string{}
+ } else if value == "None" {
+ b.WriteString(
+ getIfStatement(
+ branch, ids, attr, "None", cToolchainIdentifiers,
+ depth /* isPlainString= */, true))
+ continue
+ } else {
+ arr = strings.Split(value, "{arrayFieldDelimiter}")
+ }
+ b.WriteString(
+ getIfStatement(branch, ids, attr,
+ makeStringArr(arr, depth+1, plainString),
+ cToolchainIdentifiers, depth /* isPlainString= */, false))
+ }
+ b.WriteString(fmt.Sprintf("%selse:\n%sfail(\"Unreachable\")\n", getTabs(depth), getTabs(depth+1)))
+ }
+ b.WriteString("\n")
+ return b.String()
+}
+
+func getStringArr(crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier, attr string, depth int) string {
+ var res []string
+ arrToIdentifier := make(map[string][]string)
+ for _, toolchain := range crosstool.GetToolchain() {
+ id := toolchain.GetToolchainIdentifier()
+ arrayString := strings.Join(getArrField(attr, toolchain), "{arrayFieldDelimiter}")
+ arrToIdentifier[arrayString] = append(arrToIdentifier[arrayString], id)
+ }
+ statement := getStringArrStatement(attr, arrToIdentifier, cToolchainIdentifiers, depth /* isPlainString= */, true)
+ res = append(res, statement)
+ return strings.Join(res, "\n")
+}
+
+func getArrField(attr string, toolchain *crosstoolpb.CToolchain) []string {
+ var arr []string
+ if attr == "cxx_builtin_include_directories" {
+ arr = toolchain.GetCxxBuiltinIncludeDirectory()
+ }
+ return arr
+}
+
+func getTabs(depth int) string {
+ var res string
+ for i := 0; i < depth; i++ {
+ res = res + " "
+ }
+ return res
+}
+
+func createObject(objtype string, fields []string, depth int) string {
+ if len(fields) == 0 {
+ return objtype + "()"
+ }
+ singleLine := objtype + "(" + strings.Join(fields, ", ") + ")"
+ if len(singleLine) < 60 {
+ return singleLine
+ }
+ return objtype +
+ "(\n" +
+ getTabs(depth+1) +
+ strings.Join(fields, ",\n"+getTabs(depth+1)) +
+ ",\n" + getTabs(depth) +
+ ")"
+}
+
+func getArtifactNamePatterns(crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier, depth int) string {
+ var res []string
+ artifactToIds := make(map[string][]string)
+ for _, toolchain := range crosstool.GetToolchain() {
+ artifactNamePatterns := parseArtifactNamePatterns(
+ toolchain.GetArtifactNamePattern(),
+ depth)
+ artifactToIds[artifactNamePatterns] = append(
+ artifactToIds[artifactNamePatterns],
+ toolchain.GetToolchainIdentifier())
+ }
+ res = append(res,
+ getAssignmentStatement(
+ "artifact_name_patterns",
+ artifactToIds,
+ crosstool,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false,
+ /* shouldFail= */ true))
+ return strings.Join(res, "\n")
+}
+
+func parseArtifactNamePatterns(
+ artifactNamePatterns []*crosstoolpb.CToolchain_ArtifactNamePattern, depth int) string {
+ var res []string
+ for _, pattern := range artifactNamePatterns {
+ res = append(res, parseArtifactNamePattern(pattern, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseArtifactNamePattern(
+ artifactNamePattern *crosstoolpb.CToolchain_ArtifactNamePattern, depth int) string {
+ categoryName := fmt.Sprintf("category_name = \"%s\"", artifactNamePattern.GetCategoryName())
+ prefix := fmt.Sprintf("prefix = \"%s\"", artifactNamePattern.GetPrefix())
+ extension := fmt.Sprintf("extension = \"%s\"", artifactNamePattern.GetExtension())
+ fields := []string{categoryName, prefix, extension}
+ return createObject("artifact_name_pattern", fields, depth)
+}
+
+func parseFeature(feature *crosstoolpb.CToolchain_Feature, depth int) (string, error) {
+ name := fmt.Sprintf("name = \"%s\"", feature.GetName())
+
+ fields := []string{name}
+ if feature.GetEnabled() {
+ fields = append(fields, "enabled = True")
+ }
+
+ if len(feature.GetFlagSet()) > 0 {
+ flagSets, err := parseFlagSets(feature.GetFlagSet(), depth+1)
+ if err != nil {
+ return "", err
+ }
+ fields = append(fields, "flag_sets = "+flagSets)
+ }
+ if len(feature.GetEnvSet()) > 0 {
+ envSets := "env_sets = " + parseEnvSets(feature.GetEnvSet(), depth+1)
+ fields = append(fields, envSets)
+ }
+ if len(feature.GetRequires()) > 0 {
+ requires := "requires = " + parseFeatureSets(feature.GetRequires(), depth+1)
+ fields = append(fields, requires)
+ }
+ if len(feature.GetImplies()) > 0 {
+ implies := "implies = " +
+ makeStringArr(feature.GetImplies(), depth+1 /* isPlainString= */, true)
+ fields = append(fields, implies)
+ }
+ if len(feature.GetProvides()) > 0 {
+ provides := "provides = " +
+ makeStringArr(feature.GetProvides(), depth+1 /* isPlainString= */, true)
+ fields = append(fields, provides)
+ }
+ return createObject("feature", fields, depth), nil
+}
+
+func parseFlagSets(flagSets []*crosstoolpb.CToolchain_FlagSet, depth int) (string, error) {
+ var res []string
+ for _, flagSet := range flagSets {
+ parsedFlagset, err := parseFlagSet(flagSet, depth+1)
+ if err != nil {
+ return "", err
+ }
+ res = append(res, parsedFlagset)
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false), nil
+}
+
+func parseFlagSet(flagSet *crosstoolpb.CToolchain_FlagSet, depth int) (string, error) {
+ var fields []string
+ if len(flagSet.GetAction()) > 0 {
+ actionArr := processActions(flagSet.GetAction(), depth)
+ actions := "actions = " + strings.Join(actionArr, " +\n"+getTabs(depth+2))
+ fields = append(fields, actions)
+ }
+ if len(flagSet.GetFlagGroup()) > 0 {
+ flagGroups, err := parseFlagGroups(flagSet.GetFlagGroup(), depth+1)
+ if err != nil {
+ return "", err
+ }
+ fields = append(fields, "flag_groups = "+flagGroups)
+ }
+ if len(flagSet.GetWithFeature()) > 0 {
+ withFeatures := "with_features = " +
+ parseWithFeatureSets(flagSet.GetWithFeature(), depth+1)
+ fields = append(fields, withFeatures)
+ }
+ return createObject("flag_set", fields, depth), nil
+}
+
+func parseFlagGroups(flagGroups []*crosstoolpb.CToolchain_FlagGroup, depth int) (string, error) {
+ var res []string
+ for _, flagGroup := range flagGroups {
+ flagGroupString, err := parseFlagGroup(flagGroup, depth+1)
+ if err != nil {
+ return "", err
+ }
+ res = append(res, flagGroupString)
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false), nil
+}
+
+func parseFlagGroup(flagGroup *crosstoolpb.CToolchain_FlagGroup, depth int) (string, error) {
+ var res []string
+ if len(flagGroup.GetFlag()) != 0 {
+ res = append(res, "flags = "+makeStringArr(flagGroup.GetFlag(), depth+1, true))
+ }
+ if flagGroup.GetIterateOver() != "" {
+ res = append(res, fmt.Sprintf("iterate_over = \"%s\"", flagGroup.GetIterateOver()))
+ }
+ if len(flagGroup.GetFlagGroup()) != 0 {
+ flagGroupString, err := parseFlagGroups(flagGroup.GetFlagGroup(), depth+1)
+ if err != nil {
+ return "", err
+ }
+ res = append(res, "flag_groups = "+flagGroupString)
+ }
+ if len(flagGroup.GetExpandIfAllAvailable()) > 1 {
+ return "", errors.New("Flag group must not have more than one 'expand_if_all_available' field")
+ }
+ if len(flagGroup.GetExpandIfAllAvailable()) != 0 {
+ res = append(res,
+ fmt.Sprintf(
+ "expand_if_available = \"%s\"",
+ flagGroup.GetExpandIfAllAvailable()[0]))
+ }
+ if len(flagGroup.GetExpandIfNoneAvailable()) > 1 {
+ return "", errors.New("Flag group must not have more than one 'expand_if_none_available' field")
+ }
+ if len(flagGroup.GetExpandIfNoneAvailable()) != 0 {
+ res = append(res,
+ fmt.Sprintf(
+ "expand_if_not_available = \"%s\"",
+ flagGroup.GetExpandIfNoneAvailable()[0]))
+ }
+ if flagGroup.GetExpandIfTrue() != "" {
+ res = append(res, fmt.Sprintf("expand_if_true = \"%s\"",
+ flagGroup.GetExpandIfTrue()))
+ }
+ if flagGroup.GetExpandIfFalse() != "" {
+ res = append(res, fmt.Sprintf("expand_if_false = \"%s\"",
+ flagGroup.GetExpandIfFalse()))
+ }
+ if flagGroup.GetExpandIfEqual() != nil {
+ res = append(res,
+ "expand_if_equal = "+parseVariableWithValue(
+ flagGroup.GetExpandIfEqual(), depth+1))
+ }
+ return createObject("flag_group", res, depth), nil
+}
+
+func parseVariableWithValue(variable *crosstoolpb.CToolchain_VariableWithValue, depth int) string {
+ variableName := fmt.Sprintf("name = \"%s\"", variable.GetVariable())
+ value := fmt.Sprintf("value = \"%s\"", variable.GetValue())
+ return createObject("variable_with_value", []string{variableName, value}, depth)
+}
+
+func getToolPaths(crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier, depth int) string {
+ var res []string
+ toolPathsToIds := make(map[string][]string)
+ for _, toolchain := range crosstool.GetToolchain() {
+ toolPaths := parseToolPaths(toolchain.GetToolPath(), depth)
+ toolPathsToIds[toolPaths] = append(
+ toolPathsToIds[toolPaths],
+ toolchain.GetToolchainIdentifier())
+ }
+ res = append(res,
+ getAssignmentStatement(
+ "tool_paths",
+ toolPathsToIds,
+ crosstool,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false,
+ /* shouldFail= */ true))
+ return strings.Join(res, "\n")
+}
+
+func parseToolPaths(toolPaths []*crosstoolpb.ToolPath, depth int) string {
+ var res []string
+ for _, toolPath := range toolPaths {
+ res = append(res, parseToolPath(toolPath, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseToolPath(toolPath *crosstoolpb.ToolPath, depth int) string {
+ name := fmt.Sprintf("name = \"%s\"", toolPath.GetName())
+ path := toolPath.GetPath()
+ if path == "" {
+ path = "NOT_USED"
+ }
+ path = fmt.Sprintf("path = \"%s\"", path)
+ return createObject("tool_path", []string{name, path}, depth)
+}
+
+func getMakeVariables(crosstool *crosstoolpb.CrosstoolRelease,
+ cToolchainIdentifiers map[string]CToolchainIdentifier, depth int) string {
+ var res []string
+ makeVariablesToIds := make(map[string][]string)
+ for _, toolchain := range crosstool.GetToolchain() {
+ makeVariables := parseMakeVariables(toolchain.GetMakeVariable(), depth)
+ makeVariablesToIds[makeVariables] = append(
+ makeVariablesToIds[makeVariables],
+ toolchain.GetToolchainIdentifier())
+ }
+ res = append(res,
+ getAssignmentStatement(
+ "make_variables",
+ makeVariablesToIds,
+ crosstool,
+ cToolchainIdentifiers,
+ depth,
+ /* isPlainString= */ false,
+ /* shouldFail= */ true))
+ return strings.Join(res, "\n")
+}
+
+func parseMakeVariables(makeVariables []*crosstoolpb.MakeVariable, depth int) string {
+ var res []string
+ for _, makeVariable := range makeVariables {
+ res = append(res, parseMakeVariable(makeVariable, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseMakeVariable(makeVariable *crosstoolpb.MakeVariable, depth int) string {
+ name := fmt.Sprintf("name = \"%s\"", makeVariable.GetName())
+ value := fmt.Sprintf("value = \"%s\"", makeVariable.GetValue())
+ return createObject("make_variable", []string{name, value}, depth)
+}
+
+func parseTools(tools []*crosstoolpb.CToolchain_Tool, depth int) string {
+ var res []string
+ for _, tool := range tools {
+ res = append(res, parseTool(tool, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseTool(tool *crosstoolpb.CToolchain_Tool, depth int) string {
+ toolPath := "path = \"NOT_USED\""
+ if tool.GetToolPath() != "" {
+ toolPath = fmt.Sprintf("path = \"%s\"", tool.GetToolPath())
+ }
+ fields := []string{toolPath}
+ if len(tool.GetWithFeature()) != 0 {
+ withFeatures := "with_features = " + parseWithFeatureSets(tool.GetWithFeature(), depth+1)
+ fields = append(fields, withFeatures)
+ }
+ if len(tool.GetExecutionRequirement()) != 0 {
+ executionRequirements := "execution_requirements = " +
+ makeStringArr(tool.GetExecutionRequirement(), depth+1 /* isPlainString= */, true)
+ fields = append(fields, executionRequirements)
+ }
+ return createObject("tool", fields, depth)
+}
+
+func parseEnvEntries(envEntries []*crosstoolpb.CToolchain_EnvEntry, depth int) string {
+ var res []string
+ for _, envEntry := range envEntries {
+ res = append(res, parseEnvEntry(envEntry, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseEnvEntry(envEntry *crosstoolpb.CToolchain_EnvEntry, depth int) string {
+ key := fmt.Sprintf("key = \"%s\"", envEntry.GetKey())
+ value := fmt.Sprintf("value = \"%s\"", envEntry.GetValue())
+ return createObject("env_entry", []string{key, value}, depth)
+}
+
+func parseWithFeatureSets(withFeatureSets []*crosstoolpb.CToolchain_WithFeatureSet,
+ depth int) string {
+ var res []string
+ for _, withFeature := range withFeatureSets {
+ res = append(res, parseWithFeatureSet(withFeature, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseWithFeatureSet(withFeature *crosstoolpb.CToolchain_WithFeatureSet,
+ depth int) string {
+ var fields []string
+ if len(withFeature.GetFeature()) != 0 {
+ features := "features = " +
+ makeStringArr(withFeature.GetFeature(), depth+1 /* isPlainString= */, true)
+ fields = append(fields, features)
+ }
+ if len(withFeature.GetNotFeature()) != 0 {
+ notFeatures := "not_features = " +
+ makeStringArr(withFeature.GetNotFeature(), depth+1 /* isPlainString= */, true)
+ fields = append(fields, notFeatures)
+ }
+ return createObject("with_feature_set", fields, depth)
+}
+
+func parseEnvSets(envSets []*crosstoolpb.CToolchain_EnvSet, depth int) string {
+ var res []string
+ for _, envSet := range envSets {
+ envSetString := parseEnvSet(envSet, depth+1)
+ res = append(res, envSetString)
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseEnvSet(envSet *crosstoolpb.CToolchain_EnvSet, depth int) string {
+ actionsStatement := processActions(envSet.GetAction(), depth)
+ actions := "actions = " + strings.Join(actionsStatement, " +\n"+getTabs(depth+2))
+ fields := []string{actions}
+ if len(envSet.GetEnvEntry()) != 0 {
+ envEntries := "env_entries = " + parseEnvEntries(envSet.GetEnvEntry(), depth+1)
+ fields = append(fields, envEntries)
+ }
+ if len(envSet.GetWithFeature()) != 0 {
+ withFeatures := "with_features = " + parseWithFeatureSets(envSet.GetWithFeature(), depth+1)
+ fields = append(fields, withFeatures)
+ }
+ return createObject("env_set", fields, depth)
+}
+
+func parseFeatureSets(featureSets []*crosstoolpb.CToolchain_FeatureSet, depth int) string {
+ var res []string
+ for _, featureSet := range featureSets {
+ res = append(res, parseFeatureSet(featureSet, depth+1))
+ }
+ return makeStringArr(res, depth /* isPlainString= */, false)
+}
+
+func parseFeatureSet(featureSet *crosstoolpb.CToolchain_FeatureSet, depth int) string {
+ features := "features = " +
+ makeStringArr(featureSet.GetFeature(), depth+1 /* isPlainString= */, true)
+ return createObject("feature_set", []string{features}, depth)
+}
+
+// makeStringArr takes in a list of string elements and returns a string that
+// represents an array:
+// [
+//     "element1",
+//     "element2",
+// ]
+// The isPlainString argument tells us whether the input elements should be
+// treated as strings (e.g. flags) or not (e.g. variable names).
+func makeStringArr(arr []string, depth int, isPlainString bool) string {
+ if len(arr) == 0 {
+ return "[]"
+ }
+ var escapedArr []string
+ for _, el := range arr {
+ if isPlainString {
+ escapedArr = append(escapedArr, strings.Replace(el, "\"", "\\\"", -1))
+ } else {
+ escapedArr = append(escapedArr, el)
+ }
+ }
+ addQuote := ""
+ if isPlainString {
+ addQuote = "\""
+ }
+ singleLine := "[" + addQuote + strings.Join(escapedArr, addQuote+", "+addQuote) + addQuote + "]"
+ if len(singleLine) < 60 {
+ return singleLine
+ }
+ return "[\n" +
+ getTabs(depth+1) +
+ addQuote +
+ strings.Join(escapedArr, addQuote+",\n"+getTabs(depth+1)+addQuote) +
+ addQuote +
+ ",\n" +
+ getTabs(depth) +
+ "]"
+}
+
+// getAssignmentStatement returns a string that represents a value assignment,
+// e.g.:
+//     if ctx.attr.cpu == "linux":
+//       compiler = "llvm"
+//     elif ctx.attr.cpu == "windows":
+//       compiler = "mingw"
+//     else:
+//       fail("Unreachable")
+func getAssignmentStatement(field string, valToIds map[string][]string,
+ crosstool *crosstoolpb.CrosstoolRelease,
+ toCToolchainIdentifier map[string]CToolchainIdentifier,
+ depth int, isPlainString, shouldFail bool) string {
+ var b bytes.Buffer
+ if len(valToIds) <= 1 {
+ // if there is only one possible value for this field, we don't need if statements
+ for val := range valToIds {
+ if val != "None" && isPlainString {
+ val = "\"" + val + "\""
+ }
+ b.WriteString(fmt.Sprintf("%s%s = %s\n", getTabs(depth), field, val))
+ break
+ }
+ } else {
+ first := true
+ var keys []string
+ for k := range valToIds {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ for _, value := range keys {
+ ids := valToIds[value]
+ branch := "elif"
+ if first {
+ branch = "if"
+ }
+ b.WriteString(
+ getIfStatement(branch, ids, field, value,
+ toCToolchainIdentifier, depth, isPlainString))
+ first = false
+ }
+ if shouldFail {
+ b.WriteString(
+ fmt.Sprintf(
+ "%selse:\n%sfail(\"Unreachable\")\n",
+ getTabs(depth), getTabs(depth+1)))
+ } else {
+ b.WriteString(
+ fmt.Sprintf(
+ "%selse:\n%s%s = None\n",
+ getTabs(depth), getTabs(depth+1), field))
+ }
+ }
+ b.WriteString("\n")
+ return b.String()
+}
+
+func getCPUToCompilers(identifiers []CToolchainIdentifier) map[string][]string {
+ res := make(map[string][]string)
+ for _, identifier := range identifiers {
+ if identifier.compiler != "" {
+ res[identifier.cpu] = append(res[identifier.cpu], identifier.compiler)
+ }
+ }
+ return res
+}
+
+func getIfStatement(ifOrElseIf string, identifiers []string, field, val string,
+ toCToolchainIdentifier map[string]CToolchainIdentifier, depth int,
+ isPlainString bool) string {
+ usedStmts := make(map[string]bool)
+ if val != "None" && isPlainString {
+ val = "\"" + val + "\""
+ }
+ var cToolchainIdentifiers []CToolchainIdentifier
+ for _, value := range toCToolchainIdentifier {
+ cToolchainIdentifiers = append(cToolchainIdentifiers, value)
+ }
+ cpuToCompilers := getCPUToCompilers(cToolchainIdentifiers)
+ countCpus := make(map[string]int)
+ var conditions []string
+ for _, id := range identifiers {
+ identifier := toCToolchainIdentifier[id]
+ stmt := getConditionStatementForCToolchainIdentifier(identifier)
+ if _, ok := usedStmts[stmt]; !ok {
+ conditions = append(conditions, stmt)
+ usedStmts[stmt] = true
+ if identifier.compiler != "" {
+ countCpus[identifier.cpu]++
+ }
+ }
+ }
+
+ var compressedConditions []string
+ usedStmtsOptimized := make(map[string]bool)
+ for _, id := range identifiers {
+ identifier := toCToolchainIdentifier[id]
+ var stmt string
+ if _, ok := countCpus[identifier.cpu]; ok {
+ if countCpus[identifier.cpu] == len(cpuToCompilers[identifier.cpu]) {
+ stmt = getConditionStatementForCToolchainIdentifier(
+ CToolchainIdentifier{cpu: identifier.cpu, compiler: ""})
+ } else {
+ stmt = getConditionStatementForCToolchainIdentifier(identifier)
+ }
+ } else {
+ stmt = getConditionStatementForCToolchainIdentifier(identifier)
+ }
+ if _, ok := usedStmtsOptimized[stmt]; !ok {
+ compressedConditions = append(compressedConditions, stmt)
+ usedStmtsOptimized[stmt] = true
+ }
+ }
+
+ sort.Strings(compressedConditions)
+ val = strings.Join(strings.Split(val, "\n"+getTabs(depth)), "\n"+getTabs(depth+1))
+ return fmt.Sprintf(`%s%s %s:
+%s%s = %s
+`, getTabs(depth),
+ ifOrElseIf,
+ "("+strings.Join(compressedConditions, "\n"+getTabs(depth+1)+"or ")+")",
+ getTabs(depth+1),
+ field,
+ val)
+}
+
+func getToolchainIdentifiers(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetToolchainIdentifier())
+ }
+ return res
+}
+
+func getHostSystemNames(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetHostSystemName())
+ }
+ return res
+}
+
+func getTargetSystemNames(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetTargetSystemName())
+ }
+ return res
+}
+
+func getTargetCpus(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetTargetCpu())
+ }
+ return res
+}
+
+func getTargetLibcs(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetTargetLibc())
+ }
+ return res
+}
+
+func getCompilers(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetCompiler())
+ }
+ return res
+}
+
+func getAbiVersions(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetAbiVersion())
+ }
+ return res
+}
+
+func getAbiLibcVersions(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ res = append(res, toolchain.GetAbiLibcVersion())
+ }
+ return res
+}
+
+func getCcTargetOss(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ targetOS := "None"
+ if toolchain.GetCcTargetOs() != "" {
+ targetOS = toolchain.GetCcTargetOs()
+ }
+ res = append(res, targetOS)
+ }
+ return res
+}
+
+func getBuiltinSysroots(crosstool *crosstoolpb.CrosstoolRelease) []string {
+ var res []string
+ for _, toolchain := range crosstool.GetToolchain() {
+ sysroot := "None"
+ if toolchain.GetBuiltinSysroot() != "" {
+ sysroot = toolchain.GetBuiltinSysroot()
+ }
+ res = append(res, sysroot)
+ }
+ return res
+}
+
+func getMappedStringValuesToIdentifiers(identifiers, fields []string) map[string][]string {
+ res := make(map[string][]string)
+ for i := range identifiers {
+ res[fields[i]] = append(res[fields[i]], identifiers[i])
+ }
+ return res
+}
+
+func getReturnStatement() string {
+ return `
+ out = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(out, "Fake executable")
+ return [
+ cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ artifact_name_patterns = artifact_name_patterns,
+ cxx_builtin_include_directories = cxx_builtin_include_directories,
+ toolchain_identifier = toolchain_identifier,
+ host_system_name = host_system_name,
+ target_system_name = target_system_name,
+ target_cpu = target_cpu,
+ target_libc = target_libc,
+ compiler = compiler,
+ abi_version = abi_version,
+ abi_libc_version = abi_libc_version,
+ tool_paths = tool_paths,
+ make_variables = make_variables,
+ builtin_sysroot = builtin_sysroot,
+ cc_target_os = cc_target_os
+ ),
+ DefaultInfo(
+ executable = out,
+ ),
+ ]
+`
+}
+
+// Transform writes a cc_toolchain_config rule functionally equivalent to the
+// CROSSTOOL file.
+func Transform(crosstool *crosstoolpb.CrosstoolRelease) (string, error) {
+ var b bytes.Buffer
+
+ cToolchainIdentifiers := toolchainToCToolchainIdentifier(crosstool)
+
+ toolchainToFeatures, featureNameToFeature, err := getFeatures(crosstool)
+ if err != nil {
+ return "", err
+ }
+
+ toolchainToActions, actionNameToAction, err := getActions(crosstool)
+ if err != nil {
+ return "", err
+ }
+
+ header := getCcToolchainConfigHeader()
+ if _, err := b.WriteString(header); err != nil {
+ return "", err
+ }
+
+ loadActionsStmt := getLoadActionsStmt()
+ if _, err := b.WriteString(loadActionsStmt); err != nil {
+ return "", err
+ }
+
+ implHeader := getImplHeader()
+ if _, err := b.WriteString(implHeader); err != nil {
+ return "", err
+ }
+
+ stringFields := []string{
+ "toolchain_identifier",
+ "host_system_name",
+ "target_system_name",
+ "target_cpu",
+ "target_libc",
+ "compiler",
+ "abi_version",
+ "abi_libc_version",
+ "cc_target_os",
+ "builtin_sysroot",
+ }
+
+ for _, stringField := range stringFields {
+ stmt := getStringStatement(crosstool, cToolchainIdentifiers, stringField, 1)
+ if _, err := b.WriteString(stmt); err != nil {
+ return "", err
+ }
+ }
+
+ listsOfActions := []string{
+ "all_compile_actions",
+ "all_cpp_compile_actions",
+ "preprocessor_compile_actions",
+ "codegen_compile_actions",
+ "all_link_actions",
+ }
+
+ for _, listOfActions := range listsOfActions {
+ actions := getListOfActions(listOfActions, 1)
+ if _, err := b.WriteString(actions); err != nil {
+ return "", err
+ }
+ }
+
+ actionConfigDeclaration := getActionConfigsDeclaration(
+ crosstool, cToolchainIdentifiers, actionNameToAction, 1)
+ if _, err := b.WriteString(actionConfigDeclaration); err != nil {
+ return "", err
+ }
+
+ actionConfigStatement := getActionConfigsStmt(
+ cToolchainIdentifiers, toolchainToActions, 1)
+ if _, err := b.WriteString(actionConfigStatement); err != nil {
+ return "", err
+ }
+
+ featureDeclaration := getFeaturesDeclaration(
+ crosstool, cToolchainIdentifiers, featureNameToFeature, 1)
+ if _, err := b.WriteString(featureDeclaration); err != nil {
+ return "", err
+ }
+
+ featuresStatement := getFeaturesStmt(
+ cToolchainIdentifiers, toolchainToFeatures, 1)
+ if _, err := b.WriteString(featuresStatement); err != nil {
+ return "", err
+ }
+
+ includeDirectories := getStringArr(
+ crosstool, cToolchainIdentifiers, "cxx_builtin_include_directories", 1)
+ if _, err := b.WriteString(includeDirectories); err != nil {
+ return "", err
+ }
+
+ artifactNamePatterns := getArtifactNamePatterns(
+ crosstool, cToolchainIdentifiers, 1)
+ if _, err := b.WriteString(artifactNamePatterns); err != nil {
+ return "", err
+ }
+
+ makeVariables := getMakeVariables(crosstool, cToolchainIdentifiers, 1)
+ if _, err := b.WriteString(makeVariables); err != nil {
+ return "", err
+ }
+
+ toolPaths := getToolPaths(crosstool, cToolchainIdentifiers, 1)
+ if _, err := b.WriteString(toolPaths); err != nil {
+ return "", err
+ }
+
+ if _, err := b.WriteString(getReturnStatement()); err != nil {
+ return "", err
+ }
+
+ rule := getRule(cToolchainIdentifiers, getCompilers(crosstool))
+ if _, err := b.WriteString(rule); err != nil {
+ return "", err
+ }
+
+ return b.String(), nil
+}
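For readers following the conversion end to end: the sketch below is a minimal, hypothetical driver — it is not part of this change — showing how Transform above could be wired into a standalone tool. It parses a CROSSTOOL text proto with the same proto/crosstoolpb packages the tests in this change use, then writes the generated rule to a .bzl file. The library import path and the input/output file names are illustrative assumptions, not the repository's actual migration CLI.

package main

import (
	"log"
	"os"

	"github.com/golang/protobuf/proto"
	crosstoolpb "third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto"

	// Hypothetical import path; the package defined above is crosstooltostarlarklib.
	crosstooltostarlarklib "tools/migration/crosstooltostarlarklib"
)

func main() {
	// Read and parse the CROSSTOOL text proto (the input path is an assumption).
	data, err := os.ReadFile("CROSSTOOL")
	if err != nil {
		log.Fatalf("reading CROSSTOOL: %v", err)
	}
	crosstool := &crosstoolpb.CrosstoolRelease{}
	if err := proto.UnmarshalText(string(data), crosstool); err != nil {
		log.Fatalf("parsing CROSSTOOL: %v", err)
	}
	// Convert to a Starlark cc_toolchain_config rule and write it out.
	rule, err := crosstooltostarlarklib.Transform(crosstool)
	if err != nil {
		log.Fatalf("converting CROSSTOOL: %v", err)
	}
	if err := os.WriteFile("cc_toolchain_config.bzl", []byte(rule), 0644); err != nil {
		log.Fatalf("writing cc_toolchain_config.bzl: %v", err)
	}
}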
diff --git a/tools/migration/crosstool_to_starlark_lib_test.go b/tools/migration/crosstool_to_starlark_lib_test.go
new file mode 100644
index 0000000..a5db02f
--- /dev/null
+++ b/tools/migration/crosstool_to_starlark_lib_test.go
@@ -0,0 +1,1756 @@
+package crosstooltostarlarklib
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "log"
+ crosstoolpb "third_party/com/github/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto"
+ "github.com/golang/protobuf/proto"
+)
+
+func makeCToolchainString(lines []string) string {
+ return fmt.Sprintf(`toolchain {
+ %s
+}`, strings.Join(lines, "\n "))
+}
+
+func makeCrosstool(CToolchains []string) *crosstoolpb.CrosstoolRelease {
+ crosstool := &crosstoolpb.CrosstoolRelease{}
+ requiredFields := []string{
+ "major_version: '0'",
+ "minor_version: '0'",
+ "default_target_cpu: 'cpu'",
+ }
+ CToolchains = append(CToolchains, requiredFields...)
+ if err := proto.UnmarshalText(strings.Join(CToolchains, "\n"), crosstool); err != nil {
+ log.Fatalf("Failed to parse CROSSTOOL:", err)
+ }
+ return crosstool
+}
+
+func getSimpleCToolchain(id string) string {
+ lines := []string{
+ "toolchain_identifier: 'id-" + id + "'",
+ "host_system_name: 'host-" + id + "'",
+ "target_system_name: 'target-" + id + "'",
+ "target_cpu: 'cpu-" + id + "'",
+ "compiler: 'compiler-" + id + "'",
+ "target_libc: 'libc-" + id + "'",
+ "abi_version: 'version-" + id + "'",
+ "abi_libc_version: 'libc_version-" + id + "'",
+ }
+ return makeCToolchainString(lines)
+}
+
+func getCToolchain(id, cpu, compiler string, extraLines []string) string {
+ lines := []string{
+ "toolchain_identifier: '" + id + "'",
+ "host_system_name: 'host'",
+ "target_system_name: 'target'",
+ "target_cpu: '" + cpu + "'",
+ "compiler: '" + compiler + "'",
+ "target_libc: 'libc'",
+ "abi_version: 'version'",
+ "abi_libc_version: 'libc_version'",
+ }
+ lines = append(lines, extraLines...)
+ return makeCToolchainString(lines)
+}
+
+func TestStringFieldsConditionStatement(t *testing.T) {
+ toolchain1 := getSimpleCToolchain("1")
+ toolchain2 := getSimpleCToolchain("2")
+ toolchains := []string{toolchain1, toolchain2}
+ crosstool := makeCrosstool(toolchains)
+
+ testCases := []struct {
+ field string
+ expectedText string
+ }{
+ {field: "toolchain_identifier",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ toolchain_identifier = "id-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ toolchain_identifier = "id-2"
+ else:
+ fail("Unreachable")`},
+ {field: "host_system_name",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ host_system_name = "host-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ host_system_name = "host-2"
+ else:
+ fail("Unreachable")`},
+ {field: "target_system_name",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ target_system_name = "target-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ target_system_name = "target-2"
+ else:
+ fail("Unreachable")`},
+ {field: "target_cpu",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ target_cpu = "cpu-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ target_cpu = "cpu-2"
+ else:
+ fail("Unreachable")`},
+ {field: "target_libc",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ target_libc = "libc-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ target_libc = "libc-2"
+ else:
+ fail("Unreachable")`},
+ {field: "compiler",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ compiler = "compiler-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ compiler = "compiler-2"
+ else:
+ fail("Unreachable")`},
+ {field: "abi_version",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ abi_version = "version-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ abi_version = "version-2"
+ else:
+ fail("Unreachable")`},
+ {field: "abi_libc_version",
+ expectedText: `
+ if (ctx.attr.cpu == "cpu-1"):
+ abi_libc_version = "libc_version-1"
+ elif (ctx.attr.cpu == "cpu-2"):
+ abi_libc_version = "libc_version-2"
+ else:
+ fail("Unreachable")`}}
+
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+
+ failed := false
+ for _, tc := range testCases {
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ failed = true
+ }
+ }
+ if failed {
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(toolchains, "\n"), got)
+ }
+}
+
+func TestConditionsSameCpu(t *testing.T) {
+ toolchainAA := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainAB := getCToolchain("2", "cpuA", "compilerB", []string{})
+ toolchains := []string{toolchainAA, toolchainAB}
+ crosstool := makeCrosstool(toolchains)
+
+ testCases := []struct {
+ field string
+ expectedText string
+ }{
+ {field: "toolchain_identifier",
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerA"):
+ toolchain_identifier = "1"
+ elif (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerB"):
+ toolchain_identifier = "2"
+ else:
+ fail("Unreachable")`},
+ {field: "host_system_name",
+ expectedText: `
+ host_system_name = "host"`},
+ {field: "target_system_name",
+ expectedText: `
+ target_system_name = "target"`},
+ {field: "target_cpu",
+ expectedText: `
+ target_cpu = "cpuA"`},
+ {field: "target_libc",
+ expectedText: `
+ target_libc = "libc"`},
+ {field: "compiler",
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerA"):
+ compiler = "compilerA"
+ elif (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerB"):
+ compiler = "compilerB"
+ else:
+ fail("Unreachable")`},
+ {field: "abi_version",
+ expectedText: `
+ abi_version = "version"`},
+ {field: "abi_libc_version",
+ expectedText: `
+ abi_libc_version = "libc_version"`}}
+
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+
+ failed := false
+ for _, tc := range testCases {
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ failed = true
+ }
+ }
+ if failed {
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(toolchains, "\n"), got)
+ }
+}
+
+func TestConditionsSameCompiler(t *testing.T) {
+ toolchainAA := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainBA := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchains := []string{toolchainAA, toolchainBA}
+ crosstool := makeCrosstool(toolchains)
+
+ testCases := []struct {
+ field string
+ expectedText string
+ }{
+ {field: "toolchain_identifier",
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"):
+ toolchain_identifier = "1"
+ elif (ctx.attr.cpu == "cpuB"):
+ toolchain_identifier = "2"
+ else:
+ fail("Unreachable")`},
+ {field: "target_cpu",
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"):
+ target_cpu = "cpuA"
+ elif (ctx.attr.cpu == "cpuB"):
+ target_cpu = "cpuB"
+ else:
+ fail("Unreachable")`},
+ {field: "compiler",
+ expectedText: `
+ compiler = "compilerA"`}}
+
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+
+ failed := false
+ for _, tc := range testCases {
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ failed = true
+ }
+ }
+ if failed {
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(toolchains, "\n"), got)
+ }
+}
+
+func TestNonMandatoryStrings(t *testing.T) {
+ toolchainAA := getCToolchain("1", "cpuA", "compilerA", []string{"cc_target_os: 'osA'"})
+ toolchainBB := getCToolchain("2", "cpuB", "compilerB", []string{})
+ toolchains := []string{toolchainAA, toolchainBB}
+ crosstool := makeCrosstool(toolchains)
+
+ testCases := []struct {
+ field string
+ expectedText string
+ }{
+ {field: "cc_target_os",
+ expectedText: `
+ if (ctx.attr.cpu == "cpuB"):
+ cc_target_os = None
+ elif (ctx.attr.cpu == "cpuA"):
+ cc_target_os = "osA"
+ else:
+ fail("Unreachable")`},
+ {field: "builtin_sysroot",
+ expectedText: `
+ builtin_sysroot = None`}}
+
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+
+ failed := false
+ for _, tc := range testCases {
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ failed = true
+ }
+ }
+ if failed {
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(toolchains, "\n"), got)
+ }
+}
+
+func TestBuiltinIncludeDirectories(t *testing.T) {
+ toolchainAA := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainBA := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainCA := getCToolchain("3", "cpuC", "compilerA",
+ []string{"cxx_builtin_include_directory: 'dirC'"})
+ toolchainCB := getCToolchain("4", "cpuC", "compilerB",
+ []string{"cxx_builtin_include_directory: 'dirC'",
+ "cxx_builtin_include_directory: 'dirB'"})
+ toolchainDA := getCToolchain("5", "cpuD", "compilerA",
+ []string{"cxx_builtin_include_directory: 'dirC'"})
+
+ toolchainsEmpty := []string{toolchainAA, toolchainBA}
+
+ toolchainsOneNonempty := []string{toolchainAA, toolchainBA, toolchainCA}
+
+ toolchainsSameNonempty := []string{toolchainCA, toolchainDA}
+
+ allToolchains := []string{toolchainAA, toolchainBA, toolchainCA, toolchainCB, toolchainDA}
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "cxx_builtin_include_directories",
+ toolchains: toolchainsEmpty,
+ expectedText: `
+ cxx_builtin_include_directories = []`},
+ {field: "cxx_builtin_include_directories",
+ toolchains: toolchainsOneNonempty,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ cxx_builtin_include_directories = []
+ elif (ctx.attr.cpu == "cpuC"):
+ cxx_builtin_include_directories = ["dirC"]
+ else:
+ fail("Unreachable")`},
+ {field: "cxx_builtin_include_directories",
+ toolchains: toolchainsSameNonempty,
+ expectedText: `
+ cxx_builtin_include_directories = ["dirC"]`},
+ {field: "cxx_builtin_include_directories",
+ toolchains: allToolchains,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ cxx_builtin_include_directories = []
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"
+ or ctx.attr.cpu == "cpuD"):
+ cxx_builtin_include_directories = ["dirC"]
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ cxx_builtin_include_directories = ["dirC", "dirB"]`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func TestMakeVariables(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainA1 := getCToolchain("3", "cpuC", "compilerA",
+ []string{"make_variable {name: 'A', value: 'a/b/c'}"})
+ toolchainA2 := getCToolchain("4", "cpuC", "compilerB",
+ []string{"make_variable {name: 'A', value: 'a/b/c'}"})
+ toolchainAB := getCToolchain("5", "cpuC", "compilerC",
+ []string{"make_variable {name: 'A', value: 'a/b/c'}",
+ "make_variable {name: 'B', value: 'a/b/c'}"})
+ toolchainBA := getCToolchain("6", "cpuD", "compilerA",
+ []string{"make_variable {name: 'B', value: 'a/b/c'}",
+ "make_variable {name: 'A', value: 'a b c'}"})
+
+ toolchainsEmpty := []string{toolchainEmpty1, toolchainEmpty2}
+
+ toolchainsOneNonempty := []string{toolchainEmpty1, toolchainA1}
+
+ toolchainsSameNonempty := []string{toolchainA1, toolchainA2}
+
+ toolchainsDifferentOrder := []string{toolchainAB, toolchainBA}
+
+ allToolchains := []string{
+ toolchainEmpty1,
+ toolchainEmpty2,
+ toolchainA1,
+ toolchainA2,
+ toolchainAB,
+ toolchainBA,
+ }
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "make_variables",
+ toolchains: toolchainsEmpty,
+ expectedText: `
+ make_variables = []`},
+ {field: "make_variables",
+ toolchains: toolchainsOneNonempty,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"):
+ make_variables = []
+ elif (ctx.attr.cpu == "cpuC"):
+ make_variables = [make_variable(name = "A", value = "a/b/c")]
+ else:
+ fail("Unreachable")`},
+ {field: "make_variables",
+ toolchains: toolchainsSameNonempty,
+ expectedText: `
+ make_variables = [make_variable(name = "A", value = "a/b/c")]`},
+ {field: "make_variables",
+ toolchains: toolchainsDifferentOrder,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ make_variables = [
+ make_variable(name = "A", value = "a/b/c"),
+ make_variable(name = "B", value = "a/b/c"),
+ ]
+ elif (ctx.attr.cpu == "cpuD"):
+ make_variables = [
+ make_variable(name = "B", value = "a/b/c"),
+ make_variable(name = "A", value = "a b c"),
+ ]
+ else:
+ fail("Unreachable")`},
+ {field: "make_variables",
+ toolchains: allToolchains,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerC"):
+ make_variables = [
+ make_variable(name = "A", value = "a/b/c"),
+ make_variable(name = "B", value = "a/b/c"),
+ ]
+ elif (ctx.attr.cpu == "cpuD"):
+ make_variables = [
+ make_variable(name = "B", value = "a/b/c"),
+ make_variable(name = "A", value = "a b c"),
+ ]
+ elif (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ make_variables = []
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"
+ or ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ make_variables = [make_variable(name = "A", value = "a/b/c")]
+ else:
+ fail("Unreachable")`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func TestToolPaths(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainA1 := getCToolchain("3", "cpuC", "compilerA",
+ []string{"tool_path {name: 'A', path: 'a/b/c'}"})
+ toolchainA2 := getCToolchain("4", "cpuC", "compilerB",
+ []string{"tool_path {name: 'A', path: 'a/b/c'}"})
+ toolchainAB := getCToolchain("5", "cpuC", "compilerC",
+ []string{"tool_path {name: 'A', path: 'a/b/c'}",
+ "tool_path {name: 'B', path: 'a/b/c'}"})
+ toolchainBA := getCToolchain("6", "cpuD", "compilerA",
+ []string{"tool_path {name: 'B', path: 'a/b/c'}",
+ "tool_path {name: 'A', path: 'a/b/c'}"})
+
+ toolchainsEmpty := []string{toolchainEmpty1, toolchainEmpty2}
+
+ toolchainsOneNonempty := []string{toolchainEmpty1, toolchainA1}
+
+ toolchainsSameNonempty := []string{toolchainA1, toolchainA2}
+
+ toolchainsDifferentOrder := []string{toolchainAB, toolchainBA}
+
+ allToolchains := []string{
+ toolchainEmpty1,
+ toolchainEmpty2,
+ toolchainA1,
+ toolchainA2,
+ toolchainAB,
+ toolchainBA,
+ }
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "tool_paths",
+ toolchains: toolchainsEmpty,
+ expectedText: `
+ tool_paths = []`},
+ {field: "tool_paths",
+ toolchains: toolchainsOneNonempty,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"):
+ tool_paths = []
+ elif (ctx.attr.cpu == "cpuC"):
+ tool_paths = [tool_path(name = "A", path = "a/b/c")]
+ else:
+ fail("Unreachable")`},
+ {field: "tool_paths",
+ toolchains: toolchainsSameNonempty,
+ expectedText: `
+ tool_paths = [tool_path(name = "A", path = "a/b/c")]`},
+ {field: "tool_paths",
+ toolchains: toolchainsDifferentOrder,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ tool_paths = [
+ tool_path(name = "A", path = "a/b/c"),
+ tool_path(name = "B", path = "a/b/c"),
+ ]
+ elif (ctx.attr.cpu == "cpuD"):
+ tool_paths = [
+ tool_path(name = "B", path = "a/b/c"),
+ tool_path(name = "A", path = "a/b/c"),
+ ]
+ else:
+ fail("Unreachable")`},
+ {field: "tool_paths",
+ toolchains: allToolchains,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerC"):
+ tool_paths = [
+ tool_path(name = "A", path = "a/b/c"),
+ tool_path(name = "B", path = "a/b/c"),
+ ]
+ elif (ctx.attr.cpu == "cpuD"):
+ tool_paths = [
+ tool_path(name = "B", path = "a/b/c"),
+ tool_path(name = "A", path = "a/b/c"),
+ ]
+ elif (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ tool_paths = []
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"
+ or ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ tool_paths = [tool_path(name = "A", path = "a/b/c")]
+ else:
+ fail("Unreachable")`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func getArtifactNamePattern(lines []string) string {
+ return fmt.Sprintf(`artifact_name_pattern {
+ %s
+}`, strings.Join(lines, "\n "))
+}
+
+func TestArtifactNamePatterns(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainA1 := getCToolchain("3", "cpuC", "compilerA",
+ []string{
+ getArtifactNamePattern([]string{
+ "category_name: 'A'",
+ "prefix: 'p'",
+ "extension: '.exe'"}),
+ },
+ )
+ toolchainA2 := getCToolchain("4", "cpuC", "compilerB",
+ []string{
+ getArtifactNamePattern([]string{
+ "category_name: 'A'",
+ "prefix: 'p'",
+ "extension: '.exe'"}),
+ },
+ )
+ toolchainAB := getCToolchain("5", "cpuC", "compilerC",
+ []string{
+ getArtifactNamePattern([]string{
+ "category_name: 'A'",
+ "prefix: 'p'",
+ "extension: '.exe'"}),
+ getArtifactNamePattern([]string{
+ "category_name: 'B'",
+ "prefix: 'p'",
+ "extension: '.exe'"}),
+ },
+ )
+ toolchainBA := getCToolchain("6", "cpuD", "compilerA",
+ []string{
+ getArtifactNamePattern([]string{
+ "category_name: 'B'",
+ "prefix: 'p'",
+ "extension: '.exe'"}),
+ getArtifactNamePattern([]string{
+ "category_name: 'A'",
+ "prefix: 'p'",
+ "extension: '.exe'"}),
+ },
+ )
+ toolchainsEmpty := []string{toolchainEmpty1, toolchainEmpty2}
+
+ toolchainsOneNonempty := []string{toolchainEmpty1, toolchainA1}
+
+ toolchainsSameNonempty := []string{toolchainA1, toolchainA2}
+
+ toolchainsDifferentOrder := []string{toolchainAB, toolchainBA}
+
+ allToolchains := []string{
+ toolchainEmpty1,
+ toolchainEmpty2,
+ toolchainA1,
+ toolchainA2,
+ toolchainAB,
+ toolchainBA,
+ }
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "artifact_name_patterns",
+ toolchains: toolchainsEmpty,
+ expectedText: `
+ artifact_name_patterns = []`},
+ {field: "artifact_name_patterns",
+ toolchains: toolchainsOneNonempty,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]
+ elif (ctx.attr.cpu == "cpuA"):
+ artifact_name_patterns = []
+ else:
+ fail("Unreachable")`},
+ {field: "artifact_name_patterns",
+ toolchains: toolchainsSameNonempty,
+ expectedText: `
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]`},
+ {field: "artifact_name_patterns",
+ toolchains: toolchainsDifferentOrder,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ artifact_name_pattern(
+ category_name = "B",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]
+ elif (ctx.attr.cpu == "cpuD"):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "B",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]
+ else:
+ fail("Unreachable")`},
+ {field: "artifact_name_patterns",
+ toolchains: allToolchains,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerC"):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ artifact_name_pattern(
+ category_name = "B",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"
+ or ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]
+ elif (ctx.attr.cpu == "cpuD"):
+ artifact_name_patterns = [
+ artifact_name_pattern(
+ category_name = "B",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ artifact_name_pattern(
+ category_name = "A",
+ prefix = "p",
+ extension = ".exe",
+ ),
+ ]
+ elif (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ artifact_name_patterns = []
+ else:
+ fail("Unreachable")`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func getFeature(lines []string) string {
+ return fmt.Sprintf(`feature {
+ %s
+}`, strings.Join(lines, "\n "))
+}
+
+func TestFeatureListAssignment(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainA1 := getCToolchain("3", "cpuC", "compilerA",
+ []string{getFeature([]string{"name: 'A'"})},
+ )
+ toolchainA2 := getCToolchain("4", "cpuC", "compilerB",
+ []string{getFeature([]string{"name: 'A'"})},
+ )
+ toolchainAB := getCToolchain("5", "cpuC", "compilerC",
+ []string{
+ getFeature([]string{"name: 'A'"}),
+ getFeature([]string{"name: 'B'"}),
+ },
+ )
+ toolchainBA := getCToolchain("6", "cpuD", "compilerA",
+ []string{
+ getFeature([]string{"name: 'B'"}),
+ getFeature([]string{"name: 'A'"}),
+ },
+ )
+ toolchainsEmpty := []string{toolchainEmpty1, toolchainEmpty2}
+
+ toolchainsOneNonempty := []string{toolchainEmpty1, toolchainA1}
+
+ toolchainsSameNonempty := []string{toolchainA1, toolchainA2}
+
+ toolchainsDifferentOrder := []string{toolchainAB, toolchainBA}
+
+ allToolchains := []string{
+ toolchainEmpty1,
+ toolchainEmpty2,
+ toolchainA1,
+ toolchainA2,
+ toolchainAB,
+ toolchainBA,
+ }
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "features",
+ toolchains: toolchainsEmpty,
+ expectedText: `
+ features = []`},
+ {field: "features",
+ toolchains: toolchainsOneNonempty,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"):
+ features = []
+ elif (ctx.attr.cpu == "cpuC"):
+ features = [a_feature]
+ else:
+ fail("Unreachable")`},
+ {field: "features",
+ toolchains: toolchainsSameNonempty,
+ expectedText: `
+ features = [a_feature]`},
+ {field: "features",
+ toolchains: toolchainsDifferentOrder,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ features = [a_feature, b_feature]
+ elif (ctx.attr.cpu == "cpuD"):
+ features = [b_feature, a_feature]
+ else:
+ fail("Unreachable")`},
+ {field: "features",
+ toolchains: allToolchains,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ features = []
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"
+ or ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ features = [a_feature]
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerC"):
+ features = [a_feature, b_feature]
+ elif (ctx.attr.cpu == "cpuD"):
+ features = [b_feature, a_feature]
+ else:
+ fail("Unreachable")`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func getActionConfig(lines []string) string {
+ return fmt.Sprintf(`action_config {
+ %s
+}`, strings.Join(lines, "\n "))
+}
+
+func TestActionConfigListAssignment(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainA1 := getCToolchain("3", "cpuC", "compilerA",
+ []string{
+ getActionConfig([]string{"action_name: 'A'", "config_name: 'A'"}),
+ },
+ )
+ toolchainA2 := getCToolchain("4", "cpuC", "compilerB",
+ []string{
+ getActionConfig([]string{"action_name: 'A'", "config_name: 'A'"}),
+ },
+ )
+ toolchainAB := getCToolchain("5", "cpuC", "compilerC",
+ []string{
+ getActionConfig([]string{"action_name: 'A'", "config_name: 'A'"}),
+ getActionConfig([]string{"action_name: 'B'", "config_name: 'B'"}),
+ },
+ )
+ toolchainBA := getCToolchain("6", "cpuD", "compilerA",
+ []string{
+ getActionConfig([]string{"action_name: 'B'", "config_name: 'B'"}),
+ getActionConfig([]string{"action_name: 'A'", "config_name: 'A'"}),
+ },
+ )
+ toolchainsEmpty := []string{toolchainEmpty1, toolchainEmpty2}
+
+ toolchainsOneNonempty := []string{toolchainEmpty1, toolchainA1}
+
+ toolchainsSameNonempty := []string{toolchainA1, toolchainA2}
+
+ toolchainsDifferentOrder := []string{toolchainAB, toolchainBA}
+
+ allToolchains := []string{
+ toolchainEmpty1,
+ toolchainEmpty2,
+ toolchainA1,
+ toolchainA2,
+ toolchainAB,
+ toolchainBA,
+ }
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "action_configs",
+ toolchains: toolchainsEmpty,
+ expectedText: `
+ action_configs = []`},
+ {field: "action_configs",
+ toolchains: toolchainsOneNonempty,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"):
+ action_configs = []
+ elif (ctx.attr.cpu == "cpuC"):
+ action_configs = [a_action]
+ else:
+ fail("Unreachable")`},
+ {field: "action_configs",
+ toolchains: toolchainsSameNonempty,
+ expectedText: `
+ action_configs = [a_action]`},
+ {field: "action_configs",
+ toolchains: toolchainsDifferentOrder,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ action_configs = [a_action, b_action]
+ elif (ctx.attr.cpu == "cpuD"):
+ action_configs = [b_action, a_action]
+ else:
+ fail("Unreachable")`},
+ {field: "action_configs",
+ toolchains: allToolchains,
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA"
+ or ctx.attr.cpu == "cpuB"):
+ action_configs = []
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"
+ or ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ action_configs = [a_action]
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerC"):
+ action_configs = [a_action, b_action]
+ elif (ctx.attr.cpu == "cpuD"):
+ action_configs = [b_action, a_action]
+ else:
+ fail("Unreachable")`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func TestAllAndNoneAvailableErrorsWhenMoreThanOneElement(t *testing.T) {
+ toolchainFeatureAllAvailable := getCToolchain("1", "cpu", "compiler",
+ []string{getFeature([]string{
+ "name: 'A'",
+ "flag_set {",
+ " action: 'A'",
+ " flag_group {",
+ " flag: 'f'",
+ " expand_if_all_available: 'e1'",
+ " expand_if_all_available: 'e2'",
+ " }",
+ "}",
+ })},
+ )
+ toolchainFeatureNoneAvailable := getCToolchain("1", "cpu", "compiler",
+ []string{getFeature([]string{
+ "name: 'A'",
+ "flag_set {",
+ " action: 'A'",
+ " flag_group {",
+ " flag: 'f'",
+ " expand_if_none_available: 'e1'",
+ " expand_if_none_available: 'e2'",
+ " }",
+ "}",
+ })},
+ )
+ toolchainActionConfigAllAvailable := getCToolchain("1", "cpu", "compiler",
+ []string{getActionConfig([]string{
+ "config_name: 'A'",
+ "action_name: 'A'",
+ "flag_set {",
+ " action: 'A'",
+ " flag_group {",
+ " flag: 'f'",
+ " expand_if_all_available: 'e1'",
+ " expand_if_all_available: 'e2'",
+ " }",
+ "}",
+ })},
+ )
+ toolchainActionConfigNoneAvailable := getCToolchain("1", "cpu", "compiler",
+ []string{getActionConfig([]string{
+ "config_name: 'A'",
+ "action_name: 'A'",
+ "flag_set {",
+ " action: 'A'",
+ " flag_group {",
+ " flag: 'f'",
+ " expand_if_none_available: 'e1'",
+ " expand_if_none_available: 'e2'",
+ " }",
+ "}",
+ })},
+ )
+
+ testCases := []struct {
+ field string
+ toolchain string
+ expectedText string
+ }{
+ {field: "features",
+ toolchain: toolchainFeatureAllAvailable,
+ expectedText: "Error in feature 'A': Flag group must not have more " +
+ "than one 'expand_if_all_available' field"},
+ {field: "features",
+ toolchain: toolchainFeatureNoneAvailable,
+ expectedText: "Error in feature 'A': Flag group must not have more " +
+ "than one 'expand_if_none_available' field"},
+ {field: "action_configs",
+ toolchain: toolchainActionConfigAllAvailable,
+ expectedText: "Error in action_config 'A': Flag group must not have more " +
+ "than one 'expand_if_all_available' field"},
+ {field: "action_configs",
+ toolchain: toolchainActionConfigNoneAvailable,
+ expectedText: "Error in action_config 'A': Flag group must not have more " +
+ "than one 'expand_if_none_available' field"},
+ }
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool([]string{tc.toolchain})
+ _, err := Transform(crosstool)
+ if err == nil || !strings.Contains(err.Error(), tc.expectedText) {
+ t.Errorf("Expected error: %s, got: %v", tc.expectedText, err)
+ }
+ }
+}
+
+func TestFeaturesAndActionConfigsSetToNoneWhenAllOptionsAreExhausted(t *testing.T) {
+ toolchainFeatureAEnabled := getCToolchain("1", "cpuA", "compilerA",
+ []string{getFeature([]string{"name: 'A'", "enabled: true"})},
+ )
+ toolchainFeatureADisabled := getCToolchain("2", "cpuA", "compilerB",
+ []string{getFeature([]string{"name: 'A'", "enabled: false"})},
+ )
+
+ toolchainWithoutFeatureA := getCToolchain("3", "cpuA", "compilerC", []string{})
+
+ toolchainActionConfigAEnabled := getCToolchain("4", "cpuA", "compilerD",
+ []string{getActionConfig([]string{
+ "config_name: 'A'",
+ "action_name: 'A'",
+ "enabled: true",
+ })})
+
+ toolchainActionConfigADisabled := getCToolchain("5", "cpuA", "compilerE",
+ []string{getActionConfig([]string{
+ "config_name: 'A'",
+ "action_name: 'A'",
+ })})
+
+ toolchainWithoutActionConfigA := getCToolchain("6", "cpuA", "compilerF", []string{})
+
+ testCases := []struct {
+ field string
+ toolchains []string
+ expectedText string
+ }{
+ {field: "features",
+ toolchains: []string{
+ toolchainFeatureAEnabled, toolchainFeatureADisabled, toolchainWithoutFeatureA},
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerB"):
+ a_feature = feature(name = "A")
+ elif (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerA"):
+ a_feature = feature(name = "A", enabled = True)
+ else:
+ a_feature = None
+`},
+ {field: "action_config",
+ toolchains: []string{
+ toolchainActionConfigAEnabled, toolchainActionConfigADisabled, toolchainWithoutActionConfigA},
+ expectedText: `
+ if (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerE"):
+ a_action = action_config(action_name = "A")
+ elif (ctx.attr.cpu == "cpuA" and ctx.attr.compiler == "compilerD"):
+ a_action = action_config(action_name = "A", enabled = True)
+ else:
+ a_action = None
+`},
+ }
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly convert '%s' field, expected to contain:\n%v\n",
+ tc.field, tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func TestActionConfigDeclaration(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+
+ toolchainNameNotInDict := getCToolchain("3", "cpBC", "compilerB",
+ []string{
+ getActionConfig([]string{"action_name: 'A-B.C'", "config_name: 'A-B.C'"}),
+ },
+ )
+ toolchainNameInDictA := getCToolchain("4", "cpuC", "compilerA",
+ []string{
+ getActionConfig([]string{"action_name: 'c++-compile'", "config_name: 'c++-compile'"}),
+ },
+ )
+ toolchainNameInDictB := getCToolchain("5", "cpuC", "compilerB",
+ []string{
+ getActionConfig([]string{
+ "action_name: 'c++-compile'",
+ "config_name: 'c++-compile'",
+ "tool {",
+ " tool_path: '/a/b/c'",
+ "}",
+ }),
+ },
+ )
+ toolchainComplexActionConfig := getCToolchain("6", "cpuC", "compilerC",
+ []string{
+ getActionConfig([]string{
+ "action_name: 'action-complex'",
+ "config_name: 'action-complex'",
+ "enabled: true",
+ "tool {",
+ " tool_path: '/a/b/c'",
+ " with_feature {",
+ " feature: 'a'",
+ " feature: 'b'",
+ " not_feature: 'c'",
+ " not_feature: 'd'",
+ " }",
+ " with_feature{",
+ " feature: 'e'",
+ " }",
+ " execution_requirement: 'a'",
+ "}",
+ "tool {",
+ " tool_path: ''",
+ "}",
+ "flag_set {",
+ " flag_group {",
+ " flag: 'a'",
+ " flag: '%b'",
+ " iterate_over: 'c'",
+ " expand_if_all_available: 'd'",
+ " expand_if_none_available: 'e'",
+ " expand_if_true: 'f'",
+ " expand_if_false: 'g'",
+ " expand_if_equal {",
+ " variable: 'var'",
+ " value: 'val'",
+ " }",
+ " }",
+ " flag_group {",
+ " flag_group {",
+ " flag: 'a'",
+ " }",
+ " }",
+ "}",
+ "flag_set {",
+ " with_feature {",
+ " feature: 'a'",
+ " feature: 'b'",
+ " not_feature: 'c'",
+ " not_feature: 'd'",
+ " }",
+ "}",
+ "env_set {",
+ " action: 'a'",
+ " env_entry {",
+ " key: 'k'",
+ " value: 'v'",
+ " }",
+ " with_feature {",
+ " feature: 'a'",
+ " }",
+ "}",
+ "requires {",
+ " feature: 'a'",
+ " feature: 'b'",
+ "}",
+ "implies: 'a'",
+ "implies: 'b'",
+ }),
+ },
+ )
+
+ testCases := []struct {
+ toolchains []string
+ expectedText string
+ }{
+ {
+ toolchains: []string{toolchainEmpty1, toolchainEmpty2},
+ expectedText: `
+ action_configs = []`},
+ {
+ toolchains: []string{toolchainEmpty1, toolchainNameNotInDict},
+ expectedText: `
+ a_b_c_action = action_config(action_name = "A-B.C")`},
+ {
+ toolchains: []string{toolchainNameInDictA, toolchainNameInDictB},
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerB"):
+ cpp_compile_action = action_config(
+ action_name = ACTION_NAMES.cpp_compile,
+ tools = [tool(path = "/a/b/c")],
+ )
+ elif (ctx.attr.cpu == "cpuC" and ctx.attr.compiler == "compilerA"):
+ cpp_compile_action = action_config(action_name = ACTION_NAMES.cpp_compile)`},
+ {
+ toolchains: []string{toolchainComplexActionConfig},
+ expectedText: `
+ action_complex_action = action_config(
+ action_name = "action-complex",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ flag_groups = [
+ flag_group(
+ flags = ["a", "%b"],
+ iterate_over = "c",
+ expand_if_available = "d",
+ expand_if_not_available = "e",
+ expand_if_true = "f",
+ expand_if_false = "g",
+ expand_if_equal = variable_with_value(name = "var", value = "val"),
+ ),
+ flag_group(flag_groups = [flag_group(flags = ["a"])]),
+ ],
+ ),
+ flag_set(
+ with_features = [
+ with_feature_set(
+ features = ["a", "b"],
+ not_features = ["c", "d"],
+ ),
+ ],
+ ),
+ ],
+ implies = ["a", "b"],
+ tools = [
+ tool(
+ path = "/a/b/c",
+ with_features = [
+ with_feature_set(
+ features = ["a", "b"],
+ not_features = ["c", "d"],
+ ),
+ with_feature_set(features = ["e"]),
+ ],
+ execution_requirements = ["a"],
+ ),
+ tool(path = "NOT_USED"),
+ ],
+ )`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly declare an action_config, expected to contain:\n%v\n",
+ tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func TestFeatureDeclaration(t *testing.T) {
+ toolchainEmpty1 := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainEmpty2 := getCToolchain("2", "cpuB", "compilerA", []string{})
+
+ toolchainSimpleFeatureA1 := getCToolchain("3", "cpuB", "compilerB",
+ []string{
+ getFeature([]string{"name: 'Feature-c++.a'", "enabled: true"}),
+ },
+ )
+ toolchainSimpleFeatureA2 := getCToolchain("4", "cpuC", "compilerA",
+ []string{
+ getFeature([]string{"name: 'Feature-c++.a'"}),
+ },
+ )
+ toolchainComplexFeature := getCToolchain("5", "cpuC", "compilerC",
+ []string{
+ getFeature([]string{
+ "name: 'complex-feature'",
+ "enabled: true",
+ "flag_set {",
+ " action: 'c++-compile'", // in ACTION_NAMES
+ " action: 'something-else'", // not in ACTION_NAMES
+ " flag_group {",
+ " flag: 'a'",
+ " flag: '%b'",
+ " iterate_over: 'c'",
+ " expand_if_all_available: 'd'",
+ " expand_if_none_available: 'e'",
+ " expand_if_true: 'f'",
+ " expand_if_false: 'g'",
+ " expand_if_equal {",
+ " variable: 'var'",
+ " value: 'val'",
+ " }",
+ " }",
+ " flag_group {",
+ " flag_group {",
+ " flag: 'a'",
+ " }",
+ " }",
+ "}",
+ "flag_set {", // all_compile_actions
+ " action: 'c-compile'",
+ " action: 'c++-compile'",
+ " action: 'linkstamp-compile'",
+ " action: 'assemble'",
+ " action: 'preprocess-assemble'",
+ " action: 'c++-header-parsing'",
+ " action: 'c++-module-compile'",
+ " action: 'c++-module-codegen'",
+ " action: 'clif-match'",
+ " action: 'lto-backend'",
+ "}",
+ "flag_set {", // all_cpp_compile_actions
+ " action: 'c++-compile'",
+ " action: 'linkstamp-compile'",
+ " action: 'c++-header-parsing'",
+ " action: 'c++-module-compile'",
+ " action: 'c++-module-codegen'",
+ " action: 'clif-match'",
+ "}",
+ "flag_set {", // all_link_actions
+ " action: 'c++-link-executable'",
+ " action: 'c++-link-dynamic-library'",
+ " action: 'c++-link-nodeps-dynamic-library'",
+ "}",
+ "flag_set {", // all_cpp_compile_actions + all_link_actions
+ " action: 'c++-compile'",
+ " action: 'linkstamp-compile'",
+ " action: 'c++-header-parsing'",
+ " action: 'c++-module-compile'",
+ " action: 'c++-module-codegen'",
+ " action: 'clif-match'",
+ " action: 'c++-link-executable'",
+ " action: 'c++-link-dynamic-library'",
+ " action: 'c++-link-nodeps-dynamic-library'",
+ "}",
+ "flag_set {", // all_link_actions + something else
+ " action: 'c++-link-executable'",
+ " action: 'c++-link-dynamic-library'",
+ " action: 'c++-link-nodeps-dynamic-library'",
+ " action: 'some.unknown-c++.action'",
+ "}",
+ "env_set {",
+ " action: 'a'",
+ " env_entry {",
+ " key: 'k'",
+ " value: 'v'",
+ " }",
+ " with_feature {",
+ " feature: 'a'",
+ " }",
+ "}",
+ "env_set {",
+ " action: 'c-compile'",
+ "}",
+ "env_set {", // all_compile_actions
+ " action: 'c-compile'",
+ " action: 'c++-compile'",
+ " action: 'linkstamp-compile'",
+ " action: 'assemble'",
+ " action: 'preprocess-assemble'",
+ " action: 'c++-header-parsing'",
+ " action: 'c++-module-compile'",
+ " action: 'c++-module-codegen'",
+ " action: 'clif-match'",
+ " action: 'lto-backend'",
+ "}",
+ "requires {",
+ " feature: 'a'",
+ " feature: 'b'",
+ "}",
+ "implies: 'a'",
+ "implies: 'b'",
+ "provides: 'c'",
+ "provides: 'd'",
+ }),
+ },
+ )
+
+ testCases := []struct {
+ toolchains []string
+ expectedText string
+ }{
+ {
+ toolchains: []string{toolchainEmpty1, toolchainEmpty2},
+ expectedText: `
+ features = []
+`},
+ {
+ toolchains: []string{toolchainEmpty1, toolchainSimpleFeatureA1},
+ expectedText: `
+ feature_cpp_a_feature = feature(name = "Feature-c++.a", enabled = True)`},
+ {
+ toolchains: []string{toolchainSimpleFeatureA1, toolchainSimpleFeatureA2},
+ expectedText: `
+ if (ctx.attr.cpu == "cpuC"):
+ feature_cpp_a_feature = feature(name = "Feature-c++.a")
+ elif (ctx.attr.cpu == "cpuB"):
+ feature_cpp_a_feature = feature(name = "Feature-c++.a", enabled = True)`},
+ {
+ toolchains: []string{toolchainComplexFeature},
+ expectedText: `
+ complex_feature_feature = feature(
+ name = "complex-feature",
+ enabled = True,
+ flag_sets = [
+ flag_set(
+ actions = [ACTION_NAMES.cpp_compile, "something-else"],
+ flag_groups = [
+ flag_group(
+ flags = ["a", "%b"],
+ iterate_over = "c",
+ expand_if_available = "d",
+ expand_if_not_available = "e",
+ expand_if_true = "f",
+ expand_if_false = "g",
+ expand_if_equal = variable_with_value(name = "var", value = "val"),
+ ),
+ flag_group(flag_groups = [flag_group(flags = ["a"])]),
+ ],
+ ),
+ flag_set(actions = all_compile_actions),
+ flag_set(actions = all_cpp_compile_actions),
+ flag_set(actions = all_link_actions),
+ flag_set(
+ actions = all_cpp_compile_actions +
+ all_link_actions,
+ ),
+ flag_set(
+ actions = all_link_actions +
+ ["some.unknown-c++.action"],
+ ),
+ ],
+ env_sets = [
+ env_set(
+ actions = ["a"],
+ env_entries = [env_entry(key = "k", value = "v")],
+ with_features = [with_feature_set(features = ["a"])],
+ ),
+ env_set(actions = [ACTION_NAMES.c_compile]),
+ env_set(actions = all_compile_actions),
+ ],
+ requires = [feature_set(features = ["a", "b"])],
+ implies = ["a", "b"],
+ provides = ["c", "d"],
+ )`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly declare a feature, expected to contain:\n%v\n",
+ tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
+
+func TestRule(t *testing.T) {
+ simpleToolchain := getSimpleCToolchain("simple")
+ expected := `load("@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl",
+ "action_config",
+ "artifact_name_pattern",
+ "env_entry",
+ "env_set",
+ "feature",
+ "feature_set",
+ "flag_group",
+ "flag_set",
+ "make_variable",
+ "tool",
+ "tool_path",
+ "variable_with_value",
+ "with_feature_set",
+)
+load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "ACTION_NAMES")
+
+def _impl(ctx):
+ toolchain_identifier = "id-simple"
+
+ host_system_name = "host-simple"
+
+ target_system_name = "target-simple"
+
+ target_cpu = "cpu-simple"
+
+ target_libc = "libc-simple"
+
+ compiler = "compiler-simple"
+
+ abi_version = "version-simple"
+
+ abi_libc_version = "libc_version-simple"
+
+ cc_target_os = None
+
+ builtin_sysroot = None
+
+ all_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+ ACTION_NAMES.lto_backend,
+ ]
+
+ all_cpp_compile_actions = [
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.clif_match,
+ ]
+
+ preprocessor_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_header_parsing,
+ ACTION_NAMES.cpp_module_compile,
+ ACTION_NAMES.clif_match,
+ ]
+
+ codegen_compile_actions = [
+ ACTION_NAMES.c_compile,
+ ACTION_NAMES.cpp_compile,
+ ACTION_NAMES.linkstamp_compile,
+ ACTION_NAMES.assemble,
+ ACTION_NAMES.preprocess_assemble,
+ ACTION_NAMES.cpp_module_codegen,
+ ACTION_NAMES.lto_backend,
+ ]
+
+ all_link_actions = [
+ ACTION_NAMES.cpp_link_executable,
+ ACTION_NAMES.cpp_link_dynamic_library,
+ ACTION_NAMES.cpp_link_nodeps_dynamic_library,
+ ]
+
+ action_configs = []
+
+ features = []
+
+ cxx_builtin_include_directories = []
+
+ artifact_name_patterns = []
+
+ make_variables = []
+
+ tool_paths = []
+
+
+ out = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(out, "Fake executable")
+ return [
+ cc_common.create_cc_toolchain_config_info(
+ ctx = ctx,
+ features = features,
+ action_configs = action_configs,
+ artifact_name_patterns = artifact_name_patterns,
+ cxx_builtin_include_directories = cxx_builtin_include_directories,
+ toolchain_identifier = toolchain_identifier,
+ host_system_name = host_system_name,
+ target_system_name = target_system_name,
+ target_cpu = target_cpu,
+ target_libc = target_libc,
+ compiler = compiler,
+ abi_version = abi_version,
+ abi_libc_version = abi_libc_version,
+ tool_paths = tool_paths,
+ make_variables = make_variables,
+ builtin_sysroot = builtin_sysroot,
+ cc_target_os = cc_target_os
+ ),
+ DefaultInfo(
+ executable = out,
+ ),
+ ]
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "cpu": attr.string(mandatory=True, values=["cpu-simple"]),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
+`
+ crosstool := makeCrosstool([]string{simpleToolchain})
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if got != expected {
+ t.Fatalf("Expected:\n%v\nGot:\n%v\nTested CROSSTOOL:\n%v",
+ expected, got, simpleToolchain)
+ }
+}
+
+func TestAllowedCompilerValues(t *testing.T) {
+ toolchainAA := getCToolchain("1", "cpuA", "compilerA", []string{})
+ toolchainBA := getCToolchain("2", "cpuB", "compilerA", []string{})
+ toolchainBB := getCToolchain("3", "cpuB", "compilerB", []string{})
+ toolchainCC := getCToolchain("4", "cpuC", "compilerC", []string{})
+
+ testCases := []struct {
+ toolchains []string
+ expectedText string
+ }{
+ {
+ toolchains: []string{toolchainAA, toolchainBA},
+ expectedText: `
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "cpu": attr.string(mandatory=True, values=["cpuA", "cpuB"]),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
+`},
+ {
+ toolchains: []string{toolchainBA, toolchainBB},
+ expectedText: `
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "cpu": attr.string(mandatory=True, values=["cpuB"]),
+ "compiler": attr.string(mandatory=True, values=["compilerA", "compilerB"]),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
+`},
+ {
+ toolchains: []string{toolchainAA, toolchainBA, toolchainBB},
+ expectedText: `
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "cpu": attr.string(mandatory=True, values=["cpuA", "cpuB"]),
+ "compiler": attr.string(mandatory=True, values=["compilerA", "compilerB"]),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
+`},
+ {
+ toolchains: []string{toolchainAA, toolchainBA, toolchainBB, toolchainCC},
+ expectedText: `
+cc_toolchain_config = rule(
+ implementation = _impl,
+ attrs = {
+ "cpu": attr.string(mandatory=True, values=["cpuA", "cpuB", "cpuC"]),
+ "compiler": attr.string(mandatory=True, values=["compilerA", "compilerB", "compilerC"]),
+ },
+ provides = [CcToolchainConfigInfo],
+ executable = True,
+)
+`}}
+
+ for _, tc := range testCases {
+ crosstool := makeCrosstool(tc.toolchains)
+ got, err := Transform(crosstool)
+ if err != nil {
+ t.Fatalf("CROSSTOOL conversion failed: %v", err)
+ }
+ if !strings.Contains(got, tc.expectedText) {
+ t.Errorf("Failed to correctly declare the rule, expected to contain:\n%v\n",
+ tc.expectedText)
+ t.Fatalf("Tested CROSSTOOL:\n%v\n\nGenerated rule:\n%v\n",
+ strings.Join(tc.toolchains, "\n"), got)
+ }
+ }
+}
diff --git a/tools/migration/ctoolchain_comparator.py b/tools/migration/ctoolchain_comparator.py
new file mode 100644
index 0000000..5143e02
--- /dev/null
+++ b/tools/migration/ctoolchain_comparator.py
@@ -0,0 +1,127 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+r"""A script that compares 2 CToolchains from proto format.
+
+This script accepts two files in either a CROSSTOOL proto text format or a
+CToolchain proto text format. It then locates the CToolchains with the given
+toolchain_identifier and checks if the resulting CToolchain objects in Java
+are the same.
+
+Example usage:
+
+bazel run \
+@rules_cc//tools/migration:ctoolchain_comparator -- \
+--before=/path/to/CROSSTOOL1 \
+--after=/path/to/CROSSTOOL2 \
+--toolchain_identifier=id
+"""
+
+import os
+from absl import app
+from absl import flags
+from google.protobuf import text_format
+from third_party.com.github.bazelbuild.bazel.src.main.protobuf import crosstool_config_pb2
+from tools.migration.ctoolchain_comparator_lib import compare_ctoolchains
+
+flags.DEFINE_string(
+ "before", None,
+ ("A text proto file containing the relevant CTooclchain before the change, "
+ "either a CROSSTOOL file or a single CToolchain proto text"))
+flags.DEFINE_string(
+ "after", None,
+ ("A text proto file containing the relevant CToolchain after the change, "
+ "either a CROSSTOOL file or a single CToolchain proto text"))
+flags.DEFINE_string("toolchain_identifier", None,
+ "The identifier of the CToolchain that is being compared.")
+flags.mark_flag_as_required("before")
+flags.mark_flag_as_required("after")
+
+
+def _to_absolute_path(path):
+ path = os.path.expanduser(path)
+ if os.path.isabs(path):
+ return path
+ else:
+ if "BUILD_WORKING_DIRECTORY" in os.environ:
+ return os.path.join(os.environ["BUILD_WORKING_DIRECTORY"], path)
+ else:
+ return path
+
+
+def _find_toolchain(crosstool, toolchain_identifier):
+ for toolchain in crosstool.toolchain:
+ if toolchain.toolchain_identifier == toolchain_identifier:
+ return toolchain
+ return None
+
+
+def _read_crosstool_or_ctoolchain_proto(input_file, toolchain_identifier=None):
+ """Reads a proto file and finds the CToolchain with the given identifier."""
+ with open(input_file, "r") as f:
+ text = f.read()
+ crosstool_release = crosstool_config_pb2.CrosstoolRelease()
+ c_toolchain = crosstool_config_pb2.CToolchain()
+ try:
+ text_format.Merge(text, crosstool_release)
+ if toolchain_identifier is None:
+ print("CROSSTOOL proto needs a 'toolchain_identifier' specified in "
+ "order to be able to select the right toolchain for comparison.")
+ return None
+ toolchain = _find_toolchain(crosstool_release, toolchain_identifier)
+ if toolchain is None:
+ print(("Cannot find a CToolchain with an identifier '%s' in CROSSTOOL "
+ "file") % toolchain_identifier)
+ return None
+ return toolchain
+ except text_format.ParseError as crosstool_error:
+ try:
+ text_format.Merge(text, c_toolchain)
+ if (toolchain_identifier is not None and
+ c_toolchain.toolchain_identifier != toolchain_identifier):
+ print(("Expected CToolchain with identifier '%s', got CToolchain with "
+ "identifier '%s'" % (toolchain_identifier,
+ c_toolchain.toolchain_identifier)))
+ return None
+ return c_toolchain
+ except text_format.ParseError as toolchain_error:
+ print(("Error parsing file '%s':" % input_file)) # pylint: disable=superfluous-parens
+ print("Attempt to parse it as a CROSSTOOL proto:") # pylint: disable=superfluous-parens
+ print(crosstool_error) # pylint: disable=superfluous-parens
+ print("Attempt to parse it as a CToolchain proto:") # pylint: disable=superfluous-parens
+ print(toolchain_error) # pylint: disable=superfluous-parens
+ return None
+
+
+def main(unused_argv):
+
+ before_file = _to_absolute_path(flags.FLAGS.before)
+ after_file = _to_absolute_path(flags.FLAGS.after)
+ toolchain_identifier = flags.FLAGS.toolchain_identifier
+
+ toolchain_before = _read_crosstool_or_ctoolchain_proto(
+ before_file, toolchain_identifier)
+ toolchain_after = _read_crosstool_or_ctoolchain_proto(after_file,
+ toolchain_identifier)
+
+ if not toolchain_before or not toolchain_after:
+ print("There was an error getting the required toolchains.")
+ exit(1)
+
+ found_difference = compare_ctoolchains(toolchain_before, toolchain_after)
+ if found_difference:
+ exit(1)
+
+
+if __name__ == "__main__":
+ app.run(main)
diff --git a/tools/migration/ctoolchain_comparator_lib.py b/tools/migration/ctoolchain_comparator_lib.py
new file mode 100644
index 0000000..eb47305
--- /dev/null
+++ b/tools/migration/ctoolchain_comparator_lib.py
@@ -0,0 +1,523 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module providing compare_ctoolchains function.
+
+compare_ctoolchains takes in two parsed CToolchains and compares them.
+"""
+
+
+def _print_difference(field_name, before_value, after_value):
+ if not before_value and after_value:
+ print(("Difference in '%s' field:\nValue before change is not set\n"
+ "Value after change is set to '%s'") % (field_name, after_value))
+ elif before_value and not after_value:
+ print(("Difference in '%s' field:\nValue before change is set to '%s'\n"
+ "Value after change is not set") % (field_name, before_value))
+ else:
+ print(("Difference in '%s' field:\nValue before change:\t'%s'\n"
+ "Value after change:\t'%s'\n") % (field_name, before_value,
+ after_value))
+
+
+def _array_to_string(arr, ordered=False):
+ if not arr:
+ return "[]"
+ elif len(arr) == 1:
+ return "[" + list(arr)[0] + "]"
+ if not ordered:
+ return "[\n\t%s\n]" % "\n\t".join(arr)
+ else:
+ return "[\n\t%s\n]" % "\n\t".join(sorted(list(arr)))
+
+
+def _check_with_feature_set_equivalence(before, after):
+ before_set = set()
+ after_set = set()
+ for el in before:
+ before_set.add((str(set(el.feature)), str(set(el.not_feature))))
+ for el in after:
+ after_set.add((str(set(el.feature)), str(set(el.not_feature))))
+ return before_set == after_set
+
+
+def _check_tool_equivalence(before, after):
+ """Compares two "CToolchain.Tool"s."""
+ if before.tool_path == "NOT_USED":
+ before.tool_path = ""
+ if after.tool_path == "NOT_USED":
+ after.tool_path = ""
+ if before.tool_path != after.tool_path:
+ return False
+ if set(before.execution_requirement) != set(after.execution_requirement):
+ return False
+ if not _check_with_feature_set_equivalence(before.with_feature,
+ after.with_feature):
+ return False
+ return True
+
+
+def _check_flag_group_equivalence(before, after):
+ """Compares two "CToolchain.FlagGroup"s."""
+ if before.flag != after.flag:
+ return False
+ if before.expand_if_true != after.expand_if_true:
+ return False
+ if before.expand_if_false != after.expand_if_false:
+ return False
+ if set(before.expand_if_all_available) != set(after.expand_if_all_available):
+ return False
+ if set(before.expand_if_none_available) != set(
+ after.expand_if_none_available):
+ return False
+ if before.iterate_over != after.iterate_over:
+ return False
+ if before.expand_if_equal != after.expand_if_equal:
+ return False
+ if len(before.flag_group) != len(after.flag_group):
+ return False
+ for (flag_group_before, flag_group_after) in zip(before.flag_group,
+ after.flag_group):
+ if not _check_flag_group_equivalence(flag_group_before, flag_group_after):
+ return False
+ return True
+
+
+def _check_flag_set_equivalence(before, after, in_action_config=False):
+ """Compares two "CToolchain.FlagSet"s."""
+ # ActionConfigs in proto format do not have a 'FlagSet.action' field set.
+  # Instead, when constructing the Java ActionConfig object, we set the
+ # flag_set.action field to the action name. This currently causes the
+ # CcToolchainConfigInfo.proto to generate a CToolchain.ActionConfig that still
+ # has the action name in the FlagSet.action field, therefore we don't compare
+ # the FlagSet.action field when comparing flag_sets that belong to an
+ # ActionConfig.
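+  # Illustrative example (not taken from any real CROSSTOOL): an ActionConfig
+  # for 'c-compile' may carry flag_set { action: 'c-compile' ... } on one side
+  # and an otherwise identical flag_set without the 'action' field on the
+  # other, so comparing FlagSet.action here would report a spurious
+  # difference.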
+ if not in_action_config and set(before.action) != set(after.action):
+ return False
+ if not _check_with_feature_set_equivalence(before.with_feature,
+ after.with_feature):
+ return False
+ if len(before.flag_group) != len(after.flag_group):
+ return False
+ for (flag_group_before, flag_group_after) in zip(before.flag_group,
+ after.flag_group):
+ if not _check_flag_group_equivalence(flag_group_before, flag_group_after):
+ return False
+ return True
+
+
+def _check_action_config_equivalence(before, after):
+ """Compares two "CToolchain.ActionConfig"s."""
+ if before.config_name != after.config_name:
+ return False
+ if before.action_name != after.action_name:
+ return False
+ if before.enabled != after.enabled:
+ return False
+ if len(before.tool) != len(after.tool):
+ return False
+ for (tool_before, tool_after) in zip(before.tool, after.tool):
+ if not _check_tool_equivalence(tool_before, tool_after):
+ return False
+ if before.implies != after.implies:
+ return False
+ if len(before.flag_set) != len(after.flag_set):
+ return False
+ for (flag_set_before, flag_set_after) in zip(before.flag_set, after.flag_set):
+ if not _check_flag_set_equivalence(flag_set_before, flag_set_after, True):
+ return False
+ return True
+
+
+def _check_env_set_equivalence(before, after):
+ """Compares two "CToolchain.EnvSet"s."""
+ if set(before.action) != set(after.action):
+ return False
+ if not _check_with_feature_set_equivalence(before.with_feature,
+ after.with_feature):
+ return False
+ if before.env_entry != after.env_entry:
+ return False
+ return True
+
+
+def _check_feature_equivalence(before, after):
+ """Compares two "CToolchain.Feature"s."""
+ if before.name != after.name:
+ return False
+ if before.enabled != after.enabled:
+ return False
+ if len(before.flag_set) != len(after.flag_set):
+ return False
+ for (flag_set_before, flag_set_after) in zip(before.flag_set, after.flag_set):
+ if not _check_flag_set_equivalence(flag_set_before, flag_set_after):
+ return False
+ if len(before.env_set) != len(after.env_set):
+ return False
+ for (env_set_before, env_set_after) in zip(before.env_set, after.env_set):
+ if not _check_env_set_equivalence(env_set_before, env_set_after):
+ return False
+ if len(before.requires) != len(after.requires):
+ return False
+ for (requires_before, requires_after) in zip(before.requires, after.requires):
+ if set(requires_before.feature) != set(requires_after.feature):
+ return False
+ if before.implies != after.implies:
+ return False
+ if before.provides != after.provides:
+ return False
+ return True
+
+
+def _compare_features(features_before, features_after):
+ """Compares two "CToolchain.FlagFeature" lists."""
+ feature_name_to_feature_before = {}
+ feature_name_to_feature_after = {}
+ for feature in features_before:
+ feature_name_to_feature_before[feature.name] = feature
+ for feature in features_after:
+ feature_name_to_feature_after[feature.name] = feature
+
+ feature_names_before = set(feature_name_to_feature_before.keys())
+ feature_names_after = set(feature_name_to_feature_after.keys())
+
+ before_after_diff = feature_names_before - feature_names_after
+ after_before_diff = feature_names_after - feature_names_before
+
+ diff_string = "Difference in 'feature' field:"
+ found_difference = False
+ if before_after_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List before change contains entries for the following features "
+ "that the list after the change doesn't:\n%s") % _array_to_string(
+ before_after_diff, ordered=True))
+ if after_before_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List after change contains entries for the following features "
+ "that the list before the change doesn't:\n%s") % _array_to_string(
+ after_before_diff, ordered=True))
+
+ names_before = [feature.name for feature in features_before]
+ names_after = [feature.name for feature in features_after]
+ if names_before != names_after:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("Features not in right order:\n"
+           "* List of features before change:\t%s\n"
+           "* List of features after change:\t%s") %
+ (_array_to_string(names_before), _array_to_string(names_after)))
+ for name in feature_name_to_feature_before:
+ feature_before = feature_name_to_feature_before[name]
+ feature_after = feature_name_to_feature_after.get(name, None)
+ if feature_after and not _check_feature_equivalence(feature_before,
+ feature_after):
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* Feature '%s' differs before and after the change:\n"
+ "Value before change:\n%s\n"
+ "Value after change:\n%s") % (name, str(feature_before),
+ str(feature_after)))
+ if found_difference:
+ print("") # pylint: disable=superfluous-parens
+ return found_difference
+
+
+def _compare_action_configs(action_configs_before, action_configs_after):
+ """Compares two "CToolchain.ActionConfig" lists."""
+ action_name_to_action_before = {}
+ action_name_to_action_after = {}
+ for action_config in action_configs_before:
+ action_name_to_action_before[action_config.config_name] = action_config
+ for action_config in action_configs_after:
+ action_name_to_action_after[action_config.config_name] = action_config
+
+ config_names_before = set(action_name_to_action_before.keys())
+ config_names_after = set(action_name_to_action_after.keys())
+
+ before_after_diff = config_names_before - config_names_after
+ after_before_diff = config_names_after - config_names_before
+
+ diff_string = "Difference in 'action_config' field:"
+ found_difference = False
+ if before_after_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List before change contains entries for the following "
+ "action_configs that the list after the change doesn't:\n%s") %
+ _array_to_string(before_after_diff, ordered=True))
+ if after_before_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List after change contains entries for the following "
+ "action_configs that the list before the change doesn't:\n%s") %
+ _array_to_string(after_before_diff, ordered=True))
+
+ names_before = [config.config_name for config in action_configs_before]
+ names_after = [config.config_name for config in action_configs_after]
+ if names_before != names_after:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("Action configs not in right order:\n"
+           "* List of action configs before change:\t%s\n"
+           "* List of action configs after change:\t%s") %
+ (_array_to_string(names_before), _array_to_string(names_after)))
+ for name in config_names_before:
+ action_config_before = action_name_to_action_before[name]
+ action_config_after = action_name_to_action_after.get(name, None)
+ if action_config_after and not _check_action_config_equivalence(
+ action_config_before, action_config_after):
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* Action config '%s' differs before and after the change:\n"
+ "Value before change:\n%s\n"
+ "Value after change:\n%s") % (name, str(action_config_before),
+ str(action_config_after)))
+ if found_difference:
+ print("") # pylint: disable=superfluous-parens
+ return found_difference
+
+
+def _compare_tool_paths(tool_paths_before, tool_paths_after):
+ """Compares two "CToolchain.ToolPath" lists."""
+ tool_to_path_before = {}
+ tool_to_path_after = {}
+ for tool_path in tool_paths_before:
+ tool_to_path_before[tool_path.name] = (
+ tool_path.path if tool_path.path != "NOT_USED" else "")
+ for tool_path in tool_paths_after:
+ tool_to_path_after[tool_path.name] = (
+ tool_path.path if tool_path.path != "NOT_USED" else "")
+
+ tool_names_before = set(tool_to_path_before.keys())
+ tool_names_after = set(tool_to_path_after.keys())
+
+ before_after_diff = tool_names_before - tool_names_after
+ after_before_diff = tool_names_after - tool_names_before
+
+ diff_string = "Difference in 'tool_path' field:"
+ found_difference = False
+ if before_after_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List before change contains entries for the following tools "
+ "that the list after the change doesn't:\n%s") % _array_to_string(
+ before_after_diff, ordered=True))
+ if after_before_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List after change contains entries for the following tools that "
+ "the list before the change doesn't:\n%s") % _array_to_string(
+ after_before_diff, ordered=True))
+
+ for tool in tool_to_path_before:
+ path_before = tool_to_path_before[tool]
+ path_after = tool_to_path_after.get(tool, None)
+ if path_after and path_after != path_before:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* Path for tool '%s' differs before and after the change:\n"
+ "Value before change:\t'%s'\n"
+ "Value after change:\t'%s'") % (tool, path_before, path_after))
+ if found_difference:
+ print("") # pylint: disable=superfluous-parens
+ return found_difference
+
+
+def _compare_make_variables(make_variables_before, make_variables_after):
+ """Compares two "CToolchain.MakeVariable" lists."""
+ name_to_variable_before = {}
+ name_to_variable_after = {}
+ for variable in make_variables_before:
+ name_to_variable_before[variable.name] = variable.value
+ for variable in make_variables_after:
+ name_to_variable_after[variable.name] = variable.value
+
+ variable_names_before = set(name_to_variable_before.keys())
+ variable_names_after = set(name_to_variable_after.keys())
+
+ before_after_diff = variable_names_before - variable_names_after
+ after_before_diff = variable_names_after - variable_names_before
+
+ diff_string = "Difference in 'make_variable' field:"
+ found_difference = False
+ if before_after_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List before change contains entries for the following variables "
+ "that the list after the change doesn't:\n%s") % _array_to_string(
+ before_after_diff, ordered=True))
+ if after_before_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List after change contains entries for the following variables "
+ "that the list before the change doesn't:\n%s") % _array_to_string(
+ after_before_diff, ordered=True))
+
+ for variable in name_to_variable_before:
+ value_before = name_to_variable_before[variable]
+ value_after = name_to_variable_after.get(variable, None)
+ if value_after and value_after != value_before:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(
+ ("* Value for variable '%s' differs before and after the change:\n"
+ "Value before change:\t'%s'\n"
+ "Value after change:\t'%s'") % (variable, value_before, value_after))
+ if found_difference:
+ print("") # pylint: disable=superfluous-parens
+ return found_difference
+
+
+def _compare_cxx_builtin_include_directories(directories_before,
+ directories_after):
+ if directories_before != directories_after:
+ print(("Difference in 'cxx_builtin_include_directory' field:\n"
+ "List of elements before change:\n%s\n"
+ "List of elements after change:\n%s\n") %
+ (_array_to_string(directories_before),
+ _array_to_string(directories_after)))
+ return True
+ return False
+
+
+def _compare_artifact_name_patterns(artifact_name_patterns_before,
+ artifact_name_patterns_after):
+ """Compares two "CToolchain.ArtifactNamePattern" lists."""
+ category_to_values_before = {}
+ category_to_values_after = {}
+ for name_pattern in artifact_name_patterns_before:
+ category_to_values_before[name_pattern.category_name] = (
+ name_pattern.prefix, name_pattern.extension)
+ for name_pattern in artifact_name_patterns_after:
+ category_to_values_after[name_pattern.category_name] = (
+ name_pattern.prefix, name_pattern.extension)
+
+ category_names_before = set(category_to_values_before.keys())
+ category_names_after = set(category_to_values_after.keys())
+
+ before_after_diff = category_names_before - category_names_after
+ after_before_diff = category_names_after - category_names_before
+
+ diff_string = "Difference in 'artifact_name_pattern' field:"
+ found_difference = False
+ if before_after_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List before change contains entries for the following categories "
+ "that the list after the change doesn't:\n%s") % _array_to_string(
+ before_after_diff, ordered=True))
+ if after_before_diff:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* List after change contains entries for the following categories "
+ "that the list before the change doesn't:\n%s") % _array_to_string(
+ after_before_diff, ordered=True))
+
+ for category in category_to_values_before:
+ value_before = category_to_values_before[category]
+ value_after = category_to_values_after.get(category, None)
+ if value_after and value_after != value_before:
+ if not found_difference:
+ print(diff_string) # pylint: disable=superfluous-parens
+ found_difference = True
+ print(("* Value for category '%s' differs before and after the change:\n"
+ "Value before change:\tprefix:'%s'\textension:'%s'\n"
+ "Value after change:\tprefix:'%s'\textension:'%s'") %
+ (category, value_before[0], value_before[1], value_after[0],
+ value_after[1]))
+ if found_difference:
+ print("") # pylint: disable=superfluous-parens
+ return found_difference
+
+
+def compare_ctoolchains(toolchain_before, toolchain_after):
+ """Compares two CToolchains."""
+ found_difference = False
+  if (toolchain_before.toolchain_identifier !=
+      toolchain_after.toolchain_identifier):
+    _print_difference("toolchain_identifier",
+                      toolchain_before.toolchain_identifier,
+                      toolchain_after.toolchain_identifier)
+    found_difference = True
+ if toolchain_before.host_system_name != toolchain_after.host_system_name:
+ _print_difference("host_system_name", toolchain_before.host_system_name,
+ toolchain_after.host_system_name)
+ found_difference = True
+ if toolchain_before.target_system_name != toolchain_after.target_system_name:
+ _print_difference("target_system_name", toolchain_before.target_system_name,
+ toolchain_after.target_system_name)
+ found_difference = True
+ if toolchain_before.target_cpu != toolchain_after.target_cpu:
+ _print_difference("target_cpu", toolchain_before.target_cpu,
+ toolchain_after.target_cpu)
+ found_difference = True
+ if toolchain_before.target_libc != toolchain_after.target_libc:
+ _print_difference("target_libc", toolchain_before.target_libc,
+ toolchain_after.target_libc)
+ found_difference = True
+ if toolchain_before.compiler != toolchain_after.compiler:
+ _print_difference("compiler", toolchain_before.compiler,
+ toolchain_after.compiler)
+ found_difference = True
+ if toolchain_before.abi_version != toolchain_after.abi_version:
+ _print_difference("abi_version", toolchain_before.abi_version,
+ toolchain_after.abi_version)
+ found_difference = True
+ if toolchain_before.abi_libc_version != toolchain_after.abi_libc_version:
+ _print_difference("abi_libc_version", toolchain_before.abi_libc_version,
+ toolchain_after.abi_libc_version)
+ found_difference = True
+ if toolchain_before.cc_target_os != toolchain_after.cc_target_os:
+ _print_difference("cc_target_os", toolchain_before.cc_target_os,
+ toolchain_after.cc_target_os)
+ found_difference = True
+ if toolchain_before.builtin_sysroot != toolchain_after.builtin_sysroot:
+ _print_difference("builtin_sysroot", toolchain_before.builtin_sysroot,
+ toolchain_after.builtin_sysroot)
+ found_difference = True
+ found_difference = _compare_features(
+ toolchain_before.feature, toolchain_after.feature) or found_difference
+ found_difference = _compare_action_configs(
+ toolchain_before.action_config,
+ toolchain_after.action_config) or found_difference
+ found_difference = _compare_tool_paths(
+ toolchain_before.tool_path, toolchain_after.tool_path) or found_difference
+ found_difference = _compare_cxx_builtin_include_directories(
+ toolchain_before.cxx_builtin_include_directory,
+ toolchain_after.cxx_builtin_include_directory) or found_difference
+ found_difference = _compare_make_variables(
+ toolchain_before.make_variable,
+ toolchain_after.make_variable) or found_difference
+ found_difference = _compare_artifact_name_patterns(
+ toolchain_before.artifact_name_pattern,
+ toolchain_after.artifact_name_pattern) or found_difference
+ if not found_difference:
+ print("No difference") # pylint: disable=superfluous-parens
+ return found_difference
diff --git a/tools/migration/ctoolchain_comparator_lib_test.py b/tools/migration/ctoolchain_comparator_lib_test.py
new file mode 100644
index 0000000..c81ff47
--- /dev/null
+++ b/tools/migration/ctoolchain_comparator_lib_test.py
@@ -0,0 +1,1709 @@
+# Copyright 2018 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from google.protobuf import text_format
+from third_party.com.github.bazelbuild.bazel.src.main.protobuf import crosstool_config_pb2
+from tools.migration.ctoolchain_comparator_lib import compare_ctoolchains
+
+from py import mock
+try:
+ # Python 2
+ from cStringIO import StringIO
+except ImportError:
+ # Python 3
+ from io import StringIO
+
+
+def make_toolchain(toolchain_proto):
+ toolchain = crosstool_config_pb2.CToolchain()
+ text_format.Merge(toolchain_proto, toolchain)
+ return toolchain
+
+
+class CtoolchainComparatorLibTest(unittest.TestCase):
+
+ def test_string_fields(self):
+ first = make_toolchain("""
+ toolchain_identifier: "first-id"
+ host_system_name: "first-host"
+ target_system_name: "first-target"
+ target_cpu: "first-cpu"
+ target_libc: "first-libc"
+ compiler: "first-compiler"
+ abi_version: "first-abi"
+ abi_libc_version: "first-abi-libc"
+ builtin_sysroot: "sysroot"
+ """)
+ second = make_toolchain("""
+ toolchain_identifier: "second-id"
+ host_system_name: "second-host"
+ target_system_name: "second-target"
+ target_cpu: "second-cpu"
+ target_libc: "second-libc"
+ compiler: "second-compiler"
+ abi_version: "second-abi"
+ abi_libc_version: "second-abi-libc"
+ cc_target_os: "os"
+ """)
+ error_toolchain_identifier = (
+ "Difference in 'toolchain_identifier' field:\n"
+ "Value before change:\t'first-id'\n"
+ "Value after change:\t'second-id'\n")
+ error_host_system_name = ("Difference in 'host_system_name' field:\n"
+ "Value before change:\t'first-host'\n"
+ "Value after change:\t'second-host'\n")
+ error_target_system_name = ("Difference in 'target_system_name' field:\n"
+ "Value before change:\t'first-target'\n"
+ "Value after change:\t'second-target'\n")
+ error_target_cpu = ("Difference in 'target_cpu' field:\n"
+ "Value before change:\t'first-cpu'\n"
+ "Value after change:\t'second-cpu'\n")
+ error_target_libc = ("Difference in 'target_libc' field:\n"
+ "Value before change:\t'first-libc'\n"
+ "Value after change:\t'second-libc'\n")
+ error_compiler = ("Difference in 'compiler' field:\n"
+ "Value before change:\t'first-compiler'\n"
+ "Value after change:\t'second-compiler'\n")
+ error_abi_version = ("Difference in 'abi_version' field:\n"
+ "Value before change:\t'first-abi'\n"
+ "Value after change:\t'second-abi'\n")
+ error_abi_libc_version = ("Difference in 'abi_libc_version' field:\n"
+ "Value before change:\t'first-abi-libc'\n"
+ "Value after change:\t'second-abi-libc'\n")
+ error_builtin_sysroot = ("Difference in 'builtin_sysroot' field:\n"
+ "Value before change is set to 'sysroot'\n"
+ "Value after change is not set\n")
+ error_cc_target_os = ("Difference in 'cc_target_os' field:\n"
+ "Value before change is not set\n"
+ "Value after change is set to 'os'\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(error_toolchain_identifier, mock_stdout.getvalue())
+ self.assertIn(error_host_system_name, mock_stdout.getvalue())
+ self.assertIn(error_target_system_name, mock_stdout.getvalue())
+ self.assertIn(error_target_cpu, mock_stdout.getvalue())
+ self.assertIn(error_target_libc, mock_stdout.getvalue())
+ self.assertIn(error_compiler, mock_stdout.getvalue())
+ self.assertIn(error_abi_version, mock_stdout.getvalue())
+ self.assertIn(error_abi_libc_version, mock_stdout.getvalue())
+ self.assertIn(error_builtin_sysroot, mock_stdout.getvalue())
+ self.assertIn(error_cc_target_os, mock_stdout.getvalue())
+
+ def test_tool_path(self):
+ first = make_toolchain("""
+ tool_path {
+ name: "only_first"
+ path: "/a/b/c"
+ }
+ tool_path {
+ name: "paths_differ"
+ path: "/path/first"
+ }
+ """)
+ second = make_toolchain("""
+ tool_path {
+ name: "paths_differ"
+ path: "/path/second"
+ }
+ tool_path {
+ name: "only_second_1"
+ path: "/a/b/c"
+ }
+ tool_path {
+ name: "only_second_2"
+ path: "/a/b/c"
+ }
+ """)
+ error_only_first = ("* List before change contains entries for the "
+ "following tools that the list after the change "
+ "doesn't:\n[only_first]\n")
+ error_only_second = ("* List after change contains entries for the "
+ "following tools that the list before the change "
+ "doesn't:\n"
+ "[\n"
+ "\tonly_second_1\n"
+ "\tonly_second_2\n"
+ "]\n")
+ error_paths_differ = ("* Path for tool 'paths_differ' differs before and "
+ "after the change:\n"
+ "Value before change:\t'/path/first'\n"
+ "Value after change:\t'/path/second'\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(error_only_first, mock_stdout.getvalue())
+ self.assertIn(error_only_second, mock_stdout.getvalue())
+ self.assertIn(error_paths_differ, mock_stdout.getvalue())
+
+ def test_make_variable(self):
+ first = make_toolchain("""
+ make_variable {
+ name: "only_first"
+ value: "val"
+ }
+ make_variable {
+ name: "value_differs"
+ value: "first_value"
+ }
+ """)
+ second = make_toolchain("""
+ make_variable {
+ name: "value_differs"
+ value: "second_value"
+ }
+ make_variable {
+ name: "only_second_1"
+ value: "val"
+ }
+ make_variable {
+ name: "only_second_2"
+ value: "val"
+ }
+ """)
+ error_only_first = ("* List before change contains entries for the "
+ "following variables that the list after the "
+ "change doesn't:\n[only_first]\n")
+ error_only_second = ("* List after change contains entries for the "
+ "following variables that the list before the "
+ "change doesn't:\n"
+ "[\n"
+ "\tonly_second_1\n"
+ "\tonly_second_2\n"
+ "]\n")
+ error_value_differs = ("* Value for variable 'value_differs' differs before"
+ " and after the change:\n"
+ "Value before change:\t'first_value'\n"
+ "Value after change:\t'second_value'\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(error_only_first, mock_stdout.getvalue())
+ self.assertIn(error_only_second, mock_stdout.getvalue())
+ self.assertIn(error_value_differs, mock_stdout.getvalue())
+
+ def test_cxx_builtin_include_directories(self):
+ first = make_toolchain("""
+ cxx_builtin_include_directory: "a/b/c"
+ cxx_builtin_include_directory: "d/e/f"
+ """)
+ second = make_toolchain("""
+ cxx_builtin_include_directory: "d/e/f"
+ cxx_builtin_include_directory: "a/b/c"
+ """)
+ expect_error = ("Difference in 'cxx_builtin_include_directory' field:\n"
+ "List of elements before change:\n"
+ "[\n"
+ "\ta/b/c\n"
+ "\td/e/f\n"
+ "]\n"
+ "List of elements after change:\n"
+ "[\n"
+ "\td/e/f\n"
+ "\ta/b/c\n"
+ "]\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(expect_error, mock_stdout.getvalue())
+
+ def test_artifact_name_pattern(self):
+ first = make_toolchain("""
+ artifact_name_pattern {
+ category_name: 'object_file'
+ prefix: ''
+ extension: '.obj1'
+ }
+ artifact_name_pattern {
+ category_name: 'executable'
+ prefix: 'first'
+ extension: '.exe'
+ }
+ artifact_name_pattern {
+ category_name: 'dynamic_library'
+ prefix: ''
+ extension: '.dll'
+ }
+ """)
+ second = make_toolchain("""
+ artifact_name_pattern {
+ category_name: 'object_file'
+ prefix: ''
+ extension: '.obj2'
+ }
+ artifact_name_pattern {
+ category_name: 'static_library'
+ prefix: ''
+ extension: '.lib'
+ }
+ artifact_name_pattern {
+ category_name: 'executable'
+ prefix: 'second'
+ extension: '.exe'
+ }
+ artifact_name_pattern {
+ category_name: 'interface_library'
+ prefix: ''
+ extension: '.if.lib'
+ }
+ """)
+ error_only_first = ("* List before change contains entries for the "
+ "following categories that the list after the "
+ "change doesn't:\n[dynamic_library]\n")
+ error_only_second = ("* List after change contains entries for the "
+ "following categories that the list before the "
+ "change doesn't:\n"
+ "[\n"
+ "\tinterface_library\n"
+ "\tstatic_library\n"
+ "]\n")
+ error_extension_differs = ("* Value for category 'object_file' differs "
+ "before and after the change:\n"
+ "Value before change:"
+ "\tprefix:''"
+ "\textension:'.obj1'\n"
+ "Value after change:"
+ "\tprefix:''"
+ "\textension:'.obj2'\n")
+ error_prefix_differs = ("* Value for category 'executable' differs "
+ "before and after the change:\n"
+ "Value before change:"
+ "\tprefix:'first'"
+ "\textension:'.exe'\n"
+ "Value after change:"
+ "\tprefix:'second'"
+ "\textension:'.exe'\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(error_only_first, mock_stdout.getvalue())
+ self.assertIn(error_only_second, mock_stdout.getvalue())
+ self.assertIn(error_extension_differs, mock_stdout.getvalue())
+ self.assertIn(error_prefix_differs, mock_stdout.getvalue())
+
+ def test_features_not_ordered(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature1'
+ }
+ feature {
+ name: 'feature2'
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature2'
+ }
+ feature {
+ name: 'feature1'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("Features not in right order", mock_stdout.getvalue())
+
+ def test_features_missing(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature1'
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature2'
+ }
+ """)
+ error_only_first = ("* List before change contains entries for the "
+ "following features that the list after the "
+ "change doesn't:\n[feature1]\n")
+ error_only_second = ("* List after change contains entries for the "
+ "following features that the list before the "
+ "change doesn't:\n[feature2]\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(error_only_first, mock_stdout.getvalue())
+ self.assertIn(error_only_second, mock_stdout.getvalue())
+
+ def test_feature_enabled(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ enabled: true
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ enabled: false
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_feature_provides(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ provides: 'a'
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ provides: 'b'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_feature_provides_preserves_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ provides: 'a'
+ provides: 'b'
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ provides: 'b'
+ provides: 'a'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_feature_implies(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ implies: 'a'
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_feature_implies_preserves_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ implies: 'a'
+ implies: 'b'
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ implies: 'b'
+ implies: 'a'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_feature_requires_preserves_list_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ requires: {
+ feature: 'feature1'
+ }
+ requires: {
+ feature: 'feature2'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ requires: {
+ feature: 'feature2'
+ }
+ requires: {
+ feature: 'feature1'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_feature_requires_ignores_required_features_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ requires: {
+ feature: 'feature1'
+ feature: 'feature2'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ requires: {
+ feature: 'feature2'
+ feature: 'feature1'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_feature_requires_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ requires: {
+ feature: 'feature1'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ requires: {
+ feature: 'feature2'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_action_config_ignores_requires(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ requires: {
+ feature: 'feature1'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ requires: {
+ feature: 'feature2'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_env_set_actions_differ(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ action: 'a1'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set: {
+ action: 'a1'
+ action: 'a2'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_env_set_ignores_actions_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ action: 'a2'
+ action: 'a1'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set: {
+ action: 'a1'
+ action: 'a2'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_env_set_env_entries_not_ordered(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ env_entry {
+ key: 'k1'
+ value: 'v1'
+ }
+ env_entry {
+ key: 'k2'
+ value: 'v2'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ env_entry {
+ key: 'k2'
+ value: 'v2'
+ }
+ env_entry {
+ key: 'k1'
+ value: 'v1'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_env_set_env_entries_differ(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ env_entry {
+ key: 'k1'
+ value: 'value_first'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ env_entry {
+ key: 'k1'
+ value: 'value_second'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_feature_preserves_env_set_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ env_entry {
+ key: 'first'
+ value: 'first'
+ }
+ }
+ env_set {
+ env_entry {
+ key: 'second'
+ value: 'second'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ env_entry {
+ key: 'second'
+ value: 'second'
+ }
+ }
+ env_set {
+ env_entry {
+ key: 'first'
+ value: 'first'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after the change:",
+ mock_stdout.getvalue())
+
+ def test_action_config_ignores_env_set(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ env_set {
+ env_entry {
+ key: 'k1'
+ value: 'value_first'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ env_set {
+ env_entry {
+ key: 'k1'
+ value: 'value_second'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_env_set_ignores_with_feature_set_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set{
+ with_feature {
+ feature: 'feature1'
+ }
+ with_feature {
+ not_feature: 'feature2'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set {
+ with_feature {
+ not_feature: 'feature2'
+ }
+ with_feature {
+ feature: 'feature1'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_env_set_ignores_with_feature_set_lists_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set{
+ with_feature {
+ feature: 'feature1'
+ feature: 'feature2'
+ not_feature: 'not_feature1'
+ not_feature: 'not_feature2'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ env_set{
+ with_feature {
+ feature: 'feature2'
+ feature: 'feature1'
+ not_feature: 'not_feature2'
+ not_feature: 'not_feature1'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_flag_set_ignores_actions_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set {
+ action: 'a1'
+ action: 'a2'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set {
+ action: 'a2'
+ action: 'a1'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_action_config_flag_set_actions_ignored(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ flag_set {
+ action: 'a1'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ flag_set {
+ action: 'a2'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_flag_set_ignores_with_feature_set_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set {
+ with_feature {
+ feature: 'feature1'
+ }
+ with_feature {
+ not_feature: 'feature2'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set {
+ with_feature {
+ feature: 'feature1'
+ }
+ with_feature {
+ not_feature: 'feature2'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set {
+ with_feature {
+ not_feature: 'feature2'
+ }
+ with_feature {
+ feature: 'feature1'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set {
+ with_feature {
+ not_feature: 'feature2'
+ }
+ with_feature {
+ feature: 'feature1'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_flag_set_ignores_with_feature_set_lists_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ with_feature {
+ feature: 'feature1'
+ feature: 'feature2'
+ not_feature: 'not_feature1'
+ not_feature: 'not_feature2'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ with_feature {
+ feature: 'feature1'
+ feature: 'feature2'
+ not_feature: 'not_feature1'
+ not_feature: 'not_feature2'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ with_feature {
+ feature: 'feature2'
+ feature: 'feature1'
+ not_feature: 'not_feature2'
+ not_feature: 'not_feature1'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ with_feature {
+ feature: 'feature2'
+ feature: 'feature1'
+ not_feature: 'not_feature2'
+ not_feature: 'not_feature1'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_flag_set_preserves_flag_group_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set {
+ flag_group {
+ flag: 'a'
+ }
+ flag_group {
+ flag: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set {
+ flag_group {
+ flag: 'a'
+ }
+ flag_group {
+ flag: 'b'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set {
+ flag_group {
+ flag: 'b'
+ }
+ flag_group {
+ flag: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set {
+ flag_group {
+ flag: 'b'
+ }
+ flag_group {
+ flag: 'a'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_preserves_flags_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ flag: 'flag1'
+ flag: 'flag2'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ flag: 'flag1'
+ flag: 'flag2'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ flag: 'flag2'
+ flag: 'flag1'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ flag: 'flag2'
+ flag: 'flag1'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_iterate_over_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ iterate_over: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ iterate_over: 'a'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ iterate_over: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ iterate_over: 'b'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_true_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_true: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_true: 'a'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_true: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_true: 'b'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_false_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_false: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_false: 'a'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_false: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_false: 'b'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_all_available_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'a'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'b'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_none_available_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'a'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'b'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_all_available_ignores_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'a'
+ expand_if_all_available: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'a'
+ expand_if_all_available: 'b'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'b'
+ expand_if_all_available: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_all_available: 'b'
+ expand_if_all_available: 'a'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_none_available_ignores_order(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'a'
+ expand_if_none_available: 'b'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'a'
+ expand_if_none_available: 'b'
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'b'
+ expand_if_none_available: 'a'
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_none_available: 'b'
+ expand_if_none_available: 'a'
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_flag_group_expand_if_equal_differs(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_equal {
+ variable: 'first'
+ value: 'val'
+ }
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_equal {
+ variable: 'first'
+ value: 'val'
+ }
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ expand_if_equal {
+ variable: 'second'
+ value: 'val'
+ }
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ expand_if_equal {
+ variable: 'second'
+ value: 'val'
+ }
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_flag_group_flag_groups_differ(self):
+ first = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ flag_group {
+ flag: 'a'
+ flag: 'b'
+ }
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ flag_group {
+ flag: 'a'
+ flag: 'b'
+ }
+ }
+ }
+ }
+ """)
+ second = make_toolchain("""
+ feature {
+ name: 'feature'
+ flag_set{
+ flag_group {
+ flag_group {
+ flag: 'b'
+ flag: 'a'
+ }
+ }
+ }
+ }
+ action_config {
+ config_name: 'config'
+ flag_set{
+ flag_group {
+ flag_group {
+ flag: 'b'
+ flag: 'a'
+ }
+ }
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Feature 'feature' differs before and after",
+ mock_stdout.getvalue())
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_action_configs_not_ordered(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'action1'
+ }
+ action_config {
+ config_name: 'action2'
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'action2'
+ }
+ action_config {
+ config_name: 'action1'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("Action configs not in right order", mock_stdout.getvalue())
+
+ def test_action_configs_missing(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'action1'
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'action2'
+ }
+ """)
+ error_only_first = ("* List before change contains entries for the "
+ "following action_configs that the list after the "
+ "change doesn't:\n[action1]\n")
+ error_only_second = ("* List after change contains entries for the "
+ "following action_configs that the list before the "
+ "change doesn't:\n[action2]\n")
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn(error_only_first, mock_stdout.getvalue())
+ self.assertIn(error_only_second, mock_stdout.getvalue())
+
+ def test_action_config_enabled(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ enabled: true
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ enabled: false
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_action_config_action_name(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ action_name: 'config1'
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ action_name: 'config2'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_action_config_tool_tool_path_differs(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ tool_path: 'path1'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ tool_path: 'path2'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_action_config_tool_execution_requirements_differ(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ execution_requirement: 'a'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ execution_requirement: 'b'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_action_config_tool_execution_requirements_ignores_order(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ execution_requirement: 'a'
+ execution_requirement: 'b'
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ execution_requirement: 'b'
+ execution_requirement: 'a'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_action_config_implies_differs(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ implies: 'a'
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ implies: 'b'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_action_config_implies_preserves_order(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ implies: 'a'
+ implies: 'b'
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ implies: 'b'
+ implies: 'a'
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("* Action config 'config' differs before and after",
+ mock_stdout.getvalue())
+
+ def test_unused_tool_path(self):
+ first = make_toolchain("""
+ tool_path {
+ name: "empty"
+ path: ""
+ }
+ """)
+ second = make_toolchain("""
+ tool_path {
+ name: "empty"
+ path: "NOT_USED"
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+ def test_unused_tool_path_in_tool(self):
+ first = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ tool_path: ''
+ }
+ }
+ """)
+ second = make_toolchain("""
+ action_config {
+ config_name: 'config'
+ tool {
+ tool_path: 'NOT_USED'
+ }
+ }
+ """)
+ mock_stdout = StringIO()
+ with mock.patch("sys.stdout", mock_stdout):
+ compare_ctoolchains(first, second)
+ self.assertIn("No difference", mock_stdout.getvalue())
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tools/migration/ctoolchain_compare.bzl b/tools/migration/ctoolchain_compare.bzl
new file mode 100644
index 0000000..a9632af
--- /dev/null
+++ b/tools/migration/ctoolchain_compare.bzl
@@ -0,0 +1,49 @@
+"""A test rule that compares two CToolchains in proto format."""
+
+def _impl(ctx):
+ toolchain_config_proto = ctx.actions.declare_file(ctx.label.name + "_toolchain_config.proto")
+ ctx.actions.write(
+ toolchain_config_proto,
+ ctx.attr.toolchain_config[CcToolchainConfigInfo].proto,
+ )
+
+ script = ("%s --before='%s' --after='%s' --toolchain_identifier='%s'" % (
+ ctx.executable._comparator.short_path,
+ ctx.file.crosstool.short_path,
+ toolchain_config_proto.short_path,
+ ctx.attr.toolchain_identifier,
+ ))
+ test_executable = ctx.actions.declare_file(ctx.label.name)
+ ctx.actions.write(test_executable, script, is_executable = True)
+
+ runfiles = ctx.runfiles(files = [toolchain_config_proto, ctx.file.crosstool])
+ runfiles = runfiles.merge(ctx.attr._comparator[DefaultInfo].default_runfiles)
+
+ return DefaultInfo(runfiles = runfiles, executable = test_executable)
+
+cc_toolchains_compare_test = rule(
+ implementation = _impl,
+ attrs = {
+ "crosstool": attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ doc = "Location of the CROSSTOOL file",
+ ),
+ "toolchain_config": attr.label(
+ mandatory = True,
+ providers = [CcToolchainConfigInfo],
+ doc = ("Starlark rule that replaces the CROSSTOOL file functionality " +
+ "for the CToolchain with the given identifier"),
+ ),
+ "toolchain_identifier": attr.string(
+ mandatory = True,
+ doc = "identifier of the CToolchain that is being compared",
+ ),
+ "_comparator": attr.label(
+ default = ":ctoolchain_comparator",
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+ test = True,
+)
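+
+# Example usage (a minimal sketch; the target and file names below are
+# hypothetical and assume a CROSSTOOL file and a Starlark toolchain config
+# exist in the calling package):
+#
+#   load("//tools/migration:ctoolchain_compare.bzl", "cc_toolchains_compare_test")
+#
+#   cc_toolchains_compare_test(
+#       name = "compare_k8_toolchains_test",
+#       crosstool = ":CROSSTOOL",
+#       toolchain_config = ":k8_toolchain_config",
+#       toolchain_identifier = "k8",
+#   )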
diff --git a/tools/migration/legacy_fields_migration_lib.py b/tools/migration/legacy_fields_migration_lib.py
new file mode 100644
index 0000000..6107f92
--- /dev/null
+++ b/tools/migration/legacy_fields_migration_lib.py
@@ -0,0 +1,564 @@
+"""Module providing migrate_legacy_fields function.
+
+migrate_legacy_fields takes a parsed CROSSTOOL proto and migrates it (in
+place) to use only features.
+
+Tracking issue: https://github.com/bazelbuild/bazel/issues/5187
+
+Since the C++ rules team is working on migrating CROSSTOOL from text proto to
+Starlark, we advise CROSSTOOL owners to wait for the CROSSTOOL -> Starlark
+migrator before they invest too much time in fixing their pipelines. The
+tracking issue for the Starlark effort is
+https://github.com/bazelbuild/bazel/issues/5380.
+"""
+
+from third_party.com.github.bazelbuild.bazel.src.main.protobuf import crosstool_config_pb2
+
+ALL_CC_COMPILE_ACTIONS = [
+ "assemble", "preprocess-assemble", "linkstamp-compile", "c-compile",
+ "c++-compile", "c++-header-parsing", "c++-module-compile",
+ "c++-module-codegen", "lto-backend", "clif-match"
+]
+
+ALL_OBJC_COMPILE_ACTIONS = [
+ "objc-compile", "objc++-compile"
+]
+
+ALL_CXX_COMPILE_ACTIONS = [
+ action for action in ALL_CC_COMPILE_ACTIONS
+ if action not in ["c-compile", "preprocess-assemble", "assemble"]
+]
+
+ALL_CC_LINK_ACTIONS = [
+ "c++-link-executable", "c++-link-dynamic-library",
+ "c++-link-nodeps-dynamic-library"
+]
+
+ALL_OBJC_LINK_ACTIONS = [
+ "objc-executable", "objc++-executable",
+]
+
+DYNAMIC_LIBRARY_LINK_ACTIONS = [
+ "c++-link-dynamic-library", "c++-link-nodeps-dynamic-library"
+]
+
+NODEPS_DYNAMIC_LIBRARY_LINK_ACTIONS = ["c++-link-nodeps-dynamic-library"]
+
+TRANSITIVE_DYNAMIC_LIBRARY_LINK_ACTIONS = ["c++-link-dynamic-library"]
+
+TRANSITIVE_LINK_ACTIONS = ["c++-link-executable", "c++-link-dynamic-library"]
+
+CC_LINK_EXECUTABLE = ["c++-link-executable"]
+
+
+def compile_actions(toolchain):
+ """Returns compile actions for cc or objc rules."""
+ if _is_objc_toolchain(toolchain):
+ return ALL_CC_COMPILE_ACTIONS + ALL_OBJC_COMPILE_ACTIONS
+ else:
+ return ALL_CC_COMPILE_ACTIONS
+
+def link_actions(toolchain):
+ """Returns link actions for cc or objc rules."""
+ if _is_objc_toolchain(toolchain):
+ return ALL_CC_LINK_ACTIONS + ALL_OBJC_LINK_ACTIONS
+ else:
+ return ALL_CC_LINK_ACTIONS
+
+
+def executable_link_actions(toolchain):
+ """Returns transitive link actions for cc or objc rules."""
+ if _is_objc_toolchain(toolchain):
+ return CC_LINK_EXECUTABLE + ALL_OBJC_LINK_ACTIONS
+ else:
+ return CC_LINK_EXECUTABLE
+
+
+def _is_objc_toolchain(toolchain):
+ return any(ac.action_name == "objc-compile" for ac in toolchain.action_config)
+
+# Map from LinkingMode to the corresponding feature name.
+LINKING_MODE_TO_FEATURE_NAME = {
+ "FULLY_STATIC": "fully_static_link",
+ "MOSTLY_STATIC": "static_linking_mode",
+ "DYNAMIC": "dynamic_linking_mode",
+ "MOSTLY_STATIC_LIBRARIES": "static_linking_mode_nodeps_library",
+}
+
+def migrate_legacy_fields(crosstool):
+ """Migrates parsed crosstool (inplace) to not use legacy fields."""
+ crosstool.ClearField("default_toolchain")
+ for toolchain in crosstool.toolchain:
+    for f in toolchain.feature:
+      _migrate_expand_if_all_available(f)
+    for ac in toolchain.action_config:
+      _migrate_expand_if_all_available(ac)
+    for f in toolchain.feature:
+      _migrate_repeated_expands(f)
+    for ac in toolchain.action_config:
+      _migrate_repeated_expands(ac)
+
+ if (toolchain.dynamic_library_linker_flag or
+ _contains_dynamic_flags(toolchain)) and not _get_feature(
+ toolchain, "supports_dynamic_linker"):
+ feature = toolchain.feature.add()
+ feature.name = "supports_dynamic_linker"
+ feature.enabled = True
+
+ if toolchain.supports_start_end_lib and not _get_feature(
+ toolchain, "supports_start_end_lib"):
+ feature = toolchain.feature.add()
+ feature.name = "supports_start_end_lib"
+ feature.enabled = True
+
+ if toolchain.supports_interface_shared_objects and not _get_feature(
+ toolchain, "supports_interface_shared_libraries"):
+ feature = toolchain.feature.add()
+ feature.name = "supports_interface_shared_libraries"
+ feature.enabled = True
+
+ if toolchain.supports_embedded_runtimes and not _get_feature(
+ toolchain, "static_link_cpp_runtimes"):
+ feature = toolchain.feature.add()
+ feature.name = "static_link_cpp_runtimes"
+ feature.enabled = True
+
+ if toolchain.needsPic and not _get_feature(toolchain, "supports_pic"):
+ feature = toolchain.feature.add()
+ feature.name = "supports_pic"
+ feature.enabled = True
+
+ if toolchain.supports_fission and not _get_feature(
+ toolchain, "per_object_debug_info"):
+ # feature {
+ # name: "per_object_debug_info"
+ # enabled: true
+ # flag_set {
+ # action: "assemble"
+ # action: "preprocess-assemble"
+ # action: "c-compile"
+ # action: "c++-compile"
+ # action: "c++-module-codegen"
+ # action: "lto-backend"
+ # flag_group {
+      #       expand_if_all_available: 'is_using_fission'
+ # flag: "-gsplit-dwarf"
+ # }
+ # }
+ # }
+ feature = toolchain.feature.add()
+ feature.name = "per_object_debug_info"
+ feature.enabled = True
+ flag_set = feature.flag_set.add()
+ flag_set.action[:] = [
+ "c-compile", "c++-compile", "c++-module-codegen", "assemble",
+ "preprocess-assemble", "lto-backend"
+ ]
+ flag_group = flag_set.flag_group.add()
+ flag_group.expand_if_all_available[:] = ["is_using_fission"]
+ flag_group.flag[:] = ["-gsplit-dwarf"]
+
+ if toolchain.objcopy_embed_flag and not _get_feature(
+ toolchain, "objcopy_embed_flags"):
+ feature = toolchain.feature.add()
+ feature.name = "objcopy_embed_flags"
+ feature.enabled = True
+ flag_set = feature.flag_set.add()
+ flag_set.action[:] = ["objcopy_embed_data"]
+ flag_group = flag_set.flag_group.add()
+ flag_group.flag[:] = toolchain.objcopy_embed_flag
+
+ action_config = toolchain.action_config.add()
+ action_config.action_name = "objcopy_embed_data"
+ action_config.config_name = "objcopy_embed_data"
+ action_config.enabled = True
+ tool = action_config.tool.add()
+ tool.tool_path = _find_tool_path(toolchain, "objcopy")
+
+ if toolchain.ld_embed_flag and not _get_feature(
+ toolchain, "ld_embed_flags"):
+ feature = toolchain.feature.add()
+ feature.name = "ld_embed_flags"
+ feature.enabled = True
+ flag_set = feature.flag_set.add()
+ flag_set.action[:] = ["ld_embed_data"]
+ flag_group = flag_set.flag_group.add()
+ flag_group.flag[:] = toolchain.ld_embed_flag
+
+ action_config = toolchain.action_config.add()
+ action_config.action_name = "ld_embed_data"
+ action_config.config_name = "ld_embed_data"
+ action_config.enabled = True
+ tool = action_config.tool.add()
+ tool.tool_path = _find_tool_path(toolchain, "ld")
+
+
+ # Create default_link_flags feature for linker_flag
+ flag_sets = _extract_legacy_link_flag_sets_for(toolchain)
+ if flag_sets:
+ if _get_feature(toolchain, "default_link_flags"):
+ continue
+ if _get_feature(toolchain, "legacy_link_flags"):
+ for f in toolchain.feature:
+ if f.name == "legacy_link_flags":
+ f.ClearField("flag_set")
+ feature = f
+ _rename_feature_in_toolchain(toolchain, "legacy_link_flags",
+ "default_link_flags")
+ break
+ else:
+ feature = _prepend_feature(toolchain)
+ feature.name = "default_link_flags"
+ feature.enabled = True
+ _add_flag_sets(feature, flag_sets)
+
+ # Create default_compile_flags feature for compiler_flag, cxx_flag
+ flag_sets = _extract_legacy_compile_flag_sets_for(toolchain)
+ if flag_sets and not _get_feature(toolchain, "default_compile_flags"):
+ if _get_feature(toolchain, "legacy_compile_flags"):
+ for f in toolchain.feature:
+ if f.name == "legacy_compile_flags":
+ f.ClearField("flag_set")
+ feature = f
+ _rename_feature_in_toolchain(toolchain, "legacy_compile_flags",
+ "default_compile_flags")
+ break
+ else:
+ feature = _prepend_feature(toolchain)
+ feature.enabled = True
+ feature.name = "default_compile_flags"
+ _add_flag_sets(feature, flag_sets)
+
+ # Unfiltered cxx flags have to have their own special feature.
+ # "unfiltered_compile_flags" is a well-known (by Bazel) feature name that is
+ # excluded from nocopts filtering.
+ if toolchain.unfiltered_cxx_flag:
+ # If there already is a feature named unfiltered_compile_flags, the
+ # crosstool is already migrated for unfiltered_compile_flags
+ if _get_feature(toolchain, "unfiltered_compile_flags"):
+ for f in toolchain.feature:
+ if f.name == "unfiltered_compile_flags":
+ for flag_set in f.flag_set:
+ for flag_group in flag_set.flag_group:
+ if flag_group.iterate_over == "unfiltered_compile_flags":
+ flag_group.ClearField("iterate_over")
+ flag_group.ClearField("expand_if_all_available")
+ flag_group.ClearField("flag")
+ flag_group.flag[:] = toolchain.unfiltered_cxx_flag
+ else:
+ if not _get_feature(toolchain, "user_compile_flags"):
+ feature = toolchain.feature.add()
+ feature.name = "user_compile_flags"
+ feature.enabled = True
+ flag_set = feature.flag_set.add()
+ flag_set.action[:] = compile_actions(toolchain)
+ flag_group = flag_set.flag_group.add()
+ flag_group.expand_if_all_available[:] = ["user_compile_flags"]
+ flag_group.iterate_over = "user_compile_flags"
+ flag_group.flag[:] = ["%{user_compile_flags}"]
+
+ if not _get_feature(toolchain, "sysroot"):
+ sysroot_actions = compile_actions(toolchain) + link_actions(toolchain)
+ sysroot_actions.remove("assemble")
+ feature = toolchain.feature.add()
+ feature.name = "sysroot"
+ feature.enabled = True
+ flag_set = feature.flag_set.add()
+ flag_set.action[:] = sysroot_actions
+ flag_group = flag_set.flag_group.add()
+ flag_group.expand_if_all_available[:] = ["sysroot"]
+ flag_group.flag[:] = ["--sysroot=%{sysroot}"]
+
+ feature = toolchain.feature.add()
+ feature.name = "unfiltered_compile_flags"
+ feature.enabled = True
+ flag_set = feature.flag_set.add()
+ flag_set.action[:] = compile_actions(toolchain)
+ flag_group = flag_set.flag_group.add()
+ flag_group.flag[:] = toolchain.unfiltered_cxx_flag
+
+ # clear fields
+ toolchain.ClearField("debian_extra_requires")
+ toolchain.ClearField("gcc_plugin_compiler_flag")
+ toolchain.ClearField("ar_flag")
+ toolchain.ClearField("ar_thin_archives_flag")
+ toolchain.ClearField("gcc_plugin_header_directory")
+ toolchain.ClearField("mao_plugin_header_directory")
+ toolchain.ClearField("supports_normalizing_ar")
+ toolchain.ClearField("supports_thin_archives")
+ toolchain.ClearField("supports_incremental_linker")
+ toolchain.ClearField("supports_dsym")
+ toolchain.ClearField("supports_gold_linker")
+ toolchain.ClearField("default_python_top")
+ toolchain.ClearField("default_python_version")
+ toolchain.ClearField("python_preload_swigdeps")
+ toolchain.ClearField("needsPic")
+ toolchain.ClearField("compilation_mode_flags")
+ toolchain.ClearField("linking_mode_flags")
+ toolchain.ClearField("unfiltered_cxx_flag")
+ toolchain.ClearField("ld_embed_flag")
+ toolchain.ClearField("objcopy_embed_flag")
+ toolchain.ClearField("supports_start_end_lib")
+ toolchain.ClearField("supports_interface_shared_objects")
+ toolchain.ClearField("supports_fission")
+ toolchain.ClearField("supports_embedded_runtimes")
+ toolchain.ClearField("compiler_flag")
+ toolchain.ClearField("cxx_flag")
+ toolchain.ClearField("linker_flag")
+ toolchain.ClearField("dynamic_library_linker_flag")
+ toolchain.ClearField("static_runtimes_filegroup")
+ toolchain.ClearField("dynamic_runtimes_filegroup")
+
+ # Enable features that were previously enabled by Bazel
+ default_features = [
+ "dependency_file", "random_seed", "module_maps", "module_map_home_cwd",
+ "header_module_compile", "include_paths", "pic", "preprocessor_define"
+ ]
+ for feature_name in default_features:
+ feature = _get_feature(toolchain, feature_name)
+ if feature:
+ feature.enabled = True
+
+
+def _find_tool_path(toolchain, tool_name):
+ """Returns the tool path of the tool with the given name."""
+ for tool in toolchain.tool_path:
+ if tool.name == tool_name:
+ return tool.path
+ return None
+
+
+def _add_flag_sets(feature, flag_sets):
+ """Add flag sets into a feature."""
+ for flag_set in flag_sets:
+ with_feature = flag_set[0]
+ actions = flag_set[1]
+ flags = flag_set[2]
+ expand_if_all_available = flag_set[3]
+ not_feature = None
+ if len(flag_set) >= 5:
+ not_feature = flag_set[4]
+ flag_set = feature.flag_set.add()
+ if with_feature is not None:
+ flag_set.with_feature.add().feature[:] = [with_feature]
+ if not_feature is not None:
+ flag_set.with_feature.add().not_feature[:] = [not_feature]
+ flag_set.action[:] = actions
+ flag_group = flag_set.flag_group.add()
+ flag_group.expand_if_all_available[:] = expand_if_all_available
+ flag_group.flag[:] = flags
+ return feature
+
+
+def _extract_legacy_compile_flag_sets_for(toolchain):
+ """Get flag sets for default_compile_flags feature."""
+ result = []
+ if toolchain.compiler_flag:
+ result.append(
+ [None, compile_actions(toolchain), toolchain.compiler_flag, []])
+
+ # Migrate compiler_flag from compilation_mode_flags
+ for cmf in toolchain.compilation_mode_flags:
+ mode = crosstool_config_pb2.CompilationMode.Name(cmf.mode).lower()
+    # coverage mode has been a no-op for a while
+ if mode == "coverage":
+ continue
+
+ if (cmf.compiler_flag or
+ cmf.cxx_flag) and not _get_feature(toolchain, mode):
+ feature = toolchain.feature.add()
+ feature.name = mode
+
+ if cmf.compiler_flag:
+ result.append([mode, compile_actions(toolchain), cmf.compiler_flag, []])
+
+ if toolchain.cxx_flag:
+ result.append([None, ALL_CXX_COMPILE_ACTIONS, toolchain.cxx_flag, []])
+
+  # Migrate cxx_flag from compilation_mode_flags
+ for cmf in toolchain.compilation_mode_flags:
+ mode = crosstool_config_pb2.CompilationMode.Name(cmf.mode).lower()
+    # coverage mode has been a no-op for a while
+ if mode == "coverage":
+ continue
+
+ if cmf.cxx_flag:
+ result.append([mode, ALL_CXX_COMPILE_ACTIONS, cmf.cxx_flag, []])
+
+ return result
+
+
+def _extract_legacy_link_flag_sets_for(toolchain):
+ """Get flag sets for default_link_flags feature."""
+ result = []
+
+ # Migrate linker_flag
+ if toolchain.linker_flag:
+ result.append([None, link_actions(toolchain), toolchain.linker_flag, []])
+
+ # Migrate linker_flags from compilation_mode_flags
+ for cmf in toolchain.compilation_mode_flags:
+ mode = crosstool_config_pb2.CompilationMode.Name(cmf.mode).lower()
+    # coverage mode has been a no-op for a while
+ if mode == "coverage":
+ continue
+
+ if cmf.linker_flag and not _get_feature(toolchain, mode):
+ feature = toolchain.feature.add()
+ feature.name = mode
+
+ if cmf.linker_flag:
+ result.append([mode, link_actions(toolchain), cmf.linker_flag, []])
+
+ # Migrate linker_flags from linking_mode_flags
+ for lmf in toolchain.linking_mode_flags:
+ mode = crosstool_config_pb2.LinkingMode.Name(lmf.mode)
+ feature_name = LINKING_MODE_TO_FEATURE_NAME.get(mode)
+    # if the feature is already there, we don't migrate; lmf is not used
+ if _get_feature(toolchain, feature_name):
+ continue
+
+ if lmf.linker_flag:
+ feature = toolchain.feature.add()
+ feature.name = feature_name
+ if mode == "DYNAMIC":
+ result.append(
+ [None, NODEPS_DYNAMIC_LIBRARY_LINK_ACTIONS, lmf.linker_flag, []])
+ result.append([
+ None,
+ TRANSITIVE_DYNAMIC_LIBRARY_LINK_ACTIONS,
+ lmf.linker_flag,
+ [],
+ "static_link_cpp_runtimes",
+ ])
+ result.append([
+ feature_name,
+ executable_link_actions(toolchain), lmf.linker_flag, []
+ ])
+ elif mode == "MOSTLY_STATIC":
+ result.append(
+ [feature_name,
+ CC_LINK_EXECUTABLE, lmf.linker_flag, []])
+ else:
+ result.append(
+ [feature_name,
+ link_actions(toolchain), lmf.linker_flag, []])
+
+ if toolchain.dynamic_library_linker_flag:
+ result.append([
+ None, DYNAMIC_LIBRARY_LINK_ACTIONS,
+ toolchain.dynamic_library_linker_flag, []
+ ])
+
+ if toolchain.test_only_linker_flag:
+ result.append([
+ None,
+ link_actions(toolchain), toolchain.test_only_linker_flag,
+ ["is_cc_test"]
+ ])
+
+ return result
+
+
+def _prepend_feature(toolchain):
+ """Create a new feature and make it be the first in the toolchain."""
+ features = toolchain.feature
+ toolchain.ClearField("feature")
+ new_feature = toolchain.feature.add()
+ toolchain.feature.extend(features)
+ return new_feature
+
+
+def _get_feature(toolchain, name):
+ """Returns feature with a given name or None."""
+ for feature in toolchain.feature:
+ if feature.name == name:
+ return feature
+ return None
+
+
+def _migrate_expand_if_all_available(message):
+ """Move expand_if_all_available field to flag_groups."""
+ for flag_set in message.flag_set:
+ if flag_set.expand_if_all_available:
+ for flag_group in flag_set.flag_group:
+ new_vars = (
+ flag_group.expand_if_all_available[:] +
+ flag_set.expand_if_all_available[:])
+ flag_group.expand_if_all_available[:] = new_vars
+ flag_set.ClearField("expand_if_all_available")
+
+
+def _migrate_repeated_expands(message):
+ """Replace repeated legacy fields with nesting."""
+ todo_queue = []
+ for flag_set in message.flag_set:
+ todo_queue.extend(flag_set.flag_group)
+ while todo_queue:
+ flag_group = todo_queue.pop()
+ todo_queue.extend(flag_group.flag_group)
+ if len(flag_group.expand_if_all_available) <= 1 and len(
+ flag_group.expand_if_none_available) <= 1:
+ continue
+
+ current_children = flag_group.flag_group
+ current_flags = flag_group.flag
+ flag_group.ClearField("flag_group")
+ flag_group.ClearField("flag")
+
+ new_flag_group = flag_group.flag_group.add()
+ new_flag_group.flag_group.extend(current_children)
+ new_flag_group.flag.extend(current_flags)
+
+ if len(flag_group.expand_if_all_available) > 1:
+ expands_to_move = flag_group.expand_if_all_available[1:]
+ flag_group.expand_if_all_available[:] = [
+ flag_group.expand_if_all_available[0]
+ ]
+ new_flag_group.expand_if_all_available.extend(expands_to_move)
+
+ if len(flag_group.expand_if_none_available) > 1:
+ expands_to_move = flag_group.expand_if_none_available[1:]
+ flag_group.expand_if_none_available[:] = [
+ flag_group.expand_if_none_available[0]
+ ]
+ new_flag_group.expand_if_none_available.extend(expands_to_move)
+
+ todo_queue.append(new_flag_group)
+ todo_queue.append(flag_group)
+
+
+def _contains_dynamic_flags(toolchain):
+ for lmf in toolchain.linking_mode_flags:
+ mode = crosstool_config_pb2.LinkingMode.Name(lmf.mode)
+ if mode == "DYNAMIC":
+ return True
+ return False
+
+
+def _rename_feature_in_toolchain(toolchain, from_name, to_name):
+ for f in toolchain.feature:
+ _rename_feature_in(f, from_name, to_name)
+ for a in toolchain.action_config:
+ _rename_feature_in(a, from_name, to_name)
+
+
+def _rename_feature_in(msg, from_name, to_name):
+ if from_name in msg.implies:
+ msg.implies.remove(from_name)
+ for requires in msg.requires:
+ if from_name in requires.feature:
+ requires.feature.remove(from_name)
+ requires.feature.extend([to_name])
+ for flag_set in msg.flag_set:
+ for with_feature in flag_set.with_feature:
+ if from_name in with_feature.feature:
+ with_feature.feature.remove(from_name)
+ with_feature.feature.extend([to_name])
+ if from_name in with_feature.not_feature:
+ with_feature.not_feature.remove(from_name)
+ with_feature.not_feature.extend([to_name])
+ for env_set in msg.env_set:
+ for with_feature in env_set.with_feature:
+ if from_name in with_feature.feature:
+ with_feature.feature.remove(from_name)
+ with_feature.feature.extend([to_name])
+ if from_name in with_feature.not_feature:
+ with_feature.not_feature.remove(from_name)
+ with_feature.not_feature.extend([to_name])
diff --git a/tools/migration/legacy_fields_migration_lib_test.py b/tools/migration/legacy_fields_migration_lib_test.py
new file mode 100644
index 0000000..93972cc
--- /dev/null
+++ b/tools/migration/legacy_fields_migration_lib_test.py
@@ -0,0 +1,1240 @@
+import unittest
+from google.protobuf import text_format
+from third_party.com.github.bazelbuild.bazel.src.main.protobuf import crosstool_config_pb2
+from tools.migration.legacy_fields_migration_lib import ALL_CC_COMPILE_ACTIONS
+from tools.migration.legacy_fields_migration_lib import ALL_OBJC_COMPILE_ACTIONS
+from tools.migration.legacy_fields_migration_lib import ALL_CXX_COMPILE_ACTIONS
+from tools.migration.legacy_fields_migration_lib import ALL_CC_LINK_ACTIONS
+from tools.migration.legacy_fields_migration_lib import ALL_OBJC_LINK_ACTIONS
+from tools.migration.legacy_fields_migration_lib import DYNAMIC_LIBRARY_LINK_ACTIONS
+from tools.migration.legacy_fields_migration_lib import NODEPS_DYNAMIC_LIBRARY_LINK_ACTIONS
+from tools.migration.legacy_fields_migration_lib import TRANSITIVE_LINK_ACTIONS
+from tools.migration.legacy_fields_migration_lib import TRANSITIVE_DYNAMIC_LIBRARY_LINK_ACTIONS
+from tools.migration.legacy_fields_migration_lib import CC_LINK_EXECUTABLE
+from tools.migration.legacy_fields_migration_lib import migrate_legacy_fields
+
+
+def assert_has_feature(self, toolchain, name):
+ self.assertTrue(any(feature.name == name for feature in toolchain.feature))
+
+
+def make_crosstool(string):
+ crosstool = crosstool_config_pb2.CrosstoolRelease()
+ text_format.Merge("major_version: '123' minor_version: '456'", crosstool)
+ toolchain = crosstool.toolchain.add()
+ text_format.Merge(string, toolchain)
+ return crosstool
+
+
+def migrate_to_string(crosstool):
+ migrate_legacy_fields(crosstool)
+ return to_string(crosstool)
+
+
+def to_string(crosstool):
+ return text_format.MessageToString(crosstool)
+
+
+class LegacyFieldsMigrationLibTest(unittest.TestCase):
+
+ def test_deletes_fields(self):
+ crosstool = make_crosstool("""
+ debian_extra_requires: 'debian-1'
+ gcc_plugin_compiler_flag: 'gcc_plugin_compiler_flag-1'
+ ar_flag: 'ar_flag-1'
+ ar_thin_archives_flag: 'ar_thin_archives_flag-1'
+ gcc_plugin_header_directory: 'gcc_plugin_header_directory-1'
+ mao_plugin_header_directory: 'mao_plugin_header_directory-1'
+ default_python_top: 'default_python_top-1'
+ default_python_version: 'default_python_version-1'
+ python_preload_swigdeps: false
+ supports_normalizing_ar: false
+ supports_thin_archives: false
+ supports_incremental_linker: false
+ supports_dsym: false
+ supports_gold_linker: false
+ needsPic: false
+ supports_start_end_lib: false
+ supports_interface_shared_objects: false
+ supports_fission: false
+ supports_embedded_runtimes: false
+ static_runtimes_filegroup: 'yolo'
+ dynamic_runtimes_filegroup: 'yolo'
+ """)
+ output = migrate_to_string(crosstool)
+ self.assertNotIn("debian_extra_requires", output)
+ self.assertNotIn("gcc_plugin_compiler_flag", output)
+ self.assertNotIn("ar_flag", output)
+ self.assertNotIn("ar_thin_archives_flag", output)
+ self.assertNotIn("gcc_plugin_header_directory", output)
+ self.assertNotIn("mao_plugin_header_directory", output)
+ self.assertNotIn("supports_normalizing_ar", output)
+ self.assertNotIn("supports_thin_archives", output)
+ self.assertNotIn("supports_incremental_linker", output)
+ self.assertNotIn("supports_dsym", output)
+ self.assertNotIn("default_python_top", output)
+ self.assertNotIn("default_python_version", output)
+ self.assertNotIn("python_preload_swigdeps", output)
+ self.assertNotIn("supports_gold_linker", output)
+ self.assertNotIn("needsPic", output)
+ self.assertNotIn("supports_start_end_lib", output)
+ self.assertNotIn("supports_interface_shared_objects", output)
+ self.assertNotIn("supports_fission", output)
+ self.assertNotIn("supports_embedded_runtimes", output)
+ self.assertNotIn("static_runtimes_filegroup", output)
+ self.assertNotIn("dynamic_runtimes_filegroup", output)
+
+ def test_deletes_default_toolchains(self):
+ crosstool = make_crosstool("")
+ crosstool.default_toolchain.add()
+ self.assertEqual(len(crosstool.default_toolchain), 1)
+ migrate_legacy_fields(crosstool)
+ self.assertEqual(len(crosstool.default_toolchain), 0)
+
+ def test_replace_legacy_compile_flags(self):
+ crosstool = make_crosstool("""
+ feature { name: 'foo' }
+ feature { name: 'legacy_compile_flags' }
+ compiler_flag: 'clang-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.compiler_flag), 0)
+ self.assertEqual(output.feature[0].name, "foo")
+ self.assertEqual(output.feature[1].name, "default_compile_flags")
+ self.assertEqual(output.feature[1].flag_set[0].action,
+ ALL_CC_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[1].flag_set[0].flag_group[0].flag,
+ ["clang-flag-1"])
+
+ def test_replace_legacy_compile_flags_in_action_configs(self):
+ crosstool = make_crosstool("""
+ feature {
+ name: 'foo'
+ implies: 'legacy_compile_flags'
+ requires: { feature: 'legacy_compile_flags' }
+ flag_set {
+ with_feature { feature: 'legacy_compile_flags' }
+ with_feature { not_feature: 'legacy_compile_flags' }
+ }
+ env_set {
+ with_feature { feature: 'legacy_compile_flags' }
+ with_feature { not_feature: 'legacy_compile_flags' }
+ }
+ }
+ feature { name: 'legacy_compile_flags' }
+ action_config {
+ action_name: 'foo'
+ config_name: 'foo'
+ implies: 'legacy_compile_flags'
+ requires: { feature: 'legacy_compile_flags' }
+ flag_set {
+ with_feature { feature: 'legacy_compile_flags' }
+ with_feature { not_feature: 'legacy_compile_flags' }
+ }
+ env_set {
+ with_feature { feature: 'legacy_compile_flags' }
+ with_feature { not_feature: 'legacy_compile_flags' }
+ }
+ }
+ compiler_flag: 'clang-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.action_config[0].action_name, "foo")
+ self.assertEqual(output.action_config[0].implies, [])
+ self.assertEqual(output.action_config[0].requires[0].feature,
+ ["default_compile_flags"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].with_feature[0].feature,
+ ["default_compile_flags"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].with_feature[1].not_feature,
+ ["default_compile_flags"])
+ self.assertEqual(output.action_config[0].env_set[0].with_feature[0].feature,
+ ["default_compile_flags"])
+ self.assertEqual(
+ output.action_config[0].env_set[0].with_feature[1].not_feature,
+ ["default_compile_flags"])
+ self.assertEqual(output.feature[0].name, "foo")
+ self.assertEqual(output.feature[0].implies, [])
+ self.assertEqual(output.feature[0].requires[0].feature,
+ ["default_compile_flags"])
+ self.assertEqual(output.feature[0].flag_set[0].with_feature[0].feature,
+ ["default_compile_flags"])
+ self.assertEqual(output.feature[0].flag_set[0].with_feature[1].not_feature,
+ ["default_compile_flags"])
+ self.assertEqual(output.feature[0].env_set[0].with_feature[0].feature,
+ ["default_compile_flags"])
+ self.assertEqual(output.feature[0].env_set[0].with_feature[1].not_feature,
+ ["default_compile_flags"])
+
+ def test_replace_legacy_link_flags(self):
+ crosstool = make_crosstool("""
+ feature { name: 'foo' }
+ feature { name: 'legacy_link_flags' }
+ linker_flag: 'ld-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.compiler_flag), 0)
+ self.assertEqual(output.feature[0].name, "foo")
+ self.assertEqual(output.feature[1].name, "default_link_flags")
+ self.assertEqual(output.feature[1].flag_set[0].action, ALL_CC_LINK_ACTIONS)
+ self.assertEqual(output.feature[1].flag_set[0].flag_group[0].flag,
+ ["ld-flag-1"])
+
+ def test_replace_legacy_link_flags_in_action_configs(self):
+ crosstool = make_crosstool("""
+ feature {
+ name: 'foo'
+ implies: 'legacy_link_flags'
+ requires: { feature: 'legacy_link_flags' }
+ flag_set {
+ with_feature { feature: 'legacy_link_flags' }
+ with_feature { not_feature: 'legacy_link_flags' }
+ }
+ env_set {
+ with_feature { feature: 'legacy_link_flags' }
+ with_feature { not_feature: 'legacy_link_flags' }
+ }
+ }
+ feature { name: 'legacy_link_flags' }
+ action_config {
+ action_name: 'foo'
+ config_name: 'foo'
+ implies: 'legacy_link_flags'
+ requires: { feature: 'legacy_link_flags' }
+ flag_set {
+ with_feature { feature: 'legacy_link_flags' }
+ with_feature { not_feature: 'legacy_link_flags' }
+ }
+ env_set {
+ with_feature { feature: 'legacy_link_flags' }
+ with_feature { not_feature: 'legacy_link_flags' }
+ }
+ }
+ linker_flag: 'clang-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.action_config[0].action_name, "foo")
+ self.assertEqual(output.action_config[0].implies, [])
+ self.assertEqual(output.action_config[0].requires[0].feature,
+ ["default_link_flags"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].with_feature[0].feature,
+ ["default_link_flags"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].with_feature[1].not_feature,
+ ["default_link_flags"])
+ self.assertEqual(output.action_config[0].env_set[0].with_feature[0].feature,
+ ["default_link_flags"])
+ self.assertEqual(
+ output.action_config[0].env_set[0].with_feature[1].not_feature,
+ ["default_link_flags"])
+ self.assertEqual(output.feature[0].name, "foo")
+ self.assertEqual(output.feature[0].implies, [])
+ self.assertEqual(output.feature[0].requires[0].feature,
+ ["default_link_flags"])
+ self.assertEqual(output.feature[0].flag_set[0].with_feature[0].feature,
+ ["default_link_flags"])
+ self.assertEqual(output.feature[0].flag_set[0].with_feature[1].not_feature,
+ ["default_link_flags"])
+ self.assertEqual(output.feature[0].env_set[0].with_feature[0].feature,
+ ["default_link_flags"])
+ self.assertEqual(output.feature[0].env_set[0].with_feature[1].not_feature,
+ ["default_link_flags"])
+
+
+ def test_migrate_compiler_flags(self):
+ crosstool = make_crosstool("""
+ compiler_flag: 'clang-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.compiler_flag), 0)
+ self.assertEqual(output.feature[0].name, "default_compile_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action, ALL_CC_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["clang-flag-1"])
+
+ def test_migrate_compiler_flags_for_objc(self):
+ crosstool = make_crosstool("""
+ action_config { action_name: "objc-compile" }
+ compiler_flag: 'clang-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.compiler_flag), 0)
+ self.assertEqual(output.feature[0].name, "default_compile_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action, ALL_CC_COMPILE_ACTIONS + ALL_OBJC_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["clang-flag-1"])
+
+ def test_migrate_cxx_flags(self):
+ crosstool = make_crosstool("""
+ cxx_flag: 'clang-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.cxx_flag), 0)
+ self.assertEqual(output.feature[0].name, "default_compile_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action,
+ ALL_CXX_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["clang-flag-1"])
+
+  def test_compiler_flags_come_before_cxx_flags(self):
+ crosstool = make_crosstool("""
+ compiler_flag: 'clang-flag-1'
+ cxx_flag: 'clang-flag-2'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "default_compile_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action, ALL_CC_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[1].action,
+ ALL_CXX_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["clang-flag-1"])
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[0].flag,
+ ["clang-flag-2"])
+
+ def test_migrate_linker_flags(self):
+ crosstool = make_crosstool("""
+ linker_flag: 'linker-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.linker_flag), 0)
+ self.assertEqual(output.feature[0].name, "default_link_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action, ALL_CC_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["linker-flag-1"])
+
+ def test_migrate_dynamic_library_linker_flags(self):
+ crosstool = make_crosstool("""
+ dynamic_library_linker_flag: 'linker-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.dynamic_library_linker_flag), 0)
+ self.assertEqual(output.feature[0].name, "default_link_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action,
+ DYNAMIC_LIBRARY_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["linker-flag-1"])
+
+ def test_compilation_mode_flags(self):
+ crosstool = make_crosstool("""
+ compiler_flag: "compile-flag-1"
+ cxx_flag: "cxx-flag-1"
+ linker_flag: "linker-flag-1"
+ compilation_mode_flags {
+ mode: OPT
+ compiler_flag: "opt-flag-1"
+ cxx_flag: "opt-flag-2"
+ linker_flag: "opt-flag-3"
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.compilation_mode_flags), 0)
+ assert_has_feature(self, output, "opt")
+
+ self.assertEqual(output.feature[0].name, "default_compile_flags")
+ self.assertEqual(output.feature[1].name, "default_link_flags")
+
+ # flag set for compiler_flag fields
+ self.assertEqual(len(output.feature[0].flag_set[0].with_feature), 0)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["compile-flag-1"])
+
+ # flag set for compiler_flag from compilation_mode_flags
+ self.assertEqual(len(output.feature[0].flag_set[1].with_feature), 1)
+ self.assertEqual(output.feature[0].flag_set[1].with_feature[0].feature[0],
+ "opt")
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[0].flag,
+ ["opt-flag-1"])
+
+ # flag set for cxx_flag fields
+ self.assertEqual(len(output.feature[0].flag_set[2].with_feature), 0)
+ self.assertEqual(output.feature[0].flag_set[2].flag_group[0].flag,
+ ["cxx-flag-1"])
+
+ # flag set for cxx_flag from compilation_mode_flags
+ self.assertEqual(len(output.feature[0].flag_set[3].with_feature), 1)
+ self.assertEqual(output.feature[0].flag_set[3].with_feature[0].feature[0],
+ "opt")
+ self.assertEqual(output.feature[0].flag_set[3].flag_group[0].flag,
+ ["opt-flag-2"])
+
+ # default_link_flags, flag set for linker_flag
+ self.assertEqual(len(output.feature[1].flag_set[0].with_feature), 0)
+ self.assertEqual(output.feature[1].flag_set[0].flag_group[0].flag,
+ ["linker-flag-1"])
+
+ # default_link_flags, flag set for linker_flag from
+ # compilation_mode_flags
+ self.assertEqual(len(output.feature[1].flag_set[1].with_feature), 1)
+ self.assertEqual(output.feature[1].flag_set[1].with_feature[0].feature[0],
+ "opt")
+ self.assertEqual(output.feature[1].flag_set[1].flag_group[0].flag,
+ ["opt-flag-3"])
+
+ def test_linking_mode_flags(self):
+ crosstool = make_crosstool("""
+ linker_flag: "linker-flag-1"
+ compilation_mode_flags {
+ mode: DBG
+ linker_flag: "dbg-flag-1"
+ }
+ linking_mode_flags {
+ mode: MOSTLY_STATIC
+ linker_flag: "mostly-static-flag-1"
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.compilation_mode_flags), 0)
+ self.assertEqual(len(output.linking_mode_flags), 0)
+
+ # flag set for linker_flag
+ self.assertEqual(len(output.feature[0].flag_set[0].with_feature), 0)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["linker-flag-1"])
+
+ # flag set for compilation_mode_flags
+ self.assertEqual(len(output.feature[0].flag_set[1].with_feature), 1)
+ self.assertEqual(output.feature[0].flag_set[1].with_feature[0].feature[0],
+ "dbg")
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[0].flag,
+ ["dbg-flag-1"])
+
+ # flag set for linking_mode_flags
+ self.assertEqual(len(output.feature[0].flag_set[2].with_feature), 1)
+ self.assertEqual(output.feature[0].flag_set[2].action, CC_LINK_EXECUTABLE)
+ self.assertEqual(output.feature[0].flag_set[2].with_feature[0].feature[0],
+ "static_linking_mode")
+ self.assertEqual(output.feature[0].flag_set[2].flag_group[0].flag,
+ ["mostly-static-flag-1"])
+
+ def test_coverage_compilation_mode_ignored(self):
+ crosstool = make_crosstool("""
+ compilation_mode_flags {
+ mode: COVERAGE
+ compiler_flag: "coverage-flag-1"
+ cxx_flag: "coverage-flag-2"
+ linker_flag: "coverage-flag-3"
+ }
+ """)
+ output = migrate_to_string(crosstool)
+ self.assertNotIn("compilation_mode_flags", output)
+ self.assertNotIn("coverage-flag-1", output)
+ self.assertNotIn("coverage-flag-2", output)
+ self.assertNotIn("coverage-flag-3", output)
+ self.assertNotIn("COVERAGE", output)
+
+ def test_supports_dynamic_linker_when_dynamic_library_linker_flag_is_used(
+ self):
+ crosstool = make_crosstool("""
+ dynamic_library_linker_flag: "foo"
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "default_link_flags")
+ self.assertEqual(output.feature[1].name, "supports_dynamic_linker")
+ self.assertEqual(output.feature[1].enabled, True)
+
+ def test_supports_dynamic_linker_is_added_when_DYNAMIC_present(self):
+ crosstool = make_crosstool("""
+ linking_mode_flags {
+ mode: DYNAMIC
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "supports_dynamic_linker")
+ self.assertEqual(output.feature[0].enabled, True)
+
+ def test_supports_dynamic_linker_is_not_added_when_present(self):
+ crosstool = make_crosstool("""
+ feature { name: "supports_dynamic_linker" enabled: false }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "supports_dynamic_linker")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_all_linker_flag_ordering(self):
+ crosstool = make_crosstool("""
+ linker_flag: 'linker-flag-1'
+ compilation_mode_flags {
+ mode: OPT
+ linker_flag: 'cmf-flag-2'
+ }
+ linking_mode_flags {
+ mode: MOSTLY_STATIC
+ linker_flag: 'lmf-flag-3'
+ }
+ linking_mode_flags {
+ mode: DYNAMIC
+ linker_flag: 'lmf-dynamic-flag-4'
+ }
+ dynamic_library_linker_flag: 'dl-flag-5'
+ test_only_linker_flag: 'to-flag-6'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "default_link_flags")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(output.feature[0].flag_set[0].action[:], ALL_CC_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag[:],
+ ["linker-flag-1"])
+
+ self.assertEqual(output.feature[0].flag_set[1].action[:], ALL_CC_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[1].with_feature[0].feature[0],
+ "opt")
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[0].flag,
+ ["cmf-flag-2"])
+
+ self.assertEqual(output.feature[0].flag_set[2].action, CC_LINK_EXECUTABLE)
+ self.assertEqual(output.feature[0].flag_set[2].with_feature[0].feature[0],
+ "static_linking_mode")
+ self.assertEqual(output.feature[0].flag_set[2].flag_group[0].flag,
+ ["lmf-flag-3"])
+
+ self.assertEqual(len(output.feature[0].flag_set[3].with_feature), 0)
+ self.assertEqual(output.feature[0].flag_set[3].flag_group[0].flag,
+ ["lmf-dynamic-flag-4"])
+ self.assertEqual(output.feature[0].flag_set[3].action,
+ NODEPS_DYNAMIC_LIBRARY_LINK_ACTIONS)
+
+ self.assertEqual(
+ output.feature[0].flag_set[4].with_feature[0].not_feature[0],
+ "static_link_cpp_runtimes")
+ self.assertEqual(output.feature[0].flag_set[4].flag_group[0].flag,
+ ["lmf-dynamic-flag-4"])
+ self.assertEqual(output.feature[0].flag_set[4].action,
+ TRANSITIVE_DYNAMIC_LIBRARY_LINK_ACTIONS)
+
+ self.assertEqual(output.feature[0].flag_set[5].with_feature[0].feature[0],
+ "dynamic_linking_mode")
+ self.assertEqual(output.feature[0].flag_set[5].flag_group[0].flag,
+ ["lmf-dynamic-flag-4"])
+ self.assertEqual(output.feature[0].flag_set[5].action,
+ CC_LINK_EXECUTABLE)
+
+ self.assertEqual(output.feature[0].flag_set[6].flag_group[0].flag,
+ ["dl-flag-5"])
+ self.assertEqual(output.feature[0].flag_set[6].action,
+ DYNAMIC_LIBRARY_LINK_ACTIONS)
+
+ self.assertEqual(output.feature[0].flag_set[7].flag_group[0].flag,
+ ["to-flag-6"])
+ self.assertEqual(output.feature[0].flag_set[7].action, ALL_CC_LINK_ACTIONS)
+ self.assertEqual(
+ output.feature[0].flag_set[7].flag_group[0].expand_if_all_available,
+ ["is_cc_test"])
+
+ def test_all_linker_flag_objc_actions(self):
+ crosstool = make_crosstool("""
+ action_config { action_name: "objc-compile" }
+ linker_flag: 'linker-flag-1'
+ compilation_mode_flags {
+ mode: OPT
+ linker_flag: 'cmf-flag-2'
+ }
+ linking_mode_flags {
+ mode: MOSTLY_STATIC
+ linker_flag: 'lmf-flag-3'
+ }
+ dynamic_library_linker_flag: 'dl-flag-5'
+ test_only_linker_flag: 'to-flag-6'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "default_link_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action[:],
+ ALL_CC_LINK_ACTIONS + ALL_OBJC_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[1].action[:],
+ ALL_CC_LINK_ACTIONS + ALL_OBJC_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[2].action[:],
+ CC_LINK_EXECUTABLE)
+ self.assertEqual(output.feature[0].flag_set[3].action[:],
+ DYNAMIC_LIBRARY_LINK_ACTIONS)
+ self.assertEqual(output.feature[0].flag_set[4].action[:],
+ ALL_CC_LINK_ACTIONS + ALL_OBJC_LINK_ACTIONS)
+
+ def test_linking_mode_features_are_not_added_when_present(self):
+ crosstool = make_crosstool("""
+ linking_mode_flags {
+ mode: DYNAMIC
+ linker_flag: 'dynamic-flag'
+ }
+ linking_mode_flags {
+ mode: FULLY_STATIC
+ linker_flag: 'fully-static-flag'
+ }
+ linking_mode_flags {
+ mode: MOSTLY_STATIC
+ linker_flag: 'mostly-static-flag'
+ }
+ linking_mode_flags {
+ mode: MOSTLY_STATIC_LIBRARIES
+ linker_flag: 'mostly-static-libraries-flag'
+ }
+ feature { name: "static_linking_mode" }
+ feature { name: "dynamic_linking_mode" }
+ feature { name: "static_linking_mode_nodeps_library" }
+ feature { name: "fully_static_link" }
+ """)
+ output = migrate_to_string(crosstool)
+ self.assertNotIn("linking_mode_flags", output)
+ self.assertNotIn("DYNAMIC", output)
+ self.assertNotIn("MOSTLY_STATIC", output)
+ self.assertNotIn("MOSTLY_STATIC_LIBRARIES", output)
+ self.assertNotIn("MOSTLY_STATIC_LIBRARIES", output)
+ self.assertNotIn("dynamic-flag", output)
+ self.assertNotIn("fully-static-flag", output)
+ self.assertNotIn("mostly-static-flag", output)
+ self.assertNotIn("mostly-static-libraries-flag", output)
+
+ def test_unfiltered_require_user_compile_flags_and_sysroot(self):
+ crosstool = make_crosstool("""
+ feature { name: 'preexisting_feature' }
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ # all these features are added after features that are already present in
+ # the crosstool
+ self.assertEqual(output.feature[0].name, "preexisting_feature")
+ self.assertEqual(output.feature[1].name, "user_compile_flags")
+ self.assertEqual(output.feature[2].name, "sysroot")
+ self.assertEqual(output.feature[3].name, "unfiltered_compile_flags")
+
+ def test_user_compile_flags_not_migrated_when_present(self):
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ feature { name: 'user_compile_flags' }
+ feature { name: 'preexisting_feature' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "user_compile_flags")
+ self.assertEqual(output.feature[1].name, "preexisting_feature")
+ self.assertEqual(output.feature[2].name, "sysroot")
+ self.assertEqual(output.feature[3].name, "unfiltered_compile_flags")
+
+ def test_sysroot_not_migrated_when_present(self):
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ feature { name: 'sysroot' }
+ feature { name: 'preexisting_feature' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "sysroot")
+ self.assertEqual(output.feature[1].name, "preexisting_feature")
+ self.assertEqual(output.feature[2].name, "user_compile_flags")
+ self.assertEqual(output.feature[3].name, "unfiltered_compile_flags")
+
+ def test_user_compile_flags(self):
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "user_compile_flags")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(output.feature[0].flag_set[0].action,
+ ALL_CC_COMPILE_ACTIONS)
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].expand_if_all_available,
+ ["user_compile_flags"])
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].iterate_over,
+ "user_compile_flags")
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["%{user_compile_flags}"])
+
+ def test_sysroot(self):
+ sysroot_actions = ALL_CC_COMPILE_ACTIONS + ALL_CC_LINK_ACTIONS
+ sysroot_actions.remove("assemble")
+ self.assertTrue("assemble" not in sysroot_actions)
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[1].name, "sysroot")
+ self.assertEqual(output.feature[1].enabled, True)
+ self.assertEqual(output.feature[1].flag_set[0].action, sysroot_actions)
+ self.assertEqual(
+ output.feature[1].flag_set[0].flag_group[0].expand_if_all_available,
+ ["sysroot"])
+ self.assertEqual(output.feature[1].flag_set[0].flag_group[0].flag,
+ ["--sysroot=%{sysroot}"])
+
+ def test_unfiltered_compile_flags_is_not_added_when_already_present(self):
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ feature { name: 'something_else' }
+ feature { name: 'unfiltered_compile_flags' }
+ feature { name: 'something_else_2' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(output.feature[1].name, "unfiltered_compile_flags")
+ self.assertEqual(len(output.feature[1].flag_set), 0)
+ self.assertEqual(output.feature[2].name, "something_else_2")
+
+ def test_unfiltered_compile_flags_is_not_edited_if_old_variant_present(self):
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ feature {
+ name: 'unfiltered_compile_flags'
+ flag_set {
+ action: 'c-compile'
+ flag_group {
+ flag: 'foo-flag-1'
+ }
+ }
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "unfiltered_compile_flags")
+ self.assertEqual(len(output.feature[0].flag_set), 1)
+ self.assertEqual(output.feature[0].flag_set[0].action, ["c-compile"])
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["foo-flag-1"])
+
+ def test_use_of_unfiltered_compile_flags_var_is_removed_and_replaced(self):
+ crosstool = make_crosstool("""
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ feature {
+ name: 'unfiltered_compile_flags'
+ flag_set {
+ action: 'c-compile'
+ flag_group {
+ flag: 'foo-flag-1'
+ }
+ }
+ flag_set {
+ action: 'c++-compile'
+ flag_group {
+ flag: 'bar-flag-1'
+ }
+ flag_group {
+ expand_if_all_available: 'unfiltered_compile_flags'
+ iterate_over: 'unfiltered_compile_flags'
+ flag: '%{unfiltered_compile_flags}'
+ }
+ flag_group {
+ flag: 'bar-flag-2'
+ }
+ }
+ flag_set {
+ action: 'c-compile'
+ flag_group {
+ flag: 'foo-flag-2'
+ }
+ }
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "unfiltered_compile_flags")
+ self.assertEqual(output.feature[0].flag_set[0].action, ["c-compile"])
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["foo-flag-1"])
+ self.assertEqual(output.feature[0].flag_set[1].action, ["c++-compile"])
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[0].flag,
+ ["bar-flag-1"])
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[1].flag,
+ ["unfiltered-flag-1"])
+ self.assertEqual(output.feature[0].flag_set[1].flag_group[2].flag,
+ ["bar-flag-2"])
+ self.assertEqual(output.feature[0].flag_set[2].action, ["c-compile"])
+ self.assertEqual(output.feature[0].flag_set[2].flag_group[0].flag,
+ ["foo-flag-2"])
+
+ def test_unfiltered_compile_flags_is_added_at_the_end(self):
+ crosstool = make_crosstool("""
+ feature { name: 'something_else' }
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(output.feature[1].name, "user_compile_flags")
+ self.assertEqual(output.feature[2].name, "sysroot")
+ self.assertEqual(output.feature[3].name, "unfiltered_compile_flags")
+ self.assertEqual(output.feature[3].flag_set[0].action,
+ ALL_CC_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[3].flag_set[0].flag_group[0].flag,
+ ["unfiltered-flag-1"])
+
+ def test_unfiltered_compile_flags_are_not_added_for_objc(self):
+ crosstool = make_crosstool("""
+ action_config { action_name: "obc-compile" }
+ feature { name: 'something_else' }
+ unfiltered_cxx_flag: 'unfiltered-flag-1'
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[3].name, "unfiltered_compile_flags")
+ self.assertEqual(output.feature[3].flag_set[0].action,
+ ALL_CC_COMPILE_ACTIONS)
+ self.assertEqual(output.feature[3].flag_set[0].flag_group[0].flag,
+ ["unfiltered-flag-1"])
+
+ def test_default_link_flags_is_added_first(self):
+ crosstool = make_crosstool("""
+ linker_flag: 'linker-flag-1'
+ feature { name: 'something_else' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "default_link_flags")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["linker-flag-1"])
+
+ def test_default_link_flags_is_not_added_when_already_present(self):
+ crosstool = make_crosstool("""
+ linker_flag: 'linker-flag-1'
+ feature { name: 'something_else' }
+ feature { name: 'default_link_flags' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(output.feature[1].name, "default_link_flags")
+
+ def test_default_compile_flags_is_not_added_when_no_reason_to(self):
+ crosstool = make_crosstool("""
+ feature { name: 'something_else' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(len(output.feature), 1)
+
+ def test_default_compile_flags_is_first(self):
+ crosstool = make_crosstool("""
+ compiler_flag: 'compiler-flag-1'
+ feature { name: 'something_else' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "default_compile_flags")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag,
+ ["compiler-flag-1"])
+
+ def test_default_compile_flags_not_added_when_present(self):
+ crosstool = make_crosstool("""
+ compiler_flag: 'compiler-flag-1'
+ feature { name: 'something_else' }
+ feature { name: 'default_compile_flags' }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(output.feature[1].name, "default_compile_flags")
+ self.assertEqual(len(output.feature[1].flag_set), 0)
+
+ def test_supports_start_end_lib_migrated(self):
+ crosstool = make_crosstool("supports_start_end_lib: true")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "supports_start_end_lib")
+ self.assertEqual(output.feature[0].enabled, True)
+
+ def test_supports_start_end_lib_not_migrated_on_false(self):
+ crosstool = make_crosstool("supports_start_end_lib: false")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.feature), 0)
+
+ def test_supports_start_end_lib_not_migrated_when_already_present(self):
+ crosstool = make_crosstool("""
+ supports_start_end_lib: true
+ feature { name: "supports_start_end_lib" enabled: false }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "supports_start_end_lib")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_supports_interface_shared_libraries_migrated(self):
+ crosstool = make_crosstool("supports_interface_shared_objects: true")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name,
+ "supports_interface_shared_libraries")
+ self.assertEqual(output.feature[0].enabled, True)
+
+ def test_supports_interface_shared_libraries_not_migrated_on_false(self):
+ crosstool = make_crosstool("supports_interface_shared_objects: false")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.feature), 0)
+
+ def test_supports_interface_shared_libraries_not_migrated_when_present(self):
+ crosstool = make_crosstool("""
+ supports_interface_shared_objects: true
+ feature {
+ name: "supports_interface_shared_libraries"
+ enabled: false }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name,
+ "supports_interface_shared_libraries")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_supports_embedded_runtimes_migrated(self):
+ crosstool = make_crosstool("supports_embedded_runtimes: true")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "static_link_cpp_runtimes")
+ self.assertEqual(output.feature[0].enabled, True)
+
+ def test_supports_embedded_runtimes_not_migrated_on_false(self):
+ crosstool = make_crosstool("supports_embedded_runtimes: false")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.feature), 0)
+
+ def test_supports_embedded_runtimes_not_migrated_when_already_present(self):
+ crosstool = make_crosstool("""
+ supports_embedded_runtimes: true
+ feature { name: "static_link_cpp_runtimes" enabled: false }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "static_link_cpp_runtimes")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_needs_pic_migrated(self):
+ crosstool = make_crosstool("needsPic: true")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "supports_pic")
+ self.assertEqual(output.feature[0].enabled, True)
+
+ def test_needs_pic_not_migrated_on_false(self):
+ crosstool = make_crosstool("needsPic: false")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.feature), 0)
+
+ def test_needs_pic_not_migrated_when_already_present(self):
+ crosstool = make_crosstool("""
+ needsPic: true
+ feature { name: "supports_pic" enabled: false }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "supports_pic")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_supports_fission_migrated(self):
+ crosstool = make_crosstool("supports_fission: true")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "per_object_debug_info")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].expand_if_all_available,
+ ["is_using_fission"])
+
+ def test_supports_fission_not_migrated_on_false(self):
+ crosstool = make_crosstool("supports_fission: false")
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(len(output.feature), 0)
+
+ def test_supports_fission_not_migrated_when_already_present(self):
+ crosstool = make_crosstool("""
+ supports_fission: true
+ feature { name: "per_object_debug_info" enabled: false }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "per_object_debug_info")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_migrating_objcopy_embed_flag(self):
+ crosstool = make_crosstool("""
+ tool_path { name: "objcopy" path: "foo/objcopy" }
+ objcopy_embed_flag: "a"
+ objcopy_embed_flag: "b"
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "objcopy_embed_flags")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(output.feature[0].flag_set[0].action[:],
+ ["objcopy_embed_data"])
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag[:],
+ ["a", "b"])
+ self.assertEqual(len(output.objcopy_embed_flag), 0)
+ self.assertEqual(output.action_config[0].action_name, "objcopy_embed_data")
+ self.assertEqual(output.action_config[0].tool[0].tool_path, "foo/objcopy")
+
+ def test_not_migrating_objcopy_embed_flag_when_feature_present(self):
+ crosstool = make_crosstool("""
+ objcopy_embed_flag: "a"
+ objcopy_embed_flag: "b"
+ feature { name: "objcopy_embed_flags" }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "objcopy_embed_flags")
+ self.assertEqual(output.feature[0].enabled, False)
+
+ def test_migrating_ld_embed_flag(self):
+ crosstool = make_crosstool("""
+ tool_path { name: "ld" path: "foo/ld" }
+ ld_embed_flag: "a"
+ ld_embed_flag: "b"
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.feature[0].name, "ld_embed_flags")
+ self.assertEqual(output.feature[0].enabled, True)
+ self.assertEqual(output.feature[0].flag_set[0].action[:], ["ld_embed_data"])
+ self.assertEqual(output.feature[0].flag_set[0].flag_group[0].flag[:],
+ ["a", "b"])
+ self.assertEqual(len(output.ld_embed_flag), 0)
+ self.assertEqual(output.action_config[0].action_name, "ld_embed_data")
+ self.assertEqual(output.action_config[0].tool[0].tool_path, "foo/ld")
+
+  def test_not_migrating_ld_embed_flag_when_feature_present(self):
+    crosstool = make_crosstool("""
+        ld_embed_flag: "a"
+        ld_embed_flag: "b"
+        feature { name: "ld_embed_flags" }
+        """)
+    migrate_legacy_fields(crosstool)
+    output = crosstool.toolchain[0]
+    self.assertEqual(output.feature[0].name, "ld_embed_flags")
+    self.assertEqual(output.feature[0].enabled, False)
+
+ def test_migrate_expand_if_all_available_from_flag_sets(self):
+ crosstool = make_crosstool("""
+ action_config {
+ action_name: 'something'
+ config_name: 'something'
+ flag_set {
+ expand_if_all_available: 'foo'
+ flag_group {
+ flag: '%{foo}'
+ }
+ flag_group {
+ flag: 'bar'
+ }
+ }
+ }
+ feature {
+ name: 'something_else'
+ flag_set {
+ action: 'c-compile'
+ expand_if_all_available: 'foo'
+ flag_group {
+ flag: '%{foo}'
+ }
+ flag_group {
+ flag: 'bar'
+ }
+ }
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.action_config[0].action_name, "something")
+ self.assertEqual(len(output.action_config[0].flag_set), 1)
+ self.assertEqual(
+ len(output.action_config[0].flag_set[0].expand_if_all_available), 0)
+ self.assertEqual(len(output.action_config[0].flag_set[0].flag_group), 2)
+ self.assertEqual(
+ output.action_config[0].flag_set[0].flag_group[0]
+ .expand_if_all_available, ["foo"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].flag_group[1]
+ .expand_if_all_available, ["foo"])
+
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(len(output.feature[0].flag_set), 1)
+ self.assertEqual(
+ len(output.feature[0].flag_set[0].expand_if_all_available), 0)
+ self.assertEqual(len(output.feature[0].flag_set[0].flag_group), 2)
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].expand_if_all_available,
+ ["foo"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].expand_if_all_available,
+ ["foo"])
+
+ def test_enable_previously_default_features(self):
+ default_features = [
+ "dependency_file", "random_seed", "module_maps", "module_map_home_cwd",
+ "header_module_compile", "include_paths", "pic", "preprocessor_define"
+ ]
+ crosstool = make_crosstool("""
+ feature { name: "dependency_file" }
+ feature { name: "random_seed" }
+ feature { name: "module_maps" }
+ feature { name: "module_map_home_cwd" }
+ feature { name: "header_module_compile" }
+ feature { name: "include_paths" }
+ feature { name: "pic" }
+ feature { name: "preprocessor_define" }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+    for i, feature_name in enumerate(default_features):
+      self.assertEqual(output.feature[i].name, feature_name)
+      self.assertTrue(output.feature[i].enabled)
+
+ def test_migrate_repeated_expand_if_all_available_from_flag_groups(self):
+ crosstool = make_crosstool("""
+ action_config {
+ action_name: 'something'
+ config_name: 'something'
+ flag_set {
+ flag_group {
+ expand_if_all_available: 'foo'
+ expand_if_all_available: 'bar'
+ flag: '%{foo}'
+ }
+ flag_group {
+ expand_if_none_available: 'foo'
+ expand_if_none_available: 'bar'
+ flag: 'bar'
+ }
+ }
+ }
+ feature {
+ name: 'something_else'
+ flag_set {
+ action: 'c-compile'
+ flag_group {
+ expand_if_all_available: 'foo'
+ expand_if_all_available: 'bar'
+ flag: '%{foo}'
+ }
+ flag_group {
+ expand_if_none_available: 'foo'
+ expand_if_none_available: 'bar'
+ flag: 'bar'
+ }
+ }
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+ self.assertEqual(output.action_config[0].action_name, "something")
+ self.assertEqual(len(output.action_config[0].flag_set), 1)
+ self.assertEqual(
+ len(output.action_config[0].flag_set[0].expand_if_all_available), 0)
+ self.assertEqual(len(output.action_config[0].flag_set[0].flag_group), 2)
+ self.assertEqual(
+ output.action_config[0].flag_set[0].flag_group[0]
+ .expand_if_all_available, ["foo"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].flag_group[0].flag_group[0]
+ .expand_if_all_available, ["bar"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].flag_group[1]
+ .expand_if_none_available, ["foo"])
+ self.assertEqual(
+ output.action_config[0].flag_set[0].flag_group[1].flag_group[0]
+ .expand_if_none_available, ["bar"])
+
+ self.assertEqual(output.feature[0].name, "something_else")
+ self.assertEqual(len(output.feature[0].flag_set), 1)
+ self.assertEqual(
+ len(output.feature[0].flag_set[0].expand_if_all_available), 0)
+ self.assertEqual(len(output.feature[0].flag_set[0].flag_group), 2)
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].expand_if_all_available,
+ ["foo"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].flag_group[0]
+ .expand_if_all_available, ["bar"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].expand_if_none_available,
+ ["foo"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].flag_group[0]
+ .expand_if_none_available, ["bar"])
+
+ def test_migrate_repeated_expands_from_nested_flag_groups(self):
+ crosstool = make_crosstool("""
+ feature {
+ name: 'something'
+ flag_set {
+ action: 'c-compile'
+ flag_group {
+ flag_group {
+ expand_if_all_available: 'foo'
+ expand_if_all_available: 'bar'
+ flag: '%{foo}'
+ }
+ }
+ flag_group {
+ flag_group {
+ expand_if_all_available: 'foo'
+ expand_if_all_available: 'bar'
+ expand_if_none_available: 'foo'
+ expand_if_none_available: 'bar'
+ flag: '%{foo}'
+ }
+ }
+ }
+ }
+ """)
+ migrate_legacy_fields(crosstool)
+ output = crosstool.toolchain[0]
+
+ self.assertEqual(output.feature[0].name, "something")
+ self.assertEqual(len(output.feature[0].flag_set[0].flag_group), 2)
+ self.assertEqual(
+ len(output.feature[0].flag_set[0].flag_group[0].expand_if_all_available
+ ), 0)
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].flag_group[0]
+ .expand_if_all_available, ["foo"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].flag_group[0].flag_group[0]
+ .expand_if_all_available, ["bar"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[0].flag_group[0].flag_group[0]
+ .flag, ["%{foo}"])
+
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].flag_group[0]
+ .expand_if_all_available, ["foo"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].flag_group[0]
+ .expand_if_none_available, ["foo"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].flag_group[0].flag_group[0]
+ .expand_if_none_available, ["bar"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].flag_group[0].flag_group[0]
+ .expand_if_all_available, ["bar"])
+ self.assertEqual(
+ output.feature[0].flag_set[0].flag_group[1].flag_group[0].flag_group[0]
+ .flag, ["%{foo}"])
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tools/migration/legacy_fields_migrator.py b/tools/migration/legacy_fields_migrator.py
new file mode 100644
index 0000000..cc1bb41
--- /dev/null
+++ b/tools/migration/legacy_fields_migrator.py
@@ -0,0 +1,69 @@
+"""Script migrating legacy CROSSTOOL fields into features.
+
+This script migrates a CROSSTOOL file to use only features to describe C++
+command lines. It is intended to run as the last step of a CROSSTOOL
+generation pipeline. Because it does not preserve comments, we assume
+CROSSTOOL owners will want to migrate their pipelines manually.
+"""
+
+# Tracking issue: https://github.com/bazelbuild/bazel/issues/5187
+#
+# Since C++ rules team is working on migrating CROSSTOOL from text proto into
+# Starlark, we advise CROSSTOOL owners to wait for the CROSSTOOL -> Starlark
+# migrator before they invest too much time into fixing their pipeline. Tracking
+# issue for the Starlark effort is
+# https://github.com/bazelbuild/bazel/issues/5380.
+
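+# A sketch of a typical invocation (the exact Bazel target label below is an
+# assumption and may differ in your checkout):
+#
+#   bazel run //tools/migration:legacy_fields_migrator -- \
+#       --input=/path/to/CROSSTOOL --output=/path/to/CROSSTOOL.migrated
+#
+# or, to rewrite the input file in place:
+#
+#   bazel run //tools/migration:legacy_fields_migrator -- \
+#       --input=/path/to/CROSSTOOL --inline
+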
+import os
+
+from absl import app
+from absl import flags
+from google.protobuf import text_format
+from third_party.com.github.bazelbuild.bazel.src.main.protobuf import crosstool_config_pb2
+from tools.migration.legacy_fields_migration_lib import migrate_legacy_fields
+
+flags.DEFINE_string("input", None, "Input CROSSTOOL file to be migrated")
+flags.DEFINE_string("output", None,
+ "Output path where to write migrated CROSSTOOL.")
+flags.DEFINE_boolean("inline", None, "Overwrite --input file")
+
+
+def main(unused_argv):
+ crosstool = crosstool_config_pb2.CrosstoolRelease()
+
+ input_filename = flags.FLAGS.input
+ output_filename = flags.FLAGS.output
+ inline = flags.FLAGS.inline
+
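+  # --input is always required; exactly one of --output or --inline must be
+  # passed.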
+  if not input_filename:
+    raise app.UsageError("ERROR: --input was not specified")
+  if not output_filename and not inline:
+    raise app.UsageError(
+        "ERROR: neither --output nor --inline was specified")
+  if output_filename and inline:
+    raise app.UsageError("ERROR: --output and --inline are mutually exclusive")
+
+ with open(to_absolute_path(input_filename), "r") as f:
+ input_text = f.read()
+
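+  # Parse the text-proto CROSSTOOL into a CrosstoolRelease message, rewrite
+  # the legacy fields in place, and serialize the result back to text proto.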
+ text_format.Merge(input_text, crosstool)
+
+ migrate_legacy_fields(crosstool)
+ output_text = text_format.MessageToString(crosstool)
+
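+  # With --inline the migrated text is written back over the input file;
+  # otherwise it goes to the path given by --output.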
+ resolved_output_filename = to_absolute_path(
+ input_filename if inline else output_filename)
+ with open(resolved_output_filename, "w") as f:
+ f.write(output_text)
+
+
+def to_absolute_path(path):
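+  """Expands ~ and resolves relative paths against the `bazel run` cwd.
+
+  Bazel sets BUILD_WORKING_DIRECTORY to the directory `bazel run` was invoked
+  from, so relative paths are resolved against it rather than against the
+  runfiles tree the script executes in.
+  """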
+  path = os.path.expanduser(path)
+  if os.path.isabs(path):
+    return path
+  if "BUILD_WORKING_DIRECTORY" in os.environ:
+    return os.path.join(os.environ["BUILD_WORKING_DIRECTORY"], path)
+  return path
+
+
+if __name__ == "__main__":
+ app.run(main)