-rw-r--r--  .cargo_vcs_info.json        2
-rw-r--r--  .github/FUNDING.yml         1
-rw-r--r--  .github/workflows/ci.yml   88
-rw-r--r--  Android.bp                 22
-rw-r--r--  Cargo.toml                 39
-rw-r--r--  Cargo.toml.orig            24
-rw-r--r--  LICENSE-APACHE             25
-rw-r--r--  LICENSE-MIT                 2
-rw-r--r--  METADATA                   12
-rw-r--r--  README.md                   4
-rw-r--r--  TEST_MAPPING              152
-rw-r--r--  build.rs                   38
-rw-r--r--  src/detection.rs            2
-rw-r--r--  src/extra.rs              100
-rw-r--r--  src/fallback.rs           368
-rw-r--r--  src/lib.rs                163
-rw-r--r--  src/location.rs            29
-rw-r--r--  src/marker.rs               4
-rw-r--r--  src/parse.rs               87
-rw-r--r--  src/rcvec.rs              142
-rw-r--r--  src/wrapper.rs            108
-rw-r--r--  tests/comments.rs           2
-rw-r--r--  tests/marker.rs             2
-rw-r--r--  tests/test.rs             104
-rw-r--r--  tests/test_size.rs         42
25 files changed, 1080 insertions, 482 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 59bd97e..9742e0b 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,6 +1,6 @@
{
"git": {
- "sha1": "7ecea3b88fe72672ca4270631b5d4585c0f7c715"
+ "sha1": "9f9328b7f016d3f7782ee9443dc441d63abe5b09"
},
"path_in_vcs": ""
}
\ No newline at end of file
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000..7507077
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1 @@
+github: dtolnay
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2063a5c..3e1bbba 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -3,68 +3,132 @@ name: CI
on:
push:
pull_request:
+ workflow_dispatch:
schedule: [cron: "40 1 * * *"]
+permissions:
+ contents: read
+
+env:
+ RUSTFLAGS: -Dwarnings
+
jobs:
+ pre_ci:
+ uses: dtolnay/.github/.github/workflows/pre_ci.yml@master
+
test:
name: Rust ${{matrix.rust}}
+ needs: pre_ci
+ if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
rust: [1.31.0, stable, beta]
+ timeout-minutes: 45
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{matrix.rust}}
- run: cargo test
- run: cargo test --no-default-features
- run: cargo test --features span-locations
- - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
+ - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
+ run: cargo test
+ env:
+ RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}}
+ - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
+ run: cargo test --no-default-features
+ env:
+ RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}}
nightly:
name: Rust nightly
+ needs: pre_ci
+ if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
+ timeout-minutes: 45
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
+ - name: Enable type layout randomization
+ run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV
- run: cargo test
- run: cargo test --no-default-features
- - run: cargo test --no-default-features -- --ignored # run the ignored test to make sure the `proc-macro` feature is disabled
+ - run: cargo test --no-default-features --test features -- --ignored make_sure_no_proc_macro # run the ignored test to make sure the `proc-macro` feature is disabled
- run: cargo test --features span-locations
- run: cargo test --manifest-path tests/ui/Cargo.toml
- - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
- - run: RUSTFLAGS='-Z allow-features=' cargo test
+ - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
+ run: cargo test
+ env:
+ RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}}
+ - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
+ run: cargo test --no-default-features
+ env:
+ RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}}
+ - name: RUSTFLAGS='-Z allow-features=' cargo test
+ run: cargo test
+ env:
+ RUSTFLAGS: -Z allow-features= ${{env.RUSTFLAGS}}
- run: cargo update -Z minimal-versions && cargo build
webassembly:
name: WebAssembly
+ needs: pre_ci
+ if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
+ timeout-minutes: 45
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
with:
target: wasm32-unknown-unknown
- run: cargo test --target wasm32-unknown-unknown --no-run
+ fuzz:
+ name: Fuzz
+ needs: pre_ci
+ if: needs.pre_ci.outputs.continue
+ runs-on: ubuntu-latest
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v3
+ - uses: dtolnay/rust-toolchain@nightly
+ - uses: dtolnay/install@cargo-fuzz
+ - run: cargo fuzz check
+
clippy:
name: Clippy
runs-on: ubuntu-latest
if: github.event_name != 'pull_request'
+ timeout-minutes: 45
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@clippy
- run: cargo clippy --tests -- -Dclippy::all -Dclippy::pedantic
- run: cargo clippy --tests --all-features -- -Dclippy::all -Dclippy::pedantic
+ miri:
+ name: Miri
+ needs: pre_ci
+ if: needs.pre_ci.outputs.continue
+ runs-on: ubuntu-latest
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v3
+ - uses: dtolnay/rust-toolchain@miri
+ - run: cargo miri test
+ env:
+ MIRIFLAGS: -Zmiri-strict-provenance
+
outdated:
name: Outdated
runs-on: ubuntu-latest
if: github.event_name != 'pull_request'
+ timeout-minutes: 45
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: dtolnay/install@cargo-outdated
- - run: cargo outdated --exit-code 1
+ - run: cargo outdated --workspace --exit-code 1
+ - run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1
diff --git a/Android.bp b/Android.bp
index 410a4f4..9033168 100644
--- a/Android.bp
+++ b/Android.bp
@@ -41,7 +41,7 @@ rust_library_host {
name: "libproc_macro2",
crate_name: "proc_macro2",
cargo_env_compat: true,
- cargo_pkg_version: "1.0.36",
+ cargo_pkg_version: "1.0.54",
srcs: ["src/lib.rs"],
edition: "2018",
features: [
@@ -50,21 +50,24 @@ rust_library_host {
"span-locations",
],
cfgs: [
+ "proc_macro_span",
"span_locations",
"use_proc_macro",
"wrap_proc_macro",
],
rustlibs: [
- "libunicode_xid",
+ "libunicode_ident",
],
compile_multilib: "first",
+ product_available: true,
+ vendor_available: true,
}
rust_defaults {
name: "proc-macro2_test_defaults",
crate_name: "proc_macro2",
cargo_env_compat: true,
- cargo_pkg_version: "1.0.36",
+ cargo_pkg_version: "1.0.54",
test_suites: ["general-tests"],
auto_gen_config: true,
edition: "2018",
@@ -74,6 +77,7 @@ rust_defaults {
"span-locations",
],
cfgs: [
+ "proc_macro_span",
"span_locations",
"use_proc_macro",
"wrap_proc_macro",
@@ -81,8 +85,9 @@ rust_defaults {
rustlibs: [
"libproc_macro2",
"libquote",
- "libunicode_xid",
+ "libunicode_ident",
],
+ proc_macros: ["librustversion"],
}
rust_test_host {
@@ -129,3 +134,12 @@ rust_test_host {
unit_test: true,
},
}
+
+rust_test_host {
+ name: "proc-macro2_test_tests_test_size",
+ defaults: ["proc-macro2_test_defaults"],
+ srcs: ["tests/test_size.rs"],
+ test_options: {
+ unit_test: true,
+ },
+}
diff --git a/Cargo.toml b/Cargo.toml
index 1272f37..f4269fd 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,29 +13,52 @@
edition = "2018"
rust-version = "1.31"
name = "proc-macro2"
-version = "1.0.36"
-authors = ["David Tolnay <dtolnay@gmail.com>", "Alex Crichton <alex@alexcrichton.com>"]
+version = "1.0.54"
+authors = [
+ "David Tolnay <dtolnay@gmail.com>",
+ "Alex Crichton <alex@alexcrichton.com>",
+]
autobenches = false
-description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case."
documentation = "https://docs.rs/proc-macro2"
readme = "README.md"
-keywords = ["macros"]
+keywords = [
+ "macros",
+ "syn",
+]
categories = ["development-tools::procedural-macro-helpers"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/proc-macro2"
+
[package.metadata.docs.rs]
-rustc-args = ["--cfg", "procmacro2_semver_exempt"]
-rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"]
+rustc-args = [
+ "--cfg",
+ "procmacro2_semver_exempt",
+]
+rustdoc-args = [
+ "--cfg",
+ "procmacro2_semver_exempt",
+ "--cfg",
+ "doc_cfg",
+]
targets = ["x86_64-unknown-linux-gnu"]
[package.metadata.playground]
features = ["span-locations"]
-[dependencies.unicode-xid]
-version = "0.2"
+
+[lib]
+doc-scrape-examples = false
+
+[dependencies.unicode-ident]
+version = "1.0"
+
[dev-dependencies.quote]
version = "1.0"
default_features = false
+[dev-dependencies.rustversion]
+version = "1"
+
[features]
default = ["proc-macro"]
nightly = []
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index 401bfaf..385bf0f 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,20 +1,16 @@
[package]
name = "proc-macro2"
-version = "1.0.36" # remember to update html_root_url
+version = "1.0.54" # remember to update html_root_url
authors = ["David Tolnay <dtolnay@gmail.com>", "Alex Crichton <alex@alexcrichton.com>"]
-license = "MIT OR Apache-2.0"
-readme = "README.md"
-keywords = ["macros"]
-repository = "https://github.com/dtolnay/proc-macro2"
-documentation = "https://docs.rs/proc-macro2"
+autobenches = false
categories = ["development-tools::procedural-macro-helpers"]
+description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case."
+documentation = "https://docs.rs/proc-macro2"
edition = "2018"
-autobenches = false
+keywords = ["macros", "syn"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/proc-macro2"
rust-version = "1.31"
-description = """
-A substitute implementation of the compiler's `proc_macro` API to decouple
-token-based libraries from the procedural macro use case.
-"""
[package.metadata.docs.rs]
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
@@ -25,10 +21,11 @@ targets = ["x86_64-unknown-linux-gnu"]
features = ["span-locations"]
[dependencies]
-unicode-xid = "0.2"
+unicode-ident = "1.0"
[dev-dependencies]
quote = { version = "1.0", default_features = false }
+rustversion = "1"
[features]
proc-macro = []
@@ -41,6 +38,9 @@ span-locations = []
# This feature no longer means anything.
nightly = []
+[lib]
+doc-scrape-examples = false
+
[workspace]
members = ["benches/bench-libproc-macro", "tests/ui"]
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
index 16fe87b..1b5ec8b 100644
--- a/LICENSE-APACHE
+++ b/LICENSE-APACHE
@@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/LICENSE-MIT b/LICENSE-MIT
index 39e0ed6..31aa793 100644
--- a/LICENSE-MIT
+++ b/LICENSE-MIT
@@ -1,5 +1,3 @@
-Copyright (c) 2014 Alex Crichton
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
diff --git a/METADATA b/METADATA
index e2ded2b..cb301c8 100644
--- a/METADATA
+++ b/METADATA
@@ -1,3 +1,7 @@
+# This project was upgraded with external_updater.
+# Usage: tools/external_updater/updater.sh update rust/crates/proc-macro2
+# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+
name: "proc-macro2"
description: "A substitute implementation of the compiler\'s `proc_macro` API to decouple token-based libraries from the procedural macro use case."
third_party {
@@ -7,13 +11,13 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://static.crates.io/crates/proc-macro2/proc-macro2-1.0.36.crate"
+ value: "https://static.crates.io/crates/proc-macro2/proc-macro2-1.0.54.crate"
}
- version: "1.0.36"
+ version: "1.0.54"
license_type: NOTICE
last_upgrade_date {
- year: 2022
+ year: 2023
month: 3
- day: 1
+ day: 30
}
}
diff --git a/README.md b/README.md
index 35e1876..131ba51 100644
--- a/README.md
+++ b/README.md
@@ -2,8 +2,8 @@
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/proc--macro2-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/proc-macro2)
[<img alt="crates.io" src="https://img.shields.io/crates/v/proc-macro2.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/proc-macro2)
-[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/proc-macro2)
-[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/proc-macro2/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/proc-macro2)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/proc-macro2/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
This library serves two purposes:
diff --git a/TEST_MAPPING b/TEST_MAPPING
index 76a8253..7c46892 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -11,6 +11,9 @@
"path": "external/rust/crates/argh"
},
{
+ "path": "external/rust/crates/async-stream"
+ },
+ {
"path": "external/rust/crates/base64"
},
{
@@ -20,12 +23,30 @@
"path": "external/rust/crates/bytes"
},
{
+ "path": "external/rust/crates/coset"
+ },
+ {
"path": "external/rust/crates/either"
},
{
+ "path": "external/rust/crates/futures-channel"
+ },
+ {
+ "path": "external/rust/crates/futures-executor"
+ },
+ {
+ "path": "external/rust/crates/futures-test"
+ },
+ {
"path": "external/rust/crates/futures-util"
},
{
+ "path": "external/rust/crates/hashbrown"
+ },
+ {
+ "path": "external/rust/crates/hashlink"
+ },
+ {
"path": "external/rust/crates/jni"
},
{
@@ -72,160 +93,75 @@
},
{
"path": "external/rust/crates/url"
- }
- ],
- "presubmit": [
- {
- "name": "ZipFuseTest"
- },
- {
- "name": "apkdmverity.test"
- },
- {
- "name": "authfs_device_test_src_lib"
- },
- {
- "name": "diced_open_dice_cbor_test"
- },
- {
- "name": "diced_sample_inputs_test"
- },
- {
- "name": "diced_test"
- },
- {
- "name": "diced_utils_test"
- },
- {
- "name": "diced_vendor_test"
- },
- {
- "name": "doh_unit_test"
- },
- {
- "name": "keystore2_crypto_test_rust"
- },
- {
- "name": "keystore2_km_compat_test"
- },
- {
- "name": "keystore2_selinux_concurrency_test"
- },
- {
- "name": "keystore2_selinux_test"
- },
- {
- "name": "keystore2_test"
- },
- {
- "name": "keystore2_test_utils_test"
- },
- {
- "name": "keystore2_vintf_test"
- },
- {
- "name": "legacykeystore_test"
- },
- {
- "name": "libapkverify.integration_test"
- },
- {
- "name": "libapkverify.test"
- },
- {
- "name": "libcert_request_validator_tests"
- },
- {
- "name": "libidsig.test"
- },
- {
- "name": "librustutils_test"
- },
- {
- "name": "microdroid_manager_test"
- },
- {
- "name": "rustBinderTest"
- },
- {
- "name": "virtualizationservice_device_test"
- }
- ],
- "presubmit-rust": [
- {
- "name": "ZipFuseTest"
- },
- {
- "name": "apkdmverity.test"
},
{
- "name": "authfs_device_test_src_lib"
+ "path": "external/rust/crates/virtio-drivers"
},
{
- "name": "diced_open_dice_cbor_test"
+ "path": "external/rust/crates/zerocopy"
},
{
- "name": "diced_sample_inputs_test"
+ "path": "external/rust/crates/zeroize"
},
{
- "name": "diced_test"
+ "path": "external/uwb/src"
},
{
- "name": "diced_utils_test"
+ "path": "packages/modules/DnsResolver"
},
{
- "name": "diced_vendor_test"
+ "path": "packages/modules/Virtualization/apkdmverity"
},
{
- "name": "doh_unit_test"
+ "path": "packages/modules/Virtualization/authfs"
},
{
- "name": "keystore2_crypto_test_rust"
+ "path": "packages/modules/Virtualization/avmd"
},
{
- "name": "keystore2_km_compat_test"
+ "path": "packages/modules/Virtualization/encryptedstore"
},
{
- "name": "keystore2_selinux_concurrency_test"
+ "path": "packages/modules/Virtualization/libs/apexutil"
},
{
- "name": "keystore2_selinux_test"
+ "path": "packages/modules/Virtualization/libs/apkverify"
},
{
- "name": "keystore2_test"
+ "path": "packages/modules/Virtualization/libs/devicemapper"
},
{
- "name": "keystore2_test_utils_test"
+ "path": "packages/modules/Virtualization/microdroid_manager"
},
{
- "name": "keystore2_vintf_test"
+ "path": "packages/modules/Virtualization/virtualizationmanager"
},
{
- "name": "legacykeystore_test"
+ "path": "packages/modules/Virtualization/vm"
},
{
- "name": "libapkverify.integration_test"
+ "path": "packages/modules/Virtualization/zipfuse"
},
{
- "name": "libapkverify.test"
+ "path": "system/keymint/derive"
},
{
- "name": "libcert_request_validator_tests"
+ "path": "system/keymint/hal"
},
{
- "name": "libidsig.test"
+ "path": "system/security/diced"
},
{
- "name": "librustutils_test"
+ "path": "system/security/keystore2"
},
{
- "name": "microdroid_manager_test"
+ "path": "system/security/keystore2/legacykeystore"
},
{
- "name": "rustBinderTest"
+ "path": "system/security/keystore2/selinux"
},
{
- "name": "virtualizationservice_device_test"
+ "path": "system/security/keystore2/src/crypto"
}
]
}
diff --git a/build.rs b/build.rs
index 946be6e..59505a5 100644
--- a/build.rs
+++ b/build.rs
@@ -41,7 +41,6 @@
// 1.57+.
use std::env;
-use std::iter;
use std::process::{self, Command};
use std::str;
@@ -85,6 +84,10 @@ fn main() {
println!("cargo:rustc-cfg=no_hygiene");
}
+ if version.minor < 47 {
+ println!("cargo:rustc-cfg=no_ident_new_raw");
+ }
+
if version.minor < 54 {
println!("cargo:rustc-cfg=no_literal_from_str");
}
@@ -97,6 +100,10 @@ fn main() {
println!("cargo:rustc-cfg=no_is_available");
}
+ if version.minor < 66 {
+ println!("cargo:rustc-cfg=no_source_text");
+ }
+
let target = env::var("TARGET").unwrap();
if !enable_use_proc_macro(&target) {
return;
@@ -108,7 +115,10 @@ fn main() {
println!("cargo:rustc-cfg=wrap_proc_macro");
}
- if version.nightly && feature_allowed("proc_macro_span") {
+ if version.nightly
+ && feature_allowed("proc_macro_span")
+ && feature_allowed("proc_macro_span_shrink")
+ {
println!("cargo:rustc-cfg=proc_macro_span");
}
@@ -154,23 +164,13 @@ fn feature_allowed(feature: &str) -> bool {
let flags_var;
let flags_var_string;
- let mut flags_var_split;
- let mut flags_none;
- let flags: &mut dyn Iterator<Item = &str> =
- if let Some(encoded_rustflags) = env::var_os("CARGO_ENCODED_RUSTFLAGS") {
- flags_var = encoded_rustflags;
- flags_var_string = flags_var.to_string_lossy();
- flags_var_split = flags_var_string.split('\x1f');
- &mut flags_var_split
- } else if let Some(rustflags) = env::var_os("RUSTFLAGS") {
- flags_var = rustflags;
- flags_var_string = flags_var.to_string_lossy();
- flags_var_split = flags_var_string.split(' ');
- &mut flags_var_split
- } else {
- flags_none = iter::empty();
- &mut flags_none
- };
+ let flags = if let Some(encoded_rustflags) = env::var_os("CARGO_ENCODED_RUSTFLAGS") {
+ flags_var = encoded_rustflags;
+ flags_var_string = flags_var.to_string_lossy();
+ flags_var_string.split('\x1f')
+ } else {
+ return true;
+ };
for mut flag in flags {
if flag.starts_with("-Z") {
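Note on the build.rs hunk above: feature_allowed now reads only CARGO_ENCODED_RUSTFLAGS (the \x1f-separated form Cargo passes to build scripts) and simply returns true when it is absent, dropping the plain-RUSTFLAGS fallback. A minimal standalone sketch of the same check; the handling of -Z allow-features= below mirrors the surrounding code but is an illustrative reconstruction, not the crate's exact source:

    use std::env;

    // Returns true unless the encoded RUSTFLAGS contain an
    // `-Z allow-features=...` list that omits `feature`.
    fn feature_allowed(feature: &str) -> bool {
        let encoded = match env::var_os("CARGO_ENCODED_RUSTFLAGS") {
            Some(var) => var,
            None => return true, // no flags at all: everything allowed
        };
        let encoded = encoded.to_string_lossy().into_owned();
        for mut flag in encoded.split('\x1f') {
            if flag.starts_with("-Z") {
                flag = &flag["-Z".len()..];
            }
            if flag.starts_with("allow-features=") {
                flag = &flag["allow-features=".len()..];
                return flag.split(',').any(|allowed| allowed == feature);
            }
        }
        // No allow-features restriction present.
        true
    }

    fn main() {
        println!("proc_macro_span allowed: {}", feature_allowed("proc_macro_span"));
    }
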
diff --git a/src/detection.rs b/src/detection.rs
index d139b73..beba7b2 100644
--- a/src/detection.rs
+++ b/src/detection.rs
@@ -1,4 +1,4 @@
-use std::sync::atomic::{AtomicUsize, Ordering};
+use core::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Once;
static WORKS: AtomicUsize = AtomicUsize::new(0);
diff --git a/src/extra.rs b/src/extra.rs
new file mode 100644
index 0000000..cbce162
--- /dev/null
+++ b/src/extra.rs
@@ -0,0 +1,100 @@
+//! Items which do not have a correspondence to any API in the proc_macro crate,
+//! but are necessary to include in proc-macro2.
+
+use crate::fallback;
+use crate::imp;
+use crate::marker::Marker;
+use crate::Span;
+use core::fmt::{self, Debug};
+
+/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
+/// (in a more compact representation than holding those 2 spans individually.
+///
+/// [`Group`]: crate::Group
+#[derive(Copy, Clone)]
+pub struct DelimSpan {
+ inner: DelimSpanEnum,
+ _marker: Marker,
+}
+
+#[derive(Copy, Clone)]
+enum DelimSpanEnum {
+ #[cfg(wrap_proc_macro)]
+ Compiler {
+ join: proc_macro::Span,
+ #[cfg(not(no_group_open_close))]
+ open: proc_macro::Span,
+ #[cfg(not(no_group_open_close))]
+ close: proc_macro::Span,
+ },
+ Fallback(fallback::Span),
+}
+
+impl DelimSpan {
+ pub(crate) fn new(group: &imp::Group) -> Self {
+ #[cfg(wrap_proc_macro)]
+ let inner = match group {
+ imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
+ join: group.span(),
+ #[cfg(not(no_group_open_close))]
+ open: group.span_open(),
+ #[cfg(not(no_group_open_close))]
+ close: group.span_close(),
+ },
+ imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
+ };
+
+ #[cfg(not(wrap_proc_macro))]
+ let inner = DelimSpanEnum::Fallback(group.span());
+
+ DelimSpan {
+ inner,
+ _marker: Marker,
+ }
+ }
+
+ /// Returns a span covering the entire delimited group.
+ pub fn join(&self) -> Span {
+ match &self.inner {
+ #[cfg(wrap_proc_macro)]
+ DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
+ DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
+ }
+ }
+
+ /// Returns a span for the opening punctuation of the group only.
+ pub fn open(&self) -> Span {
+ match &self.inner {
+ #[cfg(wrap_proc_macro)]
+ DelimSpanEnum::Compiler {
+ #[cfg(not(no_group_open_close))]
+ open,
+ #[cfg(no_group_open_close)]
+ join: open,
+ ..
+ } => Span::_new(imp::Span::Compiler(*open)),
+ DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
+ }
+ }
+
+ /// Returns a span for the closing punctuation of the group only.
+ pub fn close(&self) -> Span {
+ match &self.inner {
+ #[cfg(wrap_proc_macro)]
+ DelimSpanEnum::Compiler {
+ #[cfg(not(no_group_open_close))]
+ close,
+ #[cfg(no_group_open_close)]
+ join: close,
+ ..
+ } => Span::_new(imp::Span::Compiler(*close)),
+ DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
+ }
+ }
+}
+
+impl Debug for DelimSpan {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Debug::fmt(&self.join(), f)
+ }
+}
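The new src/extra.rs above introduces DelimSpan, exposed through Group::delim_span() later in this diff. A hypothetical usage sketch outside a procedural macro, assuming a proc-macro2 = "1.0.54" dependency:

    use proc_macro2::{TokenStream, TokenTree};

    fn main() {
        let stream: TokenStream = "(a, b)".parse().unwrap();
        for tt in stream {
            if let TokenTree::Group(group) = tt {
                let delim_span = group.delim_span();
                // join() covers the whole delimited group; open()/close()
                // cover only the opening and closing punctuation, e.g. for
                // pointing diagnostics at a mismatched bracket.
                println!(
                    "{:?} {:?} {:?}",
                    delim_span.join(),
                    delim_span.open(),
                    delim_span.close()
                );
            }
        }
    }
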
diff --git a/src/fallback.rs b/src/fallback.rs
index ac5437d..29d3933 100644
--- a/src/fallback.rs
+++ b/src/fallback.rs
@@ -1,19 +1,19 @@
+#[cfg(span_locations)]
+use crate::location::LineColumn;
use crate::parse::{self, Cursor};
+use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut};
use crate::{Delimiter, Spacing, TokenTree};
+#[cfg(all(span_locations, not(fuzzing)))]
+use core::cell::RefCell;
#[cfg(span_locations)]
-use std::cell::RefCell;
-#[cfg(span_locations)]
-use std::cmp;
-use std::fmt::{self, Debug, Display};
-use std::iter::FromIterator;
-use std::mem;
-use std::ops::RangeBounds;
-#[cfg(procmacro2_semver_exempt)]
-use std::path::Path;
+use core::cmp;
+use core::fmt::{self, Debug, Display, Write};
+use core::iter::FromIterator;
+use core::mem::ManuallyDrop;
+use core::ops::RangeBounds;
+use core::ptr;
+use core::str::FromStr;
use std::path::PathBuf;
-use std::str::FromStr;
-use std::vec;
-use unicode_xid::UnicodeXID;
/// Force use of proc-macro2's fallback implementation of the API for now, even
/// if the compiler's implementation is available.
@@ -31,7 +31,7 @@ pub fn unforce() {
#[derive(Clone)]
pub(crate) struct TokenStream {
- inner: Vec<TokenTree>,
+ inner: RcVec<TokenTree>,
}
#[derive(Debug)]
@@ -53,71 +53,69 @@ impl LexError {
impl TokenStream {
pub fn new() -> Self {
- TokenStream { inner: Vec::new() }
+ TokenStream {
+ inner: RcVecBuilder::new().build(),
+ }
}
pub fn is_empty(&self) -> bool {
self.inner.len() == 0
}
- fn take_inner(&mut self) -> Vec<TokenTree> {
- mem::replace(&mut self.inner, Vec::new())
+ fn take_inner(self) -> RcVecBuilder<TokenTree> {
+ let nodrop = ManuallyDrop::new(self);
+ unsafe { ptr::read(&nodrop.inner) }.make_owned()
}
+}
- fn push_token(&mut self, token: TokenTree) {
- // https://github.com/dtolnay/proc-macro2/issues/235
- match token {
- #[cfg(not(no_bind_by_move_pattern_guard))]
- TokenTree::Literal(crate::Literal {
- #[cfg(wrap_proc_macro)]
- inner: crate::imp::Literal::Fallback(literal),
- #[cfg(not(wrap_proc_macro))]
- inner: literal,
- ..
- }) if literal.repr.starts_with('-') => {
- push_negative_literal(self, literal);
- }
- #[cfg(no_bind_by_move_pattern_guard)]
- TokenTree::Literal(crate::Literal {
- #[cfg(wrap_proc_macro)]
- inner: crate::imp::Literal::Fallback(literal),
- #[cfg(not(wrap_proc_macro))]
- inner: literal,
- ..
- }) => {
- if literal.repr.starts_with('-') {
- push_negative_literal(self, literal);
- } else {
- self.inner
- .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
- }
- }
- _ => self.inner.push(token),
+fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) {
+ // https://github.com/dtolnay/proc-macro2/issues/235
+ match token {
+ #[cfg(not(no_bind_by_move_pattern_guard))]
+ TokenTree::Literal(crate::Literal {
+ #[cfg(wrap_proc_macro)]
+ inner: crate::imp::Literal::Fallback(literal),
+ #[cfg(not(wrap_proc_macro))]
+ inner: literal,
+ ..
+ }) if literal.repr.starts_with('-') => {
+ push_negative_literal(vec, literal);
}
-
- #[cold]
- fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
- literal.repr.remove(0);
- let mut punct = crate::Punct::new('-', Spacing::Alone);
- punct.set_span(crate::Span::_new_stable(literal.span));
- stream.inner.push(TokenTree::Punct(punct));
- stream
- .inner
- .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
+ #[cfg(no_bind_by_move_pattern_guard)]
+ TokenTree::Literal(crate::Literal {
+ #[cfg(wrap_proc_macro)]
+ inner: crate::imp::Literal::Fallback(literal),
+ #[cfg(not(wrap_proc_macro))]
+ inner: literal,
+ ..
+ }) => {
+ if literal.repr.starts_with('-') {
+ push_negative_literal(vec, literal);
+ } else {
+ vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal)));
+ }
}
+ _ => vec.push(token),
}
-}
-impl From<Vec<TokenTree>> for TokenStream {
- fn from(inner: Vec<TokenTree>) -> Self {
- TokenStream { inner }
+ #[cold]
+ fn push_negative_literal(mut vec: RcVecMut<TokenTree>, mut literal: Literal) {
+ literal.repr.remove(0);
+ let mut punct = crate::Punct::new('-', Spacing::Alone);
+ punct.set_span(crate::Span::_new_fallback(literal.span));
+ vec.push(TokenTree::Punct(punct));
+ vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal)));
}
}
// Nonrecursive to prevent stack overflow.
impl Drop for TokenStream {
fn drop(&mut self) {
- while let Some(token) = self.inner.pop() {
+ let mut inner = match self.inner.get_mut() {
+ Some(inner) => inner,
+ None => return,
+ };
+ while let Some(token) = inner.pop() {
let group = match token {
TokenTree::Group(group) => group.inner,
_ => continue,
@@ -127,19 +125,49 @@ impl Drop for TokenStream {
crate::imp::Group::Fallback(group) => group,
crate::imp::Group::Compiler(_) => continue,
};
- let mut group = group;
- self.inner.extend(group.stream.take_inner());
+ inner.extend(group.stream.take_inner());
+ }
+ }
+}
+
+pub(crate) struct TokenStreamBuilder {
+ inner: RcVecBuilder<TokenTree>,
+}
+
+impl TokenStreamBuilder {
+ pub fn new() -> Self {
+ TokenStreamBuilder {
+ inner: RcVecBuilder::new(),
+ }
+ }
+
+ pub fn with_capacity(cap: usize) -> Self {
+ TokenStreamBuilder {
+ inner: RcVecBuilder::with_capacity(cap),
+ }
+ }
+
+ pub fn push_token_from_parser(&mut self, tt: TokenTree) {
+ self.inner.push(tt);
+ }
+
+ pub fn build(self) -> TokenStream {
+ TokenStream {
+ inner: self.inner.build(),
}
}
}
#[cfg(span_locations)]
fn get_cursor(src: &str) -> Cursor {
+ #[cfg(fuzzing)]
+ return Cursor { rest: src, off: 1 };
+
// Create a dummy file & add it to the source map
+ #[cfg(not(fuzzing))]
SOURCE_MAP.with(|cm| {
let mut cm = cm.borrow_mut();
- let name = format!("<parsed string {}>", cm.files.len());
- let span = cm.add_file(&name, src);
+ let span = cm.add_file(src);
Cursor {
rest: src,
off: span.lo,
@@ -157,7 +185,13 @@ impl FromStr for TokenStream {
fn from_str(src: &str) -> Result<TokenStream, LexError> {
// Create a dummy file & add it to the source map
- let cursor = get_cursor(src);
+ let mut cursor = get_cursor(src);
+
+ // Strip a byte order mark if present
+ const BYTE_ORDER_MARK: &str = "\u{feff}";
+ if cursor.starts_with(BYTE_ORDER_MARK) {
+ cursor = cursor.advance(BYTE_ORDER_MARK.len());
+ }
parse::token_stream(cursor)
}
@@ -201,7 +235,7 @@ impl Debug for TokenStream {
#[cfg(use_proc_macro)]
impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> Self {
inner
.to_string()
.parse()
@@ -211,7 +245,7 @@ impl From<proc_macro::TokenStream> for TokenStream {
#[cfg(use_proc_macro)]
impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> Self {
inner
.to_string()
.parse()
@@ -220,10 +254,12 @@ impl From<TokenStream> for proc_macro::TokenStream {
}
impl From<TokenTree> for TokenStream {
- fn from(tree: TokenTree) -> TokenStream {
- let mut stream = TokenStream::new();
- stream.push_token(tree);
- stream
+ fn from(tree: TokenTree) -> Self {
+ let mut stream = RcVecBuilder::new();
+ push_token_from_proc_macro(stream.as_mut(), tree);
+ TokenStream {
+ inner: stream.build(),
+ }
}
}
@@ -237,35 +273,38 @@ impl FromIterator<TokenTree> for TokenStream {
impl FromIterator<TokenStream> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut v = Vec::new();
+ let mut v = RcVecBuilder::new();
- for mut stream in streams {
+ for stream in streams {
v.extend(stream.take_inner());
}
- TokenStream { inner: v }
+ TokenStream { inner: v.build() }
}
}
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
- tokens.into_iter().for_each(|token| self.push_token(token));
+ let mut vec = self.inner.make_mut();
+ tokens
+ .into_iter()
+ .for_each(|token| push_token_from_proc_macro(vec.as_mut(), token));
}
}
impl Extend<TokenStream> for TokenStream {
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- self.inner.extend(streams.into_iter().flatten());
+ self.inner.make_mut().extend(streams.into_iter().flatten());
}
}
-pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+pub(crate) type TokenTreeIter = RcVecIntoIter<TokenTree>;
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
- fn into_iter(mut self) -> TokenTreeIter {
+ fn into_iter(self) -> TokenTreeIter {
self.take_inner().into_iter()
}
}
@@ -296,35 +335,27 @@ impl Debug for SourceFile {
}
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub(crate) struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
-#[cfg(span_locations)]
+#[cfg(all(span_locations, not(fuzzing)))]
thread_local! {
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
// NOTE: We start with a single dummy file which all call_site() and
// def_site() spans reference.
files: vec![FileInfo {
- #[cfg(procmacro2_semver_exempt)]
- name: "<unspecified>".to_owned(),
+ source_text: String::new(),
span: Span { lo: 0, hi: 0 },
lines: vec![0],
}],
});
}
-#[cfg(span_locations)]
+#[cfg(all(span_locations, not(fuzzing)))]
struct FileInfo {
- #[cfg(procmacro2_semver_exempt)]
- name: String,
+ source_text: String,
span: Span,
lines: Vec<usize>,
}
-#[cfg(span_locations)]
+#[cfg(all(span_locations, not(fuzzing)))]
impl FileInfo {
fn offset_line_column(&self, offset: usize) -> LineColumn {
assert!(self.span_within(Span {
@@ -347,11 +378,17 @@ impl FileInfo {
fn span_within(&self, span: Span) -> bool {
span.lo >= self.span.lo && span.hi <= self.span.hi
}
+
+ fn source_text(&self, span: Span) -> String {
+ let lo = (span.lo - self.span.lo) as usize;
+ let hi = (span.hi - self.span.lo) as usize;
+ self.source_text[lo..hi].to_owned()
+ }
}
/// Computes the offsets of each line in the given source string
/// and the total number of characters
-#[cfg(span_locations)]
+#[cfg(all(span_locations, not(fuzzing)))]
fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
let mut lines = vec![0];
let mut total = 0;
@@ -366,12 +403,12 @@ fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
(total, lines)
}
-#[cfg(span_locations)]
+#[cfg(all(span_locations, not(fuzzing)))]
struct SourceMap {
files: Vec<FileInfo>,
}
-#[cfg(span_locations)]
+#[cfg(all(span_locations, not(fuzzing)))]
impl SourceMap {
fn next_start_pos(&self) -> u32 {
// Add 1 so there's always space between files.
@@ -381,35 +418,45 @@ impl SourceMap {
self.files.last().unwrap().span.hi + 1
}
- fn add_file(&mut self, name: &str, src: &str) -> Span {
+ fn add_file(&mut self, src: &str) -> Span {
let (len, lines) = lines_offsets(src);
let lo = self.next_start_pos();
- // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ // XXX(nika): Should we bother doing a checked cast or checked add here?
let span = Span {
lo,
hi: lo + (len as u32),
};
self.files.push(FileInfo {
- #[cfg(procmacro2_semver_exempt)]
- name: name.to_owned(),
+ source_text: src.to_owned(),
span,
lines,
});
- #[cfg(not(procmacro2_semver_exempt))]
- let _ = name;
-
span
}
+ #[cfg(procmacro2_semver_exempt)]
+ fn filepath(&self, span: Span) -> PathBuf {
+ for (i, file) in self.files.iter().enumerate() {
+ if file.span_within(span) {
+ return PathBuf::from(if i == 0 {
+ "<unspecified>".to_owned()
+ } else {
+ format!("<parsed string {}>", i)
+ });
+ }
+ }
+ unreachable!("Invalid span with no related FileInfo!");
+ }
+
fn fileinfo(&self, span: Span) -> &FileInfo {
for file in &self.files {
if file.span_within(span) {
return file;
}
}
- panic!("Invalid span with no related FileInfo!");
+ unreachable!("Invalid span with no related FileInfo!");
}
}
@@ -455,17 +502,25 @@ impl Span {
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
+ #[cfg(fuzzing)]
+ return SourceFile {
+ path: PathBuf::from("<unspecified>"),
+ };
+
+ #[cfg(not(fuzzing))]
SOURCE_MAP.with(|cm| {
let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- SourceFile {
- path: Path::new(&fi.name).to_owned(),
- }
+ let path = cm.filepath(*self);
+ SourceFile { path }
})
}
#[cfg(span_locations)]
pub fn start(&self) -> LineColumn {
+ #[cfg(fuzzing)]
+ return LineColumn { line: 0, column: 0 };
+
+ #[cfg(not(fuzzing))]
SOURCE_MAP.with(|cm| {
let cm = cm.borrow();
let fi = cm.fileinfo(*self);
@@ -475,6 +530,10 @@ impl Span {
#[cfg(span_locations)]
pub fn end(&self) -> LineColumn {
+ #[cfg(fuzzing)]
+ return LineColumn { line: 0, column: 0 };
+
+ #[cfg(not(fuzzing))]
SOURCE_MAP.with(|cm| {
let cm = cm.borrow();
let fi = cm.fileinfo(*self);
@@ -482,6 +541,26 @@ impl Span {
})
}
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn before(&self) -> Span {
+ Span {
+ #[cfg(span_locations)]
+ lo: self.lo,
+ #[cfg(span_locations)]
+ hi: self.lo,
+ }
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn after(&self) -> Span {
+ Span {
+ #[cfg(span_locations)]
+ lo: self.hi,
+ #[cfg(span_locations)]
+ hi: self.hi,
+ }
+ }
+
#[cfg(not(span_locations))]
pub fn join(&self, _other: Span) -> Option<Span> {
Some(Span {})
@@ -489,6 +568,13 @@ impl Span {
#[cfg(span_locations)]
pub fn join(&self, other: Span) -> Option<Span> {
+ #[cfg(fuzzing)]
+ return {
+ let _ = other;
+ None
+ };
+
+ #[cfg(not(fuzzing))]
SOURCE_MAP.with(|cm| {
let cm = cm.borrow();
// If `other` is not within the same FileInfo as us, return None.
@@ -503,12 +589,32 @@ impl Span {
}
#[cfg(not(span_locations))]
- fn first_byte(self) -> Self {
+ pub fn source_text(&self) -> Option<String> {
+ None
+ }
+
+ #[cfg(span_locations)]
+ pub fn source_text(&self) -> Option<String> {
+ #[cfg(fuzzing)]
+ return None;
+
+ #[cfg(not(fuzzing))]
+ {
+ if self.is_call_site() {
+ None
+ } else {
+ Some(SOURCE_MAP.with(|cm| cm.borrow().fileinfo(*self).source_text(*self)))
+ }
+ }
+ }
+
+ #[cfg(not(span_locations))]
+ pub(crate) fn first_byte(self) -> Self {
self
}
#[cfg(span_locations)]
- fn first_byte(self) -> Self {
+ pub(crate) fn first_byte(self) -> Self {
Span {
lo: self.lo,
hi: cmp::min(self.lo.saturating_add(1), self.hi),
@@ -516,17 +622,22 @@ impl Span {
}
#[cfg(not(span_locations))]
- fn last_byte(self) -> Self {
+ pub(crate) fn last_byte(self) -> Self {
self
}
#[cfg(span_locations)]
- fn last_byte(self) -> Self {
+ pub(crate) fn last_byte(self) -> Self {
Span {
lo: cmp::max(self.hi.saturating_sub(1), self.lo),
hi: self.hi,
}
}
+
+ #[cfg(span_locations)]
+ fn is_call_site(&self) -> bool {
+ self.lo == 0 && self.hi == 0
+ }
}
impl Debug for Span {
@@ -542,7 +653,7 @@ impl Debug for Span {
pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
#[cfg(span_locations)]
{
- if span.lo == 0 && span.hi == 0 {
+ if span.is_call_site() {
return;
}
}
@@ -639,7 +750,7 @@ pub(crate) struct Ident {
impl Ident {
fn _new(string: &str, raw: bool, span: Span) -> Self {
- validate_ident(string);
+ validate_ident(string, raw);
Ident {
sym: string.to_owned(),
@@ -666,27 +777,19 @@ impl Ident {
}
pub(crate) fn is_ident_start(c: char) -> bool {
- ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z')
- || c == '_'
- || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ c == '_' || unicode_ident::is_xid_start(c)
}
pub(crate) fn is_ident_continue(c: char) -> bool {
- ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z')
- || c == '_'
- || ('0' <= c && c <= '9')
- || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
+ unicode_ident::is_xid_continue(c)
}
-fn validate_ident(string: &str) {
- let validate = string;
- if validate.is_empty() {
+fn validate_ident(string: &str, raw: bool) {
+ if string.is_empty() {
panic!("Ident is not allowed to be empty; use Option<Ident>");
}
- if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
+ if string.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
panic!("Ident cannot be a number; use Literal instead");
}
@@ -704,9 +807,18 @@ fn validate_ident(string: &str) {
true
}
- if !ident_ok(validate) {
+ if !ident_ok(string) {
panic!("{:?} is not a valid Ident", string);
}
+
+ if raw {
+ match string {
+ "_" | "super" | "self" | "Self" | "crate" => {
+ panic!("`r#{}` cannot be a raw identifier", string);
+ }
+ _ => {}
+ }
+ }
}
impl PartialEq for Ident {
@@ -883,7 +995,9 @@ impl Literal {
b'"' => escaped.push_str("\\\""),
b'\\' => escaped.push_str("\\\\"),
b'\x20'..=b'\x7E' => escaped.push(*b as char),
- _ => escaped.push_str(&format!("\\x{:02X}", b)),
+ _ => {
+ let _ = write!(escaped, "\\x{:02X}", b);
+ }
}
}
escaped.push('"');
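The fallback.rs changes move the issue #235 handling into push_token_from_proc_macro: a fallback Literal whose repr starts with '-' is still split into a '-' Punct plus a positive Literal when it enters a TokenStream, now via RcVecMut instead of a plain Vec. A small sketch of the observable behavior, assuming the fallback implementation is in use (i.e. running outside a real proc macro invocation):

    use proc_macro2::{Literal, TokenStream, TokenTree};

    fn main() {
        // Literal::i32_unsuffixed(-1) has the textual repr "-1".
        let lit = Literal::i32_unsuffixed(-1);
        let stream = TokenStream::from(TokenTree::Literal(lit));

        // In the fallback implementation the negative literal is split into
        // two tokens: a `-` Punct followed by the positive literal `1`.
        let tokens: Vec<TokenTree> = stream.into_iter().collect();
        assert_eq!(tokens.len(), 2);
        println!("{:?}", tokens);
    }
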
diff --git a/src/lib.rs b/src/lib.rs
index 6edaf42..944bc8a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,7 +2,7 @@
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
-//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
@@ -86,8 +86,11 @@
//! a different thread.
// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.36")]
-#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.54")]
+#![cfg_attr(
+ any(proc_macro_span, super_unstable),
+ feature(proc_macro_span, proc_macro_span_shrink)
+)]
#![cfg_attr(super_unstable, feature(proc_macro_def_site))]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![allow(
@@ -95,6 +98,7 @@
clippy::cast_possible_truncation,
clippy::doc_markdown,
clippy::items_after_statements,
+ clippy::let_underscore_untyped,
clippy::manual_assert,
clippy::must_use_candidate,
clippy::needless_doctest_main,
@@ -120,6 +124,7 @@ extern crate proc_macro;
mod marker;
mod parse;
+mod rcvec;
#[cfg(wrap_proc_macro)]
mod detection;
@@ -129,22 +134,31 @@ mod detection;
#[doc(hidden)]
pub mod fallback;
+pub mod extra;
+
#[cfg(not(wrap_proc_macro))]
use crate::fallback as imp;
#[path = "wrapper.rs"]
#[cfg(wrap_proc_macro)]
mod imp;
+#[cfg(span_locations)]
+mod location;
+
+use crate::extra::DelimSpan;
use crate::marker::Marker;
-use std::cmp::Ordering;
+use core::cmp::Ordering;
+use core::fmt::{self, Debug, Display};
+use core::hash::{Hash, Hasher};
+use core::iter::FromIterator;
+use core::ops::RangeBounds;
+use core::str::FromStr;
use std::error::Error;
-use std::fmt::{self, Debug, Display};
-use std::hash::{Hash, Hasher};
-use std::iter::FromIterator;
-use std::ops::RangeBounds;
#[cfg(procmacro2_semver_exempt)]
use std::path::PathBuf;
-use std::str::FromStr;
+
+#[cfg(span_locations)]
+pub use crate::location::LineColumn;
/// An abstract stream of tokens, or more concretely a sequence of token trees.
///
@@ -173,7 +187,7 @@ impl TokenStream {
}
}
- fn _new_stable(inner: fallback::TokenStream) -> Self {
+ fn _new_fallback(inner: fallback::TokenStream) -> Self {
TokenStream {
inner: inner.into(),
_marker: Marker,
@@ -221,14 +235,14 @@ impl FromStr for TokenStream {
#[cfg(use_proc_macro)]
impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> Self {
TokenStream::_new(inner.into())
}
}
#[cfg(use_proc_macro)]
impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> Self {
inner.inner.into()
}
}
@@ -352,37 +366,6 @@ impl Debug for SourceFile {
}
}
-/// A line-column pair representing the start or end of a `Span`.
-///
-/// This type is semver exempt and not exposed by default.
-#[cfg(span_locations)]
-#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub struct LineColumn {
- /// The 1-indexed line in the source file on which the span starts or ends
- /// (inclusive).
- pub line: usize,
- /// The 0-indexed column (in UTF-8 characters) in the source file on which
- /// the span starts or ends (inclusive).
- pub column: usize,
-}
-
-#[cfg(span_locations)]
-impl Ord for LineColumn {
- fn cmp(&self, other: &Self) -> Ordering {
- self.line
- .cmp(&other.line)
- .then(self.column.cmp(&other.column))
- }
-}
-
-#[cfg(span_locations)]
-impl PartialOrd for LineColumn {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
/// A region of source code, along with macro expansion information.
#[derive(Copy, Clone)]
pub struct Span {
@@ -398,7 +381,7 @@ impl Span {
}
}
- fn _new_stable(inner: fallback::Span) -> Self {
+ fn _new_fallback(inner: fallback::Span) -> Self {
Span {
inner: inner.into(),
_marker: Marker,
@@ -488,8 +471,7 @@ impl Span {
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn start(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.start();
- LineColumn { line, column }
+ self.inner.start()
}
/// Get the ending line/column in the source file for this span.
@@ -504,8 +486,25 @@ impl Span {
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn end(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.end();
- LineColumn { line, column }
+ self.inner.end()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
+ #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
+ pub fn before(&self) -> Span {
+ Span::_new(self.inner.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
+ #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
+ pub fn after(&self) -> Span {
+ Span::_new(self.inner.after())
}
/// Create a new span encompassing `self` and `other`.
@@ -529,6 +528,17 @@ impl Span {
pub fn eq(&self, other: &Span) -> bool {
self.inner.eq(&other.inner)
}
+
+ /// Returns the source text behind a span. This preserves the original
+ /// source code, including spaces and comments. It only returns a result if
+ /// the span corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens
+ /// and not on this source text. The result of this function is a best
+ /// effort to be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.inner.source_text()
+ }
}
/// Prints a span in a form convenient for debugging.
@@ -579,25 +589,25 @@ impl TokenTree {
}
impl From<Group> for TokenTree {
- fn from(g: Group) -> TokenTree {
+ fn from(g: Group) -> Self {
TokenTree::Group(g)
}
}
impl From<Ident> for TokenTree {
- fn from(g: Ident) -> TokenTree {
+ fn from(g: Ident) -> Self {
TokenTree::Ident(g)
}
}
impl From<Punct> for TokenTree {
- fn from(g: Punct) -> TokenTree {
+ fn from(g: Punct) -> Self {
TokenTree::Punct(g)
}
}
impl From<Literal> for TokenTree {
- fn from(g: Literal) -> TokenTree {
+ fn from(g: Literal) -> Self {
TokenTree::Literal(g)
}
}
@@ -669,7 +679,7 @@ impl Group {
Group { inner }
}
- fn _new_stable(inner: fallback::Group) -> Self {
+ fn _new_fallback(inner: fallback::Group) -> Self {
Group {
inner: inner.into(),
}
@@ -686,7 +696,8 @@ impl Group {
}
}
- /// Returns the delimiter of this `Group`
+ /// Returns the punctuation used as the delimiter for this group: a set of
+ /// parentheses, square brackets, or curly braces.
pub fn delimiter(&self) -> Delimiter {
self.inner.delimiter()
}
@@ -730,6 +741,13 @@ impl Group {
Span::_new(self.inner.span_close())
}
+ /// Returns an object that holds this group's `span_open()` and
+ /// `span_close()` together (in a more compact representation than holding
+ /// those 2 spans individually).
+ pub fn delim_span(&self) -> DelimSpan {
+ DelimSpan::new(&self.inner)
+ }
+
/// Configures the span for this `Group`'s delimiters, but not its internal
/// tokens.
///
@@ -952,11 +970,11 @@ impl Ident {
Ident::_new(imp::Ident::new(string, span.inner))
}
- /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). The
+ /// `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path
+ /// segments (e.g. `self`, `super`) are not supported, and will cause a
+ /// panic.
pub fn new_raw(string: &str, span: Span) -> Self {
Ident::_new_raw(string, span)
}
@@ -1086,7 +1104,7 @@ impl Literal {
}
}
- fn _new_stable(inner: fallback::Literal) -> Self {
+ fn _new_fallback(inner: fallback::Literal) -> Self {
Literal {
inner: inner.into(),
_marker: Marker,
@@ -1128,9 +1146,9 @@ impl Literal {
/// This constructor is similar to those like `Literal::i8_unsuffixed` where
/// the float's value is emitted directly into the token but no suffix is
/// used, so it may be inferred to be a `f64` later in the compiler.
- /// Literals created from negative numbers may not survive rountrips through
- /// `TokenStream` or strings and may be broken into two tokens (`-` and
- /// positive literal).
+ /// Literals created from negative numbers may not survive round-trips
+ /// through `TokenStream` or strings and may be broken into two tokens (`-`
+ /// and positive literal).
///
/// # Panics
///
@@ -1147,7 +1165,7 @@ impl Literal {
/// specified is the preceding part of the token and `f64` is the suffix of
/// the token. This token will always be inferred to be an `f64` in the
/// compiler. Literals created from negative numbers may not survive
- /// rountrips through `TokenStream` or strings and may be broken into two
+ /// round-trips through `TokenStream` or strings and may be broken into two
/// tokens (`-` and positive literal).
///
/// # Panics
@@ -1164,9 +1182,9 @@ impl Literal {
/// This constructor is similar to those like `Literal::i8_unsuffixed` where
/// the float's value is emitted directly into the token but no suffix is
/// used, so it may be inferred to be a `f64` later in the compiler.
- /// Literals created from negative numbers may not survive rountrips through
- /// `TokenStream` or strings and may be broken into two tokens (`-` and
- /// positive literal).
+ /// Literals created from negative numbers may not survive round-trips
+ /// through `TokenStream` or strings and may be broken into two tokens (`-`
+ /// and positive literal).
///
/// # Panics
///
@@ -1183,7 +1201,7 @@ impl Literal {
/// specified is the preceding part of the token and `f32` is the suffix of
/// the token. This token will always be inferred to be an `f32` in the
/// compiler. Literals created from negative numbers may not survive
- /// rountrips through `TokenStream` or strings and may be broken into two
+ /// round-trips through `TokenStream` or strings and may be broken into two
/// tokens (`-` and positive literal).
///
/// # Panics
@@ -1270,7 +1288,7 @@ impl Display for Literal {
pub mod token_stream {
use crate::marker::Marker;
use crate::{imp, TokenTree};
- use std::fmt::{self, Debug};
+ use core::fmt::{self, Debug};
pub use crate::TokenStream;
@@ -1290,11 +1308,16 @@ pub mod token_stream {
fn next(&mut self) -> Option<TokenTree> {
self.inner.next()
}
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
}
impl Debug for IntoIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
}
}
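The lib.rs hunks above re-export LineColumn from the new location module and add Span::before(), Span::after(), and Span::source_text(). A sketch of the span inspection API, assuming the crate is built with the span-locations feature (without it, start()/end() are not compiled and the fallback source_text() returns None):

    use proc_macro2::TokenStream;

    fn main() {
        let stream: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
        for tt in stream {
            let span = tt.span();
            // start()/end() give 1-indexed line, 0-indexed column;
            // source_text() returns the original slice of source, if known.
            println!(
                "{}:{}..{}:{} {:?}",
                span.start().line,
                span.start().column,
                span.end().line,
                span.end().column,
                span.source_text(),
            );
        }
    }
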
diff --git a/src/location.rs b/src/location.rs
new file mode 100644
index 0000000..463026c
--- /dev/null
+++ b/src/location.rs
@@ -0,0 +1,29 @@
+use core::cmp::Ordering;
+
+/// A line-column pair representing the start or end of a `Span`.
+///
+/// This type is semver exempt and not exposed by default.
+#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends
+ /// (inclusive).
+ pub line: usize,
+ /// The 0-indexed column (in UTF-8 characters) in the source file on which
+ /// the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line
+ .cmp(&other.line)
+ .then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
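LineColumn now lives in its own module, additionally derives Hash, and keeps the manual Ord that sorts by line first and then column. For example (again requiring the span-locations feature, which is what exports this type):

    use proc_macro2::LineColumn;

    fn main() {
        let a = LineColumn { line: 3, column: 7 };
        let b = LineColumn { line: 4, column: 0 };
        // Ordered by line first, then column.
        assert!(a < b);
    }
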
diff --git a/src/marker.rs b/src/marker.rs
index 58729ba..59fd096 100644
--- a/src/marker.rs
+++ b/src/marker.rs
@@ -1,4 +1,4 @@
-use std::marker::PhantomData;
+use core::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;
@@ -9,7 +9,7 @@ pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
pub(crate) use self::value::*;
mod value {
- pub(crate) use std::marker::PhantomData as Marker;
+ pub(crate) use core::marker::PhantomData as Marker;
}
pub(crate) struct ProcMacroAutoTraits(Rc<()>);
diff --git a/src/parse.rs b/src/parse.rs
index f77213a..82291da 100644
--- a/src/parse.rs
+++ b/src/parse.rs
@@ -1,9 +1,10 @@
use crate::fallback::{
is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
+ TokenStreamBuilder,
};
use crate::{Delimiter, Punct, Spacing, TokenTree};
-use std::char;
-use std::str::{Bytes, CharIndices, Chars};
+use core::char;
+use core::str::{Bytes, CharIndices, Chars};
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
@@ -13,7 +14,7 @@ pub(crate) struct Cursor<'a> {
}
impl<'a> Cursor<'a> {
- fn advance(&self, bytes: usize) -> Cursor<'a> {
+ pub fn advance(&self, bytes: usize) -> Cursor<'a> {
let (_front, rest) = self.rest.split_at(bytes);
Cursor {
rest,
@@ -22,7 +23,7 @@ impl<'a> Cursor<'a> {
}
}
- fn starts_with(&self, s: &str) -> bool {
+ pub fn starts_with(&self, s: &str) -> bool {
self.rest.starts_with(s)
}
@@ -115,9 +116,9 @@ fn block_comment(input: Cursor) -> PResult<&str> {
return Err(Reject);
}
- let mut depth = 0;
+ let mut depth = 0usize;
let bytes = input.as_bytes();
- let mut i = 0;
+ let mut i = 0usize;
let upper = bytes.len() - 1;
while i < upper {
@@ -150,14 +151,13 @@ fn word_break(input: Cursor) -> Result<Cursor, Reject> {
}
pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
- let mut trees = Vec::new();
+ let mut trees = TokenStreamBuilder::new();
let mut stack = Vec::new();
loop {
input = skip_whitespace(input);
- if let Ok((rest, tt)) = doc_comment(input) {
- trees.extend(tt);
+ if let Ok((rest, ())) = doc_comment(input, &mut trees) {
input = rest;
continue;
}
@@ -168,7 +168,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
let first = match input.bytes().next() {
Some(first) => first,
None => match stack.last() {
- None => return Ok(TokenStream::from(trees)),
+ None => return Ok(trees.build()),
#[cfg(span_locations)]
Some((lo, _frame)) => {
return Err(LexError {
@@ -191,7 +191,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
#[cfg(span_locations)]
let frame = (lo, frame);
stack.push(frame);
- trees = Vec::new();
+ trees = TokenStreamBuilder::new();
} else if let Some(close_delimiter) = match first {
b')' => Some(Delimiter::Parenthesis),
b']' => Some(Delimiter::Bracket),
@@ -209,7 +209,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
return Err(lex_error(input));
}
input = input.advance(1);
- let mut g = Group::new(open_delimiter, TokenStream::from(trees));
+ let mut g = Group::new(open_delimiter, trees.build());
g.set_span(Span {
#[cfg(span_locations)]
lo,
@@ -217,19 +217,19 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
hi: input.off,
});
trees = outer;
- trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
+ trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
} else {
let (rest, mut tt) = match leaf_token(input) {
Ok((rest, tt)) => (rest, tt),
Err(Reject) => return Err(lex_error(input)),
};
- tt.set_span(crate::Span::_new_stable(Span {
+ tt.set_span(crate::Span::_new_fallback(Span {
#[cfg(span_locations)]
lo,
#[cfg(span_locations)]
hi: rest.off,
}));
- trees.push(tt);
+ trees.push_token_from_parser(tt);
input = rest;
}
}
@@ -251,7 +251,7 @@ fn lex_error(cursor: Cursor) -> LexError {
fn leaf_token(input: Cursor) -> PResult<TokenTree> {
if let Ok((input, l)) = literal(input) {
// must be parsed before ident
- Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
+ Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
} else if let Ok((input, p)) = punct(input) {
Ok((input, TokenTree::Punct(p)))
} else if let Ok((input, i)) = ident(input) {
@@ -283,8 +283,9 @@ fn ident_any(input: Cursor) -> PResult<crate::Ident> {
return Ok((rest, ident));
}
- if sym == "_" {
- return Err(Reject);
+ match sym {
+ "_" | "super" | "self" | "Self" | "crate" => return Err(Reject),
+ _ => {}
}
let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
@@ -471,6 +472,10 @@ fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
_ => return Err(Reject),
}
}
+ if n > 255 {
+ // https://github.com/rust-lang/rust/pull/95251
+ return Err(Reject);
+ }
while let Some((i, ch)) = chars.next() {
match ch {
'"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
@@ -786,11 +791,11 @@ fn punct_char(input: Cursor) -> PResult<char> {
}
}
-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
#[cfg(span_locations)]
let lo = input.off;
let (rest, (comment, inner)) = doc_comment_contents(input)?;
- let span = crate::Span::_new_stable(Span {
+ let span = crate::Span::_new_fallback(Span {
#[cfg(span_locations)]
lo,
#[cfg(span_locations)]
@@ -806,25 +811,31 @@ fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
scan_for_bare_cr = rest;
}
- let mut trees = Vec::new();
- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+ let mut pound = Punct::new('#', Spacing::Alone);
+ pound.set_span(span);
+ trees.push_token_from_parser(TokenTree::Punct(pound));
+
if inner {
- trees.push(Punct::new('!', Spacing::Alone).into());
- }
- let mut stream = vec![
- TokenTree::Ident(crate::Ident::new("doc", span)),
- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
- TokenTree::Literal(crate::Literal::string(comment)),
- ];
- for tt in &mut stream {
- tt.set_span(span);
- }
- let group = Group::new(Delimiter::Bracket, TokenStream::from(stream));
- trees.push(crate::Group::_new_stable(group).into());
- for tt in &mut trees {
- tt.set_span(span);
- }
- Ok((rest, trees))
+ let mut bang = Punct::new('!', Spacing::Alone);
+ bang.set_span(span);
+ trees.push_token_from_parser(TokenTree::Punct(bang));
+ }
+
+ let doc_ident = crate::Ident::new("doc", span);
+ let mut equal = Punct::new('=', Spacing::Alone);
+ equal.set_span(span);
+ let mut literal = crate::Literal::string(comment);
+ literal.set_span(span);
+ let mut bracketed = TokenStreamBuilder::with_capacity(3);
+ bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
+ bracketed.push_token_from_parser(TokenTree::Punct(equal));
+ bracketed.push_token_from_parser(TokenTree::Literal(literal));
+ let group = Group::new(Delimiter::Bracket, bracketed.build());
+ let mut group = crate::Group::_new_fallback(group);
+ group.set_span(span);
+ trees.push_token_from_parser(TokenTree::Group(group));
+
+ Ok((rest, ()))
}
fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
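The reworked doc_comment above still lowers `/// ...` and `//! ...` into `#[doc = "..."]` / `#![doc = "..."]` attribute tokens, each spanned to the original comment; it just pushes them straight into a TokenStreamBuilder instead of collecting an intermediate Vec. A hedged sketch of what that lowering looks like through the public API (run outside a procedural macro, so the fallback parser handles it):

use proc_macro2::{Delimiter, TokenStream, TokenTree};

fn main() {
    let tokens: TokenStream = "/// hello".parse().unwrap();
    let mut iter = tokens.into_iter();

    // First token: the `#` of the synthesized outer doc attribute.
    match iter.next() {
        Some(TokenTree::Punct(p)) => assert_eq!(p.as_char(), '#'),
        other => panic!("expected `#`, got {:?}", other),
    }

    // Second token: the bracketed group containing `doc = " hello"`.
    match iter.next() {
        Some(TokenTree::Group(g)) => assert_eq!(g.delimiter(), Delimiter::Bracket),
        other => panic!("expected bracketed group, got {:?}", other),
    }
}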
diff --git a/src/rcvec.rs b/src/rcvec.rs
new file mode 100644
index 0000000..86ca7d8
--- /dev/null
+++ b/src/rcvec.rs
@@ -0,0 +1,142 @@
+use core::mem;
+use core::slice;
+use std::rc::Rc;
+use std::vec;
+
+pub(crate) struct RcVec<T> {
+ inner: Rc<Vec<T>>,
+}
+
+pub(crate) struct RcVecBuilder<T> {
+ inner: Vec<T>,
+}
+
+pub(crate) struct RcVecMut<'a, T> {
+ inner: &'a mut Vec<T>,
+}
+
+#[derive(Clone)]
+pub(crate) struct RcVecIntoIter<T> {
+ inner: vec::IntoIter<T>,
+}
+
+impl<T> RcVec<T> {
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+
+ pub fn iter(&self) -> slice::Iter<T> {
+ self.inner.iter()
+ }
+
+ pub fn make_mut(&mut self) -> RcVecMut<T>
+ where
+ T: Clone,
+ {
+ RcVecMut {
+ inner: Rc::make_mut(&mut self.inner),
+ }
+ }
+
+ pub fn get_mut(&mut self) -> Option<RcVecMut<T>> {
+ let inner = Rc::get_mut(&mut self.inner)?;
+ Some(RcVecMut { inner })
+ }
+
+ pub fn make_owned(mut self) -> RcVecBuilder<T>
+ where
+ T: Clone,
+ {
+ let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
+ mem::replace(owned, Vec::new())
+ } else {
+ Vec::clone(&self.inner)
+ };
+ RcVecBuilder { inner: vec }
+ }
+}
+
+impl<T> RcVecBuilder<T> {
+ pub fn new() -> Self {
+ RcVecBuilder { inner: Vec::new() }
+ }
+
+ pub fn with_capacity(cap: usize) -> Self {
+ RcVecBuilder {
+ inner: Vec::with_capacity(cap),
+ }
+ }
+
+ pub fn push(&mut self, element: T) {
+ self.inner.push(element);
+ }
+
+ pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
+ self.inner.extend(iter);
+ }
+
+ pub fn as_mut(&mut self) -> RcVecMut<T> {
+ RcVecMut {
+ inner: &mut self.inner,
+ }
+ }
+
+ pub fn build(self) -> RcVec<T> {
+ RcVec {
+ inner: Rc::new(self.inner),
+ }
+ }
+}
+
+impl<'a, T> RcVecMut<'a, T> {
+ pub fn push(&mut self, element: T) {
+ self.inner.push(element);
+ }
+
+ pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
+ self.inner.extend(iter);
+ }
+
+ pub fn pop(&mut self) -> Option<T> {
+ self.inner.pop()
+ }
+
+ pub fn as_mut(&mut self) -> RcVecMut<T> {
+ RcVecMut { inner: self.inner }
+ }
+}
+
+impl<T> Clone for RcVec<T> {
+ fn clone(&self) -> Self {
+ RcVec {
+ inner: Rc::clone(&self.inner),
+ }
+ }
+}
+
+impl<T> IntoIterator for RcVecBuilder<T> {
+ type Item = T;
+ type IntoIter = RcVecIntoIter<T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ RcVecIntoIter {
+ inner: self.inner.into_iter(),
+ }
+ }
+}
+
+impl<T> Iterator for RcVecIntoIter<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
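RcVecBuilder collects into a plain Vec and only wraps it in an Rc at build(); after that, make_mut and make_owned defer to Rc::make_mut / Rc::get_mut, so a token stream is cloned only when it is actually shared. A minimal sketch of that clone-on-write pattern (RcVec itself is pub(crate), so this uses Rc and Vec directly):

use std::rc::Rc;

fn main() {
    let original = Rc::new(vec![1, 2, 3]);
    let mut handle = Rc::clone(&original);

    // Two handles exist, so make_mut clones the Vec before returning &mut.
    Rc::make_mut(&mut handle).push(4);
    assert_eq!(*original, vec![1, 2, 3]); // the shared copy is untouched
    assert_eq!(*handle, vec![1, 2, 3, 4]);

    // A uniquely owned handle can be mutated in place, no clone needed.
    assert!(Rc::get_mut(&mut handle).is_some());
}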
diff --git a/src/wrapper.rs b/src/wrapper.rs
index 2ba76cc..00f67cd 100644
--- a/src/wrapper.rs
+++ b/src/wrapper.rs
@@ -1,12 +1,14 @@
use crate::detection::inside_proc_macro;
+#[cfg(span_locations)]
+use crate::location::LineColumn;
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
-use std::fmt::{self, Debug, Display};
-use std::iter::FromIterator;
-use std::ops::RangeBounds;
+use core::fmt::{self, Debug, Display};
+use core::iter::FromIterator;
+use core::ops::RangeBounds;
+use core::str::FromStr;
use std::panic;
#[cfg(super_unstable)]
use std::path::PathBuf;
-use std::str::FromStr;
#[derive(Clone)]
pub(crate) enum TokenStream {
@@ -38,7 +40,7 @@ impl LexError {
}
fn mismatch() -> ! {
- panic!("stable/nightly mismatch")
+ panic!("compiler/fallback mismatch")
}
impl DeferredTokenStream {
@@ -129,13 +131,13 @@ impl Display for TokenStream {
}
impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> Self {
TokenStream::Compiler(DeferredTokenStream::new(inner))
}
}
impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> Self {
match inner {
TokenStream::Compiler(inner) => inner.into_token_stream(),
TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
@@ -144,7 +146,7 @@ impl From<TokenStream> for proc_macro::TokenStream {
}
impl From<fallback::TokenStream> for TokenStream {
- fn from(inner: fallback::TokenStream) -> TokenStream {
+ fn from(inner: fallback::TokenStream) -> Self {
TokenStream::Fallback(inner)
}
}
@@ -168,7 +170,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
}
impl From<TokenTree> for TokenStream {
- fn from(token: TokenTree) -> TokenStream {
+ fn from(token: TokenTree) -> Self {
if inside_proc_macro() {
TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
} else {
@@ -261,13 +263,13 @@ impl LexError {
}
impl From<proc_macro::LexError> for LexError {
- fn from(e: proc_macro::LexError) -> LexError {
+ fn from(e: proc_macro::LexError) -> Self {
LexError::Compiler(e)
}
}
impl From<fallback::LexError> for LexError {
- fn from(e: fallback::LexError) -> LexError {
+ fn from(e: fallback::LexError) -> Self {
LexError::Fallback(e)
}
}
@@ -350,12 +352,6 @@ impl Iterator for TokenTreeIter {
}
}
-impl Debug for TokenTreeIter {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("TokenTreeIter").finish()
- }
-}
-
#[derive(Clone, PartialEq, Eq)]
#[cfg(super_unstable)]
pub(crate) enum SourceFile {
@@ -395,12 +391,6 @@ impl Debug for SourceFile {
}
}
-#[cfg(any(super_unstable, feature = "span-locations"))]
-pub(crate) struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
#[derive(Copy, Clone)]
pub(crate) enum Span {
Compiler(proc_macro::Span),
@@ -477,7 +467,7 @@ impl Span {
}
}
- #[cfg(any(super_unstable, feature = "span-locations"))]
+ #[cfg(span_locations)]
pub fn start(&self) -> LineColumn {
match self {
#[cfg(proc_macro_span)]
@@ -487,14 +477,11 @@ impl Span {
}
#[cfg(not(proc_macro_span))]
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => {
- let fallback::LineColumn { line, column } = s.start();
- LineColumn { line, column }
- }
+ Span::Fallback(s) => s.start(),
}
}
- #[cfg(any(super_unstable, feature = "span-locations"))]
+ #[cfg(span_locations)]
pub fn end(&self) -> LineColumn {
match self {
#[cfg(proc_macro_span)]
@@ -504,10 +491,23 @@ impl Span {
}
#[cfg(not(proc_macro_span))]
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => {
- let fallback::LineColumn { line, column } = s.end();
- LineColumn { line, column }
- }
+ Span::Fallback(s) => s.end(),
+ }
+ }
+
+ #[cfg(super_unstable)]
+ pub fn before(&self) -> Span {
+ match self {
+ Span::Compiler(s) => Span::Compiler(s.before()),
+ Span::Fallback(s) => Span::Fallback(s.before()),
+ }
+ }
+
+ #[cfg(super_unstable)]
+ pub fn after(&self) -> Span {
+ match self {
+ Span::Compiler(s) => Span::Compiler(s.after()),
+ Span::Fallback(s) => Span::Fallback(s.after()),
}
}
@@ -530,6 +530,16 @@ impl Span {
}
}
+ pub fn source_text(&self) -> Option<String> {
+ match self {
+ #[cfg(not(no_source_text))]
+ Span::Compiler(s) => s.source_text(),
+ #[cfg(no_source_text)]
+ Span::Compiler(_) => None,
+ Span::Fallback(s) => s.source_text(),
+ }
+ }
+
fn unwrap_nightly(self) -> proc_macro::Span {
match self {
Span::Compiler(s) => s,
@@ -539,13 +549,13 @@ impl Span {
}
impl From<proc_macro::Span> for crate::Span {
- fn from(proc_span: proc_macro::Span) -> crate::Span {
+ fn from(proc_span: proc_macro::Span) -> Self {
crate::Span::_new(Span::Compiler(proc_span))
}
}
impl From<fallback::Span> for Span {
- fn from(inner: fallback::Span) -> Span {
+ fn from(inner: fallback::Span) -> Self {
Span::Fallback(inner)
}
}
@@ -694,16 +704,26 @@ impl Ident {
pub fn new_raw(string: &str, span: Span) -> Self {
match span {
+ #[cfg(not(no_ident_new_raw))]
+ Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)),
+ #[cfg(no_ident_new_raw)]
Span::Compiler(s) => {
- let p: proc_macro::TokenStream = string.parse().unwrap();
- let ident = match p.into_iter().next() {
- Some(proc_macro::TokenTree::Ident(mut i)) => {
- i.set_span(s);
- i
+ let _ = proc_macro::Ident::new(string, s);
+ // At this point the un-r#-prefixed string is known to be a
+ // valid identifier. Try to produce a valid raw identifier by
+ // running the `TokenStream` parser, and unwrapping the first
+ // token as an `Ident`.
+ let raw_prefixed = format!("r#{}", string);
+ if let Ok(ts) = raw_prefixed.parse::<proc_macro::TokenStream>() {
+ let mut iter = ts.into_iter();
+ if let (Some(proc_macro::TokenTree::Ident(mut id)), None) =
+ (iter.next(), iter.next())
+ {
+ id.set_span(s);
+ return Ident::Compiler(id);
}
- _ => panic!(),
- };
- Ident::Compiler(ident)
+ }
+ panic!("not allowed as a raw identifier: `{}`", raw_prefixed)
}
Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)),
}
@@ -919,7 +939,7 @@ impl Literal {
}
impl From<fallback::Literal> for Literal {
- fn from(s: fallback::Literal) -> Literal {
+ fn from(s: fallback::Literal) -> Self {
Literal::Fallback(s)
}
}
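The no_ident_new_raw branch of Ident::new_raw above falls back to round-tripping "r#<string>" through the token stream parser and unwrapping the sole Ident token. A sketch of the same technique using proc_macro2's own parser (proc_macro itself can only be exercised inside a macro invocation):

use proc_macro2::{TokenStream, TokenTree};

fn main() {
    let raw_prefixed = format!("r#{}", "struct");
    let tokens: TokenStream = raw_prefixed.parse().unwrap();
    let mut iter = tokens.into_iter();
    match (iter.next(), iter.next()) {
        // Exactly one token, and it is an identifier: a valid raw identifier.
        (Some(TokenTree::Ident(id)), None) => assert_eq!(id.to_string(), "r#struct"),
        other => panic!("not allowed as a raw identifier: {:?}", other),
    }
}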
diff --git a/tests/comments.rs b/tests/comments.rs
index 7174108..4f7236d 100644
--- a/tests/comments.rs
+++ b/tests/comments.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::assertions_on_result_states)]
+
use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
// #[doc = "..."] -> "..."
diff --git a/tests/marker.rs b/tests/marker.rs
index 4fb2beb..5b45733 100644
--- a/tests/marker.rs
+++ b/tests/marker.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::extra_unused_type_parameters)]
+
use proc_macro2::{
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
};
diff --git a/tests/test.rs b/tests/test.rs
index ab82390..e0af151 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -1,6 +1,11 @@
-#![allow(clippy::non_ascii_literal)]
+#![allow(
+ clippy::assertions_on_result_states,
+ clippy::items_after_statements,
+ clippy::non_ascii_literal
+)]
use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::iter;
use std::panic;
use std::str::{self, FromStr};
@@ -15,14 +20,24 @@ fn idents() {
}
#[test]
-#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
assert_eq!(
Ident::new_raw("String", Span::call_site()).to_string(),
"r#String"
);
assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
- assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
+}
+
+#[test]
+#[should_panic(expected = "`r#_` cannot be a raw identifier")]
+fn ident_raw_underscore() {
+ Ident::new_raw("_", Span::call_site());
+}
+
+#[test]
+#[should_panic(expected = "`r#super` cannot be a raw identifier")]
+fn ident_raw_reserved() {
+ Ident::new_raw("super", Span::call_site());
}
#[test]
@@ -104,6 +119,34 @@ fn literal_string() {
#[test]
fn literal_raw_string() {
"r\"\r\n\"".parse::<TokenStream>().unwrap();
+
+ fn raw_string_literal_with_hashes(n: usize) -> String {
+ let mut literal = String::new();
+ literal.push('r');
+ literal.extend(iter::repeat('#').take(n));
+ literal.push('"');
+ literal.push('"');
+ literal.extend(iter::repeat('#').take(n));
+ literal
+ }
+
+ raw_string_literal_with_hashes(255)
+ .parse::<TokenStream>()
+ .unwrap();
+
+ // https://github.com/rust-lang/rust/pull/95251
+ raw_string_literal_with_hashes(256)
+ .parse::<TokenStream>()
+ .unwrap_err();
+}
+
+#[test]
+fn literal_byte_string() {
+ assert_eq!(Literal::byte_string(b"").to_string(), "b\"\"");
+ assert_eq!(
+ Literal::byte_string(b"\0\t\n\r\"\\2\x10").to_string(),
+ "b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"",
+ );
}
#[test]
@@ -114,8 +157,43 @@ fn literal_character() {
}
#[test]
+fn literal_integer() {
+ assert_eq!(Literal::u8_suffixed(10).to_string(), "10u8");
+ assert_eq!(Literal::u16_suffixed(10).to_string(), "10u16");
+ assert_eq!(Literal::u32_suffixed(10).to_string(), "10u32");
+ assert_eq!(Literal::u64_suffixed(10).to_string(), "10u64");
+ assert_eq!(Literal::u128_suffixed(10).to_string(), "10u128");
+ assert_eq!(Literal::usize_suffixed(10).to_string(), "10usize");
+
+ assert_eq!(Literal::i8_suffixed(10).to_string(), "10i8");
+ assert_eq!(Literal::i16_suffixed(10).to_string(), "10i16");
+ assert_eq!(Literal::i32_suffixed(10).to_string(), "10i32");
+ assert_eq!(Literal::i64_suffixed(10).to_string(), "10i64");
+ assert_eq!(Literal::i128_suffixed(10).to_string(), "10i128");
+ assert_eq!(Literal::isize_suffixed(10).to_string(), "10isize");
+
+ assert_eq!(Literal::u8_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::u16_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::u32_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::u64_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::u128_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::usize_unsuffixed(10).to_string(), "10");
+
+ assert_eq!(Literal::i8_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::i16_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::i32_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::i64_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::i128_unsuffixed(10).to_string(), "10");
+ assert_eq!(Literal::isize_unsuffixed(10).to_string(), "10");
+}
+
+#[test]
fn literal_float() {
+ assert_eq!(Literal::f32_suffixed(10.0).to_string(), "10f32");
+ assert_eq!(Literal::f64_suffixed(10.0).to_string(), "10f64");
+
assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
+ assert_eq!(Literal::f64_unsuffixed(10.0).to_string(), "10.0");
}
#[test]
@@ -494,6 +572,13 @@ fn default_tokenstream_is_empty() {
}
#[test]
+fn tokenstream_size_hint() {
+ let tokens = "a b (c d) e".parse::<TokenStream>().unwrap();
+
+ assert_eq!(tokens.into_iter().size_hint(), (4, Some(4)));
+}
+
+#[test]
fn tuple_indexing() {
// This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
@@ -569,3 +654,16 @@ fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usi
}
}
}
+
+#[test]
+fn byte_order_mark() {
+ let string = "\u{feff}foo";
+ let tokens = string.parse::<TokenStream>().unwrap();
+ match tokens.into_iter().next().unwrap() {
+ TokenTree::Ident(ident) => assert_eq!(ident, "foo"),
+ _ => unreachable!(),
+ }
+
+ let string = "foo\u{feff}";
+ string.parse::<TokenStream>().unwrap_err();
+}
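The size_hint test above counts only top-level trees: the parenthesized group is a single TokenTree whose contents live in a nested stream. A small sketch making that nesting explicit:

use proc_macro2::{TokenStream, TokenTree};

fn main() {
    let tokens: TokenStream = "a b (c d) e".parse().unwrap();
    let trees: Vec<TokenTree> = tokens.into_iter().collect();
    assert_eq!(trees.len(), 4); // `a`, `b`, the `(c d)` group, `e`

    if let TokenTree::Group(group) = &trees[2] {
        assert_eq!(group.stream().into_iter().count(), 2); // `c`, `d`
    } else {
        panic!("expected a group");
    }
}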
diff --git a/tests/test_size.rs b/tests/test_size.rs
new file mode 100644
index 0000000..46e58db
--- /dev/null
+++ b/tests/test_size.rs
@@ -0,0 +1,42 @@
+extern crate proc_macro;
+
+use std::mem;
+
+#[rustversion::attr(before(1.32), ignore)]
+#[test]
+fn test_proc_macro_span_size() {
+ assert_eq!(mem::size_of::<proc_macro::Span>(), 4);
+ assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4);
+}
+
+#[cfg_attr(not(all(not(wrap_proc_macro), not(span_locations))), ignore)]
+#[test]
+fn test_proc_macro2_fallback_span_size_without_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 0);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1);
+}
+
+#[cfg_attr(not(all(not(wrap_proc_macro), span_locations)), ignore)]
+#[test]
+fn test_proc_macro2_fallback_span_size_with_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 8);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
+}
+
+#[rustversion::attr(before(1.32), ignore)]
+#[rustversion::attr(
+ since(1.32),
+ cfg_attr(not(all(wrap_proc_macro, not(span_locations))), ignore)
+)]
+#[test]
+fn test_proc_macro2_wrapper_span_size_without_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 4);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8);
+}
+
+#[cfg_attr(not(all(wrap_proc_macro, span_locations)), ignore)]
+#[test]
+fn test_proc_macro2_wrapper_span_size_with_locations() {
+ assert_eq!(mem::size_of::<proc_macro2::Span>(), 12);
+ assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
+}
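The sizes asserted above are configuration-dependent (wrap_proc_macro, span_locations, compiler version), which is why the tests are gated with rustversion and cfg_attr(ignore). A quick, hedged way to see what a particular build of proc-macro2 actually produces:

use std::mem;

fn main() {
    // Varies with the span-locations feature and with whether the real
    // proc_macro Span is being wrapped.
    println!("Span: {} bytes", mem::size_of::<proc_macro2::Span>());
    println!(
        "Option<Span>: {} bytes",
        mem::size_of::<Option<proc_macro2::Span>>()
    );
}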