path: root/patches/syn-2.patch
blob: f956ee365277f1e4eecfed828e035690e7326cd1 (plain)
diff --git a/src/generator.rs b/src/generator.rs
index 87d1f00..0dbcaa3 100644
--- a/src/generator.rs
+++ b/src/generator.rs
@@ -22,7 +22,7 @@ use crate::docs::DocComment;
 pub(crate) fn generate(
     name: Ident,
     generics: &Generics,
-    path: Option<PathBuf>,
+    paths: Vec<PathBuf>,
     rules: Vec<OptimizedRule>,
     defaults: Vec<&str>,
     doc_comment: &DocComment,
@@ -32,10 +32,7 @@ pub(crate) fn generate(
 
     let builtins = generate_builtin_rules();
     let include_fix = if include_grammar {
-        match path {
-            Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
-            None => quote!(),
-        }
+        generate_include(&name, paths)
     } else {
         quote!()
     };
@@ -170,17 +167,33 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
     builtins
 }
 
-// Needed because Cargo doesn't watch for changes in grammars.
-fn generate_include(name: &Ident, path: &str) -> TokenStream {
+/// Generate Rust `include_str!` calls for the grammar files so that Cargo watches them for changes.
+fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
     let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
     // Need to make this relative to the current directory since the path to the file
     // is derived from the CARGO_MANIFEST_DIR environment variable
-    let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
-    current_dir.push(path);
-    let relative_path = current_dir.to_str().expect("path contains invalid unicode");
+    let current_dir = std::env::current_dir().expect("Unable to get current directory");
+
+    let include_tokens = paths.iter().map(|path| {
+        let path = path.to_str().expect("non-Unicode path");
+
+        let relative_path = current_dir
+            .join(path)
+            .to_str()
+            .expect("path contains invalid unicode")
+            .to_string();
+
+        quote! {
+            include_str!(#relative_path)
+        }
+    });
+
+    let len = include_tokens.len();
     quote! {
         #[allow(non_upper_case_globals)]
-        const #const_name: &'static str = include_str!(#relative_path);
+        const #const_name: [&'static str; #len] = [
+            #(#include_tokens),*
+        ];
     }
 }
 
@@ -1016,14 +1029,16 @@ mod tests {
         let defaults = vec!["ANY"];
         let result = result_type();
         let box_ty = box_type();
-        let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
-        current_dir.push("test.pest");
-        let test_path = current_dir.to_str().expect("path contains invalid unicode");
+        let current_dir = std::env::current_dir().expect("Unable to get current directory");
+
+        let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
+        let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();
+
         assert_eq!(
-            generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, doc_comment, true).to_string(),
+            generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
             quote! {
                 #[allow(non_upper_case_globals)]
-                const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
+                const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];
 
                 #[doc = "This is Rule doc\nThis is second line"]
                 #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
diff --git a/src/lib.rs b/src/lib.rs
index f808987..7aed193 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -27,7 +27,7 @@ use std::io::{self, Read};
 use std::path::Path;
 
 use proc_macro2::TokenStream;
-use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
+use syn::{Attribute, DeriveInput, Expr, ExprLit, Generics, Ident, Lit, Meta};
 
 #[macro_use]
 mod macros;
@@ -45,7 +45,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
     let (name, generics, contents) = parse_derive(ast);
 
     let mut data = String::new();
-    let mut path = None;
+    let mut paths = vec![];
 
     for content in contents {
         let (_data, _path) = match content {
@@ -81,8 +81,9 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
         };
 
         data.push_str(&_data);
-        if _path.is_some() {
-            path = _path;
+        match _path {
+            Some(path) => paths.push(path),
+            None => (),
         }
     }
 
@@ -99,7 +100,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
     generator::generate(
         name,
         &generics,
-        path,
+        paths,
         optimized,
         defaults,
         &doc_comment,
@@ -127,11 +128,9 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
     let grammar: Vec<&Attribute> = ast
         .attrs
         .iter()
-        .filter(|attr| match attr.parse_meta() {
-            Ok(Meta::NameValue(name_value)) => {
-                name_value.path.is_ident("grammar") || name_value.path.is_ident("grammar_inline")
-            }
-            _ => false,
+        .filter(|attr| {
+            let path = attr.meta.path();
+            path.is_ident("grammar") || path.is_ident("grammar_inline")
         })
         .collect();
 
@@ -148,9 +147,12 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
 }
 
 fn get_attribute(attr: &Attribute) -> GrammarSource {
-    match attr.parse_meta() {
-        Ok(Meta::NameValue(name_value)) => match name_value.lit {
-            Lit::Str(string) => {
+    match &attr.meta {
+        Meta::NameValue(name_value) => match &name_value.value {
+            Expr::Lit(ExprLit {
+                lit: Lit::Str(string),
+                ..
+            }) => {
                 if name_value.path.is_ident("grammar") {
                     GrammarSource::File(string.value())
                 } else {
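
For reference, a minimal sketch of the usage this patch enables, assuming the standard pest_derive front end (the struct name is illustrative; the grammar file names mirror those used in the test above):

    // Each `#[grammar = "..."]` attribute now contributes one PathBuf to `paths`,
    // so the generated `_PEST_GRAMMAR_MyParser` constant becomes an array of
    // `include_str!` entries and Cargo rebuilds when any grammar file changes.
    use pest_derive::Parser;

    #[derive(Parser)]
    #[grammar = "base.pest"]
    #[grammar = "test.pest"]
    pub struct MyParser;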