| author | Miguel Ojeda <ojeda@kernel.org> | 2025-11-24 16:18:27 +0100 |
| committer | Miguel Ojeda <ojeda@kernel.org> | 2025-11-24 17:15:44 +0100 |
| commit | 808c999fc9e7c366fd47da564e69d579c1dc8279 (patch) | |
| tree | d81985de64150acef12e038e98ef950e1b41b2d6 /rust/syn/tt.rs | |
| parent | 88de91cc1ce7b3069ccabc1a5fbe16d41c663093 (diff) | |
rust: syn: import crate
This is a subset of the Rust `syn` crate, version 2.0.106 (released
2025-08-16), licensed under "Apache-2.0 OR MIT", from:
https://github.com/dtolnay/syn/raw/2.0.106/src
The files are copied as-is, with no modifications whatsoever (not even
adding the SPDX identifiers).
For copyright details, please see:
https://github.com/dtolnay/syn/blob/2.0.106/README.md#license
https://github.com/dtolnay/syn/blob/2.0.106/LICENSE-APACHE
https://github.com/dtolnay/syn/blob/2.0.106/LICENSE-MIT
The next two patches modify these files as needed for use within the
kernel. This patch split allows reviewers to double-check the import
and to clearly see the differences introduced.
The following script may be used to verify the contents:
    for path in $(cd rust/syn/ && find . -type f -name '*.rs'); do
        curl --silent --show-error --location \
            https://github.com/dtolnay/syn/raw/2.0.106/src/$path \
            | diff --unified rust/syn/$path - && echo $path: OK
    done
Reviewed-by: Gary Guo <gary@garyguo.net>
Tested-by: Gary Guo <gary@garyguo.net>
Tested-by: Jesung Yang <y.j3ms.n@gmail.com>
Link: https://patch.msgid.link/20251124151837.2184382-16-ojeda@kernel.org
Signed-off-by: Miguel Ojeda <ojeda@kernel.org>
Diffstat (limited to 'rust/syn/tt.rs')
| -rw-r--r-- | rust/syn/tt.rs | 107 |
1 file changed, 107 insertions, 0 deletions
diff --git a/rust/syn/tt.rs b/rust/syn/tt.rs
new file mode 100644
index 000000000000..7d5d6a1ac326
--- /dev/null
+++ b/rust/syn/tt.rs
@@ -0,0 +1,107 @@
+use proc_macro2::{Delimiter, TokenStream, TokenTree};
+use std::hash::{Hash, Hasher};
+
+pub(crate) struct TokenTreeHelper<'a>(pub &'a TokenTree);
+
+impl<'a> PartialEq for TokenTreeHelper<'a> {
+    fn eq(&self, other: &Self) -> bool {
+        use proc_macro2::Spacing;
+
+        match (self.0, other.0) {
+            (TokenTree::Group(g1), TokenTree::Group(g2)) => {
+                match (g1.delimiter(), g2.delimiter()) {
+                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
+                    | (Delimiter::Brace, Delimiter::Brace)
+                    | (Delimiter::Bracket, Delimiter::Bracket)
+                    | (Delimiter::None, Delimiter::None) => {}
+                    _ => return false,
+                }
+
+                let s1 = g1.stream().into_iter();
+                let mut s2 = g2.stream().into_iter();
+
+                for item1 in s1 {
+                    let item2 = match s2.next() {
+                        Some(item) => item,
+                        None => return false,
+                    };
+                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
+                        return false;
+                    }
+                }
+                s2.next().is_none()
+            }
+            (TokenTree::Punct(o1), TokenTree::Punct(o2)) => {
+                o1.as_char() == o2.as_char()
+                    && match (o1.spacing(), o2.spacing()) {
+                        (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
+                        _ => false,
+                    }
+            }
+            (TokenTree::Literal(l1), TokenTree::Literal(l2)) => l1.to_string() == l2.to_string(),
+            (TokenTree::Ident(s1), TokenTree::Ident(s2)) => s1 == s2,
+            _ => false,
+        }
+    }
+}
+
+impl<'a> Hash for TokenTreeHelper<'a> {
+    fn hash<H: Hasher>(&self, h: &mut H) {
+        use proc_macro2::Spacing;
+
+        match self.0 {
+            TokenTree::Group(g) => {
+                0u8.hash(h);
+                match g.delimiter() {
+                    Delimiter::Parenthesis => 0u8.hash(h),
+                    Delimiter::Brace => 1u8.hash(h),
+                    Delimiter::Bracket => 2u8.hash(h),
+                    Delimiter::None => 3u8.hash(h),
+                }
+
+                for item in g.stream() {
+                    TokenTreeHelper(&item).hash(h);
+                }
+                0xFFu8.hash(h); // terminator w/ a variant we don't normally hash
+            }
+            TokenTree::Punct(op) => {
+                1u8.hash(h);
+                op.as_char().hash(h);
+                match op.spacing() {
+                    Spacing::Alone => 0u8.hash(h),
+                    Spacing::Joint => 1u8.hash(h),
+                }
+            }
+            TokenTree::Literal(lit) => (2u8, lit.to_string()).hash(h),
+            TokenTree::Ident(word) => (3u8, word).hash(h),
+        }
+    }
+}
+
+pub(crate) struct TokenStreamHelper<'a>(pub &'a TokenStream);
+
+impl<'a> PartialEq for TokenStreamHelper<'a> {
+    fn eq(&self, other: &Self) -> bool {
+        let left = self.0.clone().into_iter().collect::<Vec<_>>();
+        let right = other.0.clone().into_iter().collect::<Vec<_>>();
+        if left.len() != right.len() {
+            return false;
+        }
+        for (a, b) in left.into_iter().zip(right) {
+            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
+                return false;
+            }
+        }
+        true
+    }
+}
+
+impl<'a> Hash for TokenStreamHelper<'a> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        let tts = self.0.clone().into_iter().collect::<Vec<_>>();
+        tts.len().hash(state);
+        for tt in tts {
+            TokenTreeHelper(&tt).hash(state);
+        }
+    }
+}
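For context (not part of the patch itself): the two helpers above give `proc_macro2` token trees and token streams span-insensitive equality and hashing, comparing only delimiters, punctuation characters and spacing, identifiers, and literal spellings. The sketch below is a minimal, hypothetical illustration of the pattern they enable; the `Verbatim` type is invented for this example, and the code assumes it sits inside the `syn` crate itself, since `TokenStreamHelper` is `pub(crate)` in `crate::tt`.

    // Illustrative sketch only (not from the patch): a hypothetical AST node
    // holding raw tokens, given structural PartialEq/Hash via the helpers.
    // Assumes this module lives inside the syn crate, where the pub(crate)
    // items of crate::tt are reachable.
    use crate::tt::TokenStreamHelper;
    use proc_macro2::TokenStream;
    use std::hash::{Hash, Hasher};

    pub struct Verbatim {
        pub tokens: TokenStream,
    }

    impl PartialEq for Verbatim {
        fn eq(&self, other: &Self) -> bool {
            // Compares token structure (delimiters, idents, puncts, literal
            // spellings); spans are ignored entirely.
            TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
        }
    }

    impl Hash for Verbatim {
        fn hash<H: Hasher>(&self, state: &mut H) {
            // Hashes the stream length plus each token tree, mirroring the
            // eq logic so that equal values hash identically.
            TokenStreamHelper(&self.tokens).hash(state);
        }
    }

Building the `Hash` impl from the same helper as `PartialEq` preserves the usual invariant that equal values hash equally, which is what the discriminant and terminator bytes hashed in `tt.rs` are there to guarantee.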