//! binnpatch/macro/src/lib.rs
//!
//! Procedural macros for building byte vectors (`binnvec!`/`binnvecs!`) and
//! byte patterns with wildcard masks (`binnpat!`/`binnpats!`) from hex token
//! lists or string literals.
// (original listing metadata: 247 lines, 8.5 KiB, Rust)
extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span};
// Builds a fixed-size array of `TokenTree`s from a comma-separated list of
// token-like expressions. Each argument is cloned and converted through
// `TokenTree::from`, so `Ident`, `Punct`, `Group`, and `Literal` values can
// be mixed freely in one call (see `binnvec`/`binnpat` below).
macro_rules! forest {
($($x:expr),+) => {[$( TokenTree::from($x.clone()), )+]};
}
/// Normalizes a loose list of hex byte tokens into a comma-separated
/// sequence of `u8` literals suitable for a `vec![...]` body.
///
/// Accepted input tokens:
/// * `,` — passed through unchanged and remembered, so no duplicate
///   separator is inserted before the next value.
/// * `??` — wildcard byte pair, emitted as the placeholder `0u8`.
///   A lone `?` merely arms the wildcard flag; the flag is not cleared by
///   other tokens, so malformed input such as `DE ? AD ?` is accepted
///   rather than rejected (pre-existing behavior, kept as-is).
/// * two-character identifiers (hex bytes that lex as idents, e.g. `DE`)
///   — rewritten to `0xDEu8`.
/// * literals — passed through when already `0x`-prefixed, otherwise
///   rewritten to `0x{literal}u8` (e.g. `01` -> `0x01u8`).
/// * anything else is silently dropped.
fn parse_hex_values(items: TokenStream) -> TokenStream {
    let mut new_tokens: Vec<TokenTree> = vec![];
    let comma: Punct = Punct::new(',', Spacing::Alone);
    let zero = Literal::u8_suffixed(0);
    // True while the next value does not need a separating comma.
    let mut prev_separated: bool = true;
    // True after the first `?` of a prospective `??` pair.
    let mut prev_is_mask: bool = false;
    for token in items {
        match token {
            TokenTree::Punct(ref x) if x.as_char() == ',' => {
                new_tokens.push(token);
                prev_separated = true;
                continue;
            }
            TokenTree::Punct(ref x) if x.as_char() == '?' => {
                if prev_is_mask {
                    // Second `?` completes the wildcard pair.
                    if !prev_separated {
                        new_tokens.push(comma.clone().into());
                    }
                    let mut val = zero.clone();
                    val.set_span(x.span());
                    // BUGFIX: emit the span-adjusted literal. The original
                    // built `val`, set its span, and then pushed a plain
                    // `zero` clone, discarding the `?` token's source
                    // location from the generated literal.
                    new_tokens.push(val.into());
                    prev_is_mask = false;
                    prev_separated = false;
                } else {
                    // First `?`: arm the flag and wait for its twin.
                    prev_is_mask = true;
                }
                continue;
            }
            TokenTree::Ident(ref x) if x.to_string().len() == 2 => {
                if !prev_separated {
                    new_tokens.push(comma.clone().into());
                }
                let lit_str = format!("0x{}u8", x);
                let mut val: Literal = lit_str.as_str().parse().expect("parse literal from ident");
                val.set_span(x.span());
                new_tokens.push(val.into());
            }
            TokenTree::Literal(ref x) => {
                if !prev_separated {
                    new_tokens.push(comma.clone().into());
                }
                let in_str = x.to_string();
                if in_str.starts_with("0x") {
                    // Already a hex literal; keep it verbatim.
                    new_tokens.push(token);
                } else {
                    let lit_str = format!("0x{}u8", in_str);
                    let mut val: Literal = lit_str.as_str().parse().expect("parse literal");
                    val.set_span(x.span());
                    new_tokens.push(val.into());
                }
            }
            // Unknown tokens are silently ignored (pre-existing behavior).
            _ => {}
        }
        prev_separated = false;
    }
    let mut new_stream = TokenStream::new();
    new_stream.extend(new_tokens);
    new_stream
}
/// Like `parse_hex_values`, but additionally produces a parallel mask
/// stream: for every emitted byte, the mask stream receives `true` when
/// the byte came from a `??` wildcard pair and `false` otherwise.
///
/// Returns `(byte_tokens, mask_tokens)` — two comma-separated token
/// streams of equal element count, each suitable for a `vec![...]` body.
fn parse_hex_pattern(items: TokenStream) -> (TokenStream, TokenStream) {
    let mut vec_tokens: Vec<TokenTree> = vec![];
    let mut mask_tokens: Vec<TokenTree> = vec![];
    let comma: Punct = Punct::new(',', Spacing::Alone);
    let zero = Literal::u8_suffixed(0);
    let i_true = Ident::new("true", Span::call_site());
    let i_false = Ident::new("false", Span::call_site());
    // True while the next value does not need a separating comma.
    let mut prev_separated: bool = true;
    // True after the first `?` of a prospective `??` pair.
    let mut prev_is_mask: bool = false;
    for token in items {
        match token {
            TokenTree::Punct(ref x) if x.as_char() == ',' => {
                vec_tokens.push(token.clone());
                mask_tokens.push(token);
                prev_separated = true;
                continue;
            }
            TokenTree::Punct(ref x) if x.as_char() == '?' => {
                if prev_is_mask {
                    // Second `?` completes the wildcard pair: placeholder
                    // byte plus a `true` mask entry.
                    if !prev_separated {
                        vec_tokens.push(comma.clone().into());
                        mask_tokens.push(comma.clone().into());
                    }
                    let mut val = zero.clone();
                    val.set_span(x.span());
                    // BUGFIX: emit the span-adjusted literal. The original
                    // built `val`, set its span, and then pushed a plain
                    // `zero` clone, discarding the `?` token's source
                    // location from the generated literal.
                    vec_tokens.push(val.into());
                    let mut mask_mark = i_true.clone();
                    mask_mark.set_span(x.span());
                    mask_tokens.push(mask_mark.into());
                    prev_is_mask = false;
                    prev_separated = false;
                } else {
                    // First `?`: arm the flag and wait for its twin.
                    prev_is_mask = true;
                }
                continue;
            }
            TokenTree::Ident(ref x) if x.to_string().len() == 2 => {
                if !prev_separated {
                    vec_tokens.push(comma.clone().into());
                    mask_tokens.push(comma.clone().into());
                }
                let lit_str = format!("0x{}u8", x);
                let mut val: Literal = lit_str.as_str().parse().expect("parse literal from ident");
                val.set_span(x.span());
                vec_tokens.push(val.into());
                let mut mask_mark = i_false.clone();
                mask_mark.set_span(x.span());
                mask_tokens.push(mask_mark.into());
            }
            TokenTree::Literal(ref x) => {
                if !prev_separated {
                    vec_tokens.push(comma.clone().into());
                    mask_tokens.push(comma.clone().into());
                }
                let mut mask_mark = i_false.clone();
                mask_mark.set_span(x.span());
                let in_str = x.to_string();
                if in_str.starts_with("0x") {
                    // Already a hex literal; keep it verbatim.
                    vec_tokens.push(token);
                } else {
                    let lit_str = format!("0x{}u8", in_str);
                    let mut val: Literal = lit_str.as_str().parse().expect("parse literal");
                    val.set_span(x.span());
                    vec_tokens.push(val.into());
                }
                mask_tokens.push(mask_mark.into());
            }
            // Unknown tokens are silently ignored (pre-existing behavior).
            _ => {}
        }
        prev_separated = false;
    }
    let mut vec_stream = TokenStream::new();
    let mut mask_stream = TokenStream::new();
    vec_stream.extend(vec_tokens);
    mask_stream.extend(mask_tokens);
    (vec_stream, mask_stream)
}
#[proc_macro]
pub fn binnvec(_item: TokenStream) -> TokenStream {
let group_elements = parse_hex_values(_item);
let group = Group::new(Delimiter::Bracket, group_elements);
let mut result = TokenStream::new();
let vec_ident = Ident::new("vec", Span::call_site());
let macro_marc = Punct::new('!', Spacing::Joint);
result.extend(forest!(vec_ident, macro_marc, group));
result
}
#[proc_macro]
pub fn binnpat(_item: TokenStream) -> TokenStream {
let (vec_elements, mask_elements) = parse_hex_pattern(_item);
let comma: Punct = Punct::new(',', Spacing::Alone);
let vec_group = Group::new(Delimiter::Bracket, vec_elements);
let mask_group = Group::new(Delimiter::Bracket, mask_elements);
let vec_ident = Ident::new("vec", Span::call_site());
let macro_marc = Punct::new('!', Spacing::Joint);
let mut tupple_elements = TokenStream::new();
tupple_elements.extend(forest!(
vec_ident, macro_marc, vec_group, comma, vec_ident, macro_marc, mask_group
));
let tupple_group = Group::new(Delimiter::Parenthesis, tupple_elements);
let mut param_elements = TokenStream::new();
param_elements.extend(forest!(tupple_group));
let param_group = Group::new(Delimiter::Parenthesis, param_elements);
let sep_colon = Punct::new(':', Spacing::Joint);
let i_struct = Ident::new("BytesPattern", Span::call_site());
let i_from = Ident::new("from", Span::call_site());
let mut result = TokenStream::new();
result.extend(forest!(
i_struct,
sep_colon,
sep_colon,
i_from,
param_group
));
result
}
/// Strips the surrounding quotes from a string-literal argument and
/// re-lexes its contents as a token stream.
///
/// Bare hex pairs such as `1E` would otherwise lex as malformed float
/// exponent literals, so every `<digit>E` occurrence is rewritten to
/// `0x<digit>E` before re-parsing.
/// NOTE(review): only uppercase `E` preceded by a single decimal digit is
/// handled — presumably patterns are written in uppercase hex; confirm.
///
/// Panics if the first token is not a `"`-delimited string literal or if
/// the rewritten contents fail to lex.
fn unquote_str(items: TokenStream) -> TokenStream {
    match items.clone().into_iter().next() {
        Some(TokenTree::Literal(lit)) => {
            let raw = lit.to_string();
            if !(raw.starts_with('\"') && raw.ends_with('\"')) {
                panic!("unexpected literal: \"{}\"", raw);
            }
            let mut inner = raw.trim_matches('"').to_string();
            // Prefix each `<digit>E` pair with `0x` so it lexes cleanly.
            for digit in 0u8..=9 {
                let needle = format!("{}E", digit);
                if inner.contains(needle.as_str()) {
                    inner = inner.replace(needle.as_str(), format!("0x{}E", digit).as_str());
                }
            }
            inner.parse().unwrap()
        }
        _ => panic!("invalid item: \"{}\"", items),
    }
}
/// String-argument variant of `binnvec!`: accepts the byte list as a
/// quoted string literal, e.g. `binnvecs!("DE AD 01")`.
#[proc_macro]
pub fn binnvecs(_item: TokenStream) -> TokenStream {
    binnvec(unquote_str(_item))
}
/// String-argument variant of `binnpat!`: accepts the pattern as a
/// quoted string literal, e.g. `binnpats!("DE ?? 01")`.
#[proc_macro]
pub fn binnpats(_item: TokenStream) -> TokenStream {
    binnpat(unquote_str(_item))
}