macro: fix 0E decode

binnvecs! for str arg
This commit is contained in:
Dmitry Belyaev 2022-08-27 22:28:51 +03:00
parent 4c8ce5c648
commit 46526d7c27
Signed by: b4tman
GPG Key ID: 41A00BF15EA7E5F3
2 changed files with 117 additions and 97 deletions

View File

@ -2,7 +2,6 @@ extern crate binnpatch;
extern crate data_encoding;
extern crate memmap;
extern crate binnpatch_macro;
use data_encoding::HEXUPPER;
@ -11,15 +10,15 @@ use std::fs;
use std::fs::{File, OpenOptions};
use std::ops::{Deref, DerefMut};
use binnpatch::{BytesPattern, FindPattern, ApplyPatch};
use binnpatch_macro::binnvec;
use binnpatch::{ApplyPatch, BytesPattern, FindPattern};
use binnpatch_macro::binnvecs;
fn main() {
let src_file = "Some.dll";
let dst_file = "Some.dll.patched";
let src_pattern =
"40 3E 1D ?? ?? 12 1C 7C 48 ?? 73 6F 02 22 ?? 61 19 4E 13 60 48 45 19 27 5B";
let replacement = binnvec!(06 5A 18 74 2D 62 12 6A 13 4A 2B 0E 6F 0F 36 7A 28 0A 37 67 0A 4B 01 73 0x14);
let src_pattern = "40 3E 1D ?? ?? 12 1C 7C 48 ?? 73 6F 02 22 ?? 61 19 4E 13 60 48 45 19 27 5B";
let replacement =
binnvecs!("06 5A 18 74 2D 62 12 6A 13 4A 2B 0E 6F 0F 36 7A 28 0A 37 67 0A 4B 01 73 14");
let pattern = BytesPattern::from(src_pattern);

View File

@ -1,91 +1,112 @@
extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};
use proc_macro::{Punct, Literal, Spacing, Span, Ident, Group, Delimiter};
/// Parses `s` into a `TokenStream`, panicking if it is not valid token syntax.
fn parse_str(s: &str) -> TokenStream {
    let stream: TokenStream = s.parse().unwrap();
    stream
}
/// Expands space/comma-separated hex byte tokens (e.g. `binnvec!(06 5A ?? 0x14)`)
/// into a `vec![...]` expression of `u8` literals.
///
/// * `??`              -> `0u8` (wildcard byte)
/// * two-char ident    -> `0x<ident>u8` (e.g. `5A` -> `0x5Au8`)
/// * `0x..` literal    -> passed through unchanged
/// * other literal     -> `0x<literal>u8` (e.g. `06` -> `0x06u8`)
///
/// Explicit commas in the input are kept; missing separators are inserted.
#[proc_macro]
pub fn binnvec(_item: TokenStream) -> TokenStream {
    let mut new_tokens: Vec<TokenTree> = vec![];
    let comma: Punct = Punct::new(',', Spacing::Alone);
    let zero = Literal::u8_suffixed(0);
    // True when the previous token already provided a separator.
    let mut prev_separated: bool = true;
    // True after seeing the first `?` of a `??` pair.
    let mut prev_is_mask: bool = false;
    for token in _item {
        match token {
            TokenTree::Punct(ref x) if x.as_char() == ',' => {
                new_tokens.push(token);
                prev_separated = true;
                continue;
            }
            TokenTree::Punct(ref x) if x.as_char() == '?' => {
                if prev_is_mask {
                    // Second `?` of `??`: emit a zero byte.
                    if !prev_separated {
                        new_tokens.push(comma.clone().into());
                    }
                    let mut val = zero.clone();
                    val.set_span(x.span());
                    // BUGFIX: push the span-carrying literal; previously the
                    // bare `zero` was pushed and `val` was discarded unused.
                    new_tokens.push(val.into());
                    prev_is_mask = false;
                    prev_separated = false;
                } else {
                    prev_is_mask = true;
                }
                continue;
            }
            TokenTree::Ident(ref x) if x.to_string().len() == 2 => {
                if !prev_separated {
                    new_tokens.push(comma.clone().into());
                }
                let lit_str = format!("0x{}u8", x);
                let mut val: Literal =
                    lit_str.as_str().parse().expect("parse literal from ident");
                val.set_span(x.span());
                new_tokens.push(val.into());
            }
            TokenTree::Literal(ref x) => {
                if !prev_separated {
                    new_tokens.push(comma.clone().into());
                }
                let in_str = x.to_string();
                if in_str.starts_with("0x") {
                    new_tokens.push(token);
                } else {
                    let lit_str = format!("0x{}u8", in_str);
                    let mut val: Literal =
                        lit_str.as_str().parse().expect("parse literal");
                    val.set_span(x.span());
                    new_tokens.push(val.into());
                }
            }
            _ => {}
        }
        prev_separated = false;
    }
    // Wrap the byte list as `vec![ ... ]`.
    let mut ts_new = TokenStream::new();
    ts_new.extend(new_tokens);
    let group = Group::new(Delimiter::Bracket, ts_new);
    let mut result = TokenStream::new();
    result.extend([
        TokenTree::from(Ident::new("vec", Span::call_site())),
        TokenTree::from(Punct::new('!', Spacing::Joint)),
        TokenTree::from(group),
    ]);
    result
}
extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span};
/// Builds a fixed-size array of `TokenTree`s from any `From`-convertible values.
macro_rules! forest {
    ($($x:expr),+) => {
        [$(TokenTree::from($x)),+]
    };
}
/// Converts a stream of hex byte tokens (`06`, `5A`, `0x14`, `??`) into a
/// comma-separated stream of `u8` literals suitable for a `vec![...]` body.
///
/// * `??`              -> `0u8` (wildcard byte)
/// * two-char ident    -> `0x<ident>u8` (e.g. `5A` -> `0x5Au8`)
/// * `0x..` literal    -> passed through unchanged
/// * other literal     -> `0x<literal>u8` (e.g. `06` -> `0x06u8`)
///
/// Explicit commas in the input are kept; missing separators are inserted.
fn parse_hex_values(items: TokenStream) -> TokenStream {
    let mut new_tokens: Vec<TokenTree> = vec![];
    let comma: Punct = Punct::new(',', Spacing::Alone);
    let zero = Literal::u8_suffixed(0);
    // True when the previous token already provided a separator.
    let mut prev_separated: bool = true;
    // True after seeing the first `?` of a `??` pair.
    let mut prev_is_mask: bool = false;
    for token in items {
        match token {
            TokenTree::Punct(ref x) if x.as_char() == ',' => {
                new_tokens.push(token);
                prev_separated = true;
                continue;
            }
            TokenTree::Punct(ref x) if x.as_char() == '?' => {
                if prev_is_mask {
                    // Second `?` of `??`: emit a zero byte.
                    if !prev_separated {
                        new_tokens.push(comma.clone().into());
                    }
                    let mut val = zero.clone();
                    val.set_span(x.span());
                    // BUGFIX: push the span-carrying literal; previously the
                    // bare `zero` was pushed and `val` was discarded unused.
                    new_tokens.push(val.into());
                    prev_is_mask = false;
                    prev_separated = false;
                } else {
                    prev_is_mask = true;
                }
                continue;
            }
            TokenTree::Ident(ref x) if x.to_string().len() == 2 => {
                if !prev_separated {
                    new_tokens.push(comma.clone().into());
                }
                let lit_str = format!("0x{}u8", x);
                let mut val: Literal =
                    lit_str.as_str().parse().expect("parse literal from ident");
                val.set_span(x.span());
                new_tokens.push(val.into());
            }
            TokenTree::Literal(ref x) => {
                if !prev_separated {
                    new_tokens.push(comma.clone().into());
                }
                let in_str = x.to_string();
                if in_str.starts_with("0x") {
                    new_tokens.push(token);
                } else {
                    let lit_str = format!("0x{}u8", in_str);
                    let mut val: Literal =
                        lit_str.as_str().parse().expect("parse literal");
                    val.set_span(x.span());
                    new_tokens.push(val.into());
                }
            }
            _ => {}
        }
        prev_separated = false;
    }
    let mut new_stream = TokenStream::new();
    new_stream.extend(new_tokens);
    new_stream
}
#[proc_macro]
pub fn binnvec(_item: TokenStream) -> TokenStream {
let group_elements = parse_hex_values(_item);
let group = Group::new(Delimiter::Bracket, group_elements);
let mut result = TokenStream::new();
let vec_ident = Ident::new("vec", Span::call_site());
let macro_marc = Punct::new('!', Spacing::Joint);
result.extend(forest!(vec_ident, macro_marc, group));
result
}
/// Like `binnvec!` but takes the byte pattern as a single string literal,
/// e.g. `binnvecs!("06 5A 0E 14")`.
///
/// A two-character token consisting of a digit followed by `E`/`e` (such as
/// `0E`) does not lex as a standalone Rust token — a digit followed by `e`
/// starts a float exponent — so such pairs are rewritten to their
/// `0x`-prefixed form before the string is re-parsed into a token stream.
/// Lowercase `e` is handled too, since `0e` fails to lex the same way.
#[proc_macro]
pub fn binnvecs(_item: TokenStream) -> TokenStream {
    let first = _item.clone().into_iter().next();
    match first {
        Some(TokenTree::Literal(lit)) => {
            let lit_str = lit.to_string();
            // Only plain `"..."` string literals are supported.
            if !(lit_str.starts_with('\"') && lit_str.ends_with('\"')) {
                panic!("unexpected literal: \"{}\"", lit_str);
            }
            let mut fixed = lit_str.trim_matches('"').to_string();
            // Prefix every `<digit>E` / `<digit>e` pair with `0x` so it lexes
            // as a hex literal instead of a malformed float exponent.
            // NOTE(review): like the original replace-based fix, this would
            // also rewrite a `<digit>E` pair already preceded by `0x`;
            // patterns are expected to use bare two-char byte tokens.
            for d in b'0'..=b'9' {
                for e in ['E', 'e'] {
                    let needle = format!("{}{}", d as char, e);
                    if fixed.contains(&needle) {
                        fixed = fixed.replace(&needle, &format!("0x{}", needle));
                    }
                }
            }
            let stream: TokenStream = fixed.parse().unwrap();
            binnvec(stream)
        }
        _ => {
            panic!("invalid item: \"{}\"", _item);
        }
    }
}