add binnpat, binnpats for parsing patterns

parent 22bdffa92f
commit 2d5b27d423
@@ -9,18 +9,17 @@ use std::fs;
 use std::fs::{File, OpenOptions};
 use std::ops::{Deref, DerefMut};

-use binnpatch::{ApplyPatch, BytesPattern, FindPattern};
-use binnpatch_macro::binnvecs;
+use binnpatch::{ApplyPatch, FindPattern};
+use binnpatch_macro::{binnpats, binnvecs};

 fn main() {
     let src_file = "Some.dll";
     let dst_file = "Some.dll.patched";
-    let src_pattern = "40 3E 1D ?? ?? 12 1C 7C 48 ?? 73 6F 02 22 ?? 61 19 4E 13 60 48 45 19 27 5B";
+    let pattern =
+        binnpats!("40 3E 1D ?? ?? 12 1C 7C 48 ?? 73 6F 02 22 ?? 61 19 4E 13 60 48 45 19 27 5B");
     let replacement =
         binnvecs!("06 5A 18 74 2D 62 12 6A 13 4A 2B 0E 6F 0F 36 7A 28 0A 37 67 0A 4B 01 73 14");

-    let pattern = src_pattern.parse().unwrap();
-
     let file = File::open(src_file).expect("src open");

     let src_map = unsafe { Mmap::map(&file) }.expect("src map");
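With this change the example builds its pattern at compile time instead of parsing the string at runtime. For a shortened, hypothetical pattern the two forms compare roughly as follows; this is a sketch inferred from the expansion that binnpat generates in macro/src/lib.rs below (the old .parse() target is assumed to be binnpatch::BytesPattern, which the dropped import suggests), not code taken from the commit:

    // Before: the pattern string was parsed at runtime (fallible, hence .unwrap()).
    let pattern: binnpatch::BytesPattern = "40 3E ?? 5B".parse().unwrap();

    // After: binnpats! expands at compile time to roughly this constructor call,
    // pairing the byte values with a wildcard mask (true marks a ?? position).
    let pattern = binnpatch::BytesPattern::from((
        vec![0x40u8, 0x3Eu8, 0u8, 0x5Bu8],
        vec![false, false, true, false],
    ));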
macro/src/lib.rs (148 changed lines)
@@ -4,7 +4,7 @@ use proc_macro::{TokenStream, TokenTree};
 use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span};

 macro_rules! forest {
-    ($($x:expr),+) => {[$( TokenTree::from($x), )+]};
+    ($($x:expr),+) => {[$( TokenTree::from($x.clone()), )+]};
 }

 fn parse_hex_values(items: TokenStream) -> TokenStream {
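The added .clone() lets the same non-Copy token (for example vec_ident, macro_marc, or sep_colon in binnpat further down) be passed to forest! several times in one call. A standalone analogue of the move problem it avoids, using a plain String in place of a proc_macro token:

    fn main() {
        let s = String::from("x");
        // let pair = [s, s];              // error[E0382]: use of moved value
        let pair = [s.clone(), s.clone()]; // the reuse that forest!'s new .clone() allows
        let _ = pair;
    }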
@@ -69,6 +69,92 @@ fn parse_hex_values(items: TokenStream) -> TokenStream {
     new_stream
 }

+fn parse_hex_pattern(items: TokenStream) -> (TokenStream, TokenStream) {
+    let mut vec_tokens: Vec<TokenTree> = vec![];
+    let mut mask_tokens: Vec<TokenTree> = vec![];
+    let comma: Punct = Punct::new(',', Spacing::Alone);
+    let zero = Literal::u8_suffixed(0);
+    let i_true = Ident::new("true", Span::call_site());
+    let i_false = Ident::new("false", Span::call_site());
+    let mut prev_separated: bool = true;
+    let mut prev_is_mask: bool = false;
+
+    for token in items {
+        match token {
+            TokenTree::Punct(ref x) if x.as_char() == ',' => {
+                vec_tokens.push(token.clone());
+                mask_tokens.push(token);
+                prev_separated = true;
+                continue;
+            }
+            TokenTree::Punct(ref x) if x.as_char() == '?' => {
+                if prev_is_mask {
+                    if !prev_separated {
+                        vec_tokens.push(comma.clone().into());
+                        mask_tokens.push(comma.clone().into());
+                    }
+                    let mut val = zero.clone();
+                    val.set_span(x.span());
+                    vec_tokens.push(val.into());
+
+                    let mut mask_mark = i_true.clone();
+                    mask_mark.set_span(x.span());
+                    mask_tokens.push(mask_mark.into());
+
+                    prev_is_mask = false;
+                    prev_separated = false;
+                } else if !prev_is_mask {
+                    prev_is_mask = true;
+                }
+                continue;
+            }
+            TokenTree::Ident(ref x) if x.to_string().len() == 2 => {
+                if !prev_separated {
+                    vec_tokens.push(comma.clone().into());
+                    mask_tokens.push(comma.clone().into());
+                }
+                let lit_str = format!("0x{}u8", x);
+                let mut val: Literal = lit_str.as_str().parse().expect("parse literal from ident");
+                val.set_span(x.span());
+                vec_tokens.push(val.into());
+
+                let mut mask_mark = i_false.clone();
+                mask_mark.set_span(x.span());
+                mask_tokens.push(mask_mark.into());
+            }
+            TokenTree::Literal(ref x) => {
+                if !prev_separated {
+                    vec_tokens.push(comma.clone().into());
+                    mask_tokens.push(comma.clone().into());
+                }
+
+                let mut mask_mark = i_false.clone();
+                mask_mark.set_span(x.span());
+
+                let in_str = x.to_string();
+                if in_str.starts_with("0x") {
+                    vec_tokens.push(token);
+                } else {
+                    let lit_str = format!("0x{}u8", in_str);
+                    let mut val: Literal = lit_str.as_str().parse().expect("parse literal");
+                    val.set_span(x.span());
+                    vec_tokens.push(val.into());
+                }
+
+                mask_tokens.push(mask_mark.into());
+            }
+            _ => {}
+        }
+        prev_separated = false;
+    }
+    let mut vec_stream = TokenStream::new();
+    let mut mask_stream = TokenStream::new();
+    vec_stream.extend(vec_tokens);
+    mask_stream.extend(mask_tokens);
+
+    (vec_stream, mask_stream)
+}
+
 #[proc_macro]
 pub fn binnvec(_item: TokenStream) -> TokenStream {
     let group_elements = parse_hex_values(_item);
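As a quick illustration of the two parallel streams parse_hex_pattern emits (hypothetical input, not part of the commit): for the pattern tokens 40 3E ?? 7C the value stream and the mask stream correspond to the hand-written literals below.

    fn main() {
        // Equivalent of the two streams produced for `40 3E ?? 7C`:
        let bytes: Vec<u8> = vec![0x40u8, 0x3Eu8, 0u8, 0x7Cu8]; // each ?? becomes a 0 placeholder
        let mask: Vec<bool> = vec![false, false, true, false];  // true marks the wildcard position
        assert_eq!(bytes.len(), mask.len());
    }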
@@ -83,8 +169,48 @@ pub fn binnvec(_item: TokenStream) -> TokenStream {
 }

 #[proc_macro]
-pub fn binnvecs(_item: TokenStream) -> TokenStream {
-    let first = _item.clone().into_iter().next();
+pub fn binnpat(_item: TokenStream) -> TokenStream {
+    let (vec_elements, mask_elements) = parse_hex_pattern(_item);
+
+    let comma: Punct = Punct::new(',', Spacing::Alone);
+    let vec_group = Group::new(Delimiter::Bracket, vec_elements);
+    let mask_group = Group::new(Delimiter::Bracket, mask_elements);
+
+    let vec_ident = Ident::new("vec", Span::call_site());
+    let macro_marc = Punct::new('!', Spacing::Joint);
+
+    let mut tupple_elements = TokenStream::new();
+    tupple_elements.extend(forest!(
+        vec_ident, macro_marc, vec_group, comma, vec_ident, macro_marc, mask_group
+    ));
+    let tupple_group = Group::new(Delimiter::Parenthesis, tupple_elements);
+
+    let mut param_elements = TokenStream::new();
+    param_elements.extend(forest!(tupple_group));
+    let param_group = Group::new(Delimiter::Parenthesis, param_elements);
+
+    let sep_colon = Punct::new(':', Spacing::Joint);
+    let i_lib = Ident::new("binnpatch", Span::call_site());
+    let i_struct = Ident::new("BytesPattern", Span::call_site());
+    let i_from = Ident::new("from", Span::call_site());
+
+    let mut result = TokenStream::new();
+    result.extend(forest!(
+        i_lib,
+        sep_colon,
+        sep_colon,
+        i_struct,
+        sep_colon,
+        sep_colon,
+        i_from,
+        param_group
+    ));
+
+    result
+}
+
+fn unquote_str(items: TokenStream) -> TokenStream {
+    let first = items.clone().into_iter().next();

     match first {
         Some(TokenTree::Literal(lit)) => {
@@ -103,10 +229,22 @@ pub fn binnvecs(_item: TokenStream) -> TokenStream {
                     lit_str.replace(format!("{}E", x).as_str(), format!("0x{}E", x).as_str());
             });
             let stream: TokenStream = lit_str.parse().unwrap();
-            binnvec(stream)
+            stream
         }
         _ => {
-            panic!("invalid item: \"{}\"", _item);
+            panic!("invalid item: \"{}\"", items);
         }
     }
 }
+
+#[proc_macro]
+pub fn binnvecs(_item: TokenStream) -> TokenStream {
+    let stream = unquote_str(_item);
+    binnvec(stream)
+}
+
+#[proc_macro]
+pub fn binnpats(_item: TokenStream) -> TokenStream {
+    let stream = unquote_str(_item);
+    binnpat(stream)
+}
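Taken together, the string-taking macros now share one path: binnvecs! and binnpats! strip the surrounding quotes with unquote_str and then delegate to binnvec and binnpat respectively. A usage sketch with short, hypothetical inputs (it assumes the binnpatch / binnpatch_macro crates from the example above; binnvecs! is presumed to yield a plain byte vector via binnvec):

    use binnpatch_macro::{binnpats, binnvecs};

    fn main() {
        // binnpats! builds a binnpatch::BytesPattern (byte values plus wildcard mask).
        let pattern = binnpats!("40 3E ?? 5B");
        // binnvecs! is presumed to build a Vec<u8> of the listed bytes.
        let replacement = binnvecs!("06 5A 18 74");
        let _ = (pattern, replacement);
    }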