14 Commits

Author SHA1 Message Date
5f5b299eb3 Merge branch 'master' into postcard
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-26 13:56:30 +03:00
1b88db07be add questions benches
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-26 13:42:50 +03:00
49a214283c Merge branch 'master' into postcard
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-25 15:32:42 +03:00
c4b68cc727 apps: handle err's
All checks were successful
continuous-integration/drone/push Build is passing
+ ord fn's
2023-08-25 15:28:19 +03:00
f3197603f6 Merge branch 'master' into postcard
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-25 14:43:11 +03:00
9c5f05b6a9 app_async: fix print random question 2023-08-25 14:41:53 +03:00
bd67e3ee85 question: split struct for binary/text serialize
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-24 09:27:42 +03:00
e4001ee69f questions: remove "skip_serializing_if"
All checks were successful
continuous-integration/drone/push Build is passing
to fix ser/de to postcard

err "Hit the end of buffer, expected more data"
more info: https://github.com/apache/arrow-rs/issues/3082
2023-08-23 23:50:31 +03:00
85a879e9ec apps: don't use bincode 2023-08-23 22:40:54 +03:00
fad2d1d63d async db: postcard 2023-08-23 22:33:59 +03:00
cb781ccf76 postcard no features 2023-08-23 22:33:36 +03:00
7e6b513179 sync db: postcard 2023-08-23 21:40:07 +03:00
1d4005abdb postcard instead of bincode 2023-08-23 21:39:23 +03:00
760f6d9415 add async bench
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-23 13:30:51 +03:00
14 changed files with 1002 additions and 364 deletions

78
Cargo.lock generated
View File

@@ -207,25 +207,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a32fd6af2b5827bce66c29053ba0e7c42b9dcab01835835058558c10851a46b" checksum = "8a32fd6af2b5827bce66c29053ba0e7c42b9dcab01835835058558c10851a46b"
[[package]]
name = "bincode"
version = "2.0.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f11ea1a0346b94ef188834a65c068a03aec181c94896d481d7a0a40d85b0ce95"
dependencies = [
"bincode_derive",
"serde",
]
[[package]]
name = "bincode_derive"
version = "2.0.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e30759b3b99a1b802a7a3aa21c85c3ded5c28e1c83170d82d70f08bbf7f3e4c"
dependencies = [
"virtue",
]
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "1.3.2" version = "1.3.2"
@@ -311,10 +292,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
name = "chgk_ledb" name = "chgk_ledb"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"bincode",
"chgk_ledb_lib", "chgk_ledb_lib",
"clap 4.3.16", "clap 4.3.16",
"criterion", "criterion 0.4.0",
"rand", "rand",
"serde", "serde",
"serde_derive", "serde_derive",
@@ -327,11 +307,13 @@ dependencies = [
name = "chgk_ledb_async" name = "chgk_ledb_async"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"async-compression 0.4.1",
"async_zip", "async_zip",
"bincode",
"chgk_ledb_lib", "chgk_ledb_lib",
"clap 4.3.16", "clap 4.3.16",
"criterion 0.5.1",
"futures", "futures",
"lazy_static",
"rand", "rand",
"serde", "serde",
"serde_derive", "serde_derive",
@@ -348,7 +330,6 @@ dependencies = [
"async-compression 0.4.1", "async-compression 0.4.1",
"async-stream", "async-stream",
"async_zip", "async_zip",
"bincode",
"fmmap", "fmmap",
"futures", "futures",
"futures-core", "futures-core",
@@ -356,6 +337,7 @@ dependencies = [
"insta", "insta",
"memmap", "memmap",
"pin-project", "pin-project",
"postcard",
"serde", "serde",
"serde_derive", "serde_derive",
"serde_json", "serde_json",
@@ -464,6 +446,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
[[package]]
name = "cobs"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.0" version = "1.0.0"
@@ -532,6 +520,34 @@ dependencies = [
"walkdir", "walkdir",
] ]
[[package]]
name = "criterion"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
dependencies = [
"anes",
"cast",
"ciborium",
"clap 4.3.16",
"criterion-plot",
"futures",
"is-terminal",
"itertools",
"num-traits",
"once_cell",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
"serde_derive",
"serde_json",
"tinytemplate",
"tokio",
"walkdir",
]
[[package]] [[package]]
name = "criterion-plot" name = "criterion-plot"
version = "0.5.0" version = "0.5.0"
@@ -1217,6 +1233,16 @@ dependencies = [
"plotters-backend", "plotters-backend",
] ]
[[package]]
name = "postcard"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9ee729232311d3cd113749948b689627618133b1c5012b77342c1950b25eaeb"
dependencies = [
"cobs",
"serde",
]
[[package]] [[package]]
name = "ppv-lite86" name = "ppv-lite86"
version = "0.2.17" version = "0.2.17"
@@ -1670,12 +1696,6 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "virtue"
version = "0.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dcc60c0624df774c82a0ef104151231d37da4962957d691c011c852b2473314"
[[package]] [[package]]
name = "walkdir" name = "walkdir"
version = "2.3.3" version = "2.3.3"

View File

@@ -13,6 +13,10 @@ description = "Утилита загружающая базу данных ЧГ
name = "db_bench" name = "db_bench"
harness = false harness = false
[[bench]]
name = "questions_bench"
harness = false
[dependencies] [dependencies]
chgk_ledb_lib = {path = "../lib", features = ["sync", "source", "convert"]} chgk_ledb_lib = {path = "../lib", features = ["sync", "source", "convert"]}
serde_json="1.0" serde_json="1.0"
@@ -23,6 +27,5 @@ clap = { version = "4.2.7", features = ["derive"] }
[dev-dependencies] [dev-dependencies]
criterion = "0.4.0" criterion = "0.4.0"
tempfile = "3.3" tempfile = "3.3"
bincode = "^2.0.0-rc.2"
serde="1.0" serde="1.0"
serde_derive="1.0" serde_derive="1.0"

View File

@@ -1,6 +1,5 @@
#[macro_use] #[macro_use]
extern crate criterion; extern crate criterion;
extern crate bincode;
extern crate serde; extern crate serde;
extern crate serde_derive; extern crate serde_derive;
extern crate serde_json; extern crate serde_json;
@@ -16,18 +15,7 @@ use tempfile::{tempdir, NamedTempFile};
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
#[derive( #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
bincode::Encode,
bincode::Decode,
Clone,
Debug,
PartialEq,
Eq,
PartialOrd,
Ord,
Serialize,
Deserialize,
)]
struct TestData { struct TestData {
num1: u64, num1: u64,
num2: u64, num2: u64,

View File

@@ -0,0 +1,100 @@
#[macro_use]
extern crate criterion;
extern crate serde;
extern crate serde_derive;
extern crate serde_json;
extern crate tempfile;
use chgk_ledb_lib::db::{Reader, Writer, WriterOpts};
use chgk_ledb_lib::questions::{binary::Question, QuestionsConverter};
use chgk_ledb_lib::source::ReadSourceQuestionsBatches;
use std::path::Path;
use std::time::Duration;
use std::{fs, io};
use criterion::{BatchSize, Criterion};
use tempfile::{tempdir, NamedTempFile};
const ZIP_FILENAME: &str = "../json.zip";
const NEW_DB_FILENAME: &str = "../db.dat";
const N: usize = 4096;
/// Load the first `N` questions from the source zip archive (`ZIP_FILENAME`),
/// renumbering them sequentially starting at 1.
/// Panics if the archive cannot be opened or parsed; used as bench fixture data.
fn read_sample() -> Vec<Question> {
    let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
    let zip_reader = io::BufReader::new(zip_file);
    let archive = zip::ZipArchive::new(zip_reader).unwrap();
    let mut source_questions = archive.source_questions();
    source_questions
        .convert()
        .take(N)
        .enumerate()
        // `question.num` is 1-based; `enumerate()` is 0-based, hence the +1.
        .map(|(num, mut question)| {
            question.num = 1 + num as u32;
            question
        })
        .collect()
}
/// Construct a synchronous DB `Writer<Question>` at `path`, using the
/// fastest compression level (1) and large in-memory buffers.
/// Panics if the writer cannot be created.
fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
    const MIB: usize = 1024 * 1024;
    let writer_opts = WriterOpts {
        compress_lvl: 1,
        data_buf_size: 100 * MIB,
        out_buf_size: 100 * MIB,
        current_buf_size: 10240,
    };
    Writer::new(path, writer_opts).expect("new writer")
}
/// Benchmark: sequentially read the first `N` questions from the existing
/// database file (`NEW_DB_FILENAME`).
/// Reader construction happens in the setup closure so only iteration is timed.
fn questions_read(c: &mut Criterion) {
    c.bench_function("questions_read", |b| {
        b.iter_batched(
            || {
                let reader: Reader<Question> =
                    Reader::new(NEW_DB_FILENAME, 4096).expect("new reader");
                reader.into_iter().take(N)
            },
            |reader| {
                // Drain the iterator, dropping each item immediately, so only
                // the read/decode work is measured.
                for item in reader {
                    drop(item);
                }
            },
            BatchSize::SmallInput,
        )
    });
}
/// Benchmark: write the sample questions into a fresh temporary database file.
/// Sample loading and writer construction run in the setup closure; only
/// `load` + `finish` are timed.
fn questions_write(c: &mut Criterion) {
    let dir = tempdir().expect("tempdir");
    c.bench_function("questions_write", |b| {
        b.iter_batched(
            || {
                // Fresh temp file per batch so each run writes from scratch.
                let tmpfile = NamedTempFile::new_in(dir.path())
                    .expect("new tempfile")
                    .into_temp_path();
                let src = read_sample().into_iter();
                let writer = prepare_db_writer(&tmpfile);
                (src, writer)
            },
            |(mut src, mut writer)| {
                writer.load(&mut src).unwrap();
                writer.finish().unwrap();
            },
            BatchSize::SmallInput,
        )
    });
}
/// Criterion settings for these benches: 40 samples, 7 s warm-up,
/// 20 s measurement window.
fn config() -> Criterion {
    let cfg = Criterion::default().sample_size(40);
    let cfg = cfg.warm_up_time(Duration::from_secs(7));
    cfg.measurement_time(Duration::from_secs(20))
}
criterion_group! {name=benches; config = config(); targets = questions_read, questions_write}
criterion_main!(benches);

View File

@@ -10,8 +10,9 @@ use chgk_ledb_lib::db;
use chgk_ledb_lib::questions; use chgk_ledb_lib::questions;
use chgk_ledb_lib::source; use chgk_ledb_lib::source;
use crate::questions::{Question, QuestionsConverter}; use crate::questions::{binary::Question, QuestionsConverter};
use crate::source::ReadSourceQuestionsBatches; use crate::source::ReadSourceQuestionsBatches;
use chgk_ledb_lib::util::ErrorToString;
const ZIP_FILENAME: &str = "json.zip"; const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "db.dat"; const NEW_DB_FILENAME: &str = "db.dat";
@@ -41,70 +42,6 @@ struct Cli {
measure: bool, measure: bool,
} }
fn zip_reader_task(tx: mpsc::Sender<Question>) {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let questions = source_questions
.convert()
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
});
for question in questions {
let res = tx.send(question);
if res.is_err() {
break;
}
}
println!("read done");
}
fn print_question_from<F>(get_q: F)
where
F: FnOnce() -> Option<Question>,
{
let q = get_q().expect("question not found");
println!("{:#?}", q)
}
fn read_from_zip(file_num: usize, mut num: usize) -> Option<Question> {
let mut rng = rand::thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let (filename, batch) = if file_num == 0 {
source_questions.choose(&mut rng).unwrap()
} else {
source_questions.nth(file_num - 1).unwrap()
};
let mut batch = batch.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
if num == 0 {
num = (1..=questions.len()).choose(&mut rng).unwrap();
}
Some(questions[num - 1].clone())
}
// measure and return time elapsed in `func` in seconds
pub fn measure<F: FnOnce()>(func: F) -> f64 {
let start = Instant::now();
func();
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub fn measure_and_print<F: FnOnce()>(func: F) {
let m = measure(func);
eprintln!("{}", m);
}
fn main() { fn main() {
let args = Cli::parse(); let args = Cli::parse();
@@ -127,20 +64,75 @@ fn main() {
action(); action();
} }
fn read_from_db(id: u32) -> Option<Question> { // measure and return time elapsed in `func` in seconds
let reader: db::Reader<Question> = pub fn measure<F: FnOnce()>(func: F) -> f64 {
db::Reader::new(NEW_DB_FILENAME, 2048).expect("new db reader"); let start = Instant::now();
func();
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub fn measure_and_print<F: FnOnce()>(func: F) {
let m = measure(func);
eprintln!("{}", m);
}
fn print_question_from<F>(get_q: F)
where
F: FnOnce() -> Result<Question, String>,
{
let q = get_q().expect("question not found");
println!("{:#?}", q)
}
fn read_from_zip(file_num: usize, mut num: usize) -> Result<Question, String> {
let mut rng = rand::thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).str_err()?;
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).str_err()?;
let mut source_questions = archive.source_questions();
let (filename, batch) = if file_num == 0 {
source_questions
.choose(&mut rng)
.ok_or("rand choose".to_string())?
} else {
source_questions
.nth(file_num - 1)
.ok_or(format!("file nth #{file_num} => None"))?
};
let mut batch = batch.map_err(|e| format!("get batch from file #{file_num} => {e}"))?;
batch.filename = filename;
let questions: Vec<Question> = batch.into();
if num == 0 {
num = (1..=questions.len())
.choose(&mut rng)
.ok_or("rand choose".to_string())?;
}
Ok(questions[num - 1].clone())
}
fn read_from_db(id: u32) -> Result<Question, String> {
let reader: db::Reader<Question> = db::Reader::new(NEW_DB_FILENAME, 2048)?;
let len = reader.len();
let mut questions = reader.into_iter(); let mut questions = reader.into_iter();
match id { let question = match id {
0 => { 0 => {
let mut rng = rand::thread_rng(); let mut rng = rand::thread_rng();
questions.choose(&mut rng) questions
.choose(&mut rng)
.ok_or(format!("rand choose, len = {len}"))?
} }
_ => questions.nth((id - 1) as usize), _ => questions
} .nth((id - 1) as usize)
.ok_or(format!("get nth #{id} => None"))?,
};
Ok(question)
} }
fn write_db() { fn write_db() {
let (tx, rx) = mpsc::channel::<Question>(); let (tx, rx) = mpsc::channel::<Question>();
[ [
@@ -151,6 +143,29 @@ fn write_db() {
.for_each(|handle| handle.join().expect("thread panic")); .for_each(|handle| handle.join().expect("thread panic"));
println!("all done"); println!("all done");
} }
fn zip_reader_task(tx: mpsc::Sender<Question>) {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let questions = source_questions
.convert()
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
});
for question in questions {
let res = tx.send(question);
if res.is_err() {
break;
}
}
println!("read done");
}
fn db_writer_task(rx: mpsc::Receiver<Question>) { fn db_writer_task(rx: mpsc::Receiver<Question>) {
let writer_opts = db::WriterOpts::default(); let writer_opts = db::WriterOpts::default();
let mut writer: db::Writer<Question> = let mut writer: db::Writer<Question> =

View File

@@ -9,6 +9,14 @@ description = "Утилита загружающая базу данных ЧГ
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bench]]
name = "async_bench"
harness = false
[[bench]]
name = "questions_async_bench"
harness = false
[dependencies] [dependencies]
chgk_ledb_lib = {path = "../lib", features = ["async", "convert_async"]} chgk_ledb_lib = {path = "../lib", features = ["async", "convert_async"]}
serde_json="1.0" serde_json="1.0"
@@ -27,7 +35,10 @@ clap = { version = "4.2.7", features = ["derive"] }
futures = "0.3" futures = "0.3"
[dev-dependencies] [dev-dependencies]
criterion = { version = "0.5.1", features = ["async_tokio"]}
tempfile = "3.3" tempfile = "3.3"
bincode = "^2.0.0-rc.2"
serde="1.0" serde="1.0"
serde_derive="1.0" serde_derive="1.0"
futures = "0.3"
async-compression = { version = "0.4.1", default-features = false }
lazy_static = "1.4.0"

View File

@@ -0,0 +1,141 @@
#[macro_use]
extern crate criterion;
extern crate serde;
extern crate serde_derive;
extern crate tempfile;
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use futures::StreamExt;
use std::{ops::Deref, path::Path};
use criterion::{BatchSize, Criterion};
use lazy_static::lazy_static;
use tempfile::{tempdir, NamedTempFile};
use serde_derive::{Deserialize, Serialize};
/// Synthetic record used as the serialization payload in these benches.
/// Fields are filled deterministically by `gen_data`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
struct TestData {
    num1: u64,
    num2: u64,
    test: String,
}
use tokio::runtime;
lazy_static! {
static ref RUNTIME: tokio::runtime::Runtime =
runtime::Builder::new_current_thread().build().unwrap();
}
const N: usize = 4096;
/// Produce `count` deterministic `TestData` records: ids start at 143,
/// `num2` is a scrambled function of the id, and `test` is a repeated
/// filler string whose length varies with the id.
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
    (0..count as u64).map(|idx| {
        let id = 143 + idx;
        TestData {
            num1: id,
            num2: (id * 100) ^ 0xDF0E441122334455,
            test: "test ---- Test ____".repeat(123 + id as usize % 15),
        }
    })
}
/// Create an async `Writer<TestData>` at `path`: fastest compression level,
/// 100 MiB data/output buffers. Panics if creation fails.
async fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
    let opts = WriterOpts {
        compress_lvl: Level::Fastest,
        data_buf_size: 100 * 1024 * 1024,
        out_buf_size: 100 * 1024 * 1024,
        current_buf_size: 10240,
    };
    Writer::new(path, opts).await.expect("new writer")
}
/// Synchronously write `N` generated records to `path`, using a dedicated
/// thread with its own current-thread Tokio runtime (separate from the shared
/// `RUNTIME` used inside the timed sections).
fn write_sample<P: AsRef<Path>>(path: P) {
    // Owned String so the path can move into the spawned thread.
    let rp = path.as_ref().to_str().unwrap().to_string();
    std::thread::spawn(|| {
        runtime::Builder::new_current_thread()
            .build()
            .unwrap()
            .block_on(async move {
                let mut writer = prepare_db_writer(rp).await;
                let items_iter = gen_data(N).collect::<Vec<TestData>>().into_iter();
                let mut samples_stream = futures::stream::iter(items_iter);
                writer.load(&mut samples_stream).await.unwrap();
                writer.finish().await.unwrap();
            })
    })
    .join()
    .expect("spawn thread");
}
/// Blockingly construct an async `Writer<TestData>` for `path` on a
/// throwaway thread/runtime, so it can be called from non-async bench
/// setup closures.
fn setup_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
    let rp = path.as_ref().to_str().unwrap().to_string();
    std::thread::spawn(|| {
        runtime::Builder::new_current_thread()
            .build()
            .unwrap()
            .block_on(prepare_db_writer(rp))
    })
    .join()
    .expect("spawn thread")
}
/// Blockingly construct an async `Reader<TestData>` for `path` on a
/// throwaway thread/runtime; panics if the reader cannot be opened.
fn setup_reader<P: AsRef<Path>>(path: P) -> Reader<TestData> {
    let rp = path.as_ref().to_str().unwrap().to_string();
    std::thread::spawn(|| {
        runtime::Builder::new_current_thread()
            .build()
            .unwrap()
            .block_on(async move { Reader::new(rp).await.expect("new reader") })
    })
    .join()
    .expect("spawn thread")
}
/// Benchmark: stream all records out of a pre-written temporary database,
/// dropping each item; only the streaming read is timed.
fn async_read(c: &mut Criterion) {
    let dir = tempdir().expect("tempdir");
    let tmpfile = NamedTempFile::new_in(dir.path())
        .expect("new tempfile")
        .into_temp_path();
    // The fixture database is written once, outside the timed section.
    write_sample(&tmpfile);
    c.bench_function("async_read", |b| {
        b.to_async(RUNTIME.deref()).iter_batched(
            || setup_reader(&tmpfile),
            |reader| async move { reader.stream().for_each(|item| async { drop(item) }).await },
            BatchSize::SmallInput,
        )
    });
}
/// Benchmark: write `N` generated records to a fresh temporary database.
/// Data generation and writer setup run in the setup closure; only
/// `load` + `finish` are timed.
fn async_write(c: &mut Criterion) {
    let dir = tempdir().expect("tempdir");
    c.bench_function("async_write", |b| {
        b.to_async(RUNTIME.deref()).iter_batched(
            || {
                let tmpfile = NamedTempFile::new_in(dir.path())
                    .expect("new tempfile")
                    .into_temp_path();
                let src = gen_data(N).collect::<Vec<TestData>>().into_iter();
                let src = futures::stream::iter(src);
                let writer = setup_writer(&tmpfile);
                (src, writer)
            },
            |(mut src, mut writer)| async move {
                writer.load(&mut src).await.unwrap();
                writer.finish().await.unwrap();
            },
            BatchSize::SmallInput,
        )
    });
}
/// Criterion settings for these benches: 20 samples, defaults otherwise.
fn config() -> Criterion {
    let base = Criterion::default();
    base.sample_size(20)
}
criterion_group! {name=benches; config = config(); targets = async_read, async_write}
criterion_main!(benches);

View File

@@ -0,0 +1,151 @@
#[macro_use]
extern crate criterion;
extern crate serde;
extern crate serde_derive;
extern crate tempfile;
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use chgk_ledb_lib::questions::{binary::Question, QuestionsConverterAsyncForStream};
use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
use futures::pin_mut;
use futures::StreamExt;
use std::time::Duration;
use std::{ops::Deref, path::Path};
use async_zip::tokio::read::seek::ZipFileReader;
use criterion::{BatchSize, Criterion};
use lazy_static::lazy_static;
use tempfile::{tempdir, NamedTempFile};
use tokio::{fs, runtime};
const ZIP_FILENAME: &str = "../json.zip";
const NEW_DB_FILENAME: &str = "../db.dat";
lazy_static! {
static ref RUNTIME: tokio::runtime::Runtime =
runtime::Builder::new_current_thread().build().unwrap();
}
const N: usize = 4096;
/// Asynchronously load the first `N` questions from the source zip archive,
/// renumbering them sequentially starting at 1. Panics on archive errors.
async fn read_sample() -> Vec<Question> {
    let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip");
    let archive = ZipFileReader::with_tokio(&mut file)
        .await
        .expect("open zip file reader");
    let mut source_questions = archive.source_questions();
    let source_questions = source_questions.stream();
    // The stream must be pinned before it can be polled via the converter.
    pin_mut!(source_questions);
    source_questions
        .converter()
        .convert()
        .take(N)
        .enumerate()
        // `question.num` is 1-based; `enumerate()` is 0-based.
        .map(|(num, mut question)| {
            question.num = 1 + num as u32;
            question
        })
        .collect()
        .await
}
/// Blocking wrapper around `read_sample`: runs it on a throwaway thread
/// with its own current-thread runtime so it is callable from the
/// synchronous bench setup closures.
fn read_sample_sync() -> Vec<Question> {
    std::thread::spawn(|| {
        runtime::Builder::new_current_thread()
            .build()
            .unwrap()
            .block_on(read_sample())
    })
    .join()
    .expect("spawn thread")
}
/// Build an async `Writer<Question>` at `path` with the fastest compression
/// and 100 MiB data/output buffers; panics if the writer cannot be created.
async fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
    const MIB: usize = 1024 * 1024;
    let writer_opts = WriterOpts {
        compress_lvl: Level::Fastest,
        data_buf_size: 100 * MIB,
        out_buf_size: 100 * MIB,
        current_buf_size: 10240,
    };
    Writer::<Question>::new(path, writer_opts)
        .await
        .expect("new writer")
}
/// Blockingly construct an async `Writer<Question>` for `path` on a
/// throwaway thread/runtime (callable from non-async setup closures).
fn setup_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
    let rp = path.as_ref().to_str().unwrap().to_string();
    std::thread::spawn(|| {
        runtime::Builder::new_current_thread()
            .build()
            .unwrap()
            .block_on(prepare_db_writer(rp))
    })
    .join()
    .expect("spawn thread")
}
/// Blockingly construct an async `Reader<Question>` for `path` on a
/// throwaway thread/runtime; panics if the reader cannot be opened.
fn setup_reader<P: AsRef<Path>>(path: P) -> Reader<Question> {
    let rp = path.as_ref().to_str().unwrap().to_string();
    std::thread::spawn(|| {
        runtime::Builder::new_current_thread()
            .build()
            .unwrap()
            .block_on(async move { Reader::new(rp).await.expect("new reader") })
    })
    .join()
    .expect("spawn thread")
}
/// Benchmark: stream the first `N` questions from the existing database
/// (`NEW_DB_FILENAME`), dropping each item; reader setup is untimed.
fn questions_async_read(c: &mut Criterion) {
    c.bench_function("questions_async_read", |b| {
        b.to_async(RUNTIME.deref()).iter_batched(
            || setup_reader(NEW_DB_FILENAME),
            |reader| async move {
                reader
                    .stream()
                    .take(N)
                    .for_each(|item| async { drop(item) })
                    .await
            },
            BatchSize::SmallInput,
        )
    });
}
/// Benchmark: write the sample questions to a fresh temporary database.
/// Sample loading and writer setup run in the setup closure; only
/// `load` + `finish` are timed.
fn questions_async_write(c: &mut Criterion) {
    let dir = tempdir().expect("tempdir");
    c.bench_function("questions_async_write", |b| {
        b.to_async(RUNTIME.deref()).iter_batched(
            || {
                let tmpfile = NamedTempFile::new_in(dir.path())
                    .expect("new tempfile")
                    .into_temp_path();
                let src = read_sample_sync().into_iter();
                let src = futures::stream::iter(src);
                let writer = setup_writer(&tmpfile);
                (src, writer)
            },
            |(mut src, mut writer)| async move {
                writer.load(&mut src).await.unwrap();
                writer.finish().await.unwrap();
            },
            BatchSize::SmallInput,
        )
    });
}
/// Criterion settings: 40 samples, 7 s warm-up, 20 s measurement window.
fn config() -> Criterion {
    let mut cfg = Criterion::default();
    cfg = cfg.sample_size(40);
    cfg = cfg.warm_up_time(Duration::from_secs(7));
    cfg.measurement_time(Duration::from_secs(20))
}
criterion_group! {name=benches; config = config(); targets = questions_async_read, questions_async_write}
criterion_main!(benches);

View File

@@ -16,9 +16,10 @@ use tokio::{fs, io};
use tokio_stream::wrappers::UnboundedReceiverStream; use tokio_stream::wrappers::UnboundedReceiverStream;
use chgk_ledb_lib::async_db; use chgk_ledb_lib::async_db;
use chgk_ledb_lib::questions::Question; use chgk_ledb_lib::questions::binary::Question;
use chgk_ledb_lib::questions::QuestionsConverterAsyncForStream; use chgk_ledb_lib::questions::QuestionsConverterAsyncForStream;
use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync; use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
use chgk_ledb_lib::util::ErrorToString;
const ZIP_FILENAME: &str = "json.zip"; const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "db.dat"; const NEW_DB_FILENAME: &str = "db.dat";
@@ -48,6 +49,113 @@ struct Cli {
measure: bool, measure: bool,
} }
/// CLI entry point: build the selected action as a boxed future (rebuild the
/// db, print a question from the db, or print one straight from the zip),
/// optionally wrap it in `measure_and_print`, then await it.
#[tokio::main]
async fn main() {
    let args = Cli::parse();
    // Boxed as `dyn Future` so the optional `measure` wrapper can be layered
    // over any of the three actions uniformly.
    let mut action: Box<dyn Future<Output = _>> = match &args.command {
        Command::Write => Box::new(write_db()),
        Command::Print { id } => {
            let get_question = read_from_db(*id);
            Box::new(print_question_from(get_question))
        }
        Command::ZipPrint { file_num, num } => {
            let get_question = read_from_zip(*file_num, *num);
            Box::new(print_question_from(get_question))
        }
    };
    if args.measure {
        action = Box::new(measure_and_print(Box::into_pin(action)));
    }
    // A bare `Box<dyn Future>` is not awaitable; pin it first.
    Box::into_pin(action).await;
}
/// Measure and return the wall-clock time spent awaiting `fut`, in
/// (fractional) seconds.
pub async fn measure<F: Future>(fut: F) -> f64 {
    let start = Instant::now();
    fut.await;
    // `as_secs_f64` replaces the manual secs + nanos/1e9 arithmetic.
    start.elapsed().as_secs_f64()
}
/// Await `fut` via `measure` and report the elapsed seconds on stderr.
pub async fn measure_and_print<F: Future>(fut: F) {
    eprintln!("{}", measure(fut).await);
}
/// Await the supplied question-producing future and pretty-print the result.
/// Panics (with the error value attached) if the future resolves to `Err`.
async fn print_question_from<F>(get_q: F)
where
    F: Future<Output = Result<Question, String>>,
{
    let q = get_q.await.expect("question not found");
    println!("{:#?}", q)
}
/// Pick a question straight from the source zip archive.
///
/// `file_num`: 1-based source-file index, or 0 to choose a file at random.
/// `num`: 1-based question index within that file, or 0 to choose at random.
/// Returns a human-readable error string on any failure.
async fn read_from_zip(file_num: usize, mut num: usize) -> Result<Question, String> {
    let mut rng = thread_rng();
    let zip_file = fs::File::open(ZIP_FILENAME).await.str_err()?;
    let mut zip_reader = io::BufReader::new(zip_file);
    let archive = ZipFileReader::with_tokio(&mut zip_reader).await.str_err()?;
    let mut source = archive.source_questions();
    let files_count = source.len();
    // Guard: `Uniform::new(0, 0)` below panics when the archive is empty.
    if files_count == 0 {
        return Err("archive contains no source files".to_string());
    }
    let file_index = if file_num == 0 {
        let files = Uniform::new(0, files_count);
        rng.sample(files)
    } else {
        file_num - 1
    };
    let src = source
        .get(file_index)
        .await
        .map_err(|e| format!("get file {file_num} => {e}"))?;
    let src = stream::once(async { src });
    pin_mut!(src);
    let converter = src.converter();
    let questions: Vec<_> = converter.convert().collect().await;
    if num == 0 {
        // Was `.unwrap()`: return an error instead of panicking when the file
        // holds no questions (matches the sync app's error handling).
        num = (1..=questions.len())
            .choose(&mut rng)
            .ok_or("rand choose".to_string())?;
    }
    let mut question = questions
        .get(num - 1)
        .ok_or(format!("get question #{num} => None"))?
        .clone();
    question.num = num as u32;
    Ok(question)
}
/// Fetch question `id` (1-based) from the database, or a uniformly random
/// one when `id == 0`. Returns a human-readable error string on failure.
async fn read_from_db(id: u32) -> Result<Question, String> {
    let reader: async_db::Reader<Question> = async_db::Reader::new(NEW_DB_FILENAME).await?;
    let len = reader.len();
    // Guard: `Uniform::new(0, 0)` below panics on an empty database.
    if len == 0 {
        return Err("database is empty".to_string());
    }
    let index = if id == 0 {
        let mut rng = thread_rng();
        let questions = Uniform::new(0, len);
        rng.sample(questions)
    } else {
        id as usize - 1
    };
    reader
        .get(index)
        .await
        .map_err(|e| format!("get #{index} => {e}"))
}
/// Rebuild the database: one spawned task streams questions out of the zip
/// while a second task writes them to the db file, connected by an
/// unbounded channel. Panics if either task fails.
async fn write_db() {
    let (tx, rx) = mpsc::unbounded_channel::<Question>();
    tokio::try_join!(
        tokio::spawn(zip_reader_task(tx)),
        tokio::spawn(db_writer_task(rx))
    )
    .expect("tokio join");
    println!("all done");
}
async fn zip_reader_task(tx: UnboundedSender<Question>) { async fn zip_reader_task(tx: UnboundedSender<Question>) {
let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip"); let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip");
let archive = ZipFileReader::with_tokio(&mut file) let archive = ZipFileReader::with_tokio(&mut file)
@@ -73,109 +181,6 @@ async fn zip_reader_task(tx: UnboundedSender<Question>) {
println!("read done"); println!("read done");
} }
async fn print_question_from<F>(get_q: F)
where
F: Future<Output = Option<Question>>,
{
let q = get_q.await.expect("question not found");
println!("{:#?}", q)
}
async fn read_from_zip(file_num: usize, mut num: usize) -> Option<Question> {
let mut rng = thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).await.expect("open zip file");
let mut zip_reader = io::BufReader::new(zip_file);
let archive = ZipFileReader::with_tokio(&mut zip_reader)
.await
.expect("open zip file reader");
let mut source = archive.source_questions();
let files_count = source.len();
let file_index = if file_num == 0 {
let files = Uniform::new(0, files_count);
rng.sample(files)
} else {
file_num - 1
};
let src = source.get(file_index).await;
let src = stream::once(async { src.expect("get source file") });
pin_mut!(src);
let converter = src.converter();
let questions: Vec<_> = converter.convert().collect().await;
if num == 0 {
num = (1..=questions.len()).choose(&mut rng).unwrap();
}
let mut question = questions.get(num - 1).expect("get question").clone();
question.num = num as u32;
Some(question)
}
// measure and return time elapsed in `fut` in seconds
pub async fn measure<F: Future>(fut: F) -> f64 {
let start = Instant::now();
fut.await;
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub async fn measure_and_print<F: Future>(fut: F) {
let m = measure(fut).await;
eprintln!("{}", m);
}
#[tokio::main]
async fn main() {
let args = Cli::parse();
let mut action: Box<dyn Future<Output = _>> = match &args.command {
Command::Write => Box::new(write_db()),
Command::Print { id } => {
let get_question = read_from_db(*id);
Box::new(print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = read_from_zip(*file_num, *num);
Box::new(print_question_from(get_question))
}
};
if args.measure {
action = Box::new(measure_and_print(Box::into_pin(action)));
}
Box::into_pin(action).await;
}
async fn read_from_db(id: u32) -> Option<Question> {
let reader: async_db::Reader<Question> = async_db::Reader::new(NEW_DB_FILENAME)
.await
.expect("new db reader");
let len = reader.len();
let index = if len == 0 {
let mut rng = thread_rng();
let questions = Uniform::new(0, len);
rng.sample(questions)
} else {
id as usize - 1
};
match reader.get(index).await {
Ok(question) => Some(question),
Err(_) => None,
}
}
async fn write_db() {
let (tx, rx) = mpsc::unbounded_channel::<Question>();
tokio::try_join!(
tokio::spawn(zip_reader_task(tx)),
tokio::spawn(db_writer_task(rx))
)
.expect("tokio join");
println!("all done");
}
async fn db_writer_task(rx: UnboundedReceiver<Question>) { async fn db_writer_task(rx: UnboundedReceiver<Question>) {
let writer_opts = WriterOpts::default(); let writer_opts = WriterOpts::default();
let mut writer: async_db::Writer<Question> = let mut writer: async_db::Writer<Question> =

View File

@@ -12,40 +12,16 @@ description = "Библиотека для доступа к файлу базы
[features] [features]
default = [] default = []
sync = ["zstd", "memmap"] sync = ["zstd", "memmap"]
async = [ async = ["futures", "futures-core", "futures-util", "fmmap", "tokio", "async-compression", "async-stream", "pin-project"]
"futures",
"futures-core",
"futures-util",
"fmmap",
"tokio",
"async-compression",
"async-stream",
"pin-project",
]
source = ["zip"] source = ["zip"]
source_async = [ source_async = ["async_zip", "tokio", "futures", "futures-core", "futures-util", "async-stream"]
"async_zip",
"tokio",
"futures",
"futures-core",
"futures-util",
"async-stream",
]
convert = ["zip"] convert = ["zip"]
convert_async = [ convert_async = ["futures", "futures-core", "futures-util", "async-stream", "async_zip", "tokio"]
"futures",
"futures-core",
"futures-util",
"async-stream",
"async_zip",
"tokio",
]
[dependencies] [dependencies]
serde = "1.0" serde = "1.0"
serde_derive = "1.0" serde_derive = "1.0"
serde_json = "1.0" serde_json = "1.0"
bincode = "^2.0.0-rc.2"
zip = { version = "0.6", optional = true } zip = { version = "0.6", optional = true }
async_zip = { version = "0.0.15" , features = [ async_zip = { version = "0.0.15" , features = [
"zstd", "zstd",
@@ -70,6 +46,7 @@ async-stream = { version = "0.3", optional = true }
zstd = { version = "^0.12", default-features = false, optional = true } zstd = { version = "^0.12", default-features = false, optional = true }
memmap = { version = "0.7.0", optional = true } memmap = { version = "0.7.0", optional = true }
pin-project = { version = "1.1.3", optional = true } pin-project = { version = "1.1.3", optional = true }
postcard = { version = "1.0.6", default-features = false }
[dev-dependencies] [dev-dependencies]
insta = { version = "1.31.0", features = ["yaml"] } insta = { version = "1.31.0", features = ["yaml"] }

View File

@@ -24,9 +24,9 @@ use fmmap::tokio::{AsyncMmapFile, AsyncMmapFileExt, AsyncOptions};
type LSize = u32; type LSize = u32;
const LEN_SIZE: usize = std::mem::size_of::<LSize>(); const LEN_SIZE: usize = std::mem::size_of::<LSize>();
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
use crate::util::BincodeVecWriter; use serde::{de::DeserializeOwned, Serialize};
use crate::util::ErrorToString; use crate::util::ErrorToString;
pub struct WriterOpts { pub struct WriterOpts {
@@ -42,18 +42,18 @@ impl Default for WriterOpts {
compress_lvl: Level::Default, compress_lvl: Level::Default,
data_buf_size: 500 * 1024 * 1024, data_buf_size: 500 * 1024 * 1024,
out_buf_size: 200 * 1024 * 1024, out_buf_size: 200 * 1024 * 1024,
current_buf_size: 100 * 1024, current_buf_size: 1024 * 1024,
} }
} }
} }
pub struct Writer<T> pub struct Writer<T>
where where
T: bincode::Encode, T: Serialize,
{ {
out: io::BufWriter<fs::File>, out: io::BufWriter<fs::File>,
data_buf: Vec<u8>, data_buf: Vec<u8>,
cur_buf_item: BincodeVecWriter, cur_buf_item: Vec<u8>,
table: Vec<LSize>, table: Vec<LSize>,
compress_lvl: Level, compress_lvl: Level,
_t: PhantomData<Arc<T>>, _t: PhantomData<Arc<T>>,
@@ -61,14 +61,13 @@ where
impl<T> Writer<T> impl<T> Writer<T>
where where
T: bincode::Encode, T: Serialize,
{ {
pub async fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> { pub async fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
let out = fs::File::create(path).await.str_err()?; let out = fs::File::create(path).await.str_err()?;
let out = io::BufWriter::with_capacity(opts.out_buf_size, out); let out = io::BufWriter::with_capacity(opts.out_buf_size, out);
let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size); let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size);
let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size); let cur_buf_item: Vec<u8> = vec![0; opts.current_buf_size];
let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
let compress_lvl = opts.compress_lvl; let compress_lvl = opts.compress_lvl;
@@ -88,16 +87,16 @@ where
self.push_by_ref(&item).await self.push_by_ref(&item).await
} }
#[allow(clippy::useless_asref)]
pub async fn push_by_ref(&mut self, item: &T) -> Result<(), String> { pub async fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
let pos: LSize = self.data_buf.len() as LSize; let pos: LSize = self.data_buf.len() as LSize;
let cur_item_data = postcard::to_slice(item, self.cur_buf_item.as_mut_slice()).str_err()?;
bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?; let mut zencoder = ZstdEncoder::with_quality(cur_item_data.as_ref(), self.compress_lvl);
let mut zencoder = ZstdEncoder::with_quality(&self.cur_buf_item[..], self.compress_lvl);
io::copy(&mut zencoder, &mut self.data_buf) io::copy(&mut zencoder, &mut self.data_buf)
.await .await
.str_err()?; .str_err()?;
self.cur_buf_item.clear(); cur_item_data.fill(0);
self.table.push(pos); self.table.push(pos);
@@ -159,7 +158,7 @@ use pin_project::pin_project;
/// only work when ..push.poll() returns Ready immediately /// only work when ..push.poll() returns Ready immediately
pub struct WriterSink<'a, T> pub struct WriterSink<'a, T>
where where
T: bincode::Encode, T: Serialize,
{ {
#[pin] #[pin]
writer: &'a mut Writer<T>, writer: &'a mut Writer<T>,
@@ -168,7 +167,7 @@ where
impl<'a, T> Sink<T> for WriterSink<'a, T> impl<'a, T> Sink<T> for WriterSink<'a, T>
where where
T: bincode::Encode, T: Serialize,
{ {
type Error = String; type Error = String;
@@ -213,7 +212,7 @@ where
pub struct Reader<T> pub struct Reader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
mmap: AsyncMmapFile, mmap: AsyncMmapFile,
count: usize, count: usize,
@@ -223,7 +222,7 @@ where
impl<T> Reader<T> impl<T> Reader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
pub async fn new<P: AsRef<Path>>(path: P) -> Result<Self, String> { pub async fn new<P: AsRef<Path>>(path: P) -> Result<Self, String> {
let mmap = AsyncOptions::new() let mmap = AsyncOptions::new()
@@ -292,10 +291,10 @@ where
decoder.read_to_end(data_buf).await.str_err()?; decoder.read_to_end(data_buf).await.str_err()?;
// decode item // decode item
let item: (T, usize) = bincode::decode_from_slice(data_buf, BINCODE_CFG).str_err()?; let (item, _): (T, _) = postcard::take_from_bytes(data_buf).str_err()?;
data_buf.clear(); data_buf.clear();
Ok(item.0) Ok(item)
} }
/// get item at index /// get item at index
@@ -311,7 +310,7 @@ where
pub struct ReaderStream<'a, T> pub struct ReaderStream<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
reader: &'a Reader<T>, reader: &'a Reader<T>,
index: Option<usize>, index: Option<usize>,
@@ -319,7 +318,7 @@ where
impl<'a, T> ReaderStream<'a, T> impl<'a, T> ReaderStream<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn new(reader: &'a Reader<T>) -> Self { fn new(reader: &'a Reader<T>) -> Self {
ReaderStream { ReaderStream {
@@ -331,7 +330,7 @@ where
impl<'a, T> Stream for ReaderStream<'a, T> impl<'a, T> Stream for ReaderStream<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
type Item = T; type Item = T;
@@ -375,7 +374,7 @@ where
pub struct BufReader<T> pub struct BufReader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
inner: Reader<T>, inner: Reader<T>,
buf: Vec<u8>, buf: Vec<u8>,
@@ -383,7 +382,7 @@ where
impl<T> BufReader<T> impl<T> BufReader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
pub async fn new<P: AsRef<Path>>(path: P, buf_size: usize) -> Result<Self, String> { pub async fn new<P: AsRef<Path>>(path: P, buf_size: usize) -> Result<Self, String> {
match Reader::<T>::new(path).await { match Reader::<T>::new(path).await {
@@ -410,7 +409,7 @@ where
impl<T> From<Reader<T>> for BufReader<T> impl<T> From<Reader<T>> for BufReader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn from(inner: Reader<T>) -> Self { fn from(inner: Reader<T>) -> Self {
Self { Self {
@@ -422,7 +421,7 @@ where
impl<T> From<BufReader<T>> for Reader<T> impl<T> From<BufReader<T>> for Reader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn from(value: BufReader<T>) -> Self { fn from(value: BufReader<T>) -> Self {
value.into_inner() value.into_inner()
@@ -431,7 +430,7 @@ where
impl<T> Deref for BufReader<T> impl<T> Deref for BufReader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
type Target = Reader<T>; type Target = Reader<T>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
@@ -441,7 +440,7 @@ where
pub struct BufReaderStream<T> pub struct BufReaderStream<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
reader: BufReader<T>, reader: BufReader<T>,
index: Option<usize>, index: Option<usize>,
@@ -449,7 +448,7 @@ where
impl<T> BufReaderStream<T> impl<T> BufReaderStream<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn new(reader: BufReader<T>) -> Self { fn new(reader: BufReader<T>) -> Self {
BufReaderStream { BufReaderStream {
@@ -473,7 +472,7 @@ where
impl<T> Stream for BufReaderStream<T> impl<T> Stream for BufReaderStream<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
type Item = T; type Item = T;
@@ -516,9 +515,10 @@ where
mod test { mod test {
use super::*; use super::*;
use core::fmt::Debug; use core::fmt::Debug;
use serde_derive::Deserialize;
use tempfile::tempdir; use tempfile::tempdir;
#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct TestData { struct TestData {
num: u64, num: u64,
test: String, test: String,

View File

@@ -6,13 +6,13 @@ use std::{
sync::Arc, sync::Arc,
}; };
use serde::{de::DeserializeOwned, Serialize};
use memmap::{Mmap, MmapOptions}; use memmap::{Mmap, MmapOptions};
type LSize = u32; type LSize = u32;
const LEN_SIZE: usize = std::mem::size_of::<LSize>(); const LEN_SIZE: usize = std::mem::size_of::<LSize>();
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
use crate::util::BincodeVecWriter;
use crate::util::ErrorToString; use crate::util::ErrorToString;
pub struct WriterOpts { pub struct WriterOpts {
@@ -28,19 +28,19 @@ impl Default for WriterOpts {
compress_lvl: 1, compress_lvl: 1,
data_buf_size: 500 * 1024 * 1024, data_buf_size: 500 * 1024 * 1024,
out_buf_size: 200 * 1024 * 1024, out_buf_size: 200 * 1024 * 1024,
current_buf_size: 100 * 1024, current_buf_size: 20 * 1024,
} }
} }
} }
pub struct Writer<T> pub struct Writer<T>
where where
T: bincode::Encode, T: Serialize,
{ {
out: io::BufWriter<fs::File>, out: io::BufWriter<fs::File>,
data_buf: Cursor<Vec<u8>>, data_buf: Cursor<Vec<u8>>,
cur_buf_raw: Cursor<Vec<u8>>, cur_buf_raw: Cursor<Vec<u8>>,
cur_buf_item: BincodeVecWriter, cur_buf_item: Vec<u8>,
table: Vec<LSize>, table: Vec<LSize>,
compress_lvl: i32, compress_lvl: i32,
_t: PhantomData<Arc<T>>, _t: PhantomData<Arc<T>>,
@@ -48,7 +48,7 @@ where
impl<T> Writer<T> impl<T> Writer<T>
where where
T: bincode::Encode, T: Serialize,
{ {
pub fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> { pub fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
let out = fs::File::create(path).str_err()?; let out = fs::File::create(path).str_err()?;
@@ -58,8 +58,7 @@ where
let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size); let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
let cur_buf_raw = Cursor::new(cur_buf_raw); let cur_buf_raw = Cursor::new(cur_buf_raw);
let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size); let cur_buf_item: Vec<u8> = vec![0; opts.current_buf_size];
let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
let compress_lvl = opts.compress_lvl; let compress_lvl = opts.compress_lvl;
@@ -83,19 +82,20 @@ where
pub fn push_by_ref(&mut self, item: &T) -> Result<(), String> { pub fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
let pos: LSize = self.data_buf.position() as LSize; let pos: LSize = self.data_buf.position() as LSize;
bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?; let cur_item_data = postcard::to_slice(item, self.cur_buf_item.as_mut_slice()).str_err()?;
let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?; let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?;
zencoder zencoder
.set_pledged_src_size(Some(self.cur_buf_item.len() as u64)) .set_pledged_src_size(Some(cur_item_data.len() as u64))
.str_err()?; .str_err()?;
self.cur_buf_raw.set_position(0); self.cur_buf_raw.set_position(0);
let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder); let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder);
cur_buf_z.write_all(&self.cur_buf_item).str_err()?; cur_buf_z.write_all(cur_item_data).str_err()?;
cur_buf_z.finish().str_err()?; cur_buf_z.finish().str_err()?;
cur_buf_z.flush().str_err()?; cur_buf_z.flush().str_err()?;
self.cur_buf_item.clear();
cur_item_data.fill(0);
self.table.push(pos); self.table.push(pos);
let (cur_buf_raw, _) = cur_buf_z.into_inner(); let (cur_buf_raw, _) = cur_buf_z.into_inner();
@@ -150,7 +150,7 @@ where
pub struct Reader<T> pub struct Reader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
mmap: Mmap, mmap: Mmap,
count: usize, count: usize,
@@ -160,7 +160,7 @@ where
impl<T> Reader<T> impl<T> Reader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
pub fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> { pub fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> {
let file = fs::File::open(path).str_err()?; let file = fs::File::open(path).str_err()?;
@@ -215,9 +215,9 @@ where
let data = zstd::decode_all(reader).str_err()?; let data = zstd::decode_all(reader).str_err()?;
// decode item // decode item
let item: (T, usize) = bincode::decode_from_slice(&data, BINCODE_CFG).str_err()?; let (item, _): (T, _) = postcard::take_from_bytes(&data).str_err()?;
Ok(item.0) Ok(item)
} }
pub fn iter(&self) -> ReaderIter<'_, T> { pub fn iter(&self) -> ReaderIter<'_, T> {
@@ -227,7 +227,7 @@ where
pub struct ReaderIter<'a, T> pub struct ReaderIter<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
reader: &'a Reader<T>, reader: &'a Reader<T>,
index: Option<usize>, index: Option<usize>,
@@ -235,7 +235,7 @@ where
impl<'a, T> ReaderIter<'a, T> impl<'a, T> ReaderIter<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn new(reader: &'a Reader<T>) -> Self { fn new(reader: &'a Reader<T>) -> Self {
ReaderIter { ReaderIter {
@@ -247,7 +247,7 @@ where
impl<'a, T> Iterator for ReaderIter<'a, T> impl<'a, T> Iterator for ReaderIter<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
type Item = T; type Item = T;
@@ -300,7 +300,7 @@ where
impl<'a, T> ExactSizeIterator for ReaderIter<'a, T> impl<'a, T> ExactSizeIterator for ReaderIter<'a, T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn len(&self) -> usize { fn len(&self) -> usize {
self.reader.len() self.reader.len()
@@ -309,7 +309,7 @@ where
pub struct ReaderIntoIter<T> pub struct ReaderIntoIter<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
reader: Reader<T>, reader: Reader<T>,
index: Option<usize>, index: Option<usize>,
@@ -317,7 +317,7 @@ where
impl<T> ReaderIntoIter<T> impl<T> ReaderIntoIter<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn new(reader: Reader<T>) -> Self { fn new(reader: Reader<T>) -> Self {
Self { Self {
@@ -329,7 +329,7 @@ where
impl<T> Iterator for ReaderIntoIter<T> impl<T> Iterator for ReaderIntoIter<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
type Item = T; type Item = T;
@@ -382,7 +382,7 @@ where
impl<T> ExactSizeIterator for ReaderIntoIter<T> impl<T> ExactSizeIterator for ReaderIntoIter<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
fn len(&self) -> usize { fn len(&self) -> usize {
self.reader.len() self.reader.len()
@@ -391,7 +391,7 @@ where
impl<T> IntoIterator for Reader<T> impl<T> IntoIterator for Reader<T>
where where
T: bincode::Decode, T: DeserializeOwned,
{ {
type Item = T; type Item = T;
type IntoIter = ReaderIntoIter<Self::Item>; type IntoIter = ReaderIntoIter<Self::Item>;
@@ -404,9 +404,10 @@ where
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use serde::Deserialize;
use tempfile::tempdir; use tempfile::tempdir;
#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct TestData { struct TestData {
num: u64, num: u64,
test: String, test: String,
@@ -427,7 +428,7 @@ mod test {
compress_lvl: 1, compress_lvl: 1,
data_buf_size: 10 * 1024 * 1024, data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024, out_buf_size: 10 * 1024 * 1024,
current_buf_size: 4096, current_buf_size: 20 * 1024,
}; };
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer"); let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
@@ -454,7 +455,7 @@ mod test {
compress_lvl: 1, compress_lvl: 1,
data_buf_size: 10 * 1024 * 1024, data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024, out_buf_size: 10 * 1024 * 1024,
current_buf_size: 4096, current_buf_size: 20 * 1024,
}; };
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer"); let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
@@ -480,7 +481,7 @@ mod test {
compress_lvl: 1, compress_lvl: 1,
data_buf_size: 10 * 1024 * 1024, data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024, out_buf_size: 10 * 1024 * 1024,
current_buf_size: 4096, current_buf_size: 20 * 1024,
}; };
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer"); let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
@@ -509,7 +510,7 @@ mod test {
compress_lvl: 1, compress_lvl: 1,
data_buf_size: 10 * 1024 * 1024, data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024, out_buf_size: 10 * 1024 * 1024,
current_buf_size: 4096, current_buf_size: 20 * 1024,
}; };
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer"); let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");

View File

@@ -1,8 +1,6 @@
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
#[derive( #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
)]
pub struct BatchInfo { pub struct BatchInfo {
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default, skip_serializing_if = "String::is_empty")]
pub filename: String, pub filename: String,
@@ -32,9 +30,7 @@ pub struct BatchInfo {
pub rating: String, pub rating: String,
} }
#[derive( #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
)]
pub struct Question { pub struct Question {
#[serde(default, skip_serializing_if = "u32_is_zero")] #[serde(default, skip_serializing_if = "u32_is_zero")]
pub num: u32, pub num: u32,
@@ -83,9 +79,187 @@ impl BatchInfo {
} }
} }
pub mod binary {
use serde_derive::{Deserialize, Serialize};
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
pub struct BatchInfo {
#[serde(default)]
pub filename: String,
#[serde(default)]
pub description: String,
#[serde(default)]
pub author: String,
#[serde(default)]
pub comment: String,
#[serde(default)]
pub url: String,
#[serde(default)]
pub date: String,
#[serde(default)]
pub processed_by: String,
#[serde(default)]
pub redacted_by: String,
#[serde(default)]
pub copyright: String,
#[serde(default)]
pub theme: String,
#[serde(default)]
pub kind: String,
#[serde(default)]
pub source: String,
#[serde(default)]
pub rating: String,
}
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
pub struct Question {
#[serde(default)]
pub num: u32,
pub id: String,
pub description: String,
pub answer: String,
#[serde(default)]
pub author: String,
#[serde(default)]
pub comment: String,
#[serde(default)]
pub comment1: String,
#[serde(default)]
pub tour: String,
#[serde(default)]
pub url: String,
#[serde(default)]
pub date: String,
#[serde(default)]
pub processed_by: String,
#[serde(default)]
pub redacted_by: String,
#[serde(default)]
pub copyright: String,
#[serde(default)]
pub theme: String,
#[serde(default)]
pub kind: String,
#[serde(default)]
pub source: String,
#[serde(default)]
pub rating: String,
#[serde(default)]
pub batch_info: BatchInfo,
}
#[cfg(test)]
mod test {
use super::*;
use insta::assert_yaml_snapshot;
use serde_json::json;
pub fn sample_question() -> Question {
Question {
id: "Вопрос 1".into(),
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
answer: "42".into(),
batch_info: BatchInfo {
description: "Тестовый".into(),
date: "00-000-2000".into(),
..Default::default()
},
..Default::default()
}
}
#[test]
fn test_question_ser() {
assert_yaml_snapshot!(sample_question(), @r#"
---
num: 0
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
author: ""
comment: ""
comment1: ""
tour: ""
url: ""
date: ""
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
batch_info:
filename: ""
description: Тестовый
author: ""
comment: ""
url: ""
date: 00-000-2000
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
"#);
}
#[test]
fn test_question_de() {
let question_from_json: Result<Question, _> = serde_json::from_value(json!({
"id": "Вопрос 1",
"description": "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2",
"answer": "42",
"batch_info": {
"description": "Тестовый",
"date": "00-000-2000"
}
}));
assert!(question_from_json.is_ok());
assert_yaml_snapshot!(question_from_json.unwrap(), @r#"
---
num: 0
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
author: ""
comment: ""
comment1: ""
tour: ""
url: ""
date: ""
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
batch_info:
filename: ""
description: Тестовый
author: ""
comment: ""
url: ""
date: 00-000-2000
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
"#);
}
}
}
#[cfg(any(feature = "convert", feature = "convert_async"))] #[cfg(any(feature = "convert", feature = "convert_async"))]
pub mod convert_common { pub mod convert_common {
use super::{BatchInfo, Question}; use super::binary::{BatchInfo, Question};
use crate::source::{SourceQuestion, SourceQuestionsBatch}; use crate::source::{SourceQuestion, SourceQuestionsBatch};
macro_rules! make { macro_rules! make {
@@ -133,7 +307,7 @@ pub mod convert_common {
#[cfg(feature = "convert")] #[cfg(feature = "convert")]
pub mod convert { pub mod convert {
use super::Question; use super::binary::Question;
use crate::source::SourceQuestionsBatch; use crate::source::SourceQuestionsBatch;
pub trait QuestionsConverter { pub trait QuestionsConverter {
@@ -174,20 +348,68 @@ pub mod convert {
let converted: Vec<_> = source.convert().collect(); let converted: Vec<_> = source.convert().collect();
assert_yaml_snapshot!(converted, @r#" assert_yaml_snapshot!(converted, @r#"
--- ---
- id: Вопрос 1 - num: 0
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2 description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42" answer: "42"
author: ""
comment: ""
comment1: ""
tour: ""
url: ""
date: ""
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
batch_info: batch_info:
filename: test.json filename: test.json
description: Тестовый description: Тестовый
author: ""
comment: ""
url: ""
date: 00-000-2000 date: 00-000-2000
- id: Вопрос 2 processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
- num: 0
id: Вопрос 2
description: Зимой и летом одним цветом description: Зимой и летом одним цветом
answer: ёлка answer: ёлка
author: ""
comment: ""
comment1: ""
tour: ""
url: ""
date: ""
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
batch_info: batch_info:
filename: test.json filename: test.json
description: Тестовый description: Тестовый
author: ""
comment: ""
url: ""
date: 00-000-2000 date: 00-000-2000
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
"#); "#);
} }
@@ -202,7 +424,7 @@ pub mod convert_async {
use futures_core::stream::Stream; use futures_core::stream::Stream;
use futures_util::StreamExt; use futures_util::StreamExt;
use super::Question; use super::binary::Question;
use crate::source::SourceQuestionsBatch; use crate::source::SourceQuestionsBatch;
pub struct QuestionsConverterAsync<T> pub struct QuestionsConverterAsync<T>
@@ -289,20 +511,68 @@ pub mod convert_async {
let converted: Vec<_> = converter.collect().await; let converted: Vec<_> = converter.collect().await;
assert_yaml_snapshot!(converted, @r#" assert_yaml_snapshot!(converted, @r#"
--- ---
- id: Вопрос 1 - num: 0
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2 description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42" answer: "42"
author: ""
comment: ""
comment1: ""
tour: ""
url: ""
date: ""
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
batch_info: batch_info:
filename: test.json filename: test.json
description: Тестовый description: Тестовый
author: ""
comment: ""
url: ""
date: 00-000-2000 date: 00-000-2000
- id: Вопрос 2 processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
- num: 0
id: Вопрос 2
description: Зимой и летом одним цветом description: Зимой и летом одним цветом
answer: ёлка answer: ёлка
author: ""
comment: ""
comment1: ""
tour: ""
url: ""
date: ""
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
batch_info: batch_info:
filename: test.json filename: test.json
description: Тестовый description: Тестовый
author: ""
comment: ""
url: ""
date: 00-000-2000 date: 00-000-2000
processed_by: ""
redacted_by: ""
copyright: ""
theme: ""
kind: ""
source: ""
rating: ""
"#); "#);
} }
@@ -316,7 +586,6 @@ mod test {
use super::*; use super::*;
use insta::assert_yaml_snapshot; use insta::assert_yaml_snapshot;
use serde_json::json; use serde_json::json;
#[cfg(any(feature = "convert", feature = "convert_async"))] #[cfg(any(feature = "convert", feature = "convert_async"))]
pub mod convert_common { pub mod convert_common {
use crate::source::{SourceQuestion, SourceQuestionsBatch}; use crate::source::{SourceQuestion, SourceQuestionsBatch};

View File

@@ -12,46 +12,3 @@ where
self.map_err(|e| e.to_string()) self.map_err(|e| e.to_string())
} }
} }
#[cfg(any(feature = "sync", feature = "async"))]
mod bincode_utils {
use std::ops::{Deref, DerefMut};
use bincode::enc::write::Writer;
use bincode::error::EncodeError;
/// struct that allows [`Vec<u8>`] to implement [bincode::enc::write::Writer] trait
pub struct BincodeVecWriter {
vec: Vec<u8>,
}
impl BincodeVecWriter {
pub fn new(vec: Vec<u8>) -> BincodeVecWriter {
BincodeVecWriter { vec }
}
}
impl Deref for BincodeVecWriter {
type Target = Vec<u8>;
fn deref(&self) -> &Self::Target {
&self.vec
}
}
impl DerefMut for BincodeVecWriter {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.vec
}
}
impl Writer for BincodeVecWriter {
fn write(&mut self, bytes: &[u8]) -> Result<(), EncodeError> {
self.vec.extend_from_slice(bytes);
Ok(())
}
}
}
#[cfg(any(feature = "sync", feature = "async"))]
pub use bincode_utils::BincodeVecWriter;