Compare commits

..

No commits in common. "master" and "temp/fs4" have entirely different histories.

18 changed files with 593 additions and 3512 deletions

View File

@ -1,5 +1,4 @@
kind: pipeline kind: pipeline
type: docker
name: default name: default
steps: steps:
@ -7,8 +6,8 @@ steps:
image: rust:1-alpine image: rust:1-alpine
commands: commands:
- apk add --no-cache musl-dev - apk add --no-cache musl-dev
- cargo build --verbose --all-features --all - cargo build --verbose --all
- cargo test --verbose --all-features --all - cargo test --verbose --all
environment: environment:
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
@ -18,7 +17,6 @@ trigger:
--- ---
kind: pipeline kind: pipeline
type: docker
name: publish name: publish
steps: steps:
@ -26,8 +24,8 @@ steps:
image: rust:1-alpine image: rust:1-alpine
commands: commands:
- apk add --no-cache musl-dev - apk add --no-cache musl-dev
- cargo build -p chgk_ledb_lib --all-features - cargo build -p chgk_ledb_lib
- cargo publish --registry gitea -p chgk_ledb_lib --all-features - cargo publish --registry gitea -p chgk_ledb_lib
environment: environment:
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
CARGO_REGISTRIES_GITEA_INDEX: https://gitea.b4tman.ru/b4tman/_cargo-index.git CARGO_REGISTRIES_GITEA_INDEX: https://gitea.b4tman.ru/b4tman/_cargo-index.git

1
.gitignore vendored
View File

@ -7,4 +7,3 @@ json.zip
/.vscode /.vscode
test*.bin test*.bin
db.dat db.dat
*.pending-snap

1194
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +1,6 @@
[workspace] [workspace]
resolver = "2"
members = [ members = [
"app", "app",
"app_async",
"lib" "lib"
] ]

View File

@ -13,16 +13,12 @@ description = "Утилита загружающая базу данных ЧГ
name = "db_bench" name = "db_bench"
harness = false harness = false
[[bench]]
name = "questions_bench"
harness = false
[dependencies] [dependencies]
chgk_ledb_lib = {path = "../lib", features = ["sync", "source", "convert"]} chgk_ledb_lib = {path = "../lib"}
serde_json="1.0" serde_json="1.0"
zip="0.6" zip="0.6"
rand="0.8" rand="0.8"
clap = { version = "4.2.7", features = ["derive"] } clap = { version = "3.2.22", features = ["derive"] }
[dev-dependencies] [dev-dependencies]
criterion = "0.4.0" criterion = "0.4.0"

View File

@ -1,101 +0,0 @@
#[macro_use]
extern crate criterion;
extern crate bincode;
extern crate serde;
extern crate serde_derive;
extern crate serde_json;
extern crate tempfile;
use chgk_ledb_lib::db::{Reader, Writer, WriterOpts};
use chgk_ledb_lib::questions::{Question, QuestionsConverter};
use chgk_ledb_lib::source::ReadSourceQuestionsBatches;
use std::path::Path;
use std::time::Duration;
use std::{fs, io};
use criterion::{BatchSize, Criterion};
use tempfile::{tempdir, NamedTempFile};
const ZIP_FILENAME: &str = "../json.zip";
const NEW_DB_FILENAME: &str = "../db.dat";
const N: usize = 4096;
fn read_sample() -> Vec<Question> {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
source_questions
.convert()
.take(N)
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
})
.collect()
}
fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
let opts = WriterOpts {
compress_lvl: 1,
data_buf_size: 100 * 1024 * 1024,
out_buf_size: 100 * 1024 * 1024,
current_buf_size: 10240,
};
Writer::new(path, opts).expect("new writer")
}
fn questions_read(c: &mut Criterion) {
c.bench_function("questions_read", |b| {
b.iter_batched(
|| {
let reader: Reader<Question> =
Reader::new(NEW_DB_FILENAME, 4096).expect("new reader");
reader.into_iter().take(N)
},
|reader| {
for item in reader {
drop(item);
}
},
BatchSize::SmallInput,
)
});
}
fn questions_write(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
c.bench_function("questions_write", |b| {
b.iter_batched(
|| {
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
let src = read_sample().into_iter();
let writer = prepare_db_writer(&tmpfile);
(src, writer)
},
|(mut src, mut writer)| {
writer.load(&mut src).unwrap();
writer.finish().unwrap();
},
BatchSize::SmallInput,
)
});
}
fn config() -> Criterion {
Criterion::default()
.sample_size(40)
.warm_up_time(Duration::from_secs(7))
.measurement_time(Duration::from_secs(20))
}
criterion_group! {name=benches; config = config(); targets = questions_read, questions_write}
criterion_main!(benches);

View File

@ -12,7 +12,6 @@ use chgk_ledb_lib::source;
use crate::questions::{Question, QuestionsConverter}; use crate::questions::{Question, QuestionsConverter};
use crate::source::ReadSourceQuestionsBatches; use crate::source::ReadSourceQuestionsBatches;
use chgk_ledb_lib::util::ErrorToString;
const ZIP_FILENAME: &str = "json.zip"; const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "db.dat"; const NEW_DB_FILENAME: &str = "db.dat";
@ -42,108 +41,6 @@ struct Cli {
measure: bool, measure: bool,
} }
fn main() {
let args = Cli::parse();
let mut action: Box<dyn FnOnce()> = match &args.command {
Command::Write => Box::new(write_db),
Command::Print { id } => {
let get_question = Box::new(|| read_from_db(*id));
Box::new(|| print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = Box::new(|| read_from_zip(*file_num, *num));
Box::new(|| print_question_from(get_question))
}
};
if args.measure {
action = Box::new(|| measure_and_print(action));
}
action();
}
// measure and return time elapsed in `func` in seconds
pub fn measure<F: FnOnce()>(func: F) -> f64 {
let start = Instant::now();
func();
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub fn measure_and_print<F: FnOnce()>(func: F) {
let m = measure(func);
eprintln!("{}", m);
}
fn print_question_from<F>(get_q: F)
where
F: FnOnce() -> Result<Question, String>,
{
let q = get_q().expect("question not found");
println!("{:#?}", q)
}
fn read_from_zip(file_num: usize, mut num: usize) -> Result<Question, String> {
let mut rng = rand::thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).str_err()?;
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).str_err()?;
let mut source_questions = archive.source_questions();
let (filename, batch) = if file_num == 0 {
source_questions
.choose(&mut rng)
.ok_or("rand choose".to_string())?
} else {
source_questions
.nth(file_num - 1)
.ok_or(format!("file nth #{file_num} => None"))?
};
let mut batch = batch.map_err(|e| format!("get batch from file #{file_num} => {e}"))?;
batch.filename = filename;
let questions: Vec<Question> = batch.into();
if num == 0 {
num = (1..=questions.len())
.choose(&mut rng)
.ok_or("rand choose".to_string())?;
}
Ok(questions[num - 1].clone())
}
fn read_from_db(id: u32) -> Result<Question, String> {
let reader: db::Reader<Question> = db::Reader::new(NEW_DB_FILENAME, 2048)?;
let len = reader.len();
let mut questions = reader.into_iter();
let question = match id {
0 => {
let mut rng = rand::thread_rng();
questions
.choose(&mut rng)
.ok_or(format!("rand choose, len = {len}"))?
}
_ => questions
.nth((id - 1) as usize)
.ok_or(format!("get nth #{id} => None"))?,
};
Ok(question)
}
fn write_db() {
let (tx, rx) = mpsc::channel::<Question>();
[
thread::spawn(move || zip_reader_task(tx)),
thread::spawn(move || db_writer_task(rx)),
]
.into_iter()
.for_each(|handle| handle.join().expect("thread panic"));
println!("all done");
}
fn zip_reader_task(tx: mpsc::Sender<Question>) { fn zip_reader_task(tx: mpsc::Sender<Question>) {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap(); let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file); let zip_reader = io::BufReader::new(zip_file);
@ -166,6 +63,94 @@ fn zip_reader_task(tx: mpsc::Sender<Question>) {
println!("read done"); println!("read done");
} }
fn print_question_from<F>(get_q: F)
where
F: FnOnce() -> Option<Question>,
{
let q = get_q().expect("question not found");
println!("{:#?}", q)
}
fn read_from_zip(file_num: usize, mut num: usize) -> Option<Question> {
let mut rng = rand::thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let (filename, batch) = if file_num == 0 {
source_questions.choose(&mut rng).unwrap()
} else {
source_questions.nth(file_num - 1).unwrap()
};
let mut batch = batch.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
if num == 0 {
num = (1..=questions.len()).choose(&mut rng).unwrap();
}
Some(questions[num - 1].clone())
}
// measure and return time elapsed in `func` in seconds
pub fn measure<F: FnOnce()>(func: F) -> f64 {
let start = Instant::now();
func();
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub fn measure_and_print<F: FnOnce()>(func: F) {
let m = measure(func);
eprintln!("{}", m);
}
fn main() {
let args = Cli::parse();
let mut action: Box<dyn FnOnce()> = match &args.command {
Command::Write => Box::new(write_db),
Command::Print { id } => {
let get_question = Box::new(|| read_from_db(*id));
Box::new(|| print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = Box::new(|| read_from_zip(*file_num, *num));
Box::new(|| print_question_from(get_question))
}
};
if args.measure {
action = Box::new(|| measure_and_print(action));
}
action();
}
fn read_from_db(id: u32) -> Option<Question> {
let reader: db::Reader<Question> =
db::Reader::new(NEW_DB_FILENAME, 2048).expect("new db reader");
let mut questions = reader.into_iter();
match id {
0 => {
let mut rng = rand::thread_rng();
questions.choose(&mut rng)
}
_ => questions.nth((id - 1) as usize),
}
}
fn write_db() {
let (tx, rx) = mpsc::channel::<Question>();
[
thread::spawn(move || zip_reader_task(tx)),
thread::spawn(move || db_writer_task(rx)),
]
.into_iter()
.for_each(|handle| handle.join().expect("thread panic"));
println!("all done");
}
fn db_writer_task(rx: mpsc::Receiver<Question>) { fn db_writer_task(rx: mpsc::Receiver<Question>) {
let writer_opts = db::WriterOpts::default(); let writer_opts = db::WriterOpts::default();
let mut writer: db::Writer<Question> = let mut writer: db::Writer<Question> =

View File

@ -1,45 +0,0 @@
[package]
name = "chgk_ledb_async"
version = "1.1.0"
authors = ["Dmitry <b4tm4n@mail.ru>"]
edition = "2021"
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
license = "MIT"
description = "Утилита загружающая базу данных ЧГК вопросов из ZIP файла в JSON формате в базу данных."
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bench]]
name = "async_bench"
harness = false
[[bench]]
name = "questions_async_bench"
harness = false
[dependencies]
chgk_ledb_lib = {path = "../lib", features = ["async", "convert_async"]}
serde_json="1.0"
async_zip = { version = "0.0.15", features = [
"zstd",
"tokio",
"tokio-fs"] }
tokio = { version = "1", features = [
"io-util",
"fs",
"rt-multi-thread"
] }
tokio-stream = "0.1"
rand="0.8"
clap = { version = "4.2.7", features = ["derive"] }
futures = "0.3"
[dev-dependencies]
criterion = { version = "0.5.1", features = ["async_tokio"]}
tempfile = "3.3"
bincode = "^2.0.0-rc.2"
serde="1.0"
serde_derive="1.0"
futures = "0.3"
async-compression = { version = "0.4.1", default-features = false }
lazy_static = "1.4.0"

View File

@ -1,153 +0,0 @@
#[macro_use]
extern crate criterion;
extern crate bincode;
extern crate serde;
extern crate serde_derive;
extern crate tempfile;
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use futures::StreamExt;
use std::{ops::Deref, path::Path};
use criterion::{BatchSize, Criterion};
use lazy_static::lazy_static;
use tempfile::{tempdir, NamedTempFile};
use serde_derive::{Deserialize, Serialize};
#[derive(
bincode::Encode,
bincode::Decode,
Clone,
Debug,
PartialEq,
Eq,
PartialOrd,
Ord,
Serialize,
Deserialize,
)]
struct TestData {
num1: u64,
num2: u64,
test: String,
}
use tokio::runtime;
lazy_static! {
static ref RUNTIME: tokio::runtime::Runtime =
runtime::Builder::new_current_thread().build().unwrap();
}
const N: usize = 4096;
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
(0..count).map(|i| 143 + i as u64).map(|i| TestData {
num1: i,
num2: (i * 100) ^ 0xDF0E441122334455,
test: "test ---- Test ____".repeat(123 + i as usize % 15),
})
}
async fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
let opts = WriterOpts {
compress_lvl: Level::Fastest,
data_buf_size: 100 * 1024 * 1024,
out_buf_size: 100 * 1024 * 1024,
current_buf_size: 10240,
};
Writer::new(path, opts).await.expect("new writer")
}
fn write_sample<P: AsRef<Path>>(path: P) {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move {
let mut writer = prepare_db_writer(rp).await;
let items_iter = gen_data(N).collect::<Vec<TestData>>().into_iter();
let mut samples_stream = futures::stream::iter(items_iter);
writer.load(&mut samples_stream).await.unwrap();
writer.finish().await.unwrap();
})
})
.join()
.expect("spawn thread");
}
fn setup_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(prepare_db_writer(rp))
})
.join()
.expect("spawn thread")
}
fn setup_reader<P: AsRef<Path>>(path: P) -> Reader<TestData> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move { Reader::new(rp).await.expect("new reader") })
})
.join()
.expect("spawn thread")
}
fn async_read(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
write_sample(&tmpfile);
c.bench_function("async_read", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| setup_reader(&tmpfile),
|reader| async move { reader.stream().for_each(|item| async { drop(item) }).await },
BatchSize::SmallInput,
)
});
}
fn async_write(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
c.bench_function("async_write", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| {
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
let src = gen_data(N).collect::<Vec<TestData>>().into_iter();
let src = futures::stream::iter(src);
let writer = setup_writer(&tmpfile);
(src, writer)
},
|(mut src, mut writer)| async move {
writer.load(&mut src).await.unwrap();
writer.finish().await.unwrap();
},
BatchSize::SmallInput,
)
});
}
fn config() -> Criterion {
Criterion::default().sample_size(20)
}
criterion_group! {name=benches; config = config(); targets = async_read, async_write}
criterion_main!(benches);

View File

@ -1,152 +0,0 @@
#[macro_use]
extern crate criterion;
extern crate bincode;
extern crate serde;
extern crate serde_derive;
extern crate tempfile;
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use chgk_ledb_lib::questions::{Question, QuestionsConverterAsyncForStream};
use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
use futures::pin_mut;
use futures::StreamExt;
use std::time::Duration;
use std::{ops::Deref, path::Path};
use async_zip::tokio::read::seek::ZipFileReader;
use criterion::{BatchSize, Criterion};
use lazy_static::lazy_static;
use tempfile::{tempdir, NamedTempFile};
use tokio::{fs, runtime};
const ZIP_FILENAME: &str = "../json.zip";
const NEW_DB_FILENAME: &str = "../db.dat";
lazy_static! {
static ref RUNTIME: tokio::runtime::Runtime =
runtime::Builder::new_current_thread().build().unwrap();
}
const N: usize = 4096;
async fn read_sample() -> Vec<Question> {
let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip");
let archive = ZipFileReader::with_tokio(&mut file)
.await
.expect("open zip file reader");
let mut source_questions = archive.source_questions();
let source_questions = source_questions.stream();
pin_mut!(source_questions);
source_questions
.converter()
.convert()
.take(N)
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
})
.collect()
.await
}
fn read_sample_sync() -> Vec<Question> {
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(read_sample())
})
.join()
.expect("spawn thread")
}
async fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
let opts = WriterOpts {
compress_lvl: Level::Fastest,
data_buf_size: 100 * 1024 * 1024,
out_buf_size: 100 * 1024 * 1024,
current_buf_size: 10240,
};
Writer::<Question>::new(path, opts)
.await
.expect("new writer")
}
fn setup_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(prepare_db_writer(rp))
})
.join()
.expect("spawn thread")
}
fn setup_reader<P: AsRef<Path>>(path: P) -> Reader<Question> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move { Reader::new(rp).await.expect("new reader") })
})
.join()
.expect("spawn thread")
}
fn questions_async_read(c: &mut Criterion) {
c.bench_function("questions_async_read", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| setup_reader(NEW_DB_FILENAME),
|reader| async move {
reader
.stream()
.take(N)
.for_each(|item| async { drop(item) })
.await
},
BatchSize::SmallInput,
)
});
}
fn questions_async_write(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
c.bench_function("questions_async_write", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| {
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
let src = read_sample_sync().into_iter();
let src = futures::stream::iter(src);
let writer = setup_writer(&tmpfile);
(src, writer)
},
|(mut src, mut writer)| async move {
writer.load(&mut src).await.unwrap();
writer.finish().await.unwrap();
},
BatchSize::SmallInput,
)
});
}
fn config() -> Criterion {
Criterion::default()
.sample_size(40)
.warm_up_time(Duration::from_secs(7))
.measurement_time(Duration::from_secs(20))
}
criterion_group! {name=benches; config = config(); targets = questions_async_read, questions_async_write}
criterion_main!(benches);

View File

@ -1,197 +0,0 @@
extern crate serde_json;
use clap::{Parser, Subcommand};
use futures::{pin_mut, Future};
use rand::distributions::Uniform;
use rand::seq::IteratorRandom;
use rand::{thread_rng, Rng};
use async_zip::tokio::read::seek::ZipFileReader;
use futures::stream::{self, StreamExt};
use std::time::Instant;
use tokio::sync::mpsc::{self, UnboundedReceiver, UnboundedSender};
use async_db::WriterOpts;
use tokio::{fs, io};
use tokio_stream::wrappers::UnboundedReceiverStream;
use chgk_ledb_lib::async_db;
use chgk_ledb_lib::questions::Question;
use chgk_ledb_lib::questions::QuestionsConverterAsyncForStream;
use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
use chgk_ledb_lib::util::ErrorToString;
const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "db.dat";
#[derive(Subcommand, Debug)]
enum Command {
Write,
Print {
#[clap(value_parser, default_value = "0")]
id: u32,
},
ZipPrint {
#[clap(value_parser, default_value = "0")]
file_num: usize,
#[clap(value_parser, default_value = "0")]
num: usize,
},
}
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
struct Cli {
#[clap(subcommand)]
command: Command,
#[clap(short, long, action)]
measure: bool,
}
#[tokio::main]
async fn main() {
let args = Cli::parse();
let mut action: Box<dyn Future<Output = _>> = match &args.command {
Command::Write => Box::new(write_db()),
Command::Print { id } => {
let get_question = read_from_db(*id);
Box::new(print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = read_from_zip(*file_num, *num);
Box::new(print_question_from(get_question))
}
};
if args.measure {
action = Box::new(measure_and_print(Box::into_pin(action)));
}
Box::into_pin(action).await;
}
// measure and return time elapsed in `fut` in seconds
pub async fn measure<F: Future>(fut: F) -> f64 {
let start = Instant::now();
fut.await;
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub async fn measure_and_print<F: Future>(fut: F) {
let m = measure(fut).await;
eprintln!("{}", m);
}
async fn print_question_from<F>(get_q: F)
where
F: Future<Output = Result<Question, String>>,
{
let q = get_q.await.expect("question not found");
println!("{:#?}", q)
}
async fn read_from_zip(file_num: usize, mut num: usize) -> Result<Question, String> {
let mut rng = thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).await.str_err()?;
let mut zip_reader = io::BufReader::new(zip_file);
let archive = ZipFileReader::with_tokio(&mut zip_reader).await.str_err()?;
let mut source = archive.source_questions();
let files_count = source.len();
let file_index = if file_num == 0 {
let files = Uniform::new(0, files_count);
rng.sample(files)
} else {
file_num - 1
};
let src = source
.get(file_index)
.await
.map_err(|e| format!("get file {file_num} => {e}"))?;
let src = stream::once(async { src });
pin_mut!(src);
let converter = src.converter();
let questions: Vec<_> = converter.convert().collect().await;
if num == 0 {
num = (1..=questions.len()).choose(&mut rng).unwrap();
}
let mut question = questions
.get(num - 1)
.ok_or(format!("get question #{num} => None"))?
.clone();
question.num = num as u32;
Ok(question)
}
async fn read_from_db(id: u32) -> Result<Question, String> {
let reader: async_db::Reader<Question> = async_db::Reader::new(NEW_DB_FILENAME).await?;
let len = reader.len();
let index = if id == 0 {
let mut rng = thread_rng();
let questions = Uniform::new(0, len);
rng.sample(questions)
} else {
id as usize - 1
};
reader
.get(index)
.await
.map_err(|e| format!("get #{index} => {e}"))
}
async fn write_db() {
let (tx, rx) = mpsc::unbounded_channel::<Question>();
tokio::try_join!(
tokio::spawn(zip_reader_task(tx)),
tokio::spawn(db_writer_task(rx))
)
.expect("tokio join");
println!("all done");
}
async fn zip_reader_task(tx: UnboundedSender<Question>) {
let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip");
let archive = ZipFileReader::with_tokio(&mut file)
.await
.expect("open zip file reader");
let mut source_questions = archive.source_questions();
let source_questions = source_questions.stream();
pin_mut!(source_questions);
source_questions
.converter()
.convert()
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + (num as u32);
question
})
.for_each_concurrent(None, |question| async {
tx.send(question).expect("send");
})
.await;
println!("read done");
}
async fn db_writer_task(rx: UnboundedReceiver<Question>) {
let writer_opts = WriterOpts::default();
let mut writer: async_db::Writer<Question> =
async_db::Writer::new(NEW_DB_FILENAME, writer_opts)
.await
.unwrap_or_else(|e| panic!("db writer load, {e:#?}"));
let stream: UnboundedReceiverStream<_> = rx.into();
let stream = stream;
writer.load(stream).await.expect("load");
writer.finish().await.expect("db writer finish");
println!("write done");
}

View File

@ -1,6 +1,6 @@
[package] [package]
name = "chgk_ledb_lib" name = "chgk_ledb_lib"
version = "1.2.0" version = "1.1.0"
authors = ["Dmitry <b4tm4n@mail.ru>"] authors = ["Dmitry <b4tm4n@mail.ru>"]
edition = "2021" edition = "2021"
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb" repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
@ -9,68 +9,15 @@ description = "Библиотека для доступа к файлу базы
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
sync = ["zstd", "memmap"]
async = [
"futures",
"futures-core",
"futures-util",
"fmmap",
"tokio",
"async-compression",
"async-stream",
"pin-project",
]
source = ["zip"]
source_async = [
"async_zip",
"tokio",
"futures",
"futures-core",
"futures-util",
"async-stream",
]
convert = ["zip"]
convert_async = [
"futures",
"futures-core",
"futures-util",
"async-stream",
"async_zip",
"tokio",
]
[dependencies] [dependencies]
serde = "1.0" serde="1.0"
serde_derive = "1.0" serde_derive="1.0"
serde_json = "1.0" serde_json="1.0"
zip="0.6"
bincode = "^2.0.0-rc.2" bincode = "^2.0.0-rc.2"
zip = { version = "0.6", optional = true } zstd = "^0.10"
async_zip = { version = "0.0.15" , features = [ memmap = "0.7.0"
"zstd", fs4 = { version = "0.6.3", features = ["sync"] }
"tokio",
"tokio-fs",
], optional = true }
fmmap = { version = "0.3", features = ["tokio-async"], optional = true }
tokio = { version = "1", features = [
"fs",
"io-util",
"rt",
"macros",
], optional = true }
futures-core = { version = "0.3", optional = true }
futures = { version = "0.3", optional = true }
futures-util = { version = "0.3", optional = true }
async-compression = { version = "0.4.1", default-features = false, features = [
"zstd",
"tokio",
], optional = true }
async-stream = { version = "0.3", optional = true }
zstd = { version = "^0.12", default-features = false, optional = true }
memmap = { version = "0.7.0", optional = true }
pin-project = { version = "1.1.3", optional = true }
[dev-dependencies] [dev-dependencies]
insta = { version = "1.31.0", features = ["yaml"] }
tempfile = "3.3" tempfile = "3.3"

View File

@ -1,787 +0,0 @@
use std::marker::PhantomData;
use std::ops::Deref;
use std::vec;
use std::{path::Path, sync::Arc};
use async_compression::tokio::bufread::ZstdDecoder;
use async_compression::tokio::bufread::ZstdEncoder;
use async_compression::Level;
use futures::sink::Sink;
use futures::stream::StreamExt;
use futures_core::stream::Stream;
use futures_core::Future;
use futures_util::pin_mut;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::{
fs,
io::{self, AsyncReadExt, AsyncWriteExt},
};
use fmmap::tokio::{AsyncMmapFile, AsyncMmapFileExt, AsyncOptions};
type LSize = u32;
const LEN_SIZE: usize = std::mem::size_of::<LSize>();
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
use crate::util::BincodeVecWriter;
use crate::util::ErrorToString;
pub struct WriterOpts {
pub compress_lvl: Level,
pub data_buf_size: usize,
pub out_buf_size: usize,
pub current_buf_size: usize,
}
impl Default for WriterOpts {
fn default() -> Self {
Self {
compress_lvl: Level::Default,
data_buf_size: 500 * 1024 * 1024,
out_buf_size: 200 * 1024 * 1024,
current_buf_size: 100 * 1024,
}
}
}
pub struct Writer<T>
where
T: bincode::Encode,
{
out: io::BufWriter<fs::File>,
data_buf: Vec<u8>,
cur_buf_item: BincodeVecWriter,
table: Vec<LSize>,
compress_lvl: Level,
_t: PhantomData<Arc<T>>,
}
impl<T> Writer<T>
where
T: bincode::Encode,
{
pub async fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
let out = fs::File::create(path).await.str_err()?;
let out = io::BufWriter::with_capacity(opts.out_buf_size, out);
let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size);
let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
let compress_lvl = opts.compress_lvl;
let table: Vec<LSize> = vec![];
Ok(Self {
out,
data_buf,
cur_buf_item,
table,
compress_lvl,
_t: PhantomData,
})
}
pub async fn push(&mut self, item: T) -> Result<(), String> {
self.push_by_ref(&item).await
}
pub async fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
let pos: LSize = self.data_buf.len() as LSize;
bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?;
let mut zencoder = ZstdEncoder::with_quality(&self.cur_buf_item[..], self.compress_lvl);
io::copy(&mut zencoder, &mut self.data_buf)
.await
.str_err()?;
self.cur_buf_item.clear();
self.table.push(pos);
// FIXME
// this will break WriterSink::poll_ready (will wait forever), but not Writer::load
// tokio::time::sleep(std::time::Duration::from_secs(1)).await;
Ok(())
}
pub async fn load<S>(&mut self, source: S) -> Result<(), String>
where
S: Stream<Item = T> + std::marker::Unpin,
{
let hint = source.size_hint();
let hint = std::cmp::max(hint.0, hint.1.unwrap_or(0));
if hint > 0 {
self.table.reserve(hint);
}
pin_mut!(source);
while let Some(item) = source.next().await {
self.push(item).await?;
}
Ok(())
}
pub async fn finish(mut self) -> Result<(), String> {
// finish tab
let pos: LSize = self.data_buf.len() as LSize;
self.table.push(pos);
// write tab
let tab_size = (self.table.len() * LEN_SIZE) as LSize;
for pos in self.table {
let pos_data = (pos + tab_size).to_le_bytes();
self.out.write_all(&pos_data).await.str_err()?;
}
// copy data
self.out.write_all(&self.data_buf[..]).await.str_err()?;
self.out.flush().await.str_err()?;
Ok(())
}
pub fn sink(&mut self) -> WriterSink<'_, T> {
WriterSink {
writer: self,
item: None,
}
}
}
use pin_project::pin_project;
#[pin_project]
/// FIXME: not really async
/// only work when ..push.poll() returns Ready immediately
pub struct WriterSink<'a, T>
where
T: bincode::Encode,
{
#[pin]
writer: &'a mut Writer<T>,
item: Option<T>,
}
impl<'a, T> Sink<T> for WriterSink<'a, T>
where
T: bincode::Encode,
{
type Error = String;
fn poll_ready(
self: std::pin::Pin<&mut Self>,
ctx: &mut std::task::Context<'_>,
) -> Poll<Result<(), String>> {
let mut this = self.project();
if this.item.is_none() {
return Poll::Ready(Ok(()));
}
let item = this.item.take().unwrap();
let push_fut = this.writer.push(item); // FIXME:: how to save this future???
pin_mut!(push_fut);
push_fut.poll(ctx)
}
fn start_send(self: std::pin::Pin<&mut Self>, item: T) -> Result<(), Self::Error> {
let this = self.project();
*this.item = Some(item);
Ok(())
}
fn poll_flush(
self: std::pin::Pin<&mut Self>,
ctx: &mut std::task::Context<'_>,
) -> Poll<Result<(), Self::Error>> {
self.poll_ready(ctx)
}
fn poll_close(
mut self: std::pin::Pin<&mut Self>,
ctx: &mut std::task::Context<'_>,
) -> Poll<Result<(), Self::Error>> {
futures::ready!(self.as_mut().poll_ready(ctx))?;
Poll::Ready(Ok(()))
}
}
pub struct Reader<T>
where
T: bincode::Decode,
{
mmap: AsyncMmapFile,
count: usize,
first_pos: LSize,
_t: PhantomData<Arc<T>>,
}
impl<T> Reader<T>
where
    T: bincode::Decode,
{
    /// Opens the file at `path`, takes a shared file lock and reads the
    /// header: the first offset-table entry, which also fixes the record
    /// count.
    ///
    /// # Errors
    /// Returns a stringified error if the file cannot be opened/mapped,
    /// the shared lock cannot be taken, or the header is truncated.
    pub async fn new<P: AsRef<Path>>(path: P) -> Result<Self, String> {
        let mmap = AsyncOptions::new()
            .read(true)
            .open_mmap_file(path)
            .await
            .str_err()?;
        mmap.try_lock_shared().str_err()?;
        // The first table entry points at the first record's data, which is
        // also where the offset table ends.
        let first_data: [u8; LEN_SIZE] = mmap.bytes(0, LEN_SIZE).str_err()?.try_into().str_err()?;
        let first_pos = LSize::from_le_bytes(first_data);
        let tab_len = (first_pos as usize) / LEN_SIZE;
        // The table carries one extra terminal offset, so entries = records + 1.
        let count = tab_len - 1;
        Ok(Self {
            mmap,
            count,
            first_pos,
            _t: PhantomData,
        })
    }

    /// Number of records in the file.
    pub fn len(&self) -> usize {
        self.count
    }

    /// `true` when the file holds no records.
    pub fn is_empty(&self) -> bool {
        0 == self.len()
    }

    /// Gets the item at `index`, using `data_buf` as reusable scratch space
    /// for the decompressed record. Any previous contents of `data_buf` are
    /// discarded; the buffer is left empty on success.
    ///
    /// # Errors
    /// Fails when `index` is out of range or when reading, decompressing or
    /// decoding the record fails.
    pub async fn get_with_buf(&self, index: usize, data_buf: &mut Vec<u8>) -> Result<T, String> {
        if index >= self.len() {
            return Err("index out of range".into());
        }
        // Clear up front: if a previous call bailed out after partially
        // filling the buffer, decoding would otherwise start from stale
        // bytes (the old code only cleared on the success path).
        data_buf.clear();
        let next_pos: usize = (index + 1) * LEN_SIZE;
        // Offset of this record's compressed data: record 0 starts right
        // after the table; otherwise read it from the table.
        let data_pos = if 0 == index {
            self.first_pos
        } else {
            let tab_pos: usize = index * LEN_SIZE;
            let pos_curr_data: [u8; LEN_SIZE] = self
                .mmap
                .bytes(tab_pos, LEN_SIZE)
                .str_err()?
                .try_into()
                .str_err()?;
            LSize::from_le_bytes(pos_curr_data)
        } as usize;
        // Offset of the next record; the difference is this record's length.
        let pos_next_data: [u8; LEN_SIZE] = self
            .mmap
            .bytes(next_pos, LEN_SIZE)
            .str_err()?
            .try_into()
            .str_err()?;
        let data_pos_next = LSize::from_le_bytes(pos_next_data) as usize;
        let data_len = data_pos_next - data_pos;
        // Decompress the record straight out of the mapped range.
        let mut decoder = ZstdDecoder::new(self.mmap.range_reader(data_pos, data_len).str_err()?);
        decoder.read_to_end(data_buf).await.str_err()?;
        // Decode the item, then leave the buffer empty for the next call.
        let item: (T, usize) = bincode::decode_from_slice(data_buf, BINCODE_CFG).str_err()?;
        data_buf.clear();
        Ok(item.0)
    }

    /// Gets the item at `index` using a fresh temporary buffer.
    pub async fn get(&self, index: usize) -> Result<T, String> {
        let mut data_buf: Vec<u8> = vec![];
        self.get_with_buf(index, &mut data_buf).await
    }

    /// Borrowing stream over all records in order.
    pub fn stream(&self) -> ReaderStream<'_, T> {
        ReaderStream::new(self)
    }
}
/// Borrowing `Stream` that yields every record of a `Reader` in order.
pub struct ReaderStream<'a, T>
where
    T: bincode::Decode,
{
reader: &'a Reader<T>,
// Index of the next record to yield; `None` until the first poll.
index: Option<usize>,
}
impl<'a, T> ReaderStream<'a, T>
where
    T: bincode::Decode,
{
    /// Wraps `reader`; iteration starts lazily at the first record.
    fn new(reader: &'a Reader<T>) -> Self {
        Self {
            index: None,
            reader,
        }
    }
}
impl<'a, T> Stream for ReaderStream<'a, T>
where
    T: bincode::Decode,
{
    type Item = T;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
        // `None` means iteration has not started; treat it as index 0.
        // This also fixes a panic the old code had: with an empty reader the
        // index was never initialized and was then `unwrap()`ed.
        let index = self.index.unwrap_or(0);
        if index >= self.reader.len() {
            return Poll::Ready(None);
        }
        // FIXME: a fresh future is built on every poll, so progress is only
        // guaranteed if `Reader::get` resolves without returning `Pending`.
        let future = self.reader.get(index);
        pin_mut!(future);
        match Pin::new(&mut future).poll(cx) {
            Poll::Ready(Ok(item)) => {
                // Advance only on success; errors terminate the stream.
                self.index = Some(index + 1);
                Poll::Ready(Some(item))
            }
            Poll::Ready(Err(_)) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `index` is the next record to yield, so exactly `len - index`
        // items remain (the old code under-reported by one).
        let rem = self.reader.len().saturating_sub(self.index.unwrap_or(0));
        (rem, Some(rem))
    }
}
/// `Reader` wrapper that owns a reusable scratch buffer, so repeated `get`
/// calls avoid allocating a fresh decompression buffer each time.
pub struct BufReader<T>
where
    T: bincode::Decode,
{
inner: Reader<T>,
// Scratch buffer handed to `Reader::get_with_buf` on every `get`.
buf: Vec<u8>,
}
impl<T> BufReader<T>
where
    T: bincode::Decode,
{
    /// Opens the file via `Reader::new` and attaches a scratch buffer
    /// pre-allocated to `buf_size` bytes.
    pub async fn new<P: AsRef<Path>>(path: P, buf_size: usize) -> Result<Self, String> {
        let inner = Reader::<T>::new(path).await?;
        Ok(Self {
            inner,
            buf: Vec::with_capacity(buf_size),
        })
    }

    /// Gets the item at `index`, reusing the internal buffer.
    pub async fn get(&mut self, index: usize) -> Result<T, String> {
        let Self { inner, buf } = self;
        inner.get_with_buf(index, buf).await
    }

    /// Unwraps the underlying `Reader`, dropping the scratch buffer.
    pub fn into_inner(self) -> Reader<T> {
        self.inner
    }

    /// Converts into an owning stream over all records.
    pub fn stream(self) -> BufReaderStream<T> {
        BufReaderStream::new(self)
    }
}
impl<T> From<Reader<T>> for BufReader<T>
where
    T: bincode::Decode,
{
    /// Wraps an existing `Reader`; the scratch buffer starts unallocated
    /// and grows on first use.
    fn from(inner: Reader<T>) -> Self {
        let buf = Vec::new();
        Self { inner, buf }
    }
}
impl<T> From<BufReader<T>> for Reader<T>
where
    T: bincode::Decode,
{
    /// Recovers the wrapped `Reader`, discarding the scratch buffer.
    fn from(buffered: BufReader<T>) -> Self {
        buffered.into_inner()
    }
}
impl<T> Deref for BufReader<T>
where
    T: bincode::Decode,
{
    type Target = Reader<T>;

    /// Exposes the wrapped `Reader`'s read-only API (`len`, `get`, …).
    fn deref(&self) -> &Self::Target {
        let Self { inner, .. } = self;
        inner
    }
}
/// Owning `Stream` over all records of a `BufReader`, reusing its internal
/// buffer for every record.
pub struct BufReaderStream<T>
where
    T: bincode::Decode,
{
reader: BufReader<T>,
// Index of the next record to yield; `None` until the first poll.
index: Option<usize>,
}
impl<T> BufReaderStream<T>
where
    T: bincode::Decode,
{
    /// Takes ownership of `reader`; iteration starts lazily at index 0.
    fn new(reader: BufReader<T>) -> Self {
        Self {
            index: None,
            reader,
        }
    }

    /// Fetches the record at the current cursor and advances the cursor
    /// (even on a read error, mirroring the original behavior). Errors when
    /// iteration has not been started yet.
    async fn get_next(&mut self) -> Result<T, String> {
        if let Some(index) = self.index {
            let item = self.reader.get(index).await;
            self.index = Some(index + 1);
            item
        } else {
            Err("index is None".into())
        }
    }
}
impl<T> Stream for BufReaderStream<T>
where
    T: bincode::Decode,
{
    type Item = T;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
        // Initialize the cursor on first poll. Unlike the old code this is
        // done unconditionally, so an empty reader falls through to the
        // bounds check below instead of panicking on `unwrap()`.
        if self.index.is_none() {
            self.index = Some(0);
        }
        if self.index.unwrap() >= self.reader.len() {
            return Poll::Ready(None);
        }
        // FIXME: a fresh future is built on every poll, so progress is only
        // guaranteed if `BufReader::get` resolves without returning `Pending`.
        let future = self.get_next();
        pin_mut!(future);
        match Pin::new(&mut future).poll(cx) {
            Poll::Ready(Ok(item)) => Poll::Ready(Some(item)),
            Poll::Ready(Err(_)) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `index` is the next record to yield, so exactly `len - index`
        // items remain (the old code under-reported by one).
        let rem = self.reader.len().saturating_sub(self.index.unwrap_or(0));
        (rem, Some(rem))
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use core::fmt::Debug;
    use tempfile::tempdir;

    #[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
    struct TestData {
        num: u64,
        test: String,
        vnum: Vec<u64>,
        vstr: Vec<String>,
    }

    /// Deterministic sample records; record `i` grows with `i`, covering
    /// both tiny and multi-kilobyte payloads.
    fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
        (0..count).map(|i| TestData {
            num: i as u64,
            test: "test".repeat(i),
            vnum: (0..i * 120).map(|x| (x ^ 0x345FE34) as u64).collect(),
            vstr: (0..i * 111).map(|x| "test".repeat(x)).collect(),
        })
    }

    /// Writer options shared by every test.
    fn test_opts() -> WriterOpts {
        WriterOpts {
            data_buf_size: 10 * 1024 * 1024,
            out_buf_size: 10 * 1024 * 1024,
            ..Default::default()
        }
    }

    /// Writes `count` generated items to `path` via `Writer::load` and
    /// returns them for later comparison.
    async fn write_test_file(path: &Path, count: usize) -> Vec<TestData> {
        let mut writer: Writer<TestData> =
            Writer::new(path, test_opts()).await.expect("new writer");
        let items: Vec<TestData> = gen_data(count).collect();
        let src = futures::stream::iter(items.clone());
        pin_mut!(src);
        writer.load(src).await.expect("load");
        writer.finish().await.expect("finish write");
        items
    }

    async fn assert_data_eq((x, y): (&TestData, TestData)) {
        assert_eq!(*x, y);
    }

    #[tokio::test]
    async fn test_write() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        write_test_file(&tmpfile, 5).await;
    }

    #[tokio::test]
    async fn test_write_read() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let items = write_test_file(&tmpfile, 5).await;
        let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
        assert_eq!(items.len(), reader.len());
        for (idx, item) in items.iter().enumerate() {
            let ritem = reader.get(idx).await.expect("get");
            assert_eq!(*item, ritem);
        }
    }

    /// Loads the writer through its `Sink` adapter instead of `load`.
    #[tokio::test]
    async fn test_write_sink_read() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let mut writer: Writer<TestData> =
            Writer::new(&tmpfile, test_opts()).await.expect("new writer");
        let items: Vec<TestData> = gen_data(5).collect();
        let src = futures::stream::iter(items.clone()).map(Ok);
        pin_mut!(src);
        src.forward(writer.sink()).await.expect("forward");
        writer.finish().await.expect("finish write");
        let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
        assert_eq!(items.len(), reader.len());
        for (idx, item) in items.iter().enumerate() {
            let ritem = reader.get(idx).await.expect("get");
            assert_eq!(*item, ritem);
        }
    }

    #[tokio::test]
    async fn test_write_read_get_with_buf() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let items = write_test_file(&tmpfile, 5).await;
        let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
        assert_eq!(items.len(), reader.len());
        for (idx, item) in items.iter().enumerate() {
            let mut data_buf: Vec<u8> = vec![];
            let ritem = reader.get_with_buf(idx, &mut data_buf).await.expect("get");
            assert_eq!(*item, ritem);
        }
    }

    #[tokio::test]
    async fn test_write_read_stream() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let items = write_test_file(&tmpfile, 5).await;
        let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
        assert_eq!(items.len(), reader.len());
        let dst_stream = reader.stream();
        let src_stream = futures::stream::iter(items.iter());
        let mut count = 0;
        src_stream
            .zip(dst_stream)
            .map(|x| {
                count += 1;
                x
            })
            .for_each(assert_data_eq)
            .await;
        assert_eq!(count, items.len())
    }

    /// Shares one `Reader` between tasks. Every `JoinHandle` is awaited so
    /// assertion failures inside the tasks actually fail the test (the old
    /// version dropped the handles, so the tasks could be cancelled before
    /// running).
    #[tokio::test]
    async fn test_share_reader() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let items = write_test_file(&tmpfile, 5).await;
        let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
        assert_eq!(items.len(), reader.len());
        let reader = Arc::new(reader);
        let mut tasks = Vec::new();
        for _ in 0..=3 {
            let cur_items = items.clone();
            let cur_reader = Arc::clone(&reader);
            tasks.push(tokio::spawn(async move {
                let dst_stream = cur_reader.stream();
                let src_stream = futures::stream::iter(cur_items.iter());
                src_stream.zip(dst_stream).for_each(assert_data_eq).await;
            }));
        }
        for task in tasks {
            task.await.expect("task join");
        }
    }

    #[tokio::test]
    async fn test_write_bufread() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let items = write_test_file(&tmpfile, 5).await;
        let mut reader = BufReader::<TestData>::new(&tmpfile, 4096)
            .await
            .expect("new buf reader");
        assert_eq!(items.len(), reader.len());
        for (idx, item) in items.iter().enumerate() {
            let ritem = reader.get(idx).await.expect("get");
            assert_eq!(*item, ritem);
        }
    }

    #[tokio::test]
    async fn test_write_bufread_stream() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let items = write_test_file(&tmpfile, 5).await;
        let reader = BufReader::<TestData>::new(&tmpfile, 4096)
            .await
            .expect("new buf reader");
        assert_eq!(items.len(), reader.len());
        let dst_stream = reader.stream();
        let src_stream = futures::stream::iter(items.iter());
        let mut count = 0;
        src_stream
            .zip(dst_stream)
            .map(|x| {
                count += 1;
                x
            })
            .for_each(assert_data_eq)
            .await;
        assert_eq!(count, items.len())
    }
}

View File

@ -8,12 +8,26 @@ use std::{
use memmap::{Mmap, MmapOptions}; use memmap::{Mmap, MmapOptions};
use fs4::FileExt;
type LSize = u32; type LSize = u32;
const LEN_SIZE: usize = std::mem::size_of::<LSize>(); const LEN_SIZE: usize = std::mem::size_of::<LSize>();
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard(); const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
use crate::util::BincodeVecWriter; trait ErrorToString {
use crate::util::ErrorToString; type Output;
fn str_err(self) -> std::result::Result<Self::Output, String>;
}
impl<T, E> ErrorToString for std::result::Result<T, E>
where
E: std::error::Error,
{
type Output = T;
fn str_err(self) -> std::result::Result<Self::Output, String> {
self.map_err(|e| e.to_string())
}
}
pub struct WriterOpts { pub struct WriterOpts {
pub compress_lvl: i32, pub compress_lvl: i32,
@ -40,10 +54,9 @@ where
out: io::BufWriter<fs::File>, out: io::BufWriter<fs::File>,
data_buf: Cursor<Vec<u8>>, data_buf: Cursor<Vec<u8>>,
cur_buf_raw: Cursor<Vec<u8>>, cur_buf_raw: Cursor<Vec<u8>>,
cur_buf_item: BincodeVecWriter,
table: Vec<LSize>, table: Vec<LSize>,
compress_lvl: i32, compress_lvl: i32,
_t: PhantomData<Arc<T>>, _t: PhantomData<*const T>,
} }
impl<T> Writer<T> impl<T> Writer<T>
@ -52,14 +65,13 @@ where
{ {
pub fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> { pub fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
let out = fs::File::create(path).str_err()?; let out = fs::File::create(path).str_err()?;
out.try_lock_exclusive().str_err()?;
let out = io::BufWriter::with_capacity(opts.out_buf_size, out); let out = io::BufWriter::with_capacity(opts.out_buf_size, out);
let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size); let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size);
let data_buf = Cursor::new(data_buf); let data_buf = Cursor::new(data_buf);
let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size); let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
let cur_buf_raw = Cursor::new(cur_buf_raw); let cur_buf_raw = Cursor::new(cur_buf_raw);
let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
let compress_lvl = opts.compress_lvl; let compress_lvl = opts.compress_lvl;
@ -69,7 +81,6 @@ where
out, out,
data_buf, data_buf,
cur_buf_raw, cur_buf_raw,
cur_buf_item,
table, table,
compress_lvl, compress_lvl,
_t: PhantomData, _t: PhantomData,
@ -77,25 +88,20 @@ where
} }
pub fn push(&mut self, item: T) -> Result<(), String> { pub fn push(&mut self, item: T) -> Result<(), String> {
self.push_by_ref(&item)
}
pub fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
let pos: LSize = self.data_buf.position() as LSize; let pos: LSize = self.data_buf.position() as LSize;
bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?; let item_data = bincode::encode_to_vec(item, BINCODE_CFG).str_err()?;
let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?; let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?;
zencoder zencoder
.set_pledged_src_size(Some(self.cur_buf_item.len() as u64)) .set_pledged_src_size(item_data.len() as u64)
.str_err()?; .str_err()?;
self.cur_buf_raw.set_position(0); self.cur_buf_raw.set_position(0);
let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder); let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder);
cur_buf_z.write_all(&self.cur_buf_item).str_err()?; cur_buf_z.write_all(&item_data).str_err()?;
cur_buf_z.finish().str_err()?; cur_buf_z.finish().str_err()?;
cur_buf_z.flush().str_err()?; cur_buf_z.flush().str_err()?;
self.cur_buf_item.clear();
self.table.push(pos); self.table.push(pos);
let (cur_buf_raw, _) = cur_buf_z.into_inner(); let (cur_buf_raw, _) = cur_buf_z.into_inner();
@ -130,6 +136,9 @@ where
let pos: LSize = self.data_buf.position() as LSize; let pos: LSize = self.data_buf.position() as LSize;
self.table.push(pos); self.table.push(pos);
let output_size: u64 = (self.table.len() * LEN_SIZE) as u64 + self.data_buf.position();
self.out.get_ref().allocate(output_size).str_err()?;
// write tab // write tab
let tab_size = (self.table.len() * LEN_SIZE) as LSize; let tab_size = (self.table.len() * LEN_SIZE) as LSize;
for pos in self.table { for pos in self.table {
@ -144,6 +153,7 @@ where
io::copy(&mut data, &mut self.out).str_err()?; io::copy(&mut data, &mut self.out).str_err()?;
self.out.flush().str_err()?; self.out.flush().str_err()?;
self.out.get_ref().unlock().str_err()?;
Ok(()) Ok(())
} }
} }
@ -155,7 +165,7 @@ where
mmap: Mmap, mmap: Mmap,
count: usize, count: usize,
first_pos: LSize, first_pos: LSize,
_t: PhantomData<Arc<T>>, _t: Option<Arc<T>>, // PhantomData replacement
} }
impl<T> Reader<T> impl<T> Reader<T>
@ -164,6 +174,7 @@ where
{ {
pub fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> { pub fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> {
let file = fs::File::open(path).str_err()?; let file = fs::File::open(path).str_err()?;
file.try_lock_shared().str_err()?;
let mmap = unsafe { MmapOptions::new().map(&file).str_err()? }; let mmap = unsafe { MmapOptions::new().map(&file).str_err()? };
// read first pos and records count // read first pos and records count
@ -176,7 +187,7 @@ where
mmap, mmap,
count, count,
first_pos, first_pos,
_t: PhantomData, _t: None,
}) })
} }
@ -413,7 +424,7 @@ mod test {
} }
fn gen_data(count: usize) -> impl Iterator<Item = TestData> { fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
(0..count).map(|i| TestData { (0..count).into_iter().map(|i| TestData {
num: i as u64, num: i as u64,
test: "test".repeat(i), test: "test".repeat(i),
}) })

View File

@ -1,13 +1,3 @@
#[cfg(feature = "async")]
pub mod async_db;
#[cfg(feature = "sync")]
pub mod db; pub mod db;
pub mod questions; pub mod questions;
#[cfg(any(
feature = "source",
feature = "source_async",
feature = "convert",
feature = "convert_async"
))]
pub mod source; pub mod source;
pub mod util;

View File

@ -1,398 +1,136 @@
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
#[derive( use crate::source::{SourceQuestion, SourceQuestionsBatch};
Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
)] macro_rules! make {
($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+}};
($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+ ,..$Target::default()}}
}
#[derive(Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode)]
pub struct BatchInfo { pub struct BatchInfo {
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub filename: String, pub filename: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub description: String, pub description: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub author: String, pub author: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub comment: String, pub comment: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub url: String, pub url: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub date: String, pub date: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub processed_by: String, pub processed_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub redacted_by: String, pub redacted_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub copyright: String, pub copyright: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub theme: String, pub theme: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub kind: String, pub kind: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub source: String, pub source: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub rating: String, pub rating: String,
} }
#[derive( #[derive(Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode)]
Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
)]
pub struct Question { pub struct Question {
#[serde(default, skip_serializing_if = "u32_is_zero")] #[serde(default)]
pub num: u32, pub num: u32,
pub id: String, pub id: String,
pub description: String, pub description: String,
pub answer: String, pub answer: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub author: String, pub author: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub comment: String, pub comment: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub comment1: String, pub comment1: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub tour: String, pub tour: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub url: String, pub url: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub date: String, pub date: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub processed_by: String, pub processed_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub redacted_by: String, pub redacted_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub copyright: String, pub copyright: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub theme: String, pub theme: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub kind: String, pub kind: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub source: String, pub source: String,
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub rating: String, pub rating: String,
#[serde(default, skip_serializing_if = "BatchInfo::is_default")] #[serde(default)]
pub batch_info: BatchInfo, pub batch_info: BatchInfo,
} }
fn u32_is_zero(num: &u32) -> bool { impl From<SourceQuestion> for Question {
*num == 0 fn from(src: SourceQuestion) -> Self {
} make! {Self; with defaults and by {
num, id, description, answer, author, comment, comment1, tour, url,
impl BatchInfo { date, processed_by, redacted_by, copyright, theme, kind, source, rating
pub fn is_default(&self) -> bool { }; from src}
*self == BatchInfo::default()
} }
} }
#[cfg(any(feature = "convert", feature = "convert_async"))] impl From<SourceQuestionsBatch> for BatchInfo {
pub mod convert_common { fn from(src: SourceQuestionsBatch) -> Self {
use super::{BatchInfo, Question}; make! {Self; by {
use crate::source::{SourceQuestion, SourceQuestionsBatch}; filename, description, author, comment, url, date,
processed_by, redacted_by, copyright, theme, kind, source, rating
macro_rules! make { }; from src}
($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+}};
($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+ ,..$Target::default()}}
} }
}
impl From<SourceQuestion> for Question { impl From<SourceQuestionsBatch> for Vec<Question> {
fn from(src: SourceQuestion) -> Self { fn from(src: SourceQuestionsBatch) -> Self {
make! {Self; with defaults and by { let mut result: Vec<Question> = src
num, id, description, answer, author, comment, comment1, tour, url, .questions
date, processed_by, redacted_by, copyright, theme, kind, source, rating .iter()
}; from src} .map(|item| item.clone().into())
} .collect();
let batch_info = BatchInfo::from(src);
result.iter_mut().for_each(|mut question| {
question.batch_info = batch_info.clone();
});
result
} }
}
impl From<SourceQuestionsBatch> for BatchInfo { pub trait QuestionsConverter {
fn from(src: SourceQuestionsBatch) -> Self { fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a>;
make! {Self; by { }
filename, description, author, comment, url, date,
processed_by, redacted_by, copyright, theme, kind, source, rating
}; from src}
}
}
impl From<SourceQuestionsBatch> for Vec<Question> { impl<T> QuestionsConverter for T
fn from(src: SourceQuestionsBatch) -> Self { where
let mut src = src; T: Iterator<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>,
let mut questions: Vec<SourceQuestion> = vec![]; {
std::mem::swap(&mut src.questions, &mut questions); fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a> {
let mut result: Vec<Question> = questions.into_iter().map(|item| item.into()).collect(); let iter = self
let batch_info = BatchInfo::from(src); .filter(|(_, data)| data.is_ok())
result.iter_mut().for_each(|question| { .flat_map(|(filename, data)| {
question.batch_info = batch_info.clone(); let mut batch = data.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
questions
}); });
Box::new(iter)
result
}
}
}
#[cfg(feature = "convert")]
pub mod convert {
use super::Question;
use crate::source::SourceQuestionsBatch;
pub trait QuestionsConverter {
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a>;
}
impl<T> QuestionsConverter for T
where
T: Iterator<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>,
{
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a> {
let iter = self
.filter(|(_, data)| data.is_ok())
.flat_map(|(filename, data)| {
let mut batch = data.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
questions
});
Box::new(iter)
}
}
#[cfg(test)]
mod test {
use crate::questions::test::convert_common::sample_batch;
use super::*;
use insta::assert_yaml_snapshot;
use std::iter;
#[test]
fn test_convert() {
let mut source = iter::once((
String::from("test.json"),
Ok::<SourceQuestionsBatch, serde_json::Error>(sample_batch()),
));
let converted: Vec<_> = source.convert().collect();
assert_yaml_snapshot!(converted, @r#"
---
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
"#);
}
}
}
#[cfg(feature = "convert")]
pub use convert::QuestionsConverter;
#[cfg(feature = "convert_async")]
pub mod convert_async {
use futures::stream;
use futures_core::stream::Stream;
use futures_util::StreamExt;
use super::Question;
use crate::source::SourceQuestionsBatch;
pub struct QuestionsConverterAsync<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
inner: T,
}
impl<T> From<T> for QuestionsConverterAsync<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
fn from(inner: T) -> Self {
Self { inner }
}
}
pub trait QuestionsConverterAsyncForStream<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
fn converter(&mut self) -> QuestionsConverterAsync<&mut T>;
}
impl<T> QuestionsConverterAsyncForStream<T> for T
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
fn converter(&mut self) -> QuestionsConverterAsync<&mut T> {
QuestionsConverterAsync::from(self)
}
}
impl<T> QuestionsConverterAsync<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
pub fn convert(self) -> impl Stream<Item = Question> {
self.inner
.filter_map(|(name, res)| async move {
if let Ok(item) = res {
Some((name, item))
} else {
None
}
})
.flat_map(|(filename, batch)| {
stream::iter({
let mut batch = batch;
batch.filename = filename;
let questions: Vec<Question> = batch.into();
questions
})
})
}
}
#[cfg(test)]
mod test {
use crate::questions::test::convert_common::sample_batch;
use super::*;
use futures_util::{pin_mut, StreamExt};
use insta::assert_yaml_snapshot;
#[tokio::test]
async fn test_convert_stream() {
let source = futures::stream::once(async {
(
String::from("test.json"),
Ok::<SourceQuestionsBatch, serde_json::Error>(sample_batch()),
)
});
pin_mut!(source);
let converter = source.converter();
let converter = converter.convert();
let converted: Vec<_> = converter.collect().await;
assert_yaml_snapshot!(converted, @r#"
---
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
"#);
}
}
}
#[cfg(feature = "convert_async")]
pub use convert_async::{QuestionsConverterAsync, QuestionsConverterAsyncForStream};
#[cfg(test)]
mod test {
use super::*;
use insta::assert_yaml_snapshot;
use serde_json::json;
#[cfg(any(feature = "convert", feature = "convert_async"))]
pub mod convert_common {
use crate::source::{SourceQuestion, SourceQuestionsBatch};
pub fn sample_batch() -> SourceQuestionsBatch {
SourceQuestionsBatch {
description: "Тестовый".into(),
date: "00-000-2000".into(),
questions: vec![
SourceQuestion {
id: "Вопрос 1".into(),
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
answer: "42".into(),
..Default::default()
},
SourceQuestion {
id: "Вопрос 2".into(),
description: "Зимой и летом одним цветом".into(),
answer: "ёлка".into(),
..Default::default()
},
],
..Default::default()
}
}
}
pub fn sample_question() -> Question {
Question {
id: "Вопрос 1".into(),
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
answer: "42".into(),
batch_info: BatchInfo {
description: "Тестовый".into(),
date: "00-000-2000".into(),
..Default::default()
},
..Default::default()
}
}
#[test]
fn test_question_ser() {
assert_yaml_snapshot!(sample_question(), @r#"
---
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
description: Тестовый
date: 00-000-2000
"#);
}
#[test]
fn test_question_de() {
let question_from_json: Result<Question, _> = serde_json::from_value(json!({
"id": "Вопрос 1",
"description": "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2",
"answer": "42",
"batch_info": {
"description": "Тестовый",
"date": "00-000-2000"
}
}));
assert!(question_from_json.is_ok());
assert_yaml_snapshot!(question_from_json.unwrap(), @r#"
---
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
description: Тестовый
date: 00-000-2000
"#);
} }
} }

View File

@ -1,10 +1,11 @@
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use std::io::{Read, Seek};
use zip::ZipArchive;
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct SourceQuestion { pub struct SourceQuestion {
#[serde(default, skip_serializing_if = "u32_is_zero")]
pub num: u32,
#[serde(default)] #[serde(default)]
pub num: u32,
pub id: String, pub id: String,
#[serde(alias = "Вопрос")] #[serde(alias = "Вопрос")]
@ -12,568 +13,194 @@ pub struct SourceQuestion {
#[serde(alias = "Ответ")] #[serde(alias = "Ответ")]
pub answer: String, pub answer: String,
#[serde(alias = "Автор", default, skip_serializing_if = "String::is_empty")] #[serde(alias = "Автор")]
#[serde(default)]
pub author: String, pub author: String,
#[serde( #[serde(alias = "Комментарий")]
default, #[serde(default)]
alias = "Комментарий",
skip_serializing_if = "String::is_empty"
)]
pub comment: String, pub comment: String,
#[serde( #[serde(alias = "Комментарии")]
default, #[serde(alias = "Инфо")]
alias = "Комментарии", #[serde(default)]
alias = "Инфо",
skip_serializing_if = "String::is_empty"
)]
pub comment1: String, pub comment1: String,
#[serde(default, alias = "Тур", skip_serializing_if = "String::is_empty")] #[serde(alias = "Тур")]
#[serde(default)]
pub tour: String, pub tour: String,
#[serde( #[serde(alias = "Ссылка")]
default, #[serde(alias = "URL")]
alias = "Ссылка", #[serde(default)]
alias = "URL",
skip_serializing_if = "String::is_empty"
)]
pub url: String, pub url: String,
#[serde(default, alias = "Дата", skip_serializing_if = "String::is_empty")] #[serde(alias = "Дата")]
#[serde(default)]
pub date: String, pub date: String,
#[serde(default, alias = "Обработан", skip_serializing_if = "String::is_empty")] #[serde(alias = "Обработан")]
#[serde(default)]
pub processed_by: String, pub processed_by: String,
#[serde(default, alias = "Редактор", skip_serializing_if = "String::is_empty")] #[serde(alias = "Редактор")]
#[serde(default)]
pub redacted_by: String, pub redacted_by: String,
#[serde(default, alias = "Копирайт", skip_serializing_if = "String::is_empty")] #[serde(alias = "Копирайт")]
#[serde(default)]
pub copyright: String, pub copyright: String,
#[serde(default, alias = "Тема", skip_serializing_if = "String::is_empty")] #[serde(alias = "Тема")]
#[serde(default)]
pub theme: String, pub theme: String,
#[serde( #[serde(alias = "Вид")]
default, #[serde(alias = "Тип")]
alias = "Вид", #[serde(default)]
alias = "Тип",
skip_serializing_if = "String::is_empty"
)]
pub kind: String, pub kind: String,
#[serde(default, alias = "Источник", skip_serializing_if = "String::is_empty")] #[serde(alias = "Источник")]
#[serde(default)]
pub source: String, pub source: String,
#[serde(default, alias = "Рейтинг", skip_serializing_if = "String::is_empty")] #[serde(alias = "Рейтинг")]
#[serde(default)]
pub rating: String, pub rating: String,
} }
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)] #[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct SourceQuestionsBatch { pub struct SourceQuestionsBatch {
#[serde(default, skip_serializing_if = "String::is_empty")] #[serde(default)]
pub filename: String, pub filename: String,
#[serde(alias = "Пакет", alias = "Чемпионат")] #[serde(alias = "Пакет")]
#[serde(alias = "Чемпионат")]
pub description: String, pub description: String,
#[serde(default, alias = "Автор", skip_serializing_if = "String::is_empty")] #[serde(alias = "Автор")]
#[serde(default)]
pub author: String, pub author: String,
#[serde( #[serde(alias = "Комментарий")]
default, #[serde(alias = "Комментарии")]
alias = "Комментарий", #[serde(alias = "Инфо")]
alias = "Комментарии", #[serde(default)]
alias = "Инфо",
skip_serializing_if = "String::is_empty"
)]
pub comment: String, pub comment: String,
#[serde( #[serde(alias = "Ссылка")]
default, #[serde(alias = "URL")]
alias = "Ссылка", #[serde(default)]
alias = "URL",
skip_serializing_if = "String::is_empty"
)]
pub url: String, pub url: String,
#[serde(default, alias = "Дата", skip_serializing_if = "String::is_empty")] #[serde(alias = "Дата")]
#[serde(default)]
pub date: String, pub date: String,
#[serde(default, alias = "Обработан", skip_serializing_if = "String::is_empty")] #[serde(alias = "Обработан")]
#[serde(default)]
pub processed_by: String, pub processed_by: String,
#[serde(default, alias = "Редактор", skip_serializing_if = "String::is_empty")] #[serde(alias = "Редактор")]
#[serde(default)]
pub redacted_by: String, pub redacted_by: String,
#[serde(default, alias = "Копирайт", skip_serializing_if = "String::is_empty")] #[serde(alias = "Копирайт")]
#[serde(default)]
pub copyright: String, pub copyright: String,
#[serde(default, alias = "Тема", skip_serializing_if = "String::is_empty")] #[serde(alias = "Тема")]
#[serde(default)]
pub theme: String, pub theme: String,
#[serde( #[serde(alias = "Вид")]
default, #[serde(alias = "Тип")]
alias = "Вид", #[serde(default)]
alias = "Тип",
skip_serializing_if = "String::is_empty"
)]
pub kind: String, pub kind: String,
#[serde(default, alias = "Источник", skip_serializing_if = "String::is_empty")] #[serde(alias = "Источник")]
#[serde(default)]
pub source: String, pub source: String,
#[serde(default, alias = "Рейтинг", skip_serializing_if = "String::is_empty")] #[serde(alias = "Рейтинг")]
#[serde(default)]
pub rating: String, pub rating: String,
#[serde(alias = "Вопросы")] #[serde(alias = "Вопросы")]
pub questions: Vec<SourceQuestion>, pub questions: Vec<SourceQuestion>,
} }
fn u32_is_zero(num: &u32) -> bool { pub struct SourceQuestionsZipReader<R>
*num == 0 where
R: Read + Seek,
{
zipfile: ZipArchive<R>,
index: Option<usize>,
} }
#[cfg(any(feature = "convert", feature = "source"))] impl<R> SourceQuestionsZipReader<R>
pub mod reader_sync { where
use std::io::{Read, Seek}; R: Read + Seek,
use zip::ZipArchive; {
fn new(zipfile: ZipArchive<R>) -> Self {
use super::SourceQuestionsBatch; SourceQuestionsZipReader {
zipfile,
pub struct SourceQuestionsZipReader<R> index: None,
where
R: Read + Seek,
{
zipfile: ZipArchive<R>,
index: Option<usize>,
}
impl<R> SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
fn new(zipfile: ZipArchive<R>) -> Self {
SourceQuestionsZipReader {
zipfile,
index: None,
}
}
}
impl<R> Iterator for SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
type Item = (String, Result<SourceQuestionsBatch, serde_json::Error>);
fn next(&mut self) -> Option<Self::Item> {
if self.index.is_none() && !self.zipfile.is_empty() {
self.index = Some(0);
}
match self.index {
Some(i) if i < self.zipfile.len() => {
self.index = Some(i + 1);
self.nth(i)
}
_ => None,
}
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
if self.zipfile.len() <= n {
return None;
}
self.index = Some(n + 1);
let file = self.zipfile.by_index(n).unwrap();
let name = file.mangled_name();
let name_str = name.to_str().unwrap();
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
Some((String::from(name_str), data))
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.zipfile.len();
let index = self.index.unwrap_or(0);
let rem = if len > index + 1 {
len - (index + 1)
} else {
0
};
(rem, Some(rem))
}
fn count(self) -> usize
where
Self: Sized,
{
self.zipfile.len()
}
}
impl<R> ExactSizeIterator for SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
fn len(&self) -> usize {
self.zipfile.len()
}
}
pub trait ReadSourceQuestionsBatches<R>
where
R: Read + Seek,
{
fn source_questions(self) -> SourceQuestionsZipReader<R>;
}
impl<R> ReadSourceQuestionsBatches<R> for ZipArchive<R>
where
R: Read + Seek,
{
fn source_questions(self) -> SourceQuestionsZipReader<R> {
SourceQuestionsZipReader::new(self)
}
}
#[cfg(test)]
mod test {
use super::super::test::sample_batch;
use super::*;
use std::fs;
use std::{io::Write, iter, path::Path};
use tempfile::tempdir;
fn write_sample_zip<P>(path: P)
where
P: AsRef<Path>,
{
let batch = sample_batch();
let z_file = fs::File::create(path).expect("crerate zip file");
let mut zip_file = zip::ZipWriter::new(z_file);
let options =
zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Zstd);
zip_file
.start_file("test.json", options)
.expect("zip start file");
let data = serde_json::to_vec(&batch).unwrap();
let amount = zip_file.write(data.as_slice()).expect("write entry");
assert_eq!(amount, data.len());
zip_file.finish().expect("finish zip file");
}
#[test]
fn test_source_questions_get() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip);
let z_file = fs::File::open(tmpfile_zip).expect("open zip file");
let zip_file = zip::ZipArchive::new(z_file).expect("open zip file reader");
let mut source = zip_file.source_questions();
assert_eq!(source.len(), 1);
let actual = source.next().expect("get batch");
assert_eq!(actual.0, "test.json");
assert_eq!(actual.1.expect("parse batch"), expected_batch);
}
#[test]
fn test_source_questions_iter() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip);
let z_file = fs::File::open(tmpfile_zip).expect("open zip file");
let zip_file = zip::ZipArchive::new(z_file).expect("open zip file reader");
let source = zip_file.source_questions();
assert_eq!(source.len(), 1);
let expected_iter = iter::once((String::from("test.json"), Ok(expected_batch)));
assert!(source
.map(|x| (x.0, x.1.map_err(|e| e.to_string())))
.eq(expected_iter));
} }
} }
} }
#[cfg(any(feature = "convert", feature = "source"))] impl<R> Iterator for SourceQuestionsZipReader<R>
pub use reader_sync::{ReadSourceQuestionsBatches, SourceQuestionsZipReader}; where
R: Read + Seek,
{
type Item = (String, Result<SourceQuestionsBatch, serde_json::Error>);
#[cfg(any(feature = "convert_async", feature = "source_async"))] fn next(&mut self) -> Option<Self::Item> {
pub mod reader_async { if self.index.is_none() && !self.zipfile.is_empty() {
use async_stream::stream; self.index = Some(0);
use async_zip::tokio::read::seek::ZipFileReader;
use futures_core::stream::Stream;
use futures_util::AsyncReadExt;
use tokio::io::{AsyncRead, AsyncSeek};
use super::SourceQuestionsBatch;
pub struct SourceQuestionsZipReaderAsync<R>
where
R: AsyncRead + AsyncSeek + Unpin,
{
zipfile: ZipFileReader<R>,
index: Option<usize>,
}
impl<R> SourceQuestionsZipReaderAsync<R>
where
R: AsyncRead + AsyncSeek + Unpin,
{
pub fn new(zipfile: ZipFileReader<R>) -> Self {
SourceQuestionsZipReaderAsync {
zipfile,
index: None,
}
} }
pub fn len(&self) -> usize { match self.index {
self.zipfile.file().entries().len() Some(i) if i < self.zipfile.len() => {
} self.index = Some(i + 1);
pub fn is_empty(&self) -> bool { self.nth(i)
self.len() == 0
}
pub async fn get(
&mut self,
index: usize,
) -> Result<(String, Result<SourceQuestionsBatch, serde_json::Error>), String>
where
R: AsyncRead + AsyncSeek + Unpin,
{
let len = self.len();
if index >= len {
return Err(format!("get index={index}, when len={len}"));
}
let reader = self.zipfile.reader_with_entry(index).await;
if let Err(error) = reader {
return Err(format!("reader_with_entry: {error:?}"));
}
let mut reader = reader.unwrap();
let filename = reader.entry().filename().clone().into_string().unwrap();
let mut data: Vec<u8> = Vec::new();
let readed = reader.read_to_end(&mut data).await;
if let Err(error) = readed {
return Err(format!("read_to_end: {error:?}"));
}
let parsed: Result<SourceQuestionsBatch, _> = serde_json::from_slice(&data);
Ok((filename, parsed))
}
pub async fn get_next(
&mut self,
) -> Option<Result<(String, Result<SourceQuestionsBatch, serde_json::Error>), String>>
where
R: AsyncRead + AsyncSeek + Unpin,
{
if self.index.is_none() && !self.is_empty() {
self.index = Some(0);
}
if self.index.unwrap() >= self.len() {
return None;
}
let item = self.get(self.index.unwrap()).await;
self.index = Some(self.index.unwrap() + 1);
Some(item)
}
pub fn stream(
&mut self,
) -> impl Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)> + '_
{
stream! {
while let Some(Ok(item)) = self.get_next().await {
yield item
}
} }
_ => None,
} }
} }
pub trait ReadSourceQuestionsBatchesAsync<R> fn nth(&mut self, n: usize) -> Option<Self::Item> {
where if self.zipfile.len() <= n {
R: AsyncRead + AsyncSeek + Unpin, return None;
{ }
fn source_questions(self) -> SourceQuestionsZipReaderAsync<R>; self.index = Some(n + 1);
let file = self.zipfile.by_index(n).unwrap();
let name = file.mangled_name();
let name_str = name.to_str().unwrap();
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
Some((String::from(name_str), data))
} }
impl<R> ReadSourceQuestionsBatchesAsync<R> for ZipFileReader<R> fn size_hint(&self) -> (usize, Option<usize>) {
where let len = self.zipfile.len();
R: AsyncRead + AsyncSeek + Unpin, let index = self.index.unwrap_or(0);
{ let rem = if len > index + 1 {
fn source_questions(self) -> SourceQuestionsZipReaderAsync<R> { len - (index + 1)
SourceQuestionsZipReaderAsync::new(self) } else {
} 0
};
(rem, Some(rem))
} }
#[cfg(test)] fn count(self) -> usize
mod test { where
use crate::source::SourceQuestion; Self: Sized,
{
use super::super::test::sample_batch; self.zipfile.len()
use super::*;
use async_zip::{base::write::ZipFileWriter, ZipEntryBuilder};
use core::fmt::Debug;
use futures_util::StreamExt;
use std::path::Path;
use tempfile::tempdir;
use tokio::fs;
async fn write_sample_zip<P>(path: P)
where
P: AsRef<Path>,
{
let batch = sample_batch();
let z_file = fs::File::create(path).await.expect("crerate zip file");
let mut zip_file = ZipFileWriter::with_tokio(z_file);
let entry =
ZipEntryBuilder::new("test.json".into(), async_zip::Compression::Zstd).build();
zip_file
.write_entry_whole(entry, serde_json::to_vec(&batch).unwrap().as_slice())
.await
.expect("write entry");
zip_file.close().await.expect("close zip");
}
async fn assert_data_rref_eq<T>((x, y): (T, &T))
where
T: PartialEq + Debug,
{
assert_eq!(x, *y);
}
#[tokio::test]
async fn test_source_questions_stream() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip).await;
let mut z_file = fs::File::open(tmpfile_zip).await.expect("open zip file");
let zip_file = ZipFileReader::with_tokio(&mut z_file)
.await
.expect("open zip file reader");
let expected_count = expected_batch.questions.len();
let expected_stream = futures::stream::iter(expected_batch.questions.iter());
let mut actual_source = zip_file.source_questions();
let actual_stream = actual_source.stream();
let mut actual_count: usize = 0;
actual_stream
.flat_map(|x| futures::stream::iter(x.1.expect("parse batch").questions))
.zip(expected_stream)
.map(|x| {
actual_count += 1;
x
})
.for_each(assert_data_rref_eq::<SourceQuestion>)
.await;
assert_eq!(actual_count, expected_count);
}
#[tokio::test]
async fn test_source_questions_get() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip).await;
let mut z_file = fs::File::open(tmpfile_zip).await.expect("open zip file");
let zip_file = ZipFileReader::with_tokio(&mut z_file)
.await
.expect("open zip file reader");
let mut source = zip_file.source_questions();
assert_eq!(source.len(), 1);
let actual = source.get(0).await.expect("get batch");
assert_eq!(actual.0, "test.json");
assert_eq!(actual.1.expect("parse batch"), expected_batch);
}
} }
} }
#[cfg(any(feature = "convert_async", feature = "source_async"))]
pub use reader_async::{ReadSourceQuestionsBatchesAsync, SourceQuestionsZipReaderAsync};
#[cfg(test)] impl<R> ExactSizeIterator for SourceQuestionsZipReader<R>
mod test { where
use super::*; R: Read + Seek,
use insta::assert_yaml_snapshot; {
use serde_json::json; fn len(&self) -> usize {
self.zipfile.len()
pub fn sample_batch() -> SourceQuestionsBatch { }
SourceQuestionsBatch { }
description: "Тестовый".into(),
date: "00-000-2000".into(), pub trait ReadSourceQuestionsBatches<R>
questions: vec![ where
SourceQuestion { R: Read + Seek,
id: "Вопрос 1".into(), {
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(), fn source_questions(self) -> SourceQuestionsZipReader<R>;
answer: "42".into(), }
..Default::default()
}, impl<R> ReadSourceQuestionsBatches<R> for ZipArchive<R>
SourceQuestion { where
id: "Вопрос 2".into(), R: Read + Seek,
description: "Зимой и летом одним цветом".into(), {
answer: "ёлка".into(), fn source_questions(self) -> SourceQuestionsZipReader<R> {
..Default::default() SourceQuestionsZipReader::new(self)
},
],
..Default::default()
}
}
#[test]
fn test_batch_ser() {
let batch = sample_batch();
assert_yaml_snapshot!(batch, @r#"
---
description: Тестовый
date: 00-000-2000
questions:
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
"#);
}
#[test]
fn test_batch_de() {
let batch_from_json: Result<SourceQuestionsBatch, _> = serde_json::from_value(json!({
"Чемпионат": "Тестовый",
"Дата": "00-000-2000",
"Вопросы": [
{
"id": "Вопрос 1",
"Вопрос": "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2",
"Ответ": "42",
},
{
"id": "Вопрос 2",
"Вопрос": "Зимой и летом одним цветом",
"Ответ": "ёлка",
},
]
}));
assert!(batch_from_json.is_ok());
assert_yaml_snapshot!(batch_from_json.unwrap(), @r#"
---
description: Тестовый
date: 00-000-2000
questions:
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
"#);
} }
} }

View File

@ -1,57 +0,0 @@
pub trait ErrorToString {
type Output;
fn str_err(self) -> std::result::Result<Self::Output, String>;
}
impl<T, E> ErrorToString for std::result::Result<T, E>
where
E: std::error::Error,
{
type Output = T;
fn str_err(self) -> std::result::Result<Self::Output, String> {
self.map_err(|e| e.to_string())
}
}
#[cfg(any(feature = "sync", feature = "async"))]
mod bincode_utils {
use std::ops::{Deref, DerefMut};
use bincode::enc::write::Writer;
use bincode::error::EncodeError;
/// struct that allows [`Vec<u8>`] to implement [bincode::enc::write::Writer] trait
pub struct BincodeVecWriter {
vec: Vec<u8>,
}
impl BincodeVecWriter {
pub fn new(vec: Vec<u8>) -> BincodeVecWriter {
BincodeVecWriter { vec }
}
}
impl Deref for BincodeVecWriter {
type Target = Vec<u8>;
fn deref(&self) -> &Self::Target {
&self.vec
}
}
impl DerefMut for BincodeVecWriter {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.vec
}
}
impl Writer for BincodeVecWriter {
fn write(&mut self, bytes: &[u8]) -> Result<(), EncodeError> {
self.vec.extend_from_slice(bytes);
Ok(())
}
}
}
#[cfg(any(feature = "sync", feature = "async"))]
pub use bincode_utils::BincodeVecWriter;