Compare commits

..

94 Commits

Author SHA1 Message Date
1b88db07be add questions benches
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-26 13:42:50 +03:00
c4b68cc727
apps: handle err's
All checks were successful
continuous-integration/drone/push Build is passing
+ ord fn's
2023-08-25 15:28:19 +03:00
9c5f05b6a9
app_async: fix print random question 2023-08-25 14:41:53 +03:00
760f6d9415
add async bench
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-23 13:30:51 +03:00
699478f85e
async_zip, async-compression from crates.io
All checks were successful
continuous-integration/drone/push Build is passing
instead of git
2023-08-18 10:05:22 +03:00
23f8537e4e
source: pub fn new in async zip reader 2023-08-18 10:03:50 +03:00
cd93f577be
lib: v1.2.0
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-18 09:37:27 +03:00
cd7f3674ca
ci: publish lib with all features
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-18 09:33:42 +03:00
a0c13ea205 Merge pull request 'add async feature' (#1) from async into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #1
2023-08-18 06:29:44 +00:00
797f781959
app_async: use for_each_concurrent
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-16 14:48:47 +03:00
6d850be1dc
async_db: add [bufread, bufread_stream] tests
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-16 12:04:57 +03:00
33abc15268
app_async: fix Reader::new 2023-08-16 11:49:08 +03:00
feb2303db9
async_db: add BufReader, BufReaderStream
for using as mutable reader
2023-08-16 11:48:45 +03:00
ae96fb3bf8
add FIXME comment
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-15 14:54:40 +03:00
5ed0398203
db(+async) writer: add push_by_ref 2023-08-15 11:03:15 +03:00
9196c61a12
add test for writer.sink() 2023-08-15 10:45:08 +03:00
0adc11cabf
app_async: don't use writer.sink() 2023-08-15 10:44:32 +03:00
07d7ba50cc
async_db "fixme" comments 2023-08-15 10:36:42 +03:00
f1d4cf05c5
async_db: refactor WriterSink::poll_ready
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-14 17:11:53 +03:00
3ea4930105
async_db: add Reader::get_with_buf +test
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-14 16:24:36 +03:00
645b654dac
use BincodeVecWriter in (sync) db
All checks were successful
continuous-integration/drone/push Build is passing
for reusing current item buf
2023-08-14 15:54:26 +03:00
e074108560
use BincodeVecWriter in async_db
for reusing current item buf
2023-08-14 15:53:17 +03:00
ddd728cd5d
add utils::BincodeVecWriter 2023-08-14 15:50:50 +03:00
c326fc59d3
questions converter: remove src clone 2023-08-14 12:21:14 +03:00
31c02ae8ef async_db: add WriterSink
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-13 18:16:49 +03:00
3e3572ce12 async deps: add pin-project 2023-08-13 18:14:41 +03:00
908a6d8cae fix source decode 2023-08-13 18:11:29 +03:00
cd87529ede questions: add QuestionsConverterAsyncForStream 2023-08-13 16:01:10 +03:00
270af2c373 questions: add async convert_stream test
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-13 13:19:40 +03:00
ff6af8389d questions: add sync convert test 2023-08-13 12:55:35 +03:00
8e87e5e17b source tests: reuse fn sample_batch() 2023-08-13 12:34:22 +03:00
ed37fee697 fix clippy warnings in tests 2023-08-13 12:33:19 +03:00
e0326e3b0a add questions ser/de tests
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-12 23:35:17 +03:00
c061fef267 questions: add skip_serializing_if attr's
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-12 22:53:36 +03:00
acf741e721 source tests: check write on write_sample_zip 2023-08-12 22:51:39 +03:00
e32316c6aa source: reformat struct's
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-12 22:34:01 +03:00
ef98b1fea4 add source::reader_async get/stream tests
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-12 22:11:36 +03:00
08acb4e71b add source::reader_sync get/iter tests 2023-08-12 22:10:48 +03:00
7e633577b1 add source batch ser/de tests 2023-08-12 22:08:25 +03:00
d4913a13f9 deps: add insta for tests 2023-08-12 21:42:14 +03:00
21571787f2 source: add skip_serializing_if attr's 2023-08-12 21:40:33 +03:00
f874e59a11
fix convert feature without source
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-10 14:20:58 +03:00
9c4882fbbb
Merge branch 'master' into async
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-10 11:50:20 +03:00
b12048c38d
PhantomData<Arc<T>> for Send to Reader/Writer
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-10 11:48:34 +03:00
68b276ce5e
PhantomData<Arc<T>>
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-10 11:20:38 +03:00
48438c1aec
Revert "ci: fix pipeline build"
All checks were successful
continuous-integration/drone/push Build is passing
This reverts commit 851058245d.
2023-08-08 17:10:14 +03:00
f2fc72056b
fmt
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-08 16:55:22 +03:00
4c555df8ca
fix async_db tests
using ZstdEncoder from ..:tokio::bufread instead of ..:tokio::write
2023-08-08 16:54:40 +03:00
daf41625ff
remove debug prints 2023-08-08 16:52:30 +03:00
2c3950ccfc
fix write_db tokio join 2023-08-08 16:50:02 +03:00
8306c76a0c
fix app_async
Some checks failed
continuous-integration/drone/push Build is failing
2023-08-08 14:31:08 +03:00
965c021d83
fix async_db tests (to fail)
---
failures:
    async_db::test::test_write_read
    async_db::test::test_write_read_stream

thread 'async_db::test::test_write_read' panicked at 'get: "UnexpectedEnd { additional: 1 }
2023-08-08 14:29:18 +03:00
a1a1d41403
replace PhantomData in async_db::Writer
for Send for Writer
2023-08-08 14:25:01 +03:00
b768d27f83
combine convert sync/async features 2023-08-08 14:22:29 +03:00
7cbf5439b7
rewrite SourceQuestionsZipReaderAsync
"impl Stream for SourceQuestionsZipReaderAsync" dropped
2023-08-08 14:20:40 +03:00
8c4631e89f
add app_async
-- write invalid:
"UnexpectedEnd { additional: 385 }"
2023-08-07 22:06:21 +03:00
467ebfcc67
rewrite async converter
(poll_next removed // not work)
2023-08-07 22:04:29 +03:00
80dda8d821
rewrite SourceQuestionsZipReaderAsync stream
(poll_next not work)
2023-08-07 22:02:41 +03:00
851058245d ci: fix pipeline build
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-06 17:03:18 +03:00
ecfc34e821 + async source
(reader and converter)
2023-08-06 17:01:22 +03:00
f55b6f681c move str_err to util.rs 2023-08-06 16:58:43 +03:00
c0193e4f74 ci: add "publish" pipeline type
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-06 01:19:19 +03:00
a496f37ef2 ci: build/test with --all-features
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-06 01:09:57 +03:00
5632c19866 ci: add pipeline type 2023-08-06 01:09:32 +03:00
cad8ff0404 add async test_share_reader 2023-08-06 01:05:36 +03:00
39ce0d8ceb add async reader stream 2023-08-06 00:56:49 +03:00
1ba645f337 add async reader 2023-08-06 00:12:15 +03:00
150527efeb move macro to convert mod 2023-08-06 00:11:44 +03:00
0c0a4e31ba optional feature for source
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-05 23:26:45 +03:00
abcb7d6dc2 add async writer
All checks were successful
continuous-integration/drone/push Build is passing
2023-08-05 23:10:30 +03:00
307790ce49 fix set_pledged_src_size 2023-08-05 23:07:16 +03:00
a6e5931f35 add sync/async features 2023-08-05 23:06:55 +03:00
1e8cd4b461
cargo update
All checks were successful
continuous-integration/drone/push Build is passing
2023-07-19 14:43:22 +03:00
9246d06783
cargo fix 2023-07-19 14:40:56 +03:00
cc5ab52f32
ref bench 2023-07-19 14:40:25 +03:00
e7491f6d3a upd clap to v4 2023-05-07 18:15:45 +03:00
39d4d6b610
add locks + file alloc in Writer::finish
All checks were successful
continuous-integration/drone/push Build is passing
2023-04-05 16:53:40 +03:00
27260695f7
upd lock
All checks were successful
continuous-integration/drone/push Build is passing
2023-04-03 15:35:58 +03:00
f5bd72b298
fix publish 2
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-29 13:46:21 +03:00
dda50e7d2b
fix build on publish
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-29 13:31:23 +03:00
693f349ae3
fix license 2023-03-29 13:30:57 +03:00
7a2e58b1b9
remove branch filter for publish
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-29 13:23:03 +03:00
9570b1d6e2
add manifest info 2023-03-29 13:22:37 +03:00
5355d0398d
remove ledb
All checks were successful
continuous-integration/drone Build is passing
2023-03-29 13:00:17 +03:00
b63e9aa45c
add publish pipeline 2023-03-29 12:59:35 +03:00
8120a996a3
add test_share_reader
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-28 15:43:08 +03:00
3a26a4aa7f
upd bench 2023-03-28 15:37:21 +03:00
103b677d21
into_iter() for reader 2023-03-28 15:01:14 +03:00
e18539a982
reader without mut 2023-03-28 14:59:44 +03:00
249ac3a4ef
ci: use sparse protocol for crates.io
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-13 17:10:09 +03:00
e940f1c37c
cargo update
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-02 14:50:06 +03:00
f3dabe7a06
new db file rename 2023-01-03 23:52:50 +03:00
e521e39f5e add ledb read/write benches
All checks were successful
continuous-integration/drone/push Build is passing
2022-11-10 22:21:55 +03:00
cf591198a0 separate lib
All checks were successful
continuous-integration/drone/push Build is passing
2022-11-10 21:43:15 +03:00
22 changed files with 4143 additions and 1087 deletions

.drone.yml

@@ -1,4 +1,5 @@
 kind: pipeline
+type: docker
 name: default
 
 steps:
@@ -6,6 +7,33 @@ steps:
   image: rust:1-alpine
   commands:
   - apk add --no-cache musl-dev
-  - cargo build --verbose --all
-  - cargo test --verbose --all
+  - cargo build --verbose --all-features --all
+  - cargo test --verbose --all-features --all
+  environment:
+    CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
+
+trigger:
+  event:
+  - push
+
+---
+kind: pipeline
+type: docker
+name: publish
+
+steps:
+- name: push
+  image: rust:1-alpine
+  commands:
+  - apk add --no-cache musl-dev
+  - cargo build -p chgk_ledb_lib --all-features
+  - cargo publish --registry gitea -p chgk_ledb_lib --all-features
+  environment:
+    CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
+    CARGO_REGISTRIES_GITEA_INDEX: https://gitea.b4tman.ru/b4tman/_cargo-index.git
+    CARGO_REGISTRIES_GITEA_TOKEN:
+      from_secret: cargo_gitea_token
+trigger:
+  event:
+  - tag

2
.gitignore vendored

@@ -6,3 +6,5 @@ json.zip
 /exp
 /.vscode
 test*.bin
+db.dat
+*.pending-snap

1489
Cargo.lock generated

File diff suppressed because it is too large.

Cargo.toml

@@ -1,32 +1,10 @@
-[package]
-name = "chgk_ledb"
-version = "0.1.0"
-authors = ["Dmitry <b4tm4n@mail.ru>"]
-edition = "2021"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[[bench]]
-name = "db_bench"
-harness = false
-
-[dependencies]
-serde="1.0"
-serde_derive="1.0"
-serde_json="1.0"
-ledb = {git = "https://github.com/b4tman/ledb.git", rev="a646b90e", package="ledb"}
-ledb-derive = {git = "https://github.com/b4tman/ledb.git", rev="a646b90e", package="ledb-derive"}
-ledb-types = {git = "https://github.com/b4tman/ledb.git", rev="a646b90e", package="ledb-types"}
-zip="0.6"
-rand="0.8"
-clap = { version = "3.2.22", features = ["derive"] }
-bincode = "^2.0.0-rc.2"
-zstd = "^0.10"
-memmap = "0.7.0"
-
-[dev-dependencies]
-criterion = "0.4.0"
-tempfile = "3.3"
+[workspace]
+resolver = "2"
+members = [
+    "app",
+    "app_async",
+    "lib"
+]
 
 [profile.release]
 opt-level = 3

32
app/Cargo.toml Normal file

@@ -0,0 +1,32 @@
[package]
name = "chgk_ledb"
version = "1.1.0"
authors = ["Dmitry <b4tm4n@mail.ru>"]
edition = "2021"
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
license = "MIT"
description = "Утилита загружающая базу данных ЧГК вопросов из ZIP файла в JSON формате в базу данных."
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bench]]
name = "db_bench"
harness = false
[[bench]]
name = "questions_bench"
harness = false
[dependencies]
chgk_ledb_lib = {path = "../lib", features = ["sync", "source", "convert"]}
serde_json="1.0"
zip="0.6"
rand="0.8"
clap = { version = "4.2.7", features = ["derive"] }
[dev-dependencies]
criterion = "0.4.0"
tempfile = "3.3"
bincode = "^2.0.0-rc.2"
serde="1.0"
serde_derive="1.0"

app/benches/db_bench.rs

@@ -1,34 +1,50 @@
 #[macro_use]
 extern crate criterion;
+extern crate bincode;
+extern crate serde;
+extern crate serde_derive;
+extern crate serde_json;
+extern crate tempfile;
 
-#[path = "../src/db.rs"]
-mod db;
-use std::path::PathBuf;
+use chgk_ledb_lib::db;
+use std::path::Path;
 
 use db::{Reader, Writer, WriterOpts};
 
 use criterion::{BatchSize, Criterion};
-use tempfile::tempdir;
+use tempfile::{tempdir, NamedTempFile};
 
-#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+use serde_derive::{Deserialize, Serialize};
+
+#[derive(
+    bincode::Encode,
+    bincode::Decode,
+    Clone,
+    Debug,
+    PartialEq,
+    Eq,
+    PartialOrd,
+    Ord,
+    Serialize,
+    Deserialize,
+)]
 struct TestData {
-    num: u128,
+    num1: u64,
+    num2: u64,
     test: String,
 }
 
 const N: usize = 4096;
 
 fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
-    (0..count)
-        .into_iter()
-        .map(|i| 143 + i as u128)
-        .map(|i| TestData {
-            num: i,
-            test: "test ---- Test ____".repeat(123 + i as usize % 15),
-        })
+    (0..count).map(|i| 143 + i as u64).map(|i| TestData {
+        num1: i,
+        num2: (i * 100) ^ 0xDF0E441122334455,
+        test: "test ---- Test ____".repeat(123 + i as usize % 15),
+    })
 }
 
-fn prepare_db_writer(path: &PathBuf) -> Writer<TestData> {
+fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
     let opts = WriterOpts {
         compress_lvl: 1,
         data_buf_size: 100 * 1024 * 1024,
@@ -41,7 +57,9 @@ fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
 fn db_read(c: &mut Criterion) {
     let dir = tempdir().expect("tempdir");
-    let tmpfile = dir.path().join("test.tmp");
+    let tmpfile = NamedTempFile::new_in(dir.path())
+        .expect("new tempfile")
+        .into_temp_path();
 
     let mut writer = prepare_db_writer(&tmpfile);
     let mut items_iter = gen_data(N).collect::<Vec<TestData>>().into_iter();
@@ -54,9 +72,8 @@ fn db_read(c: &mut Criterion) {
                 let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
                 reader
             },
-            |mut reader| {
-                let mut reader_iter = reader.iter();
-                while let Some(item) = reader_iter.next() {
+            |reader| {
+                for item in reader {
                     drop(item);
                 }
             },
@@ -67,10 +84,13 @@ fn db_read(c: &mut Criterion) {
 fn db_write(c: &mut Criterion) {
     let dir = tempdir().expect("tempdir");
-    let tmpfile = dir.path().join("test.tmp");
 
     c.bench_function("write", |b| {
         b.iter_batched(
             || {
+                let tmpfile = NamedTempFile::new_in(dir.path())
+                    .expect("new tempfile")
+                    .into_temp_path();
                 let src = gen_data(N).collect::<Vec<TestData>>().into_iter();
                 let writer = prepare_db_writer(&tmpfile);
                 (src, writer)
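
The bench above now builds writers from any path-like value and consumes the Reader by value as an iterator. For orientation, a minimal round-trip sketch of the sync API outside the Criterion harness (not part of this changeset; Item is a made-up payload type and the "sync" feature of chgk_ledb_lib is assumed):

// Minimal sketch, not part of the diff: sync Writer/Reader round trip with a
// hypothetical `Item` type (anything deriving bincode::Encode + bincode::Decode).
use chgk_ledb_lib::db::{Reader, Writer, WriterOpts};

#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq)]
struct Item {
    num: u64,
    text: String,
}

fn main() -> Result<(), String> {
    let items: Vec<Item> = (0..3)
        .map(|i| Item { num: i, text: format!("item {i}") })
        .collect();

    // Write: push all items from an iterator, then finish() to emit the offset table.
    let opts = WriterOpts { compress_lvl: 1, ..Default::default() };
    let mut writer: Writer<Item> = Writer::new("items.dat", opts)?;
    writer.load(&mut items.clone().into_iter())?;
    writer.finish()?;

    // Read: random access by index, or consume the reader as an iterator.
    let reader: Reader<Item> = Reader::new("items.dat", 2048)?;
    assert_eq!(reader.len(), items.len());
    println!("{:?}", reader.get(0)?);
    for item in reader {
        // each record is decompressed and decoded on demand
        let _ = item;
    }
    Ok(())
}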

app/benches/questions_bench.rs

@@ -0,0 +1,101 @@
#[macro_use]
extern crate criterion;
extern crate bincode;
extern crate serde;
extern crate serde_derive;
extern crate serde_json;
extern crate tempfile;
use chgk_ledb_lib::db::{Reader, Writer, WriterOpts};
use chgk_ledb_lib::questions::{Question, QuestionsConverter};
use chgk_ledb_lib::source::ReadSourceQuestionsBatches;
use std::path::Path;
use std::time::Duration;
use std::{fs, io};
use criterion::{BatchSize, Criterion};
use tempfile::{tempdir, NamedTempFile};
const ZIP_FILENAME: &str = "../json.zip";
const NEW_DB_FILENAME: &str = "../db.dat";
const N: usize = 4096;
fn read_sample() -> Vec<Question> {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
source_questions
.convert()
.take(N)
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
})
.collect()
}
fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
let opts = WriterOpts {
compress_lvl: 1,
data_buf_size: 100 * 1024 * 1024,
out_buf_size: 100 * 1024 * 1024,
current_buf_size: 10240,
};
Writer::new(path, opts).expect("new writer")
}
fn questions_read(c: &mut Criterion) {
c.bench_function("questions_read", |b| {
b.iter_batched(
|| {
let reader: Reader<Question> =
Reader::new(NEW_DB_FILENAME, 4096).expect("new reader");
reader.into_iter().take(N)
},
|reader| {
for item in reader {
drop(item);
}
},
BatchSize::SmallInput,
)
});
}
fn questions_write(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
c.bench_function("questions_write", |b| {
b.iter_batched(
|| {
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
let src = read_sample().into_iter();
let writer = prepare_db_writer(&tmpfile);
(src, writer)
},
|(mut src, mut writer)| {
writer.load(&mut src).unwrap();
writer.finish().unwrap();
},
BatchSize::SmallInput,
)
});
}
fn config() -> Criterion {
Criterion::default()
.sample_size(40)
.warm_up_time(Duration::from_secs(7))
.measurement_time(Duration::from_secs(20))
}
criterion_group! {name=benches; config = config(); targets = questions_read, questions_write}
criterion_main!(benches);

181
app/src/main.rs Normal file

@@ -0,0 +1,181 @@
extern crate serde_json;
use clap::{Parser, Subcommand};
use rand::seq::IteratorRandom;
use std::io;
use std::time::Instant;
use std::{fs, sync::mpsc, thread};
use chgk_ledb_lib::db;
use chgk_ledb_lib::questions;
use chgk_ledb_lib::source;
use crate::questions::{Question, QuestionsConverter};
use crate::source::ReadSourceQuestionsBatches;
use chgk_ledb_lib::util::ErrorToString;
const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "db.dat";
#[derive(Subcommand, Debug)]
enum Command {
Write,
Print {
#[clap(value_parser, default_value = "0")]
id: u32,
},
ZipPrint {
#[clap(value_parser, default_value = "0")]
file_num: usize,
#[clap(value_parser, default_value = "0")]
num: usize,
},
}
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
struct Cli {
#[clap(subcommand)]
command: Command,
#[clap(short, long, action)]
measure: bool,
}
fn main() {
let args = Cli::parse();
let mut action: Box<dyn FnOnce()> = match &args.command {
Command::Write => Box::new(write_db),
Command::Print { id } => {
let get_question = Box::new(|| read_from_db(*id));
Box::new(|| print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = Box::new(|| read_from_zip(*file_num, *num));
Box::new(|| print_question_from(get_question))
}
};
if args.measure {
action = Box::new(|| measure_and_print(action));
}
action();
}
// measure and return time elapsed in `func` in seconds
pub fn measure<F: FnOnce()>(func: F) -> f64 {
let start = Instant::now();
func();
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub fn measure_and_print<F: FnOnce()>(func: F) {
let m = measure(func);
eprintln!("{}", m);
}
fn print_question_from<F>(get_q: F)
where
F: FnOnce() -> Result<Question, String>,
{
let q = get_q().expect("question not found");
println!("{:#?}", q)
}
fn read_from_zip(file_num: usize, mut num: usize) -> Result<Question, String> {
let mut rng = rand::thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).str_err()?;
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).str_err()?;
let mut source_questions = archive.source_questions();
let (filename, batch) = if file_num == 0 {
source_questions
.choose(&mut rng)
.ok_or("rand choose".to_string())?
} else {
source_questions
.nth(file_num - 1)
.ok_or(format!("file nth #{file_num} => None"))?
};
let mut batch = batch.map_err(|e| format!("get batch from file #{file_num} => {e}"))?;
batch.filename = filename;
let questions: Vec<Question> = batch.into();
if num == 0 {
num = (1..=questions.len())
.choose(&mut rng)
.ok_or("rand choose".to_string())?;
}
Ok(questions[num - 1].clone())
}
fn read_from_db(id: u32) -> Result<Question, String> {
let reader: db::Reader<Question> = db::Reader::new(NEW_DB_FILENAME, 2048)?;
let len = reader.len();
let mut questions = reader.into_iter();
let question = match id {
0 => {
let mut rng = rand::thread_rng();
questions
.choose(&mut rng)
.ok_or(format!("rand choose, len = {len}"))?
}
_ => questions
.nth((id - 1) as usize)
.ok_or(format!("get nth #{id} => None"))?,
};
Ok(question)
}
fn write_db() {
let (tx, rx) = mpsc::channel::<Question>();
[
thread::spawn(move || zip_reader_task(tx)),
thread::spawn(move || db_writer_task(rx)),
]
.into_iter()
.for_each(|handle| handle.join().expect("thread panic"));
println!("all done");
}
fn zip_reader_task(tx: mpsc::Sender<Question>) {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let questions = source_questions
.convert()
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
});
for question in questions {
let res = tx.send(question);
if res.is_err() {
break;
}
}
println!("read done");
}
fn db_writer_task(rx: mpsc::Receiver<Question>) {
let writer_opts = db::WriterOpts::default();
let mut writer: db::Writer<Question> =
db::Writer::new(NEW_DB_FILENAME, writer_opts).expect("new db writer");
writer
.load(&mut rx.iter())
.unwrap_or_else(|e| panic!("db writer load, {e:#?}"));
writer.finish().expect("db writer finish");
println!("write done");
}

45
app_async/Cargo.toml Normal file

@@ -0,0 +1,45 @@
[package]
name = "chgk_ledb_async"
version = "1.1.0"
authors = ["Dmitry <b4tm4n@mail.ru>"]
edition = "2021"
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
license = "MIT"
description = "Утилита загружающая базу данных ЧГК вопросов из ZIP файла в JSON формате в базу данных."
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bench]]
name = "async_bench"
harness = false
[[bench]]
name = "questions_async_bench"
harness = false
[dependencies]
chgk_ledb_lib = {path = "../lib", features = ["async", "convert_async"]}
serde_json="1.0"
async_zip = { version = "0.0.15", features = [
"zstd",
"tokio",
"tokio-fs"] }
tokio = { version = "1", features = [
"io-util",
"fs",
"rt-multi-thread"
] }
tokio-stream = "0.1"
rand="0.8"
clap = { version = "4.2.7", features = ["derive"] }
futures = "0.3"
[dev-dependencies]
criterion = { version = "0.5.1", features = ["async_tokio"]}
tempfile = "3.3"
bincode = "^2.0.0-rc.2"
serde="1.0"
serde_derive="1.0"
futures = "0.3"
async-compression = { version = "0.4.1", default-features = false }
lazy_static = "1.4.0"

app_async/benches/async_bench.rs

@@ -0,0 +1,153 @@
#[macro_use]
extern crate criterion;
extern crate bincode;
extern crate serde;
extern crate serde_derive;
extern crate tempfile;
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use futures::StreamExt;
use std::{ops::Deref, path::Path};
use criterion::{BatchSize, Criterion};
use lazy_static::lazy_static;
use tempfile::{tempdir, NamedTempFile};
use serde_derive::{Deserialize, Serialize};
#[derive(
bincode::Encode,
bincode::Decode,
Clone,
Debug,
PartialEq,
Eq,
PartialOrd,
Ord,
Serialize,
Deserialize,
)]
struct TestData {
num1: u64,
num2: u64,
test: String,
}
use tokio::runtime;
lazy_static! {
static ref RUNTIME: tokio::runtime::Runtime =
runtime::Builder::new_current_thread().build().unwrap();
}
const N: usize = 4096;
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
(0..count).map(|i| 143 + i as u64).map(|i| TestData {
num1: i,
num2: (i * 100) ^ 0xDF0E441122334455,
test: "test ---- Test ____".repeat(123 + i as usize % 15),
})
}
async fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
let opts = WriterOpts {
compress_lvl: Level::Fastest,
data_buf_size: 100 * 1024 * 1024,
out_buf_size: 100 * 1024 * 1024,
current_buf_size: 10240,
};
Writer::new(path, opts).await.expect("new writer")
}
fn write_sample<P: AsRef<Path>>(path: P) {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move {
let mut writer = prepare_db_writer(rp).await;
let items_iter = gen_data(N).collect::<Vec<TestData>>().into_iter();
let mut samples_stream = futures::stream::iter(items_iter);
writer.load(&mut samples_stream).await.unwrap();
writer.finish().await.unwrap();
})
})
.join()
.expect("spawn thread");
}
fn setup_writer<P: AsRef<Path>>(path: P) -> Writer<TestData> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(prepare_db_writer(rp))
})
.join()
.expect("spawn thread")
}
fn setup_reader<P: AsRef<Path>>(path: P) -> Reader<TestData> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move { Reader::new(rp).await.expect("new reader") })
})
.join()
.expect("spawn thread")
}
fn async_read(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
write_sample(&tmpfile);
c.bench_function("async_read", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| setup_reader(&tmpfile),
|reader| async move { reader.stream().for_each(|item| async { drop(item) }).await },
BatchSize::SmallInput,
)
});
}
fn async_write(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
c.bench_function("async_write", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| {
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
let src = gen_data(N).collect::<Vec<TestData>>().into_iter();
let src = futures::stream::iter(src);
let writer = setup_writer(&tmpfile);
(src, writer)
},
|(mut src, mut writer)| async move {
writer.load(&mut src).await.unwrap();
writer.finish().await.unwrap();
},
BatchSize::SmallInput,
)
});
}
fn config() -> Criterion {
Criterion::default().sample_size(20)
}
criterion_group! {name=benches; config = config(); targets = async_read, async_write}
criterion_main!(benches);

app_async/benches/questions_async_bench.rs

@@ -0,0 +1,152 @@
#[macro_use]
extern crate criterion;
extern crate bincode;
extern crate serde;
extern crate serde_derive;
extern crate tempfile;
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use chgk_ledb_lib::questions::{Question, QuestionsConverterAsyncForStream};
use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
use futures::pin_mut;
use futures::StreamExt;
use std::time::Duration;
use std::{ops::Deref, path::Path};
use async_zip::tokio::read::seek::ZipFileReader;
use criterion::{BatchSize, Criterion};
use lazy_static::lazy_static;
use tempfile::{tempdir, NamedTempFile};
use tokio::{fs, runtime};
const ZIP_FILENAME: &str = "../json.zip";
const NEW_DB_FILENAME: &str = "../db.dat";
lazy_static! {
static ref RUNTIME: tokio::runtime::Runtime =
runtime::Builder::new_current_thread().build().unwrap();
}
const N: usize = 4096;
async fn read_sample() -> Vec<Question> {
let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip");
let archive = ZipFileReader::with_tokio(&mut file)
.await
.expect("open zip file reader");
let mut source_questions = archive.source_questions();
let source_questions = source_questions.stream();
pin_mut!(source_questions);
source_questions
.converter()
.convert()
.take(N)
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
})
.collect()
.await
}
fn read_sample_sync() -> Vec<Question> {
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(read_sample())
})
.join()
.expect("spawn thread")
}
async fn prepare_db_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
let opts = WriterOpts {
compress_lvl: Level::Fastest,
data_buf_size: 100 * 1024 * 1024,
out_buf_size: 100 * 1024 * 1024,
current_buf_size: 10240,
};
Writer::<Question>::new(path, opts)
.await
.expect("new writer")
}
fn setup_writer<P: AsRef<Path>>(path: P) -> Writer<Question> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(prepare_db_writer(rp))
})
.join()
.expect("spawn thread")
}
fn setup_reader<P: AsRef<Path>>(path: P) -> Reader<Question> {
let rp = path.as_ref().to_str().unwrap().to_string();
std::thread::spawn(|| {
runtime::Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move { Reader::new(rp).await.expect("new reader") })
})
.join()
.expect("spawn thread")
}
fn questions_async_read(c: &mut Criterion) {
c.bench_function("questions_async_read", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| setup_reader(NEW_DB_FILENAME),
|reader| async move {
reader
.stream()
.take(N)
.for_each(|item| async { drop(item) })
.await
},
BatchSize::SmallInput,
)
});
}
fn questions_async_write(c: &mut Criterion) {
let dir = tempdir().expect("tempdir");
c.bench_function("questions_async_write", |b| {
b.to_async(RUNTIME.deref()).iter_batched(
|| {
let tmpfile = NamedTempFile::new_in(dir.path())
.expect("new tempfile")
.into_temp_path();
let src = read_sample_sync().into_iter();
let src = futures::stream::iter(src);
let writer = setup_writer(&tmpfile);
(src, writer)
},
|(mut src, mut writer)| async move {
writer.load(&mut src).await.unwrap();
writer.finish().await.unwrap();
},
BatchSize::SmallInput,
)
});
}
fn config() -> Criterion {
Criterion::default()
.sample_size(40)
.warm_up_time(Duration::from_secs(7))
.measurement_time(Duration::from_secs(20))
}
criterion_group! {name=benches; config = config(); targets = questions_async_read, questions_async_write}
criterion_main!(benches);

197
app_async/src/main.rs Normal file

@@ -0,0 +1,197 @@
extern crate serde_json;
use clap::{Parser, Subcommand};
use futures::{pin_mut, Future};
use rand::distributions::Uniform;
use rand::seq::IteratorRandom;
use rand::{thread_rng, Rng};
use async_zip::tokio::read::seek::ZipFileReader;
use futures::stream::{self, StreamExt};
use std::time::Instant;
use tokio::sync::mpsc::{self, UnboundedReceiver, UnboundedSender};
use async_db::WriterOpts;
use tokio::{fs, io};
use tokio_stream::wrappers::UnboundedReceiverStream;
use chgk_ledb_lib::async_db;
use chgk_ledb_lib::questions::Question;
use chgk_ledb_lib::questions::QuestionsConverterAsyncForStream;
use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
use chgk_ledb_lib::util::ErrorToString;
const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "db.dat";
#[derive(Subcommand, Debug)]
enum Command {
Write,
Print {
#[clap(value_parser, default_value = "0")]
id: u32,
},
ZipPrint {
#[clap(value_parser, default_value = "0")]
file_num: usize,
#[clap(value_parser, default_value = "0")]
num: usize,
},
}
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
struct Cli {
#[clap(subcommand)]
command: Command,
#[clap(short, long, action)]
measure: bool,
}
#[tokio::main]
async fn main() {
let args = Cli::parse();
let mut action: Box<dyn Future<Output = _>> = match &args.command {
Command::Write => Box::new(write_db()),
Command::Print { id } => {
let get_question = read_from_db(*id);
Box::new(print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = read_from_zip(*file_num, *num);
Box::new(print_question_from(get_question))
}
};
if args.measure {
action = Box::new(measure_and_print(Box::into_pin(action)));
}
Box::into_pin(action).await;
}
// measure and return time elapsed in `fut` in seconds
pub async fn measure<F: Future>(fut: F) -> f64 {
let start = Instant::now();
fut.await;
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub async fn measure_and_print<F: Future>(fut: F) {
let m = measure(fut).await;
eprintln!("{}", m);
}
async fn print_question_from<F>(get_q: F)
where
F: Future<Output = Result<Question, String>>,
{
let q = get_q.await.expect("question not found");
println!("{:#?}", q)
}
async fn read_from_zip(file_num: usize, mut num: usize) -> Result<Question, String> {
let mut rng = thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).await.str_err()?;
let mut zip_reader = io::BufReader::new(zip_file);
let archive = ZipFileReader::with_tokio(&mut zip_reader).await.str_err()?;
let mut source = archive.source_questions();
let files_count = source.len();
let file_index = if file_num == 0 {
let files = Uniform::new(0, files_count);
rng.sample(files)
} else {
file_num - 1
};
let src = source
.get(file_index)
.await
.map_err(|e| format!("get file {file_num} => {e}"))?;
let src = stream::once(async { src });
pin_mut!(src);
let converter = src.converter();
let questions: Vec<_> = converter.convert().collect().await;
if num == 0 {
num = (1..=questions.len()).choose(&mut rng).unwrap();
}
let mut question = questions
.get(num - 1)
.ok_or(format!("get question #{num} => None"))?
.clone();
question.num = num as u32;
Ok(question)
}
async fn read_from_db(id: u32) -> Result<Question, String> {
let reader: async_db::Reader<Question> = async_db::Reader::new(NEW_DB_FILENAME).await?;
let len = reader.len();
let index = if id == 0 {
let mut rng = thread_rng();
let questions = Uniform::new(0, len);
rng.sample(questions)
} else {
id as usize - 1
};
reader
.get(index)
.await
.map_err(|e| format!("get #{index} => {e}"))
}
async fn write_db() {
let (tx, rx) = mpsc::unbounded_channel::<Question>();
tokio::try_join!(
tokio::spawn(zip_reader_task(tx)),
tokio::spawn(db_writer_task(rx))
)
.expect("tokio join");
println!("all done");
}
async fn zip_reader_task(tx: UnboundedSender<Question>) {
let mut file = fs::File::open(ZIP_FILENAME).await.expect("open zip");
let archive = ZipFileReader::with_tokio(&mut file)
.await
.expect("open zip file reader");
let mut source_questions = archive.source_questions();
let source_questions = source_questions.stream();
pin_mut!(source_questions);
source_questions
.converter()
.convert()
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + (num as u32);
question
})
.for_each_concurrent(None, |question| async {
tx.send(question).expect("send");
})
.await;
println!("read done");
}
async fn db_writer_task(rx: UnboundedReceiver<Question>) {
let writer_opts = WriterOpts::default();
let mut writer: async_db::Writer<Question> =
async_db::Writer::new(NEW_DB_FILENAME, writer_opts)
.await
.unwrap_or_else(|e| panic!("db writer load, {e:#?}"));
let stream: UnboundedReceiverStream<_> = rx.into();
let stream = stream;
writer.load(stream).await.expect("load");
writer.finish().await.expect("db writer finish");
println!("write done");
}

76
lib/Cargo.toml Normal file

@@ -0,0 +1,76 @@
[package]
name = "chgk_ledb_lib"
version = "1.2.0"
authors = ["Dmitry <b4tm4n@mail.ru>"]
edition = "2021"
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
license = "MIT"
description = "Библиотека для доступа к файлу базы данных вопросов ЧГК"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
sync = ["zstd", "memmap"]
async = [
"futures",
"futures-core",
"futures-util",
"fmmap",
"tokio",
"async-compression",
"async-stream",
"pin-project",
]
source = ["zip"]
source_async = [
"async_zip",
"tokio",
"futures",
"futures-core",
"futures-util",
"async-stream",
]
convert = ["zip"]
convert_async = [
"futures",
"futures-core",
"futures-util",
"async-stream",
"async_zip",
"tokio",
]
[dependencies]
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
bincode = "^2.0.0-rc.2"
zip = { version = "0.6", optional = true }
async_zip = { version = "0.0.15" , features = [
"zstd",
"tokio",
"tokio-fs",
], optional = true }
fmmap = { version = "0.3", features = ["tokio-async"], optional = true }
tokio = { version = "1", features = [
"fs",
"io-util",
"rt",
"macros",
], optional = true }
futures-core = { version = "0.3", optional = true }
futures = { version = "0.3", optional = true }
futures-util = { version = "0.3", optional = true }
async-compression = { version = "0.4.1", default-features = false, features = [
"zstd",
"tokio",
], optional = true }
async-stream = { version = "0.3", optional = true }
zstd = { version = "^0.12", default-features = false, optional = true }
memmap = { version = "0.7.0", optional = true }
pin-project = { version = "1.1.3", optional = true }
[dev-dependencies]
insta = { version = "1.31.0", features = ["yaml"] }
tempfile = "3.3"

787
lib/src/async_db.rs Normal file

@@ -0,0 +1,787 @@
use std::marker::PhantomData;
use std::ops::Deref;
use std::vec;
use std::{path::Path, sync::Arc};
use async_compression::tokio::bufread::ZstdDecoder;
use async_compression::tokio::bufread::ZstdEncoder;
use async_compression::Level;
use futures::sink::Sink;
use futures::stream::StreamExt;
use futures_core::stream::Stream;
use futures_core::Future;
use futures_util::pin_mut;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::{
fs,
io::{self, AsyncReadExt, AsyncWriteExt},
};
use fmmap::tokio::{AsyncMmapFile, AsyncMmapFileExt, AsyncOptions};
type LSize = u32;
const LEN_SIZE: usize = std::mem::size_of::<LSize>();
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
use crate::util::BincodeVecWriter;
use crate::util::ErrorToString;
pub struct WriterOpts {
pub compress_lvl: Level,
pub data_buf_size: usize,
pub out_buf_size: usize,
pub current_buf_size: usize,
}
impl Default for WriterOpts {
fn default() -> Self {
Self {
compress_lvl: Level::Default,
data_buf_size: 500 * 1024 * 1024,
out_buf_size: 200 * 1024 * 1024,
current_buf_size: 100 * 1024,
}
}
}
pub struct Writer<T>
where
T: bincode::Encode,
{
out: io::BufWriter<fs::File>,
data_buf: Vec<u8>,
cur_buf_item: BincodeVecWriter,
table: Vec<LSize>,
compress_lvl: Level,
_t: PhantomData<Arc<T>>,
}
impl<T> Writer<T>
where
T: bincode::Encode,
{
pub async fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
let out = fs::File::create(path).await.str_err()?;
let out = io::BufWriter::with_capacity(opts.out_buf_size, out);
let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size);
let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
let compress_lvl = opts.compress_lvl;
let table: Vec<LSize> = vec![];
Ok(Self {
out,
data_buf,
cur_buf_item,
table,
compress_lvl,
_t: PhantomData,
})
}
pub async fn push(&mut self, item: T) -> Result<(), String> {
self.push_by_ref(&item).await
}
pub async fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
let pos: LSize = self.data_buf.len() as LSize;
bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?;
let mut zencoder = ZstdEncoder::with_quality(&self.cur_buf_item[..], self.compress_lvl);
io::copy(&mut zencoder, &mut self.data_buf)
.await
.str_err()?;
self.cur_buf_item.clear();
self.table.push(pos);
// FIXME
// this will break WriterSink::poll_ready (will wait forever), but not Writer::load
// tokio::time::sleep(std::time::Duration::from_secs(1)).await;
Ok(())
}
pub async fn load<S>(&mut self, source: S) -> Result<(), String>
where
S: Stream<Item = T> + std::marker::Unpin,
{
let hint = source.size_hint();
let hint = std::cmp::max(hint.0, hint.1.unwrap_or(0));
if hint > 0 {
self.table.reserve(hint);
}
pin_mut!(source);
while let Some(item) = source.next().await {
self.push(item).await?;
}
Ok(())
}
pub async fn finish(mut self) -> Result<(), String> {
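// Resulting file layout: a table of (item count + 1) little-endian LSize offsets,
// each shifted by the table size so it points at an absolute file position,
// followed by the per-item zstd-compressed bincode records.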
// finish tab
let pos: LSize = self.data_buf.len() as LSize;
self.table.push(pos);
// write tab
let tab_size = (self.table.len() * LEN_SIZE) as LSize;
for pos in self.table {
let pos_data = (pos + tab_size).to_le_bytes();
self.out.write_all(&pos_data).await.str_err()?;
}
// copy data
self.out.write_all(&self.data_buf[..]).await.str_err()?;
self.out.flush().await.str_err()?;
Ok(())
}
pub fn sink(&mut self) -> WriterSink<'_, T> {
WriterSink {
writer: self,
item: None,
}
}
}
use pin_project::pin_project;
#[pin_project]
/// FIXME: not really async
/// only works when ..push.poll() returns Ready immediately
pub struct WriterSink<'a, T>
where
T: bincode::Encode,
{
#[pin]
writer: &'a mut Writer<T>,
item: Option<T>,
}
impl<'a, T> Sink<T> for WriterSink<'a, T>
where
T: bincode::Encode,
{
type Error = String;
fn poll_ready(
self: std::pin::Pin<&mut Self>,
ctx: &mut std::task::Context<'_>,
) -> Poll<Result<(), String>> {
let mut this = self.project();
if this.item.is_none() {
return Poll::Ready(Ok(()));
}
let item = this.item.take().unwrap();
let push_fut = this.writer.push(item); // FIXME:: how to save this future???
pin_mut!(push_fut);
push_fut.poll(ctx)
}
fn start_send(self: std::pin::Pin<&mut Self>, item: T) -> Result<(), Self::Error> {
let this = self.project();
*this.item = Some(item);
Ok(())
}
fn poll_flush(
self: std::pin::Pin<&mut Self>,
ctx: &mut std::task::Context<'_>,
) -> Poll<Result<(), Self::Error>> {
self.poll_ready(ctx)
}
fn poll_close(
mut self: std::pin::Pin<&mut Self>,
ctx: &mut std::task::Context<'_>,
) -> Poll<Result<(), Self::Error>> {
futures::ready!(self.as_mut().poll_ready(ctx))?;
Poll::Ready(Ok(()))
}
}
pub struct Reader<T>
where
T: bincode::Decode,
{
mmap: AsyncMmapFile,
count: usize,
first_pos: LSize,
_t: PhantomData<Arc<T>>,
}
impl<T> Reader<T>
where
T: bincode::Decode,
{
pub async fn new<P: AsRef<Path>>(path: P) -> Result<Self, String> {
let mmap = AsyncOptions::new()
.read(true)
.open_mmap_file(path)
.await
.str_err()?;
mmap.try_lock_shared().str_err()?;
// read first pos and records count
let first_data: [u8; LEN_SIZE] = mmap.bytes(0, LEN_SIZE).str_err()?.try_into().str_err()?;
let first_pos = LSize::from_le_bytes(first_data);
let tab_len = (first_pos as usize) / LEN_SIZE;
let count = tab_len - 1;
Ok(Self {
mmap,
count,
first_pos,
_t: PhantomData,
})
}
pub fn len(&self) -> usize {
self.count
}
pub fn is_empty(&self) -> bool {
0 == self.len()
}
/// get item at index, reuse data buffer
pub async fn get_with_buf(&self, index: usize, data_buf: &mut Vec<u8>) -> Result<T, String> {
if index >= self.len() {
return Err("index out of range".into());
}
let next_pos: usize = (index + 1) * LEN_SIZE;
// read item data pos
let data_pos = if 0 == index {
self.first_pos
} else {
let tab_pos: usize = index * LEN_SIZE;
let pos_curr_data: [u8; LEN_SIZE] = self
.mmap
.bytes(tab_pos, LEN_SIZE)
.str_err()?
.try_into()
.str_err()?;
LSize::from_le_bytes(pos_curr_data)
} as usize;
// read next item pos
let pos_next_data: [u8; LEN_SIZE] = self
.mmap
.bytes(next_pos, LEN_SIZE)
.str_err()?
.try_into()
.str_err()?;
let data_pos_next = LSize::from_le_bytes(pos_next_data) as usize;
let data_len = data_pos_next - data_pos;
// read & unpack item data
let mut decoder = ZstdDecoder::new(self.mmap.range_reader(data_pos, data_len).str_err()?);
decoder.read_to_end(data_buf).await.str_err()?;
// decode item
let item: (T, usize) = bincode::decode_from_slice(data_buf, BINCODE_CFG).str_err()?;
data_buf.clear();
Ok(item.0)
}
/// get item at index
pub async fn get(&self, index: usize) -> Result<T, String> {
let mut data_buf: Vec<u8> = vec![];
self.get_with_buf(index, &mut data_buf).await
}
pub fn stream(&self) -> ReaderStream<'_, T> {
ReaderStream::new(self)
}
}
pub struct ReaderStream<'a, T>
where
T: bincode::Decode,
{
reader: &'a Reader<T>,
index: Option<usize>,
}
impl<'a, T> ReaderStream<'a, T>
where
T: bincode::Decode,
{
fn new(reader: &'a Reader<T>) -> Self {
ReaderStream {
reader,
index: None,
}
}
}
impl<'a, T> Stream for ReaderStream<'a, T>
where
T: bincode::Decode,
{
type Item = T;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
if self.index.is_none() && !self.reader.is_empty() {
self.index = Some(0);
}
if self.index.unwrap() == self.reader.len() {
return Poll::Ready(None);
}
// FIXME: may only work if reader.get().poll() returns Ready immediately
let future = self.reader.get(self.index.unwrap());
pin_mut!(future);
match Pin::new(&mut future).poll(cx) {
Poll::Ready(Ok(item)) => {
self.index = Some(self.index.unwrap() + 1);
Poll::Ready(Some(item))
}
Poll::Ready(Err(_)) => Poll::Ready(None),
Poll::Pending => Poll::Pending,
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.reader.len();
if self.index.is_none() {
return (len, Some(len));
}
let index = self.index.unwrap();
let rem = if len > index + 1 {
len - (index + 1)
} else {
0
};
(rem, Some(rem))
}
}
pub struct BufReader<T>
where
T: bincode::Decode,
{
inner: Reader<T>,
buf: Vec<u8>,
}
impl<T> BufReader<T>
where
T: bincode::Decode,
{
pub async fn new<P: AsRef<Path>>(path: P, buf_size: usize) -> Result<Self, String> {
match Reader::<T>::new(path).await {
Ok(inner) => Ok(Self {
inner,
buf: Vec::with_capacity(buf_size),
}),
Err(e) => Err(e),
}
}
pub async fn get(&mut self, index: usize) -> Result<T, String> {
self.inner.get_with_buf(index, &mut self.buf).await
}
pub fn into_inner(self) -> Reader<T> {
self.inner
}
pub fn stream(self) -> BufReaderStream<T> {
BufReaderStream::new(self)
}
}
impl<T> From<Reader<T>> for BufReader<T>
where
T: bincode::Decode,
{
fn from(inner: Reader<T>) -> Self {
Self {
inner,
buf: Vec::new(),
}
}
}
impl<T> From<BufReader<T>> for Reader<T>
where
T: bincode::Decode,
{
fn from(value: BufReader<T>) -> Self {
value.into_inner()
}
}
impl<T> Deref for BufReader<T>
where
T: bincode::Decode,
{
type Target = Reader<T>;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
pub struct BufReaderStream<T>
where
T: bincode::Decode,
{
reader: BufReader<T>,
index: Option<usize>,
}
impl<T> BufReaderStream<T>
where
T: bincode::Decode,
{
fn new(reader: BufReader<T>) -> Self {
BufReaderStream {
reader,
index: None,
}
}
async fn get_next(&mut self) -> Result<T, String> {
match self.index {
None => Err("index is None".into()),
Some(index) => {
let res = self.reader.get(index).await;
self.index = Some(index + 1);
res
}
}
}
}
impl<T> Stream for BufReaderStream<T>
where
T: bincode::Decode,
{
type Item = T;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
if self.index.is_none() && !self.reader.is_empty() {
self.index = Some(0);
}
if self.index.unwrap() == self.reader.len() {
return Poll::Ready(None);
}
// FIXME: may only work if reader.get().poll() returns Ready immediately
let future = self.get_next();
pin_mut!(future);
match Pin::new(&mut future).poll(cx) {
Poll::Ready(Ok(item)) => Poll::Ready(Some(item)),
Poll::Ready(Err(_)) => Poll::Ready(None),
Poll::Pending => Poll::Pending,
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.reader.len();
if self.index.is_none() {
return (len, Some(len));
}
let index = self.index.unwrap();
let rem = if len > index + 1 {
len - (index + 1)
} else {
0
};
(rem, Some(rem))
}
}
#[cfg(test)]
mod test {
use super::*;
use core::fmt::Debug;
use tempfile::tempdir;
#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct TestData {
num: u64,
test: String,
vnum: Vec<u64>,
vstr: Vec<String>,
}
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
(0..count).map(|i| TestData {
num: i as u64,
test: "test".repeat(i),
vnum: (0..i * 120).map(|x| (x ^ 0x345FE34) as u64).collect(),
vstr: (0..i * 111).map(|x| "test".repeat(x)).collect(),
})
}
async fn assert_data_eq((x, y): (&TestData, TestData)) {
assert_eq!(*x, y);
}
#[tokio::test]
async fn test_write() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
}
#[tokio::test]
async fn test_write_read() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
assert_eq!(items.len(), reader.len());
for (idx, item) in items.iter().enumerate() {
let ritem = reader.get(idx).await.expect("get");
assert_eq!(*item, ritem);
}
}
#[tokio::test]
async fn test_write_sink_read() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone()).map(Ok);
pin_mut!(src);
src.forward(writer.sink()).await.expect("forward");
writer.finish().await.expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
assert_eq!(items.len(), reader.len());
for (idx, item) in items.iter().enumerate() {
let ritem = reader.get(idx).await.expect("get");
assert_eq!(*item, ritem);
}
}
#[tokio::test]
async fn test_write_read_get_with_buf() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
assert_eq!(items.len(), reader.len());
for (idx, item) in items.iter().enumerate() {
let mut data_buf: Vec<u8> = vec![];
let ritem = reader.get_with_buf(idx, &mut data_buf).await.expect("get");
assert_eq!(*item, ritem);
}
}
#[tokio::test]
async fn test_write_read_stream() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
assert_eq!(items.len(), reader.len());
let dst_stream = reader.stream();
let src_stream = futures::stream::iter(items.iter());
let mut count = 0;
src_stream
.zip(dst_stream)
.map(|x| {
count += 1;
x
})
.for_each(assert_data_eq)
.await;
assert_eq!(count, items.len())
}
/// sharing Reader instance between threads
#[tokio::test]
async fn test_share_reader() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile).await.expect("new reader");
assert_eq!(items.len(), reader.len());
let reader = Arc::new(reader);
for _ in 0..=3 {
let cur_items = items.clone();
let cur_reader = Arc::clone(&reader);
tokio::spawn(async move {
let dst_stream = cur_reader.stream();
let src_stream = futures::stream::iter(cur_items.iter());
src_stream.zip(dst_stream).for_each(assert_data_eq).await;
});
}
}
#[tokio::test]
async fn test_write_bufread() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
let mut reader = BufReader::<TestData>::new(&tmpfile, 4096)
.await
.expect("new buf reader");
assert_eq!(items.len(), reader.len());
for (idx, item) in items.iter().enumerate() {
let ritem = reader.get(idx).await.expect("get");
assert_eq!(*item, ritem);
}
}
#[tokio::test]
async fn test_write_bufread_stream() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
..Default::default()
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");
let items_iter = gen_data(5);
let items: Vec<TestData> = items_iter.collect();
let src = futures::stream::iter(items.clone());
pin_mut!(src);
writer.load(src).await.expect("load");
writer.finish().await.expect("finish write");
let reader = BufReader::<TestData>::new(&tmpfile, 4096)
.await
.expect("new buf reader");
assert_eq!(items.len(), reader.len());
let dst_stream = reader.stream();
let src_stream = futures::stream::iter(items.iter());
let mut count = 0;
src_stream
.zip(dst_stream)
.map(|x| {
count += 1;
x
})
.for_each(assert_data_eq)
.await;
assert_eq!(count, items.len())
}
}
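
For orientation, a minimal round-trip sketch of the async API defined above (not part of this changeset; Item is a made-up payload type and the "async" feature of chgk_ledb_lib is assumed):

// Minimal sketch, not part of the diff: async Writer/Reader round trip with a
// hypothetical `Item` type.
use async_compression::Level;
use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
use futures::StreamExt;

#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq)]
struct Item {
    num: u64,
    text: String,
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let items: Vec<Item> = (0..3)
        .map(|i| Item { num: i, text: format!("item {i}") })
        .collect();

    // Write: load a Stream of items, then finish() to emit the offset table and data.
    let opts = WriterOpts { compress_lvl: Level::Fastest, ..Default::default() };
    let mut writer: Writer<Item> = Writer::new("items.dat", opts).await?;
    writer.load(futures::stream::iter(items.clone())).await?;
    writer.finish().await?;

    // Read: random access by index, or a Stream over all records.
    let reader: Reader<Item> = Reader::new("items.dat").await?;
    assert_eq!(reader.len(), items.len());
    println!("{:?}", reader.get(0).await?);
    reader
        .stream()
        .for_each(|item| async move {
            // records are decompressed and decoded one at a time
            let _ = item;
        })
        .await;
    Ok(())
}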

lib/src/db.rs

@@ -3,6 +3,7 @@ use std::{
io::{self, Cursor, Read, Write},
marker::PhantomData,
path::Path,
+ sync::Arc,
};
use memmap::{Mmap, MmapOptions};
@@ -11,20 +12,8 @@ type LSize = u32;
const LEN_SIZE: usize = std::mem::size_of::<LSize>();
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
- trait ErrorToString {
- type Output;
- fn str_err(self) -> std::result::Result<Self::Output, String>;
- }
- impl<T, E> ErrorToString for std::result::Result<T, E>
- where
- E: std::error::Error,
- {
- type Output = T;
- fn str_err(self) -> std::result::Result<Self::Output, String> {
- self.map_err(|e| e.to_string())
- }
- }
+ use crate::util::BincodeVecWriter;
+ use crate::util::ErrorToString;
pub struct WriterOpts {
pub compress_lvl: i32,
@@ -51,9 +40,10 @@ where
out: io::BufWriter<fs::File>,
data_buf: Cursor<Vec<u8>>,
cur_buf_raw: Cursor<Vec<u8>>,
+ cur_buf_item: BincodeVecWriter,
table: Vec<LSize>,
compress_lvl: i32,
- _t: PhantomData<*const T>,
+ _t: PhantomData<Arc<T>>,
}
impl<T> Writer<T>
@@ -68,6 +58,8 @@ where
let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
let cur_buf_raw = Cursor::new(cur_buf_raw);
+ let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
+ let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
let compress_lvl = opts.compress_lvl;
@@ -77,6 +69,7 @@ where
out,
data_buf,
cur_buf_raw,
+ cur_buf_item,
table,
compress_lvl,
_t: PhantomData,
@@ -84,20 +77,25 @@ where
}
pub fn push(&mut self, item: T) -> Result<(), String> {
+ self.push_by_ref(&item)
+ }
+ pub fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
let pos: LSize = self.data_buf.position() as LSize;
- let item_data = bincode::encode_to_vec(item, BINCODE_CFG).str_err()?;
+ bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?;
let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?;
zencoder
- .set_pledged_src_size(item_data.len() as u64)
+ .set_pledged_src_size(Some(self.cur_buf_item.len() as u64))
.str_err()?;
self.cur_buf_raw.set_position(0);
let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder);
- cur_buf_z.write_all(&item_data).str_err()?;
+ cur_buf_z.write_all(&self.cur_buf_item).str_err()?;
cur_buf_z.finish().str_err()?;
cur_buf_z.flush().str_err()?;
+ self.cur_buf_item.clear();
self.table.push(pos);
let (cur_buf_raw, _) = cur_buf_z.into_inner();
@@ -157,7 +155,7 @@ where
mmap: Mmap,
count: usize,
first_pos: LSize,
- _t: PhantomData<*const T>,
+ _t: PhantomData<Arc<T>>,
}
impl<T> Reader<T>
@@ -186,7 +184,11 @@ where
self.count
}
- pub fn get(&mut self, index: usize) -> Result<T, String> {
+ pub fn is_empty(&self) -> bool {
+ 0 == self.len()
+ }
+ pub fn get(&self, index: usize) -> Result<T, String> {
if index >= self.len() {
return Err("index out of range".into());
}
@@ -218,7 +220,7 @@ where
Ok(item.0)
}
- pub fn iter(&mut self) -> ReaderIter<'_, T> {
+ pub fn iter(&self) -> ReaderIter<'_, T> {
ReaderIter::new(self)
}
}
@@ -227,7 +229,7 @@ pub struct ReaderIter<'a, T>
where
T: bincode::Decode,
{
- reader: &'a mut Reader<T>,
+ reader: &'a Reader<T>,
index: Option<usize>,
}
@@ -235,7 +237,7 @@ impl<'a, T> ReaderIter<'a, T>
where
T: bincode::Decode,
{
- fn new(reader: &'a mut Reader<T>) -> Self {
+ fn new(reader: &'a Reader<T>) -> Self {
ReaderIter {
reader,
index: None,
@@ -250,7 +252,7 @@ where
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
- if self.index.is_none() && self.reader.len() != 0 {
+ if self.index.is_none() && !self.reader.is_empty() {
self.index = Some(0);
}
@@ -305,6 +307,100 @@ where
}
}
pub struct ReaderIntoIter<T>
where
T: bincode::Decode,
{
reader: Reader<T>,
index: Option<usize>,
}
impl<T> ReaderIntoIter<T>
where
T: bincode::Decode,
{
fn new(reader: Reader<T>) -> Self {
Self {
reader,
index: None,
}
}
}
impl<T> Iterator for ReaderIntoIter<T>
where
T: bincode::Decode,
{
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
if self.index.is_none() && !self.reader.is_empty() {
self.index = Some(0);
}
match self.index {
Some(i) if i < self.reader.len() => self.nth(i),
_ => None,
}
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
if self.reader.len() <= n {
return None;
}
self.index = Some(n + 1);
let item = self.reader.get(n);
match item {
Ok(item) => Some(item),
Err(_) => None,
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.reader.len();
if self.index.is_none() {
return (len, Some(len));
}
let index = self.index.unwrap();
let rem = if len > index + 1 {
len - (index + 1)
} else {
0
};
(rem, Some(rem))
}
fn count(self) -> usize
where
Self: Sized,
{
self.reader.len()
}
}
impl<T> ExactSizeIterator for ReaderIntoIter<T>
where
T: bincode::Decode,
{
fn len(&self) -> usize {
self.reader.len()
}
}
impl<T> IntoIterator for Reader<T>
where
T: bincode::Decode,
{
type Item = T;
type IntoIter = ReaderIntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
Self::IntoIter::new(self)
}
}
#[cfg(test)]
mod test {
use super::*;
@@ -317,7 +413,7 @@ mod test {
}
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
- (0..count).into_iter().map(|i| TestData {
+ (0..count).map(|i| TestData {
num: i as u64,
test: "test".repeat(i),
})
@@ -341,7 +437,7 @@ mod test {
writer.load(&mut items.clone().into_iter()).expect("load");
writer.finish().expect("finish write");
- let mut reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
+ let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
assert_eq!(items.len(), reader.len());
for (idx, item) in items.iter().enumerate() {
@@ -368,11 +464,76 @@ mod test {
writer.load(&mut items.clone().into_iter()).expect("load");
writer.finish().expect("finish write");
- let mut reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
+ let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
assert_eq!(items.len(), reader.len());
items.into_iter().zip(reader.iter()).for_each(|pair| {
assert_eq!(pair.0, pair.1);
});
}
#[test]
fn test_write_read_into_iter() {
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
compress_lvl: 1,
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
current_buf_size: 4096,
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
let items_iter = gen_data(10);
let items: Vec<TestData> = items_iter.collect();
writer.load(&mut items.clone().into_iter()).expect("load");
writer.finish().expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
assert_eq!(items.len(), reader.len());
items.into_iter().zip(reader).for_each(|pair| {
assert_eq!(pair.0, pair.1);
});
}
/// Sharing a Reader instance between threads
#[test]
fn test_share_reader() {
use std::thread;
let dir = tempdir().expect("tempdir");
let tmpfile = dir.path().join("test.tmp");
let opts = WriterOpts {
compress_lvl: 1,
data_buf_size: 10 * 1024 * 1024,
out_buf_size: 10 * 1024 * 1024,
current_buf_size: 4096,
};
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
let items_iter = gen_data(10);
let items: Vec<TestData> = items_iter.collect();
writer.load(&mut items.clone().into_iter()).expect("load");
writer.finish().expect("finish write");
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
assert_eq!(items.len(), reader.len());
let reader = Arc::new(reader);
for _ in 0..=3 {
let cur_items = items.clone();
let cur_reader = Arc::clone(&reader);
thread::spawn(move || {
cur_items
.into_iter()
.zip(cur_reader.iter())
.for_each(|pair| {
assert_eq!(pair.0, pair.1);
});
});
}
}
}
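The db.rs changes above let a Reader be used through a shared reference and add push_by_ref, is_empty, and an IntoIterator impl. A minimal usage sketch, assuming the crate is imported as `lib` (the crate name is not shown in this diff) and built with the `sync` feature:

use lib::db::{Reader, Writer, WriterOpts};
use lib::questions::Question;

fn roundtrip() -> Result<(), String> {
    // push_by_ref encodes into the writer's reusable item buffer instead of allocating per item
    let mut writer: Writer<Question> = Writer::new("questions.bin", WriterOpts::default())?;
    writer.push_by_ref(&Question::default())?;
    writer.finish()?;

    // get()/iter() now take &self, and Reader can be consumed directly as an iterator
    let reader: Reader<Question> = Reader::new("questions.bin", 2048)?;
    assert!(!reader.is_empty());
    for question in reader {
        println!("{}", question.id);
    }
    Ok(())
}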

13
lib/src/lib.rs Normal file
View File

@ -0,0 +1,13 @@
#[cfg(feature = "async")]
pub mod async_db;
#[cfg(feature = "sync")]
pub mod db;
pub mod questions;
#[cfg(any(
feature = "source",
feature = "source_async",
feature = "convert",
feature = "convert_async"
))]
pub mod source;
pub mod util;

398
lib/src/questions.rs Normal file
View File

@ -0,0 +1,398 @@
use serde_derive::{Deserialize, Serialize};
#[derive(
Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
)]
pub struct BatchInfo {
#[serde(default, skip_serializing_if = "String::is_empty")]
pub filename: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub description: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub author: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub comment: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub url: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub date: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub processed_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub redacted_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub copyright: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub theme: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub kind: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub source: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub rating: String,
}
#[derive(
Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
)]
pub struct Question {
#[serde(default, skip_serializing_if = "u32_is_zero")]
pub num: u32,
pub id: String,
pub description: String,
pub answer: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub author: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub comment: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub comment1: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub tour: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub url: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub date: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub processed_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub redacted_by: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub copyright: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub theme: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub kind: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub source: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub rating: String,
#[serde(default, skip_serializing_if = "BatchInfo::is_default")]
pub batch_info: BatchInfo,
}
fn u32_is_zero(num: &u32) -> bool {
*num == 0
}
impl BatchInfo {
pub fn is_default(&self) -> bool {
*self == BatchInfo::default()
}
}
#[cfg(any(feature = "convert", feature = "convert_async"))]
pub mod convert_common {
use super::{BatchInfo, Question};
use crate::source::{SourceQuestion, SourceQuestionsBatch};
macro_rules! make {
($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+}};
($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+ ,..$Target::default()}}
}
impl From<SourceQuestion> for Question {
fn from(src: SourceQuestion) -> Self {
make! {Self; with defaults and by {
num, id, description, answer, author, comment, comment1, tour, url,
date, processed_by, redacted_by, copyright, theme, kind, source, rating
}; from src}
}
}
impl From<SourceQuestionsBatch> for BatchInfo {
fn from(src: SourceQuestionsBatch) -> Self {
make! {Self; by {
filename, description, author, comment, url, date,
processed_by, redacted_by, copyright, theme, kind, source, rating
}; from src}
}
}
impl From<SourceQuestionsBatch> for Vec<Question> {
fn from(src: SourceQuestionsBatch) -> Self {
let mut src = src;
let mut questions: Vec<SourceQuestion> = vec![];
std::mem::swap(&mut src.questions, &mut questions);
let mut result: Vec<Question> = questions.into_iter().map(|item| item.into()).collect();
let batch_info = BatchInfo::from(src);
result.iter_mut().for_each(|question| {
question.batch_info = batch_info.clone();
});
result
}
}
}
#[cfg(feature = "convert")]
pub mod convert {
use super::Question;
use crate::source::SourceQuestionsBatch;
pub trait QuestionsConverter {
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a>;
}
impl<T> QuestionsConverter for T
where
T: Iterator<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>,
{
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a> {
let iter = self
.filter(|(_, data)| data.is_ok())
.flat_map(|(filename, data)| {
let mut batch = data.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
questions
});
Box::new(iter)
}
}
#[cfg(test)]
mod test {
use crate::questions::test::convert_common::sample_batch;
use super::*;
use insta::assert_yaml_snapshot;
use std::iter;
#[test]
fn test_convert() {
let mut source = iter::once((
String::from("test.json"),
Ok::<SourceQuestionsBatch, serde_json::Error>(sample_batch()),
));
let converted: Vec<_> = source.convert().collect();
assert_yaml_snapshot!(converted, @r#"
---
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
"#);
}
}
}
#[cfg(feature = "convert")]
pub use convert::QuestionsConverter;
#[cfg(feature = "convert_async")]
pub mod convert_async {
use futures::stream;
use futures_core::stream::Stream;
use futures_util::StreamExt;
use super::Question;
use crate::source::SourceQuestionsBatch;
pub struct QuestionsConverterAsync<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
inner: T,
}
impl<T> From<T> for QuestionsConverterAsync<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
fn from(inner: T) -> Self {
Self { inner }
}
}
pub trait QuestionsConverterAsyncForStream<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
fn converter(&mut self) -> QuestionsConverterAsync<&mut T>;
}
impl<T> QuestionsConverterAsyncForStream<T> for T
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
fn converter(&mut self) -> QuestionsConverterAsync<&mut T> {
QuestionsConverterAsync::from(self)
}
}
impl<T> QuestionsConverterAsync<T>
where
T: Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>
+ std::marker::Unpin,
{
pub fn convert(self) -> impl Stream<Item = Question> {
self.inner
.filter_map(|(name, res)| async move {
if let Ok(item) = res {
Some((name, item))
} else {
None
}
})
.flat_map(|(filename, batch)| {
stream::iter({
let mut batch = batch;
batch.filename = filename;
let questions: Vec<Question> = batch.into();
questions
})
})
}
}
#[cfg(test)]
mod test {
use crate::questions::test::convert_common::sample_batch;
use super::*;
use futures_util::{pin_mut, StreamExt};
use insta::assert_yaml_snapshot;
#[tokio::test]
async fn test_convert_stream() {
let source = futures::stream::once(async {
(
String::from("test.json"),
Ok::<SourceQuestionsBatch, serde_json::Error>(sample_batch()),
)
});
pin_mut!(source);
let converter = source.converter();
let converter = converter.convert();
let converted: Vec<_> = converter.collect().await;
assert_yaml_snapshot!(converted, @r#"
---
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
batch_info:
filename: test.json
description: Тестовый
date: 00-000-2000
"#);
}
}
}
#[cfg(feature = "convert_async")]
pub use convert_async::{QuestionsConverterAsync, QuestionsConverterAsyncForStream};
#[cfg(test)]
mod test {
use super::*;
use insta::assert_yaml_snapshot;
use serde_json::json;
#[cfg(any(feature = "convert", feature = "convert_async"))]
pub mod convert_common {
use crate::source::{SourceQuestion, SourceQuestionsBatch};
pub fn sample_batch() -> SourceQuestionsBatch {
SourceQuestionsBatch {
description: "Тестовый".into(),
date: "00-000-2000".into(),
questions: vec![
SourceQuestion {
id: "Вопрос 1".into(),
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
answer: "42".into(),
..Default::default()
},
SourceQuestion {
id: "Вопрос 2".into(),
description: "Зимой и летом одним цветом".into(),
answer: "ёлка".into(),
..Default::default()
},
],
..Default::default()
}
}
}
pub fn sample_question() -> Question {
Question {
id: "Вопрос 1".into(),
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
answer: "42".into(),
batch_info: BatchInfo {
description: "Тестовый".into(),
date: "00-000-2000".into(),
..Default::default()
},
..Default::default()
}
}
#[test]
fn test_question_ser() {
assert_yaml_snapshot!(sample_question(), @r#"
---
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
description: Тестовый
date: 00-000-2000
"#);
}
#[test]
fn test_question_de() {
let question_from_json: Result<Question, _> = serde_json::from_value(json!({
"id": "Вопрос 1",
"description": "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2",
"answer": "42",
"batch_info": {
"description": "Тестовый",
"date": "00-000-2000"
}
}));
assert!(question_from_json.is_ok());
assert_yaml_snapshot!(question_from_json.unwrap(), @r#"
---
id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
batch_info:
description: Тестовый
date: 00-000-2000
"#);
}
}
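A short sketch of wiring the sync converter to the zip source (the crate name `lib` and the `convert` feature are assumptions; the pattern follows the converter trait above):

use lib::questions::{Question, QuestionsConverter};
use lib::source::ReadSourceQuestionsBatches;
use std::{fs, io};

fn questions_from_zip(path: &str) -> Vec<Question> {
    let zip_file = fs::File::open(path).expect("open zip");
    let archive = zip::ZipArchive::new(io::BufReader::new(zip_file)).expect("zip archive");
    // source_questions() yields (filename, Result<SourceQuestionsBatch, _>) pairs;
    // convert() flattens them into Question items with batch_info filled in
    let mut source = archive.source_questions();
    source.convert().collect()
}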

579
lib/src/source.rs Normal file
View File

@ -0,0 +1,579 @@
use serde_derive::{Deserialize, Serialize};
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
pub struct SourceQuestion {
#[serde(default, skip_serializing_if = "u32_is_zero")]
pub num: u32,
#[serde(default)]
pub id: String,
#[serde(alias = "Вопрос")]
pub description: String,
#[serde(alias = "Ответ")]
pub answer: String,
#[serde(alias = "Автор", default, skip_serializing_if = "String::is_empty")]
pub author: String,
#[serde(
default,
alias = "Комментарий",
skip_serializing_if = "String::is_empty"
)]
pub comment: String,
#[serde(
default,
alias = "Комментарии",
alias = "Инфо",
skip_serializing_if = "String::is_empty"
)]
pub comment1: String,
#[serde(default, alias = "Тур", skip_serializing_if = "String::is_empty")]
pub tour: String,
#[serde(
default,
alias = "Ссылка",
alias = "URL",
skip_serializing_if = "String::is_empty"
)]
pub url: String,
#[serde(default, alias = "Дата", skip_serializing_if = "String::is_empty")]
pub date: String,
#[serde(default, alias = "Обработан", skip_serializing_if = "String::is_empty")]
pub processed_by: String,
#[serde(default, alias = "Редактор", skip_serializing_if = "String::is_empty")]
pub redacted_by: String,
#[serde(default, alias = "Копирайт", skip_serializing_if = "String::is_empty")]
pub copyright: String,
#[serde(default, alias = "Тема", skip_serializing_if = "String::is_empty")]
pub theme: String,
#[serde(
default,
alias = "Вид",
alias = "Тип",
skip_serializing_if = "String::is_empty"
)]
pub kind: String,
#[serde(default, alias = "Источник", skip_serializing_if = "String::is_empty")]
pub source: String,
#[serde(default, alias = "Рейтинг", skip_serializing_if = "String::is_empty")]
pub rating: String,
}
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
pub struct SourceQuestionsBatch {
#[serde(default, skip_serializing_if = "String::is_empty")]
pub filename: String,
#[serde(alias = "Пакет", alias = "Чемпионат")]
pub description: String,
#[serde(default, alias = "Автор", skip_serializing_if = "String::is_empty")]
pub author: String,
#[serde(
default,
alias = "Комментарий",
alias = "Комментарии",
alias = "Инфо",
skip_serializing_if = "String::is_empty"
)]
pub comment: String,
#[serde(
default,
alias = "Ссылка",
alias = "URL",
skip_serializing_if = "String::is_empty"
)]
pub url: String,
#[serde(default, alias = "Дата", skip_serializing_if = "String::is_empty")]
pub date: String,
#[serde(default, alias = "Обработан", skip_serializing_if = "String::is_empty")]
pub processed_by: String,
#[serde(default, alias = "Редактор", skip_serializing_if = "String::is_empty")]
pub redacted_by: String,
#[serde(default, alias = "Копирайт", skip_serializing_if = "String::is_empty")]
pub copyright: String,
#[serde(default, alias = "Тема", skip_serializing_if = "String::is_empty")]
pub theme: String,
#[serde(
default,
alias = "Вид",
alias = "Тип",
skip_serializing_if = "String::is_empty"
)]
pub kind: String,
#[serde(default, alias = "Источник", skip_serializing_if = "String::is_empty")]
pub source: String,
#[serde(default, alias = "Рейтинг", skip_serializing_if = "String::is_empty")]
pub rating: String,
#[serde(alias = "Вопросы")]
pub questions: Vec<SourceQuestion>,
}
fn u32_is_zero(num: &u32) -> bool {
*num == 0
}
#[cfg(any(feature = "convert", feature = "source"))]
pub mod reader_sync {
use std::io::{Read, Seek};
use zip::ZipArchive;
use super::SourceQuestionsBatch;
pub struct SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
zipfile: ZipArchive<R>,
index: Option<usize>,
}
impl<R> SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
fn new(zipfile: ZipArchive<R>) -> Self {
SourceQuestionsZipReader {
zipfile,
index: None,
}
}
}
impl<R> Iterator for SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
type Item = (String, Result<SourceQuestionsBatch, serde_json::Error>);
fn next(&mut self) -> Option<Self::Item> {
if self.index.is_none() && !self.zipfile.is_empty() {
self.index = Some(0);
}
match self.index {
Some(i) if i < self.zipfile.len() => {
self.index = Some(i + 1);
self.nth(i)
}
_ => None,
}
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
if self.zipfile.len() <= n {
return None;
}
self.index = Some(n + 1);
let file = self.zipfile.by_index(n).unwrap();
let name = file.mangled_name();
let name_str = name.to_str().unwrap();
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
Some((String::from(name_str), data))
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.zipfile.len();
let index = self.index.unwrap_or(0);
let rem = if len > index + 1 {
len - (index + 1)
} else {
0
};
(rem, Some(rem))
}
fn count(self) -> usize
where
Self: Sized,
{
self.zipfile.len()
}
}
impl<R> ExactSizeIterator for SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
fn len(&self) -> usize {
self.zipfile.len()
}
}
pub trait ReadSourceQuestionsBatches<R>
where
R: Read + Seek,
{
fn source_questions(self) -> SourceQuestionsZipReader<R>;
}
impl<R> ReadSourceQuestionsBatches<R> for ZipArchive<R>
where
R: Read + Seek,
{
fn source_questions(self) -> SourceQuestionsZipReader<R> {
SourceQuestionsZipReader::new(self)
}
}
#[cfg(test)]
mod test {
use super::super::test::sample_batch;
use super::*;
use std::fs;
use std::{io::Write, iter, path::Path};
use tempfile::tempdir;
fn write_sample_zip<P>(path: P)
where
P: AsRef<Path>,
{
let batch = sample_batch();
let z_file = fs::File::create(path).expect("create zip file");
let mut zip_file = zip::ZipWriter::new(z_file);
let options =
zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Zstd);
zip_file
.start_file("test.json", options)
.expect("zip start file");
let data = serde_json::to_vec(&batch).unwrap();
let amount = zip_file.write(data.as_slice()).expect("write entry");
assert_eq!(amount, data.len());
zip_file.finish().expect("finish zip file");
}
#[test]
fn test_source_questions_get() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip);
let z_file = fs::File::open(tmpfile_zip).expect("open zip file");
let zip_file = zip::ZipArchive::new(z_file).expect("open zip file reader");
let mut source = zip_file.source_questions();
assert_eq!(source.len(), 1);
let actual = source.next().expect("get batch");
assert_eq!(actual.0, "test.json");
assert_eq!(actual.1.expect("parse batch"), expected_batch);
}
#[test]
fn test_source_questions_iter() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip);
let z_file = fs::File::open(tmpfile_zip).expect("open zip file");
let zip_file = zip::ZipArchive::new(z_file).expect("open zip file reader");
let source = zip_file.source_questions();
assert_eq!(source.len(), 1);
let expected_iter = iter::once((String::from("test.json"), Ok(expected_batch)));
assert!(source
.map(|x| (x.0, x.1.map_err(|e| e.to_string())))
.eq(expected_iter));
}
}
}
#[cfg(any(feature = "convert", feature = "source"))]
pub use reader_sync::{ReadSourceQuestionsBatches, SourceQuestionsZipReader};
#[cfg(any(feature = "convert_async", feature = "source_async"))]
pub mod reader_async {
use async_stream::stream;
use async_zip::tokio::read::seek::ZipFileReader;
use futures_core::stream::Stream;
use futures_util::AsyncReadExt;
use tokio::io::{AsyncRead, AsyncSeek};
use super::SourceQuestionsBatch;
pub struct SourceQuestionsZipReaderAsync<R>
where
R: AsyncRead + AsyncSeek + Unpin,
{
zipfile: ZipFileReader<R>,
index: Option<usize>,
}
impl<R> SourceQuestionsZipReaderAsync<R>
where
R: AsyncRead + AsyncSeek + Unpin,
{
pub fn new(zipfile: ZipFileReader<R>) -> Self {
SourceQuestionsZipReaderAsync {
zipfile,
index: None,
}
}
pub fn len(&self) -> usize {
self.zipfile.file().entries().len()
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
pub async fn get(
&mut self,
index: usize,
) -> Result<(String, Result<SourceQuestionsBatch, serde_json::Error>), String>
where
R: AsyncRead + AsyncSeek + Unpin,
{
let len = self.len();
if index >= len {
return Err(format!("get index={index}, when len={len}"));
}
let reader = self.zipfile.reader_with_entry(index).await;
if let Err(error) = reader {
return Err(format!("reader_with_entry: {error:?}"));
}
let mut reader = reader.unwrap();
let filename = reader.entry().filename().clone().into_string().unwrap();
let mut data: Vec<u8> = Vec::new();
let readed = reader.read_to_end(&mut data).await;
if let Err(error) = readed {
return Err(format!("read_to_end: {error:?}"));
}
let parsed: Result<SourceQuestionsBatch, _> = serde_json::from_slice(&data);
Ok((filename, parsed))
}
pub async fn get_next(
&mut self,
) -> Option<Result<(String, Result<SourceQuestionsBatch, serde_json::Error>), String>>
where
R: AsyncRead + AsyncSeek + Unpin,
{
if self.index.is_none() && !self.is_empty() {
self.index = Some(0);
}
if self.index.unwrap() >= self.len() {
return None;
}
let item = self.get(self.index.unwrap()).await;
self.index = Some(self.index.unwrap() + 1);
Some(item)
}
pub fn stream(
&mut self,
) -> impl Stream<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)> + '_
{
stream! {
while let Some(Ok(item)) = self.get_next().await {
yield item
}
}
}
}
pub trait ReadSourceQuestionsBatchesAsync<R>
where
R: AsyncRead + AsyncSeek + Unpin,
{
fn source_questions(self) -> SourceQuestionsZipReaderAsync<R>;
}
impl<R> ReadSourceQuestionsBatchesAsync<R> for ZipFileReader<R>
where
R: AsyncRead + AsyncSeek + Unpin,
{
fn source_questions(self) -> SourceQuestionsZipReaderAsync<R> {
SourceQuestionsZipReaderAsync::new(self)
}
}
#[cfg(test)]
mod test {
use crate::source::SourceQuestion;
use super::super::test::sample_batch;
use super::*;
use async_zip::{base::write::ZipFileWriter, ZipEntryBuilder};
use core::fmt::Debug;
use futures_util::StreamExt;
use std::path::Path;
use tempfile::tempdir;
use tokio::fs;
async fn write_sample_zip<P>(path: P)
where
P: AsRef<Path>,
{
let batch = sample_batch();
let z_file = fs::File::create(path).await.expect("create zip file");
let mut zip_file = ZipFileWriter::with_tokio(z_file);
let entry =
ZipEntryBuilder::new("test.json".into(), async_zip::Compression::Zstd).build();
zip_file
.write_entry_whole(entry, serde_json::to_vec(&batch).unwrap().as_slice())
.await
.expect("write entry");
zip_file.close().await.expect("close zip");
}
async fn assert_data_rref_eq<T>((x, y): (T, &T))
where
T: PartialEq + Debug,
{
assert_eq!(x, *y);
}
#[tokio::test]
async fn test_source_questions_stream() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip).await;
let mut z_file = fs::File::open(tmpfile_zip).await.expect("open zip file");
let zip_file = ZipFileReader::with_tokio(&mut z_file)
.await
.expect("open zip file reader");
let expected_count = expected_batch.questions.len();
let expected_stream = futures::stream::iter(expected_batch.questions.iter());
let mut actual_source = zip_file.source_questions();
let actual_stream = actual_source.stream();
let mut actual_count: usize = 0;
actual_stream
.flat_map(|x| futures::stream::iter(x.1.expect("parse batch").questions))
.zip(expected_stream)
.map(|x| {
actual_count += 1;
x
})
.for_each(assert_data_rref_eq::<SourceQuestion>)
.await;
assert_eq!(actual_count, expected_count);
}
#[tokio::test]
async fn test_source_questions_get() {
let expected_batch = sample_batch();
let dir = tempdir().expect("tempdir");
// write sample
let tmpfile_zip = dir.path().join("test.zip");
write_sample_zip(&tmpfile_zip).await;
let mut z_file = fs::File::open(tmpfile_zip).await.expect("open zip file");
let zip_file = ZipFileReader::with_tokio(&mut z_file)
.await
.expect("open zip file reader");
let mut source = zip_file.source_questions();
assert_eq!(source.len(), 1);
let actual = source.get(0).await.expect("get batch");
assert_eq!(actual.0, "test.json");
assert_eq!(actual.1.expect("parse batch"), expected_batch);
}
}
}
#[cfg(any(feature = "convert_async", feature = "source_async"))]
pub use reader_async::{ReadSourceQuestionsBatchesAsync, SourceQuestionsZipReaderAsync};
#[cfg(test)]
mod test {
use super::*;
use insta::assert_yaml_snapshot;
use serde_json::json;
pub fn sample_batch() -> SourceQuestionsBatch {
SourceQuestionsBatch {
description: "Тестовый".into(),
date: "00-000-2000".into(),
questions: vec![
SourceQuestion {
id: "Вопрос 1".into(),
description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
answer: "42".into(),
..Default::default()
},
SourceQuestion {
id: "Вопрос 2".into(),
description: "Зимой и летом одним цветом".into(),
answer: "ёлка".into(),
..Default::default()
},
],
..Default::default()
}
}
#[test]
fn test_batch_ser() {
let batch = sample_batch();
assert_yaml_snapshot!(batch, @r#"
---
description: Тестовый
date: 00-000-2000
questions:
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
"#);
}
#[test]
fn test_batch_de() {
let batch_from_json: Result<SourceQuestionsBatch, _> = serde_json::from_value(json!({
"Чемпионат": "Тестовый",
"Дата": "00-000-2000",
"Вопросы": [
{
"id": "Вопрос 1",
"Вопрос": "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2",
"Ответ": "42",
},
{
"id": "Вопрос 2",
"Вопрос": "Зимой и летом одним цветом",
"Ответ": "ёлка",
},
]
}));
assert!(batch_from_json.is_ok());
assert_yaml_snapshot!(batch_from_json.unwrap(), @r#"
---
description: Тестовый
date: 00-000-2000
questions:
- id: Вопрос 1
description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
answer: "42"
- id: Вопрос 2
description: Зимой и летом одним цветом
answer: ёлка
"#);
}
}
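On the async side, SourceQuestionsZipReaderAsync exposes the batches as a stream; a minimal sketch (crate name `lib`, the `source_async` feature, and a tokio runtime are assumptions):

use futures_util::StreamExt;
use lib::source::ReadSourceQuestionsBatchesAsync;

async fn print_batch_sizes(path: &str) {
    let mut file = tokio::fs::File::open(path).await.expect("open zip");
    let zip = async_zip::tokio::read::seek::ZipFileReader::with_tokio(&mut file)
        .await
        .expect("zip reader");
    let mut source = zip.source_questions();
    // stream() yields (filename, Result<SourceQuestionsBatch, _>) until the archive is exhausted
    source
        .stream()
        .for_each(|(name, batch)| async move {
            let count = batch.map(|b| b.questions.len()).unwrap_or(0);
            println!("{name}: {count} questions");
        })
        .await;
}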

57
lib/src/util.rs Normal file
View File

@ -0,0 +1,57 @@
pub trait ErrorToString {
type Output;
fn str_err(self) -> std::result::Result<Self::Output, String>;
}
impl<T, E> ErrorToString for std::result::Result<T, E>
where
E: std::error::Error,
{
type Output = T;
fn str_err(self) -> std::result::Result<Self::Output, String> {
self.map_err(|e| e.to_string())
}
}
#[cfg(any(feature = "sync", feature = "async"))]
mod bincode_utils {
use std::ops::{Deref, DerefMut};
use bincode::enc::write::Writer;
use bincode::error::EncodeError;
/// struct that allows [`Vec<u8>`] to implement [bincode::enc::write::Writer] trait
pub struct BincodeVecWriter {
vec: Vec<u8>,
}
impl BincodeVecWriter {
pub fn new(vec: Vec<u8>) -> BincodeVecWriter {
BincodeVecWriter { vec }
}
}
impl Deref for BincodeVecWriter {
type Target = Vec<u8>;
fn deref(&self) -> &Self::Target {
&self.vec
}
}
impl DerefMut for BincodeVecWriter {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.vec
}
}
impl Writer for BincodeVecWriter {
fn write(&mut self, bytes: &[u8]) -> Result<(), EncodeError> {
self.vec.extend_from_slice(bytes);
Ok(())
}
}
}
#[cfg(any(feature = "sync", feature = "async"))]
pub use bincode_utils::BincodeVecWriter;
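BincodeVecWriter is what lets the writers reuse one encode buffer across items. A standalone sketch of the same pattern (the crate name `lib` is an assumption):

use lib::util::{BincodeVecWriter, ErrorToString};

fn encode_all<T: bincode::Encode>(items: &[T]) -> Result<Vec<Vec<u8>>, String> {
    let mut buf = BincodeVecWriter::new(Vec::with_capacity(4096));
    let mut out = Vec::with_capacity(items.len());
    for item in items {
        // encode_into_writer appends to the buffer through the Writer impl...
        bincode::encode_into_writer(item, &mut buf, bincode::config::standard()).str_err()?;
        out.push(buf.to_vec());
        // ...and clear() (via DerefMut to Vec<u8>) keeps the allocation for the next item
        buf.clear();
    }
    Ok(out)
}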

View File

@ -1,276 +0,0 @@
extern crate serde;
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
extern crate ledb;
extern crate ledb_types;
extern crate zip;
use clap::{Parser, Subcommand};
use rand::seq::IteratorRandom;
use std::io;
use std::path::PathBuf;
use std::time::Instant;
use std::{fs, sync::mpsc, thread};
use ledb::{Options, Storage};
mod db;
mod questions;
mod source;
use crate::questions::{Question, QuestionsConverter};
use crate::source::ReadSourceQuestionsBatches;
const ZIP_FILENAME: &str = "json.zip";
const NEW_DB_FILENAME: &str = "test.bin";
const DB_DIR: &str = "db";
#[derive(Subcommand, Debug)]
enum Command {
Write,
Compact,
Print {
#[clap(value_parser, default_value = "0")]
id: u32,
},
ZipPrint {
#[clap(value_parser, default_value = "0")]
file_num: usize,
#[clap(value_parser, default_value = "0")]
num: usize,
},
Write2,
Print2 {
#[clap(value_parser, default_value = "0")]
id: u32,
},
}
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
struct Cli {
#[clap(subcommand)]
command: Command,
#[clap(short, long, action)]
measure: bool,
}
fn zip_reader_task(tx: mpsc::Sender<Question>) {
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let questions = source_questions
.convert()
.enumerate()
.map(|(num, mut question)| {
question.num = 1 + num as u32;
question
});
for question in questions {
let res = tx.send(question);
if res.is_err() {
break;
}
}
println!("read done");
}
fn db_writer_task(rx: mpsc::Receiver<Question>) {
let out_file: PathBuf = [DB_DIR, "data.mdb"].into_iter().collect();
match fs::metadata(&out_file) {
Ok(x) if x.is_file() => {
fs::remove_file(&out_file).unwrap();
println!(r#""{}" removed"#, out_file.to_str().unwrap());
}
_ => {}
};
let options: Options = serde_json::from_value(json!({
"map_size": 900 * 1024 * 1024, // 900mb
"write_map": true,
"map_async": true,
"no_lock": true,
"no_meta_sync": true,
"no_sync": true,
}))
.unwrap();
let storage = Storage::new(DB_DIR, options).unwrap();
let collection = storage.collection("questions").unwrap();
let count = collection.load(rx).expect("load");
println!("loaded {count}");
println!("syncing to disk...");
storage.sync(true).unwrap();
print!("stats: ");
let stats = storage.stat().unwrap();
println!("{:?}", stats);
drop(storage);
println!("write done");
}
fn write_db() {
let (tx, rx) = mpsc::channel::<Question>();
[
thread::spawn(move || zip_reader_task(tx)),
thread::spawn(move || db_writer_task(rx)),
]
.into_iter()
.for_each(|handle| handle.join().expect("thread panic"));
println!("all done");
}
fn print_question_from<F>(get_q: F)
where
F: FnOnce() -> Option<Question>,
{
let q = get_q().unwrap();
println!("{:#?}", q)
}
fn read_from_zip(file_num: usize, mut num: usize) -> Option<Question> {
let mut rng = rand::thread_rng();
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
let zip_reader = io::BufReader::new(zip_file);
let archive = zip::ZipArchive::new(zip_reader).unwrap();
let mut source_questions = archive.source_questions();
let (filename, batch) = if file_num == 0 {
source_questions.choose(&mut rng).unwrap()
} else {
source_questions.nth(file_num - 1).unwrap()
};
let mut batch = batch.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
if num == 0 {
num = (1..=questions.len()).choose(&mut rng).unwrap();
}
Some(questions[num - 1].clone())
}
fn compact_db() {
let options: Options = serde_json::from_value(json!({
"write_map": true,
"map_async": true,
"no_lock": true,
"no_meta_sync": true,
"no_sync": true,
"compact": true,
}))
.unwrap();
let storage = Storage::new(DB_DIR, options).unwrap();
storage.sync(true).unwrap();
let stats = storage.stat().unwrap();
println!("{:?}", stats);
drop(storage);
}
fn read_from_db(mut id: u32) -> Option<Question> {
let options: Options = serde_json::from_value(json!({
"read_only": true,
"map_async": true,
"no_lock": true,
}))
.unwrap();
let storage = Storage::new(DB_DIR, options).unwrap();
let collection = storage.collection("questions").unwrap();
let mut rng = rand::thread_rng();
if id == 0 {
let last_id = collection.last_id().unwrap();
id = (1..=last_id).choose(&mut rng).unwrap();
}
collection.get::<Question>(id).unwrap()
}
// measure and return time elapsed in `func` in seconds
pub fn measure<F: FnOnce()>(func: F) -> f64 {
let start = Instant::now();
func();
let elapsed = start.elapsed();
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
}
pub fn measure_and_print<F: FnOnce()>(func: F) {
let m = measure(func);
eprintln!("{}", m);
}
fn main() {
let args = Cli::parse();
let mut action: Box<dyn FnOnce()> = match &args.command {
Command::Write => Box::new(write_db),
Command::Compact => Box::new(compact_db),
Command::Print { id } => {
let get_question = Box::new(|| read_from_db(*id));
Box::new(|| print_question_from(get_question))
}
Command::ZipPrint { file_num, num } => {
let get_question = Box::new(|| read_from_zip(*file_num, *num));
Box::new(|| print_question_from(get_question))
}
Command::Write2 => Box::new(write_db2),
Command::Print2 { id } => {
let get_question = Box::new(|| read_from_db2(*id));
Box::new(|| print_question_from(get_question))
}
};
if args.measure {
action = Box::new(|| measure_and_print(action));
}
action();
}
fn read_from_db2(id: u32) -> Option<Question> {
let mut reader: db::Reader<Question> =
db::Reader::new(NEW_DB_FILENAME, 2048).expect("new db reader");
let mut questions = reader.iter();
match id {
0 => {
let mut rng = rand::thread_rng();
questions.choose(&mut rng)
}
_ => questions.nth((id - 1) as usize),
}
}
fn write_db2() {
let (tx, rx) = mpsc::channel::<Question>();
[
thread::spawn(move || zip_reader_task(tx)),
thread::spawn(move || db_writer2_task(rx)),
]
.into_iter()
.for_each(|handle| handle.join().expect("thread panic"));
println!("all done");
}
fn db_writer2_task(rx: mpsc::Receiver<Question>) {
let writer_opts = db::WriterOpts::default();
let mut writer: db::Writer<Question> =
db::Writer::new(NEW_DB_FILENAME, writer_opts).expect("new db writer");
writer
.load(&mut rx.iter())
.unwrap_or_else(|e| panic!("db writer load, {e:#?}"));
writer.finish().expect("db writer finish");
println!("write done");
}

View File

@ -1,145 +0,0 @@
use ledb::Document;
use serde_derive::{Deserialize, Serialize};
use crate::source::{SourceQuestion, SourceQuestionsBatch};
macro_rules! make {
($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+}};
($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
$field: $src.$field
),+ ,..$Target::default()}}
}
#[derive(
Debug, Default, Clone, Serialize, Deserialize, Document, bincode::Decode, bincode::Encode,
)]
pub struct BatchInfo {
#[document(primary)]
#[serde(default)]
pub filename: String,
#[serde(default)]
pub description: String,
#[serde(default)]
pub author: String,
#[serde(default)]
pub comment: String,
#[serde(default)]
pub url: String,
#[serde(default)]
pub date: String,
#[serde(default)]
pub processed_by: String,
#[serde(default)]
pub redacted_by: String,
#[serde(default)]
pub copyright: String,
#[serde(default)]
pub theme: String,
#[serde(default)]
pub kind: String,
#[serde(default)]
pub source: String,
#[serde(default)]
pub rating: String,
}
#[derive(
Debug, Default, Clone, Serialize, Deserialize, Document, bincode::Decode, bincode::Encode,
)]
pub struct Question {
#[document(primary)]
#[serde(default)]
pub num: u32,
#[document(index)]
pub id: String,
pub description: String,
pub answer: String,
#[serde(default)]
pub author: String,
#[serde(default)]
pub comment: String,
#[serde(default)]
pub comment1: String,
#[serde(default)]
pub tour: String,
#[serde(default)]
pub url: String,
#[serde(default)]
pub date: String,
#[serde(default)]
pub processed_by: String,
#[serde(default)]
pub redacted_by: String,
#[serde(default)]
pub copyright: String,
#[serde(default)]
pub theme: String,
#[serde(default)]
pub kind: String,
#[serde(default)]
pub source: String,
#[serde(default)]
pub rating: String,
#[document(nested)]
#[serde(default)]
pub batch_info: BatchInfo,
}
impl From<SourceQuestion> for Question {
fn from(src: SourceQuestion) -> Self {
make! {Self; with defaults and by {
num, id, description, answer, author, comment, comment1, tour, url,
date, processed_by, redacted_by, copyright, theme, kind, source, rating
}; from src}
}
}
impl From<SourceQuestionsBatch> for BatchInfo {
fn from(src: SourceQuestionsBatch) -> Self {
make! {Self; by {
filename, description, author, comment, url, date,
processed_by, redacted_by, copyright, theme, kind, source, rating
}; from src}
}
}
impl From<SourceQuestionsBatch> for Vec<Question> {
fn from(src: SourceQuestionsBatch) -> Self {
let mut result: Vec<Question> = src
.questions
.iter()
.map(|item| item.clone().into())
.collect();
let batch_info = BatchInfo::from(src);
result.iter_mut().for_each(|mut question| {
question.batch_info = batch_info.clone();
});
result
}
}
pub trait QuestionsConverter {
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a>;
}
impl<T> QuestionsConverter for T
where
T: Iterator<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>,
{
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a> {
let iter = self
.filter(|(_, data)| data.is_ok())
.flat_map(|(filename, data)| {
let mut batch = data.unwrap();
batch.filename = filename;
let questions: Vec<Question> = batch.into();
questions
});
Box::new(iter)
}
}

View File

@ -1,206 +0,0 @@
use serde_derive::{Deserialize, Serialize};
use std::io::{Read, Seek};
use zip::ZipArchive;
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct SourceQuestion {
#[serde(default)]
pub num: u32,
pub id: String,
#[serde(alias = "Вопрос")]
pub description: String,
#[serde(alias = "Ответ")]
pub answer: String,
#[serde(alias = "Автор")]
#[serde(default)]
pub author: String,
#[serde(alias = "Комментарий")]
#[serde(default)]
pub comment: String,
#[serde(alias = "Комментарии")]
#[serde(alias = "Инфо")]
#[serde(default)]
pub comment1: String,
#[serde(alias = "Тур")]
#[serde(default)]
pub tour: String,
#[serde(alias = "Ссылка")]
#[serde(alias = "URL")]
#[serde(default)]
pub url: String,
#[serde(alias = "Дата")]
#[serde(default)]
pub date: String,
#[serde(alias = "Обработан")]
#[serde(default)]
pub processed_by: String,
#[serde(alias = "Редактор")]
#[serde(default)]
pub redacted_by: String,
#[serde(alias = "Копирайт")]
#[serde(default)]
pub copyright: String,
#[serde(alias = "Тема")]
#[serde(default)]
pub theme: String,
#[serde(alias = "Вид")]
#[serde(alias = "Тип")]
#[serde(default)]
pub kind: String,
#[serde(alias = "Источник")]
#[serde(default)]
pub source: String,
#[serde(alias = "Рейтинг")]
#[serde(default)]
pub rating: String,
}
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct SourceQuestionsBatch {
#[serde(default)]
pub filename: String,
#[serde(alias = "Пакет")]
#[serde(alias = "Чемпионат")]
pub description: String,
#[serde(alias = "Автор")]
#[serde(default)]
pub author: String,
#[serde(alias = "Комментарий")]
#[serde(alias = "Комментарии")]
#[serde(alias = "Инфо")]
#[serde(default)]
pub comment: String,
#[serde(alias = "Ссылка")]
#[serde(alias = "URL")]
#[serde(default)]
pub url: String,
#[serde(alias = "Дата")]
#[serde(default)]
pub date: String,
#[serde(alias = "Обработан")]
#[serde(default)]
pub processed_by: String,
#[serde(alias = "Редактор")]
#[serde(default)]
pub redacted_by: String,
#[serde(alias = "Копирайт")]
#[serde(default)]
pub copyright: String,
#[serde(alias = "Тема")]
#[serde(default)]
pub theme: String,
#[serde(alias = "Вид")]
#[serde(alias = "Тип")]
#[serde(default)]
pub kind: String,
#[serde(alias = "Источник")]
#[serde(default)]
pub source: String,
#[serde(alias = "Рейтинг")]
#[serde(default)]
pub rating: String,
#[serde(alias = "Вопросы")]
pub questions: Vec<SourceQuestion>,
}
pub struct SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
zipfile: ZipArchive<R>,
index: Option<usize>,
}
impl<R> SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
fn new(zipfile: ZipArchive<R>) -> Self {
SourceQuestionsZipReader {
zipfile,
index: None,
}
}
}
impl<R> Iterator for SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
type Item = (String, Result<SourceQuestionsBatch, serde_json::Error>);
fn next(&mut self) -> Option<Self::Item> {
if self.index.is_none() && !self.zipfile.is_empty() {
self.index = Some(0);
}
match self.index {
Some(i) if i < self.zipfile.len() => {
self.index = Some(i + 1);
self.nth(i)
}
_ => None,
}
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
if self.zipfile.len() <= n {
return None;
}
self.index = Some(n + 1);
let file = self.zipfile.by_index(n).unwrap();
let name = file.mangled_name();
let name_str = name.to_str().unwrap();
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
Some((String::from(name_str), data))
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.zipfile.len();
let index = self.index.unwrap_or(0);
let rem = if len > index + 1 {
len - (index + 1)
} else {
0
};
(rem, Some(rem))
}
fn count(self) -> usize
where
Self: Sized,
{
self.zipfile.len()
}
}
impl<R> ExactSizeIterator for SourceQuestionsZipReader<R>
where
R: Read + Seek,
{
fn len(&self) -> usize {
self.zipfile.len()
}
}
pub trait ReadSourceQuestionsBatches<R>
where
R: Read + Seek,
{
fn source_questions(self) -> SourceQuestionsZipReader<R>;
}
impl<R> ReadSourceQuestionsBatches<R> for ZipArchive<R>
where
R: Read + Seek,
{
fn source_questions(self) -> SourceQuestionsZipReader<R> {
SourceQuestionsZipReader::new(self)
}
}