Compare commits
49 Commits
Author | SHA1 | Date | |
---|---|---|---|
f5bd72b298
|
|||
dda50e7d2b
|
|||
693f349ae3
|
|||
7a2e58b1b9
|
|||
9570b1d6e2
|
|||
5355d0398d
|
|||
b63e9aa45c
|
|||
8120a996a3
|
|||
3a26a4aa7f
|
|||
103b677d21
|
|||
e18539a982
|
|||
249ac3a4ef
|
|||
e940f1c37c
|
|||
f3dabe7a06
|
|||
e521e39f5e | |||
cf591198a0 | |||
be6b17a8e2 | |||
6a3b3647b3 | |||
7efd03d624 | |||
a05edad5f7 | |||
5f4fc35b71 | |||
78b319e41a | |||
38cee92d5f | |||
3f4a144431 | |||
bef08e6166 | |||
5ed8c12f91
|
|||
2acb324da9
|
|||
dbf9e8cdac
|
|||
f275069f23
|
|||
17446a6318
|
|||
7c27a20ebd | |||
f361a35b65 | |||
efec662700 | |||
a5ca0c65a7 | |||
c28508b147 | |||
f51f2be18f | |||
3b24a1be2f | |||
35b8aaadc7 | |||
76f794f167 | |||
8ad88490e7 | |||
73e8b34af4 | |||
9316bbbf5c
|
|||
7ee0b62ed7
|
|||
16784a3319
|
|||
42fc486872
|
|||
5a4086cfdf | |||
2996aefa7b
|
|||
f7f713ade8
|
|||
7389290d80 |
37
.drone.yml
Normal file
37
.drone.yml
Normal file
@@ -0,0 +1,37 @@
|
||||
kind: pipeline
|
||||
name: default
|
||||
|
||||
steps:
|
||||
- name: test
|
||||
image: rust:1-alpine
|
||||
commands:
|
||||
- apk add --no-cache musl-dev
|
||||
- cargo build --verbose --all
|
||||
- cargo test --verbose --all
|
||||
environment:
|
||||
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
|
||||
|
||||
trigger:
|
||||
event:
|
||||
- push
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: publish
|
||||
|
||||
steps:
|
||||
- name: push
|
||||
image: rust:1-alpine
|
||||
commands:
|
||||
- apk add --no-cache musl-dev
|
||||
- cargo build -p chgk_ledb_lib
|
||||
- cargo publish --registry gitea -p chgk_ledb_lib
|
||||
environment:
|
||||
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
|
||||
CARGO_REGISTRIES_GITEA_INDEX: https://gitea.b4tman.ru/b4tman/_cargo-index.git
|
||||
CARGO_REGISTRIES_GITEA_TOKEN:
|
||||
from_secret: cargo_gitea_token
|
||||
|
||||
trigger:
|
||||
event:
|
||||
- tag
|
4
.gitignore
vendored
4
.gitignore
vendored
@@ -4,4 +4,6 @@
|
||||
test?.zip
|
||||
json.zip
|
||||
/exp
|
||||
/.vscode
|
||||
/.vscode
|
||||
test*.bin
|
||||
db.dat
|
||||
|
832
Cargo.lock
generated
832
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
23
Cargo.toml
23
Cargo.toml
@@ -1,21 +1,8 @@
|
||||
[package]
|
||||
name = "chgk_ledb"
|
||||
version = "0.1.0"
|
||||
authors = ["Dmitry <b4tm4n@mail.ru>"]
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
serde="1.0"
|
||||
serde_derive="1.0"
|
||||
serde_json="1.0"
|
||||
ledb="0.4"
|
||||
ledb-derive="0.4"
|
||||
ledb-types="0.4"
|
||||
zip="0.6"
|
||||
rand="0.8"
|
||||
clap = { version = "3.2.22", features = ["derive"] }
|
||||
[workspace]
|
||||
members = [
|
||||
"app",
|
||||
"lib"
|
||||
]
|
||||
|
||||
[profile.release]
|
||||
opt-level = 3
|
||||
|
54
README.md
54
README.md
@@ -4,12 +4,58 @@
|
||||
|
||||
Исходный файл вопросов: `json.zip`, кодировка `UTF-8`.
|
||||
|
||||
Выходная база: файл `./db/data.mdb`, формат базы [LMDB](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database).
|
||||
|
||||
Работа с базой выполняется с помощью [ledb](https://crates.io/crates/ledb).
|
||||
|
||||
При загрузке базы информация о пакете(файле/турнире) дублируется в каждом вопросе.
|
||||
|
||||
## Выходной формат данных
|
||||
|
||||
Для хранения данных используется [bincode](https://crates.io/crates/bincode) и [zstd](https://crates.io/crates/zstd).
|
||||
Данные вопросов в виде структуры сериализуются через `bincode` в бинарные данные и сжимаются `zstd`. Каждый вопрос сериализуется и сжимается отдельно.
|
||||
В файле сжатые данные храняться последовательно, после заголовка файла.
|
||||
|
||||
### Заголовок файла
|
||||
|
||||
В заголовке хранятся только смещения каждого сжатого блока данных, и дополнительно, смещение указывающее на конец файла.
|
||||
Смещение указывается в виде 32-битного беззнакового целового числа `u32`, сохраненного в виде 4-х байтов от младшего к старшему ([Little Endian](https://ru.wikipedia.org/wiki/Порядок_байтов#Порядок_от_младшего_к_старшему)).
|
||||
|
||||
Пример заголовка:
|
||||
|
||||
~~~
|
||||
00: 10 00 00 00
|
||||
04: 1A 00 00 00
|
||||
08: 2E 00 00 00
|
||||
0А: 3A 00 00 00
|
||||
~~~
|
||||
|
||||
данные:
|
||||
|
||||
~~~
|
||||
10: 00 00 00 00 00 00 00 00 00 00
|
||||
1A: 00 00 00 00 00 00 00 00 00 00
|
||||
00 00 00 00 00 00 00 00 00 00
|
||||
2E: 00 00 00 00 00 00 00 00 00 00
|
||||
00 00
|
||||
3A: (EOF)
|
||||
~~~
|
||||
|
||||
В этом примере сохранены 3 записи:
|
||||
|
||||
1. смещение **0x10**, длина **10** байт
|
||||
2. смещение **0x1A**, длина **20** байт
|
||||
3. смещение **0x2E**, длина **12** байт
|
||||
|
||||
Размер файла - 58 байт (0x3A), размер заголовка 16 байт (0x10).
|
||||
|
||||
### Чтение данных
|
||||
|
||||
Пусть размер записи заголовка в байтах = **`M`**, количество записей в файле = **`N`**.
|
||||
|
||||
Тогда для того чтобы:
|
||||
|
||||
- Найти **`N`**, нужно прочитать первую запись (**`M`** байт) в начале файла и разделить её значение на **`M`**;
|
||||
- Найти элемент данных с индексом **`i`**, нужно последовательно прочитать 2 записи (**`M * 2`** байт), начиная c индекса **`i`** (по смещению в файле: **`i * M`**). Смещением будет значение по индексу **`i`**, длинной - разница между значениями по индексам **`i + 1`** и **`i`**.
|
||||
|
||||
Далее для каждого вопроса отдельно предполагается распаковка данных через `zstd` и десериализация через `bincode`.
|
||||
|
||||
## Ссылки
|
||||
|
||||
- Источник вопросов: http://db.chgk.info
|
||||
|
28
app/Cargo.toml
Normal file
28
app/Cargo.toml
Normal file
@@ -0,0 +1,28 @@
|
||||
[package]
|
||||
name = "chgk_ledb"
|
||||
version = "1.1.0"
|
||||
authors = ["Dmitry <b4tm4n@mail.ru>"]
|
||||
edition = "2021"
|
||||
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
|
||||
license = "MIT"
|
||||
description = "Утилита загружающая базу данных ЧГК вопросов из ZIP файла в JSON формате в базу данных."
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[[bench]]
|
||||
name = "db_bench"
|
||||
harness = false
|
||||
|
||||
[dependencies]
|
||||
chgk_ledb_lib = {path = "../lib"}
|
||||
serde_json="1.0"
|
||||
zip="0.6"
|
||||
rand="0.8"
|
||||
clap = { version = "3.2.22", features = ["derive"] }
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = "0.4.0"
|
||||
tempfile = "3.3"
|
||||
bincode = "^2.0.0-rc.2"
|
||||
serde="1.0"
|
||||
serde_derive="1.0"
|
110
app/benches/db_bench.rs
Normal file
110
app/benches/db_bench.rs
Normal file
@@ -0,0 +1,110 @@
|
||||
#[macro_use]
|
||||
extern crate criterion;
|
||||
extern crate bincode;
|
||||
extern crate serde;
|
||||
extern crate serde_derive;
|
||||
extern crate serde_json;
|
||||
extern crate tempfile;
|
||||
|
||||
use chgk_ledb_lib::db;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use db::{Reader, Writer, WriterOpts};
|
||||
|
||||
use criterion::{BatchSize, Criterion};
|
||||
use tempfile::tempdir;
|
||||
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
|
||||
#[derive(
|
||||
bincode::Encode,
|
||||
bincode::Decode,
|
||||
Clone,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
)]
|
||||
struct TestData {
|
||||
num1: u64,
|
||||
num2: u64,
|
||||
test: String,
|
||||
}
|
||||
|
||||
const N: usize = 4096;
|
||||
|
||||
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
|
||||
(0..count)
|
||||
.into_iter()
|
||||
.map(|i| 143 + i as u64)
|
||||
.map(|i| TestData {
|
||||
num1: i,
|
||||
num2: i * 100 ^ 0xDF0E441122334455,
|
||||
test: "test ---- Test ____".repeat(123 + i as usize % 15),
|
||||
})
|
||||
}
|
||||
|
||||
fn prepare_db_writer(path: &PathBuf) -> Writer<TestData> {
|
||||
let opts = WriterOpts {
|
||||
compress_lvl: 1,
|
||||
data_buf_size: 100 * 1024 * 1024,
|
||||
out_buf_size: 100 * 1024 * 1024,
|
||||
current_buf_size: 10240,
|
||||
};
|
||||
|
||||
Writer::new(path, opts).expect("new writer")
|
||||
}
|
||||
|
||||
fn db_read(c: &mut Criterion) {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let tmpfile = dir.path().join("test.tmp");
|
||||
let mut writer = prepare_db_writer(&tmpfile);
|
||||
|
||||
let mut items_iter = gen_data(N).collect::<Vec<TestData>>().into_iter();
|
||||
writer.load(&mut items_iter).unwrap();
|
||||
writer.finish().unwrap();
|
||||
|
||||
c.bench_function("read", |b| {
|
||||
b.iter_batched(
|
||||
|| {
|
||||
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
|
||||
reader
|
||||
},
|
||||
|reader| {
|
||||
for item in reader {
|
||||
drop(item);
|
||||
}
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
fn db_write(c: &mut Criterion) {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let tmpfile = dir.path().join("test.tmp");
|
||||
c.bench_function("write", |b| {
|
||||
b.iter_batched(
|
||||
|| {
|
||||
let src = gen_data(N).collect::<Vec<TestData>>().into_iter();
|
||||
let writer = prepare_db_writer(&tmpfile);
|
||||
(src, writer)
|
||||
},
|
||||
|(mut src, mut writer)| {
|
||||
writer.load(&mut src).unwrap();
|
||||
writer.finish().unwrap();
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
fn config() -> Criterion {
|
||||
Criterion::default().sample_size(40)
|
||||
}
|
||||
|
||||
criterion_group! {name=benches; config = config(); targets = db_read, db_write}
|
||||
criterion_main!(benches);
|
166
app/src/main.rs
Normal file
166
app/src/main.rs
Normal file
@@ -0,0 +1,166 @@
|
||||
extern crate serde_json;
|
||||
use clap::{Parser, Subcommand};
|
||||
use rand::seq::IteratorRandom;
|
||||
|
||||
use std::io;
|
||||
use std::time::Instant;
|
||||
use std::{fs, sync::mpsc, thread};
|
||||
|
||||
use chgk_ledb_lib::db;
|
||||
use chgk_ledb_lib::questions;
|
||||
use chgk_ledb_lib::source;
|
||||
|
||||
use crate::questions::{Question, QuestionsConverter};
|
||||
use crate::source::ReadSourceQuestionsBatches;
|
||||
|
||||
const ZIP_FILENAME: &str = "json.zip";
|
||||
const NEW_DB_FILENAME: &str = "db.dat";
|
||||
|
||||
#[derive(Subcommand, Debug)]
|
||||
enum Command {
|
||||
Write,
|
||||
Print {
|
||||
#[clap(value_parser, default_value = "0")]
|
||||
id: u32,
|
||||
},
|
||||
ZipPrint {
|
||||
#[clap(value_parser, default_value = "0")]
|
||||
file_num: usize,
|
||||
#[clap(value_parser, default_value = "0")]
|
||||
num: usize,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[clap(author, version, about, long_about = None)]
|
||||
#[clap(propagate_version = true)]
|
||||
struct Cli {
|
||||
#[clap(subcommand)]
|
||||
command: Command,
|
||||
#[clap(short, long, action)]
|
||||
measure: bool,
|
||||
}
|
||||
|
||||
fn zip_reader_task(tx: mpsc::Sender<Question>) {
|
||||
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
|
||||
let zip_reader = io::BufReader::new(zip_file);
|
||||
let archive = zip::ZipArchive::new(zip_reader).unwrap();
|
||||
let mut source_questions = archive.source_questions();
|
||||
|
||||
let questions = source_questions
|
||||
.convert()
|
||||
.enumerate()
|
||||
.map(|(num, mut question)| {
|
||||
question.num = 1 + num as u32;
|
||||
question
|
||||
});
|
||||
for question in questions {
|
||||
let res = tx.send(question);
|
||||
if res.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
println!("read done");
|
||||
}
|
||||
|
||||
fn print_question_from<F>(get_q: F)
|
||||
where
|
||||
F: FnOnce() -> Option<Question>,
|
||||
{
|
||||
let q = get_q().expect("question not found");
|
||||
println!("{:#?}", q)
|
||||
}
|
||||
|
||||
fn read_from_zip(file_num: usize, mut num: usize) -> Option<Question> {
|
||||
let mut rng = rand::thread_rng();
|
||||
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
|
||||
let zip_reader = io::BufReader::new(zip_file);
|
||||
let archive = zip::ZipArchive::new(zip_reader).unwrap();
|
||||
|
||||
let mut source_questions = archive.source_questions();
|
||||
let (filename, batch) = if file_num == 0 {
|
||||
source_questions.choose(&mut rng).unwrap()
|
||||
} else {
|
||||
source_questions.nth(file_num - 1).unwrap()
|
||||
};
|
||||
let mut batch = batch.unwrap();
|
||||
batch.filename = filename;
|
||||
let questions: Vec<Question> = batch.into();
|
||||
if num == 0 {
|
||||
num = (1..=questions.len()).choose(&mut rng).unwrap();
|
||||
}
|
||||
Some(questions[num - 1].clone())
|
||||
}
|
||||
|
||||
// measure and return time elapsed in `func` in seconds
|
||||
pub fn measure<F: FnOnce()>(func: F) -> f64 {
|
||||
let start = Instant::now();
|
||||
func();
|
||||
let elapsed = start.elapsed();
|
||||
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
|
||||
}
|
||||
|
||||
pub fn measure_and_print<F: FnOnce()>(func: F) {
|
||||
let m = measure(func);
|
||||
eprintln!("{}", m);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let args = Cli::parse();
|
||||
|
||||
let mut action: Box<dyn FnOnce()> = match &args.command {
|
||||
Command::Write => Box::new(write_db),
|
||||
Command::Print { id } => {
|
||||
let get_question = Box::new(|| read_from_db(*id));
|
||||
Box::new(|| print_question_from(get_question))
|
||||
}
|
||||
Command::ZipPrint { file_num, num } => {
|
||||
let get_question = Box::new(|| read_from_zip(*file_num, *num));
|
||||
Box::new(|| print_question_from(get_question))
|
||||
}
|
||||
};
|
||||
|
||||
if args.measure {
|
||||
action = Box::new(|| measure_and_print(action));
|
||||
}
|
||||
|
||||
action();
|
||||
}
|
||||
|
||||
fn read_from_db(id: u32) -> Option<Question> {
|
||||
let reader: db::Reader<Question> =
|
||||
db::Reader::new(NEW_DB_FILENAME, 2048).expect("new db reader");
|
||||
|
||||
let mut questions = reader.into_iter();
|
||||
|
||||
match id {
|
||||
0 => {
|
||||
let mut rng = rand::thread_rng();
|
||||
questions.choose(&mut rng)
|
||||
}
|
||||
_ => questions.nth((id - 1) as usize),
|
||||
}
|
||||
}
|
||||
fn write_db() {
|
||||
let (tx, rx) = mpsc::channel::<Question>();
|
||||
[
|
||||
thread::spawn(move || zip_reader_task(tx)),
|
||||
thread::spawn(move || db_writer_task(rx)),
|
||||
]
|
||||
.into_iter()
|
||||
.for_each(|handle| handle.join().expect("thread panic"));
|
||||
println!("all done");
|
||||
}
|
||||
fn db_writer_task(rx: mpsc::Receiver<Question>) {
|
||||
let writer_opts = db::WriterOpts::default();
|
||||
let mut writer: db::Writer<Question> =
|
||||
db::Writer::new(NEW_DB_FILENAME, writer_opts).expect("new db writer");
|
||||
|
||||
writer
|
||||
.load(&mut rx.iter())
|
||||
.unwrap_or_else(|e| panic!("db writer load, {e:#?}"));
|
||||
|
||||
writer.finish().expect("db writer finish");
|
||||
|
||||
println!("write done");
|
||||
}
|
23
bench.txt
Normal file
23
bench.txt
Normal file
@@ -0,0 +1,23 @@
|
||||
866 MB db/data.mdb
|
||||
232 MB test.bin
|
||||
95 MB json.zip
|
||||
---
|
||||
|
||||
hyperfine -n print -n print2 -n zip-print -w 400 -m 400 ".\target\release\chgk_ledb.exe print 444" ".\target\release\chgk_ledb.exe print2 444" ".\target\release\chgk_ledb.exe zip-print 4 84"
|
||||
|
||||
Benchmark 1: print
|
||||
Time (mean ± σ): 19.0 ms ± 1.5 ms [User: 5.6 ms, System: 13.1 ms]
|
||||
Range (min … max): 16.8 ms … 24.5 ms 400 runs
|
||||
|
||||
Benchmark 2: print2
|
||||
Time (mean ± σ): 18.6 ms ± 1.6 ms [User: 5.5 ms, System: 12.6 ms]
|
||||
Range (min … max): 16.1 ms … 29.5 ms 400 runs
|
||||
|
||||
Benchmark 3: zip-print
|
||||
Time (mean ± σ): 40.8 ms ± 3.3 ms [User: 15.4 ms, System: 21.6 ms]
|
||||
Range (min … max): 36.5 ms … 67.5 ms 400 runs
|
||||
|
||||
Summary
|
||||
'print2' ran
|
||||
1.02 ± 0.12 times faster than 'print'
|
||||
2.20 ± 0.26 times faster than 'zip-print'
|
22
lib/Cargo.toml
Normal file
22
lib/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
|
||||
[package]
|
||||
name = "chgk_ledb_lib"
|
||||
version = "1.1.0"
|
||||
authors = ["Dmitry <b4tm4n@mail.ru>"]
|
||||
edition = "2021"
|
||||
repository = "https://gitea.b4tman.ru/b4tman/chgk_ledb"
|
||||
license = "MIT"
|
||||
description = "Библиотека для доступа к файлу базы данных вопросов ЧГК"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
serde="1.0"
|
||||
serde_derive="1.0"
|
||||
serde_json="1.0"
|
||||
zip="0.6"
|
||||
bincode = "^2.0.0-rc.2"
|
||||
zstd = "^0.10"
|
||||
memmap = "0.7.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3"
|
542
lib/src/db.rs
Normal file
542
lib/src/db.rs
Normal file
@@ -0,0 +1,542 @@
|
||||
use std::{
|
||||
fs,
|
||||
io::{self, Cursor, Read, Write},
|
||||
marker::PhantomData,
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use memmap::{Mmap, MmapOptions};
|
||||
|
||||
type LSize = u32;
|
||||
const LEN_SIZE: usize = std::mem::size_of::<LSize>();
|
||||
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
|
||||
|
||||
trait ErrorToString {
|
||||
type Output;
|
||||
fn str_err(self) -> std::result::Result<Self::Output, String>;
|
||||
}
|
||||
|
||||
impl<T, E> ErrorToString for std::result::Result<T, E>
|
||||
where
|
||||
E: std::error::Error,
|
||||
{
|
||||
type Output = T;
|
||||
fn str_err(self) -> std::result::Result<Self::Output, String> {
|
||||
self.map_err(|e| e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct WriterOpts {
|
||||
pub compress_lvl: i32,
|
||||
pub data_buf_size: usize,
|
||||
pub out_buf_size: usize,
|
||||
pub current_buf_size: usize,
|
||||
}
|
||||
|
||||
impl Default for WriterOpts {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
compress_lvl: 1,
|
||||
data_buf_size: 500 * 1024 * 1024,
|
||||
out_buf_size: 200 * 1024 * 1024,
|
||||
current_buf_size: 100 * 1024,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Writer<T>
|
||||
where
|
||||
T: bincode::Encode,
|
||||
{
|
||||
out: io::BufWriter<fs::File>,
|
||||
data_buf: Cursor<Vec<u8>>,
|
||||
cur_buf_raw: Cursor<Vec<u8>>,
|
||||
table: Vec<LSize>,
|
||||
compress_lvl: i32,
|
||||
_t: PhantomData<*const T>,
|
||||
}
|
||||
|
||||
impl<T> Writer<T>
|
||||
where
|
||||
T: bincode::Encode,
|
||||
{
|
||||
pub fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
|
||||
let out = fs::File::create(path).str_err()?;
|
||||
let out = io::BufWriter::with_capacity(opts.out_buf_size, out);
|
||||
let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size);
|
||||
let data_buf = Cursor::new(data_buf);
|
||||
|
||||
let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
|
||||
let cur_buf_raw = Cursor::new(cur_buf_raw);
|
||||
|
||||
let compress_lvl = opts.compress_lvl;
|
||||
|
||||
let table: Vec<LSize> = vec![];
|
||||
|
||||
Ok(Self {
|
||||
out,
|
||||
data_buf,
|
||||
cur_buf_raw,
|
||||
table,
|
||||
compress_lvl,
|
||||
_t: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn push(&mut self, item: T) -> Result<(), String> {
|
||||
let pos: LSize = self.data_buf.position() as LSize;
|
||||
|
||||
let item_data = bincode::encode_to_vec(item, BINCODE_CFG).str_err()?;
|
||||
|
||||
let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?;
|
||||
zencoder
|
||||
.set_pledged_src_size(item_data.len() as u64)
|
||||
.str_err()?;
|
||||
|
||||
self.cur_buf_raw.set_position(0);
|
||||
let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder);
|
||||
cur_buf_z.write_all(&item_data).str_err()?;
|
||||
cur_buf_z.finish().str_err()?;
|
||||
cur_buf_z.flush().str_err()?;
|
||||
|
||||
self.table.push(pos);
|
||||
let (cur_buf_raw, _) = cur_buf_z.into_inner();
|
||||
let size = cur_buf_raw.position();
|
||||
|
||||
cur_buf_raw.set_position(0);
|
||||
let mut chunk = cur_buf_raw.take(size);
|
||||
io::copy(&mut chunk, &mut self.data_buf).str_err()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn load<I>(&mut self, iter: &mut I) -> Result<(), String>
|
||||
where
|
||||
I: Iterator<Item = T>,
|
||||
{
|
||||
let hint = iter.size_hint();
|
||||
let hint = std::cmp::max(hint.0, hint.1.unwrap_or(0));
|
||||
if hint > 0 {
|
||||
self.table.reserve(hint);
|
||||
}
|
||||
|
||||
for item in iter {
|
||||
self.push(item)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn finish(mut self) -> Result<(), String> {
|
||||
// finish tab
|
||||
let pos: LSize = self.data_buf.position() as LSize;
|
||||
self.table.push(pos);
|
||||
|
||||
// write tab
|
||||
let tab_size = (self.table.len() * LEN_SIZE) as LSize;
|
||||
for pos in self.table {
|
||||
let pos_data = (pos + tab_size).to_le_bytes();
|
||||
self.out.write_all(&pos_data).str_err()?;
|
||||
}
|
||||
|
||||
// copy data
|
||||
let data_size = self.data_buf.position();
|
||||
self.data_buf.set_position(0);
|
||||
let mut data = self.data_buf.take(data_size);
|
||||
io::copy(&mut data, &mut self.out).str_err()?;
|
||||
|
||||
self.out.flush().str_err()?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Reader<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
mmap: Mmap,
|
||||
count: usize,
|
||||
first_pos: LSize,
|
||||
_t: Option<Arc<T>>, // PhantomData replacement
|
||||
}
|
||||
|
||||
impl<T> Reader<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
pub fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> {
|
||||
let file = fs::File::open(path).str_err()?;
|
||||
let mmap = unsafe { MmapOptions::new().map(&file).str_err()? };
|
||||
|
||||
// read first pos and records count
|
||||
let first_data: [u8; LEN_SIZE] = mmap[0..LEN_SIZE].try_into().str_err()?;
|
||||
let first_pos = LSize::from_le_bytes(first_data);
|
||||
let tab_len = (first_pos as usize) / LEN_SIZE;
|
||||
let count = tab_len - 1;
|
||||
|
||||
Ok(Self {
|
||||
mmap,
|
||||
count,
|
||||
first_pos,
|
||||
_t: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.count
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
0 == self.len()
|
||||
}
|
||||
|
||||
pub fn get(&self, index: usize) -> Result<T, String> {
|
||||
if index >= self.len() {
|
||||
return Err("index out of range".into());
|
||||
}
|
||||
|
||||
let next_pos: usize = (index + 1) * LEN_SIZE;
|
||||
let next_end: usize = next_pos + LEN_SIZE;
|
||||
|
||||
// read item data pos
|
||||
let data_pos = if 0 == index {
|
||||
self.first_pos
|
||||
} else {
|
||||
let tab_pos: usize = index * LEN_SIZE;
|
||||
let pos_curr_data: [u8; LEN_SIZE] =
|
||||
self.mmap[tab_pos..next_pos].try_into().str_err()?;
|
||||
LSize::from_le_bytes(pos_curr_data)
|
||||
} as usize;
|
||||
|
||||
// read next item pos
|
||||
let pos_next_data: [u8; LEN_SIZE] = self.mmap[next_pos..next_end].try_into().str_err()?;
|
||||
let data_pos_next = LSize::from_le_bytes(pos_next_data) as usize;
|
||||
|
||||
// read & unpack item data
|
||||
let reader = io::Cursor::new(self.mmap[data_pos..data_pos_next].as_ref());
|
||||
let data = zstd::decode_all(reader).str_err()?;
|
||||
|
||||
// decode item
|
||||
let item: (T, usize) = bincode::decode_from_slice(&data, BINCODE_CFG).str_err()?;
|
||||
|
||||
Ok(item.0)
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> ReaderIter<'_, T> {
|
||||
ReaderIter::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ReaderIter<'a, T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
reader: &'a Reader<T>,
|
||||
index: Option<usize>,
|
||||
}
|
||||
|
||||
impl<'a, T> ReaderIter<'a, T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
fn new(reader: &'a Reader<T>) -> Self {
|
||||
ReaderIter {
|
||||
reader,
|
||||
index: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T> Iterator for ReaderIter<'a, T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
type Item = T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index.is_none() && !self.reader.is_empty() {
|
||||
self.index = Some(0);
|
||||
}
|
||||
|
||||
match self.index {
|
||||
Some(i) if i < self.reader.len() => self.nth(i),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
if self.reader.len() <= n {
|
||||
return None;
|
||||
}
|
||||
self.index = Some(n + 1);
|
||||
|
||||
let item = self.reader.get(n);
|
||||
match item {
|
||||
Ok(item) => Some(item),
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.reader.len();
|
||||
if self.index.is_none() {
|
||||
return (len, Some(len));
|
||||
}
|
||||
|
||||
let index = self.index.unwrap();
|
||||
let rem = if len > index + 1 {
|
||||
len - (index + 1)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
(rem, Some(rem))
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.reader.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T> ExactSizeIterator for ReaderIter<'a, T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
fn len(&self) -> usize {
|
||||
self.reader.len()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ReaderIntoIter<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
reader: Reader<T>,
|
||||
index: Option<usize>,
|
||||
}
|
||||
|
||||
impl<T> ReaderIntoIter<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
fn new(reader: Reader<T>) -> Self {
|
||||
Self {
|
||||
reader,
|
||||
index: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Iterator for ReaderIntoIter<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
type Item = T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index.is_none() && !self.reader.is_empty() {
|
||||
self.index = Some(0);
|
||||
}
|
||||
|
||||
match self.index {
|
||||
Some(i) if i < self.reader.len() => self.nth(i),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
if self.reader.len() <= n {
|
||||
return None;
|
||||
}
|
||||
self.index = Some(n + 1);
|
||||
|
||||
let item = self.reader.get(n);
|
||||
match item {
|
||||
Ok(item) => Some(item),
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.reader.len();
|
||||
if self.index.is_none() {
|
||||
return (len, Some(len));
|
||||
}
|
||||
|
||||
let index = self.index.unwrap();
|
||||
let rem = if len > index + 1 {
|
||||
len - (index + 1)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
(rem, Some(rem))
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.reader.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ExactSizeIterator for ReaderIntoIter<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
fn len(&self) -> usize {
|
||||
self.reader.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> IntoIterator for Reader<T>
|
||||
where
|
||||
T: bincode::Decode,
|
||||
{
|
||||
type Item = T;
|
||||
type IntoIter = ReaderIntoIter<Self::Item>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Self::IntoIter::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct TestData {
|
||||
num: u64,
|
||||
test: String,
|
||||
}
|
||||
|
||||
fn gen_data(count: usize) -> impl Iterator<Item = TestData> {
|
||||
(0..count).into_iter().map(|i| TestData {
|
||||
num: i as u64,
|
||||
test: "test".repeat(i),
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_read() {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let tmpfile = dir.path().join("test.tmp");
|
||||
let opts = WriterOpts {
|
||||
compress_lvl: 1,
|
||||
data_buf_size: 10 * 1024 * 1024,
|
||||
out_buf_size: 10 * 1024 * 1024,
|
||||
current_buf_size: 4096,
|
||||
};
|
||||
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
|
||||
|
||||
let items_iter = gen_data(5);
|
||||
let items: Vec<TestData> = items_iter.collect();
|
||||
|
||||
writer.load(&mut items.clone().into_iter()).expect("load");
|
||||
writer.finish().expect("finish write");
|
||||
|
||||
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
|
||||
assert_eq!(items.len(), reader.len());
|
||||
|
||||
for (idx, item) in items.iter().enumerate() {
|
||||
let ritem = reader.get(idx).expect("get");
|
||||
assert_eq!(*item, ritem);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_read_iter() {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let tmpfile = dir.path().join("test.tmp");
|
||||
let opts = WriterOpts {
|
||||
compress_lvl: 1,
|
||||
data_buf_size: 10 * 1024 * 1024,
|
||||
out_buf_size: 10 * 1024 * 1024,
|
||||
current_buf_size: 4096,
|
||||
};
|
||||
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
|
||||
|
||||
let items_iter = gen_data(10);
|
||||
let items: Vec<TestData> = items_iter.collect();
|
||||
|
||||
writer.load(&mut items.clone().into_iter()).expect("load");
|
||||
writer.finish().expect("finish write");
|
||||
|
||||
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
|
||||
assert_eq!(items.len(), reader.len());
|
||||
|
||||
items.into_iter().zip(reader.iter()).for_each(|pair| {
|
||||
assert_eq!(pair.0, pair.1);
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_read_into_iter() {
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let tmpfile = dir.path().join("test.tmp");
|
||||
let opts = WriterOpts {
|
||||
compress_lvl: 1,
|
||||
data_buf_size: 10 * 1024 * 1024,
|
||||
out_buf_size: 10 * 1024 * 1024,
|
||||
current_buf_size: 4096,
|
||||
};
|
||||
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
|
||||
|
||||
let items_iter = gen_data(10);
|
||||
let items: Vec<TestData> = items_iter.collect();
|
||||
|
||||
writer.load(&mut items.clone().into_iter()).expect("load");
|
||||
writer.finish().expect("finish write");
|
||||
|
||||
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
|
||||
assert_eq!(items.len(), reader.len());
|
||||
|
||||
items.into_iter().zip(reader).for_each(|pair| {
|
||||
assert_eq!(pair.0, pair.1);
|
||||
});
|
||||
}
|
||||
|
||||
/// sharing Reader instance between threads
|
||||
#[test]
|
||||
fn test_share_reader() {
|
||||
use std::thread;
|
||||
|
||||
let dir = tempdir().expect("tempdir");
|
||||
let tmpfile = dir.path().join("test.tmp");
|
||||
let opts = WriterOpts {
|
||||
compress_lvl: 1,
|
||||
data_buf_size: 10 * 1024 * 1024,
|
||||
out_buf_size: 10 * 1024 * 1024,
|
||||
current_buf_size: 4096,
|
||||
};
|
||||
let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
|
||||
|
||||
let items_iter = gen_data(10);
|
||||
let items: Vec<TestData> = items_iter.collect();
|
||||
|
||||
writer.load(&mut items.clone().into_iter()).expect("load");
|
||||
writer.finish().expect("finish write");
|
||||
|
||||
let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).expect("new reader");
|
||||
assert_eq!(items.len(), reader.len());
|
||||
|
||||
let reader = Arc::new(reader);
|
||||
for _ in 0..=3 {
|
||||
let cur_items = items.clone();
|
||||
let cur_reader = Arc::clone(&reader);
|
||||
thread::spawn(move || {
|
||||
cur_items
|
||||
.into_iter()
|
||||
.zip(cur_reader.iter())
|
||||
.for_each(|pair| {
|
||||
assert_eq!(pair.0, pair.1);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
3
lib/src/lib.rs
Normal file
3
lib/src/lib.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod db;
|
||||
pub mod questions;
|
||||
pub mod source;
|
136
lib/src/questions.rs
Normal file
136
lib/src/questions.rs
Normal file
@@ -0,0 +1,136 @@
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
|
||||
use crate::source::{SourceQuestion, SourceQuestionsBatch};
|
||||
|
||||
/// Builds a struct value by copying the listed fields from a source value.
///
/// Two forms:
/// * `make! {Target; by {a, b}; from src}` — every field of `Target` must be
///   listed;
/// * `make! {Target; with defaults and by {a, b}; from src}` — fields not
///   listed are filled from `Target::default()`.
macro_rules! make {
    ($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
        $field: $src.$field
    ),+}};
    ($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
        $field: $src.$field
    ),+ ,..$Target::default()}}
}
|
||||
|
||||
/// Metadata of the source batch (file) a question came from.
///
/// Every field falls back to an empty string when absent from the input JSON.
#[derive(Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode)]
pub struct BatchInfo {
    #[serde(default)]
    pub filename: String,
    #[serde(default)]
    pub description: String,
    #[serde(default)]
    pub author: String,
    #[serde(default)]
    pub comment: String,
    #[serde(default)]
    pub url: String,
    #[serde(default)]
    pub date: String,
    #[serde(default)]
    pub processed_by: String,
    #[serde(default)]
    pub redacted_by: String,
    #[serde(default)]
    pub copyright: String,
    #[serde(default)]
    pub theme: String,
    #[serde(default)]
    pub kind: String,
    #[serde(default)]
    pub source: String,
    #[serde(default)]
    pub rating: String,
}
|
||||
|
||||
/// A single question together with the metadata of the batch it came from.
///
/// `description` and `answer` are required on deserialization; all other
/// string fields default to empty.
#[derive(Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode)]
pub struct Question {
    // Sequence number; defaults to 0 when absent from the input.
    #[serde(default)]
    pub num: u32,
    pub id: String,

    pub description: String,
    pub answer: String,

    #[serde(default)]
    pub author: String,
    #[serde(default)]
    pub comment: String,
    #[serde(default)]
    pub comment1: String,
    #[serde(default)]
    pub tour: String,
    #[serde(default)]
    pub url: String,
    #[serde(default)]
    pub date: String,
    #[serde(default)]
    pub processed_by: String,
    #[serde(default)]
    pub redacted_by: String,
    #[serde(default)]
    pub copyright: String,
    #[serde(default)]
    pub theme: String,
    #[serde(default)]
    pub kind: String,
    #[serde(default)]
    pub source: String,
    #[serde(default)]
    pub rating: String,
    // Metadata of the source batch; see [`BatchInfo`].
    #[serde(default)]
    pub batch_info: BatchInfo,
}
|
||||
|
||||
impl From<SourceQuestion> for Question {
|
||||
fn from(src: SourceQuestion) -> Self {
|
||||
make! {Self; with defaults and by {
|
||||
num, id, description, answer, author, comment, comment1, tour, url,
|
||||
date, processed_by, redacted_by, copyright, theme, kind, source, rating
|
||||
}; from src}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SourceQuestionsBatch> for BatchInfo {
|
||||
fn from(src: SourceQuestionsBatch) -> Self {
|
||||
make! {Self; by {
|
||||
filename, description, author, comment, url, date,
|
||||
processed_by, redacted_by, copyright, theme, kind, source, rating
|
||||
}; from src}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SourceQuestionsBatch> for Vec<Question> {
|
||||
fn from(src: SourceQuestionsBatch) -> Self {
|
||||
let mut result: Vec<Question> = src
|
||||
.questions
|
||||
.iter()
|
||||
.map(|item| item.clone().into())
|
||||
.collect();
|
||||
let batch_info = BatchInfo::from(src);
|
||||
result.iter_mut().for_each(|mut question| {
|
||||
question.batch_info = batch_info.clone();
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an iterator of `(filename, parse result)` pairs into a flat
/// iterator of [`Question`]s.
pub trait QuestionsConverter {
    fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a>;
}
|
||||
|
||||
impl<T> QuestionsConverter for T
|
||||
where
|
||||
T: Iterator<Item = (String, Result<SourceQuestionsBatch, serde_json::Error>)>,
|
||||
{
|
||||
fn convert<'a>(&'a mut self) -> Box<dyn Iterator<Item = Question> + 'a> {
|
||||
let iter = self
|
||||
.filter(|(_, data)| data.is_ok())
|
||||
.flat_map(|(filename, data)| {
|
||||
let mut batch = data.unwrap();
|
||||
batch.filename = filename;
|
||||
let questions: Vec<Question> = batch.into();
|
||||
questions
|
||||
});
|
||||
Box::new(iter)
|
||||
}
|
||||
}
|
206
lib/src/source.rs
Normal file
206
lib/src/source.rs
Normal file
@@ -0,0 +1,206 @@
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use std::io::{Read, Seek};
|
||||
use zip::ZipArchive;
|
||||
|
||||
/// A question as it appears in the source JSON files.
///
/// The `alias` attributes accept the original (Russian) field names used in
/// the source data; fields marked `default` fall back to an empty string.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct SourceQuestion {
    #[serde(default)]
    pub num: u32,
    pub id: String,

    #[serde(alias = "Вопрос")]
    pub description: String,
    #[serde(alias = "Ответ")]
    pub answer: String,

    #[serde(alias = "Автор")]
    #[serde(default)]
    pub author: String,
    #[serde(alias = "Комментарий")]
    #[serde(default)]
    pub comment: String,
    #[serde(alias = "Комментарии")]
    #[serde(alias = "Инфо")]
    #[serde(default)]
    pub comment1: String,
    #[serde(alias = "Тур")]
    #[serde(default)]
    pub tour: String,
    #[serde(alias = "Ссылка")]
    #[serde(alias = "URL")]
    #[serde(default)]
    pub url: String,
    #[serde(alias = "Дата")]
    #[serde(default)]
    pub date: String,
    #[serde(alias = "Обработан")]
    #[serde(default)]
    pub processed_by: String,
    #[serde(alias = "Редактор")]
    #[serde(default)]
    pub redacted_by: String,
    #[serde(alias = "Копирайт")]
    #[serde(default)]
    pub copyright: String,
    #[serde(alias = "Тема")]
    #[serde(default)]
    pub theme: String,
    #[serde(alias = "Вид")]
    #[serde(alias = "Тип")]
    #[serde(default)]
    pub kind: String,
    #[serde(alias = "Источник")]
    #[serde(default)]
    pub source: String,
    #[serde(alias = "Рейтинг")]
    #[serde(default)]
    pub rating: String,
}
|
||||
|
||||
/// One source file's worth of questions plus its batch-level metadata.
///
/// The `alias` attributes accept the original (Russian) field names;
/// `filename` is not part of the JSON and is filled in by the caller.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct SourceQuestionsBatch {
    #[serde(default)]
    pub filename: String,
    #[serde(alias = "Пакет")]
    #[serde(alias = "Чемпионат")]
    pub description: String,
    #[serde(alias = "Автор")]
    #[serde(default)]
    pub author: String,
    #[serde(alias = "Комментарий")]
    #[serde(alias = "Комментарии")]
    #[serde(alias = "Инфо")]
    #[serde(default)]
    pub comment: String,
    #[serde(alias = "Ссылка")]
    #[serde(alias = "URL")]
    #[serde(default)]
    pub url: String,
    #[serde(alias = "Дата")]
    #[serde(default)]
    pub date: String,
    #[serde(alias = "Обработан")]
    #[serde(default)]
    pub processed_by: String,
    #[serde(alias = "Редактор")]
    #[serde(default)]
    pub redacted_by: String,
    #[serde(alias = "Копирайт")]
    #[serde(default)]
    pub copyright: String,
    #[serde(alias = "Тема")]
    #[serde(default)]
    pub theme: String,
    #[serde(alias = "Вид")]
    #[serde(alias = "Тип")]
    #[serde(default)]
    pub kind: String,
    #[serde(alias = "Источник")]
    #[serde(default)]
    pub source: String,
    #[serde(alias = "Рейтинг")]
    #[serde(default)]
    pub rating: String,
    #[serde(alias = "Вопросы")]
    pub questions: Vec<SourceQuestion>,
}
|
||||
|
||||
/// Iterator over the entries of a zip archive, parsing each entry as a
/// [`SourceQuestionsBatch`].
pub struct SourceQuestionsZipReader<R>
where
    R: Read + Seek,
{
    zipfile: ZipArchive<R>,
    // Index of the next entry to read; `None` until iteration starts.
    index: Option<usize>,
}
|
||||
|
||||
impl<R> SourceQuestionsZipReader<R>
|
||||
where
|
||||
R: Read + Seek,
|
||||
{
|
||||
fn new(zipfile: ZipArchive<R>) -> Self {
|
||||
SourceQuestionsZipReader {
|
||||
zipfile,
|
||||
index: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> Iterator for SourceQuestionsZipReader<R>
|
||||
where
|
||||
R: Read + Seek,
|
||||
{
|
||||
type Item = (String, Result<SourceQuestionsBatch, serde_json::Error>);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index.is_none() && !self.zipfile.is_empty() {
|
||||
self.index = Some(0);
|
||||
}
|
||||
|
||||
match self.index {
|
||||
Some(i) if i < self.zipfile.len() => {
|
||||
self.index = Some(i + 1);
|
||||
|
||||
self.nth(i)
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
if self.zipfile.len() <= n {
|
||||
return None;
|
||||
}
|
||||
self.index = Some(n + 1);
|
||||
|
||||
let file = self.zipfile.by_index(n).unwrap();
|
||||
let name = file.mangled_name();
|
||||
let name_str = name.to_str().unwrap();
|
||||
|
||||
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
|
||||
|
||||
Some((String::from(name_str), data))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.zipfile.len();
|
||||
let index = self.index.unwrap_or(0);
|
||||
let rem = if len > index + 1 {
|
||||
len - (index + 1)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
(rem, Some(rem))
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.zipfile.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> ExactSizeIterator for SourceQuestionsZipReader<R>
|
||||
where
|
||||
R: Read + Seek,
|
||||
{
|
||||
fn len(&self) -> usize {
|
||||
self.zipfile.len()
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait: view a zip archive as a stream of parsed question
/// batches.
pub trait ReadSourceQuestionsBatches<R>
where
    R: Read + Seek,
{
    fn source_questions(self) -> SourceQuestionsZipReader<R>;
}
|
||||
|
||||
impl<R> ReadSourceQuestionsBatches<R> for ZipArchive<R>
where
    R: Read + Seek,
{
    /// Consumes the archive and returns an iterator over its parsed entries.
    fn source_questions(self) -> SourceQuestionsZipReader<R> {
        SourceQuestionsZipReader::new(self)
    }
}
|
437
src/main.rs
437
src/main.rs
@@ -1,437 +0,0 @@
|
||||
extern crate serde;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
// This allows inserting JSON documents
|
||||
#[macro_use]
|
||||
extern crate serde_json;
|
||||
extern crate ledb;
|
||||
// This allows define typed documents easy
|
||||
#[macro_use]
|
||||
extern crate ledb_derive;
|
||||
extern crate ledb_types;
|
||||
extern crate zip;
|
||||
|
||||
use clap::{Parser, Subcommand};
|
||||
use rand::seq::IteratorRandom;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Instant;
|
||||
use std::{fs, io};
|
||||
|
||||
use ledb::{Options, Storage};
|
||||
|
||||
/// Zip archive containing the source question batches as JSON files.
const ZIP_FILENAME: &str = "json.zip";
/// Directory where the `ledb` storage keeps its files.
const DB_DIR: &str = "db";
|
||||
|
||||
/// Builds a struct value by copying the listed fields from a source value.
///
/// Two forms:
/// * `make! {Target; by {a, b}; from src}` — every field of `Target` must be
///   listed;
/// * `make! {Target; with defaults and by {a, b}; from src}` — fields not
///   listed are filled from `Target::default()`.
macro_rules! make {
    ($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
        $field: $src.$field
    ),+}};
    ($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
        $field: $src.$field
    ),+ ,..$Target::default()}}
}
|
||||
|
||||
// CLI subcommands; dispatched in `main`. Plain `//` comments are used on
// purpose: doc comments would change the clap-generated help text.
#[derive(Subcommand, Debug)]
enum Command {
    // Import all questions from the zip archive into the database.
    Write,
    // Compact the database storage.
    Compact,
    // Print question `id` from the database (0 = pick at random).
    Print {
        #[clap(value_parser, default_value = "0")]
        id: u32,
    },
    // Print question `num` of archive entry `file_num` straight from the
    // zip file (0 = pick at random).
    ZipPrint {
        #[clap(value_parser, default_value = "0")]
        file_num: usize,
        #[clap(value_parser, default_value = "0")]
        num: usize,
    },
}
|
||||
|
||||
// Command-line arguments: one subcommand plus a global `--measure` flag.
// Plain `//` comments on purpose: a doc comment would become the clap
// `about` text.
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
struct Cli {
    #[clap(subcommand)]
    command: Command,
    // When set, the elapsed time of the action is printed to stderr.
    #[clap(short, long, action)]
    measure: bool,
}
|
||||
|
||||
/// A question as it appears in the source JSON files; the `alias` attributes
/// accept the original (Russian) field names.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
struct SourceQuestion {
    #[serde(default)]
    num: u32,
    id: String,

    #[serde(alias = "Вопрос")]
    description: String,
    #[serde(alias = "Ответ")]
    answer: String,

    #[serde(alias = "Автор")]
    #[serde(default)]
    author: String,
    #[serde(alias = "Комментарий")]
    #[serde(default)]
    comment: String,
    #[serde(alias = "Комментарии")]
    #[serde(alias = "Инфо")]
    #[serde(default)]
    comment1: String,
    #[serde(alias = "Тур")]
    #[serde(default)]
    tour: String,
    #[serde(alias = "Ссылка")]
    #[serde(alias = "URL")]
    #[serde(default)]
    url: String,
    #[serde(alias = "Дата")]
    #[serde(default)]
    date: String,
    #[serde(alias = "Обработан")]
    #[serde(default)]
    processed_by: String,
    #[serde(alias = "Редактор")]
    #[serde(default)]
    redacted_by: String,
    #[serde(alias = "Копирайт")]
    #[serde(default)]
    copyright: String,
    #[serde(alias = "Тема")]
    #[serde(default)]
    theme: String,
    #[serde(alias = "Вид")]
    #[serde(alias = "Тип")]
    #[serde(default)]
    kind: String,
    #[serde(alias = "Источник")]
    #[serde(default)]
    source: String,
    #[serde(alias = "Рейтинг")]
    #[serde(default)]
    rating: String,
}
|
||||
|
||||
/// One source file's worth of questions plus its batch-level metadata; the
/// `alias` attributes accept the original (Russian) field names. `filename`
/// is not part of the JSON and is filled in by the caller.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
struct SourceQuestionsBatch {
    #[serde(default)]
    filename: String,
    #[serde(alias = "Пакет")]
    #[serde(alias = "Чемпионат")]
    description: String,
    #[serde(alias = "Автор")]
    #[serde(default)]
    author: String,
    #[serde(alias = "Комментарий")]
    #[serde(alias = "Комментарии")]
    #[serde(alias = "Инфо")]
    #[serde(default)]
    comment: String,
    #[serde(alias = "Ссылка")]
    #[serde(alias = "URL")]
    #[serde(default)]
    url: String,
    #[serde(alias = "Дата")]
    #[serde(default)]
    date: String,
    #[serde(alias = "Обработан")]
    #[serde(default)]
    processed_by: String,
    #[serde(alias = "Редактор")]
    #[serde(default)]
    redacted_by: String,
    #[serde(alias = "Копирайт")]
    #[serde(default)]
    copyright: String,
    #[serde(alias = "Тема")]
    #[serde(default)]
    theme: String,
    #[serde(alias = "Вид")]
    #[serde(alias = "Тип")]
    #[serde(default)]
    kind: String,
    #[serde(alias = "Источник")]
    #[serde(default)]
    source: String,
    #[serde(alias = "Рейтинг")]
    #[serde(default)]
    rating: String,
    #[serde(alias = "Вопросы")]
    questions: Vec<SourceQuestion>,
}
|
||||
|
||||
/// Batch metadata stored alongside every question document; `filename` is
/// the primary key of this nested document.
#[derive(Debug, Default, Clone, Serialize, Deserialize, Document)]
struct BatchInfo {
    #[document(primary)]
    #[serde(default)]
    filename: String,
    #[serde(default)]
    description: String,
    #[serde(default)]
    author: String,
    #[serde(default)]
    comment: String,
    #[serde(default)]
    url: String,
    #[serde(default)]
    date: String,
    #[serde(default)]
    processed_by: String,
    #[serde(default)]
    redacted_by: String,
    #[serde(default)]
    copyright: String,
    #[serde(default)]
    theme: String,
    #[serde(default)]
    kind: String,
    #[serde(default)]
    source: String,
    #[serde(default)]
    rating: String,
}
|
||||
|
||||
/// A question document for the `ledb` storage: `num` is the primary key,
/// `id` is indexed, and the originating batch's metadata is nested under
/// `batch_info`.
#[derive(Debug, Default, Clone, Serialize, Deserialize, Document)]
struct Question {
    #[document(primary)]
    #[serde(default)]
    num: u32,
    #[document(index)]
    id: String,

    description: String,
    answer: String,

    #[serde(default)]
    author: String,
    #[serde(default)]
    comment: String,
    #[serde(default)]
    comment1: String,
    #[serde(default)]
    tour: String,
    #[serde(default)]
    url: String,
    #[serde(default)]
    date: String,
    #[serde(default)]
    processed_by: String,
    #[serde(default)]
    redacted_by: String,
    #[serde(default)]
    copyright: String,
    #[serde(default)]
    theme: String,
    #[serde(default)]
    kind: String,
    #[serde(default)]
    source: String,
    #[serde(default)]
    rating: String,
    #[document(nested)]
    #[serde(default)]
    batch_info: BatchInfo,
}
|
||||
|
||||
impl From<SourceQuestion> for Question {
    /// Copies all matching fields; `batch_info` (the only field without a
    /// counterpart) is filled from `Question::default()`.
    fn from(src: SourceQuestion) -> Self {
        make! {Self; with defaults and by {
            num, id, description, answer, author, comment, comment1, tour, url,
            date, processed_by, redacted_by, copyright, theme, kind, source, rating
        }; from src}
    }
}
|
||||
|
||||
impl From<SourceQuestionsBatch> for BatchInfo {
    /// Extracts the batch-level metadata, discarding the contained questions.
    fn from(src: SourceQuestionsBatch) -> Self {
        make! {Self; by {
            filename, description, author, comment, url, date,
            processed_by, redacted_by, copyright, theme, kind, source, rating
        }; from src}
    }
}
|
||||
|
||||
impl From<SourceQuestionsBatch> for Vec<Question> {
|
||||
fn from(src: SourceQuestionsBatch) -> Self {
|
||||
let mut result: Vec<Question> = src
|
||||
.questions
|
||||
.iter()
|
||||
.map(|item| item.clone().into())
|
||||
.collect();
|
||||
let batch_info = BatchInfo::from(src);
|
||||
result.iter_mut().for_each(|mut question| {
|
||||
question.batch_info = batch_info.clone();
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs `func` once and returns the wall-clock time it took, in seconds.
pub fn measure<F: FnOnce()>(func: F) -> f64 {
    let start = Instant::now();
    func();
    // `Duration::as_secs_f64` replaces the manual `secs + nanos / 1e9`
    // arithmetic with the equivalent standard-library conversion.
    start.elapsed().as_secs_f64()
}
|
||||
|
||||
pub fn measure_and_print<F: FnOnce()>(func: F) {
|
||||
let m = measure(func);
|
||||
eprintln!("{}", m);
|
||||
}
|
||||
|
||||
/// Rebuilds the questions database from the `json.zip` archive.
///
/// Removes any existing data file, parses every JSON entry in the archive as
/// a [`SourceQuestionsBatch`], and inserts all resulting questions into the
/// `questions` collection, reporting progress and storage stats on stdout.
fn write_db() {
    // Remove a previous data file so the import starts from scratch.
    let out_file: PathBuf = [DB_DIR, "data.mdb"].into_iter().collect();
    match fs::metadata(&out_file) {
        Ok(x) if x.is_file() => {
            fs::remove_file(&out_file).unwrap();
            println!(r#""{}" removed"#, out_file.to_str().unwrap());
        }
        _ => {}
    };

    let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
    let zip_reader = io::BufReader::new(zip_file);
    let mut archive = zip::ZipArchive::new(zip_reader).unwrap();

    // Storage options tuned for a bulk single-process import: locking and
    // syncing disabled; data is synced explicitly once at the end.
    let options: Options = serde_json::from_value(json!({
        "map_size": 900 * 1024 * 1024, // 900mb
        "write_map": true,
        "map_async": true,
        "no_lock": true,
        "no_meta_sync": true,
        "no_sync": true,
    }))
    .unwrap();

    let storage = Storage::new(DB_DIR, options).unwrap();
    let collection = storage.collection("questions").unwrap();

    println!("converting...");

    // Shared mutable counter for the number of inserted questions.
    let mut count: usize = 0;
    let count = &mut count;
    (0..archive.len())
        .map(|i| {
            // Read entry `i` and try to parse it as a batch of questions.
            let file = archive.by_index(i).unwrap();
            let name = file.mangled_name();
            let name_str = name.to_str().unwrap();

            let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);

            (String::from(name_str), data)
        })
        // Entries that failed to parse are skipped silently.
        .filter(|(_, data)| data.is_ok())
        .flat_map(|(filename, data)| {
            let mut data = data.unwrap();
            data.filename = filename;
            let questions: Vec<Question> = data.into();
            questions
        })
        .for_each(|question| {
            let result = collection.insert(&question);
            if result.is_err() {
                // Dump the offending question before aborting the import.
                println!("-- {:#?}", question);
                panic!("{:#?}", result);
            } else {
                *count += 1;
            }
        });

    println!("inserted {}\nwriting...", count);
    // Flush everything to disk (syncing was disabled in the options above).
    storage.sync(true).unwrap();
    print!("stats: ");
    let stats = storage.stat().unwrap();
    println!("{:?}", stats);
    drop(storage);
}
|
||||
|
||||
fn print_question_from<F>(get_q: F)
|
||||
where
|
||||
F: FnOnce() -> Option<Question>,
|
||||
{
|
||||
let q = get_q().unwrap();
|
||||
println!("{:#?}", q)
|
||||
}
|
||||
|
||||
/// Reads one question directly from the zip archive.
///
/// `file_num` and `num` are 1-based; passing `0` picks the archive entry /
/// question at random.
fn read_from_zip(mut file_num: usize, mut num: usize) -> Option<Question> {
    let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
    let zip_reader = io::BufReader::new(zip_file);
    let mut archive = zip::ZipArchive::new(zip_reader).unwrap();

    let mut rng = rand::thread_rng();
    if file_num == 0 {
        file_num = (1..=archive.len()).choose(&mut rng).unwrap();
    }
    let file = archive.by_index(file_num - 1).unwrap();
    let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
    let data = data.unwrap();
    let questions: Vec<Question> = data.into();

    if num == 0 {
        num = (1..=questions.len()).choose(&mut rng).unwrap();
    }
    Some(questions[num - 1].clone())
}
|
||||
|
||||
/// Opens the storage with the `compact` option set and syncs it, compacting
/// the database files; prints the resulting storage stats.
fn compact_db() {
    let options: Options = serde_json::from_value(json!({
        "write_map": true,
        "map_async": true,
        "no_lock": true,
        "no_meta_sync": true,
        "no_sync": true,
        "compact": true,
    }))
    .unwrap();

    let storage = Storage::new(DB_DIR, options).unwrap();

    storage.sync(true).unwrap();
    let stats = storage.stat().unwrap();
    println!("{:?}", stats);
    drop(storage);
}
|
||||
|
||||
/// Fetches question `id` from the database (opened read-only); passing `0`
/// picks a random id in `1..=last_id`.
fn read_from_db(mut id: u32) -> Option<Question> {
    let options: Options = serde_json::from_value(json!({
        "read_only": true,
        "map_async": true,
        "no_lock": true,
    }))
    .unwrap();

    let storage = Storage::new(DB_DIR, options).unwrap();
    let collection = storage.collection("questions").unwrap();
    let mut rng = rand::thread_rng();

    if id == 0 {
        let last_id = collection.last_id().unwrap();
        id = (1..=last_id).choose(&mut rng).unwrap();
    }

    collection.get::<Question>(id).unwrap()
}
|
||||
|
||||
/// Parses the CLI arguments, selects the matching action, and runs it —
/// optionally wrapped in a timing measurement when `--measure` is set.
fn main() {
    let args = Cli::parse();

    // Each subcommand becomes a deferred closure so the optional `--measure`
    // wrapper can time the whole action uniformly.
    let mut action: Box<dyn FnOnce()> = match &args.command {
        Command::Write => Box::new(write_db),
        Command::Compact => Box::new(compact_db),
        Command::Print { id } => {
            let get_question = Box::new(|| read_from_db(*id));
            Box::new(|| print_question_from(get_question))
        }
        Command::ZipPrint { file_num, num } => {
            let get_question = Box::new(|| read_from_zip(*file_num, *num));
            Box::new(|| print_question_from(get_question))
        }
    };

    if args.measure {
        action = Box::new(|| measure_and_print(action));
    }

    action();
}
|
Reference in New Issue
Block a user