Compare commits


No commits in common. "postcard" and "master" have entirely different histories.

14 changed files with 206 additions and 371 deletions
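In short, this comparison drops postcard in favour of bincode 2.0-rc: item types derive bincode::Encode/bincode::Decode alongside serde, the separate questions::binary module is removed, and the readers/writers switch to bincode's encode/decode calls. A rough before/after sketch of that serialization change (illustrative only; TestData stands in for the crate's item types, and the postcard calls mirror the ones being removed below):

// Illustrative sketch, not code from the repository.
const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();

#[derive(bincode::Encode, bincode::Decode, Debug, PartialEq)]
struct TestData {
    num: u64,
    test: String,
}

fn roundtrip(item: &TestData) -> Result<TestData, String> {
    // Before (postcard 1.x, as removed in this diff): serialize into a caller-provided
    // buffer, then take the value back off the byte slice:
    //   let used = postcard::to_slice(item, buf.as_mut_slice()).map_err(|e| e.to_string())?;
    //   let (value, _rest): (TestData, _) = postcard::take_from_bytes(used).map_err(|e| e.to_string())?;

    // After (bincode 2.0-rc, as added in this diff): encode to a Vec<u8>, decode from a slice.
    let bytes = bincode::encode_to_vec(item, BINCODE_CFG).map_err(|e| e.to_string())?;
    let (value, _read): (TestData, usize) =
        bincode::decode_from_slice(&bytes, BINCODE_CFG).map_err(|e| e.to_string())?;
    Ok(value)
}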

Cargo.lock generated
View File

@@ -207,6 +207,25 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8a32fd6af2b5827bce66c29053ba0e7c42b9dcab01835835058558c10851a46b"
 
+[[package]]
+name = "bincode"
+version = "2.0.0-rc.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f11ea1a0346b94ef188834a65c068a03aec181c94896d481d7a0a40d85b0ce95"
+dependencies = [
+ "bincode_derive",
+ "serde",
+]
+
+[[package]]
+name = "bincode_derive"
+version = "2.0.0-rc.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e30759b3b99a1b802a7a3aa21c85c3ded5c28e1c83170d82d70f08bbf7f3e4c"
+dependencies = [
+ "virtue",
+]
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -292,6 +311,7 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 name = "chgk_ledb"
 version = "1.1.0"
 dependencies = [
+ "bincode",
  "chgk_ledb_lib",
  "clap 4.3.16",
  "criterion 0.4.0",
@@ -309,6 +329,7 @@ version = "1.1.0"
 dependencies = [
  "async-compression 0.4.1",
  "async_zip",
+ "bincode",
  "chgk_ledb_lib",
  "clap 4.3.16",
  "criterion 0.5.1",
@@ -330,6 +351,7 @@ dependencies = [
  "async-compression 0.4.1",
  "async-stream",
  "async_zip",
+ "bincode",
  "fmmap",
  "futures",
  "futures-core",
@@ -337,7 +359,6 @@ dependencies = [
  "insta",
  "memmap",
  "pin-project",
- "postcard",
  "serde",
  "serde_derive",
  "serde_json",
@@ -446,12 +467,6 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
 
-[[package]]
-name = "cobs"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
-
 [[package]]
 name = "colorchoice"
 version = "1.0.0"
@@ -1233,16 +1248,6 @@ dependencies = [
  "plotters-backend",
 ]
 
-[[package]]
-name = "postcard"
-version = "1.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9ee729232311d3cd113749948b689627618133b1c5012b77342c1950b25eaeb"
-dependencies = [
- "cobs",
- "serde",
-]
-
 [[package]]
 name = "ppv-lite86"
 version = "0.2.17"
@@ -1696,6 +1701,12 @@ version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
+[[package]]
+name = "virtue"
+version = "0.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9dcc60c0624df774c82a0ef104151231d37da4962957d691c011c852b2473314"
+
 [[package]]
 name = "walkdir"
 version = "2.3.3"

View File

@@ -27,5 +27,6 @@ clap = { version = "4.2.7", features = ["derive"] }
 [dev-dependencies]
 criterion = "0.4.0"
 tempfile = "3.3"
+bincode = "^2.0.0-rc.2"
 serde="1.0"
 serde_derive="1.0"

View File

@@ -1,5 +1,6 @@
 #[macro_use]
 extern crate criterion;
+extern crate bincode;
 extern crate serde;
 extern crate serde_derive;
 extern crate serde_json;
@@ -15,7 +16,18 @@ use tempfile::{tempdir, NamedTempFile};
 use serde_derive::{Deserialize, Serialize};
 
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
+#[derive(
+    bincode::Encode,
+    bincode::Decode,
+    Clone,
+    Debug,
+    PartialEq,
+    Eq,
+    PartialOrd,
+    Ord,
+    Serialize,
+    Deserialize,
+)]
 struct TestData {
     num1: u64,
     num2: u64,

View File

@@ -1,12 +1,13 @@
 #[macro_use]
 extern crate criterion;
+extern crate bincode;
 extern crate serde;
 extern crate serde_derive;
 extern crate serde_json;
 extern crate tempfile;
 
 use chgk_ledb_lib::db::{Reader, Writer, WriterOpts};
-use chgk_ledb_lib::questions::{binary::Question, QuestionsConverter};
+use chgk_ledb_lib::questions::{Question, QuestionsConverter};
 use chgk_ledb_lib::source::ReadSourceQuestionsBatches;
 use std::path::Path;

View File

@@ -10,7 +10,7 @@ use chgk_ledb_lib::db;
 use chgk_ledb_lib::questions;
 use chgk_ledb_lib::source;
 
-use crate::questions::{binary::Question, QuestionsConverter};
+use crate::questions::{Question, QuestionsConverter};
 use crate::source::ReadSourceQuestionsBatches;
 use chgk_ledb_lib::util::ErrorToString;

View File

@@ -37,6 +37,7 @@ futures = "0.3"
 [dev-dependencies]
 criterion = { version = "0.5.1", features = ["async_tokio"]}
 tempfile = "3.3"
+bincode = "^2.0.0-rc.2"
 serde="1.0"
 serde_derive="1.0"
 futures = "0.3"

View File

@@ -1,5 +1,6 @@
 #[macro_use]
 extern crate criterion;
+extern crate bincode;
 extern crate serde;
 extern crate serde_derive;
 extern crate tempfile;
@@ -15,7 +16,18 @@ use tempfile::{tempdir, NamedTempFile};
 use serde_derive::{Deserialize, Serialize};
 
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
+#[derive(
+    bincode::Encode,
+    bincode::Decode,
+    Clone,
+    Debug,
+    PartialEq,
+    Eq,
+    PartialOrd,
+    Ord,
+    Serialize,
+    Deserialize,
+)]
 struct TestData {
     num1: u64,
     num2: u64,

View File

@@ -1,12 +1,13 @@
 #[macro_use]
 extern crate criterion;
+extern crate bincode;
 extern crate serde;
 extern crate serde_derive;
 extern crate tempfile;
 
 use async_compression::Level;
 use chgk_ledb_lib::async_db::{Reader, Writer, WriterOpts};
-use chgk_ledb_lib::questions::{binary::Question, QuestionsConverterAsyncForStream};
+use chgk_ledb_lib::questions::{Question, QuestionsConverterAsyncForStream};
 use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
 use futures::pin_mut;
 use futures::StreamExt;

View File

@@ -16,7 +16,7 @@ use tokio::{fs, io};
 use tokio_stream::wrappers::UnboundedReceiverStream;
 
 use chgk_ledb_lib::async_db;
-use chgk_ledb_lib::questions::binary::Question;
+use chgk_ledb_lib::questions::Question;
 use chgk_ledb_lib::questions::QuestionsConverterAsyncForStream;
 use chgk_ledb_lib::source::ReadSourceQuestionsBatchesAsync;
 use chgk_ledb_lib::util::ErrorToString;

View File

@@ -12,16 +12,40 @@ description = "Библиотека для доступа к файлу базы
 [features]
 default = []
 sync = ["zstd", "memmap"]
-async = ["futures", "futures-core", "futures-util", "fmmap", "tokio", "async-compression", "async-stream", "pin-project"]
+async = [
+    "futures",
+    "futures-core",
+    "futures-util",
+    "fmmap",
+    "tokio",
+    "async-compression",
+    "async-stream",
+    "pin-project",
+]
 source = ["zip"]
-source_async = ["async_zip", "tokio", "futures", "futures-core", "futures-util", "async-stream"]
+source_async = [
+    "async_zip",
+    "tokio",
+    "futures",
+    "futures-core",
+    "futures-util",
+    "async-stream",
+]
 convert = ["zip"]
-convert_async = ["futures", "futures-core", "futures-util", "async-stream", "async_zip", "tokio"]
+convert_async = [
+    "futures",
+    "futures-core",
+    "futures-util",
+    "async-stream",
+    "async_zip",
+    "tokio",
+]
 
 [dependencies]
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0"
+bincode = "^2.0.0-rc.2"
 zip = { version = "0.6", optional = true }
 async_zip = { version = "0.0.15" , features = [
     "zstd",
@@ -46,7 +70,6 @@ async-stream = { version = "0.3", optional = true }
 zstd = { version = "^0.12", default-features = false, optional = true }
 memmap = { version = "0.7.0", optional = true }
 pin-project = { version = "1.1.3", optional = true }
-postcard = { version = "1.0.6", default-features = false }
 
 [dev-dependencies]
 insta = { version = "1.31.0", features = ["yaml"] }

View File

@@ -24,9 +24,9 @@ use fmmap::tokio::{AsyncMmapFile, AsyncMmapFileExt, AsyncOptions};
 type LSize = u32;
 const LEN_SIZE: usize = std::mem::size_of::<LSize>();
+const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
 
-use serde::{de::DeserializeOwned, Serialize};
+use crate::util::BincodeVecWriter;
 use crate::util::ErrorToString;
 
 pub struct WriterOpts {
@@ -42,18 +42,18 @@ impl Default for WriterOpts {
             compress_lvl: Level::Default,
             data_buf_size: 500 * 1024 * 1024,
             out_buf_size: 200 * 1024 * 1024,
-            current_buf_size: 1024 * 1024,
+            current_buf_size: 100 * 1024,
         }
     }
 }
 
 pub struct Writer<T>
 where
-    T: Serialize,
+    T: bincode::Encode,
 {
     out: io::BufWriter<fs::File>,
     data_buf: Vec<u8>,
-    cur_buf_item: Vec<u8>,
+    cur_buf_item: BincodeVecWriter,
     table: Vec<LSize>,
     compress_lvl: Level,
     _t: PhantomData<Arc<T>>,
@@ -61,13 +61,14 @@ where
 impl<T> Writer<T>
 where
-    T: Serialize,
+    T: bincode::Encode,
 {
     pub async fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
         let out = fs::File::create(path).await.str_err()?;
         let out = io::BufWriter::with_capacity(opts.out_buf_size, out);
 
         let data_buf: Vec<u8> = Vec::with_capacity(opts.data_buf_size);
-        let cur_buf_item: Vec<u8> = vec![0; opts.current_buf_size];
+        let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
+        let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
 
         let compress_lvl = opts.compress_lvl;
@@ -87,16 +88,16 @@ where
         self.push_by_ref(&item).await
     }
 
+    #[allow(clippy::useless_asref)]
     pub async fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
         let pos: LSize = self.data_buf.len() as LSize;
 
-        let cur_item_data = postcard::to_slice(item, self.cur_buf_item.as_mut_slice()).str_err()?;
-        let mut zencoder = ZstdEncoder::with_quality(cur_item_data.as_ref(), self.compress_lvl);
+        bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?;
+        let mut zencoder = ZstdEncoder::with_quality(&self.cur_buf_item[..], self.compress_lvl);
         io::copy(&mut zencoder, &mut self.data_buf)
             .await
             .str_err()?;
 
-        cur_item_data.fill(0);
+        self.cur_buf_item.clear();
 
         self.table.push(pos);
@@ -158,7 +159,7 @@ use pin_project::pin_project;
 /// only work when ..push.poll() returns Ready immediately
 pub struct WriterSink<'a, T>
 where
-    T: Serialize,
+    T: bincode::Encode,
 {
     #[pin]
     writer: &'a mut Writer<T>,
@@ -167,7 +168,7 @@ where
 
 impl<'a, T> Sink<T> for WriterSink<'a, T>
 where
-    T: Serialize,
+    T: bincode::Encode,
 {
     type Error = String;
@@ -212,7 +213,7 @@ where
 
 pub struct Reader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     mmap: AsyncMmapFile,
     count: usize,
@@ -222,7 +223,7 @@ where
 
 impl<T> Reader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     pub async fn new<P: AsRef<Path>>(path: P) -> Result<Self, String> {
         let mmap = AsyncOptions::new()
@@ -291,10 +292,10 @@ where
         decoder.read_to_end(data_buf).await.str_err()?;
 
         // decode item
-        let (item, _): (T, _) = postcard::take_from_bytes(data_buf).str_err()?;
+        let item: (T, usize) = bincode::decode_from_slice(data_buf, BINCODE_CFG).str_err()?;
         data_buf.clear();
 
-        Ok(item)
+        Ok(item.0)
     }
 
     /// get item at index
@@ -310,7 +311,7 @@ where
 
 pub struct ReaderStream<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     reader: &'a Reader<T>,
     index: Option<usize>,
@@ -318,7 +319,7 @@ where
 
 impl<'a, T> ReaderStream<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn new(reader: &'a Reader<T>) -> Self {
         ReaderStream {
@@ -330,7 +331,7 @@ where
 
 impl<'a, T> Stream for ReaderStream<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     type Item = T;
@@ -374,7 +375,7 @@ where
 
 pub struct BufReader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     inner: Reader<T>,
     buf: Vec<u8>,
@@ -382,7 +383,7 @@ where
 
 impl<T> BufReader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     pub async fn new<P: AsRef<Path>>(path: P, buf_size: usize) -> Result<Self, String> {
         match Reader::<T>::new(path).await {
@@ -409,7 +410,7 @@ where
 
 impl<T> From<Reader<T>> for BufReader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn from(inner: Reader<T>) -> Self {
         Self {
@@ -421,7 +422,7 @@ where
 
 impl<T> From<BufReader<T>> for Reader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn from(value: BufReader<T>) -> Self {
         value.into_inner()
@@ -430,7 +431,7 @@ where
 
 impl<T> Deref for BufReader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     type Target = Reader<T>;
     fn deref(&self) -> &Self::Target {
@@ -440,7 +441,7 @@ where
 
 pub struct BufReaderStream<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     reader: BufReader<T>,
     index: Option<usize>,
@@ -448,7 +449,7 @@ where
 
 impl<T> BufReaderStream<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn new(reader: BufReader<T>) -> Self {
         BufReaderStream {
@@ -472,7 +473,7 @@ where
 
 impl<T> Stream for BufReaderStream<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     type Item = T;
@@ -515,10 +516,9 @@ where
 mod test {
     use super::*;
     use core::fmt::Debug;
-    use serde_derive::Deserialize;
    use tempfile::tempdir;
 
-    #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+    #[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
     struct TestData {
         num: u64,
         test: String,

View File

@@ -6,13 +6,13 @@ use std::{
     sync::Arc,
 };
 
-use serde::{de::DeserializeOwned, Serialize};
 use memmap::{Mmap, MmapOptions};
 
 type LSize = u32;
 const LEN_SIZE: usize = std::mem::size_of::<LSize>();
+const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();
 
+use crate::util::BincodeVecWriter;
 use crate::util::ErrorToString;
 
 pub struct WriterOpts {
@@ -28,19 +28,19 @@ impl Default for WriterOpts {
             compress_lvl: 1,
             data_buf_size: 500 * 1024 * 1024,
             out_buf_size: 200 * 1024 * 1024,
-            current_buf_size: 20 * 1024,
+            current_buf_size: 100 * 1024,
         }
     }
 }
 
 pub struct Writer<T>
 where
-    T: Serialize,
+    T: bincode::Encode,
 {
     out: io::BufWriter<fs::File>,
     data_buf: Cursor<Vec<u8>>,
     cur_buf_raw: Cursor<Vec<u8>>,
-    cur_buf_item: Vec<u8>,
+    cur_buf_item: BincodeVecWriter,
     table: Vec<LSize>,
     compress_lvl: i32,
     _t: PhantomData<Arc<T>>,
@@ -48,7 +48,7 @@ where
 impl<T> Writer<T>
 where
-    T: Serialize,
+    T: bincode::Encode,
 {
     pub fn new<P: AsRef<Path>>(path: P, opts: WriterOpts) -> Result<Self, String> {
         let out = fs::File::create(path).str_err()?;
@@ -58,7 +58,8 @@ where
         let cur_buf_raw: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
         let cur_buf_raw = Cursor::new(cur_buf_raw);
 
-        let cur_buf_item: Vec<u8> = vec![0; opts.current_buf_size];
+        let cur_buf_item: Vec<u8> = Vec::with_capacity(opts.current_buf_size);
+        let cur_buf_item = BincodeVecWriter::new(cur_buf_item);
 
         let compress_lvl = opts.compress_lvl;
@@ -82,20 +83,19 @@ where
     pub fn push_by_ref(&mut self, item: &T) -> Result<(), String> {
         let pos: LSize = self.data_buf.position() as LSize;
 
-        let cur_item_data = postcard::to_slice(item, self.cur_buf_item.as_mut_slice()).str_err()?;
+        bincode::encode_into_writer(item, &mut self.cur_buf_item, BINCODE_CFG).str_err()?;
 
         let mut zencoder = zstd::stream::raw::Encoder::new(self.compress_lvl).str_err()?;
         zencoder
-            .set_pledged_src_size(Some(cur_item_data.len() as u64))
+            .set_pledged_src_size(Some(self.cur_buf_item.len() as u64))
            .str_err()?;
         self.cur_buf_raw.set_position(0);
         let mut cur_buf_z = zstd::stream::zio::Writer::new(&mut self.cur_buf_raw, zencoder);
 
-        cur_buf_z.write_all(cur_item_data).str_err()?;
+        cur_buf_z.write_all(&self.cur_buf_item).str_err()?;
         cur_buf_z.finish().str_err()?;
         cur_buf_z.flush().str_err()?;
+        self.cur_buf_item.clear();
 
-        cur_item_data.fill(0);
-
         self.table.push(pos);
         let (cur_buf_raw, _) = cur_buf_z.into_inner();
@@ -150,7 +150,7 @@ where
 pub struct Reader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     mmap: Mmap,
     count: usize,
@@ -160,7 +160,7 @@ where
 impl<T> Reader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     pub fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> {
         let file = fs::File::open(path).str_err()?;
@@ -215,9 +215,9 @@ where
         let data = zstd::decode_all(reader).str_err()?;
 
         // decode item
-        let (item, _): (T, _) = postcard::take_from_bytes(&data).str_err()?;
+        let item: (T, usize) = bincode::decode_from_slice(&data, BINCODE_CFG).str_err()?;
 
-        Ok(item)
+        Ok(item.0)
     }
 
     pub fn iter(&self) -> ReaderIter<'_, T> {
@@ -227,7 +227,7 @@ where
 pub struct ReaderIter<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     reader: &'a Reader<T>,
     index: Option<usize>,
@@ -235,7 +235,7 @@ where
 impl<'a, T> ReaderIter<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn new(reader: &'a Reader<T>) -> Self {
         ReaderIter {
@@ -247,7 +247,7 @@ where
 impl<'a, T> Iterator for ReaderIter<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     type Item = T;
@@ -300,7 +300,7 @@ where
 impl<'a, T> ExactSizeIterator for ReaderIter<'a, T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn len(&self) -> usize {
         self.reader.len()
@@ -309,7 +309,7 @@ where
 pub struct ReaderIntoIter<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     reader: Reader<T>,
     index: Option<usize>,
@@ -317,7 +317,7 @@ where
 impl<T> ReaderIntoIter<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn new(reader: Reader<T>) -> Self {
         Self {
@@ -329,7 +329,7 @@ where
 impl<T> Iterator for ReaderIntoIter<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     type Item = T;
@@ -382,7 +382,7 @@ where
 impl<T> ExactSizeIterator for ReaderIntoIter<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     fn len(&self) -> usize {
         self.reader.len()
@@ -391,7 +391,7 @@ where
 impl<T> IntoIterator for Reader<T>
 where
-    T: DeserializeOwned,
+    T: bincode::Decode,
 {
     type Item = T;
     type IntoIter = ReaderIntoIter<Self::Item>;
@@ -404,10 +404,9 @@ where
 #[cfg(test)]
 mod test {
     use super::*;
-    use serde::Deserialize;
     use tempfile::tempdir;
 
-    #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+    #[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
     struct TestData {
         num: u64,
         test: String,
@@ -428,7 +427,7 @@ mod test {
             compress_lvl: 1,
             data_buf_size: 10 * 1024 * 1024,
             out_buf_size: 10 * 1024 * 1024,
-            current_buf_size: 20 * 1024,
+            current_buf_size: 4096,
         };
 
         let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
@@ -455,7 +454,7 @@ mod test {
             compress_lvl: 1,
             data_buf_size: 10 * 1024 * 1024,
             out_buf_size: 10 * 1024 * 1024,
-            current_buf_size: 20 * 1024,
+            current_buf_size: 4096,
         };
 
         let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
@@ -481,7 +480,7 @@ mod test {
             compress_lvl: 1,
             data_buf_size: 10 * 1024 * 1024,
             out_buf_size: 10 * 1024 * 1024,
-            current_buf_size: 20 * 1024,
+            current_buf_size: 4096,
         };
 
         let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");
@@ -510,7 +509,7 @@ mod test {
             compress_lvl: 1,
             data_buf_size: 10 * 1024 * 1024,
             out_buf_size: 10 * 1024 * 1024,
-            current_buf_size: 20 * 1024,
+            current_buf_size: 4096,
         };
 
         let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).expect("new writer");

View File

@@ -1,6 +1,8 @@
 use serde_derive::{Deserialize, Serialize};
 
-#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
+#[derive(
+    Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
+)]
 pub struct BatchInfo {
     #[serde(default, skip_serializing_if = "String::is_empty")]
     pub filename: String,
@@ -30,7 +32,9 @@ pub struct BatchInfo {
     pub rating: String,
 }
 
-#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
+#[derive(
+    Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode, PartialEq,
+)]
 pub struct Question {
     #[serde(default, skip_serializing_if = "u32_is_zero")]
     pub num: u32,
@@ -79,187 +83,9 @@ impl BatchInfo {
     }
 }
 
-pub mod binary {
-    use serde_derive::{Deserialize, Serialize};
-
-    #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
-    pub struct BatchInfo {
-        #[serde(default)]
-        pub filename: String,
-        #[serde(default)]
-        pub description: String,
-        #[serde(default)]
-        pub author: String,
-        #[serde(default)]
-        pub comment: String,
-        #[serde(default)]
-        pub url: String,
-        #[serde(default)]
-        pub date: String,
-        #[serde(default)]
-        pub processed_by: String,
-        #[serde(default)]
-        pub redacted_by: String,
-        #[serde(default)]
-        pub copyright: String,
-        #[serde(default)]
-        pub theme: String,
-        #[serde(default)]
-        pub kind: String,
-        #[serde(default)]
-        pub source: String,
-        #[serde(default)]
-        pub rating: String,
-    }
-
-    #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq)]
-    pub struct Question {
-        #[serde(default)]
-        pub num: u32,
-        pub id: String,
-        pub description: String,
-        pub answer: String,
-        #[serde(default)]
-        pub author: String,
-        #[serde(default)]
-        pub comment: String,
-        #[serde(default)]
-        pub comment1: String,
-        #[serde(default)]
-        pub tour: String,
-        #[serde(default)]
-        pub url: String,
-        #[serde(default)]
-        pub date: String,
-        #[serde(default)]
-        pub processed_by: String,
-        #[serde(default)]
-        pub redacted_by: String,
-        #[serde(default)]
-        pub copyright: String,
-        #[serde(default)]
-        pub theme: String,
-        #[serde(default)]
-        pub kind: String,
-        #[serde(default)]
-        pub source: String,
-        #[serde(default)]
-        pub rating: String,
-        #[serde(default)]
-        pub batch_info: BatchInfo,
-    }
-
-    #[cfg(test)]
-    mod test {
-        use super::*;
-        use insta::assert_yaml_snapshot;
-        use serde_json::json;
-
-        pub fn sample_question() -> Question {
-            Question {
-                id: "Вопрос 1".into(),
-                description: "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2".into(),
-                answer: "42".into(),
-                batch_info: BatchInfo {
-                    description: "Тестовый".into(),
-                    date: "00-000-2000".into(),
-                    ..Default::default()
-                },
-                ..Default::default()
-            }
-        }
-
-        #[test]
-        fn test_question_ser() {
-            assert_yaml_snapshot!(sample_question(), @r#"
-            ---
-            num: 0
-            id: Вопрос 1
-            description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
-            answer: "42"
-            author: ""
-            comment: ""
-            comment1: ""
-            tour: ""
-            url: ""
-            date: ""
-            processed_by: ""
-            redacted_by: ""
-            copyright: ""
-            theme: ""
-            kind: ""
-            source: ""
-            rating: ""
-            batch_info:
-              filename: ""
-              description: Тестовый
-              author: ""
-              comment: ""
-              url: ""
-              date: 00-000-2000
-              processed_by: ""
-              redacted_by: ""
-              copyright: ""
-              theme: ""
-              kind: ""
-              source: ""
-              rating: ""
-            "#);
-        }
-
-        #[test]
-        fn test_question_de() {
-            let question_from_json: Result<Question, _> = serde_json::from_value(json!({
-                "id": "Вопрос 1",
-                "description": "Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2",
-                "answer": "42",
-                "batch_info": {
-                    "description": "Тестовый",
-                    "date": "00-000-2000"
-                }
-            }));
-            assert!(question_from_json.is_ok());
-            assert_yaml_snapshot!(question_from_json.unwrap(), @r#"
-            ---
-            num: 0
-            id: Вопрос 1
-            description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
-            answer: "42"
-            author: ""
-            comment: ""
-            comment1: ""
-            tour: ""
-            url: ""
-            date: ""
-            processed_by: ""
-            redacted_by: ""
-            copyright: ""
-            theme: ""
-            kind: ""
-            source: ""
-            rating: ""
-            batch_info:
-              filename: ""
-              description: Тестовый
-              author: ""
-              comment: ""
-              url: ""
-              date: 00-000-2000
-              processed_by: ""
-              redacted_by: ""
-              copyright: ""
-              theme: ""
-              kind: ""
-              source: ""
-              rating: ""
-            "#);
-        }
-    }
-}
-
 #[cfg(any(feature = "convert", feature = "convert_async"))]
 pub mod convert_common {
-    use super::binary::{BatchInfo, Question};
+    use super::{BatchInfo, Question};
     use crate::source::{SourceQuestion, SourceQuestionsBatch};
 
     macro_rules! make {
@@ -307,7 +133,7 @@ pub mod convert_common {
 #[cfg(feature = "convert")]
 pub mod convert {
-    use super::binary::Question;
+    use super::Question;
     use crate::source::SourceQuestionsBatch;
 
     pub trait QuestionsConverter {
@@ -348,68 +174,20 @@ pub mod convert {
             let converted: Vec<_> = source.convert().collect();
             assert_yaml_snapshot!(converted, @r#"
             ---
-            - num: 0
-              id: Вопрос 1
+            - id: Вопрос 1
               description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
               answer: "42"
-              author: ""
-              comment: ""
-              comment1: ""
-              tour: ""
-              url: ""
-              date: ""
-              processed_by: ""
-              redacted_by: ""
-              copyright: ""
-              theme: ""
-              kind: ""
-              source: ""
-              rating: ""
               batch_info:
                 filename: test.json
                 description: Тестовый
-                author: ""
-                comment: ""
-                url: ""
                 date: 00-000-2000
-                processed_by: ""
-                redacted_by: ""
-                copyright: ""
-                theme: ""
-                kind: ""
-                source: ""
-                rating: ""
-            - num: 0
-              id: Вопрос 2
+            - id: Вопрос 2
               description: Зимой и летом одним цветом
               answer: ёлка
-              author: ""
-              comment: ""
-              comment1: ""
-              tour: ""
-              url: ""
-              date: ""
-              processed_by: ""
-              redacted_by: ""
-              copyright: ""
-              theme: ""
-              kind: ""
-              source: ""
-              rating: ""
               batch_info:
                 filename: test.json
                 description: Тестовый
-                author: ""
-                comment: ""
-                url: ""
                 date: 00-000-2000
-                processed_by: ""
-                redacted_by: ""
-                copyright: ""
-                theme: ""
-                kind: ""
-                source: ""
-                rating: ""
             "#);
         }
@@ -424,7 +202,7 @@ pub mod convert_async {
     use futures_core::stream::Stream;
     use futures_util::StreamExt;
 
-    use super::binary::Question;
+    use super::Question;
     use crate::source::SourceQuestionsBatch;
 
     pub struct QuestionsConverterAsync<T>
@@ -511,68 +289,20 @@ pub mod convert_async {
             let converted: Vec<_> = converter.collect().await;
             assert_yaml_snapshot!(converted, @r#"
             ---
-            - num: 0
-              id: Вопрос 1
+            - id: Вопрос 1
               description: Сколько будет (2 * 2 * 2 + 2) * 2 * 2 + 2
               answer: "42"
-              author: ""
-              comment: ""
-              comment1: ""
-              tour: ""
-              url: ""
-              date: ""
-              processed_by: ""
-              redacted_by: ""
-              copyright: ""
-              theme: ""
-              kind: ""
-              source: ""
-              rating: ""
               batch_info:
                 filename: test.json
                 description: Тестовый
-                author: ""
-                comment: ""
-                url: ""
                 date: 00-000-2000
-                processed_by: ""
-                redacted_by: ""
-                copyright: ""
-                theme: ""
-                kind: ""
-                source: ""
-                rating: ""
-            - num: 0
-              id: Вопрос 2
+            - id: Вопрос 2
               description: Зимой и летом одним цветом
               answer: ёлка
-              author: ""
-              comment: ""
-              comment1: ""
-              tour: ""
-              url: ""
-              date: ""
-              processed_by: ""
-              redacted_by: ""
-              copyright: ""
-              theme: ""
-              kind: ""
-              source: ""
-              rating: ""
               batch_info:
                 filename: test.json
                 description: Тестовый
-                author: ""
-                comment: ""
-                url: ""
                 date: 00-000-2000
-                processed_by: ""
-                redacted_by: ""
-                copyright: ""
-                theme: ""
-                kind: ""
-                source: ""
-                rating: ""
             "#);
         }
@@ -586,6 +316,7 @@ mod test {
     use super::*;
     use insta::assert_yaml_snapshot;
     use serde_json::json;
+
     #[cfg(any(feature = "convert", feature = "convert_async"))]
     pub mod convert_common {
         use crate::source::{SourceQuestion, SourceQuestionsBatch};

View File

@@ -12,3 +12,46 @@ where
         self.map_err(|e| e.to_string())
     }
 }
+
+#[cfg(any(feature = "sync", feature = "async"))]
+mod bincode_utils {
+    use std::ops::{Deref, DerefMut};
+
+    use bincode::enc::write::Writer;
+    use bincode::error::EncodeError;
+
+    /// struct that allows [`Vec<u8>`] to implement [bincode::enc::write::Writer] trait
+    pub struct BincodeVecWriter {
+        vec: Vec<u8>,
+    }
+
+    impl BincodeVecWriter {
+        pub fn new(vec: Vec<u8>) -> BincodeVecWriter {
+            BincodeVecWriter { vec }
+        }
+    }
+
+    impl Deref for BincodeVecWriter {
+        type Target = Vec<u8>;
+
+        fn deref(&self) -> &Self::Target {
+            &self.vec
+        }
+    }
+
+    impl DerefMut for BincodeVecWriter {
+        fn deref_mut(&mut self) -> &mut Self::Target {
+            &mut self.vec
+        }
+    }
+
+    impl Writer for BincodeVecWriter {
+        fn write(&mut self, bytes: &[u8]) -> Result<(), EncodeError> {
+            self.vec.extend_from_slice(bytes);
+            Ok(())
+        }
+    }
+}
+
+#[cfg(any(feature = "sync", feature = "async"))]
+pub use bincode_utils::BincodeVecWriter;
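
For reference, a minimal sketch of how the new BincodeVecWriter helper is meant to be used, mirroring the encode_into_writer / decode_from_slice calls in the db/async_db diffs above (the Item type and the buffer size here are illustrative, not from the repository):

// Illustrative sketch, assuming the crate re-exports BincodeVecWriter from util.
use chgk_ledb_lib::util::BincodeVecWriter;

const BINCODE_CFG: bincode::config::Configuration = bincode::config::standard();

#[derive(bincode::Encode, bincode::Decode, Debug, PartialEq)]
struct Item {
    num: u64,
    test: String,
}

fn push_and_read(item: &Item) -> Result<Item, String> {
    // Encode through the Vec<u8>-backed bincode Writer, as the writers now do
    // before handing the bytes to the zstd encoder.
    let mut buf = BincodeVecWriter::new(Vec::with_capacity(4096));
    bincode::encode_into_writer(item, &mut buf, BINCODE_CFG).map_err(|e| e.to_string())?;

    // Decode straight from the accumulated bytes; decode_from_slice also reports
    // how many bytes were consumed.
    let (value, _read): (Item, usize) =
        bincode::decode_from_slice(&buf[..], BINCODE_CFG).map_err(|e| e.to_string())?;

    // The writers clear the buffer after each item so the allocation is reused.
    buf.clear();
    Ok(value)
}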