Compare commits

...

6 Commits

SHA1        Message                             Date
a496f37ef2  ci: build/test with --all-features  2023-08-06 01:09:57 +03:00  (CI: continuous-integration/drone/push passing)
5632c19866  ci: add pipeline type               2023-08-06 01:09:32 +03:00
cad8ff0404  add async test_share_reader         2023-08-06 01:05:36 +03:00
39ce0d8ceb  add async reader stream             2023-08-06 00:56:49 +03:00
1ba645f337  add async reader                    2023-08-06 00:12:15 +03:00
150527efeb  move macro to convert mod           2023-08-06 00:11:44 +03:00
3 changed files with 274 additions and 18 deletions

View File

@@ -1,4 +1,5 @@
 kind: pipeline
+type: docker
 name: default
 steps:
@@ -6,8 +7,8 @@ steps:
   image: rust:1-alpine
   commands:
     - apk add --no-cache musl-dev
-    - cargo build --verbose --all
-    - cargo test --verbose --all
+    - cargo build --verbose --all-features --all
+    - cargo test --verbose --all-features --all
   environment:
     CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
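A note on why `--all-features` matters here: the crate gates code behind Cargo features — the third file below compiles its `convert` module only under `#[cfg(feature = "source")]`. Assuming `source` is not a default feature, plain `cargo build`/`cargo test` would skip that module entirely; with `--all-features` CI builds and tests it either way.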

View File

@@ -1,21 +1,22 @@
 use std::{marker::PhantomData, path::Path, sync::Arc};
 use async_stream::stream;
 use tokio::pin;
 use async_compression::tokio::bufread::ZstdDecoder;
 use async_compression::tokio::write::ZstdEncoder;
 use async_compression::Level;
-use futures::stream::{self, StreamExt};
+use futures::stream::StreamExt;
 use futures_core::stream::Stream;
 use futures_core::Future;
 use futures_util::pin_mut;
 use std::pin::Pin;
 use std::task::{Context, Poll};
 use tokio::{
     fs,
-    io::{self, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt},
+    io::{self, AsyncReadExt, AsyncWriteExt},
 };
-use fmmap::tokio::{AsyncMmapFile, AsyncOptions};
+use fmmap::tokio::{AsyncMmapFile, AsyncMmapFileExt, AsyncOptions};

 type LSize = u32;
 const LEN_SIZE: usize = std::mem::size_of::<LSize>();
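For orientation, the container layout the new reader implies; this is inferred from `Reader::new`/`get` below, not spelled out anywhere in the commit:

// Inferred on-disk layout (an assumption, reconstructed from the reader code):
//   - a table of (count + 1) LSize (u32, little-endian) offsets; entry i is the
//     byte offset where record i starts, so entry 0 == first_pos == the table's
//     own size, and the final entry marks the end of the last record
//   - the records themselves: zstd-compressed, bincode-encoded, back to back
// Hence count = first_pos / LEN_SIZE - 1, as computed in Reader::new.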
@@ -137,9 +138,165 @@ where
    }
}
pub struct Reader<T>
where
    T: bincode::Decode,
{
    mmap: AsyncMmapFile,
    count: usize,
    first_pos: LSize,
    _t: Option<Arc<T>>, // PhantomData replacement
}

impl<T> Reader<T>
where
    T: bincode::Decode,
{
    pub async fn new<P: AsRef<Path>>(path: P, _buf_size: usize) -> Result<Self, String> {
        let mmap = AsyncOptions::new()
            .read(true)
            .open_mmap_file(path)
            .await
            .str_err()?;
        mmap.try_lock_shared().str_err()?;

        // read first pos and records count
        let first_data: [u8; LEN_SIZE] = mmap.bytes(0, LEN_SIZE).str_err()?.try_into().str_err()?;
        let first_pos = LSize::from_le_bytes(first_data);
        let tab_len = (first_pos as usize) / LEN_SIZE;
        let count = tab_len - 1;

        Ok(Self {
            mmap,
            count,
            first_pos,
            _t: None,
        })
    }

    pub fn len(&self) -> usize {
        self.count
    }

    pub fn is_empty(&self) -> bool {
        0 == self.len()
    }

    pub async fn get(&self, index: usize) -> Result<T, String> {
        if index >= self.len() {
            return Err("index out of range".into());
        }
        let next_pos: usize = (index + 1) * LEN_SIZE;

        // read item data pos
        let data_pos = if 0 == index {
            self.first_pos
        } else {
            let tab_pos: usize = index * LEN_SIZE;
            let pos_curr_data: [u8; LEN_SIZE] = self
                .mmap
                .bytes(tab_pos, LEN_SIZE)
                .str_err()?
                .try_into()
                .str_err()?;
            LSize::from_le_bytes(pos_curr_data)
        } as usize;

        // read next item pos
        let pos_next_data: [u8; LEN_SIZE] = self
            .mmap
            .bytes(next_pos, LEN_SIZE)
            .str_err()?
            .try_into()
            .str_err()?;
        let data_pos_next = LSize::from_le_bytes(pos_next_data) as usize;
        let data_len = data_pos_next - data_pos;

        // read & unpack item data
        let mut decoder = ZstdDecoder::new(self.mmap.range_reader(data_pos, data_len).str_err()?);
        let mut data = Vec::<u8>::new();
        decoder.read_to_end(&mut data).await.str_err()?;

        // decode item
        let item: (T, usize) = bincode::decode_from_slice(&data, BINCODE_CFG).str_err()?;
        Ok(item.0)
    }

    pub fn stream(&self) -> ReaderStream<'_, T> {
        ReaderStream::new(self)
    }
}
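Two names in this hunk are defined in unchanged parts of the file and therefore never appear in the diff: `str_err()` (judging by the call sites, an extension method that maps assorted error types to `String`) and `BINCODE_CFG` (the shared bincode configuration). Both readings are presumptions from context, not shown code.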
pub struct ReaderStream<'a, T>
where
    T: bincode::Decode,
{
    reader: &'a Reader<T>,
    index: Option<usize>,
}

impl<'a, T> ReaderStream<'a, T>
where
    T: bincode::Decode,
{
    fn new(reader: &'a Reader<T>) -> Self {
        ReaderStream { reader, index: None }
    }
}

impl<'a, T> Stream for ReaderStream<'a, T>
where
    T: bincode::Decode,
{
    type Item = T;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
        // an empty reader yields nothing; otherwise start at the first record
        if self.index.is_none() {
            if self.reader.is_empty() {
                return Poll::Ready(None);
            }
            self.index = Some(0);
        }
        let index = self.index.unwrap();
        if index == self.reader.len() {
            return Poll::Ready(None);
        }
        let future = self.reader.get(index);
        pin_mut!(future);
        match Pin::new(&mut future).poll(cx) {
            Poll::Ready(Ok(item)) => {
                self.index = Some(index + 1);
                Poll::Ready(Some(item))
            }
            Poll::Ready(Err(_)) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.reader.len();
        // `index` is the next record to yield, so exactly `len - index` items remain
        let rem = match self.index {
            None => len,
            Some(index) => len.saturating_sub(index),
        };
        (rem, Some(rem))
    }
}
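One caveat, as an observation rather than a change: `poll_next` creates a fresh `get` future on every poll and drops it whenever it returns `Pending`, so any partial progress would be lost. That is harmless only as long as the mmap-backed reads effectively complete on the first poll; a stream that kept its in-flight future across polls (or one generated with `async_stream::stream!`, which is already imported at the top of the file) would be the robust variant.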
#[cfg(test)]
mod test {
    use super::*;
    use async_stream::stream;
    use core::fmt::Debug;
    use tempfile::tempdir;

    #[derive(bincode::Encode, bincode::Decode, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
@@ -185,4 +342,103 @@ mod test {
        writer.load(src).await.expect("load");
        writer.finish().await.expect("finish write");
    }
    #[tokio::test]
    async fn test_write_read() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let opts = WriterOpts {
            compress_lvl: Level::Default,
            data_buf_size: 10 * 1024 * 1024,
            out_buf_size: 10 * 1024 * 1024,
        };
        let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");

        let items_iter = gen_data(5);
        let items: Vec<TestData> = items_iter.collect();

        let src = stream_iter(items.clone().into_iter());
        pin_mut!(src);
        writer.load(src).await.expect("load");
        writer.finish().await.expect("finish write");

        let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).await.expect("new reader");
        assert_eq!(items.len(), reader.len());

        for (idx, item) in items.iter().enumerate() {
            let ritem = reader.get(idx).await.expect("get");
            assert_eq!(*item, ritem);
        }
    }
    #[tokio::test]
    async fn test_write_read_stream() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let opts = WriterOpts {
            compress_lvl: Level::Default,
            data_buf_size: 10 * 1024 * 1024,
            out_buf_size: 10 * 1024 * 1024,
        };
        let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");

        let items_iter = gen_data(5);
        let items: Vec<TestData> = items_iter.collect();

        let src = stream_iter(items.clone().into_iter());
        pin_mut!(src);
        writer.load(src).await.expect("load");
        writer.finish().await.expect("finish write");

        let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).await.expect("new reader");
        assert_eq!(items.len(), reader.len());

        let dst_stream = reader.stream();
        let src_stream = stream_iter(items.iter());

        async fn test_values((x, y): (&TestData, TestData)) {
            assert_eq!(*x, y);
        }
        src_stream.zip(dst_stream).for_each(test_values).await;
    }
    /// sharing Reader instance between threads
    #[tokio::test]
    async fn test_share_reader() {
        let dir = tempdir().expect("tempdir");
        let tmpfile = dir.path().join("test.tmp");
        let opts = WriterOpts {
            compress_lvl: Level::Default,
            data_buf_size: 10 * 1024 * 1024,
            out_buf_size: 10 * 1024 * 1024,
        };
        let mut writer: Writer<TestData> = Writer::new(&tmpfile, opts).await.expect("new writer");

        let items_iter = gen_data(5);
        let items: Vec<TestData> = items_iter.collect();

        let src = stream_iter(items.clone().into_iter());
        pin_mut!(src);
        writer.load(src).await.expect("load");
        writer.finish().await.expect("finish write");

        let reader: Reader<TestData> = Reader::new(&tmpfile, 2048).await.expect("new reader");
        assert_eq!(items.len(), reader.len());

        async fn test_values((x, y): (&TestData, TestData)) {
            assert_eq!(*x, y);
        }

        let reader = Arc::new(reader);
        // keep the join handles so the test actually waits for every task's assertions
        let mut handles = Vec::new();
        for _ in 0..=3 {
            let cur_items = items.clone();
            let cur_reader = Arc::clone(&reader);
            handles.push(tokio::spawn(async move {
                let dst_stream = cur_reader.stream();
                let src_stream = stream_iter(cur_items.iter());
                src_stream.zip(dst_stream).for_each(test_values).await;
            }));
        }
        for handle in handles {
            handle.await.expect("join spawned task");
        }
    }
}
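To round the picture off, a minimal consumer sketch of the new API. This is illustrative, not part of the commit: `data.bin` is a placeholder path, `TestData` stands in for any bincode-decodable type, and the snippet assumes it runs inside an async fn returning `Result<(), String>`:

    use futures::stream::StreamExt;

    let reader: Reader<TestData> = Reader::new("data.bin", 2048).await?;
    println!("{} records", reader.len());

    // random access by index...
    let first: TestData = reader.get(0).await?;
    println!("first = {first:?}");

    // ...or sequential access through the Stream impl; ReaderStream is Unpin
    // (a shared reference plus an Option<usize>), so StreamExt::next works directly
    let mut records = reader.stream();
    while let Some(item) = records.next().await {
        println!("{item:?}");
    }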

View File

@@ -1,14 +1,5 @@
 use serde_derive::{Deserialize, Serialize};

-macro_rules! make {
-    ($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
-        $field: $src.$field
-    ),+}};
-    ($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
-        $field: $src.$field
-    ),+ ,..$Target::default()}}
-}

 #[derive(Debug, Default, Clone, Serialize, Deserialize, bincode::Decode, bincode::Encode)]
 pub struct BatchInfo {
     #[serde(default)]
@@ -80,9 +71,17 @@ pub struct Question {
 #[cfg(feature = "source")]
 pub mod convert {
     use super::{BatchInfo, Question};
     use crate::source::{SourceQuestion, SourceQuestionsBatch};

+    macro_rules! make {
+        ($Target:ident; by {$($field:ident),+}; from $src:expr) => {$Target {$(
+            $field: $src.$field
+        ),+}};
+        ($Target:ident; with defaults and by {$($field:ident),+}; from $src:expr) => {$Target {$(
+            $field: $src.$field
+        ),+ ,..$Target::default()}}
+    }
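    // Illustration only (hypothetical field names, not from this commit):
    //     make!(Question; by {id, description}; from src)
    // expands to
    //     Question { id: src.id, description: src.description }
    // while the `with defaults and by` arm appends `..Question::default()`,
    // filling every remaining field from the Default impl.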
     impl From<SourceQuestion> for Question {
         fn from(src: SourceQuestion) -> Self {