SourceQuestionsZipReader: Iterator
This commit is contained in:
parent
0b20188fce
commit
7389290d80
153
src/main.rs
153
src/main.rs
@ -267,17 +267,104 @@ impl From<SourceQuestionsBatch> for Vec<Question> {
|
||||
}
|
||||
}
|
||||
|
||||
// measure and return time elapsed in `func` in seconds
|
||||
pub fn measure<F: FnOnce()>(func: F) -> f64 {
|
||||
let start = Instant::now();
|
||||
func();
|
||||
let elapsed = start.elapsed();
|
||||
(elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1_000_000_000.0)
|
||||
/// Streaming reader over a zip archive of question batches.
///
/// The `Iterator` implementation yields each archive entry's name together
/// with the result of parsing that entry as a `SourceQuestionsBatch`.
struct SourceQuestionsZipReader<R>
where
    R: std::io::Read + std::io::Seek,
{
    // The underlying zip archive being read.
    zipfile: zip::ZipArchive<R>,
    // Index of the next entry to yield; `None` until iteration starts
    // (see `new` and the `Iterator` impl).
    index: Option<usize>,
}
|
||||
|
||||
pub fn measure_and_print<F: FnOnce()>(func: F) {
|
||||
let m = measure(func);
|
||||
eprintln!("{}", m);
|
||||
impl<R> SourceQuestionsZipReader<R>
|
||||
where
|
||||
R: std::io::Read + std::io::Seek,
|
||||
{
|
||||
fn new(zipfile: zip::ZipArchive<R>) -> Self {
|
||||
SourceQuestionsZipReader {
|
||||
zipfile,
|
||||
index: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> Iterator for SourceQuestionsZipReader<R>
|
||||
where
|
||||
R: std::io::Read + std::io::Seek,
|
||||
{
|
||||
type Item = (String, Result<SourceQuestionsBatch, serde_json::Error>);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index.is_none() && !self.zipfile.is_empty() {
|
||||
self.index = Some(0);
|
||||
}
|
||||
|
||||
match self.index {
|
||||
Some(i) if i < self.zipfile.len() => {
|
||||
self.index = Some(i + 1);
|
||||
|
||||
self.nth(i)
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
if self.zipfile.len() <= n {
|
||||
return None;
|
||||
}
|
||||
self.index = Some(n + 1);
|
||||
|
||||
let file = self.zipfile.by_index(n).unwrap();
|
||||
let name = file.mangled_name();
|
||||
let name_str = name.to_str().unwrap();
|
||||
|
||||
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
|
||||
|
||||
Some((String::from(name_str), data))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.zipfile.len();
|
||||
let index = self.index.unwrap_or(0);
|
||||
let rem = if len > index + 1 {
|
||||
len - (index + 1)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
(rem, Some(rem))
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.zipfile.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> ExactSizeIterator for SourceQuestionsZipReader<R>
|
||||
where
|
||||
R: std::io::Read + std::io::Seek,
|
||||
{
|
||||
fn len(&self) -> usize {
|
||||
self.zipfile.len()
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait for turning an archive into a `SourceQuestionsZipReader`.
trait ReadSourceQuestionsBatches<R>
where
    R: std::io::Read + std::io::Seek,
{
    /// Consumes the archive and returns an iterator over its entries,
    /// each parsed as a `SourceQuestionsBatch`.
    fn source_questions(self) -> SourceQuestionsZipReader<R>;
}
|
||||
|
||||
/// Lets callers write `archive.source_questions()` directly on a
/// `zip::ZipArchive`.
impl<R> ReadSourceQuestionsBatches<R> for zip::ZipArchive<R>
where
    R: std::io::Read + std::io::Seek,
{
    fn source_questions(self) -> SourceQuestionsZipReader<R> {
        // Thin delegation; `new` owns the initialization logic.
        SourceQuestionsZipReader::new(self)
    }
}
|
||||
|
||||
fn write_db() {
|
||||
@ -292,7 +379,7 @@ fn write_db() {
|
||||
|
||||
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
|
||||
let zip_reader = io::BufReader::new(zip_file);
|
||||
let mut archive = zip::ZipArchive::new(zip_reader).unwrap();
|
||||
let archive = zip::ZipArchive::new(zip_reader).unwrap();
|
||||
|
||||
let options: Options = serde_json::from_value(json!({
|
||||
"map_size": 900 * 1024 * 1024, // 900mb
|
||||
@ -311,16 +398,8 @@ fn write_db() {
|
||||
|
||||
let mut count: usize = 0;
|
||||
let count = &mut count;
|
||||
(0..archive.len())
|
||||
.map(|i| {
|
||||
let file = archive.by_index(i).unwrap();
|
||||
let name = file.mangled_name();
|
||||
let name_str = name.to_str().unwrap();
|
||||
|
||||
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
|
||||
|
||||
(String::from(name_str), data)
|
||||
})
|
||||
archive
|
||||
.source_questions()
|
||||
.filter(|(_, data)| data.is_ok())
|
||||
.flat_map(|(filename, data)| {
|
||||
let mut data = data.unwrap();
|
||||
@ -354,19 +433,20 @@ where
|
||||
println!("{:#?}", q)
|
||||
}
|
||||
|
||||
fn read_from_zip(mut file_num: usize, mut num: usize) -> Option<Question> {
|
||||
fn read_from_zip(file_num: usize, mut num: usize) -> Option<Question> {
|
||||
let mut rng = rand::thread_rng();
|
||||
let zip_file = fs::File::open(ZIP_FILENAME).unwrap();
|
||||
let zip_reader = io::BufReader::new(zip_file);
|
||||
let mut archive = zip::ZipArchive::new(zip_reader).unwrap();
|
||||
let archive = zip::ZipArchive::new(zip_reader).unwrap();
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
if file_num == 0 {
|
||||
file_num = (1..=archive.len()).choose(&mut rng).unwrap();
|
||||
}
|
||||
let file = archive.by_index(file_num - 1).unwrap();
|
||||
let data: Result<SourceQuestionsBatch, _> = serde_json::from_reader(file);
|
||||
let data = data.unwrap();
|
||||
let questions: Vec<Question> = data.into();
|
||||
let mut source_questions = archive.source_questions();
|
||||
let (_, batch) = if file_num == 0 {
|
||||
source_questions.choose(&mut rng).unwrap()
|
||||
} else {
|
||||
source_questions.nth(file_num - 1).unwrap()
|
||||
};
|
||||
let batch = batch.unwrap();
|
||||
let questions: Vec<Question> = batch.into();
|
||||
|
||||
if num == 0 {
|
||||
num = (1..=questions.len()).choose(&mut rng).unwrap();
|
||||
@ -413,6 +493,19 @@ fn read_from_db(mut id: u32) -> Option<Question> {
|
||||
collection.get::<Question>(id).unwrap()
|
||||
}
|
||||
|
||||
/// Measures the wall-clock time spent running `func`.
///
/// Returns the elapsed time as a fractional number of seconds.
pub fn measure<F: FnOnce()>(func: F) -> f64 {
    let start = Instant::now();
    func();
    // `Duration::as_secs_f64` (stable since Rust 1.38) replaces the manual
    // `as_secs + subsec_nanos / 1e9` arithmetic with the same computation.
    start.elapsed().as_secs_f64()
}
|
||||
|
||||
pub fn measure_and_print<F: FnOnce()>(func: F) {
|
||||
let m = measure(func);
|
||||
eprintln!("{}", m);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let args = Cli::parse();
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user