
Commit c825746

sontdhustmetalwhale authored and committed
Add test to check that inserting news doesn't wait for fetching to finish.
1 parent: 6c7121f

File tree: 2 files changed (+58, -13 lines)


chloria-backend/chloria-job/src/execution/cases/collect_news.rs

Lines changed: 41 additions & 11 deletions
@@ -143,8 +143,8 @@ mod tests {
     use std::{sync::Arc, time::Duration};
 
     use anyhow::Result;
-    use chrono::Local;
-    use tokio::time;
+    use chrono::{DateTime, Local};
+    use tokio::{sync::Mutex, time};
 
     use super::super::super::{
         ports::{
@@ -160,7 +160,7 @@ mod tests {
     async fn check_required_duration() -> Result<()> {
         const CASE_PERMITS_NUM: usize = 2;
         const CASES_NUM: usize = 3;
-        const PAGES_NUM: usize = 2;
+        const PAGES_NUM: usize = 4;
         const PAGE_LOAD_DURATION: usize = 1000;
         const PAGE_NEWS_NUM: usize = 6;
         const NEWS_LOAD_DURATION: usize = 2000;
@@ -170,7 +170,11 @@ mod tests {
         assert!(CASES_NUM > CASE_PERMITS_NUM);
         assert!(PAGES_NUM * PAGE_NEWS_NUM > TASK_PERMITS_NUM);
         assert!(NEWS_LOAD_DURATION > PAGE_LOAD_DURATION);
-        async fn fetch_news(handler: FetchNewsHandler) -> Vec<FetchNewsOutput> {
+        assert!(PAGES_NUM * NEWS_LOAD_DURATION > PAGE_LOAD_DURATION + NEWS_LOAD_DURATION);
+        async fn fetch_news(
+            finish_fetch_time: Arc<Mutex<Option<DateTime<Local>>>>,
+            handler: FetchNewsHandler,
+        ) -> Vec<FetchNewsOutput> {
             let mut outputs = vec![];
             for _ in 0..PAGES_NUM {
                 time::sleep(Duration::from_millis(PAGE_LOAD_DURATION as u64)).await;
@@ -187,16 +191,33 @@ mod tests {
                     }));
                 }
             }
+            // Time when the first case finishes fetching news
+            let mut finish_fetch_time = finish_fetch_time.lock().await;
+            if finish_fetch_time.is_none() {
+                *finish_fetch_time = Some(Local::now());
+            }
             outputs
         }
         async fn get() -> Result<Vec<u8>> {
             time::sleep(Duration::from_millis(NEWS_LOAD_DURATION as u64)).await;
             Ok(vec![])
         }
+        async fn insert_news(start_insert_time: Arc<Mutex<Option<DateTime<Local>>>>) -> Result<Vec<i32>> {
+            // Time when the first case starts inserting news
+            let mut start_insert_time = start_insert_time.lock().await;
+            if start_insert_time.is_none() {
+                *start_insert_time = Some(Local::now());
+            }
+            Ok(vec![])
+        }
+        let finish_fetch_time = Arc::new(Mutex::new(None));
         let mut mock_news_fetcher = MockNewsFetcher::new();
-        mock_news_fetcher
-            .expect_fetch_news()
-            .returning(|h| Box::pin(fetch_news(h)));
+        {
+            let finish_fetch_time = Arc::clone(&finish_fetch_time);
+            mock_news_fetcher
+                .expect_fetch_news()
+                .returning(move |h| Box::pin(fetch_news(Arc::clone(&finish_fetch_time), h)));
+        }
         let mut mock_http_helper = MockHttpHelper::new();
         mock_http_helper
             .expect_get()
@@ -207,11 +228,15 @@ mod tests {
             .expect_upload_file()
             .times(CASES_NUM * PAGES_NUM * PAGE_NEWS_NUM)
             .returning(|_| Ok("".to_string()));
+        let start_insert_time = Arc::new(Mutex::new(None));
         let mut mock_repository = MockRepository::new();
-        mock_repository
-            .expect_insert_news()
-            .times(CASES_NUM * ((PAGES_NUM * PAGE_NEWS_NUM) as f64 / INSERT_BATCH_SIZE as f64).ceil() as usize)
-            .returning(|_| Ok(vec![]));
+        {
+            let start_insert_time = Arc::clone(&start_insert_time);
+            mock_repository
+                .expect_insert_news()
+                .times(CASES_NUM * ((PAGES_NUM * PAGE_NEWS_NUM) as f64 / INSERT_BATCH_SIZE as f64).ceil() as usize)
+                .returning(move |_| Box::pin(insert_news(Arc::clone(&start_insert_time))));
+        }
         let workshop = Workshop::new(
             vec![Arc::new(mock_news_fetcher)],
             Arc::new(mock_http_helper),
@@ -227,6 +252,11 @@ mod tests {
             cases.push(workshop.execute_collect_news_case(TASK_PERMITS_NUM, INSERT_BATCH_SIZE));
         }
         futures::future::join_all(cases).await;
+        let finish_fetch_time = finish_fetch_time.lock().await;
+        let start_insert_time = start_insert_time.lock().await;
+        assert!(finish_fetch_time.is_some());
+        assert!(start_insert_time.is_some());
+        assert!(finish_fetch_time.unwrap() > start_insert_time.unwrap());
         let measured_duration = (Local::now() - start_time).num_milliseconds() as usize;
         let estimated_duration = (PAGE_LOAD_DURATION
             + ((PAGES_NUM * PAGE_NEWS_NUM) as f64 / TASK_PERMITS_NUM as f64).ceil() as usize * NEWS_LOAD_DURATION)
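
The test's core trick is a pair of first-occurrence timestamps kept in Arc<Mutex<Option<DateTime<Local>>>> cells: the fetch_news mock stamps the moment the first case finishes fetching, the insert_news stub stamps the moment the first batch insertion starts, and the final assertion finish_fetch_time.unwrap() > start_insert_time.unwrap() proves insertion began while fetching was still in progress. The new constant assertion (with these values, 4 × 2000 ms against 1000 ms + 2000 ms) presumably keeps fetching slow enough for that overlap to be observable. Below is a minimal, self-contained sketch of the same first-occurrence timestamp pattern; the stage names, sleep durations, and the record_first helper are illustrative and not taken from this repository.

use std::{sync::Arc, time::Duration};

use chrono::{DateTime, Local};
use tokio::{sync::Mutex, time};

type TimeCell = Arc<Mutex<Option<DateTime<Local>>>>;

// Record the current time only the first time this cell is written.
async fn record_first(cell: &TimeCell) {
    let mut slot = cell.lock().await;
    if slot.is_none() {
        *slot = Some(Local::now());
    }
}

#[tokio::main]
async fn main() {
    let finish_fetch_time: TimeCell = Arc::new(Mutex::new(None));
    let start_insert_time: TimeCell = Arc::new(Mutex::new(None));

    // Simulated slow "fetch" stage: records its finish time after sleeping.
    let fetch = {
        let finish_fetch_time = Arc::clone(&finish_fetch_time);
        tokio::spawn(async move {
            time::sleep(Duration::from_millis(200)).await;
            record_first(&finish_fetch_time).await;
        })
    };
    // Simulated "insert" stage: records its start time immediately.
    let insert = {
        let start_insert_time = Arc::clone(&start_insert_time);
        tokio::spawn(async move {
            record_first(&start_insert_time).await;
            time::sleep(Duration::from_millis(50)).await;
        })
    };
    let _ = tokio::join!(fetch, insert);

    let fetched = finish_fetch_time.lock().await.expect("fetch never finished");
    let inserted = start_insert_time.lock().await.expect("insert never started");
    // Insertion must have started before fetching finished.
    assert!(fetched > inserted);
    println!(
        "insert started {} ms before fetch finished",
        (fetched - inserted).num_milliseconds()
    );
}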

chloria-backend/chloria-job/src/execution/ports/repository.rs

Lines changed: 17 additions & 2 deletions
@@ -1,7 +1,9 @@
+use std::{future::Future, pin::Pin};
+
 use anyhow::Result;
 use async_trait::async_trait;
 use chrono::{DateTime, Local};
-use mockall::automock;
+use mockall::mock;
 
 pub(crate) struct InsertNewsInput {
     pub(crate) source_name: String, // Code name of the source used to fetch the news
@@ -14,8 +16,21 @@ pub(crate) struct InsertNewsInput {
     pub(crate) published_time: Option<DateTime<Local>>, // Date and time when the news was published
 }
 
-#[automock]
 #[async_trait]
 pub(crate) trait Repository: Send + Sync {
     async fn insert_news(&self, inputs: Vec<InsertNewsInput>) -> Result<Vec<i32>>;
 }
+
+mock! {
+    pub(in super::super) Repository {}
+
+    impl Repository for Repository {
+        fn insert_news<'life0, 'async_trait>(
+            &'life0 self,
+            inputs: Vec<InsertNewsInput>,
+        ) -> Pin<Box<dyn Future<Output = Result<Vec<i32>>> + Send + 'async_trait>>
+        where
+            'life0: 'async_trait,
+            Self: 'async_trait;
+    }
+}
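
Replacing #[automock] with a hand-written mockall::mock! block declares insert_news in the form that #[async_trait] actually generates, a method returning Pin<Box<dyn Future<…>>>, presumably so the test's .returning(move |_| Box::pin(insert_news(…))) closure can supply a genuine future that locks the shared timestamp before resolving; the closure of an #[automock]-generated expectation yields the output value directly rather than a future. Below is a minimal, self-contained sketch of the same pattern on a hypothetical Store trait; the trait, its save method, save_stub, and the tokio/mockall/async-trait/anyhow dependencies are assumptions for illustration, not code from this repository.

use std::{future::Future, pin::Pin};

use anyhow::Result;
use async_trait::async_trait;
use mockall::mock;

#[async_trait]
trait Store: Send + Sync {
    async fn save(&self, items: Vec<String>) -> Result<usize>;
}

mock! {
    Store {}

    impl Store for Store {
        // The manually desugared form of `async fn save`, mirroring repository.rs above.
        fn save<'life0, 'async_trait>(
            &'life0 self,
            items: Vec<String>,
        ) -> Pin<Box<dyn Future<Output = Result<usize>> + Send + 'async_trait>>
        where
            'life0: 'async_trait,
            Self: 'async_trait;
    }
}

// A stub with a real async body: it could sleep, take a lock, or record a timestamp
// before answering, which a plain value-returning stub cannot do.
async fn save_stub(items: Vec<String>) -> Result<usize> {
    Ok(items.len())
}

#[tokio::main]
async fn main() -> Result<()> {
    let mut store = MockStore::new();
    store
        .expect_save()
        .returning(|items| Box::pin(save_stub(items)));
    assert_eq!(store.save(vec!["a".into(), "b".into()]).await?, 2);
    Ok(())
}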
