refac: add support for multiple threads for tests

Author: Dmitriy Pleshevskiy
Date: 2021-12-24 23:43:47 +03:00
Parent: b1b0b71f79
Commit: 9d6f811287

3 changed files with 46 additions and 23 deletions
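
Note on the intent (inferred from the diff below, not stated in the commit message): the test files previously shared one BUCKET constant per collection and cleaned up with flush_collection, so tests interleaved by Rust's default multi-threaded test harness could flush each other's data. The refactor gives every test its own bucket plus a flush_bucket helper that clears only that bucket, which lets the suite run with the harness's default parallelism (plain `cargo test`) instead of having to serialize it with `cargo test -- --test-threads=1`. A minimal sketch of the resulting pattern, reusing the ingest_start and flush_bucket helpers from the common test module; the test name, bucket string, and COLLECTION value here are illustrative only:

    const COLLECTION: &str = "Example";

    #[test]
    fn should_run_safely_in_parallel() {
        // Each test owns a unique bucket, so concurrent test threads never
        // touch each other's objects.
        let bucket = "example_unique_bucket";

        let ingest_channel = ingest_start();
        ingest_channel
            .push(COLLECTION, bucket, "1", "Some indexed text")
            .unwrap();

        // ...assertions against a search channel would go here...

        // Clean up only this test's bucket instead of flushing the whole collection.
        flush_bucket(COLLECTION, bucket);
    }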

File 1 of 3:

@@ -13,6 +13,6 @@ pub fn search_start() -> SearchChannel {
     SearchChannel::start(HOST, PASS).expect("The Sonic server must be running")
 }
 
-pub fn flush_collection(collection: &str) {
-    ingest_start().flushc(collection).unwrap();
+pub fn flush_bucket(collection: &str, bucket: &str) {
+    ingest_start().flushb(collection, bucket).unwrap();
 }

File 2 of 3:

@@ -2,27 +2,28 @@ mod common;
 use common::*;
 
 const COLLECTION: &str = "Ingest";
-const BUCKET: &str = "Push";
 
 #[test]
 fn should_push_new_object_to_sonic() {
-    let ingest_channel = ingest_start();
+    let bucket = "push_simple";
 
-    match ingest_channel.push(COLLECTION, BUCKET, "1", "Sweet Teriyaki Beef Skewers") {
+    let ingest_channel = ingest_start();
+    match ingest_channel.push(COLLECTION, bucket, "1", "Sweet Teriyaki Beef Skewers") {
         Ok(res) => assert!(res),
         Err(_) => unreachable!(),
     }
 
-    flush_collection(COLLECTION);
+    flush_bucket(COLLECTION, bucket);
 }
 
 #[test]
 fn should_push_new_object_to_sonic_with_russian_locale() {
-    let ingest_channel = ingest_start();
+    let bucket = "push_locale";
 
+    let ingest_channel = ingest_start();
     match ingest_channel.push_with_locale(
         COLLECTION,
-        BUCKET,
+        bucket,
         "1",
         "Открытый пирог с орехами и сгущенкой",
         "rus",
@@ -31,5 +32,24 @@ fn should_push_new_object_to_sonic_with_russian_locale() {
         Err(_) => unreachable!(),
     }
 
-    flush_collection(COLLECTION);
+    flush_bucket(COLLECTION, bucket);
 }
+
+#[test]
+fn should_push_multiline_text() {
+    let bucket = "push_multiline";
+    let multiline_text = "
+        Sweet
+        Teriyaki
+        Beef
+        Skewers
+    ";
+
+    let ingest_channel = ingest_start();
+    match ingest_channel.push(COLLECTION, bucket, "1", multiline_text) {
+        Ok(res) => assert!(res),
+        Err(_) => unreachable!(),
+    }
+
+    flush_bucket(COLLECTION, bucket);
+}

File 3 of 3:

@@ -2,61 +2,64 @@ mod common;
 
 use common::*;
 
 const COLLECTION: &str = "Search";
-const BUCKET: &str = "Query";
 
 #[test]
-fn should_find_object_by_full_text() {
+fn should_find_object_by_exact_match() {
+    let bucket = "query_by_exact_match";
     let title = "Sweet Teriyaki Beef Skewers";
 
     let ingest_channel = ingest_start();
-    ingest_channel.push(COLLECTION, BUCKET, "1", title).unwrap();
+    ingest_channel.push(COLLECTION, bucket, "1", title).unwrap();
 
     let search_channel = search_start();
-    match search_channel.query(COLLECTION, BUCKET, title) {
+    match search_channel.query(COLLECTION, bucket, title) {
         Ok(object_ids) => assert_eq!(object_ids, vec!["1"]),
         Err(_) => unreachable!(),
     }
 
-    flush_collection(COLLECTION);
+    flush_bucket(COLLECTION, bucket);
 }
 
 #[test]
-fn should_find_object_by_parts() {
+fn should_find_object_by_partial_match() {
+    let bucket = "query_by_partial_match";
     let title = "Sweet Teriyaki Beef Skewers";
 
     let ingest_channel = ingest_start();
-    ingest_channel.push(COLLECTION, BUCKET, "1", title).unwrap();
+    ingest_channel.push(COLLECTION, bucket, "1", title).unwrap();
 
     let search_channel = search_start();
     for word in title.split_whitespace() {
-        match search_channel.query(COLLECTION, BUCKET, word) {
+        match search_channel.query(COLLECTION, bucket, word) {
             Ok(object_ids) => assert_eq!(object_ids, vec!["1"]),
             Err(_) => unreachable!(),
         }
     }
 
-    flush_collection(COLLECTION);
+    flush_bucket(COLLECTION, bucket);
 }
 
 #[test]
 fn should_find_many_objects() {
+    let bucket = "query_many_objects";
+
     let ingest_channel = ingest_start();
     ingest_channel
-        .push(COLLECTION, BUCKET, "1", "Sweet Teriyaki Beef Skewers")
+        .push(COLLECTION, bucket, "1", "Sweet Teriyaki Beef Skewers")
         .unwrap();
     ingest_channel
-        .push(COLLECTION, BUCKET, "2", "Slow Cooker Beef Stew I")
+        .push(COLLECTION, bucket, "2", "Slow Cooker Beef Stew I")
         .unwrap();
     ingest_channel
-        .push(COLLECTION, BUCKET, "3", "Christmas Prime Rib")
+        .push(COLLECTION, bucket, "3", "Christmas Prime Rib")
         .unwrap();
 
     let search_channel = search_start();
-    match search_channel.query(COLLECTION, BUCKET, "Beef") {
+    match search_channel.query(COLLECTION, bucket, "Beef") {
         Ok(object_ids) => assert_eq!(object_ids, vec!["2", "1"]),
         Err(_) => unreachable!(),
     }
 
-    flush_collection(COLLECTION);
+    flush_bucket(COLLECTION, bucket);
 }