Merge pull request 'Get list of finished tasks' (#32) from finished into main
Reviewed-on: #32
Commit 2702cfd049
10 changed files with 135 additions and 45 deletions
database/migrations/202208201623.sql (new file, 9 additions)

@@ -0,0 +1,9 @@
+CREATE VIEW finished_tasks
+AS
+SELECT
+    t.*,
+    row_number() OVER (ORDER BY t.finished_at DESC) AS idx
+FROM tasks AS t
+WHERE t.finished_at IS NOT NULL
+ORDER BY t.finished_at DESC
+;
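The view lists finished tasks newest-first, and idx is the 1-based position assigned by row_number(), so callers get a ready-made display index. As a rough illustration of how it can be consumed, here is a minimal rusqlite sketch (hypothetical code, not part of this commit; it assumes the name column that the INSERT statement further down writes):

use rusqlite::Connection;

fn print_finished(conn: &Connection) -> rusqlite::Result<()> {
    // idx comes from row_number() in the finished_tasks view
    let mut stmt = conn.prepare("SELECT idx, name FROM finished_tasks")?;
    let rows = stmt.query_map([], |row| {
        Ok((row.get::<_, i64>("idx")?, row.get::<_, String>("name")?))
    })?;
    for row in rows {
        let (idx, name) = row?;
        println!("{}. {}", idx, name);
    }
    Ok(())
}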
@@ -24,3 +24,13 @@ FROM tasks AS t
 WHERE t.finished_at IS NULL
 ORDER BY t.created_at
 ;
+
+CREATE VIEW finished_tasks
+AS
+SELECT
+    t.*,
+    row_number() OVER (ORDER BY t.finished_at DESC) AS idx
+FROM tasks AS t
+WHERE t.finished_at IS NOT NULL
+ORDER BY t.finished_at DESC
+;
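The same view is added to the base schema as well (this hunk extends the file that already defines active_tasks), so a database created from scratch ends up with the same objects as one upgraded through migration 202208201623; see the new upgrade() logic at the end of this diff.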
makefile (new file, 8 additions)

@@ -0,0 +1,8 @@
+install:
+	cargo install --path .
+
+new-migration:
+	touch ./database/migrations/$$(date +%Y%m%d%H%M).sql
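For reference, $$(date +%Y%m%d%H%M) escapes the dollar sign for make and expands in the shell to a minute-resolution stamp such as 202208201623, so files created by make new-migration follow the same naming scheme as the migration added above and the LATEST_VERSION constant bumped below.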
@@ -4,23 +4,58 @@ use xdg::BaseDirectories;
 fn main() {
     let xdg_dirs = BaseDirectories::with_prefix(env!("CARGO_PKG_NAME")).unwrap();
     let fs_repo = repo::fs::FsRepo::new(xdg_dirs.clone());
-    let tasks = fs_repo.get_tasks().unwrap();
+    let sqlite_repo = repo::sqlite::SqliteRepo::new(xdg_dirs.clone()).unwrap();
 
-    let sqlite_repo = repo::sqlite::SqliteRepo::new(xdg_dirs).unwrap();
-    for task in tasks {
-        log::info!("task: {}", task.name);
-        log::info!("  inserting...");
+    log::info!("active tasks");
 
-        sqlite_repo
-            .insert_task(repo::InsertTaskData {
-                name: task.name,
-                project: task.project,
-                link: task.link,
-                dir_path: task.dir_path,
-                index: None,
-            })
-            .unwrap();
+    let fs_tasks = fs_repo.get_tasks(false).unwrap();
+    if !fs_tasks.is_empty() {
+        for task in fs_tasks {
+            log::info!("task: {}", task.name);
+            log::info!("  inserting...");
 
-        log::info!("  inserted");
+            sqlite_repo
+                .insert_task(repo::InsertTaskData {
+                    name: task.name,
+                    project: task.project,
+                    link: task.link,
+                    dir_path: task.dir_path,
+                    index: None,
+                    finished_at: None,
+                })
+                .unwrap();
+
+            log::info!("  inserted");
+        }
+    }
+
+    log::info!("finished tasks");
+
+    let fs_tasks = fs_repo.get_tasks(true).unwrap();
+    if !fs_tasks.is_empty() {
+        let meta = std::fs::metadata(xdg_dirs.get_data_file(repo::fs::FINISHED_DATA_FILE)).unwrap();
+        let finished_at = meta
+            .modified()
+            .map(time::OffsetDateTime::from)
+            .unwrap_or_else(|_| time::OffsetDateTime::now_utc());
+
+        for task in fs_tasks {
+            log::info!("task: {}", task.name);
+            log::info!("  inserting...");
+
+            sqlite_repo
+                .insert_task(repo::InsertTaskData {
+                    name: task.name,
+                    project: task.project,
+                    link: task.link,
+                    dir_path: task.dir_path,
+                    index: None,
+                    finished_at: Some(finished_at),
+                })
+                // TODO: think of a better solution than idx
+                .ok();
+
+            log::info!("  inserted");
+        }
     }
 }
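Worth noting for the finished-task import: the fs backend keeps all finished tasks in a single FINISHED_DATA_FILE, so per-task completion times are not available; the file's modification time is used as finished_at for every migrated finished task, falling back to now_utc() when that metadata cannot be read.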
@@ -43,6 +43,7 @@ pub fn execute(repo: impl Repository, args: Args) {
             .transpose()
             .unwrap(),
         index: None,
+        finished_at: None,
     });
 
     match res {
@@ -18,19 +18,26 @@ use crate::repo::Repository;
 
 #[derive(clap::Args)]
 pub struct Args {
+    #[clap(short, long)]
+    finished: bool,
+
     projects: Vec<String>,
 }
 
 pub fn execute(repo: impl Repository, args: Args) {
-    let tasks = match repo.get_tasks() {
+    let tasks = match repo.get_tasks(args.finished) {
         Ok(tasks) => tasks,
         Err(err) => return eprintln!("Cannot read tasks: {}", err),
     };
 
-    let cur_task = match repo.get_current_task_opt() {
-        Ok(cur_task) => cur_task,
-        Err(err) => {
-            return eprintln!("Cannot read current task: {}", err);
+    let cur_task = if args.finished {
+        None
+    } else {
+        match repo.get_current_task_opt() {
+            Ok(cur_task) => cur_task,
+            Err(err) => {
+                return eprintln!("Cannot read current task: {}", err);
+            }
+        }
     };
 
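With this flag the list command can be invoked as list --finished (or list -f): it fetches tasks via get_tasks(true) and forces cur_task to None, presumably because a finished task can never be the currently active one.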
@@ -77,6 +77,7 @@ pub fn execute(repo: impl Repository, args: Args) {
         project: target.project,
         link: target.link,
         dir_path: target.dir_path,
+        finished_at: None,
     });
     match res {
         Ok(task) => {
@@ -55,6 +55,7 @@ pub struct InsertTaskData {
     pub link: Option<String>,
     pub dir_path: Option<PathBuf>,
     pub index: Option<usize>,
+    pub finished_at: Option<time::OffsetDateTime>,
 }
 
 pub struct UpdateTaskData {

@@ -69,7 +70,7 @@ pub trait Repository {
 
     fn get_task_opt(&self, id: domain::TaskIdx) -> Result<Option<domain::Task>, Error>;
 
-    fn get_tasks(&self) -> Result<Vec<domain::Task>, Error>;
+    fn get_tasks(&self, finished: bool) -> Result<Vec<domain::Task>, Error>;
 
     fn remove_task(&self, id: domain::TaskIdx) -> Result<domain::Task, Error>;
 
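This is the core interface change: InsertTaskData gains an optional finished_at timestamp and Repository::get_tasks now takes a finished flag. That is why the command hunks above pass finished_at: None and args.finished, and why both the fs and sqlite backends below are updated to the new signature.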
@@ -60,9 +60,9 @@ impl From<CurrentTaskInfo> for domain::CurrentTaskInfo {
     }
 }
 
-const CURRENT_TASK_FILE: &str = "current.json";
-const DATA_FILE: &str = "data.json";
-const FINISHED_DATA_FILE: &str = "finished_data.json";
+pub const CURRENT_TASK_FILE: &str = "current.json";
+pub const DATA_FILE: &str = "data.json";
+pub const FINISHED_DATA_FILE: &str = "finished_data.json";
 
 pub struct FsRepo {
     xdg_dirs: BaseDirectories,

@@ -89,9 +89,14 @@ impl Repository for FsRepo {
         Ok(Some(tasks[id - 1].clone().into()))
     }
 
-    fn get_tasks(&self) -> Result<Vec<domain::Task>, Error> {
-        self.get_tasks_impl()
-            .map(|tasks| tasks.into_iter().map(Task::into).collect())
+    fn get_tasks(&self, finished: bool) -> Result<Vec<domain::Task>, Error> {
+        let fs_tasks = if finished {
+            self.get_finished_tasks_impl()?.into_iter().rev().collect()
+        } else {
+            self.get_tasks_impl()?
+        };
+
+        Ok(fs_tasks.into_iter().map(Task::into).collect())
     }
 
     fn remove_task(&self, id: domain::TaskIdx) -> Result<domain::Task, Error> {
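The constants become pub because main.rs above now reads repo::fs::FINISHED_DATA_FILE to derive a finished_at timestamp from the file's mtime. For the finished listing, the result of get_finished_tasks_impl() is reversed, presumably so the most recently finished task comes first, mirroring the ORDER BY t.finished_at DESC of the new finished_tasks view.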
@@ -53,7 +53,7 @@ impl<'r> TryFrom<&'r rusqlite::Row<'_>> for Task {
     }
 }
 
-const SCHEMA_FILE: &str = "schema.sql";
+const SCHEMA_FILE: &str = "tas.db";
 
 pub struct SqliteRepo {
     conn: Connection,
@@ -82,11 +82,16 @@ impl Repository for SqliteRepo {
         self.get_task_opt_impl(id).map(|t| t.map(From::from))
     }
 
-    fn get_tasks(&self) -> Result<Vec<domain::Task>, Error> {
-        let mut stmt = self
-            .conn
-            .prepare("SELECT * FROM active_tasks")
-            .map_err(|_| Error::PrepareQuery)?;
+    fn get_tasks(&self, finished: bool) -> Result<Vec<domain::Task>, Error> {
+        let mut stmt = if finished {
+            self.conn
+                .prepare("SELECT * FROM finished_tasks")
+                .map_err(|_| Error::PrepareQuery)?
+        } else {
+            self.conn
+                .prepare("SELECT * FROM active_tasks")
+                .map_err(|_| Error::PrepareQuery)?
+        };
 
         let rows = stmt
             .query_map([], |row| Task::try_from(row))
@@ -116,8 +121,8 @@ impl Repository for SqliteRepo {
         let mut stmt = self
             .conn
             .prepare(
-                "INSERT INTO tasks (name, project, link, dir_path)
-                VALUES (?1, ?2, ?3, ?4)",
+                "INSERT INTO tasks (name, project, link, dir_path, created_at, finished_at)
+                VALUES (?1, ?2, ?3, ?4, ?5, ?5)",
             )
             .map_err(|_| Error::PrepareQuery)?;
 
@@ -129,6 +134,7 @@ impl Repository for SqliteRepo {
             &insert_data
                 .dir_path
                 .and_then(|p| p.into_os_string().into_string().ok()),
+            &insert_data.finished_at,
         ))
         .map_err(|_| Error::InsertData)?;
 
@@ -251,12 +257,16 @@ impl SqliteRepo {
 }
 
 macro_rules! run_migration {
-    ($this:ident, $ver:ident = $version:literal) => {
+    ($this:ident, $ver:ident = $version:expr) => {
+        run_migration!($this, $ver = $version => concat!("migrations/", $version));
+    };
+
+    ($this:ident, $ver:ident = $version:expr => $sql_name:expr) => {
         $this
             .conn
             .execute_batch(&format!(
                 "BEGIN; {} COMMIT;",
-                include_str!(concat!("../../database/migrations/", $version, ".sql"))
+                include_str!(concat!("../../database/", $sql_name, ".sql"))
             ))
             .map_err(|_| MigrationError::Upgrade)?;
 
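The macro now has two arms: the original single-argument form, run_migration!(self, version = 202208201623), forwards to the two-argument form with concat!("migrations/", $version) and therefore still embeds database/migrations/202208201623.sql at compile time, while the two-argument form lets a caller name any SQL file under database/; in particular, run_migration!(self, version = LATEST_VERSION => "schema") embeds database/schema.sql.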
@@ -288,18 +298,21 @@ impl std::fmt::Display for MigrationError {
 
 impl std::error::Error for MigrationError {}
 
-const LATEST_VERSION: i64 = 202208162308;
+const LATEST_VERSION: i64 = 202208201623;
 
 impl SqliteRepo {
     pub fn upgrade(&self) -> Result<(), MigrationError> {
         let mut version = self.version();
-        if version == Some(LATEST_VERSION) {
-            return Ok(());
-        }
-
-        // TODO: execute full schema if version is none
-        if version.is_none() {
-            run_migration!(self, version = 202208162308);
+        match version {
+            Some(LATEST_VERSION) => return Ok(()),
+            None => {
+                run_migration!(self, version = LATEST_VERSION => "schema");
+            }
+            Some(v) => {
+                if v == 202208162308 {
+                    run_migration!(self, version = 202208201623);
+                }
+            }
         }
 
         self.conn
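Taken together, upgrade() now has three paths: a database already at LATEST_VERSION returns early, a brand-new database with no recorded version gets the full schema applied through the => "schema" form of run_migration!, and a database at the previous version 202208162308 has only the new 202208201623 migration applied on top.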