migra/migra-cli/src/config.rs

use crate::database::migration::Migration;
use crate::error::{Error, MigraResult};
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use std::{env, fs, io};
//===========================================================================//
// Internal Config Utils / Macros //
//===========================================================================//
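/// Walks up from `path` toward the filesystem root and returns the first
/// directory that contains a file named `file_name`, or `Error::RootNotFound`
/// if no ancestor contains it.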
fn search_for_directory_containing_file(path: &Path, file_name: &str) -> MigraResult<PathBuf> {
let file_path = path.join(file_name);
if file_path.is_file() {
Ok(path.to_owned())
} else {
path.parent()
.ok_or(Error::RootNotFound)
.and_then(|p| search_for_directory_containing_file(p, file_name))
}
}
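/// Finds the project root by searching for `Migra.toml`, starting at the
/// current working directory and moving upward.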
fn recursive_find_project_root() -> MigraResult<PathBuf> {
let current_dir = std::env::current_dir()?;
search_for_directory_containing_file(&current_dir, MIGRA_TOML_FILENAME)
}
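/// Aborts with a reinstall hint when the connection string requires a
/// database driver whose Cargo feature was not compiled in. The macro is
/// only defined when at least one driver feature is missing, matching the
/// `cfg_if!` branches that invoke it.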
#[cfg(any(
not(feature = "postgres"),
not(feature = "mysql"),
not(any(feature = "sqlite", feature = "rusqlite"))
))]
macro_rules! please_install_with {
(feature $database_name:expr) => {
        panic!(
            r#"You cannot use migra for "{database_name}".
You need to reinstall the crate with the "{database_name}" feature:
cargo install migra-cli --features {database_name}"#,
            database_name = $database_name
        );
};
}
//===========================================================================//
// Database config //
//===========================================================================//
fn default_database_connection_env() -> String {
String::from("$DATABASE_URL")
}
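/// Database clients supported by this build; each variant is compiled in
/// only when the corresponding Cargo feature is enabled.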
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum SupportedDatabaseClient {
#[cfg(feature = "postgres")]
Postgres,
#[cfg(feature = "mysql")]
Mysql,
#[cfg(any(feature = "sqlite", feature = "rusqlite"))]
Sqlite,
}
impl Default for SupportedDatabaseClient {
fn default() -> Self {
cfg_if! {
if #[cfg(feature = "postgres")] {
SupportedDatabaseClient::Postgres
} else if #[cfg(feature = "mysql")] {
SupportedDatabaseClient::Mysql
} else if #[cfg(any(feature = "sqlite", feature = "rusqlite"))] {
SupportedDatabaseClient::Sqlite
}
}
}
}
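/// The `[database]` section of `Migra.toml`.
///
/// `connection` holds either a literal connection string (for example
/// `postgres://localhost/db`) or, with a leading `$`, the name of an
/// environment variable to read it from; the default is `$DATABASE_URL`.
/// `client` is optional and is otherwise inferred from the connection string.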
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct DatabaseConfig {
pub client: Option<SupportedDatabaseClient>,
#[serde(default = "default_database_connection_env")]
pub connection: String,
}
impl Default for DatabaseConfig {
fn default() -> Self {
DatabaseConfig {
connection: default_database_connection_env(),
client: None,
}
}
}
impl DatabaseConfig {
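    /// Returns the configured client, or infers one from the connection
    /// string: a `postgres://` prefix selects Postgres, a `mysql://` prefix
    /// selects MySQL, and a path ending in `.db` selects SQLite. Panics with
    /// a reinstall hint if the matching feature was not compiled in, and
    /// falls back to the default client when nothing matches.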
pub fn client(&self) -> SupportedDatabaseClient {
self.client.clone().unwrap_or_else(|| {
self.connection_string()
.ok()
.and_then(|connection_string| {
if connection_string.starts_with("postgres://") {
cfg_if! {
if #[cfg(feature = "postgres")] {
Some(SupportedDatabaseClient::Postgres)
} else {
please_install_with!(feature "postgres")
}
}
} else if connection_string.starts_with("mysql://") {
cfg_if! {
if #[cfg(feature = "mysql")] {
Some(SupportedDatabaseClient::Mysql)
} else {
please_install_with!(feature "mysql")
}
}
} else if connection_string.ends_with(".db") {
cfg_if! {
if #[cfg(any(feature = "sqlite", feature = "rusqlite"))] {
Some(SupportedDatabaseClient::Sqlite)
} else {
please_install_with!(feature "sqlite")
}
}
} else {
None
}
})
.unwrap_or_default()
})
}
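    /// Resolves the connection string: a value starting with `$` is read
    /// from the environment variable of that name, anything else is
    /// returned verbatim.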
pub fn connection_string(&self) -> MigraResult<String> {
if let Some(connection_env) = self.connection.strip_prefix("$") {
env::var(connection_env).map_err(|_| Error::MissedEnvVar(connection_env.to_string()))
} else {
Ok(self.connection.clone())
}
}
}
//===========================================================================//
// Migrations config //
//===========================================================================//
fn default_migrations_directory() -> String {
String::from("migrations")
}
fn default_migrations_table_name() -> String {
String::from("migrations")
}
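/// The `[migrations]` section of `Migra.toml`.
///
/// `directory` and `table_name` both default to `"migrations"` and, like the
/// database connection, may name an environment variable with a leading `$`.
/// `date_format` is optional and defaults to `"%y%m%d%H%M%S"`.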
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct MigrationsConfig {
    #[serde(default = "default_migrations_directory")]
directory: String,
#[serde(default = "default_migrations_table_name")]
table_name: String,
date_format: Option<String>,
}
impl Default for MigrationsConfig {
fn default() -> Self {
MigrationsConfig {
directory: default_migrations_directory(),
table_name: default_migrations_table_name(),
date_format: None,
}
}
}
impl MigrationsConfig {
pub fn directory(&self) -> String {
if let Some(directory_env) = self.directory.strip_prefix("$") {
env::var(directory_env).unwrap_or_else(|_| {
println!(
"WARN: Cannot read {} variable and use {} directory by default",
directory_env,
default_migrations_directory()
);
default_migrations_directory()
})
} else {
self.directory.clone()
}
}
pub fn table_name(&self) -> String {
if let Some(table_name_env) = self.table_name.strip_prefix("$") {
env::var(table_name_env).unwrap_or_else(|_| {
println!(
"WARN: Cannot read {} variable and use {} table_name by default",
table_name_env,
default_migrations_table_name()
);
default_migrations_table_name()
})
} else {
self.table_name.clone()
}
}
pub fn date_format(&self) -> String {
self.date_format
.clone()
.unwrap_or_else(|| String::from("%y%m%d%H%M%S"))
}
}
//===========================================================================//
// Main config //
//===========================================================================//
pub(crate) const MIGRA_TOML_FILENAME: &str = "Migra.toml";
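/// The parsed `Migra.toml` manifest.
///
/// `root` is the database directory resolved relative to the manifest;
/// `manifest_root` records where the manifest was found and is never
/// serialized.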
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct Config {
#[serde(skip)]
manifest_root: PathBuf,
root: PathBuf,
#[serde(default)]
pub(crate) database: DatabaseConfig,
#[serde(default)]
pub(crate) migrations: MigrationsConfig,
}
impl Default for Config {
fn default() -> Config {
Config {
manifest_root: PathBuf::default(),
root: PathBuf::from("database"),
database: DatabaseConfig::default(),
migrations: MigrationsConfig::default(),
}
}
}
impl Config {
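    /// Loads the configuration. An explicit path to a directory is joined
    /// with `Migra.toml`; without a path, the manifest is searched for
    /// upward from the current working directory. If no manifest is found,
    /// the default configuration is returned instead of an error.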
pub fn read(config_path: Option<&PathBuf>) -> MigraResult<Config> {
let config_path = match config_path {
Some(config_path) if config_path.is_dir() => {
Some(config_path.join(MIGRA_TOML_FILENAME))
}
Some(config_path) => Some(config_path.clone()),
None => recursive_find_project_root()
.map(|path| path.join(MIGRA_TOML_FILENAME))
.ok(),
};
match config_path {
None => Ok(Config::default()),
Some(config_path) => {
let content = fs::read_to_string(&config_path)?;
let mut config: Config = toml::from_str(&content).expect("Cannot parse Migra.toml");
config.manifest_root = config_path
.parent()
.unwrap_or_else(|| Path::new(""))
.to_path_buf();
Ok(config)
}
}
}
pub fn directory_path(&self) -> PathBuf {
self.manifest_root.join(&self.root)
}
pub fn migration_dir_path(&self) -> PathBuf {
self.directory_path().join(self.migrations.directory())
}
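    /// Lists the migrations found in the migrations directory, sorted by
    /// path. A missing directory yields an empty list rather than an error.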
pub fn migrations(&self) -> MigraResult<Vec<Migration>> {
let mut entries = match self.migration_dir_path().read_dir() {
Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(Vec::new()),
entries => entries?
.map(|res| res.map(|e| e.path()))
.collect::<Result<Vec<_>, _>>()?,
};
if entries.is_empty() {
return Ok(vec![]);
}
entries.sort();
let migrations = entries
.iter()
            .filter_map(|path| Migration::new(path))
.collect::<Vec<_>>();
Ok(migrations)
}
}
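
// A minimal test sketch, not part of the original file: it exercises only the
// behaviour visible above (literal vs. `$`-prefixed connection strings and
// the serde defaults of the `Migra.toml` sections) and assumes the `toml`
// crate and the field visibilities stay as declared in this module. The
// environment variable name below is hypothetical and used only here.
#[cfg(test)]
mod config_sketch_tests {
    use super::*;

    #[test]
    fn literal_connection_string_is_used_verbatim() {
        let database = DatabaseConfig {
            client: None,
            connection: String::from("postgres://localhost/example"),
        };
        assert_eq!(
            database.connection_string().unwrap(),
            "postgres://localhost/example"
        );
    }

    #[test]
    fn dollar_prefixed_connection_reads_the_environment() {
        // Hypothetical variable name, set only for this test.
        env::set_var("MIGRA_SKETCH_DATABASE_URL", "postgres://localhost/from_env");
        let database = DatabaseConfig {
            client: None,
            connection: String::from("$MIGRA_SKETCH_DATABASE_URL"),
        };
        assert_eq!(
            database.connection_string().unwrap(),
            "postgres://localhost/from_env"
        );
    }

    #[test]
    fn minimal_manifest_falls_back_to_section_defaults() {
        // Only `root` is required; `[database]` and `[migrations]` use their
        // serde defaults when omitted.
        let config: Config = toml::from_str(r#"root = "database""#).unwrap();
        assert_eq!(config.root, PathBuf::from("database"));
        assert_eq!(config.migrations.directory(), "migrations");
        assert_eq!(config.migrations.table_name(), "migrations");
        assert_eq!(config.migrations.date_format(), "%y%m%d%H%M%S");
    }
}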