use crate::error::{Error, MigraResult};
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use std::{env, fs};
//===========================================================================//
//                      Internal Config Utils / Macros                       //
//===========================================================================//
fn search_for_directory_containing_file(path: &Path, file_name: &str) -> MigraResult<PathBuf> {
|
|
|
|
let file_path = path.join(file_name);
|
|
|
|
if file_path.is_file() {
|
|
|
|
Ok(path.to_owned())
|
|
|
|
} else {
|
|
|
|
path.parent()
|
|
|
|
.ok_or(Error::RootNotFound)
|
|
|
|
.and_then(|p| search_for_directory_containing_file(p, file_name))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn recursive_find_project_root() -> MigraResult<PathBuf> {
|
|
|
|
let current_dir = std::env::current_dir()?;
|
|
|
|
|
|
|
|
search_for_directory_containing_file(¤t_dir, MIGRA_TOML_FILENAME)
|
2021-03-26 02:10:41 +03:00
|
|
|
}
|
|
|
|
|
2021-05-17 10:06:33 +03:00
|
|
|
#[cfg(any(
|
|
|
|
not(feature = "postgres"),
|
|
|
|
not(feature = "mysql"),
|
2021-05-23 13:35:11 +03:00
|
|
|
not(feature = "sqlite")
|
2021-05-17 10:06:33 +03:00
|
|
|
))]
|
2021-03-26 02:10:41 +03:00
|
|
|
macro_rules! please_install_with {
|
|
|
|
(feature $database_name:expr) => {
|
|
|
|
panic!(
|
|
|
|
r#"You cannot use migra for "{database_name}".
|
|
|
|
You need to reinstall crate with "{database_name}" feature.
|
|
|
|
|
|
|
|
cargo install migra-cli --features ${database_name}"#,
|
|
|
|
database_name = $database_name
|
|
|
|
);
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
//===========================================================================//
//                             Database config                               //
//===========================================================================//
/// Returns `true` when the connection value looks like an SQLite database
/// file: its last dot-separated segment is `db`, case-insensitively.
///
/// Note: `rsplit` always yields at least one segment, so a bare `"db"`
/// with no dot also matches.
fn is_sqlite_database_file(filename: &str) -> bool {
    matches!(
        filename.rsplit('.').next(),
        Some(extension) if extension.eq_ignore_ascii_case("db")
    )
}
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
/// Serde default for `DatabaseConfig::connection`: an env-var reference
/// (the leading `$` marks it for resolution in `connection_string`).
fn default_database_connection_env() -> String {
    "$DATABASE_URL".to_owned()
}
|
|
|
|
|
|
|
|
/// Database clients migra can drive. Each variant exists only when the
/// matching cargo feature is enabled, and (de)serializes in lowercase
/// (`client = "postgres"`, `"mysql"`, `"sqlite"`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SupportedDatabaseClient {
    #[cfg(feature = "postgres")]
    Postgres,
    #[cfg(feature = "mysql")]
    Mysql,
    #[cfg(feature = "sqlite")]
    Sqlite,
}
|
|
|
|
|
|
|
|
impl Default for SupportedDatabaseClient {
    /// Picks the first enabled feature in priority order:
    /// postgres, then mysql, then sqlite.
    ///
    /// NOTE(review): this `cfg_if!` chain has no final `else`, so a build
    /// with none of the three features would not compile here — presumably
    /// the crate requires at least one database feature; confirm in Cargo.toml.
    fn default() -> Self {
        cfg_if! {
            if #[cfg(feature = "postgres")] {
                SupportedDatabaseClient::Postgres
            } else if #[cfg(feature = "mysql")] {
                SupportedDatabaseClient::Mysql
            } else if #[cfg(feature = "sqlite")] {
                SupportedDatabaseClient::Sqlite
            }
        }
    }
}
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
/// Database connection settings (the `[database]` section of `Migra.toml`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct DatabaseConfig {
    // Explicit client choice; when `None`, the client is guessed from the
    // connection string (see `DatabaseConfig::client`).
    pub client: Option<SupportedDatabaseClient>,

    // Connection string, or an env-var reference when prefixed with `$`.
    // Defaults to `$DATABASE_URL`.
    #[serde(default = "default_database_connection_env")]
    pub connection: String,
}
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
impl Default for DatabaseConfig {
|
|
|
|
fn default() -> Self {
|
|
|
|
DatabaseConfig {
|
|
|
|
connection: default_database_connection_env(),
|
|
|
|
client: None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-18 12:29:09 +03:00
|
|
|
impl DatabaseConfig {
    /// Returns the configured client, or guesses one from the connection
    /// string: `postgres://` / `mysql://` URL schemes, or a `*.db` file
    /// path for SQLite.
    ///
    /// Panics (via `please_install_with!`) when the connection string
    /// requires a database whose cargo feature is disabled. Falls back to
    /// `SupportedDatabaseClient::default()` when the connection string is
    /// unavailable or matches no known scheme.
    pub fn client(&self) -> SupportedDatabaseClient {
        self.client.clone().unwrap_or_else(|| {
            self.connection_string()
                .ok()
                .and_then(|connection_string| {
                    if connection_string.starts_with("postgres://") {
                        cfg_if! {
                            if #[cfg(feature = "postgres")] {
                                Some(SupportedDatabaseClient::Postgres)
                            } else {
                                please_install_with!(feature "postgres")
                            }
                        }
                    } else if connection_string.starts_with("mysql://") {
                        cfg_if! {
                            if #[cfg(feature = "mysql")] {
                                Some(SupportedDatabaseClient::Mysql)
                            } else {
                                please_install_with!(feature "mysql")
                            }
                        }
                    } else if is_sqlite_database_file(&connection_string) {
                        cfg_if! {
                            if #[cfg(feature = "sqlite")] {
                                Some(SupportedDatabaseClient::Sqlite)
                            } else {
                                please_install_with!(feature "sqlite")
                            }
                        }
                    } else {
                        None
                    }
                })
                .unwrap_or_default()
        })
    }

    /// Resolves the connection string: a value prefixed with `$` is read
    /// from the named environment variable; anything else is returned
    /// verbatim.
    ///
    /// # Errors
    ///
    /// `Error::MissedEnvVar` when the referenced env var is not set.
    pub fn connection_string(&self) -> MigraResult<String> {
        self.connection.strip_prefix('$').map_or_else(
            || Ok(self.connection.clone()),
            |connection_env| {
                env::var(connection_env)
                    .map_err(|_| Error::MissedEnvVar(connection_env.to_string()))
            },
        )
    }
}
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
//===========================================================================//
//                            Migrations config                              //
//===========================================================================//
/// Serde default for `MigrationsConfig::directory`.
fn default_migrations_directory() -> String {
    "migrations".to_owned()
}
|
|
|
|
|
|
|
|
/// Serde default for `MigrationsConfig::table_name`.
fn default_migrations_table_name() -> String {
    "migrations".to_owned()
}
|
|
|
|
|
|
|
|
/// Migration settings (the `[migrations]` section of `Migra.toml`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct MigrationsConfig {
    // Directory holding the migrations, or a `$ENV_VAR` reference.
    // NOTE(review): the `rename` maps to the field's own name, so it is a
    // no-op as written.
    #[serde(rename = "directory", default = "default_migrations_directory")]
    directory: String,

    // Table that records applied migrations, or a `$ENV_VAR` reference.
    #[serde(default = "default_migrations_table_name")]
    table_name: String,

    // Timestamp format for migration names; `None` means the built-in
    // `%y%m%d%H%M%S` is used (see `date_format()`).
    date_format: Option<String>,
}
|
|
|
|
|
|
|
|
impl Default for MigrationsConfig {
|
|
|
|
fn default() -> Self {
|
|
|
|
MigrationsConfig {
|
|
|
|
directory: default_migrations_directory(),
|
|
|
|
table_name: default_migrations_table_name(),
|
2021-05-16 16:55:59 +03:00
|
|
|
date_format: None,
|
2021-01-31 03:23:43 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
impl MigrationsConfig {
|
|
|
|
pub fn directory(&self) -> String {
|
2021-08-23 10:18:03 +03:00
|
|
|
self.directory.strip_prefix('$').map_or_else(
|
2021-06-13 01:39:56 +03:00
|
|
|
|| self.directory.clone(),
|
|
|
|
|directory_env| {
|
|
|
|
env::var(directory_env).unwrap_or_else(|_| {
|
|
|
|
println!(
|
|
|
|
"WARN: Cannot read {} variable and use {} directory by default",
|
|
|
|
directory_env,
|
|
|
|
default_migrations_directory()
|
|
|
|
);
|
2021-05-16 16:39:21 +03:00
|
|
|
default_migrations_directory()
|
2021-06-13 01:39:56 +03:00
|
|
|
})
|
|
|
|
},
|
|
|
|
)
|
2021-05-16 16:39:21 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn table_name(&self) -> String {
|
2021-08-23 10:18:03 +03:00
|
|
|
self.table_name.strip_prefix('$').map_or_else(
|
2021-06-13 01:39:56 +03:00
|
|
|
|| self.table_name.clone(),
|
|
|
|
|table_name_env| {
|
|
|
|
env::var(table_name_env).unwrap_or_else(|_| {
|
|
|
|
println!(
|
|
|
|
"WARN: Cannot read {} variable and use {} table_name by default",
|
|
|
|
table_name_env,
|
|
|
|
default_migrations_table_name()
|
|
|
|
);
|
2021-05-16 16:39:21 +03:00
|
|
|
default_migrations_table_name()
|
2021-06-13 01:39:56 +03:00
|
|
|
})
|
|
|
|
},
|
|
|
|
)
|
2021-02-22 23:06:08 +03:00
|
|
|
}
|
2021-05-16 16:55:59 +03:00
|
|
|
|
|
|
|
pub fn date_format(&self) -> String {
|
|
|
|
self.date_format
|
|
|
|
.clone()
|
|
|
|
.unwrap_or_else(|| String::from("%y%m%d%H%M%S"))
|
|
|
|
}
|
2021-02-22 23:06:08 +03:00
|
|
|
}
|
2021-02-01 23:51:23 +03:00
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
//===========================================================================//
//                               Main config                                 //
//===========================================================================//
2021-05-16 16:57:20 +03:00
|
|
|
/// Manifest file name that marks a project root and holds migra settings.
pub(crate) const MIGRA_TOML_FILENAME: &str = "Migra.toml";
|
|
|
|
|
2021-05-16 16:39:21 +03:00
|
|
|
/// Root configuration parsed from `Migra.toml`.
#[derive(Debug, Serialize, Deserialize)]
pub struct Config {
    // Directory containing the manifest; filled in by `Config::read`,
    // never read from the TOML itself.
    #[serde(skip)]
    manifest_root: PathBuf,

    // Project database directory, relative to `manifest_root`.
    // NOTE(review): no serde default here, so a manifest that omits `root`
    // fails to parse — confirm whether that is intended.
    root: PathBuf,

    #[serde(default)]
    pub(crate) database: DatabaseConfig,

    #[serde(default)]
    pub(crate) migrations: MigrationsConfig,
}
|
|
|
|
|
|
|
|
impl Default for Config {
|
|
|
|
fn default() -> Config {
|
|
|
|
Config {
|
|
|
|
manifest_root: PathBuf::default(),
|
|
|
|
root: PathBuf::from("database"),
|
|
|
|
database: DatabaseConfig::default(),
|
|
|
|
migrations: MigrationsConfig::default(),
|
|
|
|
}
|
|
|
|
}
|
2021-02-02 00:53:33 +03:00
|
|
|
}
|
2021-02-01 23:51:23 +03:00
|
|
|
|
2021-02-02 00:53:33 +03:00
|
|
|
impl Config {
    /// Loads configuration from `Migra.toml`.
    ///
    /// Path resolution: an explicit directory gets `Migra.toml` appended;
    /// an explicit file path is used as-is; with no path, the current
    /// directory and its ancestors are searched. When no manifest can be
    /// located at all, `Config::default()` is returned instead of an error.
    ///
    /// NOTE(review): a manifest that exists but is not valid TOML panics
    /// via `expect` rather than returning `Err` — consider surfacing a
    /// parse error through `MigraResult` instead.
    pub fn read(config_path: Option<&PathBuf>) -> MigraResult<Config> {
        let config_path = match config_path {
            Some(config_path) if config_path.is_dir() => {
                Some(config_path.join(MIGRA_TOML_FILENAME))
            }
            Some(config_path) => Some(config_path.clone()),
            None => recursive_find_project_root()
                .map(|path| path.join(MIGRA_TOML_FILENAME))
                .ok(),
        };

        match config_path {
            None => Ok(Config::default()),
            Some(config_path) => {
                let content = fs::read_to_string(&config_path)?;

                let mut config: Config = toml::from_str(&content).expect("Cannot parse Migra.toml");
                // Paths in the config are resolved relative to the
                // manifest's directory; a path with no parent maps to "".
                config.manifest_root = config_path
                    .parent()
                    .unwrap_or_else(|| Path::new(""))
                    .to_path_buf();

                Ok(config)
            }
        }
    }

    /// Project database directory: `manifest_root` joined with `root`.
    pub fn directory_path(&self) -> PathBuf {
        self.manifest_root.join(&self.root)
    }

    /// Migrations directory inside `directory_path()`.
    pub fn migration_dir_path(&self) -> PathBuf {
        self.directory_path().join(self.migrations.directory())
    }
}
|