feat: add mysql database support

Dmitriy Pleshevskiy 2021-03-26 02:10:41 +03:00
parent c05bac36e7
commit 18bf265510
24 changed files with 433 additions and 166 deletions

View file

@@ -31,6 +31,7 @@ jobs:
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
@@ -48,7 +49,7 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: test
args: -- --test-threads=1
args: --all-features -- --test-threads=1
clippy:
name: clippy (ubuntu-latest, stable)

View file

@@ -2,5 +2,6 @@
"cSpell.words": [
"migra"
],
"editor.formatOnSave": true
"editor.formatOnSave": true,
"rust.all_features": true
}

View file

@@ -46,6 +46,7 @@ For more information about the commands, simply run `migra help`
### Supported databases
- [x] Postgres
- [x] MySQL
## License

View file

@@ -1,4 +1,4 @@
version: '3'
version: "3"
services:
postgres:
@@ -13,6 +13,22 @@ services:
ports:
- 6000:5432
mysql:
image: mysql
container_name: migra.mysql
command: --default-authentication-plugin=mysql_native_password
environment:
MYSQL_ROOT_PASSWORD: example
MYSQL_DATABASE: "migra_tests"
MYSQL_USER: "mysql"
MYSQL_PASSWORD: "mysql"
volumes:
- mysql_data:/var/lib/mysql
ports:
- 6001:3306
volumes:
postgres_data:
driver: local
mysql_data:
driver: local

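Both databases in this compose file can be brought up together for local testing; a minimal sketch, assuming Docker Compose is available and this file is the project's docker-compose.yml:

    docker-compose up -d postgres mysql
    # MySQL is then reachable on localhost:6001, matching the
    # mysql://mysql:mysql@localhost:6001/migra_tests URL used by the tests below.
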
View file

@@ -14,15 +14,17 @@ readme = "../README.md"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
cfg-if = "1.0"
structopt = "0.3"
serde = { version = "1.0", features = ["derive"] }
toml = "0.5"
chrono = "0.4"
postgres = "0.19"
postgres = { version = "0.19", optional = true }
mysql = { version = "20.1", optional = true }
dotenv = { version = "0.15", optional = true }
[features]
default = []
default = ["postgres"]
[badges]
maintenance = { status = "actively-developed" }

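Since "postgres" is now the only default feature, MySQL support has to be requested explicitly at install or test time; a sketch of the relevant commands (mirroring the panic hint added in config.rs below and the CI change above):

    cargo install migra-cli --features mysql        # CLI with the MySQL driver compiled in
    cargo test --all-features -- --test-threads=1   # exercise both backends, as CI now does
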
View file

@@ -7,6 +7,23 @@ use std::{env, fs, io};
pub(crate) const MIGRA_TOML_FILENAME: &str = "Migra.toml";
pub(crate) const DEFAULT_DATABASE_CONNECTION_ENV: &str = "$DATABASE_URL";
fn default_database_connection_env() -> String {
DEFAULT_DATABASE_CONNECTION_ENV.to_owned()
}
#[cfg(any(not(feature = "postgres"), not(feature = "mysql")))]
macro_rules! please_install_with {
(feature $database_name:expr) => {
panic!(
r#"You cannot use migra for "{database_name}".
You need to reinstall crate with "{database_name}" feature.
cargo install migra-cli --features ${database_name}"#,
database_name = $database_name
);
};
}
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct Config {
#[serde(skip)]
@@ -19,30 +36,69 @@ pub(crate) struct Config {
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum SupportedDatabaseClient {
#[cfg(feature = "postgres")]
Postgres,
#[cfg(feature = "mysql")]
Mysql,
}
impl Default for SupportedDatabaseClient {
fn default() -> Self {
cfg_if! {
if #[cfg(feature = "postgres")] {
SupportedDatabaseClient::Postgres
} else if #[cfg(feature = "mysql")] {
SupportedDatabaseClient::Mysql
}
}
}
}
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub(crate) struct DatabaseConfig {
pub client: Option<SupportedDatabaseClient>,
pub connection: Option<String>,
#[serde(default = "default_database_connection_env")]
pub connection: String,
}
impl DatabaseConfig {
pub fn client(&self) -> MigraResult<SupportedDatabaseClient> {
Ok(SupportedDatabaseClient::Postgres)
pub fn client(&self) -> SupportedDatabaseClient {
self.client.clone().unwrap_or_else(|| {
self.connection_string()
.ok()
.and_then(|connection_string| {
if connection_string.starts_with("postgres://") {
cfg_if! {
if #[cfg(feature = "postgres")] {
Some(SupportedDatabaseClient::Postgres)
} else {
please_install_with!(feature "postgres")
}
}
} else if connection_string.starts_with("mysql://") {
cfg_if! {
if #[cfg(feature = "mysql")] {
Some(SupportedDatabaseClient::Mysql)
} else {
please_install_with!(feature "mysql")
}
}
} else {
None
}
})
.unwrap_or_default()
})
}
pub fn connection_string(&self) -> MigraResult<String> {
let connection = self
.connection
.clone()
.unwrap_or_else(|| String::from(DEFAULT_DATABASE_CONNECTION_ENV));
if let Some(connection_env) = connection.strip_prefix("$") {
if let Some(connection_env) = self.connection.strip_prefix("$") {
env::var(connection_env).map_err(|_| Error::MissedEnvVar(connection_env.to_string()))
} else {
Ok(connection)
Ok(self.connection.clone())
}
}
}
@@ -53,7 +109,7 @@ impl Default for Config {
manifest_root: PathBuf::default(),
root: PathBuf::from("database"),
database: DatabaseConfig {
connection: Some(String::from(DEFAULT_DATABASE_CONNECTION_ENV)),
connection: default_database_connection_env(),
..Default::default()
},
}

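Because SupportedDatabaseClient is deserialized with a lowercase rename, the client can be pinned in Migra.toml instead of being inferred from the connection-string prefix; a small illustrative sketch (values are examples, not part of the commit):

    root = "database"

    [database]
    # optional; when omitted, it is inferred from the "postgres://" or "mysql://" prefix
    client = "mysql"
    connection = "mysql://mysql:mysql@localhost:6001/migra_tests"
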
View file

@@ -1,3 +1,13 @@
mod postgres;
cfg_if! {
if #[cfg(feature = "postgres")] {
mod postgres;
pub use self::postgres::*;
}
}
pub use self::postgres::*;
cfg_if! {
if #[cfg(feature = "mysql")] {
mod mysql;
pub use self::mysql::*;
}
}

View file

@@ -0,0 +1,53 @@
use crate::database::builder::merge_query_with_params;
use crate::database::prelude::*;
use crate::error::StdResult;
use mysql::prelude::*;
use mysql::{Pool, PooledConn};
pub struct MySqlConnection {
pool: Pool,
}
impl MySqlConnection {
fn client(&self) -> StdResult<PooledConn> {
let conn = self.pool.get_conn()?;
Ok(conn)
}
}
impl OpenDatabaseConnection for MySqlConnection {
fn open(connection_string: &str) -> StdResult<Self> {
let pool = Pool::new(connection_string)?;
Ok(MySqlConnection { pool })
}
}
impl DatabaseConnection for MySqlConnection {
fn migration_table_stmt(&self) -> String {
r#"CREATE TABLE IF NOT EXISTS migrations (
id int AUTO_INCREMENT PRIMARY KEY,
name varchar(256) NOT NULL UNIQUE
)"#
.to_string()
}
fn batch_execute(&mut self, query: &str) -> StdResult<()> {
self.client()?.query_drop(query)?;
Ok(())
}
fn execute<'b>(&mut self, query: &str, params: ToSqlParams<'b>) -> StdResult<u64> {
let stmt = merge_query_with_params(query, params);
let res = self.client()?.query_first(stmt)?.unwrap_or_default();
Ok(res)
}
fn query<'b>(&mut self, query: &str, params: ToSqlParams<'b>) -> StdResult<Vec<Vec<String>>> {
let stmt = merge_query_with_params(query, params);
let res = self.client()?.query_map(stmt, |(column,)| vec![column])?;
Ok(res)
}
}

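A rough usage sketch for the new adapter, assuming the MySQL service from docker-compose.yml is running and that this code lives inside migra-cli where MySqlConnection and its traits are in scope (the helper name is hypothetical):

    // Sketch only: open a pooled connection and ensure the migrations table exists.
    fn try_mysql_adapter() -> StdResult<()> {
        let mut conn = MySqlConnection::open("mysql://mysql:mysql@localhost:6001/migra_tests")?;
        let stmt = conn.migration_table_stmt();
        conn.batch_execute(&stmt)?;
        Ok(())
    }
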
View file

@@ -15,6 +15,14 @@ impl OpenDatabaseConnection for PostgresConnection {
}
impl DatabaseConnection for PostgresConnection {
fn migration_table_stmt(&self) -> String {
r#"CREATE TABLE IF NOT EXISTS migrations (
id serial PRIMARY KEY,
name text NOT NULL UNIQUE
)"#
.to_string()
}
fn batch_execute(&mut self, query: &str) -> StdResult<()> {
self.client.batch_execute(query)?;
Ok(())

View file

@@ -10,6 +10,8 @@ pub trait OpenDatabaseConnection: Sized {
}
pub trait DatabaseConnection {
fn migration_table_stmt(&self) -> String;
fn batch_execute(&mut self, query: &str) -> StdResult<()>;
fn execute<'b>(&mut self, query: &str, params: ToSqlParams<'b>) -> StdResult<u64>;
@@ -26,13 +28,16 @@ impl DatabaseConnectionManager {
config: &DatabaseConfig,
connection_string: &str,
) -> StdResult<Self> {
let conn = match config.client()? {
SupportedDatabaseClient::Postgres => PostgresConnection::open(&connection_string)?,
let conn: AnyConnection = match config.client() {
#[cfg(feature = "postgres")]
SupportedDatabaseClient::Postgres => {
Box::new(PostgresConnection::open(&connection_string)?)
}
#[cfg(feature = "mysql")]
SupportedDatabaseClient::Mysql => Box::new(MySqlConnection::open(&connection_string)?),
};
Ok(DatabaseConnectionManager {
conn: Box::new(conn),
})
Ok(DatabaseConnectionManager { conn })
}
pub fn connect(config: &DatabaseConfig) -> StdResult<Self> {

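AnyConnection itself is not shown in this hunk; judging from the Box::new calls, it is presumably a boxed trait object along these lines (an assumption, not taken from the commit):

    // Assumed shape of the alias used above; the real definition lives elsewhere in connection.rs.
    pub type AnyConnection = Box<dyn DatabaseConnection>;
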
View file

@@ -59,9 +59,12 @@ impl MigrationManager {
}
pub fn is_migrations_table_not_found<D: std::fmt::Display>(error: D) -> bool {
error
.to_string()
.contains(r#"relation "migrations" does not exist"#)
let error_message = error.to_string();
// Postgres error
error_message.contains(r#"relation "migrations" does not exist"#)
// MySQL error
|| error_message.contains("ERROR 1146 (42S02)")
}
pub trait ManageMigration {
@@ -101,12 +104,8 @@ impl ManageMigration for MigrationManager {
}
fn create_migrations_table(&self, conn: &mut AnyConnection) -> StdResult<()> {
conn.batch_execute(
r#"CREATE TABLE IF NOT EXISTS migrations (
id serial PRIMARY KEY,
name text NOT NULL UNIQUE
)"#,
)
let stmt = conn.migration_table_stmt();
conn.batch_execute(&stmt)
}
fn insert_migration_info(&self, conn: &mut AnyConnection, name: &str) -> StdResult<u64> {

View file

@@ -1,6 +1,12 @@
#![deny(clippy::all)]
#![forbid(unsafe_code)]
#[macro_use]
extern crate cfg_if;
#[cfg(not(any(feature = "postgres", feature = "mysql")))]
compile_error!(r#"Either features "postgres" or "mysql" must be enabled for "migra" crate"#);
mod commands;
mod config;
mod database;

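With this guard, building without either backend now fails at compile time; roughly (exact cargo output may differ):

    cargo build --no-default-features
    # error: Either features "postgres" or "mysql" must be enabled for "migra" crate
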
View file

@@ -1,4 +1,5 @@
pub use assert_cmd::prelude::*;
pub use cfg_if::cfg_if;
pub use predicates::str::contains;
pub use std::process::Command;
@@ -6,12 +7,17 @@ pub type TestResult = std::result::Result<(), Box<dyn std::error::Error>>;
pub const ROOT_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/data/");
pub fn path_to_file(file_name: &'static str) -> String {
ROOT_PATH.to_owned() + file_name
pub fn path_to_file<D: std::fmt::Display>(file_name: D) -> String {
format!("{}{}", ROOT_PATH, file_name)
}
pub fn database_manifest_path<D: std::fmt::Display>(database_name: D) -> String {
path_to_file(format!("Migra_{}.toml", database_name))
}
pub const DATABASE_URL_DEFAULT_ENV_NAME: &str = "DATABASE_URL";
pub const DATABASE_URL_ENV_VALUE: &str = "postgres://postgres:postgres@localhost:6000/migra_tests";
pub const POSTGRES_URL: &str = "postgres://postgres:postgres@localhost:6000/migra_tests";
pub const MYSQL_URL: &str = "mysql://mysql:mysql@localhost:6001/migra_tests";
pub struct Env {
key: &'static str,
@@ -112,26 +118,37 @@ Pending migrations:
#[test]
fn empty_migration_list_with_db() -> TestResult {
let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
fn inner(connection_string: &'static str) -> TestResult {
let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, connection_string);
Command::cargo_bin("migra")?
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
Command::cargo_bin("migra")?
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
Pending migrations:
"#,
));
));
drop(env);
drop(env);
Ok(())
}
#[cfg(feature = "postgres")]
inner(POSTGRES_URL)?;
#[cfg(feature = "mysql")]
inner(MYSQL_URL)?;
Ok(())
}
#[test]
#[cfg(feature = "postgres")]
fn empty_migration_list_with_url_in_manifest() -> TestResult {
Command::cargo_bin("migra")?
.arg("-c")
@@ -151,8 +168,9 @@ Pending migrations:
}
#[test]
#[cfg(feature = "postgres")]
fn empty_migration_list_with_env_in_manifest() -> TestResult {
let env = Env::new("DB_URL", DATABASE_URL_ENV_VALUE);
let env = Env::new("DB_URL", POSTGRES_URL);
Command::cargo_bin("migra")?
.arg("-c")
@@ -175,105 +193,127 @@ Pending migrations:
#[test]
fn empty_applied_migrations() -> TestResult {
let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
fn inner(database_name: &'static str) -> TestResult {
Command::cargo_bin("migra")?
.arg("-c")
.arg(database_manifest_path(database_name))
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
Pending migrations:
210218232851_create_articles
210218233414_create_persons
"#,
));
));
drop(env);
Ok(())
}
#[cfg(feature = "postgres")]
inner("postgres")?;
#[cfg(feature = "mysql")]
inner("mysql")?;
Ok(())
}
#[test]
fn applied_all_migrations() -> TestResult {
let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
fn inner(database_name: &'static str) -> TestResult {
let manifest_path = database_manifest_path(database_name);
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("up")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("up")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
210218232851_create_articles
210218233414_create_persons
Pending migrations:
"#,
));
));
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("down")
.arg("--all")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("down")
.arg("--all")
.assert()
.success();
drop(env);
Ok(())
}
#[cfg(feature = "postgres")]
inner("postgres")?;
#[cfg(feature = "mysql")]
inner("mysql")?;
Ok(())
}
#[test]
fn applied_one_migrations() -> TestResult {
let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
fn inner(database_name: &'static str) -> TestResult {
let manifest_path = database_manifest_path(database_name);
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("up")
.arg("-n")
.arg("1")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("up")
.arg("-n")
.arg("1")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("ls")
.assert()
.success()
.stdout(contains(
r#"Applied migrations:
210218232851_create_articles
Pending migrations:
210218233414_create_persons
"#,
));
));
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("down")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("down")
.assert()
.success();
drop(env);
Ok(())
}
#[cfg(feature = "postgres")]
inner("postgres")?;
#[cfg(feature = "mysql")]
inner("mysql")?;
Ok(())
}
@@ -285,46 +325,56 @@ mod make {
#[test]
fn make_migration_directory() -> TestResult {
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_url.toml"))
.arg("make")
.arg("test")
.assert()
.success()
.stdout(contains("Structure for migration has been created in"));
fn inner(database_name: &'static str) -> TestResult {
Command::cargo_bin("migra")?
.arg("-c")
.arg(database_manifest_path(database_name))
.arg("make")
.arg("test")
.assert()
.success()
.stdout(contains("Structure for migration has been created in"));
let entries = fs::read_dir(path_to_file("migrations"))?
.map(|entry| entry.map(|e| e.path()))
.collect::<Result<Vec<_>, std::io::Error>>()?;
let entries = fs::read_dir(path_to_file(format!("{}/migrations", database_name)))?
.map(|entry| entry.map(|e| e.path()))
.collect::<Result<Vec<_>, std::io::Error>>()?;
let dir_paths = entries
.iter()
.filter_map(|path| {
path.to_str().and_then(|path| {
if path.ends_with("_test") {
Some(path)
} else {
None
}
let dir_paths = entries
.iter()
.filter_map(|path| {
path.to_str().and_then(|path| {
if path.ends_with("_test") {
Some(path)
} else {
None
}
})
})
})
.collect::<Vec<_>>();
.collect::<Vec<_>>();
for dir_path in dir_paths.iter() {
let upgrade_content = fs::read_to_string(format!("{}/up.sql", dir_path))?;
let downgrade_content = fs::read_to_string(format!("{}/down.sql", dir_path))?;
for dir_path in dir_paths.iter() {
let upgrade_content = fs::read_to_string(format!("{}/up.sql", dir_path))?;
let downgrade_content = fs::read_to_string(format!("{}/down.sql", dir_path))?;
assert_eq!(upgrade_content, "-- Your SQL goes here\n\n");
assert_eq!(upgrade_content, "-- Your SQL goes here\n\n");
assert_eq!(
downgrade_content,
"-- This file should undo anything in `up.sql`\n\n"
);
assert_eq!(
downgrade_content,
"-- This file should undo anything in `up.sql`\n\n"
);
fs::remove_dir_all(dir_path)?;
fs::remove_dir_all(dir_path)?;
}
Ok(())
}
#[cfg(feature = "postgres")]
inner("postgres")?;
#[cfg(feature = "mysql")]
inner("mysql")?;
Ok(())
}
}
@@ -334,40 +384,66 @@ mod upgrade {
#[test]
fn applied_all_migrations() -> TestResult {
let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
fn inner<ValidateFn>(database_name: &'static str, validate: ValidateFn) -> TestResult
where
ValidateFn: Fn() -> TestResult,
{
let manifest_path = database_manifest_path(database_name);
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("up")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("up")
.assert()
.success();
let mut conn = postgres::Client::connect(DATABASE_URL_ENV_VALUE, postgres::NoTls)?;
let res = conn.query("SELECT p.id, a.id FROM persons AS p, articles AS a", &[])?;
validate()?;
assert_eq!(
res.into_iter()
.map(|row| (row.get(0), row.get(1)))
.collect::<Vec<(i32, i32)>>(),
Vec::new()
);
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("down")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("down")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(&manifest_path)
.arg("down")
.assert()
.success();
Command::cargo_bin("migra")?
.arg("-c")
.arg(path_to_file("Migra_env.toml"))
.arg("down")
.assert()
.success();
Ok(())
}
drop(env);
#[cfg(feature = "postgres")]
inner("postgres", || {
let mut conn = postgres::Client::connect(POSTGRES_URL, postgres::NoTls)?;
let res = conn.query("SELECT p.id, a.id FROM persons AS p, articles AS a", &[])?;
assert_eq!(
res.into_iter()
.map(|row| (row.get(0), row.get(1)))
.collect::<Vec<(i32, i32)>>(),
Vec::new()
);
Ok(())
})?;
#[cfg(feature = "mysql")]
inner("mysql", || {
use mysql::prelude::*;
let pool = mysql::Pool::new(MYSQL_URL)?;
let mut conn = pool.get_conn()?;
let res = conn.query_drop("SELECT p.id, a.id FROM persons AS p, articles AS a")?;
assert_eq!(res, ());
Ok(())
})?;
Ok(())
}

View file

@@ -1,4 +1,4 @@
root = "./"
root = "./postgres"
[database]
connection = "$DATABASE_URL"

View file

@@ -0,0 +1,4 @@
root = "./mysql"
[database]
connection = "mysql://mysql:mysql@localhost:6001/migra_tests"

View file

@@ -1,4 +1,4 @@
root = "./"
root = "./postgres"
[database]
connection = "postgres://postgres:postgres@localhost:6000/migra_tests"

View file

@@ -0,0 +1,8 @@
-- Your SQL goes here
CREATE TABLE articles (
id int AUTO_INCREMENT PRIMARY KEY,
title text NOT NULL CHECK (length(title) > 0),
content text NOT NULL,
created_at timestamp NOT NULL DEFAULT current_timestamp
);

View file

@@ -0,0 +1,12 @@
-- Your SQL goes here
CREATE TABLE persons (
id int AUTO_INCREMENT PRIMARY KEY,
email varchar(256) NOT NULL UNIQUE,
display_name text NOT NULL,
created_at timestamp NOT NULL DEFAULT current_timestamp
);
ALTER TABLE articles
ADD COLUMN author_person_id int NULL
REFERENCES persons (id) ON UPDATE CASCADE ON DELETE CASCADE;

View file

@@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
DROP TABLE articles;

View file

@@ -0,0 +1,6 @@
-- This file should undo anything in `up.sql`
ALTER TABLE articles
DROP COLUMN author_person_id;
DROP TABLE persons;