feat: add mysql database support

Dmitriy Pleshevskiy 2021-03-26 02:10:41 +03:00
parent c05bac36e7
commit 18bf265510
24 changed files with 433 additions and 166 deletions


@@ -31,6 +31,7 @@ jobs:
         with:
           path: ~/.cargo/registry
           key: ${{ runner.os }}-cargo-registry-${{ hashFiles('Cargo.lock') }}
       - name: Cache cargo index
         uses: actions/cache@v1
         with:
@@ -48,7 +49,7 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: test
-          args: -- --test-threads=1
+          args: --all-features -- --test-threads=1
   clippy:
     name: clippy (ubuntu-latest, stable)


@@ -2,5 +2,6 @@
   "cSpell.words": [
     "migra"
   ],
-  "editor.formatOnSave": true
+  "editor.formatOnSave": true,
+  "rust.all_features": true
 }


@@ -46,6 +46,7 @@ For more information about the commands, simply run `migra help`
 ### Supported databases
 - [x] Postgres
+- [x] MySQL
 ## License


@@ -1,4 +1,4 @@
-version: '3'
+version: "3"
 services:
   postgres:
@@ -13,6 +13,22 @@ services:
     ports:
       - 6000:5432
+  mysql:
+    image: mysql
+    container_name: migra.mysql
+    command: --default-authentication-plugin=mysql_native_password
+    environment:
+      MYSQL_ROOT_PASSWORD: example
+      MYSQL_DATABASE: "migra_tests"
+      MYSQL_USER: "mysql"
+      MYSQL_PASSWORD: "mysql"
+    volumes:
+      - mysql_data:/var/lib/mysql
+    ports:
+      - 6001:3306
 volumes:
   postgres_data:
     driver: local
+  mysql_data:
+    driver: local


@@ -14,15 +14,17 @@ readme = "../README.md"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+cfg-if = "1.0"
 structopt = "0.3"
 serde = { version = "1.0", features = ["derive"] }
 toml = "0.5"
 chrono = "0.4"
-postgres = "0.19"
+postgres = { version = "0.19", optional = true }
+mysql = { version = "20.1", optional = true }
 dotenv = { version = "0.15", optional = true }
 
 [features]
-default = []
+default = ["postgres"]
 
 [badges]
 maintenance = { status = "actively-developed" }


@@ -7,6 +7,23 @@ use std::{env, fs, io};
 pub(crate) const MIGRA_TOML_FILENAME: &str = "Migra.toml";
 pub(crate) const DEFAULT_DATABASE_CONNECTION_ENV: &str = "$DATABASE_URL";
 
+fn default_database_connection_env() -> String {
+    DEFAULT_DATABASE_CONNECTION_ENV.to_owned()
+}
+
+#[cfg(any(not(feature = "postgres"), not(feature = "mysql")))]
+macro_rules! please_install_with {
+    (feature $database_name:expr) => {
+        panic!(
+            r#"You cannot use migra for "{database_name}".
+You need to reinstall crate with "{database_name}" feature.
+cargo install migra-cli --features ${database_name}"#,
+            database_name = $database_name
+        );
+    };
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 pub(crate) struct Config {
     #[serde(skip)]
@@ -19,30 +36,69 @@ pub(crate) struct Config {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
 pub(crate) enum SupportedDatabaseClient {
+    #[cfg(feature = "postgres")]
     Postgres,
+    #[cfg(feature = "mysql")]
+    Mysql,
 }
 
+impl Default for SupportedDatabaseClient {
+    fn default() -> Self {
+        cfg_if! {
+            if #[cfg(feature = "postgres")] {
+                SupportedDatabaseClient::Postgres
+            } else if #[cfg(feature = "mysql")] {
+                SupportedDatabaseClient::Mysql
+            }
+        }
+    }
+}
+
 #[derive(Debug, Clone, Default, Serialize, Deserialize)]
 pub(crate) struct DatabaseConfig {
     pub client: Option<SupportedDatabaseClient>,
-    pub connection: Option<String>,
+
+    #[serde(default = "default_database_connection_env")]
+    pub connection: String,
 }
 
 impl DatabaseConfig {
-    pub fn client(&self) -> MigraResult<SupportedDatabaseClient> {
-        Ok(SupportedDatabaseClient::Postgres)
+    pub fn client(&self) -> SupportedDatabaseClient {
+        self.client.clone().unwrap_or_else(|| {
+            self.connection_string()
+                .ok()
+                .and_then(|connection_string| {
+                    if connection_string.starts_with("postgres://") {
+                        cfg_if! {
+                            if #[cfg(feature = "postgres")] {
+                                Some(SupportedDatabaseClient::Postgres)
+                            } else {
+                                please_install_with!(feature "postgres")
+                            }
+                        }
+                    } else if connection_string.starts_with("mysql://") {
+                        cfg_if! {
+                            if #[cfg(feature = "mysql")] {
+                                Some(SupportedDatabaseClient::Mysql)
+                            } else {
+                                please_install_with!(feature "mysql")
+                            }
+                        }
+                    } else {
+                        None
+                    }
+                })
+                .unwrap_or_default()
+        })
     }
 
     pub fn connection_string(&self) -> MigraResult<String> {
-        let connection = self
-            .connection
-            .clone()
-            .unwrap_or_else(|| String::from(DEFAULT_DATABASE_CONNECTION_ENV));
-
-        if let Some(connection_env) = connection.strip_prefix("$") {
+        if let Some(connection_env) = self.connection.strip_prefix("$") {
             env::var(connection_env).map_err(|_| Error::MissedEnvVar(connection_env.to_string()))
         } else {
-            Ok(connection)
+            Ok(self.connection.clone())
         }
     }
 }
@@ -53,7 +109,7 @@ impl Default for Config {
             manifest_root: PathBuf::default(),
             root: PathBuf::from("database"),
             database: DatabaseConfig {
-                connection: Some(String::from(DEFAULT_DATABASE_CONNECTION_ENV)),
+                connection: default_database_connection_env(),
                 ..Default::default()
             },
         }
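The interesting part of this hunk is that `client()` now falls back to sniffing the URL scheme when no `client` is set in `Migra.toml`. A minimal, standalone sketch of that detection rule with the `cfg_if`/feature gating stripped out (the free function `detect_client` and the values in `main` are illustrative only, not part of the crate):

#[derive(Debug, PartialEq)]
enum SupportedDatabaseClient {
    Postgres,
    Mysql,
}

// Pick a client from the connection string prefix, as `DatabaseConfig::client` does above.
fn detect_client(connection_string: &str) -> Option<SupportedDatabaseClient> {
    if connection_string.starts_with("postgres://") {
        Some(SupportedDatabaseClient::Postgres)
    } else if connection_string.starts_with("mysql://") {
        Some(SupportedDatabaseClient::Mysql)
    } else {
        None
    }
}

fn main() {
    assert_eq!(
        detect_client("mysql://mysql:mysql@localhost:6001/migra_tests"),
        Some(SupportedDatabaseClient::Mysql)
    );
    assert_eq!(detect_client("foo://bar"), None);
}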


@@ -1,3 +1,13 @@
-mod postgres;
+cfg_if! {
+    if #[cfg(feature = "postgres")] {
+        mod postgres;
+        pub use self::postgres::*;
+    }
+}
 
-pub use self::postgres::*;
+cfg_if! {
+    if #[cfg(feature = "mysql")] {
+        mod mysql;
+        pub use self::mysql::*;
+    }
+}


@@ -0,0 +1,53 @@
+use crate::database::builder::merge_query_with_params;
+use crate::database::prelude::*;
+use crate::error::StdResult;
+use mysql::prelude::*;
+use mysql::{Pool, PooledConn};
+
+pub struct MySqlConnection {
+    pool: Pool,
+}
+
+impl MySqlConnection {
+    fn client(&self) -> StdResult<PooledConn> {
+        let conn = self.pool.get_conn()?;
+        Ok(conn)
+    }
+}
+
+impl OpenDatabaseConnection for MySqlConnection {
+    fn open(connection_string: &str) -> StdResult<Self> {
+        let pool = Pool::new(connection_string)?;
+        Ok(MySqlConnection { pool })
+    }
+}
+
+impl DatabaseConnection for MySqlConnection {
+    fn migration_table_stmt(&self) -> String {
+        r#"CREATE TABLE IF NOT EXISTS migrations (
+            id int AUTO_INCREMENT PRIMARY KEY,
+            name varchar(256) NOT NULL UNIQUE
+        )"#
+        .to_string()
+    }
+
+    fn batch_execute(&mut self, query: &str) -> StdResult<()> {
+        self.client()?.query_drop(query)?;
+        Ok(())
+    }
+
+    fn execute<'b>(&mut self, query: &str, params: ToSqlParams<'b>) -> StdResult<u64> {
+        let stmt = merge_query_with_params(query, params);
+
+        let res = self.client()?.query_first(stmt)?.unwrap_or_default();
+        Ok(res)
+    }
+
+    fn query<'b>(&mut self, query: &str, params: ToSqlParams<'b>) -> StdResult<Vec<Vec<String>>> {
+        let stmt = merge_query_with_params(query, params);
+
+        let res = self.client()?.query_map(stmt, |(column,)| vec![column])?;
+        Ok(res)
+    }
+}
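Unlike the Postgres adapter, which holds a single open `Client`, the new MySQL adapter stores a `Pool` and checks out a `PooledConn` per call. A rough sketch of that same pattern against the `mysql` crate directly, reusing the docker-compose URL added above and the DDL from `migration_table_stmt` (everything here is a standalone illustration, not the crate's code path):

use mysql::prelude::*; // brings `Queryable` into scope for `query_drop`
use mysql::Pool;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connection URL of the `mysql` service defined in docker-compose.yml above.
    let pool = Pool::new("mysql://mysql:mysql@localhost:6001/migra_tests")?;

    // Every operation borrows its own pooled connection, mirroring `MySqlConnection::client`.
    let mut conn = pool.get_conn()?;
    conn.query_drop(
        "CREATE TABLE IF NOT EXISTS migrations (
            id int AUTO_INCREMENT PRIMARY KEY,
            name varchar(256) NOT NULL UNIQUE
        )",
    )?;

    Ok(())
}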


@@ -15,6 +15,14 @@ impl OpenDatabaseConnection for PostgresConnection {
 }
 
 impl DatabaseConnection for PostgresConnection {
+    fn migration_table_stmt(&self) -> String {
+        r#"CREATE TABLE IF NOT EXISTS migrations (
+            id serial PRIMARY KEY,
+            name text NOT NULL UNIQUE
+        )"#
+        .to_string()
+    }
+
     fn batch_execute(&mut self, query: &str) -> StdResult<()> {
         self.client.batch_execute(query)?;
         Ok(())


@@ -10,6 +10,8 @@ pub trait OpenDatabaseConnection: Sized {
 }
 
 pub trait DatabaseConnection {
+    fn migration_table_stmt(&self) -> String;
+
     fn batch_execute(&mut self, query: &str) -> StdResult<()>;
 
     fn execute<'b>(&mut self, query: &str, params: ToSqlParams<'b>) -> StdResult<u64>;
@@ -26,13 +28,16 @@ impl DatabaseConnectionManager {
         config: &DatabaseConfig,
         connection_string: &str,
     ) -> StdResult<Self> {
-        let conn = match config.client()? {
-            SupportedDatabaseClient::Postgres => PostgresConnection::open(&connection_string)?,
+        let conn: AnyConnection = match config.client() {
+            #[cfg(feature = "postgres")]
+            SupportedDatabaseClient::Postgres => {
+                Box::new(PostgresConnection::open(&connection_string)?)
+            }
+            #[cfg(feature = "mysql")]
+            SupportedDatabaseClient::Mysql => Box::new(MySqlConnection::open(&connection_string)?),
         };
 
-        Ok(DatabaseConnectionManager {
-            conn: Box::new(conn),
-        })
+        Ok(DatabaseConnectionManager { conn })
     }
 
     pub fn connect(config: &DatabaseConfig) -> StdResult<Self> {


@@ -59,9 +59,12 @@ impl MigrationManager {
 }
 
 pub fn is_migrations_table_not_found<D: std::fmt::Display>(error: D) -> bool {
-    error
-        .to_string()
-        .contains(r#"relation "migrations" does not exist"#)
+    let error_message = error.to_string();
+
+    // Postgres error
+    error_message.contains(r#"relation "migrations" does not exist"#)
+        // MySQL error
+        || error_message.contains("ERROR 1146 (42S02)")
 }
 
 pub trait ManageMigration {
@@ -101,12 +104,8 @@ impl ManageMigration for MigrationManager {
     }
 
     fn create_migrations_table(&self, conn: &mut AnyConnection) -> StdResult<()> {
-        conn.batch_execute(
-            r#"CREATE TABLE IF NOT EXISTS migrations (
-                id serial PRIMARY KEY,
-                name text NOT NULL UNIQUE
-            )"#,
-        )
+        let stmt = conn.migration_table_stmt();
+        conn.batch_execute(&stmt)
     }
 
     fn insert_migration_info(&self, conn: &mut AnyConnection, name: &str) -> StdResult<u64> {
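Since both backends now funnel their "missing migrations table" errors through the same string check, the predicate is easy to exercise on its own. A small free-standing copy of `is_migrations_table_not_found` with the two message patterns from this hunk (the sample messages in `main` are made up for illustration):

fn is_migrations_table_not_found<D: std::fmt::Display>(error: D) -> bool {
    let error_message = error.to_string();

    // Postgres reports a missing relation...
    error_message.contains(r#"relation "migrations" does not exist"#)
        // ...while MySQL reports error 1146 (SQLSTATE 42S02).
        || error_message.contains("ERROR 1146 (42S02)")
}

fn main() {
    assert!(is_migrations_table_not_found(
        r#"db error: ERROR: relation "migrations" does not exist"#
    ));
    assert!(is_migrations_table_not_found(
        "ERROR 1146 (42S02): Table 'migra_tests.migrations' doesn't exist"
    ));
    assert!(!is_migrations_table_not_found("connection refused"));
}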


@@ -1,6 +1,12 @@
 #![deny(clippy::all)]
 #![forbid(unsafe_code)]
 
+#[macro_use]
+extern crate cfg_if;
+
+#[cfg(not(any(feature = "postgres", feature = "mysql")))]
+compile_error!(r#"Either features "postgres" or "mysql" must be enabled for "migra" crate"#);
+
 mod commands;
 mod config;
 mod database;


@@ -1,4 +1,5 @@
 pub use assert_cmd::prelude::*;
+pub use cfg_if::cfg_if;
 pub use predicates::str::contains;
 pub use std::process::Command;
@@ -6,12 +7,17 @@ pub type TestResult = std::result::Result<(), Box<dyn std::error::Error>>;
 pub const ROOT_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/data/");
 
-pub fn path_to_file(file_name: &'static str) -> String {
-    ROOT_PATH.to_owned() + file_name
+pub fn path_to_file<D: std::fmt::Display>(file_name: D) -> String {
+    format!("{}{}", ROOT_PATH, file_name)
+}
+
+pub fn database_manifest_path<D: std::fmt::Display>(database_name: D) -> String {
+    path_to_file(format!("Migra_{}.toml", database_name))
 }
 
 pub const DATABASE_URL_DEFAULT_ENV_NAME: &str = "DATABASE_URL";
-pub const DATABASE_URL_ENV_VALUE: &str = "postgres://postgres:postgres@localhost:6000/migra_tests";
+pub const POSTGRES_URL: &str = "postgres://postgres:postgres@localhost:6000/migra_tests";
+pub const MYSQL_URL: &str = "mysql://mysql:mysql@localhost:6001/migra_tests";
 
 pub struct Env {
     key: &'static str,
@@ -112,26 +118,37 @@
 #[test]
 fn empty_migration_list_with_db() -> TestResult {
-    let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
+    fn inner(connection_string: &'static str) -> TestResult {
+        let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, connection_string);
 
         Command::cargo_bin("migra")?
             .arg("ls")
             .assert()
             .success()
             .stdout(contains(
                 r#"Applied migrations:
 
 Pending migrations:
 "#,
             ));
 
         drop(env);
+        Ok(())
+    }
+
+    #[cfg(feature = "postgres")]
+    inner(POSTGRES_URL)?;
+
+    #[cfg(feature = "mysql")]
+    inner(MYSQL_URL)?;
 
     Ok(())
 }
 
 #[test]
+#[cfg(feature = "postgres")]
 fn empty_migration_list_with_url_in_manifest() -> TestResult {
     Command::cargo_bin("migra")?
         .arg("-c")
@@ -151,8 +168,9 @@ Pending migrations:
 }
 
 #[test]
+#[cfg(feature = "postgres")]
 fn empty_migration_list_with_env_in_manifest() -> TestResult {
-    let env = Env::new("DB_URL", DATABASE_URL_ENV_VALUE);
+    let env = Env::new("DB_URL", POSTGRES_URL);
 
     Command::cargo_bin("migra")?
         .arg("-c")
@@ -175,105 +193,127 @@ Pending migrations:
 #[test]
 fn empty_applied_migrations() -> TestResult {
-    let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
+    fn inner(database_name: &'static str) -> TestResult {
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(database_manifest_path(database_name))
             .arg("ls")
             .assert()
             .success()
             .stdout(contains(
                 r#"Applied migrations:
 
 Pending migrations:
 210218232851_create_articles
 210218233414_create_persons
 "#,
             ));
 
-    drop(env);
+        Ok(())
+    }
+
+    #[cfg(feature = "postgres")]
+    inner("postgres")?;
+
+    #[cfg(feature = "mysql")]
+    inner("mysql")?;
 
     Ok(())
 }
 
 #[test]
 fn applied_all_migrations() -> TestResult {
-    let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
+    fn inner(database_name: &'static str) -> TestResult {
+        let manifest_path = database_manifest_path(database_name);
 
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(&manifest_path)
            .arg("up")
             .assert()
             .success();
 
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(&manifest_path)
             .arg("ls")
             .assert()
             .success()
             .stdout(contains(
                 r#"Applied migrations:
 210218232851_create_articles
 210218233414_create_persons
 
 Pending migrations:
 "#,
             ));
 
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(&manifest_path)
             .arg("down")
             .arg("--all")
             .assert()
             .success();
 
-    drop(env);
+        Ok(())
+    }
+
+    #[cfg(feature = "postgres")]
+    inner("postgres")?;
+
+    #[cfg(feature = "mysql")]
+    inner("mysql")?;
 
     Ok(())
 }
 
 #[test]
 fn applied_one_migrations() -> TestResult {
-    let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
+    fn inner(database_name: &'static str) -> TestResult {
+        let manifest_path = database_manifest_path(database_name);
 
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(&manifest_path)
             .arg("up")
             .arg("-n")
             .arg("1")
             .assert()
             .success();
 
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(&manifest_path)
             .arg("ls")
             .assert()
             .success()
             .stdout(contains(
                 r#"Applied migrations:
 210218232851_create_articles
 
 Pending migrations:
 210218233414_create_persons
 "#,
             ));
 
         Command::cargo_bin("migra")?
             .arg("-c")
-            .arg(path_to_file("Migra_env.toml"))
+            .arg(&manifest_path)
             .arg("down")
             .assert()
             .success();
 
-    drop(env);
+        Ok(())
+    }
+
+    #[cfg(feature = "postgres")]
+    inner("postgres")?;
+
+    #[cfg(feature = "mysql")]
+    inner("mysql")?;
 
     Ok(())
 }
@@ -285,46 +325,56 @@ mod make {
     #[test]
     fn make_migration_directory() -> TestResult {
-        Command::cargo_bin("migra")?
-            .arg("-c")
-            .arg(path_to_file("Migra_url.toml"))
-            .arg("make")
-            .arg("test")
-            .assert()
-            .success()
-            .stdout(contains("Structure for migration has been created in"));
+        fn inner(database_name: &'static str) -> TestResult {
+            Command::cargo_bin("migra")?
+                .arg("-c")
+                .arg(database_manifest_path(database_name))
+                .arg("make")
+                .arg("test")
+                .assert()
+                .success()
+                .stdout(contains("Structure for migration has been created in"));
 
-        let entries = fs::read_dir(path_to_file("migrations"))?
-            .map(|entry| entry.map(|e| e.path()))
-            .collect::<Result<Vec<_>, std::io::Error>>()?;
+            let entries = fs::read_dir(path_to_file(format!("{}/migrations", database_name)))?
+                .map(|entry| entry.map(|e| e.path()))
+                .collect::<Result<Vec<_>, std::io::Error>>()?;
 
             let dir_paths = entries
                 .iter()
                 .filter_map(|path| {
                     path.to_str().and_then(|path| {
                         if path.ends_with("_test") {
                             Some(path)
                         } else {
                             None
                         }
                     })
                 })
                 .collect::<Vec<_>>();
 
             for dir_path in dir_paths.iter() {
                 let upgrade_content = fs::read_to_string(format!("{}/up.sql", dir_path))?;
                 let downgrade_content = fs::read_to_string(format!("{}/down.sql", dir_path))?;
 
                 assert_eq!(upgrade_content, "-- Your SQL goes here\n\n");
                 assert_eq!(
                     downgrade_content,
                     "-- This file should undo anything in `up.sql`\n\n"
                 );
 
                 fs::remove_dir_all(dir_path)?;
             }
+
+            Ok(())
+        }
+
+        #[cfg(feature = "postgres")]
+        inner("postgres")?;
+
+        #[cfg(feature = "mysql")]
+        inner("mysql")?;
 
         Ok(())
     }
 }
@@ -334,40 +384,66 @@ mod upgrade {
     #[test]
     fn applied_all_migrations() -> TestResult {
-        let env = Env::new(DATABASE_URL_DEFAULT_ENV_NAME, DATABASE_URL_ENV_VALUE);
+        fn inner<ValidateFn>(database_name: &'static str, validate: ValidateFn) -> TestResult
+        where
+            ValidateFn: Fn() -> TestResult,
+        {
+            let manifest_path = database_manifest_path(database_name);
 
             Command::cargo_bin("migra")?
                 .arg("-c")
-                .arg(path_to_file("Migra_env.toml"))
+                .arg(&manifest_path)
                 .arg("up")
                 .assert()
                 .success();
 
-        let mut conn = postgres::Client::connect(DATABASE_URL_ENV_VALUE, postgres::NoTls)?;
-        let res = conn.query("SELECT p.id, a.id FROM persons AS p, articles AS a", &[])?;
-
-        assert_eq!(
-            res.into_iter()
-                .map(|row| (row.get(0), row.get(1)))
-                .collect::<Vec<(i32, i32)>>(),
-            Vec::new()
-        );
+            validate()?;
 
             Command::cargo_bin("migra")?
                 .arg("-c")
-                .arg(path_to_file("Migra_env.toml"))
+                .arg(&manifest_path)
                 .arg("down")
                 .assert()
                 .success();
 
             Command::cargo_bin("migra")?
                 .arg("-c")
-                .arg(path_to_file("Migra_env.toml"))
+                .arg(&manifest_path)
                 .arg("down")
                 .assert()
                 .success();
 
-        drop(env);
+            Ok(())
+        }
+
+        #[cfg(feature = "postgres")]
+        inner("postgres", || {
+            let mut conn = postgres::Client::connect(POSTGRES_URL, postgres::NoTls)?;
+            let res = conn.query("SELECT p.id, a.id FROM persons AS p, articles AS a", &[])?;
+
+            assert_eq!(
+                res.into_iter()
+                    .map(|row| (row.get(0), row.get(1)))
+                    .collect::<Vec<(i32, i32)>>(),
+                Vec::new()
+            );
+
+            Ok(())
+        })?;
+
+        #[cfg(feature = "mysql")]
+        inner("mysql", || {
+            use mysql::prelude::*;
+
+            let pool = mysql::Pool::new(MYSQL_URL)?;
+            let mut conn = pool.get_conn()?;
+            let res = conn.query_drop("SELECT p.id, a.id FROM persons AS p, articles AS a")?;
+            assert_eq!(res, ());
+
+            Ok(())
+        })?;
 
         Ok(())
     }


@@ -1,4 +1,4 @@
-root = "./"
+root = "./postgres"
 
 [database]
 connection = "$DATABASE_URL"


@@ -0,0 +1,4 @@
+root = "./mysql"
+
+[database]
+connection = "mysql://mysql:mysql@localhost:6001/migra_tests"


@@ -1,4 +1,4 @@
-root = "./"
+root = "./postgres"
 
 [database]
 connection = "postgres://postgres:postgres@localhost:6000/migra_tests"


@@ -0,0 +1,8 @@
+-- Your SQL goes here
+
+CREATE TABLE articles (
+    id int AUTO_INCREMENT PRIMARY KEY,
+    title text NOT NULL CHECK (length(title) > 0),
+    content text NOT NULL,
+    created_at timestamp NOT NULL DEFAULT current_timestamp
+);


@@ -0,0 +1,12 @@
+-- Your SQL goes here
+
+CREATE TABLE persons (
+    id int AUTO_INCREMENT PRIMARY KEY,
+    email varchar(256) NOT NULL UNIQUE,
+    display_name text NOT NULL,
+    created_at timestamp NOT NULL DEFAULT current_timestamp
+);
+
+ALTER TABLE articles
+    ADD COLUMN author_person_id int NULL
+    REFERENCES persons (id) ON UPDATE CASCADE ON DELETE CASCADE;


@@ -0,0 +1,3 @@
+-- This file should undo anything in `up.sql`
+
+DROP TABLE articles;


@@ -0,0 +1,6 @@
+-- This file should undo anything in `up.sql`
+
+ALTER TABLE articles
+    DROP COLUMN author_person_id;
+
+DROP TABLE persons;