diff --git a/Cargo.toml b/Cargo.toml index d66bb9b..bff1f03 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,16 +9,33 @@ repository = "https://github.com/andrewbaxter/good-ormning" readme = "readme.md" [dependencies] -chrono = "0.4.23" -enum_dispatch = "0.3.9" -genemichaels = "0.1.15" -hex = "0.4.3" -proc-macro2 = "1.0.49" -quote = "1.0.23" -rpds = "0.12.0" -samevariant = "0.0.1" -stable-hash = "0.4.3" -syn = "1.0.107" +chrono = { version = "0.4.23", optional = true } +enum_dispatch = { version = "0.3.9", optional = true } +genemichaels = { version = "0.1.15", optional = true } +hex = { version = "0.4.3", optional = true } +proc-macro2 = { version = "1.0.49", optional = true } +quote = { version = "1.0.23", optional = true } +rpds = { version = "0.12.0", optional = true } +rusqlite = { version = "0.28.0", optional = true } +samevariant = { version = "0.0.1", optional = true } +syn = { version = "1.0.107", optional = true } +tokio-postgres = { version = "0.7.7", optional = true } + +[features] +build = [ + "dep:chrono", + "dep:enum_dispatch", + "dep:genemichaels", + "dep:hex", + "dep:proc-macro2", + "dep:quote", + "dep:rpds", + "dep:samevariant", + "dep:syn", +] +run = [] +sqlite = ["dep:rusqlite", "run"] +pg = ["dep:tokio-postgres", "run"] [workspace] members = ["integration_tests"] diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index 8954849..770b231 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -8,9 +8,10 @@ chrono = "0.4.23" rusqlite = "0.28.0" testcontainers = { version = "0.14.0", features = ["tokio"] } tokio = { version = "1.24.1", features = ["macros", "rt"] } +"good-ormning" = { path = "..", features = ["sqlite", "pg"] } [build-dependencies] -"good-ormning" = { path = ".." 
} +"good-ormning" = { path = "..", features = ["build"] } [dev-dependencies] anyhow = "1.0.68" diff --git a/integration_tests/build_pg.rs b/integration_tests/build_pg.rs index 0f74050..3a5c946 100644 --- a/integration_tests/build_pg.rs +++ b/integration_tests/build_pg.rs @@ -1,5 +1,5 @@ use std::path::Path; -use good_ormning::pg::{ +use good_ormning::buildtime::pg::{ Version, schema::field::{ field_str, @@ -37,7 +37,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_base_insert.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_base_insert.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), @@ -52,7 +52,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zJCPRHK37"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().build()); - generate(&root.join("tests/pg_gen_param_i32.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_param_i32.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -67,7 +67,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zJCPRHK37"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_utctime().build()); - generate(&root.join("tests/pg_gen_param_utctime.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_param_utctime.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -82,7 +82,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("z8JI0I1E4"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().opt().build()); - generate(&root.join("tests/pg_gen_param_opt_i32.rs"), vec![(0usize, v)], 
vec![ + generate(&root.join("tests/pg_gen_param_opt_i32.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -97,7 +97,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zT7F4746C"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().opt().build()); - generate(&root.join("tests/pg_gen_param_opt_i32_null.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_param_opt_i32_null.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -113,7 +113,7 @@ pub fn build(root: &Path) { let bananna = v.table("zH2Q9TOLG"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().custom("integration_tests::MyString").build()); - generate(&root.join("tests/pg_gen_param_custom.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_param_custom.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -134,7 +134,7 @@ pub fn build(root: &Path) { "hizat", field_str().custom("integration_tests::MyString").opt().build(), ); - generate(&root.join("tests/pg_gen_param_opt_custom.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_param_opt_custom.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), @@ -152,8 +152,8 @@ pub fn build(root: &Path) { bananna.index("zPRVXKY6D", &[&hizat]).unique().build(&mut v); generate( &root.join("tests/pg_gen_insert_on_conflict_do_nothing.rs"), - vec![(0usize, v)], - vec![ + &vec![(0, v)], + &vec![ new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), type_: hizat.def.type_.type_.clone(), @@ -174,8 +174,8 @@ pub fn build(root: &Path) { bananna.index("zPRVXKY6D", &[&hizat]).unique().build(&mut v); generate( &root.join("tests/pg_gen_insert_on_conflict_update.rs"), - vec![(0usize, v)], - vec![new_insert(&bananna, 
vec![(hizat.id.clone(), Expr::Param { + &vec![(0, v)], + &vec![new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), type_: hizat.def.type_.type_.clone(), }), (two.id.clone(), Expr::Param { @@ -206,7 +206,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zSPEZNHA8"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_update.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_update.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -225,7 +225,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zSPEZNHA8"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_update_where.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_update_where.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -251,7 +251,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zSPEZNHA8"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_update_returning.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_update_returning.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -268,7 +268,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zLBDEHGRB"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_delete.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_delete.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -284,7 +284,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zLBDEHGRB"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_delete_where.rs"), vec![(0usize, 
v)], vec![ + generate(&root.join("tests/pg_gen_delete_where.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -307,7 +307,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zLBDEHGRB"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_delete_returning.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_delete_returning.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -355,7 +355,7 @@ pub fn build(root: &Path) { ], ).build_migration(), ); - generate(&root.join("tests/pg_gen_select_join.rs"), vec![(0usize, v)], vec![new_select(&bananna).join(Join { + generate(&root.join("tests/pg_gen_select_join.rs"), &vec![(0, v)], &vec![new_select(&bananna).join(Join { source: Box::new(NamedSelectSource { source: JoinSource::Table(one.0.clone()), alias: None, @@ -374,7 +374,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_select_limit.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_select_limit.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), @@ -389,7 +389,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().build()); - generate(&root.join("tests/pg_gen_select_order.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_select_order.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "v".into(), @@ -408,7 +408,7 @@ pub fn build(root: &Path) { let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().build()); let hizat2 = bananna.field(&mut v, "z3CRAVV3M", "hizat2", 
field_i32().build()); - generate(&root.join("tests/pg_gen_select_group_by.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/pg_gen_select_group_by.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "v".into(), @@ -432,9 +432,9 @@ pub fn build(root: &Path) { let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); let zomzom = bananna.field(&mut v, "zPREUVAOD", "zomzom", field_bool().migrate_fill(Expr::LitBool(true)).build()); - generate(&root.join("tests/pg_gen_migrate_add_field.rs"), vec![ + generate(&root.join("tests/pg_gen_migrate_add_field.rs"), &vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("zTWA93SX0"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -446,29 +446,73 @@ pub fn build(root: &Path) { ); v }), - (1usize, v) - ], vec![ + (1, v) + ], &vec![ // Queries new_select(&bananna).return_fields(&[&hizat, &zomzom]).build_query("get_banan", QueryResCount::MaybeOne) ]).unwrap(); } + // # Migrate - detect + { + let mut v0 = Version::default(); + let bananna_v0 = v0.table("zTWA93SX0"); + let hizat_v0 = bananna_v0.field(&mut v0, "z437INV6D", "hizat", field_str().build()); + let mut versions = vec![ + // Versions (previous) + (0, { + let mut v = Version::default(); + let bananna = v.table("zTWA93SX0"); + let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); + v.post_migration( + new_insert( + &bananna, + vec![(hizat.id.clone(), Expr::LitString("nizoot".into()))], + ).build_migration(), + ); + v + }) + ]; + generate( + &root.join("tests/pg_gen_migrate_detect_1.rs"), + &versions, + &vec![ + new_select(&bananna_v0) + .return_fields(&[&hizat_v0]) + .build_query("get_banan", QueryResCount::MaybeOne) + ], + ).unwrap(); + let mut v1 = Version::default(); + let bananna_v1 = v1.table("zTWA93SX0"); + let hizat_v1 = bananna_v1.field(&mut v1, "z437INV6D", "hizat", 
field_str().build()); + versions.push((1, v1)); + generate( + &root.join("tests/pg_gen_migrate_detect_2.rs"), + &versions, + &vec![ + new_select(&bananna_v1) + .return_fields(&[&hizat_v1]) + .build_query("get_banan", QueryResCount::MaybeOne) + ], + ).unwrap(); + } + // # Migrate - remove field { let mut v = Version::default(); let bananna = v.table("z1MD8L1CZ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_migrate_remove_field.rs"), vec![ + generate(&root.join("tests/pg_gen_migrate_remove_field.rs"), &vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("z1MD8L1CZ"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); bananna.field(&mut v, "zPREUVAOD", "zomzom", field_bool().build()); v }), - (1usize, v) - ], vec![ + (1, v) + ], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "okolor".into(), @@ -484,16 +528,16 @@ pub fn build(root: &Path) { bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); let two = v.table("zHXF3YVGQ"); let field_two = two.field(&mut v, "z156A4Q8W", "two", field_i32().build()); - generate(&root.join("tests/pg_gen_migrate_add_table.rs"), vec![ + generate(&root.join("tests/pg_gen_migrate_add_table.rs"), &vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("z4RGW742J"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, v) - ], vec![ + (1, v) + ], &vec![ // Queries new_insert(&two, vec![(field_two.id.clone(), Expr::Param { name: "two".into(), @@ -507,9 +551,9 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zX7CEK8JC"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_migrate_remove_table.rs"), vec![ + generate(&root.join("tests/pg_gen_migrate_remove_table.rs"), &vec![ // Versions (previous) - 
(0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("zX7CEK8JC"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -517,8 +561,8 @@ pub fn build(root: &Path) { two.field(&mut v, "z156A4Q8W", "two", field_i32().build()); v }), - (1usize, v) - ], vec![]).unwrap(); + (1, v) + ], &vec![]).unwrap(); } // # Migrate - remove index @@ -540,10 +584,10 @@ pub fn build(root: &Path) { v1.pre_migration(new_insert(&v0_two, vec![(v0_field_two.id.clone(), Expr::LitI32(7))]).build_migration()); let v1_bananna = v1.table("zMI5V9F2V"); v1_bananna.field(&mut v1, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/pg_gen_migrate_pre_migration.rs"), vec![ + generate(&root.join("tests/pg_gen_migrate_pre_migration.rs"), &vec![ // Versions (previous) - (0usize, v0), - (1usize, v1) - ], vec![]).unwrap(); + (0, v0), + (1, v1) + ], &vec![]).unwrap(); } } diff --git a/integration_tests/build_sqlite.rs b/integration_tests/build_sqlite.rs index 8ff51a8..bde3018 100644 --- a/integration_tests/build_sqlite.rs +++ b/integration_tests/build_sqlite.rs @@ -1,5 +1,5 @@ use std::path::Path; -use good_ormning::sqlite::{ +use good_ormning::buildtime::sqlite::{ Version, schema::field::{ field_str, @@ -40,10 +40,10 @@ pub fn build(root: &Path) { let id = users.rowid_field(&mut latest_version, None); let name = users.field(&mut latest_version, "zLQI9HQUQ", "name", field_str().build()); let points = users.field(&mut latest_version, "zLAPH3H29", "points", field_i64().build()); - generate(&root.join("tests/sqlite_gen_hello_world.rs"), vec![ + generate(&root.join("tests/sqlite_gen_hello_world.rs"), &vec![ // Versions - (0usize, latest_version) - ], vec![ + (0, latest_version) + ], &vec![ // Queries new_insert(&users, vec![(name.id.clone(), Expr::Param { name: "name".into(), @@ -69,7 +69,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", 
field_str().build()); - generate(&root.join("tests/sqlite_gen_base_insert.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_base_insert.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), @@ -84,7 +84,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zJCPRHK37"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().build()); - generate(&root.join("tests/sqlite_gen_param_i32.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_i32.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -99,7 +99,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zJCPRHK37"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_utctime_s().build()); - generate(&root.join("tests/sqlite_gen_param_utctime_s.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_utctime_s.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -114,7 +114,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zJCPRHK37"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_utctime_ms().build()); - generate(&root.join("tests/sqlite_gen_param_utctime_ms.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_utctime_ms.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -129,7 +129,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("z8JI0I1E4"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().opt().build()); - generate(&root.join("tests/sqlite_gen_param_opt_i32.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_opt_i32.rs"), &vec![(0, v)], 
&vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -144,7 +144,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zT7F4746C"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().opt().build()); - generate(&root.join("tests/sqlite_gen_param_opt_i32_null.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_opt_i32_null.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -160,7 +160,7 @@ pub fn build(root: &Path) { let bananna = v.table("zH2Q9TOLG"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().custom("integration_tests::MyString").build()); - generate(&root.join("tests/sqlite_gen_param_custom.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_custom.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "val".into(), @@ -181,7 +181,7 @@ pub fn build(root: &Path) { "hizat", field_str().custom("integration_tests::MyString").opt().build(), ); - generate(&root.join("tests/sqlite_gen_param_opt_custom.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_param_opt_custom.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), @@ -199,8 +199,8 @@ pub fn build(root: &Path) { bananna.index("zPRVXKY6D", &[&hizat]).unique().build(&mut v); generate( &root.join("tests/sqlite_gen_insert_on_conflict_do_nothing.rs"), - vec![(0usize, v)], - vec![ + &vec![(0, v)], + &vec![ new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), type_: hizat.def.type_.type_.clone(), @@ -221,8 +221,8 @@ pub fn build(root: &Path) { bananna.index("zPRVXKY6D", &[&hizat]).unique().build(&mut v); generate( &root.join("tests/sqlite_gen_insert_on_conflict_update.rs"), - vec![(0usize, v)], - vec![new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { + &vec![(0, v)], + 
&vec![new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), type_: hizat.def.type_.type_.clone(), }), (two.id.clone(), Expr::Param { @@ -253,7 +253,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zSPEZNHA8"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_update.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_update.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -272,7 +272,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zSPEZNHA8"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_update_where.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_update_where.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -298,7 +298,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zSPEZNHA8"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_update_returning.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_update_returning.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -315,7 +315,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zLBDEHGRB"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_delete.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_delete.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -331,7 +331,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zLBDEHGRB"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_delete_where.rs"), vec![(0usize, v)], vec![ + 
generate(&root.join("tests/sqlite_gen_delete_where.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -354,7 +354,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zLBDEHGRB"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_delete_returning.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_delete_returning.rs"), &vec![(0, v)], &vec![ // Queries new_insert( &bananna, @@ -404,8 +404,8 @@ pub fn build(root: &Path) { ); generate( &root.join("tests/sqlite_gen_select_join.rs"), - vec![(0usize, v)], - vec![new_select(&bananna).join(Join { + &vec![(0, v)], + &vec![new_select(&bananna).join(Join { source: Box::new(NamedSelectSource { source: JoinSource::Table(one.0.clone()), alias: None, @@ -425,7 +425,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_select_limit.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_select_limit.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "text".into(), @@ -440,7 +440,7 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().build()); - generate(&root.join("tests/sqlite_gen_select_order.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_select_order.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "v".into(), @@ -459,7 +459,7 @@ pub fn build(root: &Path) { let bananna = v.table("zEOIWAACJ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_i32().build()); let hizat2 = bananna.field(&mut v, "z3CRAVV3M", "hizat2", field_i32().build()); - 
generate(&root.join("tests/sqlite_gen_select_group_by.rs"), vec![(0usize, v)], vec![ + generate(&root.join("tests/sqlite_gen_select_group_by.rs"), &vec![(0, v)], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "v".into(), @@ -483,9 +483,9 @@ pub fn build(root: &Path) { let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); let zomzom = bananna.field(&mut v, "zPREUVAOD", "zomzom", field_bool().migrate_fill(Expr::LitBool(true)).build()); - generate(&root.join("tests/sqlite_gen_migrate_add_field.rs"), vec![ + generate(&root.join("tests/sqlite_gen_migrate_add_field.rs"), &vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("zTWA93SX0"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -497,29 +497,73 @@ pub fn build(root: &Path) { ); v }), - (1usize, v) - ], vec![ + (1, v) + ], &vec![ // Queries new_select(&bananna).return_fields(&[&hizat, &zomzom]).build_query("get_banan", QueryResCount::MaybeOne) ]).unwrap(); } + // # Migrate - detect + { + let mut v0 = Version::default(); + let bananna_v0 = v0.table("zTWA93SX0"); + let hizat_v0 = bananna_v0.field(&mut v0, "z437INV6D", "hizat", field_str().build()); + let mut versions = vec![ + // Versions (previous) + (0, { + let mut v = Version::default(); + let bananna = v.table("zTWA93SX0"); + let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); + v.post_migration( + new_insert( + &bananna, + vec![(hizat.id.clone(), Expr::LitString("nizoot".into()))], + ).build_migration(), + ); + v + }) + ]; + generate( + &root.join("tests/sqlite_gen_migrate_detect_1.rs"), + &versions, + &vec![ + new_select(&bananna_v0) + .return_fields(&[&hizat_v0]) + .build_query("get_banan", QueryResCount::MaybeOne) + ], + ).unwrap(); + let mut v1 = Version::default(); + let bananna_v1 = v1.table("zTWA93SX0"); + let hizat_v1 = bananna_v1.field(&mut v1, "z437INV6D", "hizat", field_str().build()); 
+ versions.push((1, v1)); + generate( + &root.join("tests/sqlite_gen_migrate_detect_2.rs"), + &versions, + &vec![ + new_select(&bananna_v1) + .return_fields(&[&hizat_v1]) + .build_query("get_banan", QueryResCount::MaybeOne) + ], + ).unwrap(); + } + // # Migrate - remove field { let mut v = Version::default(); let bananna = v.table("z1MD8L1CZ"); let hizat = bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_migrate_remove_field.rs"), vec![ + generate(&root.join("tests/sqlite_gen_migrate_remove_field.rs"), &vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("z1MD8L1CZ"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); bananna.field(&mut v, "zPREUVAOD", "zomzom", field_bool().build()); v }), - (1usize, v) - ], vec![ + (1, v) + ], &vec![ // Queries new_insert(&bananna, vec![(hizat.id.clone(), Expr::Param { name: "okolor".into(), @@ -535,16 +579,16 @@ pub fn build(root: &Path) { bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); let two = v.table("zHXF3YVGQ"); let field_two = two.field(&mut v, "z156A4Q8W", "two", field_i32().build()); - generate(&root.join("tests/sqlite_gen_migrate_add_table.rs"), vec![ + generate(&root.join("tests/sqlite_gen_migrate_add_table.rs"), &vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("z4RGW742J"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, v) - ], vec![ + (1, v) + ], &vec![ // Queries new_insert(&two, vec![(field_two.id.clone(), Expr::Param { name: "two".into(), @@ -558,9 +602,9 @@ pub fn build(root: &Path) { let mut v = Version::default(); let bananna = v.table("zX7CEK8JC"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_migrate_remove_table.rs"), vec![ + generate(&root.join("tests/sqlite_gen_migrate_remove_table.rs"), &vec![ // Versions (previous) 
- (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("zX7CEK8JC"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -568,8 +612,8 @@ pub fn build(root: &Path) { two.field(&mut v, "z156A4Q8W", "two", field_i32().build()); v }), - (1usize, v) - ], vec![]).unwrap(); + (1, v) + ], &vec![]).unwrap(); } // # Migrate - remove index @@ -591,10 +635,10 @@ pub fn build(root: &Path) { v1.pre_migration(new_insert(&v0_two, vec![(v0_field_two.id.clone(), Expr::LitI32(7))]).build_migration()); let v1_bananna = v1.table("zMI5V9F2V"); v1_bananna.field(&mut v1, "z437INV6D", "hizat", field_str().build()); - generate(&root.join("tests/sqlite_gen_migrate_pre_migration.rs"), vec![ + generate(&root.join("tests/sqlite_gen_migrate_pre_migration.rs"), &vec![ // Versions (previous) - (0usize, v0), - (1usize, v1) - ], vec![]).unwrap(); + (0, v0), + (1, v1) + ], &vec![]).unwrap(); } } diff --git a/integration_tests/tests/pg.rs b/integration_tests/tests/pg.rs index cc7a43f..78e7120 100644 --- a/integration_tests/tests/pg.rs +++ b/integration_tests/tests/pg.rs @@ -26,6 +26,8 @@ pub mod pg_gen_select_group_by; pub mod pg_gen_select_order; pub mod pg_gen_select_limit; pub mod pg_gen_migrate_add_field; +pub mod pg_gen_migrate_detect_1; +pub mod pg_gen_migrate_detect_2; pub mod pg_gen_migrate_remove_field; pub mod pg_gen_migrate_add_table; pub mod pg_gen_migrate_remove_table; @@ -278,6 +280,17 @@ async fn test_migrate_add_field() -> Result<()> { Ok(()) } +#[tokio::test] +async fn test_migrate_detect() -> Result<()> { + let docker = testcontainers::clients::Cli::default(); + let (mut db, _cont) = db(&docker).await?; + pg_gen_migrate_detect_2::migrate(&mut db).await?; + assert!( + matches!(pg_gen_migrate_detect_1::get_banan(&mut db).await, Err(good_ormning::runtime::Error::BadSchema)) + ); + Ok(()) +} + #[tokio::test] async fn test_migrate_remove_field() -> Result<()> { let docker = testcontainers::clients::Cli::default(); diff --git 
a/integration_tests/tests/sqlite.rs b/integration_tests/tests/sqlite.rs index 5be9d5f..d3b03a4 100644 --- a/integration_tests/tests/sqlite.rs +++ b/integration_tests/tests/sqlite.rs @@ -22,6 +22,8 @@ pub mod sqlite_gen_select_group_by; pub mod sqlite_gen_select_order; pub mod sqlite_gen_select_limit; pub mod sqlite_gen_migrate_add_field; +pub mod sqlite_gen_migrate_detect_1; +pub mod sqlite_gen_migrate_detect_2; pub mod sqlite_gen_migrate_remove_field; pub mod sqlite_gen_migrate_add_table; pub mod sqlite_gen_migrate_remove_table; @@ -261,6 +263,14 @@ fn test_migrate_add_field() -> Result<()> { Ok(()) } +#[test] +fn test_migrate_detect() -> Result<()> { + let mut db = rusqlite::Connection::open_in_memory()?; + sqlite_gen_migrate_detect_2::migrate(&mut db)?; + assert!(matches!(sqlite_gen_migrate_detect_1::get_banan(&mut db), Err(good_ormning::runtime::Error::BadSchema))); + Ok(()) +} + #[test] fn test_migrate_remove_field() -> Result<()> { let mut db = rusqlite::Connection::open_in_memory()?; diff --git a/src/graphmigrate/graph.rs b/src/buildtime/graphmigrate/graph.rs similarity index 100% rename from src/graphmigrate/graph.rs rename to src/buildtime/graphmigrate/graph.rs diff --git a/src/graphmigrate/mod.rs b/src/buildtime/graphmigrate/mod.rs similarity index 98% rename from src/graphmigrate/mod.rs rename to src/buildtime/graphmigrate/mod.rs index 77108ca..675ef45 100644 --- a/src/graphmigrate/mod.rs +++ b/src/buildtime/graphmigrate/mod.rs @@ -6,7 +6,7 @@ use std::{ hash::Hash, }; use std::fmt::Debug; -use crate::graphmigrate::graph::{ +use crate::buildtime::graphmigrate::graph::{ Graph, TopoWalker, }; @@ -53,7 +53,7 @@ pub enum Comparison { type Version = HashMap<::I, Node>; -pub fn migrate(output: &mut T::O, prev_version: Option>, version: &Version) { +pub fn migrate(output: &mut T::O, prev_version: Option<&Version>, version: &Version) { enum DiffNode { Create { new: T, @@ -68,11 +68,11 @@ pub fn migrate(output: &mut T::O, prev_version: Option>, let mut 
delete_graph = Graph::new(); let mut delete_graph_lookup = HashMap::new(); if let Some(prev_version) = &prev_version { - for (k, n) in prev_version { + for (k, n) in *prev_version { let id = delete_graph.add(Some(n.body.clone())); delete_graph_lookup.insert(k, id); } - for (k, n) in prev_version { + for (k, n) in *prev_version { let gk = *delete_graph_lookup.get(k).unwrap(); for dep in &n.deps { delete_graph.edge(*delete_graph_lookup.get(dep).unwrap(), gk); diff --git a/src/buildtime/mod.rs b/src/buildtime/mod.rs new file mode 100644 index 0000000..2830d7e --- /dev/null +++ b/src/buildtime/mod.rs @@ -0,0 +1,4 @@ +pub mod pg; +pub mod sqlite; +mod graphmigrate; +mod utils; diff --git a/src/pg/mod.rs b/src/buildtime/pg/mod.rs similarity index 66% rename from src/pg/mod.rs rename to src/buildtime/pg/mod.rs index 0bbd9e4..0a1cf73 100644 --- a/src/pg/mod.rs +++ b/src/buildtime/pg/mod.rs @@ -10,14 +10,16 @@ use quote::{ use std::{ collections::HashMap, path::Path, - fs, }; -use crate::{ +use crate::buildtime::{ pg::{ types::Type, queries::expr::ExprValName, }, - utils::Errs, + utils::{ + Errs, + Output, + }, }; use self::{ queries::{ @@ -696,33 +698,44 @@ impl IndexBuilder { } } -/// Generate Rust code for migrations and queries. 
-/// -/// # Arguments -/// -/// * `output` - the path to a single rust source file where the output will be written -/// -/// # Returns -/// -/// * Error - a list of validation or generation errors that occurred -pub fn generate(output: &Path, versions: Vec<(usize, Version)>, queries: Vec) -> Result<(), Vec> { +fn build_field_lookup(field_lookup: &mut HashMap>, version: &Version) { + for v in version.schema.values() { + match &v.body { + Node::Field(f) => { + match field_lookup.entry(f.id.0.clone()) { + std::collections::hash_map::Entry::Occupied(_) => { }, + std::collections::hash_map::Entry::Vacant(e) => { + e.insert(HashMap::new()); + }, + }; + let table = field_lookup.get_mut(&f.id.0).unwrap(); + table.insert(f.id.clone(), (f.def.name.clone(), f.def.type_.type_.clone())); + }, + _ => { }, + }; + } +} + +pub fn generate_migrations(output: &mut Output, versions: &Vec<(i64, Version)>) -> Result<(), Vec> { let mut errs = Errs::new(); let mut migrations = vec![]; - let mut prev_version: Option = None; + let mut prev_version: Option<&Version> = None; let mut prev_version_i: Option = None; let mut field_lookup = HashMap::new(); for (version_i, version) in versions { + let version_i = *version_i; let path = rpds::vector![format!("Migration to {}", version_i)]; let mut migration = vec![]; fn do_migration_query( errs: &mut Errs, + version_i: i64, path: &rpds::Vector, migration: &mut Vec, field_lookup: &HashMap>, q: &dyn QueryBody, ) { - let mut qctx = PgQueryCtx::new(errs.clone(), &field_lookup); + let mut qctx = PgQueryCtx::new(errs.clone(), version_i, &field_lookup); let e_res = q.build(&mut qctx, path, QueryResCount::None); if !qctx.rust_args.is_empty() { qctx.errs.err(path, format!("Migration statements can't receive arguments")); @@ -738,6 +751,7 @@ pub fn generate(output: &Path, versions: Vec<(usize, Version)>, queries: Vec, queries: Vec, queries: Vec { - match field_lookup.entry(f.id.0.clone()) { - std::collections::hash_map::Entry::Occupied(_) => { }, - 
std::collections::hash_map::Entry::Vacant(e) => { - e.insert(HashMap::new()); - }, - }; - let table = field_lookup.get_mut(&f.id.0).unwrap(); - table.insert(f.id.clone(), (f.def.name.clone(), f.def.type_.type_.clone())); - }, - _ => { }, - }; - } + build_field_lookup(&mut field_lookup, &version); // Main migrations { - let mut state = PgMigrateCtx::new(errs.clone()); - crate::graphmigrate::migrate(&mut state, prev_version.take().map(|s| s.schema), &version.schema); + let mut state = PgMigrateCtx::new(errs.clone(), version_i); + crate::buildtime::graphmigrate::migrate( + &mut state, + prev_version.take().map(|s| &s.schema), + &version.schema, + ); for statement in &state.statements { migration.push(quote!{ txn.execute(#statement, &[]).await ?; @@ -793,6 +794,7 @@ pub fn generate(output: &Path, versions: Vec<(usize, Version)>, queries: Vec, queries: Vec = HashMap::new(); - for q in queries { - let path = rpds::vector![format!("Query {}", q.name)]; - let mut ctx = PgQueryCtx::new(errs.clone(), &field_lookup); - let res = QueryBody::build(q.body.as_ref(), &mut ctx, &path, q.res_count.clone()); - let ident = format_ident!("{}", q.name); - let q_text = res.1.to_string(); - let args = ctx.rust_args.split_off(0); - let args_forward = ctx.query_args.split_off(0); - drop(ctx); - let (res_ident, res_def, unforward_res) = { - fn convert_one_res( - errs: &mut Errs, - path: &rpds::Vector, - i: usize, - k: &ExprValName, - v: &Type, - ) -> Option<(Ident, TokenStream, TokenStream)> { - if k.name.is_empty() { - errs.err( - path, - format!("Result element {} has no name; name it using `rename` if this is intentional", i), - ); - return None; + let last_version_i = prev_version_i.unwrap() as i64; + output.data.push(quote!{ + pub async fn migrate(db: &mut tokio_postgres::Client) -> Result <(), + good_ormning:: runtime:: Error > { + db + .execute( + "create table if not exists __good_version (rid int primary key, version bigint not null, lock int not null);", + &[], + ) + .await?; + db 
+ .execute( + "insert into __good_version (rid, version, lock) values (0, -1, 0) on conflict do nothing;", + &[], + ) + .await?; + loop { + let txn = db.transaction().await?; + match(|| { + async { + let version = + match txn + .query_opt( + "update __good_version set lock = 1 where rid = 0 and lock = 0 returning version", + &[], + ) + .await? { + Some(r) => { + let ver: i64 = r.get("version"); + ver + }, + None => { + return Ok(false); + }, + }; + if version > #last_version_i { + return Err( + good_ormning::runtime::Error::Other( + format!( + "The latest known version is {}, but the schema is at unknown version {}", + #last_version_i, + version + ), + ), + ); + } + #( + #migrations + ) * txn.execute( + "update __good_version set version = $1, lock = 0", + &[& #last_version_i] + ).await ?; + let out: Result < bool, + good_ormning::runtime::Error >= Ok(true); + out } - let mut ident: TokenStream = match v.type_.type_ { - types::SimpleSimpleType::Auto => quote!(i64), - types::SimpleSimpleType::U32 => quote!(u32), - types::SimpleSimpleType::I32 => quote!(i32), - types::SimpleSimpleType::I64 => quote!(i64), - types::SimpleSimpleType::F32 => quote!(f32), - types::SimpleSimpleType::F64 => quote!(f64), - types::SimpleSimpleType::Bool => quote!(bool), - types::SimpleSimpleType::String => quote!(String), - types::SimpleSimpleType::Bytes => quote!(Vec < u8 >), - types::SimpleSimpleType::UtcTime => quote!(chrono:: DateTime < chrono:: Utc >), - }; - if v.opt { - ident = quote!(Option < #ident >); + })().await { + Err(e) => { + match txn.rollback().await { + Err(e1) => { + return Err( + good_ormning::runtime::Error::Other( + format!( + "{}\n\nRolling back the transaction due to the above also failed: {}", + e, + e1 + ), + ), + ); + }, + Ok(_) => { + return Err(good_ormning::runtime::Error::Other(e.to_string())); + }, + }; } - let mut unforward = quote!{ - let x: #ident = r.get(#i); - }; - if let Some(custom) = &v.type_.custom { - ident = match syn::parse_str::(&custom) { - 
Ok(i) => i.to_token_stream(), + Ok(migrated) => { + match txn.commit().await { Err(e) => { - errs.err( - path, - format!( - "Couldn't parse provided custom type name [{}] as identifier path: {:?}", - custom, - e + return Err( + good_ormning::runtime::Error::Other( + format!("Error committing the migration transaction: {}", e), ), ); - return None; }, - }; - if v.opt { - unforward = quote!{ - #unforward let x = if let Some(x) = x { - Some(#ident:: from_sql(x).map_err(|e| GoodError(e.to_string())) ?) + Ok(_) => { + if migrated { + return Ok(()) + } else { + tokio::time::sleep(tokio::time::Duration::from_millis(5 * 1000)).await; } - else { - None - }; - }; - ident = quote!(Option < #ident >); - } else { - unforward = quote!{ - #unforward let x = #ident:: from_sql(x).map_err(|e| GoodError(e.to_string())) ?; - }; - } + }, + }; } - return Some((format_ident!("{}", utils::sanitize(&k.name).1), ident, quote!({ - #unforward x - }))); } + } + } + }); + errs.raise()?; + Ok(()) +} - if res.0.0.len() == 1 { - let e = &res.0.0[0]; - let (_, type_ident, unforward) = match convert_one_res(&mut errs, &path, 0, &e.0, &e.1) { - None => { - continue; +pub fn generate_queries( + output: &mut Output, + version: &(i64, Version), + queries: &Vec, +) -> Result<(), Vec> { + let mut errs = Errs::new(); + let mut field_lookup = HashMap::new(); + build_field_lookup(&mut field_lookup, &version.1); + let mut res_type_idents: HashMap = HashMap::new(); + for q in queries { + let path = rpds::vector![format!("Query {}", q.name)]; + let mut ctx = PgQueryCtx::new(errs.clone(), version.0, &field_lookup); + let res = QueryBody::build(q.body.as_ref(), &mut ctx, &path, q.res_count.clone()); + let ident = format_ident!("{}", q.name); + let q_text = res.1.to_string(); + let args = ctx.rust_args.split_off(0); + let args_forward = ctx.query_args.split_off(0); + drop(ctx); + let (res_ident, res_def, unforward_res) = { + fn convert_one_res( + errs: &mut Errs, + path: &rpds::Vector, + i: usize, + k: 
&ExprValName, + v: &Type, + ) -> Option<(Ident, TokenStream, TokenStream)> { + if k.name.is_empty() { + errs.err( + path, + format!("Result element {} has no name; name it using `rename` if this is intentional", i), + ); + return None; + } + let mut ident: TokenStream = match v.type_.type_ { + types::SimpleSimpleType::Auto => quote!(i64), + types::SimpleSimpleType::U32 => quote!(u32), + types::SimpleSimpleType::I32 => quote!(i32), + types::SimpleSimpleType::I64 => quote!(i64), + types::SimpleSimpleType::F32 => quote!(f32), + types::SimpleSimpleType::F64 => quote!(f64), + types::SimpleSimpleType::Bool => quote!(bool), + types::SimpleSimpleType::String => quote!(String), + types::SimpleSimpleType::Bytes => quote!(Vec < u8 >), + types::SimpleSimpleType::UtcTime => quote!(chrono:: DateTime < chrono:: Utc >), + }; + if v.opt { + ident = quote!(Option < #ident >); + } + let mut unforward = quote!{ + let x: #ident = r.get(#i); + }; + if let Some(custom) = &v.type_.custom { + ident = match syn::parse_str::(&custom) { + Ok(i) => i.to_token_stream(), + Err(e) => { + errs.err( + path, + format!( + "Couldn't parse provided custom type name [{}] as identifier path: {:?}", + custom, + e + ), + ); + return None; }, - Some(x) => x, }; - (type_ident, None, unforward) - } else { - let mut fields = vec![]; - let mut unforward_fields = vec![]; - for (i, (k, v)) in res.0.0.into_iter().enumerate() { - let (k_ident, type_ident, unforward) = match convert_one_res(&mut errs, &path, i, &k, &v) { - Some(x) => x, - None => continue, + if v.opt { + unforward = quote!{ + #unforward let x = if let Some(x) = x { + Some( + #ident:: from_sql( + x + ).map_err(|e| good_ormning::runtime::Error::Other(e.to_string())) ? 
+ ) + } + else { + None + }; + }; + ident = quote!(Option < #ident >); + } else { + unforward = quote!{ + #unforward let x = #ident:: from_sql( + x + ).map_err(|e| good_ormning::runtime::Error::Other(e.to_string())) ?; }; - fields.push(quote!{ - pub #k_ident: #type_ident - }); - unforward_fields.push(quote!{ - #k_ident: #unforward - }); } - let body = quote!({ - #(#fields,) * - }); - let res_type_count = res_type_idents.len(); - let (res_ident, res_def) = match res_type_idents.entry(body.to_string()) { - std::collections::hash_map::Entry::Occupied(e) => { - (e.get().clone(), None) - }, - std::collections::hash_map::Entry::Vacant(e) => { - let ident = if let Some(name) = q.res_name { - format_ident!("{}", name) - } else { - format_ident!("DbRes{}", res_type_count) - }; - e.insert(ident.clone()); - let res_def = quote!(pub struct #ident #body); - (ident, Some(res_def)) - }, + } + return Some((format_ident!("{}", utils::sanitize(&k.name).1), ident, quote!({ + #unforward x + }))); + } + + if res.0.0.len() == 1 { + let e = &res.0.0[0]; + let (_, type_ident, unforward) = match convert_one_res(&mut errs, &path, 0, &e.0, &e.1) { + None => { + continue; + }, + Some(x) => x, + }; + (type_ident, None, unforward) + } else { + let mut fields = vec![]; + let mut unforward_fields = vec![]; + for (i, (k, v)) in res.0.0.into_iter().enumerate() { + let (k_ident, type_ident, unforward) = match convert_one_res(&mut errs, &path, i, &k, &v) { + Some(x) => x, + None => continue, }; - let unforward = quote!(#res_ident { - #(#unforward_fields,) * + fields.push(quote!{ + pub #k_ident: #type_ident }); - (res_ident.to_token_stream(), res_def, unforward) - } - }; - let db_arg = quote!(db: &mut impl tokio_postgres::GenericClient); - match q.res_count { - QueryResCount::None => { - db_others.push(quote!{ - pub async fn #ident(#db_arg, #(#args,) *) -> Result <(), - GoodError > { - db.execute( - #q_text, - &[#(& #args_forward,) *] - ).await.map_err(|e| GoodError(e.to_string())) ?; - Ok(()) - } + 
unforward_fields.push(quote!{ + #k_ident: #unforward }); - }, - QueryResCount::MaybeOne => { - if let Some(res_def) = res_def { - db_others.push(res_def); + } + let body = quote!({ + #(#fields,) * + }); + let res_type_count = res_type_idents.len(); + let (res_ident, res_def) = match res_type_idents.entry(body.to_string()) { + std::collections::hash_map::Entry::Occupied(e) => { + (e.get().clone(), None) + }, + std::collections::hash_map::Entry::Vacant(e) => { + let ident = if let Some(name) = &q.res_name { + format_ident!("{}", name) + } else { + format_ident!("DbRes{}", res_type_count) + }; + e.insert(ident.clone()); + let res_def = quote!(pub struct #ident #body); + (ident, Some(res_def)) + }, + }; + let unforward = quote!(#res_ident { + #(#unforward_fields,) * + }); + (res_ident.to_token_stream(), res_def, unforward) + } + }; + let db_arg = quote!(db: &mut impl tokio_postgres::GenericClient); + match q.res_count { + QueryResCount::None => { + output.data.push(quote!{ + pub async fn #ident(#db_arg, #(#args,) *) -> Result <(), + good_ormning:: runtime:: Error > { + db.execute(#q_text, &[#(& #args_forward,) *]).await ?; + Ok(()) } - db_others.push(quote!{ - pub async fn #ident(#db_arg, #(#args,) *) -> Result < Option < #res_ident >, - GoodError > { - let r = db.query_opt( - #q_text, - &[#(& #args_forward,) *] - ).await.map_err(|e| GoodError(e.to_string())) ?; - if let Some(r) = r { - return Ok(Some(#unforward_res)); - } - Ok(None) + }); + }, + QueryResCount::MaybeOne => { + if let Some(res_def) = res_def { + output.data.push(res_def); + } + output.data.push(quote!{ + pub async fn #ident(#db_arg, #(#args,) *) -> Result < Option < #res_ident >, + good_ormning:: runtime:: Error > { + let r = db.query_opt(#q_text, &[#(& #args_forward,) *]).await ?; + if let Some(r) = r { + return Ok(Some(#unforward_res)); } - }); - }, - QueryResCount::One => { - if let Some(res_def) = res_def { - db_others.push(res_def); + Ok(None) } - db_others.push(quote!{ - pub async fn 
#ident(#db_arg, #(#args,) *) -> Result < #res_ident, - GoodError > { - let r = db.query_one( - #q_text, - &[#(& #args_forward,) *] - ).await.map_err(|e| GoodError(e.to_string())) ?; - Ok(#unforward_res) - } - }); - }, - QueryResCount::Many => { - if let Some(res_def) = res_def { - db_others.push(res_def); + }); + }, + QueryResCount::One => { + if let Some(res_def) = res_def { + output.data.push(res_def); + } + output.data.push(quote!{ + pub async fn #ident(#db_arg, #(#args,) *) -> Result < #res_ident, + good_ormning:: runtime:: Error > { + let r = db.query_one(#q_text, &[#(& #args_forward,) *]).await ?; + Ok(#unforward_res) } - db_others.push(quote!{ - pub async fn #ident(#db_arg, #(#args,) *) -> Result < Vec < #res_ident >, - GoodError > { - let mut out = vec![]; - for r in db.query( - #q_text, - &[#(& #args_forward,) *] - ).await.map_err(|e| GoodError(e.to_string())) ? { - out.push(#unforward_res); - } - Ok(out) + }); + }, + QueryResCount::Many => { + if let Some(res_def) = res_def { + output.data.push(res_def); + } + output.data.push(quote!{ + pub async fn #ident(#db_arg, #(#args,) *) -> Result < Vec < #res_ident >, + good_ormning:: runtime:: Error > { + let mut out = vec![]; + for r in db.query(#q_text, &[#(& #args_forward,) *]).await ? 
{ + out.push(#unforward_res); } - }); - }, - } + Ok(out) + } + }); + }, } } + errs.raise()?; + Ok(()) +} - // Compile, output - let last_version_i = prev_version_i.unwrap() as i64; - let tokens = quote!{ - #[derive(Debug)] - pub struct GoodError(pub String); - impl std::fmt::Display for GoodError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } - } - impl std::error::Error for GoodError { } - pub async fn migrate(db: &mut tokio_postgres::Client) -> Result <(), - GoodError > { - let txn = db.transaction().await.map_err(|e| GoodError(e.to_string()))?; - match(|| { - async { - txn.execute("create table if not exists __good_version (version bigint not null);", &[]).await?; - let version = match txn.query_opt("select * from __good_version limit 1", &[]).await? { - Some(r) => { - let ver: i64 = r.get("version"); - ver - }, - None => { - let ver: i64 = - txn - .query_one( - "insert into __good_version (version) values (-1) returning version", - &[], - ) - .await? - .get("version"); - ver - }, - }; - #( - #migrations - ) * txn.execute("update __good_version set version = $1", &[& #last_version_i]).await ?; - let out: Result <(), - tokio_postgres::Error >= Ok(()); - out - } - })().await { - Err(e) => { - match txn.rollback().await { - Err(e1) => { - return Err( - GoodError( - format!( - "{}\n\nRolling back the transaction due to the above also failed: {}", - e, - e1 - ), - ), - ); - }, - Ok(_) => { - return Err(GoodError(e.to_string())); - }, - }; - } - Ok(_) => { - match txn.commit().await { - Err(e) => { - return Err(GoodError(format!("Error committing the migration transaction: {}", e))); - }, - Ok(_) => { }, - }; - } - } - Ok(()) - } - #(#db_others) * - }; - if let Some(p) = output.parent() { - if let Err(e) = fs::create_dir_all(&p) { - errs.err( - &rpds::vector![], - format!("Error creating output parent directories {}: {:?}", p.to_string_lossy(), e), - ); - } +/// Generate Rust code for migrations and queries. 
+/// +/// # Arguments +/// +/// * `output` - the path to a single rust source file where the output will be written +/// +/// # Returns +/// +/// * Error - a list of validation or generation errors that occurred +pub fn generate(output: &Path, versions: &Vec<(i64, Version)>, queries: &Vec) -> Result<(), Vec> { + let mut out = Output::new(); + let mut errs = vec![]; + if let Err(e) = generate_migrations(&mut out, &versions) { + errs.extend(e); + } + let last_version = versions.last().unwrap(); + if let Err(e) = generate_queries(&mut out, last_version, queries) { + errs.extend(e); + } + if let Err(e) = out.write(output) { + errs.push(e.to_string()); + } + if !errs.is_empty() { + return Err(errs); } - match genemichaels::format_str(&tokens.to_string(), &genemichaels::FormatConfig::default()) { - Ok(src) => { - match fs::write(output, src.rendered.as_bytes()) { - Ok(_) => { }, - Err(e) => errs.err( - &rpds::vector![], - format!("Failed to write generated code to {}: {:?}", output.to_string_lossy(), e), - ), - }; - }, - Err(e) => { - errs.err(&rpds::vector![], format!("Error formatting generated code: {:?}\n{}", e, tokens)); - }, - }; - errs.raise()?; Ok(()) } @@ -1117,7 +1149,7 @@ mod test { path::PathBuf, str::FromStr, }; - use crate::pg::{ + use crate::buildtime::pg::{ new_select, QueryResCount, new_insert, @@ -1136,13 +1168,13 @@ mod test { fn test_add_field_serial_bad() { assert!(generate(&PathBuf::from_str("/dev/null").unwrap(), vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, { + (1, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -1157,13 +1189,13 @@ mod test { fn test_add_field_dup_bad() { generate(&PathBuf::from_str("/dev/null").unwrap(), vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let 
bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, { + (1, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -1178,13 +1210,13 @@ mod test { fn test_add_table_dup_bad() { generate(&PathBuf::from_str("/dev/null").unwrap(), vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, { + (1, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -1203,7 +1235,7 @@ mod test { assert!( generate( &PathBuf::from_str("/dev/null").unwrap(), - vec![(0usize, v)], + vec![(0, v)], vec![new_select(&bananna).return_field(&hizat).build_query("x", QueryResCount::None)], ).is_err() ); @@ -1217,7 +1249,7 @@ mod test { assert!( generate( &PathBuf::from_str("/dev/null").unwrap(), - vec![(0usize, v)], + vec![(0, v)], vec![new_select(&bananna).build_query("x", QueryResCount::None)], ).is_err() ); @@ -1231,7 +1263,7 @@ mod test { assert!( generate( &PathBuf::from_str("/dev/null").unwrap(), - vec![(0usize, v)], + vec![(0, v)], vec![ new_insert(&bananna, vec![(hizat.id.clone(), Expr::LitString("hoy".into()))]) .return_field(&hizat) diff --git a/src/pg/queries/delete.rs b/src/buildtime/pg/queries/delete.rs similarity index 91% rename from src/pg/queries/delete.rs rename to src/buildtime/pg/queries/delete.rs index a7035a6..b2b1ac9 100644 --- a/src/pg/queries/delete.rs +++ b/src/buildtime/pg/queries/delete.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ utils::Tokens, pg::{ schema::table::TableId, @@ -31,7 +31,7 @@ impl QueryBody for Delete { ctx: &mut super::utils::PgQueryCtx, path: &rpds::Vector, res_count: QueryResCount, - ) -> (super::expr::ExprType, crate::utils::Tokens) { + ) -> 
(super::expr::ExprType, Tokens) { // Prep let mut scope = HashMap::new(); for (k, v) in match ctx.tables.get(&self.table) { @@ -46,7 +46,7 @@ impl QueryBody for Delete { // Build query let mut out = Tokens::new(); - out.s("delete from").id(&self.table.0); + out.s("delete from").id(&self.table.at(ctx.version)); if let Some(where_) = &self.where_ { out.s("where"); let path = path.push_back("Where".into()); diff --git a/src/pg/queries/expr.rs b/src/buildtime/pg/queries/expr.rs similarity index 99% rename from src/pg/queries/expr.rs rename to src/buildtime/pg/queries/expr.rs index f5225d1..ce8cfde 100644 --- a/src/pg/queries/expr.rs +++ b/src/buildtime/pg/queries/expr.rs @@ -10,7 +10,7 @@ use quote::{ use samevariant::samevariant; use syn::Path; use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ pg::{ types::{ Type, @@ -538,7 +538,7 @@ impl Expr { x, scope .iter() - .map(|e| format!("{}.{} ({})", e.0.0.0, e.0.1, e.1.0)) + .map(|e| format!("{}.{} ({})", e.0.0.at(ctx.version), e.0.1, e.1.0)) .collect::>() ), ); @@ -546,7 +546,7 @@ impl Expr { }, }; let mut out = Tokens::new(); - out.id(&x.0.0).s(".").id(&x.1); + out.id(&x.0.at(ctx.version)).s(".").id(&x.1); return (ExprType(vec![(ExprValName::field(x.clone(), t.0), t.1.clone())]), out); }, Expr::BinOp { left, op, right } => { diff --git a/src/pg/queries/insert.rs b/src/buildtime/pg/queries/insert.rs similarity index 97% rename from src/pg/queries/insert.rs rename to src/buildtime/pg/queries/insert.rs index e451e40..11889db 100644 --- a/src/pg/queries/insert.rs +++ b/src/buildtime/pg/queries/insert.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ pg::{ schema::{ field::FieldId, @@ -59,7 +59,7 @@ impl QueryBody for Insert { // Build query let mut out = Tokens::new(); - out.s("insert into").id(&self.table.0).s("("); + out.s("insert into").id(&self.table.at(ctx.version)).s("("); for (i, (k, _)) in self.values.iter().enumerate() { if i > 0 { out.s(","); diff --git 
a/src/pg/queries/mod.rs b/src/buildtime/pg/queries/mod.rs similarity index 100% rename from src/pg/queries/mod.rs rename to src/buildtime/pg/queries/mod.rs diff --git a/src/pg/queries/select.rs b/src/buildtime/pg/queries/select.rs similarity index 98% rename from src/pg/queries/select.rs rename to src/buildtime/pg/queries/select.rs index 002e584..46c47df 100644 --- a/src/pg/queries/select.rs +++ b/src/buildtime/pg/queries/select.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ utils::Tokens, pg::{ types::Type, @@ -64,7 +64,7 @@ impl NamedSelectSource { return (vec![], Tokens::new()); }, }; - out.id(&s.0); + out.id(&s.at(ctx.version)); new_fields.iter().map(|e| (e.0.clone(), e.1.clone())).collect() }, }; diff --git a/src/pg/queries/update.rs b/src/buildtime/pg/queries/update.rs similarity index 92% rename from src/pg/queries/update.rs rename to src/buildtime/pg/queries/update.rs index a3efea1..2321c55 100644 --- a/src/pg/queries/update.rs +++ b/src/buildtime/pg/queries/update.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ pg::{ schema::{ table::TableId, @@ -36,7 +36,7 @@ impl QueryBody for Update { ctx: &mut super::utils::PgQueryCtx, path: &rpds::Vector, res_count: QueryResCount, - ) -> (super::expr::ExprType, crate::utils::Tokens) { + ) -> (super::expr::ExprType, Tokens) { // Prep let mut scope = HashMap::new(); for (k, v) in match ctx.tables.get(&self.table) { @@ -51,7 +51,7 @@ impl QueryBody for Update { // Build query let mut out = Tokens::new(); - out.s("update").id(&self.table.0); + out.s("update").id(&self.table.at(ctx.version)); build_set(ctx, path, &scope, &mut out, &self.values); if let Some(where_) = &self.where_ { out.s("where"); diff --git a/src/pg/queries/utils.rs b/src/buildtime/pg/queries/utils.rs similarity index 93% rename from src/pg/queries/utils.rs rename to src/buildtime/pg/queries/utils.rs index 790eff5..cc8726c 100644 --- a/src/pg/queries/utils.rs +++ 
b/src/buildtime/pg/queries/utils.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; use proc_macro2::TokenStream; -use crate::{ +use crate::buildtime::{ pg::{ types::Type, schema::{ @@ -27,16 +27,22 @@ use super::{ pub struct PgQueryCtx<'a> { pub(crate) tables: &'a HashMap>, pub(crate) errs: Errs, + pub(crate) version: i64, pub(crate) rust_arg_lookup: HashMap, pub(crate) rust_args: Vec, pub(crate) query_args: Vec, } impl<'a> PgQueryCtx<'a> { - pub(crate) fn new(errs: Errs, tables: &'a HashMap>) -> Self { + pub(crate) fn new( + errs: Errs, + version: i64, + tables: &'a HashMap>, + ) -> Self { Self { tables: tables, errs: errs, + version: version, rust_arg_lookup: Default::default(), rust_args: Default::default(), query_args: Default::default(), diff --git a/src/pg/schema/constraint.rs b/src/buildtime/pg/schema/constraint.rs similarity index 87% rename from src/pg/schema/constraint.rs rename to src/buildtime/pg/schema/constraint.rs index 9e502ce..9137cfa 100644 --- a/src/pg/schema/constraint.rs +++ b/src/buildtime/pg/schema/constraint.rs @@ -2,7 +2,7 @@ use std::{ fmt::Display, collections::HashSet, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, graphmigrate::Comparison, }; @@ -73,7 +73,7 @@ impl NodeDataDispatch for NodeConstraint_ { fn create(&self, ctx: &mut PgMigrateCtx) { let mut stmt = Tokens::new(); - stmt.s("alter table").id(&self.id.0.0).s("add constraint").id(&self.id.1); + stmt.s("alter table").id(&self.id.0.at(ctx.version)).s("add constraint").id(&self.id.1); match &self.def.type_ { ConstraintType::PrimaryKey(x) => { stmt.s("primary key (").f(|t| { @@ -96,7 +96,7 @@ impl NodeDataDispatch for NodeConstraint_ { }).s(") references ").f(|t| { for (i, id) in x.fields.iter().enumerate() { if i == 0 { - t.id(&id.1.0.0).s("("); + t.id(&id.1.0.at(ctx.version)).s("("); } else { t.s(","); } @@ -116,7 +116,12 @@ impl NodeDataDispatch for NodeConstraint_ { ctx .statements .push( - Tokens::new().s("alter table").id(&self.id.0.0).s("drop 
constraint").id(&self.id.1).to_string(), + Tokens::new() + .s("alter table") + .id(&self.id.0.at(ctx.version - 1)) + .s("drop constraint") + .id(&self.id.1) + .to_string(), ); } } diff --git a/src/pg/schema/field.rs b/src/buildtime/pg/schema/field.rs similarity index 94% rename from src/pg/schema/field.rs rename to src/buildtime/pg/schema/field.rs index 18de03f..c1573c8 100644 --- a/src/pg/schema/field.rs +++ b/src/buildtime/pg/schema/field.rs @@ -8,7 +8,7 @@ use std::{ HashSet, }, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, pg::{ types::{ @@ -220,7 +220,7 @@ impl NodeData for NodeField_ { .push( Tokens::new() .s("alter table") - .id(&self.id.0.0) + .id(&self.id.0.at(ctx.version)) .s("alter column") .id(&self.id.1) .s("set type") @@ -240,7 +240,7 @@ impl NodeDataDispatch for NodeField_ { let mut stmt = Tokens::new(); stmt .s("alter table") - .id(&self.id.0.0) + .id(&self.id.0.at(ctx.version)) .s("add column") .id(&self.id.1) .s(to_sql_type(&self.def.type_.type_.type_.type_)); @@ -248,7 +248,7 @@ impl NodeDataDispatch for NodeField_ { if let Some(d) = &self.def.type_.migration_default { stmt.s("not null default"); let qctx_fields = HashMap::new(); - let mut qctx = PgQueryCtx::new(ctx.errs.clone(), &qctx_fields); + let mut qctx = PgQueryCtx::new(ctx.errs.clone(), ctx.version, &qctx_fields); let e_res = d.build(&mut qctx, &path, &HashMap::new()); check_same(&mut qctx.errs, &path, &ExprType(vec![(ExprValName::empty(), Type { type_: self.def.type_.type_.type_.clone(), @@ -277,7 +277,7 @@ impl NodeDataDispatch for NodeField_ { .push( Tokens::new() .s("alter table") - .id(&self.id.0.0) + .id(&self.id.0.at(ctx.version)) .s("alter column") .id(&self.id.1) .s("drop default") @@ -289,7 +289,14 @@ impl NodeDataDispatch for NodeField_ { fn delete(&self, ctx: &mut PgMigrateCtx) { ctx .statements - .push(Tokens::new().s("alter table").id(&self.id.0.0).s("drop column").id(&self.id.1).to_string()); + .push( + Tokens::new() + .s("alter table") + 
.id(&self.id.0.at(ctx.version - 1)) + .s("drop column") + .id(&self.id.1) + .to_string(), + ); } fn create_coalesce(&mut self, other: Node) -> Option { diff --git a/src/pg/schema/index.rs b/src/buildtime/pg/schema/index.rs similarity index 94% rename from src/pg/schema/index.rs rename to src/buildtime/pg/schema/index.rs index 84ee22a..fcd2cd9 100644 --- a/src/pg/schema/index.rs +++ b/src/buildtime/pg/schema/index.rs @@ -2,7 +2,7 @@ use std::{ fmt::Display, collections::HashSet, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, graphmigrate::Comparison, }; @@ -61,7 +61,7 @@ impl NodeDataDispatch for NodeIndex_ { if self.def.unique { t.s("unique"); } - }).s("index").id(&self.id.1).s("on").id(&self.id.0.0).s("(").f(|t| { + }).s("index").id(&self.id.1).s("on").id(&self.id.0.at(ctx.version)).s("(").f(|t| { for (i, id) in self.def.field_ids.iter().enumerate() { if i > 0 { t.s(","); diff --git a/src/pg/schema/mod.rs b/src/buildtime/pg/schema/mod.rs similarity index 100% rename from src/pg/schema/mod.rs rename to src/buildtime/pg/schema/mod.rs diff --git a/src/pg/schema/node.rs b/src/buildtime/pg/schema/node.rs similarity index 96% rename from src/pg/schema/node.rs rename to src/buildtime/pg/schema/node.rs index 04a0140..f9b4f4a 100644 --- a/src/pg/schema/node.rs +++ b/src/buildtime/pg/schema/node.rs @@ -3,7 +3,7 @@ use std::{ }; use enum_dispatch::enum_dispatch; use samevariant::samevariant; -use crate::graphmigrate::Comparison; +use crate::buildtime::graphmigrate::Comparison; use super::{ table::{ NodeTable_, @@ -64,7 +64,7 @@ impl Node { } } -impl<'a> crate::graphmigrate::NodeData for Node { +impl<'a> crate::buildtime::graphmigrate::NodeData for Node { type O = PgMigrateCtx; type I = Id; diff --git a/src/pg/schema/table.rs b/src/buildtime/pg/schema/table.rs similarity index 79% rename from src/pg/schema/table.rs rename to src/buildtime/pg/schema/table.rs index 88721c0..7a2159c 100644 --- a/src/pg/schema/table.rs +++ b/src/buildtime/pg/schema/table.rs @@ -5,7 +5,7 
@@ use std::{ }, collections::HashSet, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, pg::types::to_sql_type, graphmigrate::Comparison, @@ -35,6 +35,12 @@ impl Display for TableId { } } +impl TableId { + pub fn at(&self, version: i64) -> String { + format!("{}_v{}", self.0, version) + } +} + #[derive(Clone)] pub struct NodeTable_ { pub id: TableId, @@ -43,13 +49,15 @@ pub struct NodeTable_ { impl NodeTable_ { pub fn compare(&self, _old: &Self, _created: &HashSet) -> Comparison { - Comparison::DoNothing + Comparison::Update } } impl NodeData for NodeTable_ { - fn update(&self, _ctx: &mut PgMigrateCtx, _old: &Self) { - unreachable!(); + fn update(&self, ctx: &mut PgMigrateCtx, _old: &Self) { + let mut stmt = Tokens::new(); + stmt.s("alter table").id(&self.id.at(ctx.version - 1)).s("rename to").id(&self.id.at(ctx.version)); + ctx.statements.push(stmt.to_string()); } } @@ -75,7 +83,7 @@ impl NodeDataDispatch for NodeTable_ { fn create(&self, ctx: &mut PgMigrateCtx) { let mut stmt = Tokens::new(); - stmt.s("create table").id(&self.id.0).s("("); + stmt.s("create table").id(&self.id.at(ctx.version)).s("("); for (i, f) in self.fields.iter().enumerate() { if i > 0 { stmt.s(","); @@ -90,6 +98,6 @@ impl NodeDataDispatch for NodeTable_ { } fn delete(&self, ctx: &mut PgMigrateCtx) { - ctx.statements.push(Tokens::new().s("drop table").id(&self.id.0).to_string()); + ctx.statements.push(Tokens::new().s("drop table").id(&self.id.at(ctx.version - 1)).to_string()); } } diff --git a/src/pg/schema/utils.rs b/src/buildtime/pg/schema/utils.rs similarity index 71% rename from src/pg/schema/utils.rs rename to src/buildtime/pg/schema/utils.rs index 2bf7cef..918cd1d 100644 --- a/src/pg/schema/utils.rs +++ b/src/buildtime/pg/schema/utils.rs @@ -1,5 +1,5 @@ use enum_dispatch::enum_dispatch; -use crate::utils::Errs; +use crate::buildtime::utils::Errs; use super::{ node::{ Node, @@ -8,19 +8,21 @@ use super::{ pub(crate) struct PgMigrateCtx { pub(crate) errs: Errs, - pub statements: 
Vec, + pub(crate) statements: Vec, + pub(crate) version: i64, } impl PgMigrateCtx { - pub fn new(errs: Errs) -> Self { + pub fn new(errs: Errs, version: i64) -> Self { Self { errs: errs, + version: version, statements: Default::default(), } } } -pub(crate) type MigrateNode = crate::graphmigrate::Node; +pub(crate) type MigrateNode = crate::buildtime::graphmigrate::Node; #[enum_dispatch] pub(crate) trait NodeDataDispatch { diff --git a/src/pg/types.rs b/src/buildtime/pg/types.rs similarity index 100% rename from src/pg/types.rs rename to src/buildtime/pg/types.rs diff --git a/src/pg/utils.rs b/src/buildtime/pg/utils.rs similarity index 100% rename from src/pg/utils.rs rename to src/buildtime/pg/utils.rs diff --git a/src/sqlite/mod.rs b/src/buildtime/sqlite/mod.rs similarity index 64% rename from src/sqlite/mod.rs rename to src/buildtime/sqlite/mod.rs index 3bb9503..89a0c9b 100644 --- a/src/sqlite/mod.rs +++ b/src/buildtime/sqlite/mod.rs @@ -10,16 +10,18 @@ use quote::{ use std::{ collections::HashMap, path::Path, - fs, }; -use crate::{ +use crate::buildtime::{ sqlite::{ types::{ Type, }, queries::expr::ExprValName, }, - utils::Errs, + utils::{ + Errs, + Output, + }, }; use self::{ queries::{ @@ -731,33 +733,44 @@ impl IndexBuilder { } } -/// Generate Rust code for migrations and queries. 
-/// -/// # Arguments -/// -/// * `output` - the path to a single rust source file where the output will be written -/// -/// # Returns -/// -/// * Error - a list of validation or generation errors that occurred -pub fn generate(output: &Path, versions: Vec<(usize, Version)>, queries: Vec) -> Result<(), Vec> { +fn build_field_lookup(field_lookup: &mut HashMap>, version: &Version) { + for v in version.schema.values() { + match &v.body { + Node::Field(f) => { + match field_lookup.entry(f.id.0.clone()) { + std::collections::hash_map::Entry::Occupied(_) => { }, + std::collections::hash_map::Entry::Vacant(e) => { + e.insert(HashMap::new()); + }, + }; + let table = field_lookup.get_mut(&f.id.0).unwrap(); + table.insert(f.id.clone(), (f.def.name.clone(), f.def.type_.type_.clone())); + }, + _ => { }, + }; + } +} + +pub fn generate_migrations(output: &mut Output, versions: &Vec<(i64, Version)>) -> Result<(), Vec> { let mut errs = Errs::new(); let mut migrations = vec![]; - let mut prev_version: Option = None; + let mut prev_version: Option<&Version> = None; let mut prev_version_i: Option = None; let mut field_lookup = HashMap::new(); for (version_i, version) in versions { + let version_i = *version_i; let path = rpds::vector![format!("Migration to {}", version_i)]; let mut migration = vec![]; fn do_migration_query( errs: &mut Errs, + version_i: i64, path: &rpds::Vector, migration: &mut Vec, field_lookup: &HashMap>, q: &dyn QueryBody, ) { - let mut qctx = SqliteQueryCtx::new(errs.clone(), &field_lookup); + let mut qctx = SqliteQueryCtx::new(errs.clone(), version_i, &field_lookup); let e_res = q.build(&mut qctx, path, QueryResCount::None); if !qctx.rust_args.is_empty() { qctx.errs.err(path, format!("Migration statements can't receive arguments")); @@ -773,6 +786,7 @@ pub fn generate(output: &Path, versions: Vec<(usize, Version)>, queries: Vec, queries: Vec, queries: Vec { - match field_lookup.entry(f.id.0.clone()) { - std::collections::hash_map::Entry::Occupied(_) => { }, - 
std::collections::hash_map::Entry::Vacant(e) => { - e.insert(HashMap::new()); - }, - }; - let table = field_lookup.get_mut(&f.id.0).unwrap(); - table.insert(f.id.clone(), (f.def.name.clone(), f.def.type_.type_.clone())); - }, - _ => { }, - }; - } + build_field_lookup(&mut field_lookup, &version); // Main migrations { - let mut state = SqliteMigrateCtx::new(errs.clone()); - crate::graphmigrate::migrate(&mut state, prev_version.take().map(|s| s.schema), &version.schema); + let mut state = SqliteMigrateCtx::new(errs.clone(), version_i); + crate::buildtime::graphmigrate::migrate( + &mut state, + prev_version.take().map(|s| &s.schema), + &version.schema, + ); for statement in &state.statements { migration.push(quote!{ txn.execute(#statement, ()) ?; @@ -828,6 +829,7 @@ pub fn generate(output: &Path, versions: Vec<(usize, Version)>, queries: Vec, queries: Vec = HashMap::new(); - for q in queries { - let path = rpds::vector![format!("Query {}", q.name)]; - let mut ctx = SqliteQueryCtx::new(errs.clone(), &field_lookup); - let res = QueryBody::build(q.body.as_ref(), &mut ctx, &path, q.res_count.clone()); - let ident = format_ident!("{}", q.name); - let q_text = res.1.to_string(); - let args = ctx.rust_args.split_off(0); - let args_forward = ctx.query_args.split_off(0); - drop(ctx); - let (res_ident, res_def, unforward_res) = { - fn convert_one_res( - errs: &mut Errs, - path: &rpds::Vector, - i: usize, - k: &ExprValName, - v: &Type, - ) -> Option<(Ident, TokenStream, TokenStream)> { - if k.name.is_empty() { - errs.err( - path, - format!("Result element {} has no name; name it using `rename` if this is intentional", i), + let last_version_i = prev_version_i.unwrap() as i64; + output.data.push(quote!{ + pub fn migrate(db: &mut rusqlite::Connection) -> Result <(), + good_ormning:: runtime:: Error > { + db.execute( + "create table if not exists __good_version (rid int primary key, version bigint not null, lock int not null);", + (), + )?; + db.execute( + "insert into 
__good_version (rid, version, lock) values (0, -1, 0) on conflict do nothing;", + (), + )?; + loop { + let txn = db.transaction()?; + match(|| { + let mut stmt = + txn.prepare( + "update __good_version set lock = 1 where rid = 0 and lock = 0 returning version", + )?; + let mut rows = stmt.query(())?; + let version = match rows.next()? { + Some(r) => { + let ver: i64 = r.get(0usize)?; + ver + }, + None => return Ok(false), + }; + drop(rows); + stmt.finalize()?; + if version > #last_version_i { + return Err( + good_ormning::runtime::Error::Other( + format!( + "The latest known version is {}, but the schema is at unknown version {}", + #last_version_i, + version + ), + ), ); - return None; } - let mut ident: TokenStream = match v.type_.type_ { - types::SimpleSimpleType::U32 => quote!(u32), - types::SimpleSimpleType::I32 => quote!(i32), - types::SimpleSimpleType::I64 => quote!(i64), - types::SimpleSimpleType::F32 => quote!(f32), - types::SimpleSimpleType::F64 => quote!(f64), - types::SimpleSimpleType::Bool => quote!(bool), - types::SimpleSimpleType::String => quote!(String), - types::SimpleSimpleType::Bytes => quote!(Vec < u8 >), - types::SimpleSimpleType::UtcTimeS => quote!(chrono:: DateTime < chrono:: Utc >), - types::SimpleSimpleType::UtcTimeMs => quote!(chrono:: DateTime < chrono:: Utc >), - }; - if v.opt { - ident = quote!(Option < #ident >); + #( + #migrations + ) * txn.execute( + "update __good_version set version = $1, lock = 0", + rusqlite::params![#last_version_i] + ) ?; + let out: Result < bool, + good_ormning::runtime::Error >= Ok(true); + out + })() { + Err(e) => { + match txn.rollback() { + Err(e1) => { + return Err( + good_ormning::runtime::Error::Other( + format!( + "{}\n\nRolling back the transaction due to the above also failed: {}", + e, + e1 + ), + ), + ); + }, + Ok(_) => { + return Err(e); + }, + }; } - let mut unforward = match v.type_.type_ { - types::SimpleSimpleType::U32 | - types::SimpleSimpleType::I32 | - types::SimpleSimpleType::I64 | - 
types::SimpleSimpleType::F32 | - types::SimpleSimpleType::F64 | - types::SimpleSimpleType::Bool | - types::SimpleSimpleType::String | - types::SimpleSimpleType::Bytes => { - quote!{ - let x: #ident = r.get(#i) ?; - } - }, - types::SimpleSimpleType::UtcTimeS => { - quote!{ - let x: i64 = r.get(#i) ?; - let x = chrono::TimeZone::timestamp_opt(&chrono::Utc, x, 0).unwrap(); - } - }, - types::SimpleSimpleType::UtcTimeMs => { - quote!{ - let x: String = r.get(#i) ?; - let x = - chrono::DateTime::::from( - chrono::DateTime::::parse_from_rfc3339( - &x, - ).map_err(|e| GoodError(e.to_string()))?, - ); - } - }, - }; - if let Some(custom) = &v.type_.custom { - ident = match syn::parse_str::(&custom) { - Ok(i) => i.to_token_stream(), + Ok(migrated) => { + match txn.commit() { Err(e) => { - errs.err( - path, - format!( - "Couldn't parse provided custom type name [{}] as identifier path: {:?}", - custom, - e + return Err( + good_ormning::runtime::Error::Other( + format!("Error committing the migration transaction: {}", e), ), ); - return None; }, - }; - if v.opt { - unforward = quote!{ - #unforward let x = if let Some(x) = x { - Some(#ident:: from_sql(x).map_err(|e| GoodError(e.to_string())) ?) 
+ Ok(_) => { + if migrated { + return Ok(()) + } else { + std::thread::sleep(std::time::Duration::from_millis(5 * 1000)); } - else { - None - }; - }; - ident = quote!(Option < #ident >); - } else { - unforward = quote!{ - #unforward let x = #ident:: from_sql(x).map_err(|e| GoodError(e.to_string())) ?; - }; - } - } - return Some((format_ident!("{}", utils::sanitize(&k.name).1), ident, quote!({ - #unforward x - }))); - } - - if res.0.0.len() == 1 { - let e = &res.0.0[0]; - let (_, type_ident, unforward) = match convert_one_res(&mut errs, &path, 0, &e.0, &e.1) { - None => { - continue; - }, - Some(x) => x, - }; - (type_ident, None, unforward) - } else { - let mut fields = vec![]; - let mut unforward_fields = vec![]; - for (i, (k, v)) in res.0.0.into_iter().enumerate() { - let (k_ident, type_ident, unforward) = match convert_one_res(&mut errs, &path, i, &k, &v) { - Some(x) => x, - None => continue, + }, }; - fields.push(quote!{ - pub #k_ident: #type_ident - }); - unforward_fields.push(quote!{ - #k_ident: #unforward - }); } - let body = quote!({ - #(#fields,) * - }); - let res_type_count = res_type_idents.len(); - let (res_ident, res_def) = match res_type_idents.entry(body.to_string()) { - std::collections::hash_map::Entry::Occupied(e) => { - (e.get().clone(), None) - }, - std::collections::hash_map::Entry::Vacant(e) => { - let ident = if let Some(name) = q.res_name { - format_ident!("{}", name) - } else { - format_ident!("DbRes{}", res_type_count) - }; - e.insert(ident.clone()); - let res_def = quote!(pub struct #ident #body); - (ident, Some(res_def)) - }, - }; - let unforward = quote!(#res_ident { - #(#unforward_fields,) * - }); - (res_ident.to_token_stream(), res_def, unforward) } - }; - let db_arg = quote!(db: &mut rusqlite::Connection); - match q.res_count { - QueryResCount::None => { - db_others.push(quote!{ - pub fn #ident(#db_arg, #(#args,) *) -> Result <(), - GoodError > { - db.execute( - #q_text, - rusqlite::params![#(#args_forward,) *] - ).map_err(|e| 
GoodError(e.to_string())) ?; - Ok(()) - } - }); - }, - QueryResCount::MaybeOne => { - if let Some(res_def) = res_def { - db_others.push(res_def); - } - db_others.push(quote!{ - pub fn #ident(#db_arg, #(#args,) *) -> Result < Option < #res_ident >, - GoodError > { - let mut stmt = db.prepare(#q_text) ?; - let mut rows = - stmt - .query(rusqlite::params![#(#args_forward,) *]) - .map_err(|e| GoodError(e.to_string()))?; - let r = rows.next()?; - if let Some(r) = r { - return Ok(Some(#unforward_res)); - } - Ok(None) - } - }); - }, - QueryResCount::One => { - if let Some(res_def) = res_def { - db_others.push(res_def); - } - db_others.push(quote!{ - pub fn #ident(#db_arg, #(#args,) *) -> Result < #res_ident, - GoodError > { - let mut stmt = db.prepare(#q_text) ?; - let mut rows = - stmt - .query(rusqlite::params![#(#args_forward,) *]) - .map_err(|e| GoodError(e.to_string()))?; - let r = - rows - .next()? - .ok_or_else( - || GoodError("Query expected to return one row but returned no rows".into()), - )?; - Ok(#unforward_res) - } - }); - }, - QueryResCount::Many => { - if let Some(res_def) = res_def { - db_others.push(res_def); - } - db_others.push(quote!{ - pub fn #ident(#db_arg, #(#args,) *) -> Result < Vec < #res_ident >, - GoodError > { - let mut out = vec![]; - let mut stmt = db.prepare(#q_text) ?; - let mut rows = - stmt - .query(rusqlite::params![#(#args_forward,) *]) - .map_err(|e| GoodError(e.to_string()))?; - while let Some(r) = rows.next() ? 
{ - out.push(#unforward_res); - } - Ok(out) - } - }); - }, } } - } + }); + errs.raise()?; + Ok(()) +} - // Compile, output - let last_version_i = prev_version_i.unwrap() as i64; - let tokens = quote!{ - #[derive(Debug)] - pub struct GoodError(pub String); - impl std::fmt::Display for GoodError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } - } - impl std::error::Error for GoodError { } - impl From for GoodError { - fn from(value: rusqlite::Error) -> Self { - GoodError(value.to_string()) - } - } - pub fn migrate(db: &mut rusqlite::Connection) -> Result <(), - GoodError > { - let txn = db.transaction().map_err(|e| GoodError(e.to_string()))?; - match(|| { - txn.execute("create table if not exists __good_version (version bigint not null);", ())?; - let mut stmt = txn.prepare("select version from __good_version limit 1")?; - let mut rows = stmt.query(())?; - let version = match rows.next()? { - Some(r) => { - let ver: i64 = r.get(0usize)?; - ver +pub fn generate_queries( + output: &mut Output, + version: &(i64, Version), + queries: &Vec, +) -> Result<(), Vec> { + let mut errs = Errs::new(); + let mut field_lookup = HashMap::new(); + build_field_lookup(&mut field_lookup, &version.1); + let mut res_type_idents: HashMap = HashMap::new(); + for q in queries { + let path = rpds::vector![format!("Query {}", q.name)]; + let mut ctx = SqliteQueryCtx::new(errs.clone(), version.0, &field_lookup); + let res = QueryBody::build(q.body.as_ref(), &mut ctx, &path, q.res_count.clone()); + let ident = format_ident!("{}", q.name); + let q_text = res.1.to_string(); + let args = ctx.rust_args.split_off(0); + let args_forward = ctx.query_args.split_off(0); + drop(ctx); + let (res_ident, res_def, unforward_res) = { + fn convert_one_res( + errs: &mut Errs, + path: &rpds::Vector, + i: usize, + k: &ExprValName, + v: &Type, + ) -> Option<(Ident, TokenStream, TokenStream)> { + if k.name.is_empty() { + errs.err( + path, + format!("Result element {} has 
no name; name it using `rename` if this is intentional", i), + ); + return None; + } + let mut ident: TokenStream = match v.type_.type_ { + types::SimpleSimpleType::U32 => quote!(u32), + types::SimpleSimpleType::I32 => quote!(i32), + types::SimpleSimpleType::I64 => quote!(i64), + types::SimpleSimpleType::F32 => quote!(f32), + types::SimpleSimpleType::F64 => quote!(f64), + types::SimpleSimpleType::Bool => quote!(bool), + types::SimpleSimpleType::String => quote!(String), + types::SimpleSimpleType::Bytes => quote!(Vec < u8 >), + types::SimpleSimpleType::UtcTimeS => quote!(chrono:: DateTime < chrono:: Utc >), + types::SimpleSimpleType::UtcTimeMs => quote!(chrono:: DateTime < chrono:: Utc >), + }; + if v.opt { + ident = quote!(Option < #ident >); + } + let mut unforward = match v.type_.type_ { + types::SimpleSimpleType::U32 | + types::SimpleSimpleType::I32 | + types::SimpleSimpleType::I64 | + types::SimpleSimpleType::F32 | + types::SimpleSimpleType::F64 | + types::SimpleSimpleType::Bool | + types::SimpleSimpleType::String | + types::SimpleSimpleType::Bytes => { + quote!{ + let x: #ident = r.get(#i) ?; + } }, - None => { - let mut stmt = - txn.prepare("insert into __good_version (version) values (-1) returning version")?; - let mut rows = stmt.query(())?; - let ver: i64 = - rows - .next()? - .ok_or_else(|| GoodError("Insert version failed to return any values".into()))? 
- .get(0usize)?; - ver + types::SimpleSimpleType::UtcTimeS => { + quote!{ + let x: i64 = r.get(#i) ?; + let x = chrono::TimeZone::timestamp_opt(&chrono::Utc, x, 0).unwrap(); + } + }, + types::SimpleSimpleType::UtcTimeMs => { + quote!{ + let x: String = r.get(#i) ?; + let x = + chrono::DateTime::::from( + chrono::DateTime::::parse_from_rfc3339( + &x, + ).map_err(|e| good_ormning::runtime::Error::Other(e.to_string()))?, + ); + } }, }; - #( - #migrations - ) * txn.execute("update __good_version set version = $1", rusqlite::params![#last_version_i]) ?; - let out: Result <(), - GoodError >= Ok(()); - out - })() { - Err(e) => { - match txn.rollback() { - Err(e1) => { - return Err( - GoodError( - format!( - "{}\n\nRolling back the transaction due to the above also failed: {}", - e, - e1 - ), + if let Some(custom) = &v.type_.custom { + ident = match syn::parse_str::(&custom) { + Ok(i) => i.to_token_stream(), + Err(e) => { + errs.err( + path, + format!( + "Couldn't parse provided custom type name [{}] as identifier path: {:?}", + custom, + e ), ); - }, - Ok(_) => { - return Err(e); + return None; }, }; + if v.opt { + unforward = quote!{ + #unforward let x = if let Some(x) = x { + Some( + #ident:: from_sql( + x + ).map_err(|e| good_ormning::runtime::Error::Other(e.to_string())) ? 
+ ) + } + else { + None + }; + }; + ident = quote!(Option < #ident >); + } else { + unforward = quote!{ + #unforward let x = #ident:: from_sql( + x + ).map_err(|e| good_ormning::runtime::Error::Other(e.to_string())) ?; + }; + } } - Ok(_) => { - match txn.commit() { - Err(e) => { - return Err(GoodError(format!("Error committing the migration transaction: {}", e))); - }, - Ok(_) => { }, + return Some((format_ident!("{}", utils::sanitize(&k.name).1), ident, quote!({ + #unforward x + }))); + } + + if res.0.0.len() == 1 { + let e = &res.0.0[0]; + let (_, type_ident, unforward) = match convert_one_res(&mut errs, &path, 0, &e.0, &e.1) { + None => { + continue; + }, + Some(x) => x, + }; + (type_ident, None, unforward) + } else { + let mut fields = vec![]; + let mut unforward_fields = vec![]; + for (i, (k, v)) in res.0.0.into_iter().enumerate() { + let (k_ident, type_ident, unforward) = match convert_one_res(&mut errs, &path, i, &k, &v) { + Some(x) => x, + None => continue, }; + fields.push(quote!{ + pub #k_ident: #type_ident + }); + unforward_fields.push(quote!{ + #k_ident: #unforward + }); } + let body = quote!({ + #(#fields,) * + }); + let res_type_count = res_type_idents.len(); + let (res_ident, res_def) = match res_type_idents.entry(body.to_string()) { + std::collections::hash_map::Entry::Occupied(e) => { + (e.get().clone(), None) + }, + std::collections::hash_map::Entry::Vacant(e) => { + let ident = if let Some(name) = &q.res_name { + format_ident!("{}", name) + } else { + format_ident!("DbRes{}", res_type_count) + }; + e.insert(ident.clone()); + let res_def = quote!(pub struct #ident #body); + (ident, Some(res_def)) + }, + }; + let unforward = quote!(#res_ident { + #(#unforward_fields,) * + }); + (res_ident.to_token_stream(), res_def, unforward) } - Ok(()) - } - #(#db_others) * - }; - if let Some(p) = output.parent() { - if let Err(e) = fs::create_dir_all(&p) { - errs.err( - &rpds::vector![], - format!("Error creating output parent directories {}: {:?}", 
p.to_string_lossy(), e), - ); + }; + let db_arg = quote!(db: &mut rusqlite::Connection); + match q.res_count { + QueryResCount::None => { + output.data.push(quote!{ + pub fn #ident(#db_arg, #(#args,) *) -> Result <(), + good_ormning:: runtime:: Error > { + db.execute(#q_text, rusqlite::params![#(#args_forward,) *]) ?; + Ok(()) + } + }); + }, + QueryResCount::MaybeOne => { + if let Some(res_def) = res_def { + output.data.push(res_def); + } + output.data.push(quote!{ + pub fn #ident(#db_arg, #(#args,) *) -> Result < Option < #res_ident >, + good_ormning:: runtime:: Error > { + let mut stmt = db.prepare(#q_text) ?; + let mut rows = stmt.query(rusqlite::params![#(#args_forward,) *])?; + let r = rows.next()?; + if let Some(r) = r { + return Ok(Some(#unforward_res)); + } + Ok(None) + } + }); + }, + QueryResCount::One => { + if let Some(res_def) = res_def { + output.data.push(res_def); + } + output.data.push(quote!{ + pub fn #ident(#db_arg, #(#args,) *) -> Result < #res_ident, + good_ormning:: runtime:: Error > { + let mut stmt = db.prepare(#q_text) ?; + let mut rows = stmt.query(rusqlite::params![#(#args_forward,) *])?; + let r = + rows + .next()? + .ok_or_else( + || good_ormning::runtime::Error::Other( + "Query expected to return one row but returned no rows".into(), + ), + )?; + Ok(#unforward_res) + } + }); + }, + QueryResCount::Many => { + if let Some(res_def) = res_def { + output.data.push(res_def); + } + output.data.push(quote!{ + pub fn #ident(#db_arg, #(#args,) *) -> Result < Vec < #res_ident >, + good_ormning:: runtime:: Error > { + let mut out = vec![]; + let mut stmt = db.prepare(#q_text) ?; + let mut rows = stmt.query(rusqlite::params![#(#args_forward,) *])?; + while let Some(r) = rows.next() ? 
{ + out.push(#unforward_res); + } + Ok(out) + } + }); + }, } } - match genemichaels::format_str(&tokens.to_string(), &genemichaels::FormatConfig::default()) { - Ok(src) => { - match fs::write(output, src.rendered.as_bytes()) { - Ok(_) => { }, - Err(e) => errs.err( - &rpds::vector![], - format!("Failed to write generated code to {}: {:?}", output.to_string_lossy(), e), - ), - }; - }, - Err(e) => { - errs.err(&rpds::vector![], format!("Error formatting generated code: {:?}\n{}", e, tokens)); - }, - }; errs.raise()?; Ok(()) } +/// Generate Rust code for migrations and queries. +/// +/// # Arguments +/// +/// * `output` - the path to a single rust source file where the output will be written +/// +/// # Returns +/// +/// * Error - a list of validation or generation errors that occurred +pub fn generate(output: &Path, versions: &Vec<(i64, Version)>, queries: &Vec) -> Result<(), Vec> { + let mut out = Output::new(); + let mut errs = vec![]; + if let Err(e) = generate_migrations(&mut out, &versions) { + errs.extend(e); + } + let last_version = versions.last().unwrap(); + if let Err(e) = generate_queries(&mut out, last_version, queries) { + errs.extend(e); + } + if let Err(e) = out.write(output) { + errs.push(e.to_string()); + } + if !errs.is_empty() { + return Err(errs); + } + Ok(()) +} + #[cfg(test)] mod test { use std::{ path::PathBuf, str::FromStr, }; - use crate::sqlite::{ + use crate::buildtime::sqlite::{ new_select, QueryResCount, new_insert, @@ -1216,13 +1238,13 @@ mod test { fn test_add_field_dup_bad() { generate(&PathBuf::from_str("/dev/null").unwrap(), vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, { + (1, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -1237,13 +1259,13 @@ mod test { fn test_add_table_dup_bad() { 
generate(&PathBuf::from_str("/dev/null").unwrap(), vec![ // Versions (previous) - (0usize, { + (0, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); v }), - (1usize, { + (1, { let mut v = Version::default(); let bananna = v.table("bananna"); bananna.field(&mut v, "z437INV6D", "hizat", field_str().build()); @@ -1262,7 +1284,7 @@ mod test { assert!( generate( &PathBuf::from_str("/dev/null").unwrap(), - vec![(0usize, v)], + vec![(0, v)], vec![new_select(&bananna).return_field(&hizat).build_query("x", QueryResCount::None)], ).is_err() ); @@ -1276,7 +1298,7 @@ mod test { assert!( generate( &PathBuf::from_str("/dev/null").unwrap(), - vec![(0usize, v)], + vec![(0, v)], vec![new_select(&bananna).build_query("x", QueryResCount::None)], ).is_err() ); @@ -1290,7 +1312,7 @@ mod test { assert!( generate( &PathBuf::from_str("/dev/null").unwrap(), - vec![(0usize, v)], + vec![(0, v)], vec![ new_insert(&bananna, vec![(hizat.id.clone(), Expr::LitString("hoy".into()))]) .return_field(&hizat) diff --git a/src/sqlite/queries/delete.rs b/src/buildtime/sqlite/queries/delete.rs similarity index 92% rename from src/sqlite/queries/delete.rs rename to src/buildtime/sqlite/queries/delete.rs index cab0d02..94b0db3 100644 --- a/src/sqlite/queries/delete.rs +++ b/src/buildtime/sqlite/queries/delete.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ utils::Tokens, sqlite::{ schema::table::TableId, @@ -31,7 +31,7 @@ impl QueryBody for Delete { ctx: &mut super::utils::SqliteQueryCtx, path: &rpds::Vector, res_count: QueryResCount, - ) -> (super::expr::ExprType, crate::utils::Tokens) { + ) -> (super::expr::ExprType, Tokens) { // Prep let mut scope = HashMap::new(); for (k, v) in match ctx.tables.get(&self.table) { @@ -46,7 +46,7 @@ impl QueryBody for Delete { // Build query let mut out = Tokens::new(); - out.s("delete from").id(&self.table.0); + out.s("delete 
from").id(&self.table.at(ctx.version)); if let Some(where_) = &self.where_ { out.s("where"); let path = path.push_back("Where".into()); diff --git a/src/sqlite/queries/expr.rs b/src/buildtime/sqlite/queries/expr.rs similarity index 99% rename from src/sqlite/queries/expr.rs rename to src/buildtime/sqlite/queries/expr.rs index bddc6ec..c59bd83 100644 --- a/src/sqlite/queries/expr.rs +++ b/src/buildtime/sqlite/queries/expr.rs @@ -10,7 +10,7 @@ use quote::{ use samevariant::samevariant; use syn::Path; use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ sqlite::{ types::{ Type, @@ -554,7 +554,7 @@ impl Expr { x, scope .iter() - .map(|e| format!("{}.{} ({})", e.0.0.0, e.0.1, e.1.0)) + .map(|e| format!("{}.{} ({})", e.0.0.at(ctx.version), e.0.1, e.1.0)) .collect::>() ), ); @@ -562,7 +562,7 @@ impl Expr { }, }; let mut out = Tokens::new(); - out.id(&x.0.0).s(".").id(&x.1); + out.id(&x.0.at(ctx.version)).s(".").id(&x.1); return (ExprType(vec![(ExprValName::field(x.clone(), t.0), t.1.clone())]), out); }, Expr::BinOp { left, op, right } => { diff --git a/src/sqlite/queries/insert.rs b/src/buildtime/sqlite/queries/insert.rs similarity index 97% rename from src/sqlite/queries/insert.rs rename to src/buildtime/sqlite/queries/insert.rs index 6d1940a..ae04fb0 100644 --- a/src/sqlite/queries/insert.rs +++ b/src/buildtime/sqlite/queries/insert.rs @@ -1,7 +1,7 @@ use std::{ collections::HashMap, }; -use crate::{ +use crate::buildtime::{ sqlite::{ schema::{ field::FieldId, @@ -58,7 +58,7 @@ impl QueryBody for Insert { // Build query let mut out = Tokens::new(); - out.s("insert into").id(&self.table.0).s("("); + out.s("insert into").id(&self.table.at(ctx.version)).s("("); for (i, (k, _)) in self.values.iter().enumerate() { if i > 0 { out.s(","); diff --git a/src/sqlite/queries/mod.rs b/src/buildtime/sqlite/queries/mod.rs similarity index 100% rename from src/sqlite/queries/mod.rs rename to src/buildtime/sqlite/queries/mod.rs diff --git a/src/sqlite/queries/select.rs 
b/src/buildtime/sqlite/queries/select.rs similarity index 98% rename from src/sqlite/queries/select.rs rename to src/buildtime/sqlite/queries/select.rs index 88c5e09..8d7b97d 100644 --- a/src/sqlite/queries/select.rs +++ b/src/buildtime/sqlite/queries/select.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ utils::Tokens, sqlite::{ types::Type, @@ -64,7 +64,7 @@ impl NamedSelectSource { return (vec![], Tokens::new()); }, }; - out.id(&s.0); + out.id(&s.at(ctx.version)); new_fields.iter().map(|e| (e.0.clone(), e.1.clone())).collect() }, }; diff --git a/src/sqlite/queries/update.rs b/src/buildtime/sqlite/queries/update.rs similarity index 93% rename from src/sqlite/queries/update.rs rename to src/buildtime/sqlite/queries/update.rs index b1cd20d..6353648 100644 --- a/src/sqlite/queries/update.rs +++ b/src/buildtime/sqlite/queries/update.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use crate::{ +use crate::buildtime::{ sqlite::{ schema::{ table::TableId, @@ -36,7 +36,7 @@ impl QueryBody for Update { ctx: &mut super::utils::SqliteQueryCtx, path: &rpds::Vector, res_count: QueryResCount, - ) -> (super::expr::ExprType, crate::utils::Tokens) { + ) -> (super::expr::ExprType, Tokens) { // Prep let mut scope = HashMap::new(); for (k, v) in match ctx.tables.get(&self.table) { @@ -51,7 +51,7 @@ impl QueryBody for Update { // Build query let mut out = Tokens::new(); - out.s("update").id(&self.table.0); + out.s("update").id(&self.table.at(ctx.version)); build_set(ctx, path, &scope, &mut out, &self.values); if let Some(where_) = &self.where_ { out.s("where"); diff --git a/src/sqlite/queries/utils.rs b/src/buildtime/sqlite/queries/utils.rs similarity index 93% rename from src/sqlite/queries/utils.rs rename to src/buildtime/sqlite/queries/utils.rs index 6fb8e96..f04ef2f 100644 --- a/src/sqlite/queries/utils.rs +++ b/src/buildtime/sqlite/queries/utils.rs @@ -2,7 +2,7 @@ use std::{ collections::HashMap, }; use proc_macro2::TokenStream; -use 
crate::{ +use crate::buildtime::{ sqlite::{ types::Type, schema::{ @@ -32,11 +32,17 @@ pub struct SqliteQueryCtx<'a> { pub(crate) rust_arg_lookup: HashMap, pub(crate) rust_args: Vec, pub(crate) query_args: Vec, + pub(crate) version: i64, } impl<'a> SqliteQueryCtx<'a> { - pub(crate) fn new(errs: Errs, tables: &'a HashMap>) -> Self { + pub(crate) fn new( + errs: Errs, + version: i64, + tables: &'a HashMap>, + ) -> Self { Self { + version: version, tables: tables, errs: errs, rust_arg_lookup: Default::default(), diff --git a/src/sqlite/schema/constraint.rs b/src/buildtime/sqlite/schema/constraint.rs similarity index 88% rename from src/sqlite/schema/constraint.rs rename to src/buildtime/sqlite/schema/constraint.rs index 14925f7..732fd3f 100644 --- a/src/sqlite/schema/constraint.rs +++ b/src/buildtime/sqlite/schema/constraint.rs @@ -2,7 +2,7 @@ use std::{ fmt::Display, collections::HashSet, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, graphmigrate::Comparison, }; @@ -73,7 +73,7 @@ impl SqliteNodeDataDispatch for NodeConstraint_ { fn create(&self, ctx: &mut SqliteMigrateCtx) { let mut stmt = Tokens::new(); - stmt.s("alter table").id(&self.id.0.0).s("add constraint").id(&self.id.1); + stmt.s("alter table").id(&self.id.0.at(ctx.version)).s("add constraint").id(&self.id.1); match &self.def.type_ { ConstraintType::PrimaryKey(x) => { stmt.s("primary key (").f(|t| { @@ -96,7 +96,7 @@ impl SqliteNodeDataDispatch for NodeConstraint_ { }).s(") references ").f(|t| { for (i, id) in x.fields.iter().enumerate() { if i == 0 { - t.id(&id.1.0.0).s("("); + t.id(&id.1.0.at(ctx.version)).s("("); } else { t.s(","); } @@ -116,7 +116,12 @@ impl SqliteNodeDataDispatch for NodeConstraint_ { ctx .statements .push( - Tokens::new().s("alter table").id(&self.id.0.0).s("drop constraint").id(&self.id.1).to_string(), + Tokens::new() + .s("alter table") + .id(&self.id.0.at(ctx.version - 1)) + .s("drop constraint") + .id(&self.id.1) + .to_string(), ); } } diff --git 
a/src/sqlite/schema/field.rs b/src/buildtime/sqlite/schema/field.rs similarity index 95% rename from src/sqlite/schema/field.rs rename to src/buildtime/sqlite/schema/field.rs index c41273b..dcf4dcd 100644 --- a/src/sqlite/schema/field.rs +++ b/src/buildtime/sqlite/schema/field.rs @@ -8,7 +8,7 @@ use std::{ HashSet, }, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, sqlite::{ types::{ @@ -225,7 +225,7 @@ impl SqliteNodeDataDispatch for NodeField_ { let mut stmt = Tokens::new(); stmt .s("alter table") - .id(&self.id.0.0) + .id(&self.id.0.at(ctx.version)) .s("add column") .id(&self.id.1) .s(to_sql_type(&self.def.type_.type_.type_.type_)); @@ -233,7 +233,7 @@ impl SqliteNodeDataDispatch for NodeField_ { if let Some(d) = &self.def.type_.migration_default { stmt.s("not null default"); let qctx_fields = HashMap::new(); - let mut qctx = SqliteQueryCtx::new(ctx.errs.clone(), &qctx_fields); + let mut qctx = SqliteQueryCtx::new(ctx.errs.clone(), ctx.version, &qctx_fields); let e_res = d.build(&mut qctx, &path, &HashMap::new()); check_same(&mut qctx.errs, &path, &ExprType(vec![(ExprValName::empty(), Type { type_: self.def.type_.type_.type_.clone(), @@ -264,7 +264,14 @@ impl SqliteNodeDataDispatch for NodeField_ { } ctx .statements - .push(Tokens::new().s("alter table").id(&self.id.0.0).s("drop column").id(&self.id.1).to_string()); + .push( + Tokens::new() + .s("alter table") + .id(&self.id.0.at(ctx.version - 1)) + .s("drop column") + .id(&self.id.1) + .to_string(), + ); } fn create_coalesce(&mut self, other: Node) -> Option { diff --git a/src/sqlite/schema/index.rs b/src/buildtime/sqlite/schema/index.rs similarity index 94% rename from src/sqlite/schema/index.rs rename to src/buildtime/sqlite/schema/index.rs index 327f68b..dee6c0a 100644 --- a/src/sqlite/schema/index.rs +++ b/src/buildtime/sqlite/schema/index.rs @@ -2,7 +2,7 @@ use std::{ fmt::Display, collections::HashSet, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, graphmigrate::Comparison, }; @@ -61,7 
+61,7 @@ impl SqliteNodeDataDispatch for NodeIndex_ { if self.def.unique { t.s("unique"); } - }).s("index").id(&self.id.1).s("on").id(&self.id.0.0).s("(").f(|t| { + }).s("index").id(&self.id.1).s("on").id(&self.id.0.at(ctx.version)).s("(").f(|t| { for (i, id) in self.def.field_ids.iter().enumerate() { if i > 0 { t.s(","); diff --git a/src/sqlite/schema/mod.rs b/src/buildtime/sqlite/schema/mod.rs similarity index 100% rename from src/sqlite/schema/mod.rs rename to src/buildtime/sqlite/schema/mod.rs diff --git a/src/sqlite/schema/node.rs b/src/buildtime/sqlite/schema/node.rs similarity index 96% rename from src/sqlite/schema/node.rs rename to src/buildtime/sqlite/schema/node.rs index 3b01567..1de2abb 100644 --- a/src/sqlite/schema/node.rs +++ b/src/buildtime/sqlite/schema/node.rs @@ -3,7 +3,7 @@ use std::{ }; use enum_dispatch::enum_dispatch; use samevariant::samevariant; -use crate::graphmigrate::Comparison; +use crate::buildtime::graphmigrate::Comparison; use super::{ table::{ NodeTable_, @@ -64,7 +64,7 @@ impl Node { } } -impl<'a> crate::graphmigrate::NodeData for Node { +impl<'a> crate::buildtime::graphmigrate::NodeData for Node { type O = SqliteMigrateCtx; type I = Id; diff --git a/src/sqlite/schema/table.rs b/src/buildtime/sqlite/schema/table.rs similarity index 80% rename from src/sqlite/schema/table.rs rename to src/buildtime/sqlite/schema/table.rs index 8a36fe9..3778447 100644 --- a/src/sqlite/schema/table.rs +++ b/src/buildtime/sqlite/schema/table.rs @@ -5,7 +5,7 @@ use std::{ }, collections::HashSet, }; -use crate::{ +use crate::buildtime::{ utils::Tokens, sqlite::types::to_sql_type, graphmigrate::Comparison, @@ -35,6 +35,12 @@ impl Display for TableId { } } +impl TableId { + pub fn at(&self, version: i64) -> String { + format!("{}_v{}", self.0, version) + } +} + #[derive(Clone)] pub struct NodeTable_ { pub id: TableId, @@ -43,13 +49,15 @@ pub struct NodeTable_ { impl NodeTable_ { pub fn compare(&self, _old: &Self, _created: &HashSet) -> Comparison { - 
Comparison::DoNothing + Comparison::Update } } impl SqliteNodeData for NodeTable_ { - fn update(&self, _ctx: &mut SqliteMigrateCtx, _old: &Self) { - unreachable!(); + fn update(&self, ctx: &mut SqliteMigrateCtx, _old: &Self) { + let mut stmt = Tokens::new(); + stmt.s("alter table").id(&self.id.at(ctx.version - 1)).s("rename to").id(&self.id.at(ctx.version)); + ctx.statements.push(stmt.to_string()); } } @@ -75,7 +83,7 @@ impl SqliteNodeDataDispatch for NodeTable_ { fn create(&self, ctx: &mut SqliteMigrateCtx) { let mut stmt = Tokens::new(); - stmt.s("create table").id(&self.id.0).s("("); + stmt.s("create table").id(&self.id.at(ctx.version)).s("("); for (i, f) in self.fields.iter().filter(|f| &f.0.1 != "rowid").enumerate() { if i > 0 { stmt.s(","); @@ -90,6 +98,6 @@ impl SqliteNodeDataDispatch for NodeTable_ { } fn delete(&self, ctx: &mut SqliteMigrateCtx) { - ctx.statements.push(Tokens::new().s("drop table").id(&self.id.0).to_string()); + ctx.statements.push(Tokens::new().s("drop table").id(&self.id.at(ctx.version - 1)).to_string()); } } diff --git a/src/sqlite/schema/utils.rs b/src/buildtime/sqlite/schema/utils.rs similarity index 77% rename from src/sqlite/schema/utils.rs rename to src/buildtime/sqlite/schema/utils.rs index 2de3476..e6dff00 100644 --- a/src/sqlite/schema/utils.rs +++ b/src/buildtime/sqlite/schema/utils.rs @@ -1,5 +1,5 @@ use enum_dispatch::enum_dispatch; -use crate::utils::Errs; +use crate::buildtime::utils::Errs; use super::{ node::{ Node, @@ -9,18 +9,20 @@ use super::{ pub(crate) struct SqliteMigrateCtx { pub(crate) errs: Errs, pub statements: Vec, + pub version: i64, } impl SqliteMigrateCtx { - pub fn new(errs: Errs) -> Self { + pub fn new(errs: Errs, version: i64) -> Self { Self { errs: errs, statements: Default::default(), + version: version, } } } -pub(crate) type MigrateNode = crate::graphmigrate::Node; +pub(crate) type MigrateNode = crate::buildtime::graphmigrate::Node; #[enum_dispatch] pub(crate) trait SqliteNodeDataDispatch { diff --git 
a/src/sqlite/types.rs b/src/buildtime/sqlite/types.rs
similarity index 100%
rename from src/sqlite/types.rs
rename to src/buildtime/sqlite/types.rs
diff --git a/src/sqlite/utils.rs b/src/buildtime/sqlite/utils.rs
similarity index 100%
rename from src/sqlite/utils.rs
rename to src/buildtime/sqlite/utils.rs
diff --git a/src/utils.rs b/src/buildtime/utils.rs
similarity index 56%
rename from src/utils.rs
rename to src/buildtime/utils.rs
index a97b2ba..cf935bf 100644
--- a/src/utils.rs
+++ b/src/buildtime/utils.rs
@@ -1,7 +1,47 @@
 use std::{
     cell::RefCell,
     rc::Rc,
+    path::Path,
+    fs,
 };
+use quote::quote;
+use proc_macro2::TokenStream;
+
+pub struct Output {
+    pub(crate) data: Vec<TokenStream>,
+}
+
+impl Output {
+    pub fn new() -> Output {
+        Output { data: vec![] }
+    }
+
+    pub fn write(self, path: &Path) -> Result<(), String> {
+        if let Some(p) = path.parent() {
+            if let Err(e) = fs::create_dir_all(&p) {
+                return Err(format!("Error creating output parent directories {}: {:?}", p.to_string_lossy(), e));
+            }
+        }
+        let data = self.data;
+        let tokens = quote!{
+            #(#data) *
+        };
+        match genemichaels::format_str(&tokens.to_string(), &genemichaels::FormatConfig::default()) {
+            Ok(src) => {
+                match fs::write(path, src.rendered.as_bytes()) {
+                    Ok(_) => { },
+                    Err(e) => return Err(
+                        format!("Failed to write generated code to {}: {:?}", path.to_string_lossy(), e),
+                    ),
+                };
+            },
+            Err(e) => {
+                return Err(format!("Error formatting generated code: {:?}\n{}", e, tokens));
+            },
+        };
+        Ok(())
+    }
+}
 
 pub struct Tokens(String);
 
diff --git a/src/lib.rs b/src/lib.rs
index 2830d7e..8be3ba0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,4 +1,4 @@
-pub mod pg;
-pub mod sqlite;
-mod graphmigrate;
-mod utils;
+#[cfg(feature = "run")]
+pub mod runtime;
+#[cfg(feature = "build")]
+pub mod buildtime;
diff --git a/src/runtime/mod.rs b/src/runtime/mod.rs
new file mode 100644
index 0000000..a186597
--- /dev/null
+++ b/src/runtime/mod.rs
@@ -0,0 +1,44 @@
+#[cfg(feature = "pg")]
+use tokio_postgres::error::SqlState;
+
+#[derive(Debug)]
+pub enum Error {
+    BadSchema,
+    Other(String),
+}
+
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Error::BadSchema => "Current DB schema doesn't match query schema".fmt(f),
+            Error::Other(s) => s.fmt(f),
+        }
+    }
+}
+
+impl std::error::Error for Error { }
+
+#[cfg(feature = "sqlite")]
+impl From<rusqlite::Error> for Error {
+    fn from(value: rusqlite::Error) -> Self {
+        match &value {
+            rusqlite::Error::SqliteFailure(_, Some(m)) => {
+                if m.starts_with("no such table") {
+                    return Self::BadSchema
+                }
+                return Self::Other(value.to_string())
+            },
+            _ => return Self::Other(value.to_string()),
+        }
+    }
+}
+
+#[cfg(feature = "pg")]
+impl From<tokio_postgres::Error> for Error {
+    fn from(value: tokio_postgres::Error) -> Self {
+        if value.code() == Some(&SqlState::UNDEFINED_TABLE) {
+            return Self::BadSchema
+        }
+        return Self::Other(value.to_string())
+    }
+}