15 changes: 14 additions & 1 deletion server/catalog.go
@@ -2,11 +2,17 @@ package server

import (
"database/sql"
"log"
)

// initPgCatalog creates PostgreSQL compatibility functions and views in DuckDB
// DuckDB already has a pg_catalog schema with basic views, so we just add missing functions
func initPgCatalog(db *sql.DB) error {
// Switch to memory database to avoid creating views in DuckLake
// (DuckLake may be the default, but we want views in the local memory db)
db.Exec("USE memory")
defer db.Exec("USE ducklake") // Switch back if DuckLake is attached

// Create our own pg_database view that has all the columns psql expects
// We put it in main schema and rewrite queries to use it
pgDatabaseSQL := `
@@ -286,6 +292,11 @@ func initPgCatalog(db *sql.DB) error {
// initInformationSchema creates the column metadata table and information_schema wrapper views.
// This enables accurate type information (VARCHAR lengths, NUMERIC precision) in information_schema.
func initInformationSchema(db *sql.DB) error {
// Switch to memory database to avoid creating views in DuckLake
// (DuckLake may be the default, but we want views in the local memory db)
db.Exec("USE memory")
defer db.Exec("USE ducklake") // Switch back if DuckLake is attached

// Create metadata table to store column type information that DuckDB doesn't preserve
metadataTableSQL := `
CREATE TABLE IF NOT EXISTS __duckgres_column_metadata (
Expand Down Expand Up @@ -415,7 +426,9 @@ func initInformationSchema(db *sql.DB) error {
'YES' AS is_updatable
FROM information_schema.columns
`
db.Exec(columnsViewSimpleSQL)
if _, err := db.Exec(columnsViewSimpleSQL); err != nil {
log.Printf("Warning: failed to create information_schema_columns_compat view: %v", err)
}
}

// Create information_schema.tables wrapper view with additional PostgreSQL columns
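For context, the pattern both init functions now use (make the in-memory database current before creating compatibility objects, then switch back) can be read as a small helper. This is a minimal sketch only: the helper name and the explicit error checks are illustrative, and the change itself deliberately ignores the `USE` results since DuckLake may not be attached.

```go
package server

import "database/sql"

// execInMemoryCatalog is a hypothetical helper showing the pattern used by
// initPgCatalog and initInformationSchema: point the session at the local
// in-memory database so CREATE TABLE / CREATE VIEW statements land there
// instead of in an attached DuckLake catalog, then switch back.
func execInMemoryCatalog(db *sql.DB, ddl string) error {
	if _, err := db.Exec("USE memory"); err != nil {
		return err
	}
	// Switch back when done; this fails harmlessly if DuckLake isn't attached.
	defer db.Exec("USE ducklake")

	_, err := db.Exec(ddl)
	return err
}
```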
2 changes: 2 additions & 0 deletions server/conn.go
@@ -88,6 +88,8 @@ func (c *clientConn) serve() error {
if c.db != nil {
// Detach DuckLake to release the RDS metadata connection
if c.server.cfg.DuckLake.MetadataStore != "" {
// Switch to memory database first (can't detach the default database)
c.db.Exec("USE memory")
if _, err := c.db.Exec("DETACH ducklake"); err != nil {
log.Printf("Warning: failed to detach DuckLake for user %q: %v", c.username, err)
}
5 changes: 5 additions & 0 deletions server/server.go
@@ -290,6 +290,11 @@ func (s *Server) createDBConnection(username string) (*sql.DB, error) {
// Continue anyway - basic queries will still work
}

// Initialize information_schema wrapper views
if err := initInformationSchema(db); err != nil {
log.Printf("Warning: failed to initialize information_schema views for user %q: %v", username, err)
}

return db, nil
}

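Putting the server.go and catalog.go pieces together, each per-user connection now runs both catalog initializers right after it is opened. A rough, condensed sketch of that tail end of createDBConnection follows; the wrapper function name is hypothetical, and the initPgCatalog error handling above the visible hunk is assumed to log a warning, as the "Continue anyway" comment suggests.

```go
package server

import (
	"database/sql"
	"log"
)

// initCompatLayer condenses the relevant part of createDBConnection:
// pg_catalog views first, then the new information_schema wrappers, with
// failures logged but not treated as fatal.
func initCompatLayer(db *sql.DB, username string) {
	if err := initPgCatalog(db); err != nil {
		log.Printf("Warning: failed to initialize pg_catalog for user %q: %v", username, err)
		// Continue anyway - basic queries will still work
	}
	if err := initInformationSchema(db); err != nil {
		log.Printf("Warning: failed to initialize information_schema views for user %q: %v", username, err)
	}
}
```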
35 changes: 25 additions & 10 deletions transpiler/transform/pgcatalog.go
@@ -12,6 +12,9 @@ type PgCatalogTransform struct {
// ViewMappings maps pg_catalog table names to our compatibility views
ViewMappings map[string]string

// InformationSchemaMappings maps information_schema table names to our compatibility views
InformationSchemaMappings map[string]string

// Functions that need pg_catalog prefix stripped
Functions map[string]bool
}
@@ -20,17 +23,20 @@ type PgCatalogTransform struct {
func NewPgCatalogTransform() *PgCatalogTransform {
return &PgCatalogTransform{
ViewMappings: map[string]string{
"pg_class": "pg_class_full",
"pg_database": "pg_database",
"pg_collation": "pg_collation",
"pg_policy": "pg_policy",
"pg_roles": "pg_roles",
"pg_statistic_ext": "pg_statistic_ext",
"pg_class": "pg_class_full",
"pg_database": "pg_database",
"pg_collation": "pg_collation",
"pg_policy": "pg_policy",
"pg_roles": "pg_roles",
"pg_statistic_ext": "pg_statistic_ext",
"pg_publication_tables": "pg_publication_tables",
"pg_rules": "pg_rules",
"pg_publication": "pg_publication",
"pg_publication_rel": "pg_publication_rel",
"pg_inherits": "pg_inherits",
"pg_rules": "pg_rules",
"pg_publication": "pg_publication",
"pg_publication_rel": "pg_publication_rel",
"pg_inherits": "pg_inherits",
},
InformationSchemaMappings: map[string]string{
"columns": "information_schema_columns_compat",
},
Functions: map[string]bool{
"pg_get_userbyid": true,
@@ -90,6 +96,15 @@ func (t *PgCatalogTransform) walkAndTransform(node *pg_query.Node, changed *bool
n.RangeVar.Schemaname = ""
*changed = true
}
// Table references: information_schema.columns -> information_schema_columns_compat
if n.RangeVar != nil && strings.EqualFold(n.RangeVar.Schemaname, "information_schema") {
relname := strings.ToLower(n.RangeVar.Relname)
if newName, ok := t.InformationSchemaMappings[relname]; ok {
n.RangeVar.Relname = newName
n.RangeVar.Schemaname = ""
*changed = true
}
}

case *pg_query.Node_FuncCall:
// Function calls: pg_catalog.format_type() -> format_type()
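Before the test changes, here is roughly what the new InformationSchemaMappings entry does end to end at the transpiler level. A minimal sketch: the example function name and the output shown in the comment are illustrative, while New, DefaultConfig, Transpile, and result.SQL are taken from the existing test code below.

```go
package transpiler

import "fmt"

// ExampleInformationSchemaRewrite sketches the intended rewrite: the
// schema-qualified information_schema.columns reference is replaced with the
// information_schema_columns_compat view created in server/catalog.go.
func ExampleInformationSchemaRewrite() {
	tr := New(DefaultConfig())
	result, err := tr.Transpile("SELECT column_name FROM information_schema.columns WHERE table_name = 'users'")
	if err != nil {
		panic(err)
	}
	// Expected shape of the result (exact formatting depends on the deparser):
	//   SELECT column_name FROM information_schema_columns_compat WHERE table_name = 'users'
	fmt.Println(result.SQL)
}
```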
24 changes: 24 additions & 0 deletions transpiler/transpiler_test.go
@@ -375,6 +375,10 @@ func TestTranspile_ETLPatterns(t *testing.T) {
name: "information_schema query",
input: "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'",
},
{
name: "information_schema.columns rewrite",
input: "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = 'users'",
},
{
name: "pg_namespace query",
input: "SELECT nspname FROM pg_catalog.pg_namespace WHERE nspname NOT LIKE 'pg_%'",
@@ -439,6 +443,26 @@ func TestTranspile_DDL_Complex(t *testing.T) {
}
}

func TestTranspile_InformationSchemaColumnsRewrite(t *testing.T) {
tr := New(DefaultConfig())

input := "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = 'users'"
result, err := tr.Transpile(input)
if err != nil {
t.Fatalf("Transpile error: %v", err)
}

// Should rewrite information_schema.columns to information_schema_columns_compat
if !strings.Contains(result.SQL, "information_schema_columns_compat") {
t.Errorf("Expected information_schema.columns to be rewritten to information_schema_columns_compat, got: %s", result.SQL)
}

// Should NOT contain original schema-qualified name
if strings.Contains(result.SQL, "information_schema.columns") {
t.Errorf("Should NOT contain information_schema.columns after rewrite, got: %s", result.SQL)
}
}

func TestTranspile_EmptyQuery(t *testing.T) {
tr := New(DefaultConfig())
