diff --git a/.gitignore b/.gitignore index fd9f7b6..f985c5d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,2 @@ -/pkg -/sql -/bin +/pgrebase +/test_data/pg diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..99d02d0 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,40 @@ +stages: + - test + +sast: + stage: test + +include: + - template: Security/SAST.gitlab-ci.yml + +vet: + stage: test + image: golang:1.20 + script: + go vet . + +lib_integration: + stage: test + image: golang:1.20 + script: + - apt update && apt install -y postgresql + - chown -R postgres:postgres . + - su postgres -c "cd core && go test" + artifacts: + when: on_failure + paths: + - test_data/pg/server_logs + expire_in: 1 sec + +cli_integration: + stage: test + image: golang:1.20 + script: + - apt update && apt install -y postgresql + - chown -R postgres:postgres . + - su postgres -c "go test" + artifacts: + when: on_failure + paths: + - test_data/pg/server_logs + expire_in: 1 sec diff --git a/cli_integration_test.go b/cli_integration_test.go new file mode 100644 index 0000000..1e3fccb --- /dev/null +++ b/cli_integration_test.go @@ -0,0 +1,275 @@ +package main + +import ( + "database/sql" + "fmt" + "os" + "os/exec" + "testing" + "time" +) + +var dbConnectionScheme string + +func TestMain(m *testing.M) { + os.Setenv("QUIET", "true") + + start := exec.Command("./test_data/reset_db.sh") + err := start.Run() + if err != nil { + fmt.Println("Can't start the database") + os.Exit(1) + } + + exitVal := 1 + + defer (func() { + stop := exec.Command("./test_data/stop_db.sh") + out, err := stop.Output() + if err != nil { + fmt.Printf("%s\n", out) + os.Exit(1) + } + + os.Exit(exitVal) + })() + + port := os.Getenv("PG_TEST_PORT") + if len(port) == 0 { + port = "5433" + } + dbConnectionScheme = fmt.Sprintf("postgres://postgres:@localhost:%s/pgrebase?sslmode=disable", port) + os.Setenv("DATABASE_URL", dbConnectionScheme) + + // We want to test the binary directly, so let's build it. 
+ // for testing the functions, see core package. + start = exec.Command("go", "build") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + } else { + exitVal = m.Run() + } +} + +func query(query string, parameters ...interface{}) (Rows *sql.Rows, err error) { + var co *sql.DB + co, err = sql.Open("postgres", dbConnectionScheme) + if err != nil { + err = fmt.Errorf("can't connect to db : %v", err) + return + } + defer co.Close() + + Rows, err = co.Query(query, parameters...) + if err != nil { + err = fmt.Errorf("error while executing query : %v", err) + return + } + + return +} + +func TestLoadingAFunction(t *testing.T) { + start := exec.Command("./pgrebase", "test_data/fixtures/loading_a_function/") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + t.Fatalf("Error while executing pgrebase: %v", err) + } + + rows, err := query("SELECT test_function()") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } +} + +func TestLoadingAView(t *testing.T) { + t.Cleanup(func() { + rows, err := query("DELETE FROM users") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + rows.Close() + }) + + start := exec.Command("./pgrebase", "test_data/fixtures/loading_a_view/") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + t.Fatalf("Error while executing pgrebase: %v", err) + } + + rows, err := query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + fmt.Printf("Can't create mock record : %v\n", err) + t.Fatalf("Can't insert test record.") + } + rows.Close() + + rows, err = query("SELECT * FROM test_view") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } + + id := 0 + name := "" + err = rows.Scan(&id, &name) + if err 
!= nil { + t.Fatalf("Can't fetch columns : %v", err) + } + + if id != 1 { + t.Errorf("ID 1 expected, got %d", id) + } + + if name != "John Doe" { + t.Errorf("Name \"John Doe\" expected, got %s", name) + } +} + +func TestLoadingATrigger(t *testing.T) { + t.Cleanup(func() { + rows, err := query("DELETE FROM users") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + rows.Close() + }) + + start := exec.Command("./pgrebase", "test_data/fixtures/loading_a_trigger/") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + t.Fatalf("Error while executing pgrebase: %v", err) + } + + rows, err := query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + fmt.Printf("Can't create mock record : %v\n", err) + t.Fatalf("Can't insert test record.") + } + rows.Close() + + rows, err = query("SELECT active FROM users") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } + + active := false + err = rows.Scan(&active) + if err != nil { + t.Fatalf("Can't fetch columns : %v", err) + } + + if !active { + t.Errorf("Trigger expected to set `active` to true, it's false.") + } +} + +func TestLoadingAllTypes(t *testing.T) { + start := exec.Command("./pgrebase", "test_data/fixtures/loading_all/") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + t.Fatalf("Error while executing pgrebase: %v", err) + } +} + +func TestLoadingWithDependencies(t *testing.T) { + start := exec.Command("./pgrebase", "test_data/fixtures/dependencies/") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + t.Fatalf("Error while executing pgrebase: %v", err) + } +} + +func TestLoadingWithWatcher(t *testing.T) { + go (func() { + start := exec.Command("./pgrebase", "test_data/fixtures/watcher/") + out, err := start.Output() + if err != nil { + fmt.Printf("%s\n", out) + fmt.Printf("Error 
while executing pgrebase: %v", err) + os.Exit(1) + } + })() + + time.Sleep(1 * time.Second) + + rows, err := query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + t.Fatalf("Can't insert test record : %v.", err) + } + rows.Close() + + testFile := "./test_data/fixtures/watcher/views/test_view5.sql" + file, err := os.Create(testFile) + if err != nil { + t.Fatalf("Can't create test view file : %v.", err) + } + + _, err = fmt.Fprintf(file, "CREATE VIEW test_view5 AS SELECT id, name FROM users;") + if err != nil { + t.Fatalf("Can't write test view in file : %v.", err) + } + + file.Close() + t.Cleanup(func() { + os.Remove(testFile) + }) + + time.Sleep(1 * time.Second) + + rows, err = query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + fmt.Printf("Can't create mock record : %v\n", err) + t.Fatalf("Can't insert test record.") + } + rows.Close() + + rows, err = query("SELECT * FROM test_view") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } + + id := 0 + name := "" + err = rows.Scan(&id, &name) + if err != nil { + t.Fatalf("Can't fetch columns : %v", err) + } + + if name != "John Doe" { + t.Errorf("Name \"John Doe\" expected, got %s", name) + } +} diff --git a/core/Init.go b/core/Init.go deleted file mode 100644 index 74c0a2e..0000000 --- a/core/Init.go +++ /dev/null @@ -1,33 +0,0 @@ -package core - -import ( - "github.com/oelmekki/pgrebase/core/config" - "github.com/oelmekki/pgrebase/core/connection" - "github.com/oelmekki/pgrebase/core/function" - "github.com/oelmekki/pgrebase/core/trigger" - "github.com/oelmekki/pgrebase/core/view" -) - -// conf is the global level configuration data structure. -var conf config.Config - -// Init stores the global config object. 
-// -// databaseUrl should be a connection string to the database (eg: postgres://postgres:@localhost/database). -// -// sqlDir is the path to the directory where sql source files live. -// -// watch should be true if you want to keep watching for changes in source files rather -// than just loading them once. -func Init(databaseUrl, sqlDir string) (err error) { - conf = config.NewConfig(databaseUrl, sqlDir) - connection.Init(&conf) - function.Init(&conf) - trigger.Init(&conf) - view.Init(&conf) - - checker := sanity{} - err = checker.Check() - - return -} diff --git a/core/Process.go b/core/Process.go deleted file mode 100644 index dd2c66a..0000000 --- a/core/Process.go +++ /dev/null @@ -1,22 +0,0 @@ -package core - -import ( - "github.com/oelmekki/pgrebase/core/function" - "github.com/oelmekki/pgrebase/core/trigger" - "github.com/oelmekki/pgrebase/core/view" -) - -// Process loads sql code, just once. -func Process() (err error) { - if err = view.LoadViews(); err != nil { - return err - } - if err = function.LoadFunctions(); err != nil { - return err - } - if err = trigger.LoadTriggers(); err != nil { - return err - } - - return -} diff --git a/core/README.md b/core/README.md new file mode 100644 index 0000000..58f6678 --- /dev/null +++ b/core/README.md @@ -0,0 +1,17 @@ +Package core allows to use pgrebase as a library in your own code. + +To use it, your first need to initialize it: + + err := core.Init(databaseUrl, sqlDir) + +`databaseUrl` is the postgres connection string as used by +`database/sql.Open`, `sqlDir` is the path to your sql sources, and watch is +a flag you can set to true to keep watching sqlDir for changes. + +Once pgrebase is initialized, call `Process()` to load your source files +into database. + +If you want to keep watching FS for changes after than, you can call +`Watch()`. Note that this function won't return, so unless that's what you +want, you should run it in a goroutine. 
+ diff --git a/core/Watch.go b/core/Watch.go deleted file mode 100644 index 4dca41b..0000000 --- a/core/Watch.go +++ /dev/null @@ -1,41 +0,0 @@ -package core - -import ( - "fmt" - "log" - "time" -) - -// Watch listens to FS change in sql dir and processes them. -func Watch() { - fmt.Println("Watching filesystem for changes...") - - errorChan := make(chan error) - doneChan := make(chan bool) - - if err := startWatching(errorChan, doneChan); err != nil { - log.Fatal(err) - } - - for { - select { - case <-doneChan: - time.Sleep(300 * time.Millisecond) // without this, new file watcher is started faster than file writing has ended - scanFiles(&conf) - - if err := Process(); err != nil { - fmt.Printf("Error: %v\n", err) - } - - if err := startWatching(errorChan, doneChan); err != nil { - log.Fatal(err) - } - - case err := <-errorChan: - fmt.Printf("Error: %v\n", err) - if err := startWatching(errorChan, doneChan); err != nil { - log.Fatal(err) - } - } - } -} diff --git a/core/api.go b/core/api.go new file mode 100644 index 0000000..7759b83 --- /dev/null +++ b/core/api.go @@ -0,0 +1,87 @@ +package core + +import ( + "fmt" + "log" + "os" + "time" +) + +// conf is the global level configuration data structure. +var conf config + +// Init stores the global config object. +// +// databaseUrl should be a connection string to the database (eg: postgres://postgres:@localhost/database). +// +// sqlDir is the path to the directory where sql source files live. +// +// watch should be true if you want to keep watching for changes in source files rather +// than just loading them once. +func Init(databaseUrl, sqlDir string) (err error) { + conf = newConfig(databaseUrl, sqlDir) + + checker := sanity{} + err = checker.check() + + return +} + +// Process loads sql code, just once. 
+func Process() (err error) { + if err = loadViews(); err != nil { + return err + } + if err = loadFunctions(); err != nil { + return err + } + if err = loadTriggers(); err != nil { + return err + } + + return +} + +// startWatching fires a watcher, will die as soon something changed. +func startWatching(errorChan chan error, doneChan chan bool) (err error) { + w := watcher{Done: doneChan, Error: errorChan} + go w.Start() + + return +} + +// Watch listens to FS change in sql dir and processes them. +func Watch() { + if os.Getenv("QUIET") != "true" { + fmt.Println("Watching filesystem for changes...") + } + + errorChan := make(chan error) + doneChan := make(chan bool) + + if err := startWatching(errorChan, doneChan); err != nil { + log.Fatal(err) + } + + for { + select { + case <-doneChan: + time.Sleep(300 * time.Millisecond) // without this, new file watcher is started faster than file writing has ended + scanFiles(&conf) + + if err := Process(); err != nil { + fmt.Printf("Error: %v\n", err) + } + + if err := startWatching(errorChan, doneChan); err != nil { + log.Fatal(err) + } + + case err := <-errorChan: + fmt.Printf("Error: %v\n", err) + if err := startWatching(errorChan, doneChan); err != nil { + log.Fatal(err) + } + } + } +} diff --git a/core/codeunit.go b/core/codeunit.go new file mode 100644 index 0000000..05fd346 --- /dev/null +++ b/core/codeunit.go @@ -0,0 +1,75 @@ +package core + +import ( + "fmt" +) + +// codeUnit is the generic representation of any code, be it +// a function, a view, etc. +type codeUnit struct { + path string // the absolute path to code file + name string // the name of function/view/trigger + definition string // the actual code + previousExists bool // true if this code unit already exists in database + parseSignature bool // true if we need to generate signature (for new functions) +} + +// drop is the generic drop function for code units. 
+func (unit codeUnit) drop(dropQuery string) (err error) { + rows, err := query(dropQuery) + if err != nil { + return err + } + rows.Close() + + return +} + +// create is the eneric creation function for code units. +func (unit codeUnit) create(definition string) (err error) { + rows, err := query(definition) + if err != nil { + return err + } + rows.Close() + + return +} + +// codeUnitCreator is the interface that repesents what +// can manipulate code units. +type codeUnitCreator interface { + load() error + parse() error + drop() error + create() error +} + +// downPass performs the Steps used in down pass, when dropping existing code, in dependency +// graph reverse order. +func downPass(unit codeUnitCreator, path string) (err error) { + errFmt := " error while loading %s\n %v\n" + + if err = unit.load(); err != nil { + return fmt.Errorf(errFmt, path, err) + } + if err = unit.parse(); err != nil { + return fmt.Errorf(errFmt, path, err) + } + if err = unit.drop(); err != nil { + return fmt.Errorf(errFmt, path, err) + } + + return +} + +// upPass performs the steps used in up pass, when creating existing code, in dependency +// graph order +func upPass(unit codeUnitCreator, path string) (err error) { + errFmt := " error while creating %s\n %v\n" + if err = unit.create(); err != nil { + return fmt.Errorf(errFmt, path, err) + } + + return +} diff --git a/core/codeunit/CodeUnit.go b/core/codeunit/CodeUnit.go deleted file mode 100644 index 7467da6..0000000 --- a/core/codeunit/CodeUnit.go +++ /dev/null @@ -1,76 +0,0 @@ -package codeunit - -import ( - "fmt" - "github.com/oelmekki/pgrebase/core/connection" -) - -// CodeUnit is the generic representation of any code, be it -// a function, a view, etc. 
-type CodeUnit struct { - Path string // the absolute path to code file - Name string // the name of function/view/trigger - Definition string // the actual code - PreviousExists bool // true if this code unit already exists in database - ParseSignature bool // true if we need to generate signature (for new functions) -} - -// Drop is the generic drop function for code units. -func (unit CodeUnit) Drop(dropQuery string) (err error) { - rows, err := connection.Query(dropQuery) - if err != nil { - return err - } - rows.Close() - - return -} - -// Create is the eneric creation function for code units. -func (unit CodeUnit) Create(definition string) (err error) { - rows, err := connection.Query(definition) - if err != nil { - return err - } - rows.Close() - - return -} - -// CodeUnitCreator is the interface that repesents what -// can manipulate code units. -type CodeUnitCreator interface { - Load() error - Parse() error - Drop() error - Create() error -} - -// DownPass performs the Steps used in down pass, when dropping existing code, in dependency -// graph reverse order. 
-func DownPass(unit CodeUnitCreator, path string) (err error) { - errFmt := " error while loading %s\n %v\n" - - if err = unit.Load(); err != nil { - return fmt.Errorf(errFmt, path, err) - } - if err = unit.Parse(); err != nil { - return fmt.Errorf(errFmt, path, err) - } - if err = unit.Drop(); err != nil { - return fmt.Errorf(errFmt, path, err) - } - - return -} - -// UpPass performs the steps used in up pass, when creating existing code, in dependency -// graph order -func UpPass(unit CodeUnitCreator, path string) (err error) { - errFmt := " error while creating %s\n %v\n" - if err = unit.Create(); err != nil { - return fmt.Errorf(errFmt, path, err) - } - - return -} diff --git a/core/codeunit/doc.go b/core/codeunit/doc.go deleted file mode 100644 index 8a32583..0000000 --- a/core/codeunit/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -/* -Package codeunit contains the shared behavior for all code unit types (functions, views, triggers). -*/ -package codeunit diff --git a/core/config.go b/core/config.go new file mode 100644 index 0000000..99c93ef --- /dev/null +++ b/core/config.go @@ -0,0 +1,34 @@ +package core + +// config is the global configuration for execution. +type config struct { + databaseUrl string // connection info for the database + sqlDirPath string // place where to find the code units + functionFiles []string // paths of all function files + triggerFiles []string // paths of all trigger files + viewFiles []string // paths of all view files +} + +// parseSqlDir retrieves sql source directory. +func sanitizeSqlPath(path string) (newPath string) { + if string(path[len(path)-1]) != "/" { + path += "/" + } + + newPath = path + + return +} + +// newConfig creates a config data structure with provided values. +// +// databaseUrl is a postgres connection url. +// +// sqlDir is the path to the directory where your sql sources are. +// +// watch is a flag you may set to true to keep watching for change +// in sqlDir after processing it a first time. 
+func newConfig(databaseUrl, sqlDir string) (conf config) { + conf = config{databaseUrl: databaseUrl, sqlDirPath: sanitizeSqlPath(sqlDir)} + return +} diff --git a/core/config/Config.go b/core/config/Config.go deleted file mode 100644 index d818303..0000000 --- a/core/config/Config.go +++ /dev/null @@ -1,11 +0,0 @@ -package config - -// Config is the global configuration for execution. -type Config struct { - DatabaseUrl string // connection info for the database - SqlDirPath string // place where to find the code units - FunctionFiles []string // paths of all function files - TriggerFiles []string // paths of all trigger files - TypeFiles []string // paths of all type files - ViewFiles []string // paths of all view files -} diff --git a/core/config/NewConfig.go b/core/config/NewConfig.go deleted file mode 100644 index b5a1f3c..0000000 --- a/core/config/NewConfig.go +++ /dev/null @@ -1,14 +0,0 @@ -package config - -// NewConfig creates a config data structure with provided values. -// -// databaseUrl is a postgres connection url. -// -// sqlDir is the path to the directory where your sql sources are. -// -// watch is a flag you may set to true to keep watching for change -// in sqlDir after processing it a first time. -func NewConfig(databaseUrl, sqlDir string) (config Config) { - config = Config{DatabaseUrl: databaseUrl, SqlDirPath: sanitizeSqlPath(sqlDir)} - return -} diff --git a/core/config/doc.go b/core/config/doc.go deleted file mode 100644 index 0895542..0000000 --- a/core/config/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -/* -Package config handles global configuration for pgrebase. -*/ -package config diff --git a/core/config/sanitizeSqlPath.go b/core/config/sanitizeSqlPath.go deleted file mode 100644 index b10eca6..0000000 --- a/core/config/sanitizeSqlPath.go +++ /dev/null @@ -1,12 +0,0 @@ -package config - -// parseSqlDir retrieves sql source directory. 
-func sanitizeSqlPath(path string) (newPath string) { - if string(path[len(path)-1]) != "/" { - path += "/" - } - - newPath = path - - return -} diff --git a/core/connection/Init.go b/core/connection/Init.go deleted file mode 100644 index faaeaef..0000000 --- a/core/connection/Init.go +++ /dev/null @@ -1,12 +0,0 @@ -package connection - -import ( - "github.com/oelmekki/pgrebase/core/config" -) - -var conf *config.Config - -// Init stores configuration for further usage. -func Init(cfg *config.Config) { - conf = cfg -} diff --git a/core/connection/Query.go b/core/connection/Query.go deleted file mode 100644 index d952c51..0000000 --- a/core/connection/Query.go +++ /dev/null @@ -1,26 +0,0 @@ -package connection - -import ( - "database/sql" - "fmt" - _ "github.com/lib/pq" -) - -// Query is a wrapper for sql.Query, meant to be the main query interface. -func Query(query string, parameters ...interface{}) (rows *sql.Rows, err error) { - var co *sql.DB - - co, err = sql.Open("postgres", conf.DatabaseUrl) - if err != nil { - fmt.Printf("can't connect to database : %v\n", err) - return rows, err - } - defer co.Close() - - rows, err = co.Query(query, parameters...) - if err != nil { - return rows, err - } - - return -} diff --git a/core/connection/doc.go b/core/connection/doc.go deleted file mode 100644 index 61ef105..0000000 --- a/core/connection/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -/* -Package connection allow to connect and query the database. -*/ -package connection diff --git a/core/doc.go b/core/doc.go deleted file mode 100644 index 1d42930..0000000 --- a/core/doc.go +++ /dev/null @@ -1,16 +0,0 @@ -/* -Package core allows to use pgrebase as a library in your own code. - -To use it, your first need to initialize it: - - err := core.Init(databaseUrl, sqlDir, watch) - -databaseUrl is the postgres connection string, sqlDir is the path to your sql sources, -and watch is a flag you can set to true to keep watching sqlDir for changes. 
- -Once pgrebase is initialized, call Process() to load your source files into -database. - -If you want to keep watching FS for changes after than, you can call Watch(). -*/ -package core diff --git a/core/function.go b/core/function.go new file mode 100644 index 0000000..8fe4cf1 --- /dev/null +++ b/core/function.go @@ -0,0 +1,160 @@ +package core + +import ( + "fmt" + "io/ioutil" + "regexp" + "strings" +) + +// function is the code unit for functions. +type function struct { + codeUnit + signature string // function signature, unparsed +} + +// loadFunctions loads or reload all functions found in FS. +func loadFunctions() (err error) { + successfulCount := len(conf.functionFiles) + errors := make([]string, 0) + bypass := make(map[string]bool) + + files, err := resolveDependencies(conf.functionFiles, conf.sqlDirPath+"functions") + if err != nil { + return err + } + + functions := make([]*function, 0) + for i := len(files) - 1; i >= 0; i-- { + file := files[i] + f := function{} + f.path = file + functions = append(functions, &f) + + err = downPass(&f, f.path) + if err != nil { + successfulCount-- + errors = append(errors, fmt.Sprintf("%v\n", err)) + bypass[f.path] = true + } + } + + for i := len(functions) - 1; i >= 0; i-- { + f := functions[i] + if _, ignore := bypass[f.path]; !ignore { + err = upPass(f, f.path) + if err != nil { + successfulCount-- + errors = append(errors, fmt.Sprintf("%v\n", err)) + } + } + } + + report("functions", successfulCount, len(conf.functionFiles), errors) + + return +} + +// parse parses function for name and signature. 
+func (function *function) parse() (err error) { + signatureFinder := regexp.MustCompile(`(?is)CREATE(?:\s+OR\s+REPLACE)?\s+FUNCTION\s+(\S+?)\((.*?)\)`) + subMatches := signatureFinder.FindStringSubmatch(function.definition) + + if len(subMatches) < 3 { + return fmt.Errorf("Can't find a function in %s", function.path) + } + + function.name = subMatches[1] + + if function.parseSignature { + function.signature = subMatches[2] + } else { + function.signature, function.previousExists, err = function.previousSignature() + if err != nil { + return err + } + } + + err = function.removeDefaultFromSignature() + if err != nil { + return + } + + return +} + +// load loads function definition from file. +func (f *function) load() (err error) { + definition, err := ioutil.ReadFile(f.path) + if err != nil { + return err + } + f.definition = string(definition) + + return +} + +// drop removes existing function from pg. +func (f *function) drop() (err error) { + if f.previousExists { + return f.codeUnit.drop(`DROP FUNCTION IF EXISTS ` + f.name + `(` + f.signature + `)`) + } + + return +} + +// create adds the function in pg. +func (f *function) create() (err error) { + return f.codeUnit.create(f.definition) +} + +// previousSignature retrieves old signature from function in database, if any. +func (f *function) previousSignature() (signature string, exists bool, err error) { + rows, err := query(`SELECT pg_get_functiondef(oid) FROM pg_proc WHERE proname = $1`, f.name) + if err != nil { + return + } + defer rows.Close() + + if rows.Next() { + exists = true + + var body string + if err = rows.Scan(&body); err != nil { + return + } + oldFunction := function{codeUnit: codeUnit{definition: body, parseSignature: true}} + if err = oldFunction.parse(); err != nil { + return + } + signature = oldFunction.signature + } + + return +} + +// removeDefaultFromSignature sanitizes function signature. 
+// +// `DROP FUNCTION` raises error if the signature contains `DEFAULT` values for +// parameters, so we must remove them. +func (f *function) removeDefaultFromSignature() (err error) { + anyDefault, err := regexp.MatchString("(?i)DEFAULT", f.signature) + if err != nil { + return + } + + if anyDefault { + arguments := strings.Split(f.signature, ",") + newArgs := make([]string, 0) + + for _, arg := range arguments { + arg = strings.Replace(arg, " DEFAULT ", " default ", -1) + newArg := strings.Split(arg, " default ")[0] + newArgs = append(newArgs, newArg) + } + + f.signature = strings.Join(newArgs, ",") + } + + return +} diff --git a/core/function/Function.go b/core/function/Function.go deleted file mode 100644 index 25e8b59..0000000 --- a/core/function/Function.go +++ /dev/null @@ -1,120 +0,0 @@ -package function - -import ( - "fmt" - "github.com/oelmekki/pgrebase/core/codeunit" - "github.com/oelmekki/pgrebase/core/connection" - "io/ioutil" - "regexp" - "strings" -) - -// Function is the code unit for functions. -type Function struct { - codeunit.CodeUnit - Signature string // function signature, unparsed -} - -// Parse parses function for name and signature. -func (function *Function) Parse() (err error) { - signatureFinder := regexp.MustCompile(`(?is)CREATE(?:\s+OR\s+REPLACE)?\s+FUNCTION\s+(\S+?)\((.*?)\)`) - subMatches := signatureFinder.FindStringSubmatch(function.Definition) - - if len(subMatches) < 3 { - return fmt.Errorf("Can't find a function in %s", function.Path) - } - - function.Name = subMatches[1] - - if function.ParseSignature { - function.Signature = subMatches[2] - } else { - function.Signature, function.PreviousExists, err = function.previousSignature() - if err != nil { - return err - } - } - - err = function.removeDefaultFromSignature() - if err != nil { - return - } - - return -} - -// Load loads function definition from file. 
-func (function *Function) Load() (err error) { - definition, err := ioutil.ReadFile(function.Path) - if err != nil { - return err - } - function.Definition = string(definition) - - return -} - -// Drop removes existing function from pg. -func (function *Function) Drop() (err error) { - if function.PreviousExists { - return function.CodeUnit.Drop(`DROP FUNCTION IF EXISTS ` + function.Name + `(` + function.Signature + `)`) - } - - return -} - -// Create adds the function in pg. -func (function *Function) Create() (err error) { - return function.CodeUnit.Create(function.Definition) -} - -// previousSignature retrieves old signature from function in database, if any. -func (function *Function) previousSignature() (signature string, exists bool, err error) { - rows, err := connection.Query(`SELECT pg_get_functiondef(oid) FROM pg_proc WHERE proname = $1`, function.Name) - if err != nil { - return - } - defer rows.Close() - - if rows.Next() { - exists = true - - var body string - if err = rows.Scan(&body); err != nil { - return - } - oldFunction := Function{CodeUnit: codeunit.CodeUnit{Definition: body, ParseSignature: true}} - if err = oldFunction.Parse(); err != nil { - return - } - signature = oldFunction.Signature - } - - return -} - -// removeDefaultFromSignature sanitizes function signature. -// -// `DROP FUNCTION` raises error if the signature contains `DEFAULT` values for -// parameters, so we must remove them. 
-func (function *Function) removeDefaultFromSignature() (err error) { - anyDefault, err := regexp.MatchString("(?i)DEFAULT", function.Signature) - if err != nil { - return - } - - if anyDefault { - arguments := strings.Split(function.Signature, ",") - newArgs := make([]string, 0) - - for _, arg := range arguments { - arg = strings.Replace(arg, " DEFAULT ", " default ", -1) - newArg := strings.Split(arg, " default ")[0] - newArgs = append(newArgs, newArg) - } - - function.Signature = strings.Join(newArgs, ",") - } - - return -} diff --git a/core/function/Init.go b/core/function/Init.go deleted file mode 100644 index 50c8871..0000000 --- a/core/function/Init.go +++ /dev/null @@ -1,12 +0,0 @@ -package function - -import ( - "github.com/oelmekki/pgrebase/core/config" -) - -var conf *config.Config - -// Init stores configuration for further usage. -func Init(cfg *config.Config) { - conf = cfg -} diff --git a/core/function/LoadFunctions.go b/core/function/LoadFunctions.go deleted file mode 100644 index 35c561e..0000000 --- a/core/function/LoadFunctions.go +++ /dev/null @@ -1,50 +0,0 @@ -package function - -import ( - "fmt" - "github.com/oelmekki/pgrebase/core/codeunit" - "github.com/oelmekki/pgrebase/core/resolver" - "github.com/oelmekki/pgrebase/core/utils" -) - -// LoadFunctions loads or reload all functions found in FS. 
-func LoadFunctions() (err error) { - successfulCount := len(conf.FunctionFiles) - errors := make([]string, 0) - bypass := make(map[string]bool) - - files, err := resolver.ResolveDependencies(conf.FunctionFiles, conf.SqlDirPath+"functions") - if err != nil { - return err - } - - functions := make([]*Function, 0) - for i := len(files) - 1; i >= 0; i-- { - file := files[i] - function := Function{} - function.Path = file - functions = append(functions, &function) - - err = codeunit.DownPass(&function, function.Path) - if err != nil { - successfulCount-- - errors = append(errors, fmt.Sprintf("%v\n", err)) - bypass[function.Path] = true - } - } - - for i := len(functions) - 1; i >= 0; i-- { - function := functions[i] - if _, ignore := bypass[function.Path]; !ignore { - err = codeunit.UpPass(function, function.Path) - if err != nil { - successfulCount-- - errors = append(errors, fmt.Sprintf("%v\n", err)) - } - } - } - - utils.Report("functions", successfulCount, len(conf.FunctionFiles), errors) - - return -} diff --git a/core/function/doc.go b/core/function/doc.go deleted file mode 100644 index bc71300..0000000 --- a/core/function/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package function allow to manage function code units. 
-package function diff --git a/core/lib_integration_test.go b/core/lib_integration_test.go new file mode 100644 index 0000000..65e1595 --- /dev/null +++ b/core/lib_integration_test.go @@ -0,0 +1,282 @@ +package core + +import ( + "database/sql" + "fmt" + "os" + "os/exec" + "testing" + "time" +) + +var dbConnectionScheme string + +func TestMain(m *testing.M) { + os.Setenv("QUIET", "true") + + start := exec.Command("../test_data/reset_db.sh") + err := start.Run() + if err != nil { + fmt.Println("Can't start the database") + os.Exit(1) + } + + exitVal := 1 + + defer (func() { + stop := exec.Command("../test_data/stop_db.sh") + out, err := stop.Output() + if err != nil { + fmt.Printf("%s\n", out) + os.Exit(1) + } + + os.Exit(exitVal) + })() + + port := os.Getenv("PG_TEST_PORT") + if len(port) == 0 { + port = "5433" + } + dbConnectionScheme = fmt.Sprintf("postgres://postgres:@localhost:%s/pgrebase?sslmode=disable", port) + + exitVal = m.Run() +} + +func test_query(query string, parameters ...interface{}) (Rows *sql.Rows, err error) { + var co *sql.DB + co, err = sql.Open("postgres", dbConnectionScheme) + if err != nil { + err = fmt.Errorf("can't connect to db : %v", err) + return + } + defer co.Close() + + Rows, err = co.Query(query, parameters...) 
+ if err != nil { + err = fmt.Errorf("error while executing query : %v", err) + return + } + + return +} + +func TestLoadingAFunction(t *testing.T) { + err := Init(dbConnectionScheme, "../test_data/fixtures/loading_a_function/") + if err != nil { + t.Fatalf("Can't init pgrebase : %v", err) + } + + err = Process() + if err != nil { + t.Fatalf("Can't process : %v", err) + } + + rows, err := test_query("SELECT test_function()") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } +} + +func TestLoadingAView(t *testing.T) { + t.Cleanup(func() { + rows, err := test_query("DELETE FROM users") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + rows.Close() + }) + + err := Init(dbConnectionScheme, "../test_data/fixtures/loading_a_view/") + if err != nil { + t.Fatalf("Can't init pgrebase : %v", err) + } + + err = Process() + if err != nil { + t.Fatalf("Can't process : %v", err) + } + + rows, err := test_query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + fmt.Printf("Can't create mock record : %v\n", err) + t.Fatalf("Can't insert test record.") + } + rows.Close() + + rows, err = test_query("SELECT * FROM test_view") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } + + id := 0 + name := "" + err = rows.Scan(&id, &name) + if err != nil { + t.Fatalf("Can't fetch columns : %v", err) + } + + if id != 1 { + t.Errorf("ID 1 expected, got %d", id) + } + + if name != "John Doe" { + t.Errorf("Name \"John Doe\" expected, got %s", name) + } +} + +func TestLoadingATrigger(t *testing.T) { + t.Cleanup(func() { + rows, err := test_query("DELETE FROM users") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + rows.Close() + }) + + err := Init(dbConnectionScheme, 
"../test_data/fixtures/loading_a_trigger/") + if err != nil { + t.Fatalf("Can't init pgrebase : %v", err) + } + + err = Process() + if err != nil { + t.Fatalf("Can't process : %v", err) + } + + rows, err := test_query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + fmt.Printf("Can't create mock record : %v\n", err) + t.Fatalf("Can't insert test record.") + } + rows.Close() + + rows, err = test_query("SELECT active FROM users") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } + + active := false + err = rows.Scan(&active) + if err != nil { + t.Fatalf("Can't fetch columns : %v", err) + } + + if !active { + t.Errorf("Trigger expected to set `active` to true, it's false.") + } +} + +func TestLoadingAllTypes(t *testing.T) { + err := Init(dbConnectionScheme, "../test_data/fixtures/loading_all/") + if err != nil { + t.Fatalf("Can't init pgrebase : %v", err) + } + + err = Process() + if err != nil { + t.Fatalf("Can't load all supported types : %v", err) + } +} + +func TestLoadingWithDependencies(t *testing.T) { + err := Init(dbConnectionScheme, "../test_data/fixtures/dependencies/") + if err != nil { + t.Fatalf("Can't init pgrebase : %v", err) + } + + err = Process() + if err != nil { + t.Fatalf("Can't load with dependencies : %v", err) + } +} + +func TestLoadingWithWatcher(t *testing.T) { + err := Init(dbConnectionScheme, "../test_data/fixtures/watcher/") + if err != nil { + t.Fatalf("Can't init pgrebase : %v", err) + } + + err = Process() + if err != nil { + t.Fatalf("Can't load with dependencies : %v", err) + } + + rows, err := test_query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + t.Fatalf("Can't insert test record : %v.", err) + } + rows.Close() + + go Watch() + time.Sleep(1 * time.Second) + + testFile := 
"../test_data/fixtures/watcher/views/test_view5.sql" + file, err := os.Create(testFile) + if err != nil { + t.Fatalf("Can't create test view file : %v.", err) + } + + _, err = fmt.Fprintf(file, "CREATE VIEW test_view5 AS SELECT id, name FROM users;") + if err != nil { + t.Fatalf("Can't write test view in file : %v.", err) + } + + file.Close() + t.Cleanup(func() { + os.Remove(testFile) + }) + + time.Sleep(1 * time.Second) + + rows, err = test_query("INSERT INTO users(name, bio) VALUES('John Doe', 'John Doe does stuff.')") + if err != nil { + fmt.Printf("Can't create mock record : %v\n", err) + t.Fatalf("Can't insert test record.") + } + rows.Close() + + rows, err = test_query("SELECT * FROM test_view") + if err != nil { + t.Fatalf("Can't query : %v", err) + } + + defer rows.Close() + + if !rows.Next() { + t.Fatalf("Calling function does not provide any result.") + return + } + + id := 0 + name := "" + err = rows.Scan(&id, &name) + if err != nil { + t.Fatalf("Can't fetch columns : %v", err) + } + + if name != "John Doe" { + t.Errorf("Name \"John Doe\" expected, got %s", name) + } +} diff --git a/core/resolver.go b/core/resolver.go new file mode 100644 index 0000000..a609e27 --- /dev/null +++ b/core/resolver.go @@ -0,0 +1,134 @@ +package core + +import ( + "fmt" + "io/ioutil" + "regexp" +) + +type sourceFile struct { + path string + dependencies []string +} + +// parseDependencies reads dependencies from source file. 
+func (source *sourceFile) parseDependencies(base string) (err error) { + source.dependencies = make([]string, 0) + + file, err := ioutil.ReadFile(source.path) + dependencyFinder := regexp.MustCompile(`--\s+require\s+['"](.*)['"]`) + dependencies := dependencyFinder.FindAllStringSubmatch(string(file), -1) + + for _, submatches := range dependencies { + if len(submatches) > 1 { + dependency := base + "/" + submatches[1] + alreadyExists := false + + for _, existing := range source.dependencies { + if existing == dependency { + alreadyExists = true + } + } + + if !alreadyExists { + source.dependencies = append(source.dependencies, dependency) + } + } + } + + return +} + +// resolved checks if all dependencies of current file are resolved +func (source *sourceFile) resolved(readyFiles []string) bool { + for _, file := range source.dependencies { + resolved := false + + for _, readyFile := range readyFiles { + if readyFile == file { + resolved = true + } + } + + if !resolved { + return false + } + } + + return true +} + +// dependencyResolver holds info about dependencies, like their resolve order and the +// current state of resolving. +type dependencyResolver struct { + base string // the path to resolving root + initialFiles []string // list of found file, unordered + sortedFiles []string // list of found file, sorted by resolving order + pendingFiles []sourceFile // list of found files we're not sure yet of resolving order +} + +// resolve is the actual resolve looping. 
+func (resolver *dependencyResolver) resolve() (sortedFiles []string, err error) { + for _, file := range resolver.initialFiles { + source := sourceFile{path: file} + err = source.parseDependencies(resolver.base) + if err != nil { + return + } + + if source.resolved(resolver.sortedFiles) { + resolver.sortedFiles = append(resolver.sortedFiles, source.path) + resolver.removePending(source) + resolver.processPendings() + } else { + resolver.pendingFiles = append(resolver.pendingFiles, source) + } + } + + if len(resolver.pendingFiles) > 0 { + for i := 0; i < len(resolver.pendingFiles); i++ { + resolver.processPendings() + if len(resolver.pendingFiles) == 0 { + break + } + } + } + + if len(resolver.pendingFiles) > 0 { + err = fmt.Errorf("Can't resolve dependencies in %s. Circular dependencies?", resolver.base) + } else { + sortedFiles = resolver.sortedFiles + } + + return +} + +// processPendings checks if previously unresolved dependencies now are. +func (resolver *dependencyResolver) processPendings() { + for _, source := range resolver.pendingFiles { + if source.resolved(resolver.sortedFiles) { + resolver.sortedFiles = append(resolver.sortedFiles, source.path) + resolver.removePending(source) + } + } +} + +// removePending removes a resolved source file from pending files. +func (resolver *dependencyResolver) removePending(source sourceFile) { + newPendings := make([]sourceFile, 0) + + for _, pending := range resolver.pendingFiles { + if pending.path != source.path { + newPendings = append(newPendings, pending) + } + } + + resolver.pendingFiles = newPendings +} + +// resolveDependencies parses files to find their dependencies requirements, and return them. +// sorted accordingly. 
+func resolveDependencies(files []string, base string) (sortedFiles []string, err error) { + resolver := dependencyResolver{initialFiles: files, base: base} + return resolver.resolve() +} diff --git a/core/resolver/ResolveDependencies.go b/core/resolver/ResolveDependencies.go deleted file mode 100644 index 0784b52..0000000 --- a/core/resolver/ResolveDependencies.go +++ /dev/null @@ -1,8 +0,0 @@ -package resolver - -// DependencyResolver parses files to find their dependencies requirements, and return them. -// sorted accordingly. -func ResolveDependencies(files []string, base string) (sortedFiles []string, err error) { - resolver := dependencyResolver{initialFiles: files, Base: base} - return resolver.Resolve() -} diff --git a/core/resolver/dependencyResolver.go b/core/resolver/dependencyResolver.go deleted file mode 100644 index ebc3838..0000000 --- a/core/resolver/dependencyResolver.go +++ /dev/null @@ -1,73 +0,0 @@ -package resolver - -import ( - "fmt" -) - -// dependencyResolver holds info about dependencies, like their resolve order and the -// current state of resolving. -type dependencyResolver struct { - Base string // the path to resolving root - initialFiles []string // list of found file, unordered - sortedFiles []string // list of found file, sorted by resolving order - pendingFiles []sourceFile // list of found files we're not sure yet of resolving order -} - -// Resolve is the actual resolve looping. 
-func (resolver *dependencyResolver) Resolve() (sortedFiles []string, err error) { - for _, file := range resolver.initialFiles { - source := sourceFile{path: file} - err = source.ParseDependencies(resolver.Base) - if err != nil { - return - } - - if source.Resolved(resolver.sortedFiles) { - resolver.sortedFiles = append(resolver.sortedFiles, source.path) - resolver.RemovePending(source) - resolver.ProcessPendings() - } else { - resolver.pendingFiles = append(resolver.pendingFiles, source) - } - } - - if len(resolver.pendingFiles) > 0 { - for i := 0; i < len(resolver.pendingFiles); i++ { - resolver.ProcessPendings() - if len(resolver.pendingFiles) == 0 { - break - } - } - } - - if len(resolver.pendingFiles) > 0 { - err = fmt.Errorf("Can't resolve dependencies in %s. Circular dependencies?", resolver.Base) - } else { - sortedFiles = resolver.sortedFiles - } - - return -} - -// ProcessPending checks if previously unresolved dependencies now are. -func (resolver *dependencyResolver) ProcessPendings() { - for _, source := range resolver.pendingFiles { - if source.Resolved(resolver.sortedFiles) { - resolver.sortedFiles = append(resolver.sortedFiles, source.path) - resolver.RemovePending(source) - } - } -} - -// RemovePending removes a resolved source file from pending files. -func (resolver *dependencyResolver) RemovePending(source sourceFile) { - newPendings := make([]sourceFile, 0) - - for _, pending := range resolver.pendingFiles { - if pending.path != source.path { - newPendings = append(newPendings, pending) - } - } - - resolver.pendingFiles = newPendings -} diff --git a/core/resolver/doc.go b/core/resolver/doc.go deleted file mode 100644 index 5142380..0000000 --- a/core/resolver/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package resolver maps the dependency graph for code units. 
-package resolver diff --git a/core/resolver/sourceFile.go b/core/resolver/sourceFile.go deleted file mode 100644 index 8fb28a6..0000000 --- a/core/resolver/sourceFile.go +++ /dev/null @@ -1,58 +0,0 @@ -package resolver - -import ( - "io/ioutil" - "regexp" -) - -type sourceFile struct { - path string - dependencies []string -} - -// ParseDependencies reads dependencies from source file. -func (source *sourceFile) ParseDependencies(base string) (err error) { - source.dependencies = make([]string, 0) - - file, err := ioutil.ReadFile(source.path) - dependencyFinder := regexp.MustCompile(`--\s+require\s+['"](.*)['"]`) - dependencies := dependencyFinder.FindAllStringSubmatch(string(file), -1) - - for _, submatches := range dependencies { - if len(submatches) > 1 { - dependency := base + "/" + submatches[1] - alreadyExists := false - - for _, existing := range source.dependencies { - if existing == dependency { - alreadyExists = true - } - } - - if !alreadyExists { - source.dependencies = append(source.dependencies, dependency) - } - } - } - - return -} - -// Resolved checks if all dependencies of current file are resolved -func (source *sourceFile) Resolved(readyFiles []string) bool { - for _, file := range source.dependencies { - resolved := false - - for _, readyFile := range readyFiles { - if readyFile == file { - resolved = true - } - } - - if !resolved { - return false - } - } - - return true -} diff --git a/core/sanity.go b/core/sanity.go index dd9c197..61e9778 100644 --- a/core/sanity.go +++ b/core/sanity.go @@ -2,14 +2,13 @@ package core import ( "fmt" - "github.com/oelmekki/pgrebase/core/utils" ) // Sanity type encapsulates requirement checks. type sanity struct{} -// Check makes sure the fs is ready to be used. -func (checker *sanity) Check() (err error) { +// check makes sure the fs is ready to be used. 
+func (checker *sanity) check() (err error) { if err = checker.directoryExists(); err != nil { return err } @@ -26,8 +25,8 @@ func (checker *sanity) Check() (err error) { // directoryExists checks the provided sql directory is indeed a directory. func (checker *sanity) directoryExists() (err error) { - if !utils.IsDir(conf.SqlDirPath) { - return fmt.Errorf("%s is not a directory", conf.SqlDirPath) + if !isDir(conf.sqlDirPath) { + return fmt.Errorf("%s is not a directory", conf.sqlDirPath) } return @@ -38,14 +37,14 @@ func (checker *sanity) typedDirExists() (err error) { directories := make([]string, 0) for _, typedDir := range []string{"functions", "triggers", "types", "views"} { - path := conf.SqlDirPath + "/" + typedDir - if utils.IsDir(path) { + path := conf.sqlDirPath + "/" + typedDir + if isDir(path) { directories = append(directories, path) } } if len(directories) == 0 { - return fmt.Errorf("No functions/, triggers/, types/ or views/ directory found in %s", conf.SqlDirPath) + return fmt.Errorf("No functions/, triggers/, types/ or views/ directory found in %s", conf.sqlDirPath) } return @@ -55,8 +54,8 @@ func (checker *sanity) typedDirExists() (err error) { // // No need to process any further if there are no sql files to load. func (checker *sanity) sqlFilesPresent() (err error) { - if len(conf.FunctionFiles)+len(conf.TriggerFiles)+len(conf.ViewFiles) == 0 { - return fmt.Errorf("Didn't find any sql file in %s", conf.SqlDirPath) + if len(conf.functionFiles)+len(conf.triggerFiles)+len(conf.viewFiles) == 0 { + return fmt.Errorf("Didn't find any sql file in %s", conf.sqlDirPath) } return diff --git a/core/scanFiles.go b/core/scanFiles.go index 0ca4b89..dfd26fc 100644 --- a/core/scanFiles.go +++ b/core/scanFiles.go @@ -1,16 +1,58 @@ package core import ( - "github.com/oelmekki/pgrebase/core/config" + "os" + "path/filepath" ) +// sourceWalker type encapsulates fs walking functions. 
+type sourceWalker struct {
+	config *config
+}
+
+// process loads all source files paths.
+func (walker *sourceWalker) process() {
+	walker.config.functionFiles = walker.findFunctions()
+	walker.config.triggerFiles = walker.findTriggers()
+	walker.config.viewFiles = walker.findViews()
+
+	return
+}
+
+// findFunctions finds path of function files.
+func (walker *sourceWalker) findFunctions() (paths []string) {
+	return walker.sqlFilesIn(walker.config.sqlDirPath + "/functions")
+}
+
+// findTriggers finds path of trigger files.
+func (walker *sourceWalker) findTriggers() (paths []string) {
+	return walker.sqlFilesIn(walker.config.sqlDirPath + "/triggers")
+}
+
+// findViews finds path of view files.
+func (walker *sourceWalker) findViews() (paths []string) {
+	return walker.sqlFilesIn(walker.config.sqlDirPath + "/views")
+}
+
+// sqlFilesIn walks a directory to find sql files.
+func (walker *sourceWalker) sqlFilesIn(path string) (paths []string) {
+	filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
+		if isSqlFile(path) {
+			paths = append(paths, path)
+		}
+
+		return nil
+	})
+
+	return
+}
+
+// scanFiles scans sql directory for sql files.
-func scanFiles(cfg *config.Config) { - cfg.FunctionFiles = make([]string, 0) - cfg.TriggerFiles = make([]string, 0) - cfg.TypeFiles = make([]string, 0) - cfg.ViewFiles = make([]string, 0) - - walker := sourceWalker{Config: cfg} - walker.Process() +func scanFiles(cfg *config) { + cfg.functionFiles = make([]string, 0) + cfg.triggerFiles = make([]string, 0) + cfg.viewFiles = make([]string, 0) + + walker := sourceWalker{config: cfg} + walker.process() } diff --git a/core/sourceWalker.go b/core/sourceWalker.go deleted file mode 100644 index e35e245..0000000 --- a/core/sourceWalker.go +++ /dev/null @@ -1,56 +0,0 @@ -package core - -import ( - "github.com/oelmekki/pgrebase/core/config" - "github.com/oelmekki/pgrebase/core/utils" - "os" - "path/filepath" -) - -// sourceWalker type encapsulates fs walking functions. -type sourceWalker struct { - Config *config.Config -} - -// Process loads all source files paths. -func (walker *sourceWalker) Process() { - walker.Config.FunctionFiles = walker.findFunctions() - walker.Config.TriggerFiles = walker.findTriggers() - walker.Config.TypeFiles = walker.findTypes() - walker.Config.ViewFiles = walker.findViews() - - return -} - -// findFunctions finds path of function files. -func (walker *sourceWalker) findFunctions() (paths []string) { - return walker.sqlFilesIn(walker.Config.SqlDirPath + "/functions") -} - -// findTriggers dinds path of trigger files. -func (walker *sourceWalker) findTriggers() (paths []string) { - return walker.sqlFilesIn(walker.Config.SqlDirPath + "/triggers") -} - -// findTypes finds path of type files. -func (walker *sourceWalker) findTypes() (paths []string) { - return walker.sqlFilesIn(walker.Config.SqlDirPath + "/types") -} - -// findViews finds path of view files. -func (walker *sourceWalker) findViews() (paths []string) { - return walker.sqlFilesIn(walker.Config.SqlDirPath + "/views") -} - -// sqlFilesIn walks a directory to find sql files. 
-func (walker *sourceWalker) sqlFilesIn(path string) (paths []string) { - filepath.Walk(path, func(path string, info os.FileInfo, err error) error { - if utils.IsSqlFile(path) { - paths = append(paths, path) - } - - return nil - }) - - return -} diff --git a/core/startWatching.go b/core/startWatching.go deleted file mode 100644 index 0f8cbc0..0000000 --- a/core/startWatching.go +++ /dev/null @@ -1,9 +0,0 @@ -package core - -// startWatching fires a watcher, will die as soon something changed. -func startWatching(errorChan chan error, doneChan chan bool) (err error) { - w := watcher{Done: doneChan, Error: errorChan} - go w.Start() - - return -} diff --git a/core/trigger.go b/core/trigger.go new file mode 100644 index 0000000..37d85f0 --- /dev/null +++ b/core/trigger.go @@ -0,0 +1,100 @@ +package core + +import ( + "fmt" + "io/ioutil" + "regexp" +) + +// loadTriggers loads or reloads all triggers found in FS. +func loadTriggers() (err error) { + successfulCount := len(conf.triggerFiles) + errors := make([]string, 0) + bypass := make(map[string]bool) + + files, err := resolveDependencies(conf.triggerFiles, conf.sqlDirPath+"triggers") + if err != nil { + return err + } + + triggers := make([]*trigger, 0) + for i := len(files) - 1; i >= 0; i-- { + file := files[i] + t := trigger{} + t.path = file + triggers = append(triggers, &t) + + err = downPass(&t, t.path) + if err != nil { + successfulCount-- + errors = append(errors, fmt.Sprintf("%v\n", err)) + bypass[t.path] = true + } + } + + for i := len(triggers) - 1; i >= 0; i-- { + t := triggers[i] + if _, ignore := bypass[t.path]; !ignore { + err = upPass(t, t.path) + if err != nil { + successfulCount-- + errors = append(errors, fmt.Sprintf("%v\n", err)) + } + } + } + + report("triggers", successfulCount, len(conf.triggerFiles), errors) + + return +} + +// trigger is the code unit for triggers. 
+type trigger struct {
+	codeUnit
+	table    string   // name of the table for the trigger
+	function function // related function for trigger
+}
+
+// load loads trigger definition from file.
+func (t *trigger) load() (err error) {
+	definition, err := ioutil.ReadFile(t.path)
+	if err != nil {
+		return err
+	}
+	t.definition = string(definition)
+
+	return
+}
+
+// parse parses trigger for name and signature.
+func (t *trigger) parse() (err error) {
+	triggerFinder := regexp.MustCompile(`(?is)CREATE(?:\s+CONSTRAINT)?\s+TRIGGER\s+(\S+).*?ON\s+(\S+)`)
+	subMatches := triggerFinder.FindStringSubmatch(t.definition)
+
+	if len(subMatches) < 3 {
+		return fmt.Errorf("Can't find a trigger in %s", t.path)
+	}
+
+	t.function = function{codeUnit: codeUnit{definition: t.definition, path: t.path}}
+	t.function.parse()
+
+	t.name = subMatches[1]
+	t.table = subMatches[2]
+
+	return
+}
+
+// drop removes existing trigger from pg.
+func (t *trigger) drop() (err error) {
+	err = t.codeUnit.drop(`DROP TRIGGER IF EXISTS ` + t.name + ` ON ` + t.table)
+	if err != nil {
+		return err
+	}
+
+	return t.function.drop()
+}
+
+// create adds the trigger in pg.
+func (t *trigger) create() (err error) {
+	return t.codeUnit.create(t.definition)
+}
diff --git a/core/trigger/Init.go b/core/trigger/Init.go
deleted file mode 100644
index 2c4c6f9..0000000
--- a/core/trigger/Init.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package trigger
-
-import (
-	"github.com/oelmekki/pgrebase/core/config"
-)
-
-var conf *config.Config
-
-// Init stores configuration for further usage.
-func Init(cfg *config.Config) { - conf = cfg -} diff --git a/core/trigger/Trigger.go b/core/trigger/Trigger.go deleted file mode 100644 index 47507c3..0000000 --- a/core/trigger/Trigger.go +++ /dev/null @@ -1,104 +0,0 @@ -package trigger - -import ( - "fmt" - "github.com/oelmekki/pgrebase/core/codeunit" - "github.com/oelmekki/pgrebase/core/function" - "github.com/oelmekki/pgrebase/core/resolver" - "github.com/oelmekki/pgrebase/core/utils" - "io/ioutil" - "regexp" -) - -// LoadTriggers loads or reloads all triggers found in FS. -func LoadTriggers() (err error) { - successfulCount := len(conf.TriggerFiles) - errors := make([]string, 0) - bypass := make(map[string]bool) - - files, err := resolver.ResolveDependencies(conf.TriggerFiles, conf.SqlDirPath+"triggers") - if err != nil { - return err - } - - triggers := make([]*Trigger, 0) - for i := len(files) - 1; i >= 0; i-- { - file := files[i] - trigger := Trigger{} - trigger.Path = file - triggers = append(triggers, &trigger) - - err = codeunit.DownPass(&trigger, trigger.Path) - if err != nil { - successfulCount-- - errors = append(errors, fmt.Sprintf("%v\n", err)) - bypass[trigger.Path] = true - } - } - - for i := len(triggers) - 1; i >= 0; i-- { - trigger := triggers[i] - if _, ignore := bypass[trigger.Path]; !ignore { - err = codeunit.UpPass(trigger, trigger.Path) - if err != nil { - successfulCount-- - errors = append(errors, fmt.Sprintf("%v\n", err)) - } - } - } - - utils.Report("triggers", successfulCount, len(conf.TriggerFiles), errors) - - return -} - -// Trigger is the code unit for triggers. -type Trigger struct { - codeunit.CodeUnit - Table string // name of the table for the trigger - Function function.Function // related function for trigger -} - -// Load loads trigger definition from file. 
-func (trigger *Trigger) Load() (err error) { - definition, err := ioutil.ReadFile(trigger.Path) - if err != nil { - return err - } - trigger.Definition = string(definition) - - return -} - -// Parse parses trigger for name and signature. -func (trigger *Trigger) Parse() (err error) { - triggerFinder := regexp.MustCompile(`(?is)CREATE(?:\s+CONSTRAINT)?\s+TRIGGER\s+(\S+).*?ON\s+(\S+)`) - subMatches := triggerFinder.FindStringSubmatch(trigger.Definition) - - if len(subMatches) < 3 { - return fmt.Errorf("Can't find a trigger in %s", trigger.Path) - } - - trigger.Function = function.Function{CodeUnit: codeunit.CodeUnit{Definition: trigger.Definition, Path: trigger.Path}} - trigger.Function.Parse() - - trigger.Name = subMatches[1] - trigger.Table = subMatches[2] - - return -} - -// Drop removes existing trigger from pg. -func (trigger *Trigger) Drop() (err error) { - err = trigger.CodeUnit.Drop(`DROP TRIGGER IF EXISTS ` + trigger.Name + ` ON ` + trigger.Table) - if err != nil { - return err - } - - return trigger.Function.Drop() -} - -// Create adds the trigger in pg. -func (trigger *Trigger) Create() (err error) { - return trigger.CodeUnit.Create(trigger.Definition) -} diff --git a/core/trigger/doc.go b/core/trigger/doc.go deleted file mode 100644 index 975aa78..0000000 --- a/core/trigger/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -/* -Package trigger allow to manage trigger code units. -*/ -package trigger diff --git a/core/utils.go b/core/utils.go new file mode 100644 index 0000000..55d5af7 --- /dev/null +++ b/core/utils.go @@ -0,0 +1,68 @@ +package core + +import ( + "database/sql" + "fmt" + "os" + "path" + "regexp" + + _ "github.com/lib/pq" +) + +// isDir checks if file exists and is a directory. +func isDir(filePath string) bool { + info, err := os.Stat(filePath) + if err != nil { + return false + } + return info.IsDir() +} + +// isHiddenFile checks if file is hidden. 
+func isHiddenFile(filePath string) bool {
+	basename := path.Base(filePath)
+	return string(basename[0]) == "."
+}
+
+// isSqlFile checks if provided file is an sql file (only check for extension).
+func isSqlFile(filePath string) bool {
+	sqlFile := regexp.MustCompile(`.*\.sql$`)
+	return sqlFile.MatchString(filePath)
+}
+
+// report pretty prints the result of an import.
+func report(name string, successCount, totalCount int, errors []string) {
+	if os.Getenv("QUIET") != "true" || successCount < totalCount {
+		fmt.Printf("Loaded %d %s", successCount, name)
+
+		if successCount < totalCount {
+			fmt.Printf(" - %d with error", totalCount-successCount)
+		}
+
+		fmt.Printf("\n")
+	}
+
+	for _, err := range errors {
+		fmt.Print(err)
+	}
+}
+
+// query is a wrapper for sql.Query, meant to be the main query interface.
+func query(q string, parameters ...interface{}) (rows *sql.Rows, err error) {
+	var co *sql.DB
+
+	co, err = sql.Open("postgres", conf.databaseUrl)
+	if err != nil {
+		fmt.Printf("can't connect to database : %v\n", err)
+		return rows, err
+	}
+	defer co.Close()
+
+	rows, err = co.Query(q, parameters...)
+	if err != nil {
+		return rows, err
+	}
+
+	return
+}
diff --git a/core/utils/IsDir.go b/core/utils/IsDir.go
deleted file mode 100644
index b01cdc0..0000000
--- a/core/utils/IsDir.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package utils
-
-import (
-	"os"
-)
-
-// IsDir checks if file exists and is a directory.
-func IsDir(filePath string) bool {
-	info, err := os.Stat(filePath)
-	if err != nil {
-		return false
-	}
-	return info.IsDir()
-}
diff --git a/core/utils/IsHiddenFile.go b/core/utils/IsHiddenFile.go
deleted file mode 100644
index 4900b8c..0000000
--- a/core/utils/IsHiddenFile.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package utils
-
-import (
-	"path"
-)
-
-// IsHiddenFile checks if file is hidden.
-func IsHiddenFile(filePath string) bool {
-	basename := path.Base(filePath)
-	return string(basename[0]) == "."
-} diff --git a/core/utils/IsSqlFile.go b/core/utils/IsSqlFile.go deleted file mode 100644 index e591eae..0000000 --- a/core/utils/IsSqlFile.go +++ /dev/null @@ -1,11 +0,0 @@ -package utils - -import ( - "regexp" -) - -// IsSqlFile checks if provided file is an sql file (only check for extension). -func IsSqlFile(filePath string) bool { - isSqlFile := regexp.MustCompile(`.*\.sql$`) - return isSqlFile.MatchString(filePath) -} diff --git a/core/utils/Report.go b/core/utils/Report.go deleted file mode 100644 index 34373d7..0000000 --- a/core/utils/Report.go +++ /dev/null @@ -1,18 +0,0 @@ -package utils - -import ( - "fmt" -) - -// Report pretty prints the result of an import. -func Report(name string, successCount, totalCount int, errors []string) { - fmt.Printf("Loaded %d %s", successCount, name) - if successCount < totalCount { - fmt.Printf(" - %d with error", totalCount-successCount) - } - fmt.Printf("\n") - - for _, err := range errors { - fmt.Printf(err) - } -} diff --git a/core/utils/doc.go b/core/utils/doc.go deleted file mode 100644 index 656d98a..0000000 --- a/core/utils/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package utils provide helpful functions accross app. -package utils diff --git a/core/view.go b/core/view.go new file mode 100644 index 0000000..2ea760a --- /dev/null +++ b/core/view.go @@ -0,0 +1,89 @@ +package core + +import ( + "fmt" + "io/ioutil" + "regexp" +) + +// loadViews loads or reloads all views found in FS. 
+func loadViews() (err error) { + successfulCount := len(conf.viewFiles) + errors := make([]string, 0) + bypass := make(map[string]bool) + + files, err := resolveDependencies(conf.viewFiles, conf.sqlDirPath+"views") + if err != nil { + return err + } + + views := make([]*view, 0) + for i := len(files) - 1; i >= 0; i-- { + file := files[i] + v := view{} + v.path = file + views = append(views, &v) + + err = downPass(&v, v.path) + if err != nil { + successfulCount-- + errors = append(errors, fmt.Sprintf("%v\n", err)) + bypass[v.path] = true + } + } + + for i := len(views) - 1; i >= 0; i-- { + v := views[i] + if _, ignore := bypass[v.path]; !ignore { + err = upPass(v, v.path) + if err != nil { + successfulCount-- + errors = append(errors, fmt.Sprintf("%v\n", err)) + } + } + } + + report("views", successfulCount, len(conf.viewFiles), errors) + + return +} + +// view is the code unit for views. +type view struct { + codeUnit +} + +// load loads view definition from file. +func (v *view) load() (err error) { + definition, err := ioutil.ReadFile(v.path) + if err != nil { + return err + } + v.definition = string(definition) + + return +} + +// parse parses view for name. +func (v *view) parse() (err error) { + nameFinder := regexp.MustCompile(`(?is)CREATE(?:\s+OR\s+REPLACE)?\s+VIEW\s+(\S+)`) + subMatches := nameFinder.FindStringSubmatch(v.definition) + + if len(subMatches) < 2 { + return fmt.Errorf("Can't find a view in %s", v.path) + } + + v.name = subMatches[1] + + return +} + +// drop removes existing view from pg. +func (v *view) drop() (err error) { + return v.codeUnit.drop(`DROP VIEW IF EXISTS ` + v.name) +} + +// create adds the view in pg. 
+func (v *view) create() (err error) { + return v.codeUnit.create(v.definition) +} diff --git a/core/view/Init.go b/core/view/Init.go deleted file mode 100644 index 969ef6a..0000000 --- a/core/view/Init.go +++ /dev/null @@ -1,12 +0,0 @@ -package view - -import ( - "github.com/oelmekki/pgrebase/core/config" -) - -var conf *config.Config - -// Init stores configuration for further usage. -func Init(cfg *config.Config) { - conf = cfg -} diff --git a/core/view/View.go b/core/view/View.go deleted file mode 100644 index 3fc2359..0000000 --- a/core/view/View.go +++ /dev/null @@ -1,92 +0,0 @@ -package view - -import ( - "fmt" - "github.com/oelmekki/pgrebase/core/codeunit" - "github.com/oelmekki/pgrebase/core/resolver" - "github.com/oelmekki/pgrebase/core/utils" - "io/ioutil" - "regexp" -) - -// LoadViews loads or reloads all views found in FS. -func LoadViews() (err error) { - successfulCount := len(conf.ViewFiles) - errors := make([]string, 0) - bypass := make(map[string]bool) - - files, err := resolver.ResolveDependencies(conf.ViewFiles, conf.SqlDirPath+"views") - if err != nil { - return err - } - - views := make([]*View, 0) - for i := len(files) - 1; i >= 0; i-- { - file := files[i] - view := View{} - view.Path = file - views = append(views, &view) - - err = codeunit.DownPass(&view, view.Path) - if err != nil { - successfulCount-- - errors = append(errors, fmt.Sprintf("%v\n", err)) - bypass[view.Path] = true - } - } - - for i := len(views) - 1; i >= 0; i-- { - view := views[i] - if _, ignore := bypass[view.Path]; !ignore { - err = codeunit.UpPass(view, view.Path) - if err != nil { - successfulCount-- - errors = append(errors, fmt.Sprintf("%v\n", err)) - } - } - } - - utils.Report("views", successfulCount, len(conf.ViewFiles), errors) - - return -} - -// View is the code unit for views. -type View struct { - codeunit.CodeUnit -} - -// Load loads view definition from file. 
-func (view *View) Load() (err error) { - definition, err := ioutil.ReadFile(view.Path) - if err != nil { - return err - } - view.Definition = string(definition) - - return -} - -// Parse parses view for name. -func (view *View) Parse() (err error) { - nameFinder := regexp.MustCompile(`(?is)CREATE(?:\s+OR\s+REPLACE)?\s+VIEW\s+(\S+)`) - subMatches := nameFinder.FindStringSubmatch(view.Definition) - - if len(subMatches) < 2 { - return fmt.Errorf("Can't find a view in %s", view.Path) - } - - view.Name = subMatches[1] - - return -} - -// Drop removes existing view from pg. -func (view *View) Drop() (err error) { - return view.CodeUnit.Drop(`DROP VIEW IF EXISTS ` + view.Name) -} - -// Create adds the view in pg. -func (view *View) Create() (err error) { - return view.CodeUnit.Create(view.Definition) -} diff --git a/core/view/doc.go b/core/view/doc.go deleted file mode 100644 index c8542be..0000000 --- a/core/view/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -/* -Package view allow to manage view code units. -*/ -package view diff --git a/core/watcher.go b/core/watcher.go index 27e9fb0..79fd6d9 100644 --- a/core/watcher.go +++ b/core/watcher.go @@ -2,10 +2,10 @@ package core import ( "fmt" - "github.com/fsnotify/fsnotify" - "github.com/oelmekki/pgrebase/core/utils" "os" "path/filepath" + + "github.com/fsnotify/fsnotify" ) // watcher contains data for watching fs for code change. @@ -34,12 +34,12 @@ func (w *watcher) Start() { // build finds all directories and watch them. 
func (w *watcher) build() (err error) { - if err = w.notify.Add(conf.SqlDirPath); err != nil { + if err = w.notify.Add(conf.sqlDirPath); err != nil { return err } - err = filepath.Walk(conf.SqlDirPath, func(path string, info os.FileInfo, err error) error { - if utils.IsDir(path) { + err = filepath.Walk(conf.sqlDirPath, func(path string, info os.FileInfo, err error) error { + if isDir(path) { if err = w.notify.Add(path); err != nil { return err } @@ -57,8 +57,10 @@ func (w *watcher) loop() { for { select { case event := <-w.notify.Events: - if !utils.IsHiddenFile(event.Name) { - fmt.Printf("\nFS changed. Building.\n") + if !isHiddenFile(event.Name) { + if os.Getenv("QUIET") != "true" { + fmt.Printf("\nFS changed. Building.\n") + } w.Done <- true return } diff --git a/doc.go b/doc.go deleted file mode 100644 index 874d255..0000000 --- a/doc.go +++ /dev/null @@ -1,120 +0,0 @@ -/* -PgRebase is a tool that allows you to easily handle your postgres codebase for -functions, triggers and views. - -PgRebase allows you to manage your functions/triggers/views as plain files in -filesystem. You put them in a `sql/` directory, one file per -function/trigger/type/view. - - $ tree sql/ - sql/ - ├── functions/ - │   └── assign_user_to_team.sql - ├── triggers/ - │   └── user_updated_at.sql - └── views/ - └── user_json.sql - -No need to add drop statement in those files, PgRebase will take care of it. - -In watch mode (useful for development), just save your file, pgrebase will -update your database. In normal mode (useful for deployment), pgrebase will -recreate all functions/triggers/views found in your filesystem directory. - -You can now work with postgres codebase live reload, then call pgrebase just -after your migration task in your deployment pipeline. - -Note: this is the documentation for pgrebase as a command tool. If you -want to use pgrebase as a library, see https://github.com/oelmekki/pgrebase/core -(or core subpackage in godoc). 
- - -Usage - -Here is an example session with pgrebase: - - $ export DATABASE_URL=postgres://user:pass@host/db - - $ ./pgrebase sql/ - Loaded 10 functions - Loaded 25 views - Loaded 5 triggers - 1 trigger with error - error while loading sql/triggers/user_updated_at.sql - column users.updated_at does not exist - - - $ ./pgrebase -w sql/ - Loaded 10 functions - Loaded 25 views - Loaded 6 triggers - Watching filesystem for changes... - FS changed. Building. - -When working in development environment, you'll probably want to use watch mode -(`-w`) to have your changes automatically loaded. - -For deployment, add `pgrebase` to your repos and call it after your usual -migrations step: - - DATABASE_URL=your_config ./pgrebase ./sql - - -Handling dependencies - -You can specify dependencies for files using require statement, provided those -files are of the same kind. That is, function files can specify dependencies on -other function files, type files can define dependencies on other type files, -etc. - -Here is an example about how to do it. Let's say your `sql/functions/foo.sql` -files depends on `sql/functions/whatever/bar.sql`: - - $ cat sql/functions/foo.sql - -- require "whatever/bar.sql" - CREATE FUNCTION foo() - [...] - -Filenames are always relative to your target directory (`sql/` in that -example), and within in, to the code kind (`functions/` here). - -Do not try to do funky things like adding `./` or `../`, this is no path -resolution, it just tries to match filenames. - -You can add multiple require lines: - - -- require "common.sql" - -- require "hello/world.sql" - -- require "whatever/bar.sql" - CREATE FUNCTION foo() - [...] - -There is no advanced debugging for circular dependencies for now, so be sure -not to get too wild, here (or else, you will have a "maybe there's circular -dependencies?" message and you will have to figure it out for yourself). 
- - -Caveats - -pgrebase doesn't keep any state about your codebase and does not delete what -is in your database and is not in your codebase. This means that if you want -to remove a trigger/type/view/function, deleting its file is not enough. You -have to use your usual way to migrate db and remove it. - -trigger files should contain both trigger creation and the function it uses. -This is to avoid dropping function still used by trigger (if processing -functions first) or create trigger before its function (if triggers are -processed first). - -files should only contain the definition of the view/function/type/trigger -they're named after (with the exception of trigger files declaring the -function they use). Hazardous results will ensue if it's not the case: only -the first definition will be dropped, but the whole file will be loaded in -pg. - -pgrebase single top concern is to not delete any data. This means that no -DROP will CASCADE. This means that if your database structure depends on -anything defined in pgrebase codebase, it will fail to reload it when it -implies dropping it (that is, most of the time). In other terms, do not -use pgrebase managed functions as default value for your tables' fields. 
-*/ -package main diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..f56746c --- /dev/null +++ b/go.mod @@ -0,0 +1,10 @@ +module gitlab.com/oelmekki/pgrebase + +go 1.20 + +require ( + github.com/fsnotify/fsnotify v1.6.0 + github.com/lib/pq v1.10.9 +) + +require golang.org/x/sys v0.0.0-20220908164124-27713097b956 // indirect diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..2819418 --- /dev/null +++ b/go.sum @@ -0,0 +1,6 @@ +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +golang.org/x/sys v0.0.0-20220908164124-27713097b956 h1:XeJjHH1KiLpKGb6lvMiksZ9l0fVUh+AmGcm0nOMEBOY= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/main.go b/main.go index 77d9f49..f8a53e1 100644 --- a/main.go +++ b/main.go @@ -5,7 +5,7 @@ import ( "fmt" "os" - "github.com/oelmekki/pgrebase/core" + "gitlab.com/oelmekki/pgrebase/core" ) // Usage shows user how they're supposed to use application. diff --git a/test_data/common.sh b/test_data/common.sh new file mode 100644 index 0000000..e87f861 --- /dev/null +++ b/test_data/common.sh @@ -0,0 +1,11 @@ +export PORT=${PG_PORT:-5433} + +if ! which postgres > /dev/null; then + version=$(ls -1 /usr/lib/postgresql/ | tail -n 1) + if [[ "$version" == "" ]]; then + echo "can't find postgres executable path." 
+ exit 1 + fi + + export PATH="/usr/lib/postgresql/$version/bin:$PATH" +fi diff --git a/test_data/fixtures/dependencies/functions/func.sql b/test_data/fixtures/dependencies/functions/func.sql new file mode 100644 index 0000000..aca7272 --- /dev/null +++ b/test_data/fixtures/dependencies/functions/func.sql @@ -0,0 +1,13 @@ +-- require "test_function3_1.sql" +CREATE FUNCTION test_function3() +RETURNS int +LANGUAGE plpgsql +AS $$ + DECLARE + + BEGIN + SELECT test_function3_1(); + RETURN 1; + END +$$ + diff --git a/test_data/fixtures/dependencies/functions/test_function3_1.sql b/test_data/fixtures/dependencies/functions/test_function3_1.sql new file mode 100644 index 0000000..4a86817 --- /dev/null +++ b/test_data/fixtures/dependencies/functions/test_function3_1.sql @@ -0,0 +1,12 @@ +CREATE FUNCTION test_function3_1() +RETURNS int +LANGUAGE plpgsql +AS $$ + DECLARE + + BEGIN + RETURN 1; + END +$$ + + diff --git a/test_data/fixtures/dependencies/views/test_view.sql b/test_data/fixtures/dependencies/views/test_view.sql new file mode 100644 index 0000000..8cc3cd3 --- /dev/null +++ b/test_data/fixtures/dependencies/views/test_view.sql @@ -0,0 +1,3 @@ +-- require "test_view3_1.sql" +CREATE VIEW test_view3 AS +SELECT id, name FROM users UNION SELECT * FROM test_view3_1; diff --git a/test_data/fixtures/dependencies/views/test_view3_1.sql b/test_data/fixtures/dependencies/views/test_view3_1.sql new file mode 100644 index 0000000..329a019 --- /dev/null +++ b/test_data/fixtures/dependencies/views/test_view3_1.sql @@ -0,0 +1,3 @@ +CREATE VIEW test_view3_1 AS +SELECT id, name FROM users; + diff --git a/test_data/fixtures/loading_a_function/functions/func.sql b/test_data/fixtures/loading_a_function/functions/func.sql new file mode 100644 index 0000000..6394766 --- /dev/null +++ b/test_data/fixtures/loading_a_function/functions/func.sql @@ -0,0 +1,12 @@ +CREATE FUNCTION test_function( +) +RETURNS int +LANGUAGE plpgsql +AS $$ + DECLARE + + BEGIN + RETURN 1; + END +$$ + diff --git 
a/test_data/fixtures/loading_a_trigger/triggers/test_trigger.sql b/test_data/fixtures/loading_a_trigger/triggers/test_trigger.sql new file mode 100644 index 0000000..3797f8a --- /dev/null +++ b/test_data/fixtures/loading_a_trigger/triggers/test_trigger.sql @@ -0,0 +1,12 @@ +CREATE FUNCTION test_trigger() +RETURNS trigger +LANGUAGE plpgsql +AS $$ + BEGIN + NEW.active := true; + RETURN NEW; + END +$$; + +CREATE TRIGGER test_trigger BEFORE INSERT ON users +FOR EACH ROW EXECUTE PROCEDURE test_trigger(); diff --git a/test_data/fixtures/loading_a_view/views/test_view.sql b/test_data/fixtures/loading_a_view/views/test_view.sql new file mode 100644 index 0000000..1cddc6f --- /dev/null +++ b/test_data/fixtures/loading_a_view/views/test_view.sql @@ -0,0 +1,2 @@ +CREATE VIEW test_view AS +SELECT id, name FROM users; diff --git a/test_data/fixtures/loading_all/functions/func.sql b/test_data/fixtures/loading_all/functions/func.sql new file mode 100644 index 0000000..5dda0b2 --- /dev/null +++ b/test_data/fixtures/loading_all/functions/func.sql @@ -0,0 +1,12 @@ +CREATE FUNCTION test_function2( +) +RETURNS int +LANGUAGE plpgsql +AS $$ + DECLARE + + BEGIN + RETURN 1; + END +$$ + diff --git a/test_data/fixtures/loading_all/triggers/test_trigger.sql b/test_data/fixtures/loading_all/triggers/test_trigger.sql new file mode 100644 index 0000000..7fcaed5 --- /dev/null +++ b/test_data/fixtures/loading_all/triggers/test_trigger.sql @@ -0,0 +1,12 @@ +CREATE FUNCTION test_trigger2() +RETURNS trigger +LANGUAGE plpgsql +AS $$ + BEGIN + NEW.active := true; + RETURN NEW; + END +$$; + +CREATE TRIGGER test_trigger2 BEFORE INSERT ON users +FOR EACH ROW EXECUTE PROCEDURE test_trigger2(); diff --git a/test_data/fixtures/loading_all/views/test_view.sql b/test_data/fixtures/loading_all/views/test_view.sql new file mode 100644 index 0000000..aad87e4 --- /dev/null +++ b/test_data/fixtures/loading_all/views/test_view.sql @@ -0,0 +1,2 @@ +CREATE VIEW test_view2 AS +SELECT id, name FROM users; diff --git 
a/test_data/fixtures/watcher/views/test_view.sql b/test_data/fixtures/watcher/views/test_view.sql new file mode 100644 index 0000000..fdef28e --- /dev/null +++ b/test_data/fixtures/watcher/views/test_view.sql @@ -0,0 +1,2 @@ +CREATE VIEW test_view4 AS +SELECT id FROM users; diff --git a/test_data/psql.sh b/test_data/psql.sh new file mode 100755 index 0000000..6310889 --- /dev/null +++ b/test_data/psql.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" +. ./common.sh + +psql -h 127.0.0.1 -p $PORT -U postgres -d pgrebase diff --git a/test_data/reset_db.sh b/test_data/reset_db.sh new file mode 100755 index 0000000..c842605 --- /dev/null +++ b/test_data/reset_db.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" +. ./common.sh + +./stop_db.sh + +if [[ -d ./pg ]]; then + rm -rf pg +fi + +initdb -D ./pg -U postgres +./start_db.sh +sleep 3 +createdb -h 127.0.0.1 -p $PORT -U postgres pgrebase +psql -h 127.0.0.1 -p $PORT -U postgres -d pgrebase -c "CREATE TABLE users(id SERIAL, name varchar(255), active boolean NOT NULL DEFAULT false, bio text)" + +echo "Done." diff --git a/test_data/start_db.sh b/test_data/start_db.sh new file mode 100755 index 0000000..0655a04 --- /dev/null +++ b/test_data/start_db.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" +. ./common.sh + +nohup postgres -D ./pg -k . -h 127.0.0.1 -p $PORT -F > ./pg/server_logs & diff --git a/test_data/stop_db.sh b/test_data/stop_db.sh new file mode 100755 index 0000000..cf684ee --- /dev/null +++ b/test_data/stop_db.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" +. ./common.sh + +kill $(cat pg/postmaster.pid | head -n 1)