diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..1230149 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index 34eb6da..0000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: CI - -on: - push: - branches: [ master ] - tags: 'v*' - pull_request: - branches: [ master ] - -jobs: - build: - name: Build & Test - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - - name: Environment - run: | - set -x - docker version - docker-compose version - - - name: Cache - uses: actions/cache@v2 - with: - key: cache - path: .cache - - - name: Build docker image - run: | - set -x - docker-compose build - docker-compose run --rm --no-deps dbmate --version - - - name: Build binaries - run: | - set -x - docker-compose run --rm --no-deps dev make build-all - dist/dbmate-linux-amd64 --version - - - name: Lint - run: docker-compose run --rm --no-deps dev make lint - - - name: Start test dependencies - run: | - set -x - docker-compose pull --quiet - docker-compose up --detach - - - name: Run tests - run: | - set -x - docker-compose run --rm dev make wait - docker-compose run --rm dev make test - - - name: Release - uses: softprops/action-gh-release@v1 - if: ${{ startsWith(github.ref, 'refs/tags/v') }} - with: - files: dist/* - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..f1024f0 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,151 @@ +name: CI + +on: + push: + branches: [main] + tags: "*" + pull_request: + +jobs: + build: + strategy: + fail-fast: false + matrix: + include: + - os: linux + image: ubuntu-latest + arch: amd64 + env: {} + - os: linux + image: ubuntu-latest + arch: arm64 + setup: sudo apt-get update && sudo apt-get install -qq gcc-aarch64-linux-gnu + env: + CC: aarch64-linux-gnu-gcc + CXX: aarch64-linux-gnu-g++ + - os: macos + image: macos-latest + arch: amd64 + env: {} + - os: macos + image: macos-latest + arch: arm64 + env: {} + - os: windows + image: windows-latest + arch: amd64 + env: {} + + name: Build (${{ matrix.os }}/${{ matrix.arch }}) + runs-on: ${{ matrix.image }} + env: ${{ matrix.env }} + + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-go@v2 + with: + go-version: "1.17" + + - name: Setup environment + run: ${{ matrix.setup }} + + - run: go mod download + + - run: make build ls + env: + GOARCH: ${{ matrix.arch }} + OUTPUT: dbmate-${{ matrix.os }}-${{ matrix.arch }} + + - run: dist/dbmate-${{ matrix.os }}-${{ matrix.arch }} --help + if: ${{ matrix.arch == 'amd64' }} + + - name: Publish binaries + uses: softprops/action-gh-release@v1 + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + with: + files: dist/dbmate-* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + docker: + name: Docker Test (linux/amd64) + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + - name: Configure QEMU + uses: docker/setup-qemu-action@v1 + + - name: Configure Buildx + uses: docker/setup-buildx-action@v1 + + - name: Check Docker environment + run: | + set -x + docker version + docker buildx version + docker-compose version + + - name: Build Docker image + run: | + set -x + docker-compose build + docker-compose run --rm --no-deps dbmate --version + + - 
name: Run make build + run: docker-compose run --rm --no-deps dev make build ls + + - name: Run make lint + run: docker-compose run --rm --no-deps dev make lint + + - name: Start test dependencies + run: | + set -x + docker-compose pull --quiet + docker-compose up --detach + docker-compose run --rm dev make wait + + - name: Run make test + run: docker-compose run --rm dev make test + + - name: Login to Docker Hub + uses: docker/login-action@v1 + if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }} + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }} + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Generate Docker image tags + id: meta + uses: docker/metadata-action@v3 + if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }} + with: + images: | + ${{ github.repository }} + ghcr.io/${{ github.repository }} + tags: | + type=ref,event=branch + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + + - name: Publish Docker image + uses: docker/build-push-action@v2 + if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }} + with: + context: . + target: release + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 6c1953e..0000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,62 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. - -name: "CodeQL" - -on: - push: - branches: [ master ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ master ] - schedule: - - cron: '0 0 * * 4' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - language: [ 'go' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] - # Learn more... - # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - # ℹ️ Command-line programs to run using the OS shell. 
- # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 diff --git a/.golangci.yml b/.golangci.yml index ac76638..e985ffd 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -5,12 +5,12 @@ linters: - depguard - errcheck - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret + - revive - rowserrcheck - staticcheck - structcheck diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..dbac111 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,4 @@ +// -*- jsonc -*- +{ + "recommendations": ["esbenp.prettier-vscode"] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..cff2122 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,10 @@ +// -*- jsonc -*- +{ + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.formatOnSave": true, + "files.eol": "\n", + "files.insertFinalNewline": true, + "files.trimFinalNewlines": true, + "files.trimTrailingWhitespace": true, + "go.formatTool": "goimports" +} diff --git a/Dockerfile b/Dockerfile index ebdbdf9..b6fce52 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,36 +1,25 @@ # development image -FROM techknowlogick/xgo:go-1.15.x as dev +FROM golang:1.17 as dev WORKDIR /src -ENV GOCACHE /src/.cache/go-build - -# enable cgo to build sqlite -ENV CGO_ENABLED 1 # install database clients RUN apt-get update \ && apt-get install -qq --no-install-recommends \ curl \ - mysql-client \ + file \ + mariadb-client \ postgresql-client \ sqlite3 \ && rm -rf /var/lib/apt/lists/* # golangci-lint -RUN curl -fsSL -o /tmp/lint-install.sh https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh \ - && chmod +x /tmp/lint-install.sh \ - && /tmp/lint-install.sh -b /usr/local/bin v1.32.2 \ - && rm -f /tmp/lint-install.sh +RUN curl -fsSL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh \ + | sh -s -- -b /usr/local/bin v1.43.0 # download modules -COPY go.* ./ +COPY go.* /src/ RUN go mod download - -ENTRYPOINT [] -CMD ["/bin/bash"] - -# build stage -FROM dev as build -COPY . ./ +COPY . 
/src/ RUN make build # release stage @@ -38,6 +27,7 @@ FROM alpine as release RUN apk add --no-cache \ mariadb-client \ postgresql-client \ - sqlite -COPY --from=build /src/dist/dbmate-linux-amd64 /usr/local/bin/dbmate -ENTRYPOINT ["dbmate"] + sqlite \ + tzdata +COPY --from=dev /src/dist/dbmate /usr/local/bin/dbmate +ENTRYPOINT ["/usr/local/bin/dbmate"] diff --git a/Makefile b/Makefile index 70963e0..08beceb 100644 --- a/Makefile +++ b/Makefile @@ -1,58 +1,59 @@ -# no static linking for macos -LDFLAGS := -ldflags '-s' -# statically link binaries (to support alpine + scratch containers) -STATICLDFLAGS := -ldflags '-s -extldflags "-static"' -# avoid building code that is incompatible with static linking -TAGS := -tags netgo,osusergo,sqlite_omit_load_extension,sqlite_json +# enable cgo to build sqlite +export CGO_ENABLED = 1 + +# strip binaries +FLAGS := -tags sqlite_omit_load_extension,sqlite_json -ldflags '-s' + +GOOS := $(shell go env GOOS) +ifeq ($(GOOS),linux) + # statically link binaries to support alpine linux + FLAGS := -tags netgo,osusergo,sqlite_omit_load_extension,sqlite_json -ldflags '-s -extldflags "-static"' +endif +ifeq ($(GOOS),darwin) + export SDKROOT ?= $(shell xcrun --sdk macosx --show-sdk-path) +endif + +OUTPUT ?= dbmate .PHONY: all -all: build test lint +all: fix build wait test + +.PHONY: clean +clean: + rm -rf dist + +.PHONY: build +build: clean + go build -o dist/$(OUTPUT) $(FLAGS) . + +.PHONY: ls +ls: + ls -lh dist/$(OUTPUT) + file dist/$(OUTPUT) .PHONY: test test: - go test -p 1 $(TAGS) $(STATICLDFLAGS) ./... - -.PHONY: fix -fix: - golangci-lint run --fix + go test -p 1 $(FLAGS) ./... .PHONY: lint lint: golangci-lint run +.PHONY: fix +fix: + golangci-lint run --fix + .PHONY: wait wait: - dist/dbmate-linux-amd64 -e CLICKHOUSE_TEST_URL wait - dist/dbmate-linux-amd64 -e MYSQL_TEST_URL wait - dist/dbmate-linux-amd64 -e POSTGRES_TEST_URL wait + dist/dbmate -e CLICKHOUSE_TEST_URL wait + dist/dbmate -e MYSQL_TEST_URL wait + dist/dbmate -e POSTGRES_TEST_URL wait -.PHONY: clean -clean: - rm -rf dist/* - -.PHONY: build -build: clean build-linux-amd64 - ls -lh dist - -.PHONY: build-linux-amd64 -build-linux-amd64: - GOOS=linux GOARCH=amd64 \ - go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-linux-amd64 . - -.PHONY: build-all -build-all: clean build-linux-amd64 - GOOS=linux GOARCH=arm64 CC=aarch64-linux-gnu-gcc-5 CXX=aarch64-linux-gnu-g++-5 \ - go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-linux-arm64 . - GOOS=darwin GOARCH=amd64 CC=o64-clang CXX=o64-clang++ \ - go build $(TAGS) $(LDFLAGS) -o dist/dbmate-macos-amd64 . - GOOS=windows GOARCH=amd64 CC=x86_64-w64-mingw32-gcc-posix CXX=x86_64-w64-mingw32-g++-posix \ - go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-windows-amd64.exe . 
- ls -lh dist - -.PHONY: docker-make -docker-make: +.PHONY: docker-all +docker-all: + docker-compose pull docker-compose build - docker-compose run --rm dev make + docker-compose run --rm dev make all .PHONY: docker-sh docker-sh: diff --git a/README.md b/README.md index cca110d..b2cb010 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Dbmate -[![GitHub Build](https://img.shields.io/github/workflow/status/amacneil/dbmate/CI/master)](https://github.com/amacneil/dbmate/actions?query=branch%3Amaster+event%3Apush+workflow%3ACI) +[![GitHub Build](https://img.shields.io/github/workflow/status/amacneil/dbmate/CI/main)](https://github.com/amacneil/dbmate/actions?query=branch%3Amain+event%3Apush+workflow%3ACI) [![Go Report Card](https://goreportcard.com/badge/github.com/amacneil/dbmate)](https://goreportcard.com/report/github.com/amacneil/dbmate) [![GitHub Release](https://img.shields.io/github/release/amacneil/dbmate.svg)](https://github.com/amacneil/dbmate/releases) @@ -10,17 +10,40 @@ It is a standalone command line tool, which can be used with Go, Node.js, Python For a comparison between dbmate and other popular database schema migration tools, please see the [Alternatives](#alternatives) table. +## Table of Contents + +- [Features](#features) +- [Installation](#installation) +- [Commands](#commands) + - [Command Line Options](#command-line-options) +- [Usage](#usage) + - [Connecting to the Database](#connecting-to-the-database) + - [PostgreSQL](#postgresql) + - [MySQL](#mysql) + - [SQLite](#sqlite) + - [ClickHouse](#clickhouse) + - [Creating Migrations](#creating-migrations) + - [Running Migrations](#running-migrations) + - [Rolling Back Migrations](#rolling-back-migrations) + - [Migration Options](#migration-options) + - [Waiting For The Database](#waiting-for-the-database) + - [Exporting Schema File](#exporting-schema-file) + - [Internals](#internals) + - [schema_migrations table](#schema_migrations-table) +- [Alternatives](#alternatives) +- [Contributing](#contributing) + ## Features -* Supports MySQL, PostgreSQL, SQLite, and ClickHouse. -* Uses plain SQL for writing schema migrations. -* Migrations are timestamp-versioned, to avoid version number conflicts with multiple developers. -* Migrations are run atomically inside a transaction. -* Supports creating and dropping databases (handy in development/test). -* Supports saving a `schema.sql` file to easily diff schema changes in git. -* Database connection URL is definied using an environment variable (`DATABASE_URL` by default), or specified on the command line. -* Built-in support for reading environment variables from your `.env` file. -* Easy to distribute, single self-contained binary. +- Supports MySQL, PostgreSQL, SQLite, and ClickHouse. +- Uses plain SQL for writing schema migrations. +- Migrations are timestamp-versioned, to avoid version number conflicts with multiple developers. +- Migrations are run atomically inside a transaction. +- Supports creating and dropping databases (handy in development/test). +- Supports saving a `schema.sql` file to easily diff schema changes in git. +- Database connection URL is defined using an environment variable (`DATABASE_URL` by default), or specified on the command line. +- Built-in support for reading environment variables from your `.env` file. +- Easy to distribute, single self-contained binary.
## Installation @@ -43,16 +66,18 @@ $ sudo chmod +x /usr/local/bin/dbmate **Docker** -You can run dbmate using the official docker image (remember to set `--network=host` or see [this comment](https://github.com/amacneil/dbmate/issues/128#issuecomment-615924611) for more tips on using dbmate with docker networking): +Docker images are published to both Docker Hub ([`amacneil/dbmate`](https://hub.docker.com/r/amacneil/dbmate)) and Github Container Registry ([`ghcr.io/amacneil/dbmate`](https://ghcr.io/amacneil/dbmate)). + +Remember to set `--network=host` or see [this comment](https://github.com/amacneil/dbmate/issues/128#issuecomment-615924611) for more tips on using dbmate with docker networking): ```sh -$ docker run --rm -it --network=host amacneil/dbmate --help +$ docker run --rm -it --network=host ghcr.io/amacneil/dbmate:1 --help ``` If you wish to create or apply migrations, you will need to use Docker's [bind mount](https://docs.docker.com/storage/bind-mounts/) feature to make your local working directory (`pwd`) available inside the dbmate container: ```sh -$ docker run --rm -it --network=host -v "$(pwd)/db:/db" amacneil/dbmate new create_users_table +$ docker run --rm -it --network=host -v "$(pwd)/db:/db" ghcr.io/amacneil/dbmate:1 new create_users_table ``` **Heroku** @@ -77,7 +102,7 @@ $ heroku run bin/dbmate up ## Commands ```sh -dbmate # print help +dbmate --help # print usage help dbmate new # generate a new migration file dbmate up # create the database (if it does not already exist) and run any pending migrations dbmate create # create the database @@ -90,8 +115,23 @@ dbmate dump # write the database schema.sql file dbmate wait # wait for the database server to become available ``` +### Command Line Options + +The following options are available with all commands. You must use command line arguments in the order `dbmate [global options] command [command options]`. Most options can also be configured via environment variables (and loaded from your `.env` file, which is helpful to share configuration between team members). + +- `--url, -u "protocol://host:port/dbname"` - specify the database url directly. _(env: `$DATABASE_URL`)_ +- `--env, -e "DATABASE_URL"` - specify an environment variable to read the database connection URL from. +- `--migrations-dir, -d "./db/migrations"` - where to keep the migration files. _(env: `$DBMATE_MIGRATIONS_DIR`)_ +- `--migrations-table "schema_migrations"` - database table to record migrations in. _(env: `$DBMATE_MIGRATIONS_TABLE`)_ +- `--schema-file, -s "./db/schema.sql"` - a path to keep the schema.sql file. _(env: `$DBMATE_SCHEMA_FILE`)_ +- `--no-dump-schema` - don't auto-update the schema.sql file on migrate/rollback _(env: `$DBMATE_NO_DUMP_SCHEMA`)_ +- `--wait` - wait for the db to become available before executing the subsequent command _(env: `$DBMATE_WAIT`)_ +- `--wait-timeout 60s` - timeout for --wait flag _(env: `$DBMATE_WAIT_TIMEOUT`)_ + ## Usage +### Connecting to the Database + Dbmate locates your database using the `DATABASE_URL` environment variable by default. If you are writing a [twelve-factor app](http://12factor.net/), you should be storing all connection strings in environment variables. To make this easy in development, dbmate looks for a `.env` file in the current directory, and treats any variables listed there as if they were specified in the current environment (existing environment variables take preference, however). 
@@ -109,23 +149,37 @@ DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_development?sslmode=disab protocol://username:password@host:port/database_name?options ``` -* `protocol` must be one of `mysql`, `postgres`, `postgresql`, `sqlite`, `sqlite3`, `clickhouse` -* `host` can be either a hostname or IP address -* `options` are driver-specific (refer to the underlying Go SQL drivers if you wish to use these) +- `protocol` must be one of `mysql`, `postgres`, `postgresql`, `sqlite`, `sqlite3`, `clickhouse` +- `host` can be either a hostname or IP address +- `options` are driver-specific (refer to the underlying Go SQL drivers if you wish to use these) -**MySQL** +Dbmate can also load the connection URL from a different environment variable. For example, before running your test suite, you may wish to drop and recreate the test database. One easy way to do this is to store your test database connection URL in the `TEST_DATABASE_URL` environment variable: ```sh -DATABASE_URL="mysql://username:password@127.0.0.1:3306/database_name" +$ cat .env +DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_dev?sslmode=disable" +TEST_DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable" ``` -A `socket` parameter can be specified to connect through a unix socket: +You can then specify this environment variable in your test script (Makefile or similar): ```sh -DATABASE_URL="mysql://username:password@/database_name?socket=/var/run/mysqld/mysqld.sock" +$ dbmate -e TEST_DATABASE_URL drop +Dropping: myapp_test +$ dbmate -e TEST_DATABASE_URL --no-dump-schema up +Creating: myapp_test +Applying: 20151127184807_create_users_table.sql ``` -**PostgreSQL** +Alternatively, you can specify the url directly on the command line: + +```sh +$ dbmate -u "postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable" up +``` + +The only advantage of using `dbmate -e TEST_DATABASE_URL` over `dbmate -u $TEST_DATABASE_URL` is that the former takes advantage of dbmate's automatic `.env` file loading. + +#### PostgreSQL When connecting to Postgres, you may need to add the `sslmode=disable` option to your connection string, as dbmate by default requires a TLS connection (some other frameworks/languages allow unencrypted connections by default). @@ -150,7 +204,19 @@ DATABASE_URL="postgres://username:password@127.0.0.1:5432/database_name?search_p DATABASE_URL="postgres://username:password@127.0.0.1:5432/database_name?search_path=myschema,public" ``` -**SQLite** +#### MySQL + +```sh +DATABASE_URL="mysql://username:password@127.0.0.1:3306/database_name" +``` + +A `socket` parameter can be specified to connect through a unix socket: + +```sh +DATABASE_URL="mysql://username:password@/database_name?socket=/var/run/mysqld/mysqld.sock" +``` + +#### SQLite SQLite databases are stored on the filesystem, so you do not need to specify a host. By default, files are relative to the current directory. For example, the following will create a database at `./db/database.sqlite3`: @@ -164,7 +230,7 @@ To specify an absolute path, add a forward slash to the path. The following will DATABASE_URL="sqlite:/tmp/database.sqlite3" ``` -**ClickHouse** +#### ClickHouse ```sh DATABASE_URL="clickhouse://username:password@127.0.0.1:9000/database_name" @@ -218,6 +284,12 @@ Writing: ./db/schema.sql Pending migrations are always applied in numerical order. 
However, dbmate does not prevent migrations from being applied out of order if they are committed independently (for example: if a developer has been working on a branch for a long time, and commits a migration which has a lower version number than other already-applied migrations, dbmate will simply apply the pending migration). See [#159](https://github.com/amacneil/dbmate/issues/159) for a more detailed explanation. +You can also specify a target version to migrate up to. + +```sh +$ dbmate up 20151127184807 +``` + ### Rolling Back Migrations By default, dbmate doesn't know how to roll back a migration. In development, it's often useful to be able to revert your database to a previous state. To accomplish this, implement the `migrate:down` section: @@ -242,13 +314,21 @@ Rolling back: 20151127184807_create_users_table.sql Writing: ./db/schema.sql ``` +You can also roll back to a specific migration. + +```sh +$ dbmate rollback 20151127184807 +# or, with a limit option +$ dbmate rollback -limit 2 # will roll back the last two migrations +``` + ### Migration Options dbmate supports options passed to a migration block in the form of `key:value` pairs. List of supported options: -* `transaction` +- `transaction` -#### transaction +**transaction** `transaction` is useful if you need to run some SQL which cannot be executed from within a transaction. For example, in Postgres, you would need to disable transactions for migrations that alter an enum type to add a value: @@ -259,23 +339,6 @@ ALTER TYPE colors ADD VALUE 'orange' AFTER 'red'; ``` `transaction` will default to `true` if your database supports it. -### Schema File - -When you run the `up`, `migrate`, or `rollback` commands, dbmate will automatically create a `./db/schema.sql` file containing a complete representation of your database schema. Dbmate keeps this file up to date for you, so you should not manually edit it. - -It is recommended to check this file into source control, so that you can easily review changes to the schema in commits or pull requests. It's also possible to use this file when you want to quickly load a database schema, without running each migration sequentially (for example in your test harness). However, if you do not wish to save this file, you could add it to `.gitignore`, or pass the `--no-dump-schema` command line option. - -To dump the `schema.sql` file without performing any other actions, run `dbmate dump`. Unlike other dbmate actions, this command relies on the respective `pg_dump`, `mysqldump`, or `sqlite3` commands being available in your PATH. If these tools are not available, dbmate will silenty skip the schema dump step during `up`, `migrate`, or `rollback` actions. You can diagnose the issue by running `dbmate dump` and looking at the output: - -```sh -$ dbmate dump -exec: "pg_dump": executable file not found in $PATH -``` - -On Ubuntu or Debian systems, you can fix this by installing `postgresql-client`, `mysql-client`, or `sqlite3` respectively. Ensure that the package version you install is greater than or equal to the version running on your database server. - -> Note: The `schema.sql` file will contain a complete schema for your database, even if some tables or columns were created outside of dbmate migrations. - ### Waiting For The Database If you use a Docker development environment for your project, you may encounter issues with the database not being immediately ready when running migrations or unit tests. This can be due to the database server having only just started.
@@ -313,68 +376,65 @@ Error: unable to connect to database: dial tcp 127.0.0.1:5432: connect: connecti Please note that the `wait` command does not verify whether your specified database exists, only that the server is available and ready (so it will return success if the database server is available, but your database has not yet been created). -### Options +### Exporting Schema File -The following command line options are available with all commands. You must use command line arguments in the order `dbmate [global options] command [command options]`. Most options can also be configured via environment variables (and loaded from your `.env` file, which is helpful to share configuration between team members). +When you run the `up`, `migrate`, or `rollback` commands, dbmate will automatically create a `./db/schema.sql` file containing a complete representation of your database schema. Dbmate keeps this file up to date for you, so you should not manually edit it. -* `--url, -u "protocol://host:port/dbname"` - specify the database url directly. _(env: `$DATABASE_URL`)_ -* `--env, -e "DATABASE_URL"` - specify an environment variable to read the database connection URL from. -* `--migrations-dir, -d "./db/migrations"` - where to keep the migration files. _(env: `$DBMATE_MIGRATIONS_DIR`)_ -* `--schema-file, -s "./db/schema.sql"` - a path to keep the schema.sql file. _(env: `$DBMATE_SCHEMA_FILE`)_ -* `--no-dump-schema` - don't auto-update the schema.sql file on migrate/rollback _(env: `$DBMATE_NO_DUMP_SCHEMA`)_ -* `--wait` - wait for the db to become available before executing the subsequent command _(env: `$DBMATE_WAIT`)_ -* `--wait-timeout 60s` - timeout for --wait flag _(env: `$DBMATE_WAIT_TIMEOUT`)_ +It is recommended to check this file into source control, so that you can easily review changes to the schema in commits or pull requests. It's also possible to use this file when you want to quickly load a database schema, without running each migration sequentially (for example in your test harness). However, if you do not wish to save this file, you could add it to your `.gitignore`, or pass the `--no-dump-schema` command line option. -For example, before running your test suite, you may wish to drop and recreate the test database. One easy way to do this is to store your test database connection URL in the `TEST_DATABASE_URL` environment variable: +To dump the `schema.sql` file without performing any other actions, run `dbmate dump`. Unlike other dbmate actions, this command relies on the respective `pg_dump`, `mysqldump`, or `sqlite3` commands being available in your PATH. If these tools are not available, dbmate will silently skip the schema dump step during `up`, `migrate`, or `rollback` actions. You can diagnose the issue by running `dbmate dump` and looking at the output: ```sh -$ cat .env -TEST_DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable" +$ dbmate dump +exec: "pg_dump": executable file not found in $PATH ``` -You can then specify this environment variable in your test script (Makefile or similar): +On Ubuntu or Debian systems, you can fix this by installing `postgresql-client`, `mysql-client`, or `sqlite3` respectively. Ensure that the package version you install is greater than or equal to the version running on your database server.
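For example, on a Debian or Ubuntu host the missing client tools can be installed with apt (a minimal sketch; package names are taken from the paragraph above and may differ between releases):

```sh
# install the client tools that `dbmate dump` shells out to
$ sudo apt-get update
$ sudo apt-get install postgresql-client   # provides pg_dump
$ sudo apt-get install mysql-client        # provides mysqldump
$ sudo apt-get install sqlite3             # provides the sqlite3 CLI
```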
-```sh -$ dbmate -e TEST_DATABASE_URL drop -Dropping: myapp_test -$ dbmate -e TEST_DATABASE_URL --no-dump-schema up -Creating: myapp_test -Applying: 20151127184807_create_users_table.sql +> Note: The `schema.sql` file will contain a complete schema for your database, even if some tables or columns were created outside of dbmate migrations. + +## Internals + +### schema_migrations table + +By default, dbmate stores a record of each applied migration in a `schema_migrations` table. This table will be created for you automatically if it does not already exist. The table schema is very simple: + +```sql +CREATE TABLE IF NOT EXISTS schema_migrations ( + version VARCHAR(255) PRIMARY KEY +) ``` -Alternatively, you can specify the url directly on the command line: +Dbmate records only the version number of applied migrations, so you can safely rename a migration file without affecting its applied status. -```sh -$ dbmate -u "postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable" up -``` - -The only advantage of using `dbmate -e TEST_DATABASE_URL` over `dbmate -u $TEST_DATABASE_URL` is that the former takes advantage of dbmate's automatic `.env` file loading. +You can customize the name of this table using the `--migrations-table` flag or `$DBMATE_MIGRATIONS_TABLE` environment variable. If you already have a table with this name (possibly from a previous migration tool), you should either manually update it to conform to this schema, or configure dbmate to use a different table name. ## Alternatives Why another database schema migration tool? Dbmate was inspired by many other tools, primarily [Active Record Migrations](http://guides.rubyonrails.org/active_record_migrations.html), with the goals of being trivial to configure, and language & framework independent. Here is a comparison between dbmate and other popular migration tools. 
-| | [goose](https://bitbucket.org/liamstask/goose/) | [sql-migrate](https://github.com/rubenv/sql-migrate) | [golang-migrate/migrate](https://github.com/golang-migrate/migrate) | [activerecord](http://guides.rubyonrails.org/active_record_migrations.html) | [sequelize](http://docs.sequelizejs.com/manual/tutorial/migrations.html) | [dbmate](https://github.com/amacneil/dbmate) | -| --- |:---:|:---:|:---:|:---:|:---:|:---:| -| **Features** | -|Plain SQL migration files|:white_check_mark:|:white_check_mark:|:white_check_mark:|||:white_check_mark:| -|Support for creating and dropping databases||||:white_check_mark:||:white_check_mark:| -|Support for saving schema dump files||||:white_check_mark:||:white_check_mark:| -|Timestamp-versioned migration files|:white_check_mark:|||:white_check_mark:|:white_check_mark:|:white_check_mark:| -|Ability to wait for database to become ready||||||:white_check_mark:| -|Database connection string loaded from environment variables||||||:white_check_mark:| -|Automatically load .env file||||||:white_check_mark:| -|No separate configuration file||||:white_check_mark:|:white_check_mark:|:white_check_mark:| -|Language/framework independent|:eight_pointed_black_star:|:eight_pointed_black_star:|:eight_pointed_black_star:|||:white_check_mark:| -| **Drivers** | -|PostgreSQL|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:| -|MySQL|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:| -|SQLite|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:| -|CliсkHouse|||:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:| +| | [dbmate](https://github.com/amacneil/dbmate) | [goose](https://github.com/pressly/goose) | [sql-migrate](https://github.com/rubenv/sql-migrate) | [golang-migrate](https://github.com/golang-migrate/migrate) | [activerecord](http://guides.rubyonrails.org/active_record_migrations.html) | [sequelize](http://docs.sequelizejs.com/manual/tutorial/migrations.html) | +| ------------------------------------------------------------ | :------------------------------------------: | :---------------------------------------: | :--------------------------------------------------: | :---------------------------------------------------------: | :-------------------------------------------------------------------------: | :----------------------------------------------------------------------: | +| **Features** | +| Plain SQL migration files | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | | | +| Support for creating and dropping databases | :white_check_mark: | | | | :white_check_mark: | | +| Support for saving schema dump files | :white_check_mark: | | | | :white_check_mark: | | +| Timestamp-versioned migration files | :white_check_mark: | :white_check_mark: | | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| Custom schema migrations table | :white_check_mark: | | :white_check_mark: | | | :white_check_mark: | +| Ability to wait for database to become ready | :white_check_mark: | | | | | | +| Database connection string loaded from environment variables | :white_check_mark: | | | | | | +| Automatically load .env file | :white_check_mark: | | | | | | +| No separate configuration file | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| Language/framework independent | 
:white_check_mark: | :eight_pointed_black_star: | :eight_pointed_black_star: | :white_check_mark: | | | +| **Drivers** | +| PostgreSQL | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| MySQL | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| SQLite | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| CliсkHouse | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | :white_check_mark: | > :eight_pointed_black_star: In theory these tools could be used with other languages, but a Go development environment is required because binary builds are not provided. -*If you notice any inaccuracies in this table, please [propose a change](https://github.com/amacneil/dbmate/edit/master/README.md).* +_If you notice any inaccuracies in this table, please [propose a change](https://github.com/amacneil/dbmate/edit/main/README.md)._ ## Contributing @@ -389,5 +449,5 @@ $ make docker-all To start a development shell: ```sh -$ make docker-bash +$ make docker-sh ``` diff --git a/docker-compose.yml b/docker-compose.yml index f2a4754..3134144 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,4 @@ -version: '2.3' +version: "2.3" services: dev: build: @@ -20,10 +20,12 @@ services: build: context: . target: release + image: dbmate_release mysql: - image: mysql:5.7 + image: mysql/mysql-server:8.0 environment: + MYSQL_ROOT_HOST: "%" MYSQL_ROOT_PASSWORD: root postgres: diff --git a/go.mod b/go.mod index 420929a..652b8f1 100644 --- a/go.mod +++ b/go.mod @@ -1,19 +1,22 @@ module github.com/amacneil/dbmate -go 1.15 +go 1.16 require ( github.com/ClickHouse/clickhouse-go v1.4.3 github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-sql-driver/mysql v1.5.0 + github.com/frankban/quicktest v1.11.3 // indirect + github.com/go-sql-driver/mysql v1.6.0 github.com/joho/godotenv v1.3.0 github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d - github.com/kr/pretty v0.1.0 // indirect - github.com/lib/pq v1.8.0 - github.com/mattn/go-sqlite3 v1.14.4 + github.com/kr/text v0.2.0 // indirect + github.com/lib/pq v1.10.0 + github.com/mattn/go-sqlite3 v1.14.6 + github.com/pierrec/lz4 v2.6.0+incompatible // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect - github.com/stretchr/testify v1.4.0 + github.com/stretchr/testify v1.7.0 github.com/urfave/cli/v2 v2.3.0 - gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect + gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) diff --git a/go.sum b/go.sum index 3fd6f97..da9f515 100644 --- a/go.sum +++ b/go.sum @@ -1,4 +1,3 @@ -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/ClickHouse/clickhouse-go v1.4.3 h1:iAFMa2UrQdR5bHJ2/yaSLffZkxpcOYQMCUuKeNXGdqc= github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= @@ -9,48 +8,55 @@ github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0 
h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY= +github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs= -github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d h1:cVtBfNW5XTHiKQe7jDaDBSh/EVM4XLPutLAGboIXuM0= github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d/go.mod h1:P2viExyCEfeWGU259JnaQ34Inuec4R38JCyBx2edgD0= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.8.0 h1:9xohqzkUwzR4Ga4ivdTcawVS89YSDVxXMa3xJX3cGzg= -github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E= +github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= -github.com/mattn/go-sqlite3 v1.14.4 h1:4rQjbDxdu9fSgI/r3KN72G3c2goxknAqHHgPWWs8UlI= -github.com/mattn/go-sqlite3 v1.14.4/go.mod h1:WVKg1VTActs4Qso6iwGbiFih2UIHo0ENGwNd0Lj+XmI= -github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= +github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pierrec/lz4 
v2.6.0+incompatible h1:Ix9yFKn1nSPBLFl/yZknTp8TU5G4Ps0JDmguYK6iH1A= +github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3 h1:fvjTMHxHEw/mxHbtzPi3JCcKXQRAnQTBRo6YCJSVHKI= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/main.go b/main.go index 2ed5875..3899cbe 100644 --- a/main.go +++ b/main.go @@ -109,6 +109,7 @@ func NewApp() *cli.App { }, }, Action: action(func(db *dbmate.DB, c *cli.Context) error { + db.TargetVersion = c.Args().First() db.Verbose = c.Bool("verbose") return db.CreateAndMigrate() }), @@ -129,7 +130,7 @@ func NewApp() *cli.App { }, { Name: "migrate", - Usage: "Migrate to the latest version", + Usage: "Migrate to the specified or latest version", Flags: []cli.Flag{ &cli.BoolFlag{ Name: "verbose", @@ -139,6 +140,7 @@ func NewApp() *cli.App { }, }, 
Action: action(func(db *dbmate.DB, c *cli.Context) error { + db.TargetVersion = c.Args().First() db.Verbose = c.Bool("verbose") return db.Migrate() }), @@ -154,8 +156,16 @@ func NewApp() *cli.App { EnvVars: []string{"DBMATE_VERBOSE"}, Usage: "print the result of each statement execution", }, + &cli.IntFlag{ + Name: "limit", + Aliases: []string{"l"}, + Usage: "Limits the amount of rollbacks (defaults to 1 if no target version is specified)", + Value: -1, + }, }, Action: action(func(db *dbmate.DB, c *cli.Context) error { + db.TargetVersion = c.Args().First() + db.Limit = c.Int("limit") db.Verbose = c.Bool("verbose") return db.Rollback() }), diff --git a/pkg/dbmate/db.go b/pkg/dbmate/db.go index 3350c02..7059043 100644 --- a/pkg/dbmate/db.go +++ b/pkg/dbmate/db.go @@ -4,7 +4,7 @@ import ( "database/sql" "errors" "fmt" - "io/ioutil" + "io" "net/url" "os" "path/filepath" @@ -41,6 +41,9 @@ type DB struct { WaitBefore bool WaitInterval time.Duration WaitTimeout time.Duration + Limit int + TargetVersion string + Log io.Writer } // migrationFileRegexp pattern for valid migration files @@ -63,13 +66,16 @@ func New(databaseURL *url.URL) *DB { WaitBefore: false, WaitInterval: DefaultWaitInterval, WaitTimeout: DefaultWaitTimeout, + Limit: -1, + TargetVersion: "", + Log: os.Stdout, } } // GetDriver initializes the appropriate database driver func (db *DB) GetDriver() (Driver, error) { if db.DatabaseURL == nil || db.DatabaseURL.Scheme == "" { - return nil, errors.New("invalid url") + return nil, errors.New("invalid url, have you set your --url flag or DATABASE_URL environment variable?") } driverFunc := drivers[db.DatabaseURL.Scheme] @@ -80,6 +86,7 @@ func (db *DB) GetDriver() (Driver, error) { config := DriverConfig{ DatabaseURL: db.DatabaseURL, MigrationsTableName: db.MigrationsTableName, + Log: db.Log, } return driverFunc(config), nil @@ -104,22 +111,22 @@ func (db *DB) wait(drv Driver) error { return nil } - fmt.Print("Waiting for database") + fmt.Fprint(db.Log, "Waiting for database") for i := 0 * time.Second; i < db.WaitTimeout; i += db.WaitInterval { - fmt.Print(".") + fmt.Fprint(db.Log, ".") time.Sleep(db.WaitInterval) // attempt connection to database server err = drv.Ping() if err == nil { // connection successful - fmt.Print("\n") + fmt.Fprint(db.Log, "\n") return nil } } // if we find outselves here, we could not connect within the timeout - fmt.Print("\n") + fmt.Fprint(db.Log, "\n") return fmt.Errorf("unable to connect to database: %s", err) } @@ -214,7 +221,7 @@ func (db *DB) dumpSchema(drv Driver) error { return err } - fmt.Printf("Writing: %s\n", db.SchemaFile) + fmt.Fprintf(db.Log, "Writing: %s\n", db.SchemaFile) // ensure schema directory exists if err = ensureDir(filepath.Dir(db.SchemaFile)); err != nil { @@ -222,7 +229,7 @@ func (db *DB) dumpSchema(drv Driver) error { } // write schema to file - return ioutil.WriteFile(db.SchemaFile, schema, 0644) + return os.WriteFile(db.SchemaFile, schema, 0644) } // ensureDir creates a directory if it does not already exist @@ -252,7 +259,7 @@ func (db *DB) NewMigration(name string) error { // check file does not already exist path := filepath.Join(db.MigrationsDir, name) - fmt.Printf("Creating migration: %s\n", path) + fmt.Fprintf(db.Log, "Creating migration: %s\n", path) if _, err := os.Stat(path); !os.IsNotExist(err) { return fmt.Errorf("file already exists") @@ -340,12 +347,12 @@ func (db *DB) migrate(drv Driver) error { for _, filename := range files { ver := migrationVersion(filename) - if ok := applied[ver]; ok { + if ok := applied[ver]; ok 
&& ver != db.TargetVersion { // migration already applied continue } - fmt.Printf("Applying: %s\n", filename) + fmt.Fprintf(db.Log, "Applying: %s\n", filename) up, _, err := parseMigration(filepath.Join(db.MigrationsDir, filename)) if err != nil { @@ -358,7 +365,7 @@ func (db *DB) migrate(drv Driver) error { if err != nil { return err } else if db.Verbose { - printVerbose(result) + db.printVerbose(result) } // record migration @@ -376,6 +383,11 @@ func (db *DB) migrate(drv Driver) error { if err != nil { return err } + + if ver == db.TargetVersion { + fmt.Fprintf(db.Log, "Reached target version %s\n", ver) + break + } } // automatically update schema file, silence errors @@ -386,19 +398,19 @@ func (db *DB) migrate(drv Driver) error { return nil } -func printVerbose(result sql.Result) { +func (db *DB) printVerbose(result sql.Result) { lastInsertID, err := result.LastInsertId() if err == nil { - fmt.Printf("Last insert ID: %d\n", lastInsertID) + fmt.Fprintf(db.Log, "Last insert ID: %d\n", lastInsertID) } rowsAffected, err := result.RowsAffected() if err == nil { - fmt.Printf("Rows affected: %d\n", rowsAffected) + fmt.Fprintf(db.Log, "Rows affected: %d\n", rowsAffected) } } func findMigrationFiles(dir string, re *regexp.Regexp) ([]string, error) { - files, err := ioutil.ReadDir(dir) + files, err := os.ReadDir(dir) if err != nil { return nil, fmt.Errorf("could not find migrations directory `%s`", dir) } @@ -466,55 +478,83 @@ func (db *DB) Rollback() error { } defer dbutil.MustClose(sqlDB) - applied, err := drv.SelectMigrations(sqlDB, 1) + limit := db.Limit + // default limit is -1, if we don't specify a version it should only rollback one version, not all + if limit <= 0 && db.TargetVersion == "" { + limit = 1 + } + + applied, err := drv.SelectMigrations(sqlDB, limit) if err != nil { return err } - // grab most recent applied migration (applied has len=1) - latest := "" - for ver := range applied { - latest = ver - } - if latest == "" { - return fmt.Errorf("can't rollback: no migrations have been applied") + if len(applied) == 0 { + return fmt.Errorf("can't rollback, no migrations found") } - filename, err := findMigrationFile(db.MigrationsDir, latest) - if err != nil { - return err + var versions []string + for v := range applied { + versions = append(versions, v) } - fmt.Printf("Rolling back: %s\n", filename) + // new → old + sort.Sort(sort.Reverse(sort.StringSlice(versions))) - _, down, err := parseMigration(filepath.Join(db.MigrationsDir, filename)) - if err != nil { - return err + if db.TargetVersion != "" { + cache := map[string]bool{} + found := false + + // latest version comes first, so take every version until the version matches + for _, ver := range versions { + if ver == db.TargetVersion { + found = true + break + } + cache[ver] = true + } + if !found { + return fmt.Errorf("target version not found") + } + applied = cache } - execMigration := func(tx dbutil.Transaction) error { - // rollback migration - result, err := tx.Exec(down.Contents) + for version := range applied { + filename, err := findMigrationFile(db.MigrationsDir, version) if err != nil { return err - } else if db.Verbose { - printVerbose(result) } - // remove migration record - return drv.DeleteMigration(tx, latest) - } + fmt.Fprintf(db.Log, "Rolling back: %s\n", filename) + _, down, err := parseMigration(filepath.Join(db.MigrationsDir, filename)) + if err != nil { + return err + } - if down.Options.Transaction() { - // begin transaction - err = doTransaction(sqlDB, execMigration) - } else { - // run outside of 
transaction - err = execMigration(sqlDB) - } + execMigration := func(tx dbutil.Transaction) error { + // rollback migration + result, err := tx.Exec(down.Contents) + if err != nil { + return err + } else if db.Verbose { + db.printVerbose(result) + } - if err != nil { - return err + // remove migration record + return drv.DeleteMigration(tx, version) + } + + if down.Options.Transaction() { + // begin transaction + err = doTransaction(sqlDB, execMigration) + } else { + // run outside of transaction + err = execMigration(sqlDB) + } + + if err != nil { + return err + } } // automatically update schema file, silence errors @@ -548,15 +588,15 @@ func (db *DB) Status(quiet bool) (int, error) { line = fmt.Sprintf("[ ] %s", res.Filename) } if !quiet { - fmt.Println(line) + fmt.Fprintln(db.Log, line) } } totalPending := len(results) - totalApplied if !quiet { - fmt.Println() - fmt.Printf("Applied: %d\n", totalApplied) - fmt.Printf("Pending: %d\n", totalPending) + fmt.Fprintln(db.Log) + fmt.Fprintf(db.Log, "Applied: %d\n", totalApplied) + fmt.Fprintf(db.Log, "Pending: %d\n", totalPending) } return totalPending, nil @@ -579,7 +619,7 @@ func (db *DB) CheckMigrationsStatus(drv Driver) ([]StatusResult, error) { } defer dbutil.MustClose(sqlDB) - applied, err := drv.SelectMigrations(sqlDB, -1) + applied, err := drv.SelectMigrations(sqlDB, db.Limit) if err != nil { return nil, err } diff --git a/pkg/dbmate/db_test.go b/pkg/dbmate/db_test.go index 0c07457..8582a63 100644 --- a/pkg/dbmate/db_test.go +++ b/pkg/dbmate/db_test.go @@ -1,7 +1,6 @@ package dbmate_test import ( - "io/ioutil" "net/url" "os" "path/filepath" @@ -48,6 +47,8 @@ func TestNew(t *testing.T) { require.False(t, db.WaitBefore) require.Equal(t, time.Second, db.WaitInterval) require.Equal(t, 60*time.Second, db.WaitTimeout) + require.Equal(t, -1, db.Limit) + require.Equal(t, "", db.TargetVersion) } func TestGetDriver(t *testing.T) { @@ -55,14 +56,14 @@ func TestGetDriver(t *testing.T) { db := dbmate.New(nil) drv, err := db.GetDriver() require.Nil(t, drv) - require.EqualError(t, err, "invalid url") + require.EqualError(t, err, "invalid url, have you set your --url flag or DATABASE_URL environment variable?") }) t.Run("missing schema", func(t *testing.T) { db := dbmate.New(dbutil.MustParseURL("//hi")) drv, err := db.GetDriver() require.Nil(t, drv) - require.EqualError(t, err, "invalid url") + require.EqualError(t, err, "invalid url, have you set your --url flag or DATABASE_URL environment variable?") }) t.Run("invalid driver", func(t *testing.T) { @@ -102,7 +103,7 @@ func TestDumpSchema(t *testing.T) { db := newTestDB(t, u) // create custom schema file directory - dir, err := ioutil.TempDir("", "dbmate") + dir, err := os.MkdirTemp("", "dbmate") require.NoError(t, err) defer func() { err := os.RemoveAll(dir) @@ -129,7 +130,7 @@ func TestDumpSchema(t *testing.T) { require.NoError(t, err) // verify schema - schema, err := ioutil.ReadFile(db.SchemaFile) + schema, err := os.ReadFile(db.SchemaFile) require.NoError(t, err) require.Contains(t, string(schema), "-- PostgreSQL database dump") } @@ -140,7 +141,7 @@ func TestAutoDumpSchema(t *testing.T) { db.AutoDumpSchema = true // create custom schema file directory - dir, err := ioutil.TempDir("", "dbmate") + dir, err := os.MkdirTemp("", "dbmate") require.NoError(t, err) defer func() { err := os.RemoveAll(dir) @@ -163,7 +164,7 @@ func TestAutoDumpSchema(t *testing.T) { require.NoError(t, err) // verify schema - schema, err := ioutil.ReadFile(db.SchemaFile) + schema, err := os.ReadFile(db.SchemaFile) 
require.NoError(t, err) require.Contains(t, string(schema), "-- PostgreSQL database dump") @@ -176,7 +177,7 @@ func TestAutoDumpSchema(t *testing.T) { require.NoError(t, err) // schema should be recreated - schema, err = ioutil.ReadFile(db.SchemaFile) + schema, err = os.ReadFile(db.SchemaFile) require.NoError(t, err) require.Contains(t, string(schema), "-- PostgreSQL database dump") } @@ -243,9 +244,11 @@ func TestWaitBeforeVerbose(t *testing.T) { `Applying: 20151129054053_test_migration.sql Rows affected: 1 Applying: 20200227231541_test_posts.sql +Rows affected: 0 +Applying: 20220607110405_test_category.sql Rows affected: 0`) require.Contains(t, output, - `Rolling back: 20200227231541_test_posts.sql + `Rolling back: 20220607110405_test_category.sql Rows affected: 0`) } @@ -292,6 +295,37 @@ func TestMigrate(t *testing.T) { } } +func TestMigrateToTarget(t *testing.T) { + for _, u := range testURLs() { + t.Run(u.Scheme, func(t *testing.T) { + db := newTestDB(t, u) + db.TargetVersion = "20151129054053" + drv, err := db.GetDriver() + require.NoError(t, err) + + // drop and recreate database + err = db.Drop() + require.NoError(t, err) + err = db.Create() + require.NoError(t, err) + + // migrate + err = db.Migrate() + require.NoError(t, err) + + // verify results + sqlDB, err := drv.Open() + require.NoError(t, err) + defer dbutil.MustClose(sqlDB) + + count := 0 + err = sqlDB.QueryRow(`select count(*) from schema_migrations`).Scan(&count) + require.NoError(t, err) + require.Equal(t, 1, count) + }) + } +} + func TestUp(t *testing.T) { for _, u := range testURLs() { t.Run(u.Scheme, func(t *testing.T) { @@ -351,13 +385,59 @@ func TestRollback(t *testing.T) { require.NoError(t, err) require.Equal(t, 1, count) - err = sqlDB.QueryRow("select count(*) from posts").Scan(&count) + err = sqlDB.QueryRow("select count(*) from categories").Scan(&count) require.Nil(t, err) // rollback err = db.Rollback() require.NoError(t, err) + // verify rollback + err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count) + require.NoError(t, err) + require.Equal(t, 2, count) + + err = sqlDB.QueryRow("select count(*) from categories").Scan(&count) + require.NotNil(t, err) + require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error()) + }) + } +} + +func TestRollbackToTarget(t *testing.T) { + for _, u := range testURLs() { + t.Run(u.Scheme, func(t *testing.T) { + db := newTestDB(t, u) + drv, err := db.GetDriver() + require.NoError(t, err) + + // drop, recreate, and migrate database + err = db.Drop() + require.NoError(t, err) + err = db.Create() + require.NoError(t, err) + err = db.Migrate() + require.NoError(t, err) + + // verify migration + sqlDB, err := drv.Open() + require.NoError(t, err) + defer dbutil.MustClose(sqlDB) + + count := 0 + err = sqlDB.QueryRow(`select count(*) from schema_migrations + where version = '20151129054053'`).Scan(&count) + require.NoError(t, err) + require.Equal(t, 1, count) + + err = sqlDB.QueryRow("select count(*) from categories").Scan(&count) + require.Nil(t, err) + + // rollback + db.TargetVersion = "20151129054053" + err = db.Rollback() + require.NoError(t, err) + // verify rollback err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count) require.NoError(t, err) @@ -366,6 +446,60 @@ func TestRollback(t *testing.T) { err = sqlDB.QueryRow("select count(*) from posts").Scan(&count) require.NotNil(t, err) require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error()) + + err = sqlDB.QueryRow("select count(*) from 
categories").Scan(&count) + require.NotNil(t, err) + require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error()) + }) + } +} + +func TestRollbackToLimit(t *testing.T) { + for _, u := range testURLs() { + t.Run(u.Scheme, func(t *testing.T) { + db := newTestDB(t, u) + drv, err := db.GetDriver() + require.NoError(t, err) + + // drop, recreate, and migrate database + err = db.Drop() + require.NoError(t, err) + err = db.Create() + require.NoError(t, err) + err = db.Migrate() + require.NoError(t, err) + + // verify migration + sqlDB, err := drv.Open() + require.NoError(t, err) + defer dbutil.MustClose(sqlDB) + + count := 0 + err = sqlDB.QueryRow(`select count(*) from schema_migrations + where version = '20151129054053'`).Scan(&count) + require.NoError(t, err) + require.Equal(t, 1, count) + + err = sqlDB.QueryRow("select count(*) from categories").Scan(&count) + require.Nil(t, err) + + // rollback + db.Limit = 2 + err = db.Rollback() + require.NoError(t, err) + + // verify rollback + err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count) + require.NoError(t, err) + require.Equal(t, 1, count) + + err = sqlDB.QueryRow("select count(*) from posts").Scan(&count) + require.NotNil(t, err) + require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error()) + + err = sqlDB.QueryRow("select count(*) from categories").Scan(&count) + require.NotNil(t, err) + require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error()) }) } } @@ -391,7 +525,7 @@ func TestStatus(t *testing.T) { // two pending results, err := db.CheckMigrationsStatus(drv) require.NoError(t, err) - require.Len(t, results, 2) + require.Len(t, results, 3) require.False(t, results[0].Applied) require.False(t, results[1].Applied) @@ -399,12 +533,13 @@ func TestStatus(t *testing.T) { err = db.Migrate() require.NoError(t, err) - // two applied + // three applied results, err = db.CheckMigrationsStatus(drv) require.NoError(t, err) - require.Len(t, results, 2) + require.Len(t, results, 3) require.True(t, results[0].Applied) require.True(t, results[1].Applied) + require.True(t, results[2].Applied) // rollback last migration err = db.Rollback() @@ -413,9 +548,10 @@ func TestStatus(t *testing.T) { // one applied, one pending results, err = db.CheckMigrationsStatus(drv) require.NoError(t, err) - require.Len(t, results, 2) + require.Len(t, results, 3) require.True(t, results[0].Applied) - require.False(t, results[1].Applied) + require.True(t, results[1].Applied) + require.False(t, results[2].Applied) }) } } diff --git a/pkg/dbmate/driver.go b/pkg/dbmate/driver.go index 09d2503..e79d60d 100644 --- a/pkg/dbmate/driver.go +++ b/pkg/dbmate/driver.go @@ -2,6 +2,7 @@ package dbmate import ( "database/sql" + "io" "net/url" "github.com/amacneil/dbmate/pkg/dbutil" @@ -25,6 +26,7 @@ type Driver interface { type DriverConfig struct { DatabaseURL *url.URL MigrationsTableName string + Log io.Writer } // DriverFunc represents a driver constructor diff --git a/pkg/dbmate/migration.go b/pkg/dbmate/migration.go index 2c0f7b6..bb00018 100644 --- a/pkg/dbmate/migration.go +++ b/pkg/dbmate/migration.go @@ -2,7 +2,7 @@ package dbmate import ( "fmt" - "io/ioutil" + "os" "regexp" "strings" ) @@ -33,7 +33,7 @@ func NewMigration() Migration { // parseMigration reads a migration file and returns (up Migration, down Migration, error) func parseMigration(path string) (Migration, Migration, error) { - data, err := ioutil.ReadFile(path) + data, err := os.ReadFile(path) if err != nil { return NewMigration(), 
NewMigration(), err } diff --git a/pkg/dbmate/version.go b/pkg/dbmate/version.go index a851cca..c0c2cdf 100644 --- a/pkg/dbmate/version.go +++ b/pkg/dbmate/version.go @@ -1,4 +1,4 @@ package dbmate // Version of dbmate -const Version = "1.11.0" +const Version = "1.12.1" diff --git a/pkg/driver/clickhouse/clickhouse.go b/pkg/driver/clickhouse/clickhouse.go index 0204a58..83571a8 100644 --- a/pkg/driver/clickhouse/clickhouse.go +++ b/pkg/driver/clickhouse/clickhouse.go @@ -4,6 +4,7 @@ import ( "bytes" "database/sql" "fmt" + "io" "net/url" "regexp" "sort" @@ -23,6 +24,7 @@ func init() { type Driver struct { migrationsTableName string databaseURL *url.URL + log io.Writer } // NewDriver initializes the driver @@ -30,6 +32,7 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver { return &Driver{ migrationsTableName: config.MigrationsTableName, databaseURL: config.DatabaseURL, + log: config.Log, } } @@ -108,7 +111,7 @@ func (drv *Driver) quoteIdentifier(str string) string { // CreateDatabase creates the specified database func (drv *Driver) CreateDatabase() error { name := drv.databaseName() - fmt.Printf("Creating: %s\n", name) + fmt.Fprintf(drv.log, "Creating: %s\n", name) db, err := drv.openClickHouseDB() if err != nil { @@ -124,7 +127,7 @@ func (drv *Driver) CreateDatabase() error { // DropDatabase drops the specified database (if it exists) func (drv *Driver) DropDatabase() error { name := drv.databaseName() - fmt.Printf("Dropping: %s\n", name) + fmt.Fprintf(drv.log, "Dropping: %s\n", name) db, err := drv.openClickHouseDB() if err != nil { diff --git a/pkg/driver/mysql/mysql.go b/pkg/driver/mysql/mysql.go index a19b74b..fd25747 100644 --- a/pkg/driver/mysql/mysql.go +++ b/pkg/driver/mysql/mysql.go @@ -4,6 +4,7 @@ import ( "bytes" "database/sql" "fmt" + "io" "net/url" "strings" @@ -21,6 +22,7 @@ func init() { type Driver struct { migrationsTableName string databaseURL *url.URL + log io.Writer } // NewDriver initializes the driver @@ -28,6 +30,7 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver { return &Driver{ migrationsTableName: config.MigrationsTableName, databaseURL: config.DatabaseURL, + log: config.Log, } } @@ -49,7 +52,7 @@ func connectionString(u *url.URL) string { // Get decoded user:pass userPassEncoded := u.User.String() - userPass, _ := url.QueryUnescape(userPassEncoded) + userPass, _ := url.PathUnescape(userPassEncoded) // Build DSN w/ user:pass percent-decoded normalizedString := "" @@ -92,7 +95,7 @@ func (drv *Driver) quoteIdentifier(str string) string { // CreateDatabase creates the specified database func (drv *Driver) CreateDatabase() error { name := dbutil.DatabaseName(drv.databaseURL) - fmt.Printf("Creating: %s\n", name) + fmt.Fprintf(drv.log, "Creating: %s\n", name) db, err := drv.openRootDB() if err != nil { @@ -109,7 +112,7 @@ func (drv *Driver) CreateDatabase() error { // DropDatabase drops the specified database (if it exists) func (drv *Driver) DropDatabase() error { name := dbutil.DatabaseName(drv.databaseURL) - fmt.Printf("Dropping: %s\n", name) + fmt.Fprintf(drv.log, "Dropping: %s\n", name) db, err := drv.openRootDB() if err != nil { diff --git a/pkg/driver/mysql/mysql_test.go b/pkg/driver/mysql/mysql_test.go index 0ff97d0..6bab279 100644 --- a/pkg/driver/mysql/mysql_test.go +++ b/pkg/driver/mysql/mysql_test.go @@ -78,6 +78,18 @@ func TestConnectionString(t *testing.T) { require.Equal(t, "duhfsd7s:123!@123!@@tcp(host:123)/foo?flag=on&multiStatements=true", s) }) + t.Run("url encoding", func(t *testing.T) { + u, err := 
url.Parse("mysql://bob%2Balice:secret%5E%5B%2A%28%29@host:123/foo") + require.NoError(t, err) + require.Equal(t, "bob+alice:secret%5E%5B%2A%28%29", u.User.String()) + require.Equal(t, "123", u.Port()) + + s := connectionString(u) + // ensure that '+' is correctly encoded by url.PathUnescape as '+' + // (not whitespace as url.QueryUnescape generates) + require.Equal(t, "bob+alice:secret^[*()@tcp(host:123)/foo?multiStatements=true", s) + }) + t.Run("socket", func(t *testing.T) { // test with no user/pass u, err := url.Parse("mysql:///foo?socket=/var/run/mysqld/mysqld.sock&flag=on") @@ -167,10 +179,8 @@ func TestMySQLDumpSchema(t *testing.T) { drv.databaseURL.Path = "/fakedb" schema, err = drv.DumpSchema(db) require.Nil(t, schema) - require.EqualError(t, err, "mysqldump: [Warning] Using a password "+ - "on the command line interface can be insecure.\n"+ - "mysqldump: Got error: 1049: "+ - "Unknown database 'fakedb' when selecting the database") + require.Error(t, err) + require.Contains(t, err.Error(), "Unknown database 'fakedb'") } func TestMySQLDatabaseExists(t *testing.T) { diff --git a/pkg/driver/postgres/postgres.go b/pkg/driver/postgres/postgres.go index 9624ea9..60b1333 100644 --- a/pkg/driver/postgres/postgres.go +++ b/pkg/driver/postgres/postgres.go @@ -4,7 +4,9 @@ import ( "bytes" "database/sql" "fmt" + "io" "net/url" + "runtime" "strings" "github.com/amacneil/dbmate/pkg/dbmate" @@ -22,6 +24,7 @@ func init() { type Driver struct { migrationsTableName string databaseURL *url.URL + log io.Writer } // NewDriver initializes the driver @@ -29,6 +32,7 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver { return &Driver{ migrationsTableName: config.MigrationsTableName, databaseURL: config.DatabaseURL, + log: config.Log, } } @@ -45,7 +49,14 @@ func connectionString(u *url.URL) string { // default hostname if hostname == "" { - hostname = "localhost" + switch runtime.GOOS { + case "linux": + query.Set("host", "/var/run/postgresql") + case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd": + query.Set("host", "/tmp") + default: + hostname = "localhost" + } } // host param overrides url hostname @@ -112,7 +123,7 @@ func (drv *Driver) openPostgresDB() (*sql.DB, error) { // CreateDatabase creates the specified database func (drv *Driver) CreateDatabase() error { name := dbutil.DatabaseName(drv.databaseURL) - fmt.Printf("Creating: %s\n", name) + fmt.Fprintf(drv.log, "Creating: %s\n", name) db, err := drv.openPostgresDB() if err != nil { @@ -129,7 +140,7 @@ func (drv *Driver) CreateDatabase() error { // DropDatabase drops the specified database (if it exists) func (drv *Driver) DropDatabase() error { name := dbutil.DatabaseName(drv.databaseURL) - fmt.Printf("Dropping: %s\n", name) + fmt.Fprintf(drv.log, "Dropping: %s\n", name) db, err := drv.openPostgresDB() if err != nil { @@ -233,7 +244,7 @@ func (drv *Driver) CreateMigrationsTable(db *sql.DB) error { // in theory we could attempt to create the schema every time, but we avoid that // in case the user doesn't have permissions to create schemas - fmt.Printf("Creating schema: %s\n", schema) + fmt.Fprintf(drv.log, "Creating schema: %s\n", schema) _, err = db.Exec(fmt.Sprintf("create schema if not exists %s", schema)) if err != nil { return err diff --git a/pkg/driver/postgres/postgres_test.go b/pkg/driver/postgres/postgres_test.go index f3f8793..7705d6e 100644 --- a/pkg/driver/postgres/postgres_test.go +++ b/pkg/driver/postgres/postgres_test.go @@ -4,6 +4,7 @@ import ( "database/sql" "net/url" "os" + "runtime" "testing" 
"github.com/amacneil/dbmate/pkg/dbmate" @@ -50,13 +51,24 @@ func TestGetDriver(t *testing.T) { require.Equal(t, "schema_migrations", drv.migrationsTableName) } +func defaultConnString() string { + switch runtime.GOOS { + case "linux": + return "postgres://:5432/foo?host=%2Fvar%2Frun%2Fpostgresql" + case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd": + return "postgres://:5432/foo?host=%2Ftmp" + default: + return "postgres://localhost:5432/foo" + } +} + func TestConnectionString(t *testing.T) { cases := []struct { input string expected string }{ // defaults - {"postgres:///foo", "postgres://localhost:5432/foo"}, + {"postgres:///foo", defaultConnString()}, // support custom url params {"postgres://bob:secret@myhost:1234/foo?bar=baz", "postgres://bob:secret@myhost:1234/foo?bar=baz"}, // support `host` and `port` via url params @@ -85,11 +97,11 @@ func TestConnectionArgsForDump(t *testing.T) { expected []string }{ // defaults - {"postgres:///foo", []string{"postgres://localhost:5432/foo"}}, + {"postgres:///foo", []string{defaultConnString()}}, // support single schema - {"postgres:///foo?search_path=foo", []string{"--schema", "foo", "postgres://localhost:5432/foo"}}, + {"postgres:///foo?search_path=foo", []string{"--schema", "foo", defaultConnString()}}, // support multiple schemas - {"postgres:///foo?search_path=foo,public", []string{"--schema", "foo", "--schema", "public", "postgres://localhost:5432/foo"}}, + {"postgres:///foo?search_path=foo,public", []string{"--schema", "foo", "--schema", "public", defaultConnString()}}, } for _, c := range cases { @@ -174,8 +186,8 @@ func TestPostgresDumpSchema(t *testing.T) { drv.databaseURL.Path = "/fakedb" schema, err = drv.DumpSchema(db) require.Nil(t, schema) - require.EqualError(t, err, "pg_dump: [archiver (db)] connection to database "+ - "\"fakedb\" failed: FATAL: database \"fakedb\" does not exist") + require.Error(t, err) + require.Contains(t, err.Error(), "database \"fakedb\" does not exist") }) t.Run("custom migrations table with schema", func(t *testing.T) { diff --git a/pkg/driver/sqlite/sqlite.go b/pkg/driver/sqlite/sqlite.go index 99ffe3d..be2c5f1 100644 --- a/pkg/driver/sqlite/sqlite.go +++ b/pkg/driver/sqlite/sqlite.go @@ -1,3 +1,4 @@ +//go:build cgo // +build cgo package sqlite @@ -6,6 +7,7 @@ import ( "bytes" "database/sql" "fmt" + "io" "net/url" "os" "regexp" @@ -27,6 +29,7 @@ func init() { type Driver struct { migrationsTableName string databaseURL *url.URL + log io.Writer } // NewDriver initializes the driver @@ -34,6 +37,7 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver { return &Driver{ migrationsTableName: config.MigrationsTableName, databaseURL: config.DatabaseURL, + log: config.Log, } } @@ -56,7 +60,7 @@ func (drv *Driver) Open() (*sql.DB, error) { // CreateDatabase creates the specified database func (drv *Driver) CreateDatabase() error { - fmt.Printf("Creating: %s\n", ConnectionString(drv.databaseURL)) + fmt.Fprintf(drv.log, "Creating: %s\n", ConnectionString(drv.databaseURL)) db, err := drv.Open() if err != nil { @@ -70,7 +74,7 @@ func (drv *Driver) CreateDatabase() error { // DropDatabase drops the specified database (if it exists) func (drv *Driver) DropDatabase() error { path := ConnectionString(drv.databaseURL) - fmt.Printf("Dropping: %s\n", path) + fmt.Fprintf(drv.log, "Dropping: %s\n", path) exists, err := drv.DatabaseExists() if err != nil { diff --git a/pkg/driver/sqlite/sqlite_test.go b/pkg/driver/sqlite/sqlite_test.go index f4638f1..8473e61 100644 --- a/pkg/driver/sqlite/sqlite_test.go +++ 
b/pkg/driver/sqlite/sqlite_test.go
@@ -1,3 +1,4 @@
+//go:build cgo
 // +build cgo
 
 package sqlite
diff --git a/testdata/db/migrations/20220607110405_test_category.sql b/testdata/db/migrations/20220607110405_test_category.sql
new file mode 100644
index 0000000..098ff16
--- /dev/null
+++ b/testdata/db/migrations/20220607110405_test_category.sql
@@ -0,0 +1,9 @@
+-- migrate:up
+create table categories (
+  id integer,
+  title varchar(50),
+  slug varchar(100)
+);
+
+-- migrate:down
+drop table categories;
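The rollback path in pkg/dbmate/db.go now builds an execMigration closure over dbutil.Transaction and either hands it to doTransaction or calls it directly on the connection, depending on down.Options.Transaction(). A minimal sketch of that pattern, with a simplified Transaction interface and a hypothetical runRollback helper standing in for dbmate's own types:

```go
package example

import "database/sql"

// Transaction is the subset of database/sql shared by *sql.DB and *sql.Tx,
// standing in for dbutil.Transaction so one closure can run either inside
// or outside an explicit transaction.
type Transaction interface {
	Exec(query string, args ...interface{}) (sql.Result, error)
}

// runRollback executes the down migration and deletes its record through
// whatever tx it is handed; it neither begins nor commits anything.
// (Placeholder syntax varies by driver; "?" is used here for brevity.)
func runRollback(tx Transaction, contents, version string) error {
	if _, err := tx.Exec(contents); err != nil {
		return err
	}
	_, err := tx.Exec("delete from schema_migrations where version = ?", version)
	return err
}

// rollback mirrors the branching in the diff: wrap the closure in a
// transaction unless the migration opted out of one.
func rollback(db *sql.DB, contents, version string, useTx bool) error {
	if !useTx {
		return runRollback(db, contents, version)
	}
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	if err := runRollback(tx, contents, version); err != nil {
		_ = tx.Rollback()
		return err
	}
	return tx.Commit()
}
```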
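TestMigrateToTarget and TestRollbackToTarget drive the new TargetVersion field (default "" per TestNew): migrating with a target leaves exactly the migrations up to and including that version applied. The filter below is only an illustrative reading of that behaviour, not dbmate's implementation, and assumes versions sort chronologically as strings (which timestamp-prefixed filenames do):

```go
package example

import "sort"

// upTo returns the pending versions to apply when a target is set:
// everything up to and including the target, nothing newer. An empty
// target applies every pending migration, matching the default.
func upTo(pending []string, target string) []string {
	sort.Strings(pending) // timestamp-prefixed names sort chronologically
	if target == "" {
		return pending
	}
	out := make([]string, 0, len(pending))
	for _, v := range pending {
		if v > target {
			break // migrations newer than the target are skipped
		}
		out = append(out, v)
	}
	return out
}
```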
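db.Limit (default -1) now flows into Driver.SelectMigrations via CheckMigrationsStatus, and TestRollbackToLimit shows that a rollback with Limit = 2 undoes the two newest migrations. Assuming -1 means "no limit", the candidate set can be pictured as the newest N applied versions; this is a sketch of that assumption, not the driver code:

```go
package example

import "sort"

// newest returns the n most recently applied versions, newest first,
// or all of them when n is negative (the -1 default). These are the
// candidates a limited rollback would undo.
func newest(applied []string, n int) []string {
	sort.Strings(applied) // ascending, oldest first
	if n < 0 || n > len(applied) {
		n = len(applied)
	}
	out := make([]string, 0, n)
	for i := len(applied) - 1; i >= len(applied)-n; i-- {
		out = append(out, applied[i])
	}
	return out
}
```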
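DriverConfig gains a Log io.Writer and every driver switches its Creating/Dropping messages from fmt.Printf to fmt.Fprintf(drv.log, ...). A toy driver (not one of dbmate's) showing why the injected writer is useful, for example to capture output in a test:

```go
package example

import (
	"bytes"
	"fmt"
	"io"
)

// toyDriver keeps the writer it was configured with, the way each real
// driver now stores config.Log.
type toyDriver struct {
	log io.Writer
}

func (d *toyDriver) CreateDatabase(name string) error {
	// Output goes to the injected writer, so callers decide whether it
	// lands on stdout, in a log file, or in a test buffer.
	fmt.Fprintf(d.log, "Creating: %s\n", name)
	return nil
}

// captured shows the test-friendly side: collect the driver's output in
// a buffer instead of writing to stdout.
func captured() string {
	buf := &bytes.Buffer{}
	drv := &toyDriver{log: buf}
	_ = drv.CreateDatabase("myapp_test")
	return buf.String() // "Creating: myapp_test\n"
}
```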
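The new testdata file shows the migration layout that parseMigration reads with os.ReadFile: one .sql file containing a -- migrate:up block followed by a -- migrate:down block. Below is a deliberately naive splitter for that layout; the real parser also understands per-block options (e.g. transaction settings), which this sketch ignores:

```go
package example

import (
	"os"
	"strings"
)

// splitMigration returns the up and down SQL from a dbmate-style
// migration file. It assumes exactly one "-- migrate:up" block followed
// by one "-- migrate:down" block, which the real parser does not.
func splitMigration(path string) (up, down string, err error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return "", "", err
	}
	body := strings.TrimSpace(string(data))
	body = strings.TrimPrefix(body, "-- migrate:up")
	parts := strings.SplitN(body, "-- migrate:down", 2)
	up = strings.TrimSpace(parts[0])
	if len(parts) == 2 {
		down = strings.TrimSpace(parts[1])
	}
	return up, down, nil
}
```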
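The switch from url.QueryUnescape to url.PathUnescape in the MySQL connection string only changes behaviour for credentials containing '+': QueryUnescape decodes '+' as a space, PathUnescape leaves it intact, which is exactly what the new "url encoding" test asserts. A standalone reproduction:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	u, _ := url.Parse("mysql://bob%2Balice:secret@host:123/foo")

	// Userinfo re-encodes as "bob+alice:secret": '+' is legal in the
	// userinfo component, so it is not percent-escaped.
	encoded := u.User.String()

	q, _ := url.QueryUnescape(encoded) // "bob alice:secret" - '+' became a space
	p, _ := url.PathUnescape(encoded)  // "bob+alice:secret" - '+' preserved

	fmt.Printf("%q\n%q\n%q\n", encoded, q, p)
}
```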
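The Postgres driver no longer defaults a missing hostname to TCP localhost; it prefers the platform's conventional unix socket directory, and defaultConnString() in the tests mirrors the same switch. The fallback, pulled out as a small self-contained sketch (socket paths as in the diff; the helper name is illustrative):

```go
package example

import (
	"net/url"
	"runtime"
)

// applyDefaultHost mimics the fallback: on Linux and the BSD-family
// systems (macOS included) connect via the default unix socket
// directory, everywhere else (e.g. Windows) fall back to TCP localhost.
func applyDefaultHost(hostname string, query url.Values) string {
	if hostname != "" {
		return hostname // an explicit host always wins
	}
	switch runtime.GOOS {
	case "linux":
		query.Set("host", "/var/run/postgresql")
	case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
		query.Set("host", "/tmp")
	default:
		hostname = "localhost"
	}
	return hostname
}
```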