Compare commits


No commits in common. "main" and "v1.11.0" have entirely different histories.

27 changed files with 397 additions and 746 deletions


@@ -1,6 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

.github/workflows/build.yml

@@ -0,0 +1,63 @@
name: CI
on:
push:
branches: [ master ]
tags: 'v*'
pull_request:
branches: [ master ]
jobs:
build:
name: Build & Test
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Environment
run: |
set -x
docker version
docker-compose version
- name: Cache
uses: actions/cache@v2
with:
key: cache
path: .cache
- name: Build docker image
run: |
set -x
docker-compose build
docker-compose run --rm --no-deps dbmate --version
- name: Build binaries
run: |
set -x
docker-compose run --rm --no-deps dev make build-all
dist/dbmate-linux-amd64 --version
- name: Lint
run: docker-compose run --rm --no-deps dev make lint
- name: Start test dependencies
run: |
set -x
docker-compose pull --quiet
docker-compose up --detach
- name: Run tests
run: |
set -x
docker-compose run --rm dev make wait
docker-compose run --rm dev make test
- name: Release
uses: softprops/action-gh-release@v1
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
with:
files: dist/*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
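These workflow steps can be reproduced locally, since each one is a plain docker-compose invocation; a minimal sketch that simply repeats the commands above (it assumes the `dev` and `dbmate` services defined in the docker-compose.yml later in this diff):

```sh
# build the images and confirm the release binary runs
docker-compose build
docker-compose run --rm --no-deps dbmate --version

# lint, build all platform binaries, then start the databases and run the tests
docker-compose run --rm --no-deps dev make lint
docker-compose run --rm --no-deps dev make build-all
docker-compose pull --quiet
docker-compose up --detach
docker-compose run --rm dev make wait
docker-compose run --rm dev make test
```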


@@ -1,151 +0,0 @@
name: CI
on:
push:
branches: [main]
tags: "*"
pull_request:
jobs:
build:
strategy:
fail-fast: false
matrix:
include:
- os: linux
image: ubuntu-latest
arch: amd64
env: {}
- os: linux
image: ubuntu-latest
arch: arm64
setup: sudo apt-get update && sudo apt-get install -qq gcc-aarch64-linux-gnu
env:
CC: aarch64-linux-gnu-gcc
CXX: aarch64-linux-gnu-g++
- os: macos
image: macos-latest
arch: amd64
env: {}
- os: macos
image: macos-latest
arch: arm64
env: {}
- os: windows
image: windows-latest
arch: amd64
env: {}
name: Build (${{ matrix.os }}/${{ matrix.arch }})
runs-on: ${{ matrix.image }}
env: ${{ matrix.env }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: "1.17"
- name: Setup environment
run: ${{ matrix.setup }}
- run: go mod download
- run: make build ls
env:
GOARCH: ${{ matrix.arch }}
OUTPUT: dbmate-${{ matrix.os }}-${{ matrix.arch }}
- run: dist/dbmate-${{ matrix.os }}-${{ matrix.arch }} --help
if: ${{ matrix.arch == 'amd64' }}
- name: Publish binaries
uses: softprops/action-gh-release@v1
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
with:
files: dist/dbmate-*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
docker:
name: Docker Test (linux/amd64)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Configure QEMU
uses: docker/setup-qemu-action@v1
- name: Configure Buildx
uses: docker/setup-buildx-action@v1
- name: Check Docker environment
run: |
set -x
docker version
docker buildx version
docker-compose version
- name: Build Docker image
run: |
set -x
docker-compose build
docker-compose run --rm --no-deps dbmate --version
- name: Run make build
run: docker-compose run --rm --no-deps dev make build ls
- name: Run make lint
run: docker-compose run --rm --no-deps dev make lint
- name: Start test dependencies
run: |
set -x
docker-compose pull --quiet
docker-compose up --detach
docker-compose run --rm dev make wait
- name: Run make test
run: docker-compose run --rm dev make test
- name: Login to Docker Hub
uses: docker/login-action@v1
if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate Docker image tags
id: meta
uses: docker/metadata-action@v3
if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
with:
images: |
${{ github.repository }}
ghcr.io/${{ github.repository }}
tags: |
type=ref,event=branch
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
- name: Publish Docker image
uses: docker/build-push-action@v2
if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
with:
context: .
target: release
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

.github/workflows/codeql-analysis.yml

@@ -0,0 +1,62 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '0 0 * * 4'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'go' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1


@@ -5,12 +5,12 @@ linters:
   - depguard
   - errcheck
   - goimports
+  - golint
   - gosimple
   - govet
   - ineffassign
   - misspell
   - nakedret
-  - revive
   - rowserrcheck
   - staticcheck
   - structcheck


@@ -1,4 +0,0 @@
// -*- jsonc -*-
{
"recommendations": ["esbenp.prettier-vscode"]
}

.vscode/settings.json

@@ -1,10 +0,0 @@
// -*- jsonc -*-
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"files.eol": "\n",
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true,
"files.trimTrailingWhitespace": true,
"go.formatTool": "goimports"
}


@@ -1,25 +1,36 @@
 # development image
-FROM golang:1.17 as dev
+FROM techknowlogick/xgo:go-1.15.x as dev
 WORKDIR /src
-ENV GOCACHE /src/.cache/go-build
-# enable cgo to build sqlite
-ENV CGO_ENABLED 1

 # install database clients
 RUN apt-get update \
   && apt-get install -qq --no-install-recommends \
   curl \
-  file \
-  mariadb-client \
+  mysql-client \
   postgresql-client \
   sqlite3 \
   && rm -rf /var/lib/apt/lists/*

 # golangci-lint
-RUN curl -fsSL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh \
-  | sh -s -- -b /usr/local/bin v1.43.0
+RUN curl -fsSL -o /tmp/lint-install.sh https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh \
+  && chmod +x /tmp/lint-install.sh \
+  && /tmp/lint-install.sh -b /usr/local/bin v1.32.2 \
+  && rm -f /tmp/lint-install.sh

 # download modules
-COPY go.* /src/
+COPY go.* ./
 RUN go mod download

-COPY . /src/
+ENTRYPOINT []
+CMD ["/bin/bash"]
+
+# build stage
+FROM dev as build
+COPY . ./
 RUN make build

 # release stage
@@ -27,7 +38,6 @@ FROM alpine as release
 RUN apk add --no-cache \
   mariadb-client \
   postgresql-client \
-  sqlite \
-  tzdata
+  sqlite
-COPY --from=dev /src/dist/dbmate /usr/local/bin/dbmate
-ENTRYPOINT ["/usr/local/bin/dbmate"]
+COPY --from=build /src/dist/dbmate-linux-amd64 /usr/local/bin/dbmate
+ENTRYPOINT ["dbmate"]


@@ -1,59 +1,58 @@
-# enable cgo to build sqlite
-export CGO_ENABLED = 1
-
-# strip binaries
-FLAGS := -tags sqlite_omit_load_extension,sqlite_json -ldflags '-s'
-
-GOOS := $(shell go env GOOS)
-ifeq ($(GOOS),linux)
-# statically link binaries to support alpine linux
-FLAGS := -tags netgo,osusergo,sqlite_omit_load_extension,sqlite_json -ldflags '-s -extldflags "-static"'
-endif
-ifeq ($(GOOS),darwin)
-export SDKROOT ?= $(shell xcrun --sdk macosx --show-sdk-path)
-endif
-
-OUTPUT ?= dbmate
+# no static linking for macos
+LDFLAGS := -ldflags '-s'
+# statically link binaries (to support alpine + scratch containers)
+STATICLDFLAGS := -ldflags '-s -extldflags "-static"'
+# avoid building code that is incompatible with static linking
+TAGS := -tags netgo,osusergo,sqlite_omit_load_extension,sqlite_json

 .PHONY: all
-all: fix build wait test
-
-.PHONY: clean
-clean:
-	rm -rf dist
-
-.PHONY: build
-build: clean
-	go build -o dist/$(OUTPUT) $(FLAGS) .
-
-.PHONY: ls
-ls:
-	ls -lh dist/$(OUTPUT)
-	file dist/$(OUTPUT)
+all: build test lint

 .PHONY: test
 test:
-	go test -p 1 $(FLAGS) ./...
-
-.PHONY: lint
-lint:
-	golangci-lint run
+	go test -p 1 $(TAGS) $(STATICLDFLAGS) ./...

 .PHONY: fix
 fix:
 	golangci-lint run --fix

+.PHONY: lint
+lint:
+	golangci-lint run
+
 .PHONY: wait
 wait:
-	dist/dbmate -e CLICKHOUSE_TEST_URL wait
-	dist/dbmate -e MYSQL_TEST_URL wait
-	dist/dbmate -e POSTGRES_TEST_URL wait
+	dist/dbmate-linux-amd64 -e CLICKHOUSE_TEST_URL wait
+	dist/dbmate-linux-amd64 -e MYSQL_TEST_URL wait
+	dist/dbmate-linux-amd64 -e POSTGRES_TEST_URL wait

-.PHONY: docker-all
-docker-all:
-	docker-compose pull
+.PHONY: clean
+clean:
+	rm -rf dist/*
+
+.PHONY: build
+build: clean build-linux-amd64
+	ls -lh dist
+
+.PHONY: build-linux-amd64
+build-linux-amd64:
+	GOOS=linux GOARCH=amd64 \
+		go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-linux-amd64 .
+
+.PHONY: build-all
+build-all: clean build-linux-amd64
+	GOOS=linux GOARCH=arm64 CC=aarch64-linux-gnu-gcc-5 CXX=aarch64-linux-gnu-g++-5 \
+		go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-linux-arm64 .
+	GOOS=darwin GOARCH=amd64 CC=o64-clang CXX=o64-clang++ \
+		go build $(TAGS) $(LDFLAGS) -o dist/dbmate-macos-amd64 .
+	GOOS=windows GOARCH=amd64 CC=x86_64-w64-mingw32-gcc-posix CXX=x86_64-w64-mingw32-g++-posix \
+		go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-windows-amd64.exe .
+	ls -lh dist
+
+.PHONY: docker-make
+docker-make:
 	docker-compose build
-	docker-compose run --rm dev make all
+	docker-compose run --rm dev make

 .PHONY: docker-sh
 docker-sh:
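To summarize the two Makefiles disentangled above: main builds a single native binary (`dist/dbmate` by default) with the host Go toolchain, while v1.11.0 cross-compiles one artifact per platform using the xgo toolchains baked into its dev image. A rough sketch of how each side is exercised, using only targets and commands that appear elsewhere in this diff:

```sh
# main: native build and smoke test
make build
dist/dbmate --help

# v1.11.0: cross-compiled artifacts, normally built inside the dev container
docker-compose run --rm --no-deps dev make build-all
dist/dbmate-linux-amd64 --version
```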

README.md

@@ -1,6 +1,6 @@
 # Dbmate
-[![GitHub Build](https://img.shields.io/github/workflow/status/amacneil/dbmate/CI/main)](https://github.com/amacneil/dbmate/actions?query=branch%3Amain+event%3Apush+workflow%3ACI)
+[![GitHub Build](https://img.shields.io/github/workflow/status/amacneil/dbmate/CI/master)](https://github.com/amacneil/dbmate/actions?query=branch%3Amaster+event%3Apush+workflow%3ACI)
 [![Go Report Card](https://goreportcard.com/badge/github.com/amacneil/dbmate)](https://goreportcard.com/report/github.com/amacneil/dbmate)
 [![GitHub Release](https://img.shields.io/github/release/amacneil/dbmate.svg)](https://github.com/amacneil/dbmate/releases)
@@ -10,40 +10,17 @@ It is a standalone command line tool, which can be used with Go, Node.js, Python
 For a comparison between dbmate and other popular database schema migration tools, please see the [Alternatives](#alternatives) table.

-## Table of Contents
-
-- [Features](#features)
-- [Installation](#installation)
-- [Commands](#commands)
-- [Command Line Options](#command-line-options)
-- [Usage](#usage)
-- [Connecting to the Database](#connecting-to-the-database)
-- [PostgreSQL](#postgresql)
-- [MySQL](#mysql)
-- [SQLite](#sqlite)
-- [ClickHouse](#clickhouse)
-- [Creating Migrations](#creating-migrations)
-- [Running Migrations](#running-migrations)
-- [Rolling Back Migrations](#rolling-back-migrations)
-- [Migration Options](#migration-options)
-- [Waiting For The Database](#waiting-for-the-database)
-- [Exporting Schema File](#exporting-schema-file)
-- [Internals](#internals)
-- [schema_migrations table](#schema_migrations-table)
-- [Alternatives](#alternatives)
-- [Contributing](#contributing)
-
 ## Features

-- Supports MySQL, PostgreSQL, SQLite, and ClickHouse.
-- Uses plain SQL for writing schema migrations.
-- Migrations are timestamp-versioned, to avoid version number conflicts with multiple developers.
-- Migrations are run atomically inside a transaction.
-- Supports creating and dropping databases (handy in development/test).
-- Supports saving a `schema.sql` file to easily diff schema changes in git.
-- Database connection URL is definied using an environment variable (`DATABASE_URL` by default), or specified on the command line.
-- Built-in support for reading environment variables from your `.env` file.
-- Easy to distribute, single self-contained binary.
+* Supports MySQL, PostgreSQL, SQLite, and ClickHouse.
+* Uses plain SQL for writing schema migrations.
+* Migrations are timestamp-versioned, to avoid version number conflicts with multiple developers.
+* Migrations are run atomically inside a transaction.
+* Supports creating and dropping databases (handy in development/test).
+* Supports saving a `schema.sql` file to easily diff schema changes in git.
+* Database connection URL is definied using an environment variable (`DATABASE_URL` by default), or specified on the command line.
+* Built-in support for reading environment variables from your `.env` file.
+* Easy to distribute, single self-contained binary.

 ## Installation
@@ -66,18 +43,16 @@ $ sudo chmod +x /usr/local/bin/dbmate
 **Docker**

-Docker images are published to both Docker Hub ([`amacneil/dbmate`](https://hub.docker.com/r/amacneil/dbmate)) and Github Container Registry ([`ghcr.io/amacneil/dbmate`](https://ghcr.io/amacneil/dbmate)).
-
-Remember to set `--network=host` or see [this comment](https://github.com/amacneil/dbmate/issues/128#issuecomment-615924611) for more tips on using dbmate with docker networking):
+You can run dbmate using the official docker image (remember to set `--network=host` or see [this comment](https://github.com/amacneil/dbmate/issues/128#issuecomment-615924611) for more tips on using dbmate with docker networking):

 ```sh
-$ docker run --rm -it --network=host ghcr.io/amacneil/dbmate:1 --help
+$ docker run --rm -it --network=host amacneil/dbmate --help
 ```

 If you wish to create or apply migrations, you will need to use Docker's [bind mount](https://docs.docker.com/storage/bind-mounts/) feature to make your local working directory (`pwd`) available inside the dbmate container:

 ```sh
-$ docker run --rm -it --network=host -v "$(pwd)/db:/db" ghcr.io/amacneil/dbmate:1 new create_users_table
+$ docker run --rm -it --network=host -v "$(pwd)/db:/db" amacneil/dbmate new create_users_table
 ```

 **Heroku**
@@ -102,7 +77,7 @@ $ heroku run bin/dbmate up
 ## Commands

 ```sh
-dbmate --help    # print usage help
+dbmate           # print help
 dbmate new       # generate a new migration file
 dbmate up        # create the database (if it does not already exist) and run any pending migrations
 dbmate create    # create the database
@@ -115,23 +90,8 @@ dbmate dump      # write the database schema.sql file
 dbmate wait      # wait for the database server to become available
 ```

-### Command Line Options
-
-The following options are available with all commands. You must use command line arguments in the order `dbmate [global options] command [command options]`. Most options can also be configured via environment variables (and loaded from your `.env` file, which is helpful to share configuration between team members).
-
-- `--url, -u "protocol://host:port/dbname"` - specify the database url directly. _(env: `$DATABASE_URL`)_
-- `--env, -e "DATABASE_URL"` - specify an environment variable to read the database connection URL from.
-- `--migrations-dir, -d "./db/migrations"` - where to keep the migration files. _(env: `$DBMATE_MIGRATIONS_DIR`)_
-- `--migrations-table "schema_migrations"` - database table to record migrations in. _(env: `$DBMATE_MIGRATIONS_TABLE`)_
-- `--schema-file, -s "./db/schema.sql"` - a path to keep the schema.sql file. _(env: `$DBMATE_SCHEMA_FILE`)_
-- `--no-dump-schema` - don't auto-update the schema.sql file on migrate/rollback _(env: `$DBMATE_NO_DUMP_SCHEMA`)_
-- `--wait` - wait for the db to become available before executing the subsequent command _(env: `$DBMATE_WAIT`)_
-- `--wait-timeout 60s` - timeout for --wait flag _(env: `$DBMATE_WAIT_TIMEOUT`)_
-
 ## Usage

-### Connecting to the Database
-
 Dbmate locates your database using the `DATABASE_URL` environment variable by default. If you are writing a [twelve-factor app](http://12factor.net/), you should be storing all connection strings in environment variables.

 To make this easy in development, dbmate looks for a `.env` file in the current directory, and treats any variables listed there as if they were specified in the current environment (existing environment variables take preference, however).
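A side note on the ordering rule in the Command Line Options section removed above: global options go before the command name, and command options after it. An illustrative invocation, combining global flags from that list with the `--verbose` command flag that appears in the main.go diff further down:

```sh
# --wait and -e are global options; --verbose belongs to the up command
$ dbmate --wait -e TEST_DATABASE_URL up --verbose
```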
@@ -149,37 +109,23 @@ DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_development?sslmode=disab
 protocol://username:password@host:port/database_name?options
 ```

-- `protocol` must be one of `mysql`, `postgres`, `postgresql`, `sqlite`, `sqlite3`, `clickhouse`
-- `host` can be either a hostname or IP address
-- `options` are driver-specific (refer to the underlying Go SQL drivers if you wish to use these)
+* `protocol` must be one of `mysql`, `postgres`, `postgresql`, `sqlite`, `sqlite3`, `clickhouse`
+* `host` can be either a hostname or IP address
+* `options` are driver-specific (refer to the underlying Go SQL drivers if you wish to use these)

-Dbmate can also load the connection URL from a different environment variable. For example, before running your test suite, you may wish to drop and recreate the test database. One easy way to do this is to store your test database connection URL in the `TEST_DATABASE_URL` environment variable:
+**MySQL**

 ```sh
-$ cat .env
-DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_dev?sslmode=disable"
-TEST_DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable"
+DATABASE_URL="mysql://username:password@127.0.0.1:3306/database_name"
 ```

-You can then specify this environment variable in your test script (Makefile or similar):
+A `socket` parameter can be specified to connect through a unix socket:

 ```sh
-$ dbmate -e TEST_DATABASE_URL drop
-Dropping: myapp_test
-
-$ dbmate -e TEST_DATABASE_URL --no-dump-schema up
-Creating: myapp_test
-Applying: 20151127184807_create_users_table.sql
+DATABASE_URL="mysql://username:password@/database_name?socket=/var/run/mysqld/mysqld.sock"
 ```

-Alternatively, you can specify the url directly on the command line:
-
-```sh
-$ dbmate -u "postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable" up
-```
-
-The only advantage of using `dbmate -e TEST_DATABASE_URL` over `dbmate -u $TEST_DATABASE_URL` is that the former takes advantage of dbmate's automatic `.env` file loading.
-
-#### PostgreSQL
+**PostgreSQL**

 When connecting to Postgres, you may need to add the `sslmode=disable` option to your connection string, as dbmate by default requires a TLS connection (some other frameworks/languages allow unencrypted connections by default).
@@ -204,19 +150,7 @@ DATABASE_URL="postgres://username:password@127.0.0.1:5432/database_name?search_p
 DATABASE_URL="postgres://username:password@127.0.0.1:5432/database_name?search_path=myschema,public"
 ```

-#### MySQL
-
-```sh
-DATABASE_URL="mysql://username:password@127.0.0.1:3306/database_name"
-```
-
-A `socket` parameter can be specified to connect through a unix socket:
-
-```sh
-DATABASE_URL="mysql://username:password@/database_name?socket=/var/run/mysqld/mysqld.sock"
-```
-
-#### SQLite
+**SQLite**

 SQLite databases are stored on the filesystem, so you do not need to specify a host. By default, files are relative to the current directory. For example, the following will create a database at `./db/database.sqlite3`:
@@ -230,7 +164,7 @@ To specify an absolute path, add a forward slash to the path. The following will
 DATABASE_URL="sqlite:/tmp/database.sqlite3"
 ```

-#### ClickHouse
+**ClickHouse**

 ```sh
 DATABASE_URL="clickhouse://username:password@127.0.0.1:9000/database_name"
@@ -284,12 +218,6 @@ Writing: ./db/schema.sql
 Pending migrations are always applied in numerical order. However, dbmate does not prevent migrations from being applied out of order if they are committed independently (for example: if a developer has been working on a branch for a long time, and commits a migration which has a lower version number than other already-applied migrations, dbmate will simply apply the pending migration). See [#159](https://github.com/amacneil/dbmate/issues/159) for a more detailed explanation.

-You can also specify a migration to up-to.
-
-```sh
-$ dbmate up 20151127184807
-```
-
 ### Rolling Back Migrations

 By default, dbmate doesn't know how to roll back a migration. In development, it's often useful to be able to revert your database to a previous state. To accomplish this, implement the `migrate:down` section:
@@ -314,21 +242,13 @@ Rolling back: 20151127184807_create_users_table.sql
 Writing: ./db/schema.sql
 ```

-You can also rollback to a specific migration.
-
-```sh
-$ dbmate rollback 20151127184807
-
-# or, with a limit option
-$ dbmate rollback -limit 2 # will rollback the last two migrations
-```
-
 ### Migration Options

 dbmate supports options passed to a migration block in the form of `key:value` pairs. List of supported options:

-- `transaction`
+* `transaction`

-**transaction**
+#### transaction

 `transaction` is useful if you need to run some SQL which cannot be executed from within a transaction. For example, in Postgres, you would need to disable transactions for migrations that alter an enum type to add a value:
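The example this sentence leads into falls between diff hunks; for illustration, a complete migration using the option could look as follows (the file name is hypothetical, and the `migrate:down` section is left empty because Postgres cannot drop an enum value):

```sh
$ cat db/migrations/20151127184807_add_orange_to_colors.sql
-- migrate:up transaction:false
ALTER TYPE colors ADD VALUE 'orange' AFTER 'red';

-- migrate:down
```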
@@ -339,6 +259,23 @@ ALTER TYPE colors ADD VALUE 'orange' AFTER 'red';

 `transaction` will default to `true` if your database supports it.

+### Schema File
+
+When you run the `up`, `migrate`, or `rollback` commands, dbmate will automatically create a `./db/schema.sql` file containing a complete representation of your database schema. Dbmate keeps this file up to date for you, so you should not manually edit it.
+
+It is recommended to check this file into source control, so that you can easily review changes to the schema in commits or pull requests. It's also possible to use this file when you want to quickly load a database schema, without running each migration sequentially (for example in your test harness). However, if you do not wish to save this file, you could add it to `.gitignore`, or pass the `--no-dump-schema` command line option.
+
+To dump the `schema.sql` file without performing any other actions, run `dbmate dump`. Unlike other dbmate actions, this command relies on the respective `pg_dump`, `mysqldump`, or `sqlite3` commands being available in your PATH. If these tools are not available, dbmate will silenty skip the schema dump step during `up`, `migrate`, or `rollback` actions. You can diagnose the issue by running `dbmate dump` and looking at the output:
+
+```sh
+$ dbmate dump
+exec: "pg_dump": executable file not found in $PATH
+```
+
+On Ubuntu or Debian systems, you can fix this by installing `postgresql-client`, `mysql-client`, or `sqlite3` respectively. Ensure that the package version you install is greater than or equal to the version running on your database server.
+
+> Note: The `schema.sql` file will contain a complete schema for your database, even if some tables or columns were created outside of dbmate migrations.
+
 ### Waiting For The Database

 If you use a Docker development environment for your project, you may encounter issues with the database not being immediately ready when running migrations or unit tests. This can be due to the database server having only just started.
@@ -376,65 +313,68 @@ Error: unable to connect to database: dial tcp 127.0.0.1:5432: connect: connecti
 Please note that the `wait` command does not verify whether your specified database exists, only that the server is available and ready (so it will return success if the database server is available, but your database has not yet been created).

-### Exporting Schema File
-
-When you run the `up`, `migrate`, or `rollback` commands, dbmate will automatically create a `./db/schema.sql` file containing a complete representation of your database schema. Dbmate keeps this file up to date for you, so you should not manually edit it.
-
-It is recommended to check this file into source control, so that you can easily review changes to the schema in commits or pull requests. It's also possible to use this file when you want to quickly load a database schema, without running each migration sequentially (for example in your test harness). However, if you do not wish to save this file, you could add it to your `.gitignore`, or pass the `--no-dump-schema` command line option.
-
-To dump the `schema.sql` file without performing any other actions, run `dbmate dump`. Unlike other dbmate actions, this command relies on the respective `pg_dump`, `mysqldump`, or `sqlite3` commands being available in your PATH. If these tools are not available, dbmate will silenty skip the schema dump step during `up`, `migrate`, or `rollback` actions. You can diagnose the issue by running `dbmate dump` and looking at the output:
-
-```sh
-$ dbmate dump
-exec: "pg_dump": executable file not found in $PATH
-```
-
-On Ubuntu or Debian systems, you can fix this by installing `postgresql-client`, `mysql-client`, or `sqlite3` respectively. Ensure that the package version you install is greater than or equal to the version running on your database server.
-
-> Note: The `schema.sql` file will contain a complete schema for your database, even if some tables or columns were created outside of dbmate migrations.
-
-## Internals
-
-### schema_migrations table
-
-By default, dbmate stores a record of each applied migration in a `schema_migrations` table. This table will be created for you automatically if it does not already exist. The table schema is very simple:
-
-```sql
-CREATE TABLE IF NOT EXISTS schema_migrations (
-  version VARCHAR(255) PRIMARY KEY
-)
-```
-
-Dbmate records only the version number of applied migrations, so you can safely rename a migration file without affecting its applied status.
-
-You can customize the name of this table using the `--migrations-table` flag or `$DBMATE_MIGRATIONS_TABLE` environment variable. If you already have a table with this name (possibly from a previous migration tool), you should either manually update it to conform to this schema, or configure dbmate to use a different table name.
+### Options
+
+The following command line options are available with all commands. You must use command line arguments in the order `dbmate [global options] command [command options]`. Most options can also be configured via environment variables (and loaded from your `.env` file, which is helpful to share configuration between team members).
+
+* `--url, -u "protocol://host:port/dbname"` - specify the database url directly. _(env: `$DATABASE_URL`)_
+* `--env, -e "DATABASE_URL"` - specify an environment variable to read the database connection URL from.
+* `--migrations-dir, -d "./db/migrations"` - where to keep the migration files. _(env: `$DBMATE_MIGRATIONS_DIR`)_
+* `--schema-file, -s "./db/schema.sql"` - a path to keep the schema.sql file. _(env: `$DBMATE_SCHEMA_FILE`)_
+* `--no-dump-schema` - don't auto-update the schema.sql file on migrate/rollback _(env: `$DBMATE_NO_DUMP_SCHEMA`)_
+* `--wait` - wait for the db to become available before executing the subsequent command _(env: `$DBMATE_WAIT`)_
+* `--wait-timeout 60s` - timeout for --wait flag _(env: `$DBMATE_WAIT_TIMEOUT`)_
+
+For example, before running your test suite, you may wish to drop and recreate the test database. One easy way to do this is to store your test database connection URL in the `TEST_DATABASE_URL` environment variable:
+
+```sh
+$ cat .env
+TEST_DATABASE_URL="postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable"
+```
+
+You can then specify this environment variable in your test script (Makefile or similar):
+
+```sh
+$ dbmate -e TEST_DATABASE_URL drop
+Dropping: myapp_test
+
+$ dbmate -e TEST_DATABASE_URL --no-dump-schema up
+Creating: myapp_test
+Applying: 20151127184807_create_users_table.sql
+```
+
+Alternatively, you can specify the url directly on the command line:
+
+```sh
+$ dbmate -u "postgres://postgres@127.0.0.1:5432/myapp_test?sslmode=disable" up
+```
+
+The only advantage of using `dbmate -e TEST_DATABASE_URL` over `dbmate -u $TEST_DATABASE_URL` is that the former takes advantage of dbmate's automatic `.env` file loading.

 ## Alternatives

 Why another database schema migration tool? Dbmate was inspired by many other tools, primarily [Active Record Migrations](http://guides.rubyonrails.org/active_record_migrations.html), with the goals of being trivial to configure, and language & framework independent. Here is a comparison between dbmate and other popular migration tools.

-| | [dbmate](https://github.com/amacneil/dbmate) | [goose](https://github.com/pressly/goose) | [sql-migrate](https://github.com/rubenv/sql-migrate) | [golang-migrate](https://github.com/golang-migrate/migrate) | [activerecord](http://guides.rubyonrails.org/active_record_migrations.html) | [sequelize](http://docs.sequelizejs.com/manual/tutorial/migrations.html) |
-| ------------------------------------------------------------ | :------------------------------------------: | :---------------------------------------: | :--------------------------------------------------: | :---------------------------------------------------------: | :-------------------------------------------------------------------------: | :----------------------------------------------------------------------: |
+| | [goose](https://bitbucket.org/liamstask/goose/) | [sql-migrate](https://github.com/rubenv/sql-migrate) | [golang-migrate/migrate](https://github.com/golang-migrate/migrate) | [activerecord](http://guides.rubyonrails.org/active_record_migrations.html) | [sequelize](http://docs.sequelizejs.com/manual/tutorial/migrations.html) | [dbmate](https://github.com/amacneil/dbmate) |
+| --- |:---:|:---:|:---:|:---:|:---:|:---:|
 | **Features** |
-| Plain SQL migration files | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | | |
-| Support for creating and dropping databases | :white_check_mark: | | | | :white_check_mark: | |
-| Support for saving schema dump files | :white_check_mark: | | | | :white_check_mark: | |
-| Timestamp-versioned migration files | :white_check_mark: | :white_check_mark: | | :white_check_mark: | :white_check_mark: | :white_check_mark: |
-| Custom schema migrations table | :white_check_mark: | | :white_check_mark: | | | :white_check_mark: |
-| Ability to wait for database to become ready | :white_check_mark: | | | | | |
-| Database connection string loaded from environment variables | :white_check_mark: | | | | | |
-| Automatically load .env file | :white_check_mark: | | | | | |
-| No separate configuration file | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | :white_check_mark: |
-| Language/framework independent | :white_check_mark: | :eight_pointed_black_star: | :eight_pointed_black_star: | :white_check_mark: | | |
+|Plain SQL migration files|:white_check_mark:|:white_check_mark:|:white_check_mark:|||:white_check_mark:|
+|Support for creating and dropping databases||||:white_check_mark:||:white_check_mark:|
+|Support for saving schema dump files||||:white_check_mark:||:white_check_mark:|
+|Timestamp-versioned migration files|:white_check_mark:|||:white_check_mark:|:white_check_mark:|:white_check_mark:|
+|Ability to wait for database to become ready||||||:white_check_mark:|
+|Database connection string loaded from environment variables||||||:white_check_mark:|
+|Automatically load .env file||||||:white_check_mark:|
+|No separate configuration file||||:white_check_mark:|:white_check_mark:|:white_check_mark:|
+|Language/framework independent|:eight_pointed_black_star:|:eight_pointed_black_star:|:eight_pointed_black_star:|||:white_check_mark:|
 | **Drivers** |
 |PostgreSQL|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|
 |MySQL|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|
 |SQLite|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|
-| CliсkHouse | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+|CliсkHouse|||:white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark:|

 > :eight_pointed_black_star: In theory these tools could be used with other languages, but a Go development environment is required because binary builds are not provided.

-_If you notice any inaccuracies in this table, please [propose a change](https://github.com/amacneil/dbmate/edit/main/README.md)._
+*If you notice any inaccuracies in this table, please [propose a change](https://github.com/amacneil/dbmate/edit/master/README.md).*

 ## Contributing
@@ -449,5 +389,5 @@ $ make docker-all
 To start a development shell:

 ```sh
-$ make docker-sh
+$ make docker-bash
 ```


@@ -1,4 +1,4 @@
-version: "2.3"
+version: '2.3'
 services:
   dev:
     build:
@@ -20,12 +20,10 @@ services:
     build:
       context: .
       target: release
-    image: dbmate_release

   mysql:
-    image: mysql/mysql-server:8.0
+    image: mysql:5.7
     environment:
-      MYSQL_ROOT_HOST: "%"
       MYSQL_ROOT_PASSWORD: root

   postgres:
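The `mysql` and `postgres` services adjusted here are the test dependencies that the CI workflows earlier in this diff start before running the suite; the local equivalent is roughly:

```sh
# start the database containers in the background, then block until they accept connections
docker-compose up --detach
docker-compose run --rm dev make wait
```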

go.mod

@@ -1,22 +1,19 @@
 module github.com/amacneil/dbmate

-go 1.16
+go 1.15

 require (
 	github.com/ClickHouse/clickhouse-go v1.4.3
 	github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
-	github.com/frankban/quicktest v1.11.3 // indirect
-	github.com/go-sql-driver/mysql v1.6.0
+	github.com/go-sql-driver/mysql v1.5.0
 	github.com/joho/godotenv v1.3.0
 	github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d
-	github.com/kr/text v0.2.0 // indirect
-	github.com/lib/pq v1.10.0
-	github.com/mattn/go-sqlite3 v1.14.6
-	github.com/pierrec/lz4 v2.6.0+incompatible // indirect
+	github.com/kr/pretty v0.1.0 // indirect
+	github.com/lib/pq v1.8.0
+	github.com/mattn/go-sqlite3 v1.14.4
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
-	github.com/stretchr/testify v1.7.0
+	github.com/stretchr/testify v1.4.0
 	github.com/urfave/cli/v2 v2.3.0
-	gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
-	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
+	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
 )

go.sum

@@ -1,3 +1,4 @@
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/ClickHouse/clickhouse-go v1.4.3 h1:iAFMa2UrQdR5bHJ2/yaSLffZkxpcOYQMCUuKeNXGdqc= github.com/ClickHouse/clickhouse-go v1.4.3 h1:iAFMa2UrQdR5bHJ2/yaSLffZkxpcOYQMCUuKeNXGdqc=
github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
@ -8,55 +9,48 @@ github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY=
github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d h1:cVtBfNW5XTHiKQe7jDaDBSh/EVM4XLPutLAGboIXuM0= github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d h1:cVtBfNW5XTHiKQe7jDaDBSh/EVM4XLPutLAGboIXuM0=
github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d/go.mod h1:P2viExyCEfeWGU259JnaQ34Inuec4R38JCyBx2edgD0= github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d/go.mod h1:P2viExyCEfeWGU259JnaQ34Inuec4R38JCyBx2edgD0=
github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E= github.com/lib/pq v1.8.0 h1:9xohqzkUwzR4Ga4ivdTcawVS89YSDVxXMa3xJX3cGzg=
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.4 h1:4rQjbDxdu9fSgI/r3KN72G3c2goxknAqHHgPWWs8UlI=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.4/go.mod h1:WVKg1VTActs4Qso6iwGbiFih2UIHo0ENGwNd0Lj+XmI=
github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4 v2.6.0+incompatible h1:Ix9yFKn1nSPBLFl/yZknTp8TU5G4Ps0JDmguYK6iH1A=
github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3 h1:fvjTMHxHEw/mxHbtzPi3JCcKXQRAnQTBRo6YCJSVHKI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

main.go

@@ -109,7 +109,6 @@ func NewApp() *cli.App {
 				},
 			},
 			Action: action(func(db *dbmate.DB, c *cli.Context) error {
-				db.TargetVersion = c.Args().First()
 				db.Verbose = c.Bool("verbose")
 				return db.CreateAndMigrate()
 			}),
@@ -130,7 +129,7 @@ func NewApp() *cli.App {
 		},
 		{
 			Name:  "migrate",
-			Usage: "Migrate to the specified or latest version",
+			Usage: "Migrate to the latest version",
 			Flags: []cli.Flag{
 				&cli.BoolFlag{
 					Name:    "verbose",
@@ -140,7 +139,6 @@ func NewApp() *cli.App {
 				},
 			},
 			Action: action(func(db *dbmate.DB, c *cli.Context) error {
-				db.TargetVersion = c.Args().First()
 				db.Verbose = c.Bool("verbose")
 				return db.Migrate()
 			}),
@@ -156,16 +154,8 @@ func NewApp() *cli.App {
 					EnvVars: []string{"DBMATE_VERBOSE"},
 					Usage:   "print the result of each statement execution",
 				},
-				&cli.IntFlag{
-					Name:    "limit",
-					Aliases: []string{"l"},
-					Usage:   "Limits the amount of rollbacks (defaults to 1 if no target version is specified)",
-					Value:   -1,
-				},
 			},
 			Action: action(func(db *dbmate.DB, c *cli.Context) error {
-				db.TargetVersion = c.Args().First()
-				db.Limit = c.Int("limit")
 				db.Verbose = c.Bool("verbose")
 				return db.Rollback()
 			}),
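The `TargetVersion`, `Limit`, and `--limit` flag removed in this hunk back the target-version and rollback-limit usage shown in the README portion of this diff; in CLI terms, the left-hand side (main) accepts invocations like the following, which v1.11.0 does not (it always migrates to the latest version and rolls back exactly one migration):

```sh
# main only: migrate up to, or roll back to, a specific version
$ dbmate up 20151127184807
$ dbmate rollback 20151127184807

# main only: roll back the last two migrations
$ dbmate rollback -limit 2
```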


@ -4,7 +4,7 @@ import (
"database/sql" "database/sql"
"errors" "errors"
"fmt" "fmt"
"io" "io/ioutil"
"net/url" "net/url"
"os" "os"
"path/filepath" "path/filepath"
@ -41,9 +41,6 @@ type DB struct {
WaitBefore bool WaitBefore bool
WaitInterval time.Duration WaitInterval time.Duration
WaitTimeout time.Duration WaitTimeout time.Duration
Limit int
TargetVersion string
Log io.Writer
} }
// migrationFileRegexp pattern for valid migration files // migrationFileRegexp pattern for valid migration files
@ -66,16 +63,13 @@ func New(databaseURL *url.URL) *DB {
WaitBefore: false, WaitBefore: false,
WaitInterval: DefaultWaitInterval, WaitInterval: DefaultWaitInterval,
WaitTimeout: DefaultWaitTimeout, WaitTimeout: DefaultWaitTimeout,
Limit: -1,
TargetVersion: "",
Log: os.Stdout,
} }
} }
// GetDriver initializes the appropriate database driver // GetDriver initializes the appropriate database driver
func (db *DB) GetDriver() (Driver, error) { func (db *DB) GetDriver() (Driver, error) {
if db.DatabaseURL == nil || db.DatabaseURL.Scheme == "" { if db.DatabaseURL == nil || db.DatabaseURL.Scheme == "" {
return nil, errors.New("invalid url, have you set your --url flag or DATABASE_URL environment variable?") return nil, errors.New("invalid url")
} }
driverFunc := drivers[db.DatabaseURL.Scheme] driverFunc := drivers[db.DatabaseURL.Scheme]
@ -86,7 +80,6 @@ func (db *DB) GetDriver() (Driver, error) {
config := DriverConfig{ config := DriverConfig{
DatabaseURL: db.DatabaseURL, DatabaseURL: db.DatabaseURL,
MigrationsTableName: db.MigrationsTableName, MigrationsTableName: db.MigrationsTableName,
Log: db.Log,
} }
return driverFunc(config), nil return driverFunc(config), nil
@ -111,22 +104,22 @@ func (db *DB) wait(drv Driver) error {
return nil return nil
} }
fmt.Fprint(db.Log, "Waiting for database") fmt.Print("Waiting for database")
for i := 0 * time.Second; i < db.WaitTimeout; i += db.WaitInterval { for i := 0 * time.Second; i < db.WaitTimeout; i += db.WaitInterval {
fmt.Fprint(db.Log, ".") fmt.Print(".")
time.Sleep(db.WaitInterval) time.Sleep(db.WaitInterval)
// attempt connection to database server // attempt connection to database server
err = drv.Ping() err = drv.Ping()
if err == nil { if err == nil {
// connection successful // connection successful
fmt.Fprint(db.Log, "\n") fmt.Print("\n")
return nil return nil
} }
} }
// if we find outselves here, we could not connect within the timeout // if we find outselves here, we could not connect within the timeout
fmt.Fprint(db.Log, "\n") fmt.Print("\n")
return fmt.Errorf("unable to connect to database: %s", err) return fmt.Errorf("unable to connect to database: %s", err)
} }
@@ -221,7 +214,7 @@ func (db *DB) dumpSchema(drv Driver) error {
return err
}
- fmt.Fprintf(db.Log, "Writing: %s\n", db.SchemaFile)
+ fmt.Printf("Writing: %s\n", db.SchemaFile)
// ensure schema directory exists
if err = ensureDir(filepath.Dir(db.SchemaFile)); err != nil {
@@ -229,7 +222,7 @@ func (db *DB) dumpSchema(drv Driver) error {
}
// write schema to file
- return os.WriteFile(db.SchemaFile, schema, 0644)
+ return ioutil.WriteFile(db.SchemaFile, schema, 0644)
}
// ensureDir creates a directory if it does not already exist
@@ -259,7 +252,7 @@ func (db *DB) NewMigration(name string) error {
// check file does not already exist
path := filepath.Join(db.MigrationsDir, name)
- fmt.Fprintf(db.Log, "Creating migration: %s\n", path)
+ fmt.Printf("Creating migration: %s\n", path)
if _, err := os.Stat(path); !os.IsNotExist(err) {
return fmt.Errorf("file already exists")
@@ -347,12 +340,12 @@ func (db *DB) migrate(drv Driver) error {
for _, filename := range files {
ver := migrationVersion(filename)
- if ok := applied[ver]; ok && ver != db.TargetVersion {
+ if ok := applied[ver]; ok {
// migration already applied
continue
}
- fmt.Fprintf(db.Log, "Applying: %s\n", filename)
+ fmt.Printf("Applying: %s\n", filename)
up, _, err := parseMigration(filepath.Join(db.MigrationsDir, filename))
if err != nil {
@@ -365,7 +358,7 @@ func (db *DB) migrate(drv Driver) error {
if err != nil {
return err
} else if db.Verbose {
- db.printVerbose(result)
+ printVerbose(result)
}
// record migration
@@ -383,11 +376,6 @@ func (db *DB) migrate(drv Driver) error {
if err != nil {
return err
}
- if ver == db.TargetVersion {
- fmt.Fprintf(db.Log, "Reached target version %s\n", ver)
- break
- }
}
// automatically update schema file, silence errors
@@ -398,19 +386,19 @@ func (db *DB) migrate(drv Driver) error {
return nil
}
- func (db *DB) printVerbose(result sql.Result) {
+ func printVerbose(result sql.Result) {
lastInsertID, err := result.LastInsertId()
if err == nil {
- fmt.Fprintf(db.Log, "Last insert ID: %d\n", lastInsertID)
+ fmt.Printf("Last insert ID: %d\n", lastInsertID)
}
rowsAffected, err := result.RowsAffected()
if err == nil {
- fmt.Fprintf(db.Log, "Rows affected: %d\n", rowsAffected)
+ fmt.Printf("Rows affected: %d\n", rowsAffected)
}
}
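Both variants of printVerbose read the two counters that database/sql exposes on a sql.Result; drivers that don't support one of them return an error, which is why each value is printed only when err == nil. A rough standalone sketch of the same logic against an injected io.Writer (mirroring the db.Log side of the diff; the fakeResult type is illustrative only):

```go
package main

import (
	"database/sql"
	"fmt"
	"io"
	"os"
)

// printVerbose writes whichever counters the driver supports to w.
func printVerbose(w io.Writer, result sql.Result) {
	if id, err := result.LastInsertId(); err == nil {
		fmt.Fprintf(w, "Last insert ID: %d\n", id)
	}
	if n, err := result.RowsAffected(); err == nil {
		fmt.Fprintf(w, "Rows affected: %d\n", n)
	}
}

// fakeResult stands in for a real driver result in this sketch.
type fakeResult struct{ id, rows int64 }

func (r fakeResult) LastInsertId() (int64, error) { return r.id, nil }
func (r fakeResult) RowsAffected() (int64, error) { return r.rows, nil }

func main() {
	printVerbose(os.Stdout, fakeResult{id: 1, rows: 3})
}
```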
func findMigrationFiles(dir string, re *regexp.Regexp) ([]string, error) {
- files, err := os.ReadDir(dir)
+ files, err := ioutil.ReadDir(dir)
if err != nil {
return nil, fmt.Errorf("could not find migrations directory `%s`", dir)
}
@@ -478,54 +466,27 @@ func (db *DB) Rollback() error {
}
defer dbutil.MustClose(sqlDB)
- limit := db.Limit
- // default limit is -1, if we don't specify a version it should only rollback one version, not all
- if limit <= 0 && db.TargetVersion == "" {
- limit = 1
- }
- applied, err := drv.SelectMigrations(sqlDB, limit)
+ applied, err := drv.SelectMigrations(sqlDB, 1)
if err != nil {
return err
}
- if len(applied) == 0 {
- return fmt.Errorf("can't rollback, no migrations found")
- }
- var versions []string
- for v := range applied {
- versions = append(versions, v)
- }
- // new → old
- sort.Sort(sort.Reverse(sort.StringSlice(versions)))
- if db.TargetVersion != "" {
- cache := map[string]bool{}
- found := false
- // latest version comes first, so take every version until the version matches
- for _, ver := range versions {
- if ver == db.TargetVersion {
- found = true
- break
- }
- cache[ver] = true
- }
- if !found {
- return fmt.Errorf("target version not found")
- }
- applied = cache
- }
- for version := range applied {
- filename, err := findMigrationFile(db.MigrationsDir, version)
+ // grab most recent applied migration (applied has len=1)
+ latest := ""
+ for ver := range applied {
+ latest = ver
+ }
+ if latest == "" {
+ return fmt.Errorf("can't rollback: no migrations have been applied")
+ }
+ filename, err := findMigrationFile(db.MigrationsDir, latest)
if err != nil {
return err
}
- fmt.Fprintf(db.Log, "Rolling back: %s\n", filename)
+ fmt.Printf("Rolling back: %s\n", filename)
_, down, err := parseMigration(filepath.Join(db.MigrationsDir, filename))
if err != nil {
return err
@@ -537,11 +498,11 @@ func (db *DB) Rollback() error {
if err != nil {
return err
} else if db.Verbose {
- db.printVerbose(result)
+ printVerbose(result)
}
// remove migration record
- return drv.DeleteMigration(tx, version)
+ return drv.DeleteMigration(tx, latest)
}
if down.Options.Transaction() {
@@ -555,7 +516,6 @@ func (db *DB) Rollback() error {
if err != nil {
return err
}
- }
// automatically update schema file, silence errors
if db.AutoDumpSchema {
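On the main side, rolling back to a target works by sorting the applied versions newest-first and collecting every version encountered before the target is reached. A small, self-contained illustration of that selection step (the version strings are the sample migration names used elsewhere in these tests):

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	applied := []string{"20151129054053", "20200227231541", "20220607110405"}
	target := "20151129054053"

	// newest first
	sort.Sort(sort.Reverse(sort.StringSlice(applied)))

	// collect everything applied after the target; stop at the target itself
	var toRollback []string
	for _, ver := range applied {
		if ver == target {
			break
		}
		toRollback = append(toRollback, ver)
	}
	fmt.Println(toRollback) // [20220607110405 20200227231541]
}
```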
@@ -588,15 +548,15 @@ func (db *DB) Status(quiet bool) (int, error) {
line = fmt.Sprintf("[ ] %s", res.Filename)
}
if !quiet {
- fmt.Fprintln(db.Log, line)
+ fmt.Println(line)
}
}
totalPending := len(results) - totalApplied
if !quiet {
- fmt.Fprintln(db.Log)
- fmt.Fprintf(db.Log, "Applied: %d\n", totalApplied)
- fmt.Fprintf(db.Log, "Pending: %d\n", totalPending)
+ fmt.Println()
+ fmt.Printf("Applied: %d\n", totalApplied)
+ fmt.Printf("Pending: %d\n", totalPending)
}
return totalPending, nil
@@ -619,7 +579,7 @@ func (db *DB) CheckMigrationsStatus(drv Driver) ([]StatusResult, error) {
}
defer dbutil.MustClose(sqlDB)
- applied, err := drv.SelectMigrations(sqlDB, db.Limit)
+ applied, err := drv.SelectMigrations(sqlDB, -1)
if err != nil {
return nil, err
}

View file

@@ -1,6 +1,7 @@
package dbmate_test
import (
+ "io/ioutil"
"net/url"
"os"
"path/filepath"
@@ -47,8 +48,6 @@ func TestNew(t *testing.T) {
require.False(t, db.WaitBefore)
require.Equal(t, time.Second, db.WaitInterval)
require.Equal(t, 60*time.Second, db.WaitTimeout)
- require.Equal(t, -1, db.Limit)
- require.Equal(t, "", db.TargetVersion)
}
func TestGetDriver(t *testing.T) {
@@ -56,14 +55,14 @@ func TestGetDriver(t *testing.T) {
db := dbmate.New(nil)
drv, err := db.GetDriver()
require.Nil(t, drv)
- require.EqualError(t, err, "invalid url, have you set your --url flag or DATABASE_URL environment variable?")
+ require.EqualError(t, err, "invalid url")
})
t.Run("missing schema", func(t *testing.T) {
db := dbmate.New(dbutil.MustParseURL("//hi"))
drv, err := db.GetDriver()
require.Nil(t, drv)
- require.EqualError(t, err, "invalid url, have you set your --url flag or DATABASE_URL environment variable?")
+ require.EqualError(t, err, "invalid url")
})
t.Run("invalid driver", func(t *testing.T) {
@@ -103,7 +102,7 @@ func TestDumpSchema(t *testing.T) {
db := newTestDB(t, u)
// create custom schema file directory
- dir, err := os.MkdirTemp("", "dbmate")
+ dir, err := ioutil.TempDir("", "dbmate")
require.NoError(t, err)
defer func() {
err := os.RemoveAll(dir)
@@ -130,7 +129,7 @@ func TestDumpSchema(t *testing.T) {
require.NoError(t, err)
// verify schema
- schema, err := os.ReadFile(db.SchemaFile)
+ schema, err := ioutil.ReadFile(db.SchemaFile)
require.NoError(t, err)
require.Contains(t, string(schema), "-- PostgreSQL database dump")
}
@@ -141,7 +140,7 @@ func TestAutoDumpSchema(t *testing.T) {
db.AutoDumpSchema = true
// create custom schema file directory
- dir, err := os.MkdirTemp("", "dbmate")
+ dir, err := ioutil.TempDir("", "dbmate")
require.NoError(t, err)
defer func() {
err := os.RemoveAll(dir)
@@ -164,7 +163,7 @@ func TestAutoDumpSchema(t *testing.T) {
require.NoError(t, err)
// verify schema
- schema, err := os.ReadFile(db.SchemaFile)
+ schema, err := ioutil.ReadFile(db.SchemaFile)
require.NoError(t, err)
require.Contains(t, string(schema), "-- PostgreSQL database dump")
@@ -177,7 +176,7 @@ func TestAutoDumpSchema(t *testing.T) {
require.NoError(t, err)
// schema should be recreated
- schema, err = os.ReadFile(db.SchemaFile)
+ schema, err = ioutil.ReadFile(db.SchemaFile)
require.NoError(t, err)
require.Contains(t, string(schema), "-- PostgreSQL database dump")
}
@@ -244,11 +243,9 @@ func TestWaitBeforeVerbose(t *testing.T) {
`Applying: 20151129054053_test_migration.sql
Rows affected: 1
Applying: 20200227231541_test_posts.sql
- Rows affected: 0
- Applying: 20220607110405_test_category.sql
Rows affected: 0`)
require.Contains(t, output,
- `Rolling back: 20220607110405_test_category.sql
+ `Rolling back: 20200227231541_test_posts.sql
Rows affected: 0`)
}
@@ -295,37 +292,6 @@ func TestMigrate(t *testing.T) {
}
}
- func TestMigrateToTarget(t *testing.T) {
- for _, u := range testURLs() {
- t.Run(u.Scheme, func(t *testing.T) {
- db := newTestDB(t, u)
- db.TargetVersion = "20151129054053"
- drv, err := db.GetDriver()
- require.NoError(t, err)
- // drop and recreate database
- err = db.Drop()
- require.NoError(t, err)
- err = db.Create()
- require.NoError(t, err)
- // migrate
- err = db.Migrate()
- require.NoError(t, err)
- // verify results
- sqlDB, err := drv.Open()
- require.NoError(t, err)
- defer dbutil.MustClose(sqlDB)
- count := 0
- err = sqlDB.QueryRow(`select count(*) from schema_migrations`).Scan(&count)
- require.NoError(t, err)
- require.Equal(t, 1, count)
- })
- }
- }
func TestUp(t *testing.T) {
for _, u := range testURLs() {
t.Run(u.Scheme, func(t *testing.T) {
@@ -385,59 +351,13 @@ func TestRollback(t *testing.T) {
require.NoError(t, err)
require.Equal(t, 1, count)
- err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
+ err = sqlDB.QueryRow("select count(*) from posts").Scan(&count)
require.Nil(t, err)
// rollback
err = db.Rollback()
require.NoError(t, err)
- // verify rollback
- err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count)
- require.NoError(t, err)
- require.Equal(t, 2, count)
- err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
- require.NotNil(t, err)
- require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
- })
- }
- }
- func TestRollbackToTarget(t *testing.T) {
- for _, u := range testURLs() {
- t.Run(u.Scheme, func(t *testing.T) {
- db := newTestDB(t, u)
- drv, err := db.GetDriver()
- require.NoError(t, err)
- // drop, recreate, and migrate database
- err = db.Drop()
- require.NoError(t, err)
- err = db.Create()
- require.NoError(t, err)
- err = db.Migrate()
- require.NoError(t, err)
- // verify migration
- sqlDB, err := drv.Open()
- require.NoError(t, err)
- defer dbutil.MustClose(sqlDB)
- count := 0
- err = sqlDB.QueryRow(`select count(*) from schema_migrations
- where version = '20151129054053'`).Scan(&count)
- require.NoError(t, err)
- require.Equal(t, 1, count)
- err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
- require.Nil(t, err)
- // rollback
- db.TargetVersion = "20151129054053"
- err = db.Rollback()
- require.NoError(t, err)
// verify rollback
err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count)
require.NoError(t, err)
@@ -446,60 +366,6 @@ func TestRollbackToTarget(t *testing.T) {
err = sqlDB.QueryRow("select count(*) from posts").Scan(&count)
require.NotNil(t, err)
require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
- err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
- require.NotNil(t, err)
- require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
- })
- }
- }
- func TestRollbackToLimit(t *testing.T) {
- for _, u := range testURLs() {
- t.Run(u.Scheme, func(t *testing.T) {
- db := newTestDB(t, u)
- drv, err := db.GetDriver()
- require.NoError(t, err)
- // drop, recreate, and migrate database
- err = db.Drop()
- require.NoError(t, err)
- err = db.Create()
- require.NoError(t, err)
- err = db.Migrate()
- require.NoError(t, err)
- // verify migration
- sqlDB, err := drv.Open()
- require.NoError(t, err)
- defer dbutil.MustClose(sqlDB)
- count := 0
- err = sqlDB.QueryRow(`select count(*) from schema_migrations
- where version = '20151129054053'`).Scan(&count)
- require.NoError(t, err)
- require.Equal(t, 1, count)
- err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
- require.Nil(t, err)
- // rollback
- db.Limit = 2
- err = db.Rollback()
- require.NoError(t, err)
- // verify rollback
- err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count)
- require.NoError(t, err)
- require.Equal(t, 1, count)
- err = sqlDB.QueryRow("select count(*) from posts").Scan(&count)
- require.NotNil(t, err)
- require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
- err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
- require.NotNil(t, err)
- require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
})
}
}
@@ -525,7 +391,7 @@ func TestStatus(t *testing.T) {
// two pending
results, err := db.CheckMigrationsStatus(drv)
require.NoError(t, err)
- require.Len(t, results, 3)
+ require.Len(t, results, 2)
require.False(t, results[0].Applied)
require.False(t, results[1].Applied)
@@ -533,13 +399,12 @@ func TestStatus(t *testing.T) {
err = db.Migrate()
require.NoError(t, err)
- // three applied
+ // two applied
results, err = db.CheckMigrationsStatus(drv)
require.NoError(t, err)
- require.Len(t, results, 3)
+ require.Len(t, results, 2)
require.True(t, results[0].Applied)
require.True(t, results[1].Applied)
- require.True(t, results[2].Applied)
// rollback last migration
err = db.Rollback()
@@ -548,10 +413,9 @@ func TestStatus(t *testing.T) {
// one applied, one pending
results, err = db.CheckMigrationsStatus(drv)
require.NoError(t, err)
- require.Len(t, results, 3)
+ require.Len(t, results, 2)
require.True(t, results[0].Applied)
- require.True(t, results[1].Applied)
- require.False(t, results[2].Applied)
+ require.False(t, results[1].Applied)
})
}
}

View file

@@ -2,7 +2,6 @@ package dbmate
import (
"database/sql"
- "io"
"net/url"
"github.com/amacneil/dbmate/pkg/dbutil"
@@ -26,7 +25,6 @@ type Driver interface {
type DriverConfig struct {
DatabaseURL *url.URL
MigrationsTableName string
- Log io.Writer
}
// DriverFunc represents a driver constructor

View file

@@ -2,7 +2,7 @@ package dbmate
import (
"fmt"
- "os"
+ "io/ioutil"
"regexp"
"strings"
)
@@ -33,7 +33,7 @@ func NewMigration() Migration {
// parseMigration reads a migration file and returns (up Migration, down Migration, error)
func parseMigration(path string) (Migration, Migration, error) {
- data, err := os.ReadFile(path)
+ data, err := ioutil.ReadFile(path)
if err != nil {
return NewMigration(), NewMigration(), err
}
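parseMigration splits a single SQL file into its up and down halves using the `-- migrate:up` / `-- migrate:down` markers visible in the migration files elsewhere in this diff. A simplified sketch of that split (it ignores the per-block options, such as transaction settings, that the real parser also handles):

```go
package main

import (
	"fmt"
	"strings"
)

// splitMigration returns the up and down sections of a migration file body.
func splitMigration(contents string) (up, down string) {
	const upMarker, downMarker = "-- migrate:up", "-- migrate:down"
	if i := strings.Index(contents, downMarker); i >= 0 {
		up, down = contents[:i], contents[i:]
	} else {
		up = contents
	}
	up = strings.TrimPrefix(strings.TrimSpace(up), upMarker)
	down = strings.TrimPrefix(strings.TrimSpace(down), downMarker)
	return strings.TrimSpace(up), strings.TrimSpace(down)
}

func main() {
	up, down := splitMigration(`-- migrate:up
create table categories (id integer);

-- migrate:down
drop table categories;
`)
	fmt.Printf("up:   %q\ndown: %q\n", up, down)
}
```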

View file

@@ -1,4 +1,4 @@
package dbmate
// Version of dbmate
- const Version = "1.12.1"
+ const Version = "1.11.0"

View file

@@ -4,7 +4,6 @@ import (
"bytes"
"database/sql"
"fmt"
- "io"
"net/url"
"regexp"
"sort"
@@ -24,7 +23,6 @@ func init() {
type Driver struct {
migrationsTableName string
databaseURL *url.URL
- log io.Writer
}
// NewDriver initializes the driver
@@ -32,7 +30,6 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver {
return &Driver{
migrationsTableName: config.MigrationsTableName,
databaseURL: config.DatabaseURL,
- log: config.Log,
}
}
@@ -111,7 +108,7 @@ func (drv *Driver) quoteIdentifier(str string) string {
// CreateDatabase creates the specified database
func (drv *Driver) CreateDatabase() error {
name := drv.databaseName()
- fmt.Fprintf(drv.log, "Creating: %s\n", name)
+ fmt.Printf("Creating: %s\n", name)
db, err := drv.openClickHouseDB()
if err != nil {
@@ -127,7 +124,7 @@ func (drv *Driver) CreateDatabase() error {
// DropDatabase drops the specified database (if it exists)
func (drv *Driver) DropDatabase() error {
name := drv.databaseName()
- fmt.Fprintf(drv.log, "Dropping: %s\n", name)
+ fmt.Printf("Dropping: %s\n", name)
db, err := drv.openClickHouseDB()
if err != nil {

View file

@@ -4,7 +4,6 @@ import (
"bytes"
"database/sql"
"fmt"
- "io"
"net/url"
"strings"
@@ -22,7 +21,6 @@ func init() {
type Driver struct {
migrationsTableName string
databaseURL *url.URL
- log io.Writer
}
// NewDriver initializes the driver
@@ -30,7 +28,6 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver {
return &Driver{
migrationsTableName: config.MigrationsTableName,
databaseURL: config.DatabaseURL,
- log: config.Log,
}
}
@@ -52,7 +49,7 @@ func connectionString(u *url.URL) string {
// Get decoded user:pass
userPassEncoded := u.User.String()
- userPass, _ := url.PathUnescape(userPassEncoded)
+ userPass, _ := url.QueryUnescape(userPassEncoded)
// Build DSN w/ user:pass percent-decoded
normalizedString := ""
@@ -95,7 +92,7 @@ func (drv *Driver) quoteIdentifier(str string) string {
// CreateDatabase creates the specified database
func (drv *Driver) CreateDatabase() error {
name := dbutil.DatabaseName(drv.databaseURL)
- fmt.Fprintf(drv.log, "Creating: %s\n", name)
+ fmt.Printf("Creating: %s\n", name)
db, err := drv.openRootDB()
if err != nil {
@@ -112,7 +109,7 @@ func (drv *Driver) CreateDatabase() error {
// DropDatabase drops the specified database (if it exists)
func (drv *Driver) DropDatabase() error {
name := dbutil.DatabaseName(drv.databaseURL)
- fmt.Fprintf(drv.log, "Dropping: %s\n", name)
+ fmt.Printf("Dropping: %s\n", name)
db, err := drv.openRootDB()
if err != nil {

View file

@@ -78,18 +78,6 @@ func TestConnectionString(t *testing.T) {
require.Equal(t, "duhfsd7s:123!@123!@@tcp(host:123)/foo?flag=on&multiStatements=true", s)
})
- t.Run("url encoding", func(t *testing.T) {
- u, err := url.Parse("mysql://bob%2Balice:secret%5E%5B%2A%28%29@host:123/foo")
- require.NoError(t, err)
- require.Equal(t, "bob+alice:secret%5E%5B%2A%28%29", u.User.String())
- require.Equal(t, "123", u.Port())
- s := connectionString(u)
- // ensure that '+' is correctly encoded by url.PathUnescape as '+'
- // (not whitespace as url.QueryUnescape generates)
- require.Equal(t, "bob+alice:secret^[*()@tcp(host:123)/foo?multiStatements=true", s)
- })
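The removed subtest documents why the MySQL driver decodes userinfo with url.PathUnescape rather than url.QueryUnescape: url.User.String() leaves a literal '+' unescaped, and QueryUnescape would then turn it into a space. A quick standalone demonstration of the difference (outside dbmate):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	u, _ := url.Parse("mysql://bob%2Balice:secret%5E%5B%2A%28%29@host:123/foo")
	enc := u.User.String() // "bob+alice:secret%5E%5B%2A%28%29" – '+' stays literal

	p, _ := url.PathUnescape(enc)  // "bob+alice:secret^[*()"
	q, _ := url.QueryUnescape(enc) // "bob alice:secret^[*()" – '+' decoded as a space

	fmt.Println(enc)
	fmt.Println(p)
	fmt.Println(q)
}
```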
t.Run("socket", func(t *testing.T) { t.Run("socket", func(t *testing.T) {
// test with no user/pass // test with no user/pass
u, err := url.Parse("mysql:///foo?socket=/var/run/mysqld/mysqld.sock&flag=on") u, err := url.Parse("mysql:///foo?socket=/var/run/mysqld/mysqld.sock&flag=on")
@ -179,8 +167,10 @@ func TestMySQLDumpSchema(t *testing.T) {
drv.databaseURL.Path = "/fakedb" drv.databaseURL.Path = "/fakedb"
schema, err = drv.DumpSchema(db) schema, err = drv.DumpSchema(db)
require.Nil(t, schema) require.Nil(t, schema)
require.Error(t, err) require.EqualError(t, err, "mysqldump: [Warning] Using a password "+
require.Contains(t, err.Error(), "Unknown database 'fakedb'") "on the command line interface can be insecure.\n"+
"mysqldump: Got error: 1049: "+
"Unknown database 'fakedb' when selecting the database")
} }
func TestMySQLDatabaseExists(t *testing.T) { func TestMySQLDatabaseExists(t *testing.T) {

View file

@@ -4,9 +4,7 @@ import (
"bytes"
"database/sql"
"fmt"
- "io"
"net/url"
- "runtime"
"strings"
"github.com/amacneil/dbmate/pkg/dbmate"
@@ -24,7 +22,6 @@ func init() {
type Driver struct {
migrationsTableName string
databaseURL *url.URL
- log io.Writer
}
// NewDriver initializes the driver
@@ -32,7 +29,6 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver {
return &Driver{
migrationsTableName: config.MigrationsTableName,
databaseURL: config.DatabaseURL,
- log: config.Log,
}
}
@@ -49,15 +45,8 @@ func connectionString(u *url.URL) string {
// default hostname
if hostname == "" {
- switch runtime.GOOS {
- case "linux":
- query.Set("host", "/var/run/postgresql")
- case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
- query.Set("host", "/tmp")
- default:
hostname = "localhost"
}
- }
// host param overrides url hostname
if query.Get("host") != "" {
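On the main side, when the URL has no hostname the Postgres driver picks a default Unix-socket directory per operating system instead of always falling back to TCP on localhost. A standalone sketch of that selection (mirroring the switch removed above; the `defaultHost` helper is illustrative):

```go
package main

import (
	"fmt"
	"runtime"
)

// defaultHost returns the connection host to use when the URL omits one.
func defaultHost() string {
	switch runtime.GOOS {
	case "linux":
		return "/var/run/postgresql" // conventional postgres socket directory
	case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
		return "/tmp"
	default:
		return "localhost" // e.g. windows: fall back to TCP
	}
}

func main() {
	fmt.Println("default host:", defaultHost())
}
```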
@@ -123,7 +112,7 @@ func (drv *Driver) openPostgresDB() (*sql.DB, error) {
// CreateDatabase creates the specified database
func (drv *Driver) CreateDatabase() error {
name := dbutil.DatabaseName(drv.databaseURL)
- fmt.Fprintf(drv.log, "Creating: %s\n", name)
+ fmt.Printf("Creating: %s\n", name)
db, err := drv.openPostgresDB()
if err != nil {
@@ -140,7 +129,7 @@ func (drv *Driver) CreateDatabase() error {
// DropDatabase drops the specified database (if it exists)
func (drv *Driver) DropDatabase() error {
name := dbutil.DatabaseName(drv.databaseURL)
- fmt.Fprintf(drv.log, "Dropping: %s\n", name)
+ fmt.Printf("Dropping: %s\n", name)
db, err := drv.openPostgresDB()
if err != nil {
@@ -244,7 +233,7 @@ func (drv *Driver) CreateMigrationsTable(db *sql.DB) error {
// in theory we could attempt to create the schema every time, but we avoid that
// in case the user doesn't have permissions to create schemas
- fmt.Fprintf(drv.log, "Creating schema: %s\n", schema)
+ fmt.Printf("Creating schema: %s\n", schema)
_, err = db.Exec(fmt.Sprintf("create schema if not exists %s", schema))
if err != nil {
return err

View file

@@ -4,7 +4,6 @@ import (
"database/sql"
"net/url"
"os"
- "runtime"
"testing"
"github.com/amacneil/dbmate/pkg/dbmate"
@@ -51,24 +50,13 @@ func TestGetDriver(t *testing.T) {
require.Equal(t, "schema_migrations", drv.migrationsTableName)
}
- func defaultConnString() string {
- switch runtime.GOOS {
- case "linux":
- return "postgres://:5432/foo?host=%2Fvar%2Frun%2Fpostgresql"
- case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
- return "postgres://:5432/foo?host=%2Ftmp"
- default:
- return "postgres://localhost:5432/foo"
- }
- }
func TestConnectionString(t *testing.T) {
cases := []struct {
input string
expected string
}{
// defaults
- {"postgres:///foo", defaultConnString()},
+ {"postgres:///foo", "postgres://localhost:5432/foo"},
// support custom url params
{"postgres://bob:secret@myhost:1234/foo?bar=baz", "postgres://bob:secret@myhost:1234/foo?bar=baz"},
// support `host` and `port` via url params
@@ -97,11 +85,11 @@ func TestConnectionArgsForDump(t *testing.T) {
expected []string
}{
// defaults
- {"postgres:///foo", []string{defaultConnString()}},
+ {"postgres:///foo", []string{"postgres://localhost:5432/foo"}},
// support single schema
- {"postgres:///foo?search_path=foo", []string{"--schema", "foo", defaultConnString()}},
+ {"postgres:///foo?search_path=foo", []string{"--schema", "foo", "postgres://localhost:5432/foo"}},
// support multiple schemas
- {"postgres:///foo?search_path=foo,public", []string{"--schema", "foo", "--schema", "public", defaultConnString()}},
+ {"postgres:///foo?search_path=foo,public", []string{"--schema", "foo", "--schema", "public", "postgres://localhost:5432/foo"}},
}
for _, c := range cases {
@@ -186,8 +174,8 @@ func TestPostgresDumpSchema(t *testing.T) {
drv.databaseURL.Path = "/fakedb"
schema, err = drv.DumpSchema(db)
require.Nil(t, schema)
- require.Error(t, err)
- require.Contains(t, err.Error(), "database \"fakedb\" does not exist")
+ require.EqualError(t, err, "pg_dump: [archiver (db)] connection to database "+
+ "\"fakedb\" failed: FATAL: database \"fakedb\" does not exist")
})
t.Run("custom migrations table with schema", func(t *testing.T) {

View file

@@ -1,4 +1,3 @@
- //go:build cgo
// +build cgo
package sqlite
@@ -7,7 +6,6 @@ import (
"bytes"
"database/sql"
"fmt"
- "io"
"net/url"
"os"
"regexp"
@@ -29,7 +27,6 @@ func init() {
type Driver struct {
migrationsTableName string
databaseURL *url.URL
- log io.Writer
}
// NewDriver initializes the driver
@@ -37,7 +34,6 @@ func NewDriver(config dbmate.DriverConfig) dbmate.Driver {
return &Driver{
migrationsTableName: config.MigrationsTableName,
databaseURL: config.DatabaseURL,
- log: config.Log,
}
}
@@ -60,7 +56,7 @@ func (drv *Driver) Open() (*sql.DB, error) {
// CreateDatabase creates the specified database
func (drv *Driver) CreateDatabase() error {
- fmt.Fprintf(drv.log, "Creating: %s\n", ConnectionString(drv.databaseURL))
+ fmt.Printf("Creating: %s\n", ConnectionString(drv.databaseURL))
db, err := drv.Open()
if err != nil {
@@ -74,7 +70,7 @@ func (drv *Driver) CreateDatabase() error {
// DropDatabase drops the specified database (if it exists)
func (drv *Driver) DropDatabase() error {
path := ConnectionString(drv.databaseURL)
- fmt.Fprintf(drv.log, "Dropping: %s\n", path)
+ fmt.Printf("Dropping: %s\n", path)
exists, err := drv.DatabaseExists()
if err != nil {

View file

@@ -1,4 +1,3 @@
- //go:build cgo
// +build cgo
package sqlite

View file

@@ -1,9 +0,0 @@
- -- migrate:up
- create table categories (
- id integer,
- title varchar(50),
- slug varchar(100)
- );
- -- migrate:down
- drop table categories;