Mirror of https://github.com/TECHNOFAB11/dbmate.git (synced 2025-12-11 23:50:04 +01:00)

Compare commits

19 commits
| SHA1 |
| --- |
| 8103b25135 |
| 60e93d5c10 |
| a5b92832d3 |
| a55233c50b |
| 5b60f68107 |
| 52cd75fbc1 |
| c99d611cb4 |
| 955c9ac653 |
| f69f1dea03 |
| 06d8bb7567 |
| 81fe01b34f |
| fb17e8eeca |
| 6243c2b9a9 |
| 26d5f9f306 |
| 511336d346 |
| 4a3698c7ac |
| 7c6f9ed747 |
| cdbbdd65ea |
| abd02b7f0b |
22 changed files with 522 additions and 233 deletions
.github/dependabot.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
.github/workflows/build.yml (deleted, 63 lines)

@@ -1,63 +0,0 @@
name: CI

on:
  push:
    branches: [main]
    tags: "v*"
  pull_request:
    branches: [main]

jobs:
  build:
    name: Build & Test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Environment
        run: |
          set -x
          docker version
          docker-compose version

      - name: Cache
        uses: actions/cache@v2
        with:
          key: cache
          path: .cache

      - name: Build docker image
        run: |
          set -x
          docker-compose build
          docker-compose run --rm --no-deps dbmate --version

      - name: Build binaries
        run: |
          set -x
          docker-compose run --rm --no-deps dev make build-all
          dist/dbmate-linux-amd64 --version

      - name: Lint
        run: docker-compose run --rm --no-deps dev make lint

      - name: Start test dependencies
        run: |
          set -x
          docker-compose pull --quiet
          docker-compose up --detach

      - name: Run tests
        run: |
          set -x
          docker-compose run --rm dev make wait
          docker-compose run --rm dev make test

      - name: Release
        uses: softprops/action-gh-release@v1
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        with:
          files: dist/*
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (new file, 151 lines)

@@ -0,0 +1,151 @@
name: CI

on:
  push:
    branches: [main]
    tags: "*"
  pull_request:

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: linux
            image: ubuntu-latest
            arch: amd64
            env: {}
          - os: linux
            image: ubuntu-latest
            arch: arm64
            setup: sudo apt-get update && sudo apt-get install -qq gcc-aarch64-linux-gnu
            env:
              CC: aarch64-linux-gnu-gcc
              CXX: aarch64-linux-gnu-g++
          - os: macos
            image: macos-latest
            arch: amd64
            env: {}
          - os: macos
            image: macos-latest
            arch: arm64
            env: {}
          - os: windows
            image: windows-latest
            arch: amd64
            env: {}

    name: Build (${{ matrix.os }}/${{ matrix.arch }})
    runs-on: ${{ matrix.image }}
    env: ${{ matrix.env }}

    steps:
      - uses: actions/checkout@v2

      - uses: actions/setup-go@v2
        with:
          go-version: "1.17"

      - name: Setup environment
        run: ${{ matrix.setup }}

      - run: go mod download

      - run: make build ls
        env:
          GOARCH: ${{ matrix.arch }}
          OUTPUT: dbmate-${{ matrix.os }}-${{ matrix.arch }}

      - run: dist/dbmate-${{ matrix.os }}-${{ matrix.arch }} --help
        if: ${{ matrix.arch == 'amd64' }}

      - name: Publish binaries
        uses: softprops/action-gh-release@v1
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        with:
          files: dist/dbmate-*
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  docker:
    name: Docker Test (linux/amd64)
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Configure QEMU
        uses: docker/setup-qemu-action@v1

      - name: Configure Buildx
        uses: docker/setup-buildx-action@v1

      - name: Check Docker environment
        run: |
          set -x
          docker version
          docker buildx version
          docker-compose version

      - name: Build Docker image
        run: |
          set -x
          docker-compose build
          docker-compose run --rm --no-deps dbmate --version

      - name: Run make build
        run: docker-compose run --rm --no-deps dev make build ls

      - name: Run make lint
        run: docker-compose run --rm --no-deps dev make lint

      - name: Start test dependencies
        run: |
          set -x
          docker-compose pull --quiet
          docker-compose up --detach
          docker-compose run --rm dev make wait

      - name: Run make test
        run: docker-compose run --rm dev make test

      - name: Login to Docker Hub
        uses: docker/login-action@v1
        if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v1
        if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate Docker image tags
        id: meta
        uses: docker/metadata-action@v3
        if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
        with:
          images: |
            ${{ github.repository }}
            ghcr.io/${{ github.repository }}
          tags: |
            type=ref,event=branch
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}

      - name: Publish Docker image
        uses: docker/build-push-action@v2
        if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }}
        with:
          context: .
          target: release
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
.github/workflows/codeql-analysis.yml (deleted, 40 lines)

@@ -1,40 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.

name: "CodeQL"

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  schedule:
    - cron: "0 0 * * 4"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: ["go"]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
@@ -5,12 +5,12 @@ linters:
  - depguard
  - errcheck
  - goimports
  - golint
  - gosimple
  - govet
  - ineffassign
  - misspell
  - nakedret
  - revive
  - rowserrcheck
  - staticcheck
  - structcheck
.vscode/extensions.json (new file, 4 lines)

@@ -0,0 +1,4 @@
// -*- jsonc -*-
{
  "recommendations": ["esbenp.prettier-vscode"]
}
.vscode/settings.json (new file, 10 lines)

@@ -0,0 +1,10 @@
// -*- jsonc -*-
{
  "editor.defaultFormatter": "esbenp.prettier-vscode",
  "editor.formatOnSave": true,
  "files.eol": "\n",
  "files.insertFinalNewline": true,
  "files.trimFinalNewlines": true,
  "files.trimTrailingWhitespace": true,
  "go.formatTool": "goimports"
}
Dockerfile (25 lines changed)

@@ -1,34 +1,25 @@
# development image
FROM techknowlogick/xgo:go-1.16.x as dev
FROM golang:1.17 as dev
WORKDIR /src
ENV GOCACHE /src/.cache/go-build

# enable cgo to build sqlite
ENV CGO_ENABLED 1

# install database clients
RUN apt-get update \
  && apt-get install -qq --no-install-recommends \
    curl \
    mysql-client \
    file \
    mariadb-client \
    postgresql-client \
    sqlite3 \
  && rm -rf /var/lib/apt/lists/*

# golangci-lint
RUN curl -fsSL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh \
  | sh -s -- -b /usr/local/bin v1.39.0
  | sh -s -- -b /usr/local/bin v1.43.0

# download modules
COPY go.* ./
COPY go.* /src/
RUN go mod download

ENTRYPOINT []
CMD ["/bin/bash"]

# build stage
FROM dev as build
COPY . ./
COPY . /src/
RUN make build

# release stage

@@ -38,5 +29,5 @@ RUN apk add --no-cache \
    postgresql-client \
    sqlite \
    tzdata
COPY --from=build /src/dist/dbmate-linux-amd64 /usr/local/bin/dbmate
ENTRYPOINT ["dbmate"]
COPY --from=dev /src/dist/dbmate /usr/local/bin/dbmate
ENTRYPOINT ["/usr/local/bin/dbmate"]
Makefile (79 lines changed)

@@ -1,58 +1,59 @@
# no static linking for macos
LDFLAGS := -ldflags '-s'
# statically link binaries (to support alpine + scratch containers)
STATICLDFLAGS := -ldflags '-s -extldflags "-static"'
# avoid building code that is incompatible with static linking
TAGS := -tags netgo,osusergo,sqlite_omit_load_extension,sqlite_json
# enable cgo to build sqlite
export CGO_ENABLED = 1

# strip binaries
FLAGS := -tags sqlite_omit_load_extension,sqlite_json -ldflags '-s'

GOOS := $(shell go env GOOS)
ifeq ($(GOOS),linux)
# statically link binaries to support alpine linux
FLAGS := -tags netgo,osusergo,sqlite_omit_load_extension,sqlite_json -ldflags '-s -extldflags "-static"'
endif
ifeq ($(GOOS),darwin)
export SDKROOT ?= $(shell xcrun --sdk macosx --show-sdk-path)
endif

OUTPUT ?= dbmate

.PHONY: all
all: build test lint
all: fix build wait test

.PHONY: clean
clean:
    rm -rf dist

.PHONY: build
build: clean
    go build -o dist/$(OUTPUT) $(FLAGS) .

.PHONY: ls
ls:
    ls -lh dist/$(OUTPUT)
    file dist/$(OUTPUT)

.PHONY: test
test:
    go test -p 1 $(TAGS) $(STATICLDFLAGS) ./...

.PHONY: fix
fix:
    golangci-lint run --fix
    go test -p 1 $(FLAGS) ./...

.PHONY: lint
lint:
    golangci-lint run

.PHONY: fix
fix:
    golangci-lint run --fix

.PHONY: wait
wait:
    dist/dbmate-linux-amd64 -e CLICKHOUSE_TEST_URL wait
    dist/dbmate-linux-amd64 -e MYSQL_TEST_URL wait
    dist/dbmate-linux-amd64 -e POSTGRES_TEST_URL wait

.PHONY: clean
clean:
    rm -rf dist/*

.PHONY: build
build: clean build-linux-amd64
    ls -lh dist

.PHONY: build-linux-amd64
build-linux-amd64:
    GOOS=linux GOARCH=amd64 \
        go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-linux-amd64 .

.PHONY: build-all
build-all: clean build-linux-amd64
    GOOS=linux GOARCH=arm64 CC=aarch64-linux-gnu-gcc-5 CXX=aarch64-linux-gnu-g++-5 \
        go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-linux-arm64 .
    GOOS=darwin GOARCH=amd64 CC=o64-clang CXX=o64-clang++ \
        go build $(TAGS) $(LDFLAGS) -o dist/dbmate-macos-amd64 .
    GOOS=windows GOARCH=amd64 CC=x86_64-w64-mingw32-gcc-posix CXX=x86_64-w64-mingw32-g++-posix \
        go build $(TAGS) $(STATICLDFLAGS) -o dist/dbmate-windows-amd64.exe .
    ls -lh dist
    dist/dbmate -e CLICKHOUSE_TEST_URL wait
    dist/dbmate -e MYSQL_TEST_URL wait
    dist/dbmate -e POSTGRES_TEST_URL wait

.PHONY: docker-all
docker-all:
    docker-compose pull
    docker-compose build
    docker-compose run --rm dev make
    docker-compose run --rm dev make all

.PHONY: docker-sh
docker-sh:
README.md (28 lines changed)

@@ -66,16 +66,18 @@ $ sudo chmod +x /usr/local/bin/dbmate

**Docker**

You can run dbmate using the official docker image (remember to set `--network=host` or see [this comment](https://github.com/amacneil/dbmate/issues/128#issuecomment-615924611) for more tips on using dbmate with docker networking):
Docker images are published to both Docker Hub ([`amacneil/dbmate`](https://hub.docker.com/r/amacneil/dbmate)) and GitHub Container Registry ([`ghcr.io/amacneil/dbmate`](https://ghcr.io/amacneil/dbmate)).

Remember to set `--network=host` or see [this comment](https://github.com/amacneil/dbmate/issues/128#issuecomment-615924611) for more tips on using dbmate with docker networking:

```sh
$ docker run --rm -it --network=host amacneil/dbmate --help
$ docker run --rm -it --network=host ghcr.io/amacneil/dbmate:1 --help
```

If you wish to create or apply migrations, you will need to use Docker's [bind mount](https://docs.docker.com/storage/bind-mounts/) feature to make your local working directory (`pwd`) available inside the dbmate container:

```sh
$ docker run --rm -it --network=host -v "$(pwd)/db:/db" amacneil/dbmate new create_users_table
$ docker run --rm -it --network=host -v "$(pwd)/db:/db" ghcr.io/amacneil/dbmate:1 new create_users_table
```

**Heroku**

@@ -282,6 +284,12 @@ Writing: ./db/schema.sql

Pending migrations are always applied in numerical order. However, dbmate does not prevent migrations from being applied out of order if they are committed independently (for example: if a developer has been working on a branch for a long time, and commits a migration which has a lower version number than other already-applied migrations, dbmate will simply apply the pending migration). See [#159](https://github.com/amacneil/dbmate/issues/159) for a more detailed explanation.

You can also specify a target version to migrate up to.

```sh
$ dbmate up 20151127184807
```

### Rolling Back Migrations

By default, dbmate doesn't know how to roll back a migration. In development, it's often useful to be able to revert your database to a previous state. To accomplish this, implement the `migrate:down` section:

@@ -306,6 +314,14 @@ Rolling back: 20151127184807_create_users_table.sql
Writing: ./db/schema.sql
```

You can also roll back to a specific migration.

```sh
$ dbmate rollback 20151127184807
# or, with a limit option
$ dbmate rollback -limit 2 # will rollback the last two migrations
```

### Migration Options

dbmate supports options passed to a migration block in the form of `key:value` pairs. List of supported options:

@@ -403,13 +419,13 @@ Why another database schema migration tool? Dbmate was inspired by many other tools
| Plain SQL migration files | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | | |
| Support for creating and dropping databases | :white_check_mark: | | | | :white_check_mark: | |
| Support for saving schema dump files | :white_check_mark: | | | | :white_check_mark: | |
| Timestamp-versioned migration files | :white_check_mark: | :white_check_mark: | | | :white_check_mark: | :white_check_mark: |
| Timestamp-versioned migration files | :white_check_mark: | :white_check_mark: | | :white_check_mark: | :white_check_mark: | :white_check_mark: |
| Custom schema migrations table | :white_check_mark: | | :white_check_mark: | | | :white_check_mark: |
| Ability to wait for database to become ready | :white_check_mark: | | | | | |
| Database connection string loaded from environment variables | :white_check_mark: | | | | | |
| Automatically load .env file | :white_check_mark: | | | | | |
| No separate configuration file | :white_check_mark: | | | | :white_check_mark: | :white_check_mark: |
| Language/framework independent | :white_check_mark: | :eight_pointed_black_star: | :eight_pointed_black_star: | :eight_pointed_black_star: | | |
| No separate configuration file | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | :white_check_mark: |
| Language/framework independent | :white_check_mark: | :eight_pointed_black_star: | :eight_pointed_black_star: | :white_check_mark: | | |
| **Drivers** |
| PostgreSQL | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
| MySQL | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
@@ -1,4 +1,4 @@
version: '2.3'
version: "2.3"
services:
  dev:
    build:

@@ -20,10 +20,12 @@ services:
    build:
      context: .
      target: release
    image: dbmate_release

  mysql:
    image: mysql:5.7
    image: mysql/mysql-server:8.0
    environment:
      MYSQL_ROOT_HOST: "%"
      MYSQL_ROOT_PASSWORD: root

  postgres:
main.go (12 lines changed)

@@ -109,6 +109,7 @@ func NewApp() *cli.App {
        },
    },
    Action: action(func(db *dbmate.DB, c *cli.Context) error {
        db.TargetVersion = c.Args().First()
        db.Verbose = c.Bool("verbose")
        return db.CreateAndMigrate()
    }),

@@ -129,7 +130,7 @@ func NewApp() *cli.App {
},
{
    Name:  "migrate",
    Usage: "Migrate to the latest version",
    Usage: "Migrate to the specified or latest version",
    Flags: []cli.Flag{
        &cli.BoolFlag{
            Name: "verbose",

@@ -139,6 +140,7 @@ func NewApp() *cli.App {
        },
    },
    Action: action(func(db *dbmate.DB, c *cli.Context) error {
        db.TargetVersion = c.Args().First()
        db.Verbose = c.Bool("verbose")
        return db.Migrate()
    }),

@@ -154,8 +156,16 @@ func NewApp() *cli.App {
            EnvVars: []string{"DBMATE_VERBOSE"},
            Usage:   "print the result of each statement execution",
        },
        &cli.IntFlag{
            Name:    "limit",
            Aliases: []string{"l"},
            Usage:   "Limits the amount of rollbacks (defaults to 1 if no target version is specified)",
            Value:   -1,
        },
    },
    Action: action(func(db *dbmate.DB, c *cli.Context) error {
        db.TargetVersion = c.Args().First()
        db.Limit = c.Int("limit")
        db.Verbose = c.Bool("verbose")
        return db.Rollback()
    }),
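The new positional version argument and the `limit`/`-l` flag feed the `TargetVersion` and `Limit` fields added to `dbmate.DB` in `pkg/dbmate/db.go` below. A minimal sketch of driving the same options from Go code rather than the CLI; the connection URL is an invented example, while `dbmate.New`, `Migrate`, `Rollback`, and the field names come from this diff:

```go
package main

import (
    "log"
    "net/url"

    "github.com/amacneil/dbmate/pkg/dbmate"
)

func main() {
    // hypothetical connection URL, for illustration only
    u, err := url.Parse("postgres://localhost:5432/foo")
    if err != nil {
        log.Fatal(err)
    }
    db := dbmate.New(u)

    // migrate up to (and including) a specific version, like `dbmate migrate 20151127184807`
    db.TargetVersion = "20151127184807"
    if err := db.Migrate(); err != nil {
        log.Fatal(err)
    }

    // roll back the last two applied migrations, like `dbmate rollback -limit 2`
    db.TargetVersion = ""
    db.Limit = 2
    if err := db.Rollback(); err != nil {
        log.Fatal(err)
    }
}
```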
pkg/dbmate/db.go (110 lines changed)

@@ -5,7 +5,6 @@ import (
    "errors"
    "fmt"
    "io"
    "io/ioutil"
    "net/url"
    "os"
    "path/filepath"

@@ -42,6 +41,8 @@ type DB struct {
    WaitBefore bool
    WaitInterval time.Duration
    WaitTimeout time.Duration
    Limit int
    TargetVersion string
    Log io.Writer
}

@@ -65,6 +66,8 @@ func New(databaseURL *url.URL) *DB {
        WaitBefore: false,
        WaitInterval: DefaultWaitInterval,
        WaitTimeout: DefaultWaitTimeout,
        Limit: -1,
        TargetVersion: "",
        Log: os.Stdout,
    }
}

@@ -226,7 +229,7 @@ func (db *DB) dumpSchema(drv Driver) error {
    }

    // write schema to file
    return ioutil.WriteFile(db.SchemaFile, schema, 0644)
    return os.WriteFile(db.SchemaFile, schema, 0644)
}

// ensureDir creates a directory if it does not already exist

@@ -344,7 +347,7 @@ func (db *DB) migrate(drv Driver) error {

    for _, filename := range files {
        ver := migrationVersion(filename)
        if ok := applied[ver]; ok {
        if ok := applied[ver]; ok && ver != db.TargetVersion {
            // migration already applied
            continue
        }

@@ -380,6 +383,11 @@ func (db *DB) migrate(drv Driver) error {
        if err != nil {
            return err
        }

        if ver == db.TargetVersion {
            fmt.Fprintf(db.Log, "Reached target version %s\n", ver)
            break
        }
    }

    // automatically update schema file, silence errors

@@ -402,7 +410,7 @@ func (db *DB) printVerbose(result sql.Result) {
}

func findMigrationFiles(dir string, re *regexp.Regexp) ([]string, error) {
    files, err := ioutil.ReadDir(dir)
    files, err := os.ReadDir(dir)
    if err != nil {
        return nil, fmt.Errorf("could not find migrations directory `%s`", dir)
    }

@@ -470,55 +478,83 @@ func (db *DB) Rollback() error {
    }
    defer dbutil.MustClose(sqlDB)

    applied, err := drv.SelectMigrations(sqlDB, 1)
    limit := db.Limit
    // default limit is -1, if we don't specify a version it should only rollback one version, not all
    if limit <= 0 && db.TargetVersion == "" {
        limit = 1
    }

    applied, err := drv.SelectMigrations(sqlDB, limit)
    if err != nil {
        return err
    }

    // grab most recent applied migration (applied has len=1)
    latest := ""
    for ver := range applied {
        latest = ver
    }
    if latest == "" {
        return fmt.Errorf("can't rollback: no migrations have been applied")
    if len(applied) == 0 {
        return fmt.Errorf("can't rollback, no migrations found")
    }

    filename, err := findMigrationFile(db.MigrationsDir, latest)
    if err != nil {
        return err
    var versions []string
    for v := range applied {
        versions = append(versions, v)
    }

    fmt.Fprintf(db.Log, "Rolling back: %s\n", filename)
    // new → old
    sort.Sort(sort.Reverse(sort.StringSlice(versions)))

    _, down, err := parseMigration(filepath.Join(db.MigrationsDir, filename))
    if err != nil {
        return err
    if db.TargetVersion != "" {
        cache := map[string]bool{}
        found := false

        // latest version comes first, so take every version until the version matches
        for _, ver := range versions {
            if ver == db.TargetVersion {
                found = true
                break
            }
            cache[ver] = true
        }
        if !found {
            return fmt.Errorf("target version not found")
        }
        applied = cache
    }

    execMigration := func(tx dbutil.Transaction) error {
        // rollback migration
        result, err := tx.Exec(down.Contents)
    for version := range applied {
        filename, err := findMigrationFile(db.MigrationsDir, version)
        if err != nil {
            return err
        } else if db.Verbose {
            db.printVerbose(result)
        }

        // remove migration record
        return drv.DeleteMigration(tx, latest)
    }
        fmt.Fprintf(db.Log, "Rolling back: %s\n", filename)
        _, down, err := parseMigration(filepath.Join(db.MigrationsDir, filename))
        if err != nil {
            return err
        }

    if down.Options.Transaction() {
        // begin transaction
        err = doTransaction(sqlDB, execMigration)
    } else {
        // run outside of transaction
        err = execMigration(sqlDB)
    }
        execMigration := func(tx dbutil.Transaction) error {
            // rollback migration
            result, err := tx.Exec(down.Contents)
            if err != nil {
                return err
            } else if db.Verbose {
                db.printVerbose(result)
            }

    if err != nil {
        return err
            // remove migration record
            return drv.DeleteMigration(tx, version)
        }

        if down.Options.Transaction() {
            // begin transaction
            err = doTransaction(sqlDB, execMigration)
        } else {
            // run outside of transaction
            err = execMigration(sqlDB)
        }

        if err != nil {
            return err
        }
    }

    // automatically update schema file, silence errors

@@ -583,7 +619,7 @@ func (db *DB) CheckMigrationsStatus(drv Driver) ([]StatusResult, error) {
    }
    defer dbutil.MustClose(sqlDB)

    applied, err := drv.SelectMigrations(sqlDB, -1)
    applied, err := drv.SelectMigrations(sqlDB, db.Limit)
    if err != nil {
        return nil, err
    }
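A side note on the reverse sort used by the rewritten `Rollback`: dbmate's timestamp versions sort lexicographically, so `sort.Sort(sort.Reverse(sort.StringSlice(versions)))` yields newest-first order. A tiny self-contained sketch (version strings taken from the test data in this diff):

```go
package main

import (
    "fmt"
    "sort"
)

func main() {
    versions := []string{"20151129054053", "20200227231541", "20220607110405"}
    // newest first, as Rollback expects when walking back toward a target version
    sort.Sort(sort.Reverse(sort.StringSlice(versions)))
    fmt.Println(versions)
    // Output: [20220607110405 20200227231541 20151129054053]
}
```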
@@ -1,7 +1,6 @@
package dbmate_test

import (
    "io/ioutil"
    "net/url"
    "os"
    "path/filepath"

@@ -48,6 +47,8 @@ func TestNew(t *testing.T) {
    require.False(t, db.WaitBefore)
    require.Equal(t, time.Second, db.WaitInterval)
    require.Equal(t, 60*time.Second, db.WaitTimeout)
    require.Equal(t, -1, db.Limit)
    require.Equal(t, "", db.TargetVersion)
}

func TestGetDriver(t *testing.T) {

@@ -102,7 +103,7 @@ func TestDumpSchema(t *testing.T) {
    db := newTestDB(t, u)

    // create custom schema file directory
    dir, err := ioutil.TempDir("", "dbmate")
    dir, err := os.MkdirTemp("", "dbmate")
    require.NoError(t, err)
    defer func() {
        err := os.RemoveAll(dir)

@@ -129,7 +130,7 @@ func TestDumpSchema(t *testing.T) {
    require.NoError(t, err)

    // verify schema
    schema, err := ioutil.ReadFile(db.SchemaFile)
    schema, err := os.ReadFile(db.SchemaFile)
    require.NoError(t, err)
    require.Contains(t, string(schema), "-- PostgreSQL database dump")
}

@@ -140,7 +141,7 @@ func TestAutoDumpSchema(t *testing.T) {
    db.AutoDumpSchema = true

    // create custom schema file directory
    dir, err := ioutil.TempDir("", "dbmate")
    dir, err := os.MkdirTemp("", "dbmate")
    require.NoError(t, err)
    defer func() {
        err := os.RemoveAll(dir)

@@ -163,7 +164,7 @@ func TestAutoDumpSchema(t *testing.T) {
    require.NoError(t, err)

    // verify schema
    schema, err := ioutil.ReadFile(db.SchemaFile)
    schema, err := os.ReadFile(db.SchemaFile)
    require.NoError(t, err)
    require.Contains(t, string(schema), "-- PostgreSQL database dump")

@@ -176,7 +177,7 @@ func TestAutoDumpSchema(t *testing.T) {
    require.NoError(t, err)

    // schema should be recreated
    schema, err = ioutil.ReadFile(db.SchemaFile)
    schema, err = os.ReadFile(db.SchemaFile)
    require.NoError(t, err)
    require.Contains(t, string(schema), "-- PostgreSQL database dump")
}

@@ -243,9 +244,11 @@ func TestWaitBeforeVerbose(t *testing.T) {
        `Applying: 20151129054053_test_migration.sql
Rows affected: 1
Applying: 20200227231541_test_posts.sql
Rows affected: 0
Applying: 20220607110405_test_category.sql
Rows affected: 0`)
    require.Contains(t, output,
        `Rolling back: 20200227231541_test_posts.sql
        `Rolling back: 20220607110405_test_category.sql
Rows affected: 0`)
}

@@ -292,6 +295,37 @@ func TestMigrate(t *testing.T) {
    }
}

func TestMigrateToTarget(t *testing.T) {
    for _, u := range testURLs() {
        t.Run(u.Scheme, func(t *testing.T) {
            db := newTestDB(t, u)
            db.TargetVersion = "20151129054053"
            drv, err := db.GetDriver()
            require.NoError(t, err)

            // drop and recreate database
            err = db.Drop()
            require.NoError(t, err)
            err = db.Create()
            require.NoError(t, err)

            // migrate
            err = db.Migrate()
            require.NoError(t, err)

            // verify results
            sqlDB, err := drv.Open()
            require.NoError(t, err)
            defer dbutil.MustClose(sqlDB)

            count := 0
            err = sqlDB.QueryRow(`select count(*) from schema_migrations`).Scan(&count)
            require.NoError(t, err)
            require.Equal(t, 1, count)
        })
    }
}

func TestUp(t *testing.T) {
    for _, u := range testURLs() {
        t.Run(u.Scheme, func(t *testing.T) {

@@ -351,13 +385,59 @@ func TestRollback(t *testing.T) {
            require.NoError(t, err)
            require.Equal(t, 1, count)

            err = sqlDB.QueryRow("select count(*) from posts").Scan(&count)
            err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
            require.Nil(t, err)

            // rollback
            err = db.Rollback()
            require.NoError(t, err)

            // verify rollback
            err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count)
            require.NoError(t, err)
            require.Equal(t, 2, count)

            err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
            require.NotNil(t, err)
            require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
        })
    }
}

func TestRollbackToTarget(t *testing.T) {
    for _, u := range testURLs() {
        t.Run(u.Scheme, func(t *testing.T) {
            db := newTestDB(t, u)
            drv, err := db.GetDriver()
            require.NoError(t, err)

            // drop, recreate, and migrate database
            err = db.Drop()
            require.NoError(t, err)
            err = db.Create()
            require.NoError(t, err)
            err = db.Migrate()
            require.NoError(t, err)

            // verify migration
            sqlDB, err := drv.Open()
            require.NoError(t, err)
            defer dbutil.MustClose(sqlDB)

            count := 0
            err = sqlDB.QueryRow(`select count(*) from schema_migrations
                where version = '20151129054053'`).Scan(&count)
            require.NoError(t, err)
            require.Equal(t, 1, count)

            err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
            require.Nil(t, err)

            // rollback
            db.TargetVersion = "20151129054053"
            err = db.Rollback()
            require.NoError(t, err)

            // verify rollback
            err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count)
            require.NoError(t, err)

@@ -366,6 +446,60 @@ func TestRollback(t *testing.T) {
            err = sqlDB.QueryRow("select count(*) from posts").Scan(&count)
            require.NotNil(t, err)
            require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())

            err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
            require.NotNil(t, err)
            require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
        })
    }
}

func TestRollbackToLimit(t *testing.T) {
    for _, u := range testURLs() {
        t.Run(u.Scheme, func(t *testing.T) {
            db := newTestDB(t, u)
            drv, err := db.GetDriver()
            require.NoError(t, err)

            // drop, recreate, and migrate database
            err = db.Drop()
            require.NoError(t, err)
            err = db.Create()
            require.NoError(t, err)
            err = db.Migrate()
            require.NoError(t, err)

            // verify migration
            sqlDB, err := drv.Open()
            require.NoError(t, err)
            defer dbutil.MustClose(sqlDB)

            count := 0
            err = sqlDB.QueryRow(`select count(*) from schema_migrations
                where version = '20151129054053'`).Scan(&count)
            require.NoError(t, err)
            require.Equal(t, 1, count)

            err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
            require.Nil(t, err)

            // rollback
            db.Limit = 2
            err = db.Rollback()
            require.NoError(t, err)

            // verify rollback
            err = sqlDB.QueryRow("select count(*) from schema_migrations").Scan(&count)
            require.NoError(t, err)
            require.Equal(t, 1, count)

            err = sqlDB.QueryRow("select count(*) from posts").Scan(&count)
            require.NotNil(t, err)
            require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())

            err = sqlDB.QueryRow("select count(*) from categories").Scan(&count)
            require.NotNil(t, err)
            require.Regexp(t, "(does not exist|doesn't exist|no such table)", err.Error())
        })
    }
}

@@ -391,7 +525,7 @@ func TestStatus(t *testing.T) {
    // two pending
    results, err := db.CheckMigrationsStatus(drv)
    require.NoError(t, err)
    require.Len(t, results, 2)
    require.Len(t, results, 3)
    require.False(t, results[0].Applied)
    require.False(t, results[1].Applied)

@@ -399,12 +533,13 @@ func TestStatus(t *testing.T) {
    err = db.Migrate()
    require.NoError(t, err)

    // two applied
    // three applied
    results, err = db.CheckMigrationsStatus(drv)
    require.NoError(t, err)
    require.Len(t, results, 2)
    require.Len(t, results, 3)
    require.True(t, results[0].Applied)
    require.True(t, results[1].Applied)
    require.True(t, results[2].Applied)

    // rollback last migration
    err = db.Rollback()

@@ -413,9 +548,10 @@ func TestStatus(t *testing.T) {
    // one applied, one pending
    results, err = db.CheckMigrationsStatus(drv)
    require.NoError(t, err)
    require.Len(t, results, 2)
    require.Len(t, results, 3)
    require.True(t, results[0].Applied)
    require.False(t, results[1].Applied)
    require.True(t, results[1].Applied)
    require.False(t, results[2].Applied)
    })
    }
}
@@ -2,7 +2,7 @@ package dbmate

import (
    "fmt"
    "io/ioutil"
    "os"
    "regexp"
    "strings"
)

@@ -33,7 +33,7 @@ func NewMigration() Migration {

// parseMigration reads a migration file and returns (up Migration, down Migration, error)
func parseMigration(path string) (Migration, Migration, error) {
    data, err := ioutil.ReadFile(path)
    data, err := os.ReadFile(path)
    if err != nil {
        return NewMigration(), NewMigration(), err
    }
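This hunk is part of a pattern repeated across the comparison (db.go, the tests, and here): deprecated `io/ioutil` helpers are replaced by their `os` equivalents introduced in Go 1.16. A minimal standalone sketch of the mapping, using only the standard library:

```go
package main

import (
    "fmt"
    "log"
    "os"
)

func main() {
    // ioutil.TempDir -> os.MkdirTemp
    dir, err := os.MkdirTemp("", "dbmate")
    if err != nil {
        log.Fatal(err)
    }
    defer os.RemoveAll(dir)

    // ioutil.WriteFile / ioutil.ReadFile -> os.WriteFile / os.ReadFile
    path := dir + "/schema.sql"
    if err := os.WriteFile(path, []byte("-- schema"), 0644); err != nil {
        log.Fatal(err)
    }
    data, err := os.ReadFile(path)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(data))

    // ioutil.ReadDir ([]os.FileInfo) -> os.ReadDir ([]os.DirEntry)
    entries, err := os.ReadDir(dir)
    if err != nil {
        log.Fatal(err)
    }
    for _, e := range entries {
        fmt.Println(e.Name())
    }
}
```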
@@ -1,4 +1,4 @@
package dbmate

// Version of dbmate
const Version = "1.12.0"
const Version = "1.12.1"
@@ -179,10 +179,8 @@ func TestMySQLDumpSchema(t *testing.T) {
    drv.databaseURL.Path = "/fakedb"
    schema, err = drv.DumpSchema(db)
    require.Nil(t, schema)
    require.EqualError(t, err, "mysqldump: [Warning] Using a password "+
        "on the command line interface can be insecure.\n"+
        "mysqldump: Got error: 1049: "+
        "Unknown database 'fakedb' when selecting the database")
    require.Error(t, err)
    require.Contains(t, err.Error(), "Unknown database 'fakedb'")
}

func TestMySQLDatabaseExists(t *testing.T) {
@@ -6,6 +6,7 @@ import (
    "fmt"
    "io"
    "net/url"
    "runtime"
    "strings"

    "github.com/amacneil/dbmate/pkg/dbmate"

@@ -48,7 +49,14 @@ func connectionString(u *url.URL) string {

    // default hostname
    if hostname == "" {
        hostname = "localhost"
        switch runtime.GOOS {
        case "linux":
            query.Set("host", "/var/run/postgresql")
        case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
            query.Set("host", "/tmp")
        default:
            hostname = "localhost"
        }
    }

    // host param overrides url hostname
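The new `runtime.GOOS` switch makes a missing hostname default to the platform's conventional Unix socket directory instead of `localhost`. A small sketch of the same selection and the connection strings it produces; the expected values match the `defaultConnString` helper added to the tests below, and the function name here is invented for illustration:

```go
package main

import (
    "fmt"
    "net/url"
    "runtime"
)

// defaultPostgresHost mirrors the switch added to connectionString:
// prefer a socket directory where one conventionally exists,
// otherwise fall back to localhost.
func defaultPostgresHost() (socketDir, hostname string) {
    switch runtime.GOOS {
    case "linux":
        return "/var/run/postgresql", ""
    case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
        return "/tmp", ""
    default:
        return "", "localhost"
    }
}

func main() {
    socketDir, hostname := defaultPostgresHost()
    if socketDir != "" {
        // e.g. postgres://:5432/foo?host=%2Fvar%2Frun%2Fpostgresql on Linux
        fmt.Printf("postgres://:5432/foo?host=%s\n", url.QueryEscape(socketDir))
    } else {
        // e.g. postgres://localhost:5432/foo on Windows
        fmt.Printf("postgres://%s:5432/foo\n", hostname)
    }
}
```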
@@ -4,6 +4,7 @@ import (
    "database/sql"
    "net/url"
    "os"
    "runtime"
    "testing"

    "github.com/amacneil/dbmate/pkg/dbmate"

@@ -50,13 +51,24 @@ func TestGetDriver(t *testing.T) {
    require.Equal(t, "schema_migrations", drv.migrationsTableName)
}

func defaultConnString() string {
    switch runtime.GOOS {
    case "linux":
        return "postgres://:5432/foo?host=%2Fvar%2Frun%2Fpostgresql"
    case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
        return "postgres://:5432/foo?host=%2Ftmp"
    default:
        return "postgres://localhost:5432/foo"
    }
}

func TestConnectionString(t *testing.T) {
    cases := []struct {
        input    string
        expected string
    }{
        // defaults
        {"postgres:///foo", "postgres://localhost:5432/foo"},
        {"postgres:///foo", defaultConnString()},
        // support custom url params
        {"postgres://bob:secret@myhost:1234/foo?bar=baz", "postgres://bob:secret@myhost:1234/foo?bar=baz"},
        // support `host` and `port` via url params

@@ -85,11 +97,11 @@ func TestConnectionArgsForDump(t *testing.T) {
        expected []string
    }{
        // defaults
        {"postgres:///foo", []string{"postgres://localhost:5432/foo"}},
        {"postgres:///foo", []string{defaultConnString()}},
        // support single schema
        {"postgres:///foo?search_path=foo", []string{"--schema", "foo", "postgres://localhost:5432/foo"}},
        {"postgres:///foo?search_path=foo", []string{"--schema", "foo", defaultConnString()}},
        // support multiple schemas
        {"postgres:///foo?search_path=foo,public", []string{"--schema", "foo", "--schema", "public", "postgres://localhost:5432/foo"}},
        {"postgres:///foo?search_path=foo,public", []string{"--schema", "foo", "--schema", "public", defaultConnString()}},
    }

    for _, c := range cases {

@@ -174,8 +186,8 @@ func TestPostgresDumpSchema(t *testing.T) {
        drv.databaseURL.Path = "/fakedb"
        schema, err = drv.DumpSchema(db)
        require.Nil(t, schema)
        require.EqualError(t, err, "pg_dump: [archiver (db)] connection to database "+
            "\"fakedb\" failed: FATAL: database \"fakedb\" does not exist")
        require.Error(t, err)
        require.Contains(t, err.Error(), "database \"fakedb\" does not exist")
    })

    t.Run("custom migrations table with schema", func(t *testing.T) {
@@ -1,3 +1,4 @@
//go:build cgo
// +build cgo

package sqlite
@@ -1,3 +1,4 @@
//go:build cgo
// +build cgo

package sqlite
testdata/db/migrations/20220607110405_test_category.sql (new file, 9 lines)

@@ -0,0 +1,9 @@
-- migrate:up
create table categories (
  id integer,
  title varchar(50),
  slug varchar(100)
);

-- migrate:down
drop table categories;