mirror of https://github.com/TECHNOFAB11/bumpver.git
synced 2025-12-11 22:10:09 +01:00

chore: yoink everything not needed for nix package

This commit is contained in:
parent 0650d80b0c
commit 0955cb2e22

36 changed files with 101 additions and 3351 deletions
75  .github/workflows/ci.yml  (vendored)
@@ -1,75 +0,0 @@

name: CI

on: [push, pull_request]

jobs:

  build-ubuntu:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Cache Conda Envs
        uses: actions/cache@v2
        with:
          path: |
            ~/miniconda3
            build/*.txt
          key: ${{ runner.OS }}-conda-cache-${{ hashFiles('requirements/*.txt', 'setup.py', 'Makefile*') }}
          restore-keys: |
            ${{ runner.OS }}-conda-cache-${{ hashFiles('requirements/*.txt', 'setup.py', 'Makefile*') }}

      - name: make conda
        run:
          if [[ -e build/envs.txt ]]; then touch build/envs.txt; fi;
          if [[ -e build/deps.txt ]]; then touch build/deps.txt; fi;
          make conda

      - name: make lint
        run: make lint

      - name: make mypy
        run: make mypy

      - name: make test
        run: make test

      - name: make test_compat
        run: make test_compat

  build-macos:

    runs-on: macos-latest

    steps:
      - uses: actions/checkout@v2

      - name: Cache Conda Envs
        uses: actions/cache@v2
        with:
          path: |
            ~/miniconda3
            build/*.txt
          key: ${{ runner.OS }}-conda-cache-${{ hashFiles('requirements/*.txt', 'setup.py', 'Makefile*') }}
          restore-keys: |
            ${{ runner.OS }}-conda-cache-${{ hashFiles('requirements/*.txt', 'setup.py', 'Makefile*') }}

      - name: brew install mercurial
        run: brew install mercurial

      - name: make conda
        run:
          if [[ -e build/envs.txt ]]; then touch build/envs.txt; fi;
          if [[ -e build/deps.txt ]]; then touch build/deps.txt; fi;
          make conda

      - name: make lint
        run: make lint

      - name: make mypy
        run: make mypy

      - name: make test
        run: make test
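Both jobs above only wrap the project's make targets, so the same checks can be reproduced locally (a sketch; it assumes a working conda setup as described in CONTRIBUTING.md):

```shell
$ make conda                          # set up the conda environments
$ make lint mypy test test_compat     # the same steps the CI jobs run
```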
3  .gitignore  (vendored)
@@ -1,3 +1,6 @@
+# Nix build result
+result/
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
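The new `result/` entry keeps the output link that a Nix build drops into the working tree out of version control (a sketch; it assumes the repository exposes a default Nix package, which is the point of this commit):

```shell
$ nix build      # assumption: the repo defines a default package/flake output
$ ls -l result   # symlink into /nix/store/..., now ignored by git
```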
@@ -1,65 +0,0 @@

stages:
  - test
  - build


lint:
  stage: test
  image: registry.gitlab.com/mbarkhau/pycalver/base
  script:
    - make lint
    - make mypy
  artifacts:
    reports:
      junit:
        - reports/flake8.xml
    paths:
      - reports/mypycov/
  allow_failure: false


test:
  # NOTE: Resource_group is conservative and can be disabled
  #       for simple tests. It should be enabled if the tests
  #       need exclusive access to some common resource. The
  #       resource_group will prevent multiple pipelines from
  #       running concurrently.
  # resource_group: test-unit
  stage: test
  image: registry.gitlab.com/mbarkhau/pycalver/base
  script:
    - make test
  coverage: '/^(TOTAL|src).*?(\d+\%)$/'
  artifacts:
    reports:
      junit:
        - reports/pytest.xml
    paths:
      - reports/testcov/
  allow_failure: false

test_compat:
  # NOTE: Resource_group is conservative and can be disabled
  #       for simple tests. It should be enabled if the tests
  #       need exclusive access to some common resource. The
  #       resource_group will prevent multiple pipelines from
  #       running concurrently.
  # resource_group: test-unit
  stage: test
  image: registry.gitlab.com/mbarkhau/pycalver/base
  script:
    - make test_compat
  allow_failure: false

pages:
  stage: build
  script:
    - mkdir -p public/cov
    - mkdir -p public/mypycov
    - cp -r reports/testcov/* public/cov/
    - cp -r reports/mypycov/* public/mypycov/
  artifacts:
    paths:
      - public
  only:
    - master
290  CHANGELOG.md
@@ -1,290 +0,0 @@

# Changelog for https://github.com/mbarkhau/bumpver


## BumpVer 2023.1124

- Fix [#208][gh_i208]: Fix handling of versions with PEP440 epoch.

[gh_i208]: https://github.com/mbarkhau/bumpver/issues/208

Thank you [Wen Kokke](https://github.com/wenkokke) for the issue report.


## BumpVer 2023.1122

- Fix [#207][gh_i207]: Add --ignore-vcs-tag to support bumping older versions

[gh_i207]: https://github.com/mbarkhau/bumpver/issues/207

Thank you [Jusong Yu](https://github.com/unkcpz) for your contribution.


## BumpVer 2023.1121

- Fix [#200][gh_i200]: Fix compatibility with packaging 23.0.
- Fix [#203][gh_i203]: Add dev to the list of valid release tags

[gh_i200]: https://github.com/mbarkhau/bumpver/issues/200
[gh_i203]: https://github.com/mbarkhau/bumpver/issues/203

Thank you [Sharon Yogev](https://github.com/sharonyogev) for your contribution.


## BumpVer 2022.1120

- Fix [#196][gh_i196]: Add `--pin-increments`.

[gh_i196]: https://github.com/mbarkhau/bumpver/issues/196

Thank you [Markus Holtermann](https://github.com/MarkusH) for this contribution.


## BumpVer 2022.1119

- Fix [#190][gh_i190]: Allow multiple patterns on the same line
- Fix [#182][gh_i182]: Use quotes for vcs commands

[gh_i190]: https://github.com/mbarkhau/bumpver/issues/190
[gh_i182]: https://github.com/mbarkhau/bumpver/issues/182


## BumpVer 2022.1118

- Fix [#181][gh_i181]: Enable use of ``^$`` characters to restrict matching to beginning and end of line.
- Add ``GITHASH`` to ``version_pattern`` (@mpasternak)

[gh_i181]: https://github.com/mbarkhau/bumpver/issues/181


## BumpVer 2022.1116

- Fix: [incorrect version comparison when updating from vcs tag][gh_i174].

  When comparing the updated version to the latest vcs tag, an insufficient string comparison was used instead of comparing the parsed versions.

[gh_i174]: https://github.com/mbarkhau/bumpver/issues/174

Thank you to Timo Ludwig @timoludwig for this contribution.


## BumpVer 2022.1115

- Fix: [use default date values][gh_i172].

  When parsing the current version, if it doesn't specify any date part (as is the case e.g. for SemVer), then use the current date to populate default parts.

  This enables updating YYYY patterns in copyright headers even for projects that don't use a CalVer pattern.

  Thank you [Benjamin Depardon (@bdepardo)][gh_bdepardo] for finding and reporting this issue.

[gh_i172]: https://github.com/mbarkhau/bumpver/issues/172
[gh_bdepardo]: https://github.com/bdepardo


## BumpVer 2021.1114

- Add: [flags to override vcs options][gh_i168] for `bumpver update`

[gh_i168]: https://github.com/mbarkhau/bumpver/issues/168

Thank you to Timo Ludwig @timoludwig for this contribution.


## BumpVer 2021.1113

- Add: [`--commit-message` argument][gh_i162] for `bumpver update`

[gh_i162]: https://github.com/mbarkhau/bumpver/issues/162


## BumpVer 2021.1112

- Fix: Build from source on Windows.


## BumpVer 2021.1110

- Fix [github#157][gh_i157]: Improve error messages.
- Fix [github#158][gh_i158]: Clarify `PYTAGNUM` "part"

[gh_i157]: https://github.com/mbarkhau/bumpver/issues/157
[gh_i158]: https://github.com/mbarkhau/bumpver/issues/158

Thank you to Julien Palard @JulienPalard for testing and feedback.


## BumpVer 2021.1109

- Add `-e/--env` option to support shell script automation.
- Fix [github#151][gh_i151]: invalid increment of `TAGNUM` when `TAG=final` is set.

[gh_i151]: https://github.com/mbarkhau/bumpver/issues/151

Thank you to Dave Wapstra @dwapstra for your contributions.


## BumpVer 2020.1108

- Don't match empty patterns (possibly causing a whole file to be rewritten if braces `[]` are not escaped).


## BumpVer 2020.1107

- Non-Beta release (no significant code changes).


## BumpVer 2020.1105-beta

- Fix [gitlab#15][gitlab_i15]: Fix config parsing corner case.
- Fix [gitlab#16][gitlab_i16]: Fix rollover handling for tag/pytag.

[gitlab_i15]: https://gitlab.com/mbarkhau/pycalver/-/issues/15
[gitlab_i16]: https://gitlab.com/mbarkhau/pycalver/-/issues/16


## BumpVer 2020.1104-beta

- Fix [gitlab#13][gitlab_i13]: Add `--set-version=<VERSION>` to explicitly set version.
- Fix [gitlab#14][gitlab_i14]: Parse `tool.bumpver` when using pyproject.toml as per PEP 518.

[gitlab_i13]: https://gitlab.com/mbarkhau/pycalver/-/issues/13
[gitlab_i14]: https://gitlab.com/mbarkhau/pycalver/-/issues/14


## BumpVer 2020.1100-beta

Rename package and module from PyCalVer to BumpVer. This name change is due to confusion that this project is either Python specific, or only suitable for CalVer versioning schemes, neither of which is the case.

This release includes a new syntax for patterns.

```
version_pattern = "vYYYY0M.BUILD[-RELEASE]"           # new style
version_pattern = "v{year}{month}{build}{release}"    # old style

version_pattern = "MAJOR.MINOR.PATCH"                 # new style semver
version_pattern = "{MAJOR}.{MINOR}.{PATCH}"           # old style semver
```
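As an illustration, a new-style pattern can be tried out with the `test` sub-command (a sketch; the version string is made up and the printed output is abridged and illustrative):

```shell
$ bumpver test 'v202012.1033-beta' 'vYYYY0M.BUILD[-RELEASE]' --pin-date --release=final
New Version: v202012.1034
```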
The main reasons for this switch were:

- To enable optional parts using braces `[PART]`.
- To align the syntax with the conventions used on CalVer.org

The previous syntax will continue to be supported, but all documentation has been updated to primarily reference new style patterns.

- Switch main repo from gitlab to github.
- New [gitlab#7][gitlab_i7]: New style pattern syntax.
- Better support for week numbers.
- Better support for optional parts.
- New: `BUILD` part now starts at `1000` instead of `0001` to avoid truncation of leading zeros.
- New: Add `INC0` (0-based) and `INC1` (1-based) parts that do auto increment and rollover.
- New: `MAJOR`/`MINOR`/`PATCH`/`INC` will roll over when a date part changes to their left.
- New [gitlab#2][gitlab_i2]: Added `grep` sub-command to help with debugging of patterns.
- New [gitlab#10][gitlab_i10]: `--pin-date` to keep date parts unchanged, and only increment non-date parts.
- New: Added `--date=<iso-date>` parameter to set an explicit date (instead of the current date).
- New: Added `--release-num` to increment the `alphaN`/`betaN`/`a0`/`b0`/etc. release number.
- New: Added better error messages to debug regular expressions.
- New [gitlab#9][gitlab_i9]: Make commit message configurable.
- Fix [gitlab#12][gitlab_i12]: Error with sorting non-lexical version tags (e.g. SemVer).
- Fix [gitlab#11][gitlab_i11]: Show regexp when `--verbose` is used.
- Fix [gitlab#8][gitlab_i8]: `bumpver update` will now also push HEAD (previously only the tag itself was pushed).
- Fix: Disallow `--release=dev`. The semantics of a `dev` release are different than for other release tags and further development would be required to support them correctly.
- Fix: Entries in `file_patterns` were ignored if there were multiple entries for the same file.

This release no longer includes the `pycalver.lexid` module, which has been moved into its own package: [pypi.org/project/lexid/](https://pypi.org/project/lexid/).

Many thanks to contributors of this release: @LucidOne, @khanguslee, @chaudum

[gitlab_i7]: https://gitlab.com/mbarkhau/pycalver/-/issues/7
[gitlab_i2]: https://gitlab.com/mbarkhau/pycalver/-/issues/2
[gitlab_i10]: https://gitlab.com/mbarkhau/pycalver/-/issues/10
[gitlab_i9]: https://gitlab.com/mbarkhau/pycalver/-/issues/9
[gitlab_i12]: https://gitlab.com/mbarkhau/pycalver/-/issues/12
[gitlab_i11]: https://gitlab.com/mbarkhau/pycalver/-/issues/11
[gitlab_i8]: https://gitlab.com/mbarkhau/pycalver/-/issues/8


## PyCalVer v202010.1042

- Add deprecation warning to README.md


## PyCalVer v201907.0036

- Fix: Don't use git/hg command if `commit=False` is configured (thanks @valentin87)


## PyCalVer v201907.0035

- Fix [gitlab#6][gitlab_i6]: Add parts `{month_short}`, `{dom_short}`, `{doy_short}`.
- Fix [gitlab#5][gitlab_i5]: Better warning when using bump with SemVer (one of --major/--minor/--patch is required)
- Fix [gitlab#4][gitlab_i4]: Make {release} part optional, so that versions generated by --release=final are parsed.

[gitlab_i6]: https://gitlab.com/mbarkhau/pycalver/-/issues/6
[gitlab_i5]: https://gitlab.com/mbarkhau/pycalver/-/issues/5
[gitlab_i4]: https://gitlab.com/mbarkhau/pycalver/-/issues/4


## PyCalVer v201903.0030

- Fix: Use pattern from config instead of hard-coded {pycalver} pattern.
- Fix: Better error messages for git/hg issues.
- Add: Implicit default pattern for config file.


## PyCalVer v201903.0028

- Fix: Add warnings when configured files are not under version control.
- Add: Colored output for bump --dry


## PyCalVer v201902.0027

- Fix: Allow --release=post
- Fix: Better error reporting for bad patterns
- Fix: Regex escaping issue with "?"


## PyCalVer v201902.0024

- Added: Support for globs in file patterns.
- Fixed: Better error reporting for invalid config.


## PyCalVer v201902.0020

- Added: Support for many more custom version patterns.


## PyCalVer v201812.0018

- Fixed: Better handling of pattern replacements with "-final" releases.


## PyCalVer v201812.0017

- Fixed [github#2][github_i2]: `pycalver init` was broken.
- Fixed pattern escaping issues.
- Added lots more tests for cli.
- Cleaned up documentation.

[github_i2]: https://github.com/mbarkhau/pycalver/issues/2


## PyCalVer v201812.0011-beta

- Add version tags using git/hg.
- Use git/hg tags as SSOT for most recent version.
- Start using https://gitlab.com/mbarkhau/bootstrapit
- Move to https://gitlab.com/mbarkhau/pycalver


## PyCalVer v201809.0001-alpha

- Initial release
485  CONTRIBUTING.md
@@ -1,485 +0,0 @@

# Contributing

<!--
$ pip install md-toc
$ md_toc -i gitlab CONTRIBUTING.md.template
-->

[](TOC)

- [Contributing](#contributing)
- [Introduction](#introduction)
- [Setup](#setup)
- [Setup SSH keys](#setup-ssh-keys)
- [Setup Virtual Environments](#setup-virtual-environments)
- [Project Types](#project-types)
- [Project Layout](#project-layout)
- [Dependency Management](#dependency-management)
- [These are not used on production, or staging, only](#these-are-not-used-on-production-or-staging-only)
- [on development machines and the CI environment.](#on-development-machines-and-the-ci-environment)
- [These are the requirements produced for specific builds. They can be](#these-are-the-requirements-produced-for-specific-builds-they-can-be)
- [used to debug version compatibility issues . They are generated](#used-to-debug-version-compatibility-issues-they-are-generated)
- [using pip freeze](#using-pip-freeze)
- [Vendoring](#vendoring)
- [Development](#development)
- [Linting](#linting)
- [Type Checking](#type-checking)
- [Documentation](#documentation)
- [Setup to run docker](#setup-to-run-docker)
- [PyCharm](#pycharm)
- [Sublime Text](#sublime-text)
- [Best Practices](#best-practices)

[](TOC)


## Introduction

Friction for new contributors should be as low as possible. Ideally a new contributor, starting on any unix[^1] system, can go through these steps and not encounter any errors:

1. `git clone <project_url>`
2. `cd <project>`
3. `make conda`
4. `# get some coffee`
5. `make fmt lint mypy test`

If you as a new contributor encounter any errors, then please create an issue report and you will already have made a great contribution!


## Setup

The development workflow described here is documented based on a Unix environment. Hopefully this will reduce discrepancies between development and production systems.


### Setup SSH keys

Projects which depend on private repositories require ssh to connect to remote servers. If this is the case, you should make sure that your ssh keys are available in `${HOME}/.ssh`, or you will have to run `ssh-keygen` and install the generated public key on the host system. If this is not done, `pip install` will fail to install these dependencies from your private repositories with an error like this:

```shell
Downloading/unpacking git+git://...git
Cloning Git repository git://

Permission denied (publickey).

fatal: The remote end hung up unexpectedly
----------------------------------------
Command /usr/local/bin/git clone ... failed with error code 128
```
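A minimal key setup might look like this (a sketch; the key type and host name are illustrative, and the public key still has to be registered with your git hosting account):

```shell
$ ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519
$ cat ~/.ssh/id_ed25519.pub    # register this public key with your git host
$ ssh -T git@github.com        # verify that authentication works
```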
### Setup Virtual Environments

The first setup can take a while, since it will install miniconda and download lots of dependencies for the first time. If you would like to know more about conda, there is a good article written by Gergely Szerovay: https://medium.freecodecamp.org/85f155f4353c

```shell
dev@host:~
$ git clone git@../group/project.git
Cloning Git repository git@../group/project.git to project
...

$ cd project

dev@host:~/project
$ make conda
Solving environment:
...
```

This will do quite a few things:

1. Install miniconda3, if it isn't already installed. It checks the path `$HOME/miniconda3` for an existing installation.
2. Create python virtual environments for all supported python versions of this project.
3. Install application and development dependencies into the environments.
4. Install vendored dependencies into `vendor/`.

If installation was successful, you should be able to at least run the linter (assuming previous developers have exercised a bare minimum of diligence).

```console
$ make lint
flake8 .. ok
mypy .... ok
doc ..... ok
```

If this is the first time conda has been installed on your system, you'll probably want to enable the `conda` command:

```
$ echo ". ${HOME}/miniconda3/etc/profile.d/conda.sh" >> ~/.bashrc
$ conda --version
conda 4.5.11
```

You can also activate the default virtual environment as follows.

```shell
(myproject_py36) dev@host:~/myproject
$ source ./activate
$ which python
/home/dev/miniconda3/envs/myproject_py36/bin/python

$ ipython
Python 3.6.6 |Anaconda, Inc.| (default, Jun 28 2018, 17:14:51)
Type 'copyright', 'credits' or 'license' for more information
IPython 6.5.0 -- An enhanced Interactive Python. Type '?' for help.

In [1]: import sys

In [2]: sys.path
Out[2]:
['/home/dev/miniconda3/envs/pycalver_py36/bin',
 '/home/dev/myproject/src',
 '/home/dev/myproject/vendor',
 ...

In [3]: import myproject

In [4]: myproject.__file__
Out[4]: '/home/dev/myproject/src/myproject/__init__.py'
```

Note that the `PYTHONPATH` has been set up to import modules of the project. You can review the definition of `make ipy` to see how to set up `PYTHONPATH` correctly.

```shell
$ make ipy --dry-run
ENV=${ENV-dev} PYTHONPATH=src/:vendor/:$PYTHONPATH \
    /home/dev/miniconda3/envs/myproject_py36/bin/ipython
$ make ipy
Python 3.6.6 |Anaconda, Inc.| (default, Jun 28 2018, 17:14:51)
Type 'copyright', 'credits' or 'license' for more information
IPython 6.5.0 -- An enhanced Interactive Python. Type '?' for help.

In [1]: import myproject

In [2]: myproject.__file__
Out[2]: '/home/dev/myproject/src/myproject/__init__.py'
```


## Project Types

These guidelines are written for different kinds of projects, each of which is ideally small, focused and reusable. These projects can be:

1. Services: Projects which are deployed and run continuously.
2. Libraries: Projects which are not deployed by themselves but installed and used by others.
3. CLI Tools: Projects which are installed and mainly used by developers and admins.

The choices made here are intended to make it easy to start new projects by reducing the burden of project setup to a minimum.


## Project Layout

    src/                        # source code of project
    vendor/                     # vendored dependencies
    stubs/                      # mypy .pyi stub files
    test/                       # pytest test files (files begin with test_)
    scripts/                    # miscellaneous scripts used for deployment and ops

    requirements/               # dependency metadata files
    docs/                       # documentation source files
    data/                       # fixtures for unit tests and db initialization

    setup.py                    # main python package metadata
    setup.cfg                   # misc python tooling configuration

    README.md                   # project overview and status
    CONTRIBUTING.md             # guide for developers
    CHANGELOG.md                # short documentation of release history
    LICENSE                     # for public libraries (MIT preferred)

    Makefile                    # project specific configuration variables and make targets
    Makefile.bootstrapit.make   # bootstrapit make include library

    docker_base.Dockerfile      # base image for CI (only conda envs)
    Dockerfile                  # image with source of the project


### Dependency Management

Dependencies are managed using a set of requirements/\*.txt files. You only need to know about this if you want to add or change a dependency.

```shell
requirements/conda.txt          # installed via conda from main or conda-forge
requirements/pypi.txt           # installed via pip from pypi to virtual environments
requirements/vendor.txt         # installed via pip from pypi to vendor/

# These are not used on production, or staging, only
# on development machines and the CI environment.
requirements/development.txt    # useful packages for development/debugging
requirements/integration.txt    # used for linting/testing/packaging

# These are the requirements produced for specific builds. They can be
# used to debug version compatibility issues. They are generated
# using make freeze
requirements/20190214t212403_freeze.txt
```

When adding a new dependency, please consider:

- Only specify direct dependencies of the project, not transitive dependencies of other projects. These are installed via their own respective dependency declarations.
- Whenever possible, the specifier for a package should be only its name, without a version specifier. With this as the default, the project remains up to date in terms of security fixes and other library improvements.
- Some packages consider some of their dependencies to be optional, in which case you will have to specify their transitive dependencies.
- Only specify/pin/freeze a specific (older) version if there are known issues, or your project requires features from an unstable (alpha/beta) version of the package. Each pinned version should document why it was pinned, so that it can later be determined if the issue has been resolved in the meantime.

One argument against this approach is the issue of rogue package maintainers. A package maintainer might release a new version which you automatically install using `make conda`, and this new code opens a back door or proceeds to send data from your production system to a random server on the internet.

The only protection pypi or conda-forge have against this is to remove packages that are reported to them. If you are paranoid, you could start pinning dependencies to older versions, for which you feel comfortable that any issues would have been noticed. This is only a half measure however, since the issues may not be noticed even after months.

Ultimately, if data breaches are a concern you should talk to your network admin about firewall rules, and if data loss is a concern you should review your backup policy.

Further Reading:

https://hackernoon.com/building-a-botnet-on-pypi-be1ad280b8d6
https://python-security.readthedocs.io/packages.html

Dependencies are installed in this order:

- `conda.txt`
- `pypi.txt`
- `vendor.txt`
- `development.txt`
- `integration.txt`

Please review the documentation header at the beginning of each `requirements/*.txt` file to determine which file is appropriate for the dependency you want to add.

Choose a file:

- `conda.txt` is appropriate for non python packages and packages which would require compilation if they were downloaded from pypi, or which cannot be downloaded from pypi at all (such as openjdk or node).
- `pypi.txt` is for dependencies on python packages, be they from pypi or git repositories.
- `vendor.txt` is appropriate for pure python libraries which are written using mypy. This allows the mypy type checker to work with types defined in other packages.

After adding a new dependency, you can run `make conda`.

```shell
(myproject_py36) dev@host:~/myproject
$ make conda
Solving environment: done

Downloading and Extracting Packages
requests-2.19.1      |  94 KB  conda-forge
...
```
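Concretely, adding a new pure-Python dependency usually comes down to appending one line and re-running the environment setup (the package name below is hypothetical):

```shell
$ echo "requests" >> requirements/pypi.txt   # plain name, no version pin (see guidance above)
$ make conda                                 # re-solve the envs and install the new package
```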
Normally `make conda` only does something if you update one of the `requirements/*.txt` files. If you know a dependency was updated, and `make conda` is not having an effect, you can force the update using `make force conda`.


### Vendoring

Vendored dependencies are usually committed to git, but if you trust the package maintainer and the installation via `vendor.txt`, then it's not required.

There are a few reasons to vendor a dependency:

1. You want the source to be easily accessible in your development tools. For example, mypy can access the types of vendored projects.
2. You don't trust the maintainer of a dependency, and want to review any updates using git diff.
3. There is no maintainer or downloadable package, so your only option is to download it into a local directory. For example, you may want to use some of the modules from https://github.com/TheAlgorithms/Python

If you do vendor a dependency, avoid local modifications; instead, contribute to the upstream project when possible.


## Development

The typical commands used during development are:

- `make conda`: Setup virtual environment
- `source activate`: Activate virtual environment
- `make help`: Overview of tasks
- `make fmt`: Format code
- `make lint`: Linting
- `make mypy`: Typecheck
- `make devtest`: Run unittests with dev interpreter against code from `src/`.

Slightly less common, but good to run before doing `git push`:

- `make test`: Run unittests on all supported interpreters after installing using `python setup.py install`. This tests the code as the users of your library will have installed it.
- `make citest`: Run `make test` but inside a docker container, which is as close to the CI environment as possible. This is quite useful if you don't want to trigger dozens of CI builds to debug a tricky issue.


### Packaging/Distribution

Publishing a package is done using twine, for which you will need to somehow supply your pypi authentication. I haven't tried [keyring support](https://twine.readthedocs.io/en/latest/#keyring-support), but you're welcome to give that a shot. Another way is to add an entry in your `~/.pypirc`:

```
[distutils]
index-servers =
    pypi
    pypi-legacy

[pypi]
repository = https://pypi.org
username = Your.Username
password = secret

[pypi-legacy]
repository = https://upload.pypi.org/legacy/
username = Your.Username
password = secret
```

Creating a new package and uploading it to pypi will typically involve these steps:

- `make lint mypy test`: Run CI locally, in case you don't trust the CI setup.
- `make bump_version`: Increment project wide version numbers and tag the commit.
- `git push`: Push the bumped version.
- `make dist_build`: Create the .whl and .tar.gz distributions.
- `make dist_upload`: Publish to pypi.


### Docker

The base image of the project is `docker_base.Dockerfile`, which is used to create images that have only the conda virtual environments needed to run the project. The CI environment uses the image generated by `make docker_build`. While this means that the CI setup is simpler and faster, as you don't have to build the image for the test run in the CI environment, it does mean that you have to run `make docker_build` every time one of your dependencies is updated.

The `docker_base.Dockerfile` uses the multi stage builder pattern, so that (1) your private key doesn't end up in the published image and (2) the published image is as small as possible.

```
$ make docker_build
Sending build context to Docker daemon  7.761MB
Step 1/20 : FROM registry.gitlab.com/mbarkhau/bootstrapit/env_builder AS builder
...
conda create --name myproject_py36 python=3.6 ...
Solving environment: ...working... done
...
conda create --name myproject_py35 python=3.5 ...
Solving environment: ...working... done

docker push
```

As is the case for your local development setup, every version of python that you have configured to be supported is installed in the image. If you want to create a minimal image for a production system, you may wish to trim this down.


### Documentation

Documentation is written in GitHub Flavored Markdown. Typora is a decent cross platform editor.

TODO: `make doc`


### Editor Setup

https://gitlab.com/mbarkhau/straitjacket#editortooling-integration

TODO: Expand how to set up editors, possibly by sharing editor config files?


## Best Practices

While not all practices linked here are followed (they are contradictory to each other in places), reading them will give you a good overview of how different people think about structuring their code in order to minimize common pitfalls.

Please read/view at your leisure:

- Talks:
    - [Stop Writing Classes by Jack Diederich](https://www.youtube.com/watch?v=o9pEzgHorH0)
    - [The Naming of Ducks: Where Dynamic Types Meet Smart Conventions by Brandon Rhodes](https://www.youtube.com/watch?v=YklKUuDpX5c)
    - [Transforming Code into Beautiful, Idiomatic Python by Raymond Hettinger](https://www.youtube.com/watch?v=OSGv2VnC0go)
    - [Beyond PEP 8 -- Best practices for beautiful intelligible code by Raymond Hettinger](https://www.youtube.com/watch?v=wf-BqAjZb8M)
- Articles, Essays, Books:
    - Short ebook for Novice to Intermediate Pythonistas: [How to Make Mistakes in Python](https://www.oreilly.com/programming/free/how-to-make-mistakes-in-python.csp)
    - [The Little Book of Python Anti-Patterns](https://docs.quantifiedcode.com/python-anti-patterns/)
- Style Guides:
    - https://www.python.org/dev/peps/pep-0008/
    - https://github.com/amontalenti/elements-of-python-style
    - https://github.com/google/styleguide/blob/gh-pages/pyguide.md

Keep in mind that all of this is about the form of your code, and catching common pitfalls or gotchas. None of this relieves you of the burden of thinking about your code. The reason to use linters and type checking is not to have a tool to make your code correct, but to support you in making your code correct.

For now I won't go into the effort of writing yet another style guide. Instead, if your code passes `make fmt lint`, then it's acceptable. Every time you encounter a linting error, consider it an opportunity to learn a best practice and look up the error code.

[^1]: Linux, MacOS and [WSL](https://docs.microsoft.com/en-us/windows/wsl/install-win10)
19  Dockerfile
@@ -1,19 +0,0 @@

FROM registry.gitlab.com/mbarkhau/pycalver/base

ADD src/ src/
ADD stubs/ stubs/
ADD test/ test/
ADD requirements/ requirements/
ADD setup.cfg setup.cfg
ADD setup.py setup.py
ADD pylint-ignore.md pylint-ignore.md
ADD README.md README.md
ADD CHANGELOG.md CHANGELOG.md
ADD LICENSE LICENSE
ADD Makefile Makefile
ADD Makefile.bootstrapit.make Makefile.bootstrapit.make
ADD scripts/exit_0_if_empty.py scripts/exit_0_if_empty.py

ENV PYTHONPATH="src/:vendor/"

CMD make lint mypy test_compat
@@ -1,7 +0,0 @@

include LICENSE
include README.md
include CHANGELOG.md
include fastentrypoints.py
include requirements/pypi.txt
graft test
global-exclude *.py[cod]
81  Makefile
@@ -1,81 +0,0 @@

PACKAGE_NAME := bumpver

# This is the python version that is used for:
# - `make fmt`
# - `make ipy`
# - `make lint`
# - `make devtest`
DEVELOPMENT_PYTHON_VERSION := python=3.9

# These must be valid (space separated) conda package names.
# A separate conda environment will be created for each of these.
#
# Some valid options are:
# - python=2.7
# - python=3.5
# - python=3.6
# - python=3.7
# - pypy2.7
# - pypy3.5
SUPPORTED_PYTHON_VERSIONS := python=3.9 pypy3.5 python=2.7


include Makefile.bootstrapit.make


## -- Extra/Custom/Project Specific Tasks --


## Start the development http server in debug mode
## This is just to illustrate how to add your
## extra targets outside of the main makefile.
.PHONY: serve
serve:
	echo "Not Implemented"


COMPAT_TEST_FILES = $(shell ls -1 test/*.py 2>/dev/null | awk '{ printf " compat_"$$0 }')

compat_test/%.py: test/%.py
	@mkdir -p compat_test/;
	$(DEV_ENV)/bin/lib3to6 $< > $@.tmp;
	mv $@.tmp $@;


## Run pytest integration tests
.PHONY: test_compat
test_compat: $(COMPAT_TEST_FILES)
	rm -rf compat_test/fixtures;
	mkdir -p compat_test/fixtures;
	cp -R test/fixtures compat_test/

	# install the package and run the test suite against it.
	rm -rf build/test_wheel;
	mkdir -p build/test_wheel;
	$(DEV_ENV_PY) setup.py bdist_wheel --dist-dir build/test_wheel;

	IFS=' ' read -r -a env_pys <<< "$(CONDA_ENV_BIN_PYTHON_PATHS)"; \
	for i in $${!env_pys[@]}; do \
		env_py=$${env_pys[i]}; \
		$${env_py} -m pip install --upgrade build/test_wheel/*.whl; \
		ENABLE_BACKTRACE=0 PYTHONPATH="" ENV=$${ENV-dev} \
		$${env_py} -m pytest \
			-k "$${PYTEST_FILTER-$${FLTR}}" \
			--verbose compat_test/; \
	done;

	rm -rf compat_test/


pycalver_deps.svg:
	pydeps src/pycalver \
		--no-show --noise-level 3 \
		--reverse --include-missing \
		-x 'click.*' 'toml.*' 'pretty_traceback.*' \
		-o pycalver_deps.svg


## Update cli reference in README.md
README.md: src/pycalver2/cli.py scripts/update_readme_examples.py Makefile
	@git add README.md
	@$(DEV_ENV)/bin/python scripts/update_readme_examples.py
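In other words, `test_compat` transpiles the test suite with lib3to6, builds a wheel, installs it into every configured conda environment, and runs pytest against the installed package. Locally that boils down to (a sketch; assumes the conda environments have been created):

```shell
$ make conda          # create/update the envs listed in SUPPORTED_PYTHON_VERSIONS
$ make test_compat    # build the wheel, install it into each env, run pytest against it
```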
|
|
@ -1,611 +0,0 @@
|
|||
# Helpful Links
|
||||
|
||||
# http://clarkgrubb.com/makefile-style-guide
|
||||
# https://explainshell.com
|
||||
# https://stackoverflow.com/questions/448910
|
||||
# https://shiroyasha.svbtle.com/escape-sequences-a-quick-guide-1
|
||||
|
||||
SHELL := /bin/bash
|
||||
.SHELLFLAGS := -O extglob -eo pipefail -c
|
||||
.DEFAULT_GOAL := help
|
||||
.SUFFIXES:
|
||||
|
||||
PROJECT_DIR := $(notdir $(abspath .))
|
||||
|
||||
ifndef DEVELOPMENT_PYTHON_VERSION
|
||||
DEVELOPMENT_PYTHON_VERSION := python=3.8
|
||||
endif
|
||||
|
||||
ifndef SUPPORTED_PYTHON_VERSIONS
|
||||
SUPPORTED_PYTHON_VERSIONS := $(DEVELOPMENT_PYTHON_VERSION)
|
||||
endif
|
||||
|
||||
PKG_NAME := $(PACKAGE_NAME)
|
||||
MODULE_NAME := $(shell echo $(subst -,_,$(PACKAGE_NAME)) | tr A-Z a-z)
|
||||
|
||||
# TODO (mb 2018-09-23): Support for bash on windows
|
||||
# perhaps we need to install conda using this
|
||||
# https://repo.continuum.io/miniconda/Miniconda3-latest-Windows-x86_64.exe
|
||||
PLATFORM = $(shell uname -s)
|
||||
|
||||
# miniconda is shared between projects
|
||||
CONDA_ROOT := $(shell if [[ -d /opt/conda/envs ]]; then echo "/opt/conda"; else echo "$$HOME/miniconda3"; fi;)
|
||||
CONDA_BIN := $(CONDA_ROOT)/bin/conda
|
||||
|
||||
ENV_PREFIX := $(CONDA_ROOT)/envs
|
||||
|
||||
DEV_ENV_NAME := \
|
||||
$(subst pypy,$(PKG_NAME)_pypy,$(subst python=,$(PKG_NAME)_py,$(subst .,,$(DEVELOPMENT_PYTHON_VERSION))))
|
||||
|
||||
CONDA_ENV_NAMES := \
|
||||
$(subst pypy,$(PKG_NAME)_pypy,$(subst python=,$(PKG_NAME)_py,$(subst .,,$(SUPPORTED_PYTHON_VERSIONS))))
|
||||
|
||||
CONDA_ENV_PATHS := \
|
||||
$(subst pypy,$(ENV_PREFIX)/$(PKG_NAME)_pypy,$(subst python=,$(ENV_PREFIX)/$(PKG_NAME)_py,$(subst .,,$(SUPPORTED_PYTHON_VERSIONS))))
|
||||
|
||||
# envname/bin/python is unfortunately not always the correct
|
||||
# interpreter. In the case of pypy it is either envname/bin/pypy or
|
||||
# envname/bin/pypy3
|
||||
CONDA_ENV_BIN_PYTHON_PATHS := \
|
||||
$(shell echo "$(CONDA_ENV_PATHS)" \
|
||||
| sed 's!\(_py[[:digit:]]\{1,\}\)!\1/bin/python!g' \
|
||||
| sed 's!\(_pypy2[[:digit:]]\)!\1/bin/pypy!g' \
|
||||
| sed 's!\(_pypy3[[:digit:]]\)!\1/bin/pypy3!g' \
|
||||
)
|
||||
|
||||
empty :=
|
||||
literal_space := $(empty) $(empty)
|
||||
|
||||
BDIST_WHEEL_PYTHON_TAG := py2.py3
|
||||
|
||||
SDIST_FILE_CMD = ls -1t dist/*.tar.gz | head -n 1
|
||||
|
||||
BDIST_WHEEL_FILE_CMD = ls -1t dist/*.whl | head -n 1
|
||||
|
||||
|
||||
# default version for development
|
||||
DEV_ENV := $(ENV_PREFIX)/$(DEV_ENV_NAME)
|
||||
DEV_ENV_PY := $(DEV_ENV)/bin/python
|
||||
|
||||
DOCKER := $(shell which docker)
|
||||
|
||||
DOCKER_BASE_IMAGE := registry.gitlab.com/mbarkhau/pycalver/base
|
||||
|
||||
GIT_HEAD_REV = $(shell git rev-parse --short HEAD)
|
||||
DOCKER_IMAGE_VERSION = $(shell date -u +'%Y%m%dt%H%M%S')_$(GIT_HEAD_REV)
|
||||
|
||||
MAX_LINE_LEN = $(shell grep 'max-line-length' setup.cfg | sed 's![^0-9]\{1,\}!!')
|
||||
|
||||
|
||||
build/envs.txt: requirements/conda.txt
|
||||
@mkdir -p build/;
|
||||
|
||||
@if [[ ! -f $(CONDA_BIN) ]]; then \
|
||||
echo "installing miniconda ..."; \
|
||||
if [[ $(PLATFORM) == "Linux" ]]; then \
|
||||
curl "https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh" --location \
|
||||
> build/miniconda3.sh; \
|
||||
elif [[ $(PLATFORM) == "MINGW64_NT-10.0" ]]; then \
|
||||
curl "https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh" --location \
|
||||
> build/miniconda3.sh; \
|
||||
elif [[ $(PLATFORM) == "Darwin" ]]; then \
|
||||
curl "https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh" --location \
|
||||
> build/miniconda3.sh; \
|
||||
fi; \
|
||||
bash build/miniconda3.sh -b -p $(CONDA_ROOT); \
|
||||
rm build/miniconda3.sh; \
|
||||
fi
|
||||
|
||||
rm -f build/envs.txt.tmp;
|
||||
|
||||
@SUPPORTED_PYTHON_VERSIONS="$(SUPPORTED_PYTHON_VERSIONS)" \
|
||||
CONDA_ENV_NAMES="$(CONDA_ENV_NAMES)" \
|
||||
CONDA_ENV_PATHS="$(CONDA_ENV_PATHS)" \
|
||||
CONDA_ENV_BIN_PYTHON_PATHS="$(CONDA_ENV_BIN_PYTHON_PATHS)" \
|
||||
CONDA_BIN="$(CONDA_BIN)" \
|
||||
bash scripts/setup_conda_envs.sh;
|
||||
|
||||
$(CONDA_BIN) env list \
|
||||
| grep $(PKG_NAME) \
|
||||
| rev | cut -d " " -f1 \
|
||||
| rev | sort >> build/envs.txt.tmp;
|
||||
|
||||
mv build/envs.txt.tmp build/envs.txt;
|
||||
|
||||
|
||||
build/deps.txt: build/envs.txt requirements/*.txt
|
||||
@mkdir -p build/;
|
||||
|
||||
@SUPPORTED_PYTHON_VERSIONS="$(SUPPORTED_PYTHON_VERSIONS)" \
|
||||
CONDA_ENV_NAMES="$(CONDA_ENV_NAMES)" \
|
||||
CONDA_ENV_PATHS="$(CONDA_ENV_PATHS)" \
|
||||
CONDA_ENV_BIN_PYTHON_PATHS="$(CONDA_ENV_BIN_PYTHON_PATHS)" \
|
||||
CONDA_BIN="$(CONDA_BIN)" \
|
||||
bash scripts/update_conda_env_deps.sh;
|
||||
|
||||
@echo "updating $(DEV_ENV_NAME) development deps ...";
|
||||
|
||||
@$(DEV_ENV_PY) -m pip install \
|
||||
--disable-pip-version-check --upgrade \
|
||||
--requirement=requirements/integration.txt;
|
||||
|
||||
@$(DEV_ENV_PY) -m pip install \
|
||||
--disable-pip-version-check --upgrade \
|
||||
--requirement=requirements/development.txt;
|
||||
|
||||
@echo "updating local vendor dep copies ...";
|
||||
|
||||
@$(DEV_ENV_PY) -m pip install \
|
||||
--upgrade --disable-pip-version-check \
|
||||
--no-deps --target=./vendor \
|
||||
--requirement=requirements/vendor.txt;
|
||||
|
||||
@rm -f build/deps.txt.tmp;
|
||||
|
||||
@for env_py in $(CONDA_ENV_BIN_PYTHON_PATHS); do \
|
||||
printf "\n# pip freeze for $${env_py}:\n" >> build/deps.txt.tmp; \
|
||||
$${env_py} -m pip freeze >> build/deps.txt.tmp; \
|
||||
printf "\n\n" >> build/deps.txt.tmp; \
|
||||
done
|
||||
|
||||
@mv build/deps.txt.tmp build/deps.txt
|
||||
|
||||
|
||||
## Short help message for each task.
|
||||
.PHONY: help
|
||||
help:
|
||||
@awk '{ \
|
||||
if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \
|
||||
helpCommand = substr($$0, index($$0, ":") + 2); \
|
||||
if (helpMessage) { \
|
||||
printf "\033[36m%-20s\033[0m %s\n", \
|
||||
helpCommand, helpMessage; \
|
||||
helpMessage = ""; \
|
||||
} \
|
||||
} else if ($$0 ~ /^[a-zA-Z\-\_0-9.\/]+:/) { \
|
||||
helpCommand = substr($$0, 0, index($$0, ":")); \
|
||||
if (helpMessage) { \
|
||||
printf "\033[36m%-20s\033[0m %s\n", \
|
||||
helpCommand, helpMessage; \
|
||||
helpMessage = ""; \
|
||||
} \
|
||||
} else if ($$0 ~ /^##/) { \
|
||||
if (! (helpMessage)) { \
|
||||
helpMessage = substr($$0, 3); \
|
||||
} \
|
||||
} else { \
|
||||
if (helpMessage) { \
|
||||
print " "helpMessage \
|
||||
} \
|
||||
helpMessage = ""; \
|
||||
} \
|
||||
}' \
|
||||
Makefile.bootstrapit.make Makefile
|
||||
|
||||
@if [[ ! -f $(DEV_ENV_PY) ]]; then \
|
||||
echo "Missing python interpreter at $(DEV_ENV_PY) !"; \
|
||||
echo "You problably want to first setup the virtual environments:"; \
|
||||
echo ""; \
|
||||
echo " make conda"; \
|
||||
echo ""; \
|
||||
exit 0; \
|
||||
fi
|
||||
|
||||
@if [[ ! -f $(CONDA_BIN) ]]; then \
|
||||
echo "No conda installation found!"; \
|
||||
echo "You problably want to first setup the virtual environments:"; \
|
||||
echo ""; \
|
||||
echo " make conda"; \
|
||||
echo ""; \
|
||||
exit 0; \
|
||||
fi
|
||||
|
||||
|
||||
## Full help message for each task.
|
||||
.PHONY: helpverbose
|
||||
helpverbose:
|
||||
@printf "Available make targets for \033[97m$(PKG_NAME)\033[0m:\n";
|
||||
|
||||
@awk '{ \
|
||||
if ($$0 ~ /^.PHONY: [a-zA-Z\-\_0-9]+$$/) { \
|
||||
helpCommand = substr($$0, index($$0, ":") + 2); \
|
||||
if (helpMessage) { \
|
||||
printf "\033[36m%-20s\033[0m %s\n", \
|
||||
helpCommand, helpMessage; \
|
||||
helpMessage = ""; \
|
||||
} \
|
||||
} else if ($$0 ~ /^[a-zA-Z\-\_0-9.\/]+:/) { \
|
||||
helpCommand = substr($$0, 0, index($$0, ":")); \
|
||||
if (helpMessage) { \
|
||||
printf "\033[36m%-20s\033[0m %s\n", \
|
||||
helpCommand, helpMessage; \
|
||||
helpMessage = ""; \
|
||||
} \
|
||||
} else if ($$0 ~ /^##/) { \
|
||||
if (helpMessage) { \
|
||||
helpMessage = helpMessage"\n "substr($$0, 3); \
|
||||
} else { \
|
||||
helpMessage = substr($$0, 3); \
|
||||
} \
|
||||
} else { \
|
||||
if (helpMessage) { \
|
||||
print "\n "helpMessage"\n" \
|
||||
} \
|
||||
helpMessage = ""; \
|
||||
} \
|
||||
}' \
|
||||
Makefile.bootstrapit.make Makefile
|
||||
|
||||
|
||||
## -- Project Setup --
|
||||
|
||||
|
||||
## Delete conda envs and cache 💩
|
||||
.PHONY: clean
|
||||
clean:
|
||||
@for env_name in $(CONDA_ENV_NAMES); do \
|
||||
env_py="$(ENV_PREFIX)/$${env_name}/bin/python"; \
|
||||
if [[ -f $${env_py} ]]; then \
|
||||
$(CONDA_BIN) env remove --name $${env_name} --yes; \
|
||||
fi; \
|
||||
done
|
||||
|
||||
rm -f build/envs.txt
|
||||
rm -f build/deps.txt
|
||||
rm -rf vendor/
|
||||
rm -rf .mypy_cache/
|
||||
rm -rf .pytest_cache/
|
||||
rm -rf __pycache__/
|
||||
rm -rf src/__pycache__/
|
||||
rm -rf vendor/__pycache__/
|
||||
@printf "\n setup/update completed ✨ 🍰 ✨ \n\n"
|
||||
|
||||
|
||||
## Force update of dependencies by removing marker files
|
||||
## Use this when you know an external dependency was
|
||||
## updated, but that is not reflected in your
|
||||
## requirements files.
|
||||
##
|
||||
## Usage: make force update
|
||||
.PHONY: force
|
||||
force:
|
||||
rm -f build/envs.txt
|
||||
rm -f build/deps.txt
|
||||
rm -rf vendor/
|
||||
rm -rf .mypy_cache/
|
||||
rm -rf .pytest_cache/
|
||||
rm -rf __pycache__/
|
||||
rm -rf src/__pycache__/
|
||||
rm -rf vendor/__pycache__/
|
||||
|
||||
|
||||
## Create/Update python virtual environments
|
||||
.PHONY: conda
|
||||
conda: build/deps.txt
|
||||
|
||||
|
||||
## Install git pre-push hooks
|
||||
.PHONY: git_hooks
|
||||
git_hooks:
|
||||
@rm -f "$(PWD)/.git/hooks/pre-push"
|
||||
ln -s "$(PWD)/scripts/pre-push-hook.sh" "$(PWD)/.git/hooks/pre-push"
|
||||
|
||||
|
||||
## -- Integration --
|
||||
|
||||
|
||||
## Run isort with --check-only
|
||||
.PHONY: lint_isort
|
||||
lint_isort:
|
||||
@printf "isort ...\n"
|
||||
@$(DEV_ENV)/bin/isort \
|
||||
--check-only \
|
||||
--line-width=$(MAX_LINE_LEN) \
|
||||
--project $(MODULE_NAME) \
|
||||
src/ test/
|
||||
@printf "\e[1F\e[9C ok\n"
|
||||
|
||||
|
||||
## Run sjfmt with --check
|
||||
.PHONY: lint_fmt
|
||||
lint_fmt:
|
||||
@printf "sjfmt ...\n"
|
||||
@$(DEV_ENV)/bin/sjfmt \
|
||||
--target-version=py36 \
|
||||
--skip-string-normalization \
|
||||
--line-length=$(MAX_LINE_LEN) \
|
||||
--check \
|
||||
src/ test/ 2>&1 | sed "/All done/d" | sed "/left unchanged/d"
|
||||
@printf "\e[1F\e[9C ok\n"
|
||||
|
||||
|
||||
## Run flake8
|
||||
.PHONY: lint_flake8
|
||||
lint_flake8:
|
||||
@rm -f reports/flake8*;
|
||||
@mkdir -p "reports/";
|
||||
|
||||
@printf "flake8 ..\n"
|
||||
@$(DEV_ENV)/bin/flake8 src/ --tee --output-file reports/flake8.txt || exit 0;
|
||||
@$(DEV_ENV)/bin/flake8_junit reports/flake8.txt reports/flake8.xml >> /dev/null;
|
||||
@$(DEV_ENV_PY) scripts/exit_0_if_empty.py reports/flake8.txt;
|
||||
|
||||
@printf "\e[1F\e[9C ok\n"
|
||||
|
||||
|
||||
## Run pylint --errors-only.
|
||||
.PHONY: lint_pylint_errors
|
||||
lint_pylint_errors:
|
||||
@printf "pylint ..\n";
|
||||
@$(DEV_ENV)/bin/pylint --errors-only --jobs=4 --rcfile=setup.cfg \
|
||||
src/ test/
|
||||
@printf "\e[1F\e[9C ok\n"
|
||||
|
||||
|
||||
## Run pylint.
|
||||
.PHONY: lint_pylint
|
||||
lint_pylint:
|
||||
@mkdir -p "reports/";
|
||||
|
||||
@printf "pylint ..\n";
|
||||
@$(DEV_ENV)/bin/pylint-ignore --rcfile=setup.cfg \
|
||||
src/ test/
|
||||
@printf "\e[1F\e[9C ok\n"
|
||||
|
||||
|
||||
## Run pylint-ignore --update-ignorefile.
|
||||
.PHONY: pylint_ignore
|
||||
pylint_ignore:
|
||||
$(DEV_ENV)/bin/pylint-ignore --rcfile=setup.cfg \
|
||||
src/ test/ --update-ignorefile
|
||||
|
||||
|
||||
## Run flake8 linter and check for fmt
|
||||
.PHONY: lint
|
||||
lint: lint_isort lint_fmt lint_flake8 lint_pylint
|
||||
|
||||
|
||||
## Run mypy type checker
|
||||
.PHONY: mypy
|
||||
mypy:
|
||||
@rm -rf ".mypy_cache";
|
||||
@rm -rf "reports/mypycov";
|
||||
@mkdir -p "reports/";
|
||||
|
||||
@printf "mypy ....\n"
|
||||
@MYPYPATH=stubs/:vendor/ $(DEV_ENV_PY) -m mypy \
|
||||
--html-report reports/mypycov \
|
||||
--no-error-summary \
|
||||
src/ | sed "/Generated HTML report/d"
|
||||
@printf "\e[1F\e[9C ok\n"
|
||||
|
||||
|
||||
## Run pytest unit and integration tests
|
||||
.PHONY: test
|
||||
test:
|
||||
@rm -rf ".pytest_cache";
|
||||
@rm -rf "src/__pycache__";
|
||||
@rm -rf "test/__pycache__";
|
||||
@rm -rf "reports/testcov/";
|
||||
@rm -f "reports/pytest*";
|
||||
@mkdir -p "reports/";
|
||||
|
||||
# First we test the local source tree using the dev environment
|
||||
ENV=$${ENV-dev} \
|
||||
PYTHONPATH=src/:vendor/:$$PYTHONPATH \
|
||||
PATH=$(DEV_ENV)/bin:$$PATH \
|
||||
$(DEV_ENV_PY) -m pytest -v \
|
||||
--doctest-modules \
|
||||
--verbose \
|
||||
--cov-report "html:reports/testcov/" \
|
||||
--cov-report term \
|
||||
--html=reports/pytest/index.html \
|
||||
--junitxml reports/pytest.xml \
|
||||
-k "$${PYTEST_FILTER-$${FLTR}}" \
|
||||
$(shell cd src/ && ls -1 */__init__.py | awk '{ sub(/\/__init__.py/, "", $$1); print "--cov "$$1 }') \
|
||||
test/ src/;
|
||||
|
||||
# Next we install the package and run the test suite against it.
|
||||
|
||||
# IFS=' ' read -r -a env_py_paths <<< "$(CONDA_ENV_BIN_PYTHON_PATHS)"; \
|
||||
# for i in $${!env_py_paths[@]}; do \
|
||||
# env_py=$${env_py_paths[i]}; \
|
||||
# $${env_py} -m pip install --upgrade .; \
|
||||
# PYTHONPATH="" ENV=$${ENV-dev} $${env_py} -m pytest test/; \
|
||||
# done;
|
||||
|
||||
@rm -rf ".pytest_cache";
|
||||
@rm -rf "src/__pycache__";
|
||||
@rm -rf "test/__pycache__";
|
||||
|
||||
|
||||
## Run import sorting on src/ and test/
|
||||
.PHONY: fmt_isort
|
||||
fmt_isort:
|
||||
@$(DEV_ENV)/bin/isort \
|
||||
--line-width=$(MAX_LINE_LEN) \
|
||||
--project $(MODULE_NAME) \
|
||||
src/ test/;
|
||||
|
||||
|
||||
## Run code formatter on src/ and test/
|
||||
.PHONY: fmt_sjfmt
|
||||
fmt_sjfmt:
|
||||
@$(DEV_ENV)/bin/sjfmt \
|
||||
--target-version=py36 \
|
||||
--skip-string-normalization \
|
||||
--line-length=$(MAX_LINE_LEN) \
|
||||
src/ test/;
|
||||
|
||||
|
||||
## Run code formatters
|
||||
.PHONY: fmt
|
||||
fmt: fmt_isort fmt_sjfmt
|
||||
|
||||
|
||||
## -- Helpers --
|
||||
|
||||
|
||||
## Shortcut for make fmt lint mypy devtest test
|
||||
.PHONY: check
|
||||
check: fmt lint mypy devtest test
|
||||
|
||||
|
||||
## Start subshell with environ variables set.
|
||||
.PHONY: env_subshell
|
||||
env_subshell:
|
||||
@bash --init-file <(echo '\
|
||||
source $$HOME/.bashrc; \
|
||||
source $(CONDA_ROOT)/etc/profile.d/conda.sh \
|
||||
export ENV=$${ENV-dev}; \
|
||||
export PYTHONPATH="src/:vendor/:$$PYTHONPATH"; \
|
||||
conda activate $(DEV_ENV_NAME) \
|
||||
')
|
||||
|
||||
|
||||
## Usage: "source ./activate", to deactivate: "deactivate"
|
||||
.PHONY: activate
|
||||
activate:
|
||||
@echo 'source $(CONDA_ROOT)/etc/profile.d/conda.sh;'
|
||||
@echo 'if [[ -z $$ENV ]]; then'
|
||||
@echo ' export _env_before_activate=$${ENV};'
|
||||
@echo 'fi'
|
||||
@echo 'if [[ -z $$PYTHONPATH ]]; then'
|
||||
@echo ' export _pythonpath_before_activate=$${PYTHONPATH};'
|
||||
@echo 'fi'
|
||||
@echo 'export ENV=$${ENV-dev};'
|
||||
@echo 'export PYTHONPATH="src/:vendor/:$$PYTHONPATH";'
|
||||
@echo 'conda activate $(DEV_ENV_NAME);'
|
||||
|
||||
@echo 'function deactivate {'
|
||||
@echo ' if [[ -z $${_env_before_activate} ]]; then'
|
||||
@echo ' export ENV=$${_env_before_activate}; '
|
||||
@echo ' else'
|
||||
@echo ' unset ENV;'
|
||||
@echo ' fi'
|
||||
@echo ' if [[ -z $${_pythonpath_before_activate} ]]; then'
|
||||
@echo ' export PYTHONPATH=$${_pythonpath_before_activate}; '
|
||||
@echo ' else'
|
||||
@echo ' unset PYTHONPATH;'
|
||||
@echo ' fi'
|
||||
@echo ' unset _env_before_activate;'
|
||||
@echo ' unset _pythonpath_before_activate;'
|
||||
@echo ' conda deactivate;'
|
||||
@echo '};'
|
||||
|
||||
|
||||
## Drop into an ipython shell with correct env variables set
|
||||
.PHONY: ipy
|
||||
ipy:
|
||||
@ENV=$${ENV-dev} \
|
||||
PYTHONPATH=src/:vendor/:$$PYTHONPATH \
|
||||
PATH=$(DEV_ENV)/bin:$$PATH \
|
||||
$(DEV_ENV)/bin/ipython
|
||||
|
||||
|
||||
## Like `make test`, but with debug parameters
|
||||
.PHONY: devtest
|
||||
devtest:
|
||||
@rm -rf "src/__pycache__";
|
||||
@rm -rf "test/__pycache__";
|
||||
|
||||
ENV=$${ENV-dev} \
|
||||
PYTHONPATH=src/:vendor/:$$PYTHONPATH \
|
||||
PATH=$(DEV_ENV)/bin:$$PATH \
|
||||
$(DEV_ENV_PY) -m pytest -v \
|
||||
--doctest-modules \
|
||||
--no-cov \
|
||||
--durations 5 \
|
||||
--verbose \
|
||||
--capture=no \
|
||||
--exitfirst \
|
||||
--failed-first \
|
||||
-k "$${PYTEST_FILTER-$${FLTR}}" \
|
||||
test/ src/;
|
||||
|
||||
@rm -rf "src/__pycache__";
|
||||
@rm -rf "test/__pycache__";
|
||||
|
||||
|
||||
## Run `make lint mypy test` using docker
|
||||
.PHONY: citest
|
||||
citest:
|
||||
$(DOCKER) build --file Dockerfile --tag tmp_citest_$(PKG_NAME) .
|
||||
$(DOCKER) run --tty tmp_citest_$(PKG_NAME) make lint mypy test test_compat
|
||||
|
||||
|
||||
## -- Build/Deploy --
|
||||
|
||||
|
||||
# Generate Documentation
|
||||
# .PHONY: doc
|
||||
# doc:
|
||||
# echo "Not Implemented"
|
||||
|
||||
|
||||
## Freeze dependencies of the current development env.
|
||||
## The requirements files this produces should be used
|
||||
## in order to have reproducable builds, otherwise you
|
||||
## should minimize the number of pinned versions in
|
||||
## your requirements.
|
||||
.PHONY: freeze
|
||||
freeze:
|
||||
$(DEV_ENV_PY) -m pip freeze \
|
||||
> requirements/$(shell date -u +"%Y%m%dt%H%M%S")_freeze.txt
|
||||
|
||||
|
||||
## Bump Version number in all files
|
||||
.PHONY: bump_version
|
||||
bump_version:
|
||||
$(DEV_ENV)/bin/bumpver update;
|
||||
|
||||
|
||||
## Create python sdist and bdist_wheel files
|
||||
.PHONY: dist_build
|
||||
dist_build:
|
||||
@rm -rf build/lib3to6_out/
|
||||
@rm -rf build/lib/
|
||||
@rm -rf build/bdist*
|
||||
$(DEV_ENV_PY) setup.py sdist;
|
||||
$(DEV_ENV_PY) setup.py bdist_wheel --python-tag=$(BDIST_WHEEL_PYTHON_TAG);
|
||||
@rm -rf src/*.egg-info
|
||||
|
||||
|
||||
## Upload sdist and bdist files to pypi
|
||||
.PHONY: dist_upload
|
||||
dist_upload:
|
||||
@if [[ "1" != "1" ]]; then \
|
||||
echo "FAILSAFE! Not publishing a private package."; \
|
||||
echo " To avoid this set IS_PUBLIC=1 in bootstrap.sh and run it."; \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
$(DEV_ENV)/bin/twine check $$($(SDIST_FILE_CMD));
|
||||
$(DEV_ENV)/bin/twine check $$($(BDIST_WHEEL_FILE_CMD));
|
||||
$(DEV_ENV)/bin/twine upload --skip-existing \
|
||||
--repository pypi-legacy \
|
||||
$$($(SDIST_FILE_CMD)) $$($(BDIST_WHEEL_FILE_CMD));
|
||||
|
||||
|
||||
## bump_version dist_build dist_upload
|
||||
.PHONY: dist_publish
|
||||
dist_publish: bump_version dist_build dist_upload
|
||||
|
||||
|
||||
## Build docker images. Must be run when dependencies are added
|
||||
## or updated. The main reasons this can fail are:
|
||||
## 1. No ssh key at $(HOME)/.ssh/$(PKG_NAME)_gitlab_runner_id_rsa
|
||||
## (which is needed to install packages from private repos
|
||||
## and is copied into a temp container during the build).
|
||||
## 2. Your docker daemon is not running
|
||||
## 3. You're using WSL and docker is not exposed on tcp://localhost:2375
|
||||
## 4. You're using WSL but didn't do export DOCKER_HOST="tcp://localhost:2375"
|
||||
.PHONY: docker_build
|
||||
docker_build:
|
||||
$(DOCKER) build \
|
||||
--file docker_base.Dockerfile \
|
||||
--tag $(DOCKER_BASE_IMAGE):$(DOCKER_IMAGE_VERSION) \
|
||||
--tag $(DOCKER_BASE_IMAGE) \
|
||||
.;
|
||||
|
||||
$(DOCKER) push $(DOCKER_BASE_IMAGE)
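A small pre-flight sketch for the failure causes listed above; the key filename follows the $(PKG_NAME) pattern from the comment and the WSL port is taken verbatim, neither is verified here:

```
# 1. ssh key that is copied into a temp container during the build
ls "$HOME/.ssh/bumpver_gitlab_runner_id_rsa" || echo "missing ssh key"

# 2. is the docker daemon reachable?
docker info > /dev/null || echo "docker daemon not running"

# 3./4. on WSL, point the client at the TCP-exposed daemon
export DOCKER_HOST="tcp://localhost:2375"

make docker_build
```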
|
||||
|
|
@ -22,7 +22,7 @@ Project/Repo:
|
|||
|
||||
[![MIT License][img_license]][url_license]
|
||||
[![Supported Python Versions][img_pyversions]][url_pyversions]
|
||||
[![CalVer 2023.1124][img_version]][url_version]
|
||||
[![CalVer 2023.1125][img_version]][url_version]
|
||||
[![PyPI Releases][img_pypi]][url_pypi]
|
||||
[![PyPI Downloads][img_downloads]][url_downloads]
|
||||
|
||||
|
|
@ -56,7 +56,7 @@ Code Quality/CI:
|
|||
[img_downloads]: https://pepy.tech/badge/bumpver/month
|
||||
[url_downloads]: https://pepy.tech/project/bumpver
|
||||
|
||||
[img_version]: https://img.shields.io/static/v1.svg?label=CalVer&message=2023.1124&color=blue
|
||||
[img_version]: https://img.shields.io/static/v1.svg?label=CalVer&message=2023.1125&color=blue
|
||||
[url_version]: https://pypi.org/project/bumpver/
|
||||
|
||||
[img_pypi]: https://img.shields.io/badge/PyPI-wheels-green.svg
|
||||
|
|
@ -801,7 +801,7 @@ Then create an initial configuration for your project with `bumpver init`.
|
|||
$ pip install bumpver
|
||||
...
|
||||
Installing collected packages: click toml lexid bumpver
|
||||
Successfully installed bumpver-2023.1124
|
||||
Successfully installed bumpver-2023.1125
|
||||
|
||||
$ cd myproject
|
||||
~/myproject/
|
||||
|
|
|
|||
7
activate
|
|
@ -1,7 +0,0 @@
|
|||
# This is a convenience for development purposes,
|
||||
# for deployments you should set the appropriate
|
||||
# environment variables explicitly and use fully
|
||||
# qualified paths to the interpreter, as is done
|
||||
# for example in the Dockerfile
|
||||
|
||||
source <(make activate;);
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
#!/bin/bash
|
||||
# Bootstrapit Project Configuration
|
||||
|
||||
AUTHOR_NAME="Manuel Barkhau"
|
||||
AUTHOR_EMAIL="mbarkhau@gmail.com"
|
||||
|
||||
KEYWORDS="version bumpver calver semver versioning bumpversion pep440"
|
||||
DESCRIPTION="Bump version numbers in project files."
|
||||
|
||||
LICENSE_ID="MIT"
|
||||
|
||||
PACKAGE_NAME="bumpver"
|
||||
GIT_REPO_NAMESPACE="mbarkhau"
|
||||
GIT_REPO_DOMAIN="github.com"
|
||||
|
||||
PACKAGE_VERSION="2023.1124"
|
||||
|
||||
DEFAULT_PYTHON_VERSION="python=3.8"
|
||||
SUPPORTED_PYTHON_VERSIONS="python=2.7 python=3.6 pypy2.7 pypy3.5 python=3.8"
|
||||
|
||||
DOCKER_REGISTRY_DOMAIN=registry.gitlab.com
|
||||
|
||||
|
||||
IS_PUBLIC=1
|
||||
|
||||
## Download and run the actual update script
|
||||
|
||||
PROJECT_DIR=$(dirname $0)
|
||||
|
||||
if ! [[ -f $PROJECT_DIR/scripts/bootstrapit_update.sh ]]; then
|
||||
mkdir -p "$PROJECT_DIR/scripts/";
|
||||
RAW_FILES_URL="https://gitlab.com/mbarkhau/bootstrapit/raw/master";
|
||||
curl --silent "$RAW_FILES_URL/scripts/bootstrapit_update.sh" \
|
||||
> "$PROJECT_DIR/scripts/bootstrapit_update.sh"
|
||||
fi
|
||||
|
||||
source $PROJECT_DIR/scripts/bootstrapit_update.sh;
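For orientation, a sketch of how this bootstrap file is typically invoked from the project root (on first run it downloads scripts/bootstrapit_update.sh and then sources it, as shown above):

```
# regenerate the bootstrapit-managed project files
bash bootstrap.sh
```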
|
||||
BIN
bumpver_128.png
Binary file not shown.
428
bumpver_logo.svg
|
|
@ -1,428 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="128"
|
||||
height="128"
|
||||
viewBox="0 0 33.866666 33.866668"
|
||||
version="1.1"
|
||||
id="svg8"
|
||||
inkscape:version="0.92.5 (2060ec1f9f, 2020-04-08)"
|
||||
sodipodi:docname="pycalver1k.svg"
|
||||
inkscape:export-filename="/home/mbarkhau/foss/pycalver/pycalver1k2_128.png"
|
||||
inkscape:export-xdpi="96"
|
||||
inkscape:export-ydpi="96">
|
||||
<defs
|
||||
id="defs2">
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient1163">
|
||||
<stop
|
||||
style="stop-color:#e89a00;stop-opacity:1"
|
||||
offset="0"
|
||||
id="stop1159" />
|
||||
<stop
|
||||
style="stop-color:#ffd42a;stop-opacity:1"
|
||||
offset="1"
|
||||
id="stop1161" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient1163"
|
||||
id="linearGradient1165"
|
||||
x1="17.506153"
|
||||
y1="278.55835"
|
||||
x2="17.63979"
|
||||
y2="282.83472"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(0.94649427,0,0,1.0042072,0.90603086,-0.95465177)" />
|
||||
<clipPath
|
||||
clipPathUnits="userSpaceOnUse"
|
||||
id="clipPath1193">
|
||||
<g
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="use1195"
|
||||
inkscape:label="Clip">
|
||||
<g
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="g1278"
|
||||
transform="translate(0,1.1994176)">
|
||||
<circle
|
||||
cy="270.14941"
|
||||
cx="23.779428"
|
||||
id="circle1274"
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
r="2.3812499" />
|
||||
<rect
|
||||
y="264.03714"
|
||||
x="22.339876"
|
||||
height="6.5171237"
|
||||
width="2.8791037"
|
||||
id="rect1276"
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
</g>
|
||||
<g
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="g1284"
|
||||
transform="translate(0,1.5112466)">
|
||||
<circle
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
id="circle1280"
|
||||
cx="10.087241"
|
||||
cy="269.573"
|
||||
r="2.3812499" />
|
||||
<rect
|
||||
style="fill:#45b848;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
id="rect1282"
|
||||
width="2.8791037"
|
||||
height="6.5171237"
|
||||
x="8.6476898"
|
||||
y="263.9899" />
|
||||
</g>
|
||||
</g>
|
||||
</clipPath>
|
||||
<mask
|
||||
maskUnits="userSpaceOnUse"
|
||||
id="mask1425">
|
||||
<g
|
||||
id="g1441"
|
||||
transform="translate(-1.5416735,-2.3386165)">
|
||||
<rect
|
||||
y="267.51743"
|
||||
x="-0.8018086"
|
||||
height="9.5214758"
|
||||
width="38.553631"
|
||||
id="rect1427"
|
||||
style="opacity:1;fill:#ffffff;fill-opacity:1;stroke:#aa8800;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers" />
|
||||
<g
|
||||
transform="translate(1.541672,3.8452511)"
|
||||
id="g1433"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1">
|
||||
<circle
|
||||
r="2.3812499"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
id="circle1429"
|
||||
cx="23.779428"
|
||||
cy="270.14941" />
|
||||
<rect
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
id="rect1431"
|
||||
width="2.8791037"
|
||||
height="6.5171237"
|
||||
x="22.339876"
|
||||
y="264.03714" />
|
||||
</g>
|
||||
<g
|
||||
transform="translate(1.541672,4.1570801)"
|
||||
id="g1439"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1">
|
||||
<circle
|
||||
r="2.3812499"
|
||||
cy="269.573"
|
||||
cx="10.087241"
|
||||
id="circle1435"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
<rect
|
||||
y="263.9899"
|
||||
x="8.6476898"
|
||||
height="6.5171237"
|
||||
width="2.8791037"
|
||||
id="rect1437"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
</g>
|
||||
</g>
|
||||
</mask>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient1163"
|
||||
id="linearGradient1165-3"
|
||||
x1="17.605284"
|
||||
y1="269.99991"
|
||||
x2="17.63979"
|
||||
y2="282.83472"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(1.0541065,0,0,0.87055183,-0.916204,-223.64659)" />
|
||||
<mask
|
||||
maskUnits="userSpaceOnUse"
|
||||
id="mask1129">
|
||||
<g
|
||||
id="g1145"
|
||||
transform="translate(-1.5416733,-2.3386129)">
|
||||
<rect
|
||||
y="267.51743"
|
||||
x="-0.8018086"
|
||||
height="9.5214758"
|
||||
width="38.553631"
|
||||
id="rect1131"
|
||||
style="opacity:1;fill:#ffffff;fill-opacity:1;stroke:#aa8800;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers" />
|
||||
<g
|
||||
transform="translate(0.07525963,4.2889947)"
|
||||
id="g1137"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1">
|
||||
<ellipse
|
||||
cy="268.73132"
|
||||
cx="10.111843"
|
||||
id="ellipse1133"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
rx="2.2523122"
|
||||
ry="2.629046" />
|
||||
<rect
|
||||
y="263.9899"
|
||||
x="8.7405252"
|
||||
height="6.5171237"
|
||||
width="2.7426364"
|
||||
id="rect1135"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
</g>
|
||||
<g
|
||||
transform="translate(16.669414,4.2889947)"
|
||||
id="g1143"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1">
|
||||
<ellipse
|
||||
cy="268.73132"
|
||||
cx="10.111843"
|
||||
id="ellipse1139"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
rx="2.2523122"
|
||||
ry="2.629046" />
|
||||
<rect
|
||||
y="263.9899"
|
||||
x="8.7405252"
|
||||
height="6.5171237"
|
||||
width="2.7426364"
|
||||
id="rect1141"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
</g>
|
||||
</g>
|
||||
</mask>
|
||||
<mask
|
||||
maskUnits="userSpaceOnUse"
|
||||
id="mask1129-3">
|
||||
<g
|
||||
id="g1145-6"
|
||||
transform="translate(-1.5416733,-2.3386129)">
|
||||
<rect
|
||||
y="267.51743"
|
||||
x="-0.8018086"
|
||||
height="9.5214758"
|
||||
width="38.553631"
|
||||
id="rect1131-7"
|
||||
style="opacity:1;fill:#ffffff;fill-opacity:1;stroke:#aa8800;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers" />
|
||||
<g
|
||||
transform="translate(0.07525963,4.2889947)"
|
||||
id="g1137-5"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1">
|
||||
<ellipse
|
||||
cy="268.73132"
|
||||
cx="10.111843"
|
||||
id="ellipse1133-3"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
rx="2.2523122"
|
||||
ry="2.629046" />
|
||||
<rect
|
||||
y="263.9899"
|
||||
x="8.7405252"
|
||||
height="6.5171237"
|
||||
width="2.7426364"
|
||||
id="rect1135-5"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
</g>
|
||||
<g
|
||||
transform="translate(16.669414,4.2889947)"
|
||||
id="g1143-6"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1">
|
||||
<ellipse
|
||||
cy="268.73132"
|
||||
cx="10.111843"
|
||||
id="ellipse1139-2"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
rx="2.2523122"
|
||||
ry="2.629046" />
|
||||
<rect
|
||||
y="263.9899"
|
||||
x="8.7405252"
|
||||
height="6.5171237"
|
||||
width="2.7426364"
|
||||
id="rect1141-9"
|
||||
style="fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal" />
|
||||
</g>
|
||||
</g>
|
||||
</mask>
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="8"
|
||||
inkscape:cx="108.58142"
|
||||
inkscape:cy="49.637042"
|
||||
inkscape:document-units="mm"
|
||||
inkscape:current-layer="layer8"
|
||||
showgrid="false"
|
||||
units="px"
|
||||
inkscape:window-width="2512"
|
||||
inkscape:window-height="1376"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:document-rotation="0" />
|
||||
<metadata
|
||||
id="metadata5">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer3"
|
||||
inkscape:label="debug_bg"
|
||||
style="display:none"
|
||||
sodipodi:insensitive="true">
|
||||
<rect
|
||||
style="fill:#ff00ff;fill-opacity:1;stroke:none;stroke-width:1.92171;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
id="rect954"
|
||||
width="33.848568"
|
||||
height="33.868378"
|
||||
x="0.044362877"
|
||||
y="0.0074945446" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer6"
|
||||
inkscape:label="bg"
|
||||
style="display:inline">
|
||||
<path
|
||||
id="rect880"
|
||||
style="display:inline;fill:#ffffff;stroke-width:7.16557"
|
||||
d="m 1.585409,3.7099638 30.618783,-0.052171 0.725957,0.7967657 -0.0046,27.3188725 -0.528162,0.557572 L 1.1974464,32.32312 0.94904837,32.088188 0.93651797,4.3514519 Z"
|
||||
sodipodi:nodetypes="ccccccccc" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer4"
|
||||
inkscape:label="gradient"
|
||||
style="display:inline">
|
||||
<rect
|
||||
style="display:inline;opacity:1;fill:url(#linearGradient1165-3);fill-opacity:1;stroke:#000000;stroke-width:1.21634;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
|
||||
id="rect815"
|
||||
width="29.469652"
|
||||
height="13.742671"
|
||||
x="2.1985073"
|
||||
y="17.262745" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:label="text bottom"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(0,-263.13332)"
|
||||
style="display:inline">
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:14.81669998px;line-height:0.85000002;font-family:Monoid;-inkscape-font-specification:Monoid;letter-spacing:-0.111125px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458299"
|
||||
x="3.1427319"
|
||||
y="292.59897"
|
||||
id="text859"><tspan
|
||||
sodipodi:role="line"
|
||||
x="3.1427319"
|
||||
y="292.59897"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:14.0px;font-family:'Iosevka Term SS05';-inkscape-font-specification:'Iosevka Term SS05 Bold';stroke-width:0.27"
|
||||
id="tspan953">2022</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer9"
|
||||
inkscape:label="frame top"
|
||||
style="display:inline">
|
||||
<rect
|
||||
style="display:inline;opacity:1;fill:#000000;fill-opacity:1;stroke:#ffffff;stroke-width:1.5;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
|
||||
id="rect815-0"
|
||||
width="30.546177"
|
||||
height="13.588312"
|
||||
x="1.6602445"
|
||||
y="-15.922193"
|
||||
transform="scale(1,-1)" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer8"
|
||||
inkscape:label="text top"
|
||||
style="display:inline">
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;font-size:16.93330002px;line-height:1.25;font-family:sans-serif;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.26499999;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
x="4.106391"
|
||||
y="13.507061"
|
||||
id="text921"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan919"
|
||||
x="4.106391"
|
||||
y="13.507061"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:16.93330002px;font-family:'Iosevka Term SS05';-inkscape-font-specification:'Iosevka Term SS05';fill:#ffffff;stroke-width:0.26499999;stroke-miterlimit:4;stroke-dasharray:none">ver</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer2"
|
||||
inkscape:label="frame"
|
||||
style="display:inline">
|
||||
<rect
|
||||
style="display:inline;opacity:1;fill:#000000;fill-opacity:1;stroke:#aa8800;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
|
||||
id="rect1398"
|
||||
width="30.684868"
|
||||
height="0.76833469"
|
||||
x="1.5910856"
|
||||
y="16.622082" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer7"
|
||||
inkscape:label="top frame"
|
||||
sodipodi:insensitive="true"
|
||||
style="display:none">
|
||||
<rect
|
||||
style="display:inline;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:2.32912993;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:normal"
|
||||
id="rect900"
|
||||
width="26.775116"
|
||||
height="6.1376147"
|
||||
x="3.5457754"
|
||||
y="268.57437"
|
||||
clip-path="none"
|
||||
mask="url(#mask1129-3)"
|
||||
transform="matrix(1.0545213,0,0,0.89332824,-0.92322741,-236.38373)" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer5"
|
||||
inkscape:label="pegs"
|
||||
style="display:none"
|
||||
sodipodi:insensitive="true">
|
||||
<rect
|
||||
style="display:inline;opacity:1;fill:#000000;fill-opacity:1;stroke:#aa8800;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:markers fill stroke"
|
||||
id="rect1400"
|
||||
width="1.5874993"
|
||||
height="5.499999"
|
||||
x="7.3927369"
|
||||
y="1.7978847" />
|
||||
<rect
|
||||
style="display:inline;opacity:1;fill:#000000;fill-opacity:1;stroke:#aa8800;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;paint-order:markers fill stroke"
|
||||
id="rect1400-6"
|
||||
width="1.5874993"
|
||||
height="5.499999"
|
||||
x="24.886431"
|
||||
y="1.7978847" />
|
||||
</g>
|
||||
</svg>
|
||||
|
|
|
@ -1,52 +0,0 @@
|
|||
# Stages:
|
||||
# root : Common image, both for the builder and for the final image.
|
||||
# This contains only minimal dependencies required in both cases
|
||||
# for miniconda and the Makefile.
|
||||
# env_builder: stage in which the conda environment is created
|
||||
# and dependencies are installed
|
||||
# base : the final image containing only the required environment files,
|
||||
# and none of the infrastructure required to generate them.
|
||||
|
||||
FROM registry.gitlab.com/mbarkhau/bootstrapit/env_builder AS builder
|
||||
|
||||
RUN mkdir /root/.ssh/ && \
|
||||
ssh-keyscan gitlab.com >> /root/.ssh/known_hosts && \
|
||||
ssh-keyscan registry.gitlab.com >> /root/.ssh/known_hosts
|
||||
|
||||
ADD requirements/ requirements/
|
||||
ADD scripts/ scripts/
|
||||
|
||||
ADD Makefile.bootstrapit.make Makefile.bootstrapit.make
|
||||
ADD Makefile Makefile
|
||||
|
||||
# install envs (relatively stable)
|
||||
ADD requirements/conda.txt requirements/conda.txt
|
||||
RUN make build/envs.txt
|
||||
|
||||
# install python package dependencies (change more often)
|
||||
ADD requirements/ requirements/
|
||||
RUN make conda
|
||||
|
||||
# Deleting pkgs implies that `conda install`
|
||||
# will have to pull all packages again.
|
||||
RUN conda clean --all --yes
|
||||
# Conda docs say that it is not safe to delete pkgs
|
||||
# because there may be symbolic links, so we verify
|
||||
# first that there are no such links.
|
||||
RUN find -L /opt/conda/envs/ -type l | grep "/opt/conda/pkgs" || exit 0
|
||||
|
||||
# The conda install is not usable after this RUN command. Since
|
||||
# we only need /opt/conda/envs/ anyway, this shouldn't be an issue.
|
||||
RUN conda clean --all --yes && \
|
||||
ls -d /opt/conda/* | grep -v envs | xargs rm -rf && \
|
||||
find /opt/conda/ -name "*.exe" | xargs rm -rf && \
|
||||
find /opt/conda/ -name "__pycache__" | xargs rm -rf && \
|
||||
rm -rf /opt/conda/pkgs/
|
||||
|
||||
|
||||
FROM registry.gitlab.com/mbarkhau/bootstrapit/root
|
||||
|
||||
RUN apt-get install --yes mercurial;
|
||||
|
||||
COPY --from=builder /opt/conda/ /opt/conda/
|
||||
COPY --from=builder /vendor/ /vendor
|
||||
|
|
@ -1,113 +0,0 @@
|
|||
# noqa: D300,D400
|
||||
# Copyright (c) 2016, Aaron Christianson
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
|
||||
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
|
||||
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
'''
|
||||
Monkey patch setuptools to write faster console_scripts with this format:
|
||||
|
||||
import sys
|
||||
from mymodule import entry_function
|
||||
sys.exit(entry_function())
|
||||
|
||||
This is better.
|
||||
|
||||
(c) 2016, Aaron Christianson
|
||||
http://github.com/ninjaaron/fast-entry_points
|
||||
'''
|
||||
from setuptools.command import easy_install
|
||||
import re
|
||||
TEMPLATE = r'''
|
||||
# -*- coding: utf-8 -*-
|
||||
# EASY-INSTALL-ENTRY-SCRIPT: '{3}','{4}','{5}'
|
||||
__requires__ = '{3}'
|
||||
import re
|
||||
import sys
|
||||
|
||||
from {0} import {1}
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit({2}())
|
||||
'''.lstrip()
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_args(cls, dist, header=None): # noqa: D205,D400
|
||||
"""
|
||||
Yield write_script() argument tuples for a distribution's
|
||||
console_scripts and gui_scripts entry points.
|
||||
"""
|
||||
if header is None:
|
||||
# pylint: disable=E1101
|
||||
header = cls.get_header()
|
||||
spec = str(dist.as_requirement())
|
||||
for type_ in 'console', 'gui':
|
||||
group = type_ + '_scripts'
|
||||
for name, ep in dist.get_entry_map(group).items():
|
||||
# ensure_safe_name
|
||||
if re.search(r'[\\/]', name):
|
||||
raise ValueError("Path separators not allowed in script names")
|
||||
script_text = TEMPLATE.format(
|
||||
ep.module_name, ep.attrs[0], '.'.join(ep.attrs),
|
||||
spec, group, name)
|
||||
# pylint: disable=E1101
|
||||
args = cls._get_script_args(type_, name, header, script_text)
|
||||
for res in args:
|
||||
yield res
|
||||
|
||||
|
||||
# pylint: disable=E1101
|
||||
easy_install.ScriptWriter.get_args = get_args
|
||||
|
||||
|
||||
def main():
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
dests = sys.argv[1:] or ['.']
|
||||
filename = re.sub(r'\.pyc$', '.py', __file__)
|
||||
|
||||
for dst in dests:
|
||||
shutil.copy(filename, dst)
|
||||
manifest_path = os.path.join(dst, 'MANIFEST.in')
|
||||
setup_path = os.path.join(dst, 'setup.py')
|
||||
|
||||
# Insert the include statement to MANIFEST.in if not present
|
||||
with open(manifest_path, 'a+') as manifest:
|
||||
manifest.seek(0)
|
||||
manifest_content = manifest.read()
|
||||
if 'include fastentrypoints.py' not in manifest_content:
|
||||
manifest.write(('\n' if manifest_content else '') +
|
||||
'include fastentrypoints.py')
|
||||
|
||||
# Insert the import statement to setup.py if not present
|
||||
with open(setup_path, 'a+') as setup:
|
||||
setup.seek(0)
|
||||
setup_content = setup.read()
|
||||
if 'import fastentrypoints' not in setup_content:
|
||||
setup.seek(0)
|
||||
setup.truncate()
|
||||
setup.write('import fastentrypoints\n' + setup_content)
|
||||
41
flake.lock
generated
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1686059680,
|
||||
"narHash": "sha256-sp0WlCIeVczzB0G8f8iyRg3IYW7KG31mI66z7HIZwrI=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "a558f7ac29f50c4b937fb5c102f587678ae1c9fb",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"id": "nixpkgs",
|
||||
"ref": "nixos-23.05",
|
||||
"type": "indirect"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"systems": "systems"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"id": "systems",
|
||||
"type": "indirect"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
47
flake.nix
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
{
|
||||
inputs.nixpkgs.url = "nixpkgs/nixos-23.05";
|
||||
|
||||
outputs = { self, nixpkgs, systems }:
|
||||
let
|
||||
version = "2023.1125";
|
||||
forEachSystem = nixpkgs.lib.genAttrs (import systems);
|
||||
nixpkgsFor = forEachSystem (system: import nixpkgs { inherit system; });
|
||||
in {
|
||||
packages = forEachSystem (system:
|
||||
let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
in {
|
||||
bumpver = pkgs.python3.pkgs.buildPythonApplication rec {
|
||||
pname = "bumpver";
|
||||
inherit version;
|
||||
|
||||
src = ./.;
|
||||
|
||||
propagatedBuildInputs = with pkgs.python3.pkgs; [
|
||||
pathlib2
|
||||
click
|
||||
toml
|
||||
lexid
|
||||
colorama
|
||||
setuptools
|
||||
rich
|
||||
looseversion
|
||||
];
|
||||
|
||||
nativeCheckInputs = [
|
||||
pkgs.python3.pkgs.pytestCheckHook
|
||||
pkgs.git
|
||||
pkgs.mercurial
|
||||
];
|
||||
|
||||
disabledTests = [
|
||||
# fails due to more aggressive setuptools version specifier validation
|
||||
"test_parse_default_pattern"
|
||||
];
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
defaultPackage = forEachSystem (system: self.packages.${system}.bumpver);
|
||||
};
|
||||
}
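A hedged sketch of how the flake above might be consumed; the attribute path follows the packages output defined here, and the result/bin path assumes the usual console-script layout of buildPythonApplication:

```
# build the bumpver package for the current system
nix build .#bumpver

# run the freshly built CLI from the build result
./result/bin/bumpver --help
```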
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
Individual files contain the following tag instead of the full license text.
|
||||
|
||||
This file is part of the bumpver project
|
||||
https://github.com/mbarkhau/bumpver
|
||||
|
||||
Copyright (c) 2018-2023 Manuel Barkhau (mbarkhau@gmail.com) - MIT License
|
||||
SPDX-License-Identifier: MIT
|
||||
|
||||
This enables machine processing of license information based on the SPDX
|
||||
License Identifiers that are here available: https://spdx.org/licenses/
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
# Pylint-Ignore
|
||||
|
||||
**WARNING: This file is programmatically generated.**
|
||||
|
||||
This file is parsed by [`pylint-ignore`](https://pypi.org/project/pylint-ignore/)
|
||||
to determine which
|
||||
[Pylint messages](https://pylint.pycqa.org/en/stable/technical_reference/features.html)
|
||||
should be ignored.
|
||||
|
||||
- Do not edit this file manually.
|
||||
- To update, use `pylint-ignore --update-ignorefile`
|
||||
|
||||
The recommended approach to using `pylint-ignore` is:
|
||||
|
||||
1. If a message refers to a valid issue, update your code rather than
|
||||
ignoring the message.
|
||||
2. If a message should *always* be ignored (globally), then do so
|
||||
via the usual `pylintrc` or `setup.cfg` files rather than this
|
||||
`pylint-ignore.md` file.
|
||||
3. If a message is a false positive, add a comment of this form to your code:
|
||||
`# pylint:disable=<symbol> ; explain why this is a false positive`
|
||||
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
# These dependencies are installed using:
|
||||
#
|
||||
# conda install --channel conda-forge --name <env>
|
||||
#
|
||||
# Conda should be used for
|
||||
#
|
||||
# 1. Binary python packages (numpy, pandas, pillow).
|
||||
# The pypi may not always have binary packages for all platforms
|
||||
# and architectures you want to support. For example, pyblake2 only
|
||||
# has binary wheels for windows on pypi, whereas there are binary
|
||||
# packages on conda-forge (as of Sep 2018).
|
||||
# Binary wheels are becoming more common on the pypi,
|
||||
# so this is less and less of an issue. Most of the time
|
||||
# it should be fine to add the dependency to pypi.txt instead.
|
||||
#
|
||||
# 2. Non python packages (nodejs, typescript).
|
||||
# Using conda for these kinds of dependencies minimizes
|
||||
# installation overhead for developers.
|
||||
|
||||
# https://pypi.org/project/ujson/
|
||||
# UltraJSON is an ultra fast JSON encoder and decoder written
|
||||
# in pure C with bindings for Python 2.5+ and 3.
|
||||
# ujson
|
||||
|
||||
# The hot new pkdf on the block is argon2, winner of
|
||||
# the https://password-hashing.net/ competition.
|
||||
# argon2_cffi
|
||||
|
||||
# https://blake2.net/
|
||||
# BLAKE2 is a cryptographic hash function faster than MD5, SHA-1,
|
||||
# SHA-2, and SHA-3, yet is at least as secure as the latest standard
|
||||
# SHA-3. BLAKE2 has been adopted by many projects due to its high
|
||||
# speed, security, and simplicity.
|
||||
# pyblake2
|
||||
|
||||
# needed for mypy coverage report
|
||||
lxml
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
# These dependencies are installed using:
|
||||
#
|
||||
# pip install --upgrade
|
||||
#
|
||||
# This list should only contain packages related to
|
||||
# local development and debugging. It should not contain
|
||||
# any packages required for production, building or packaging
|
||||
|
||||
# PuDB is a full-screen, console-based visual debugger for Python.
|
||||
# https://documen.tician.de/pudb/
|
||||
pudb
|
||||
|
||||
# Quick-and-dirty debugging output for tired programmers
|
||||
# https://pypi.org/project/q/
|
||||
q
|
||||
|
||||
# pretty-traceback manipulates Python tracebacks to make
|
||||
# them more readable.
|
||||
# https://pypi.org/project/pretty-traceback/
|
||||
pretty-traceback
|
||||
|
||||
# Py-Spy: A sampling profiler for Python programs.
|
||||
# https://github.com/benfred/py-spy
|
||||
# This is good for coarse grained profiling (even on production)
|
||||
py-spy
|
||||
|
||||
# SNAKEVIZ : A browser based viewer for the output of Python’s cProfile.
|
||||
# https://jiffyclub.github.io/snakeviz/
|
||||
# This is good for fine grained profiling (function level/micro optimizations)
|
||||
snakeviz
|
||||
|
||||
# I've yet to find a decent memory profiler for python, feel free to
|
||||
# add one after you've tested it and found it to be actually useful.
|
||||
|
||||
ipython # nuff said
|
||||
|
||||
|
||||
# A command line utility to display dependency tree of the installed Python packages
|
||||
# https://github.com/naiquevin/pipdeptree
|
||||
#
|
||||
# For graph output you will need to:
|
||||
# $ sudo apt install -y graphviz
|
||||
# $ pipdeptree --graph-output svg > requirements/tree.svg
|
||||
pipdeptree
|
||||
graphviz
|
||||
|
||||
# run failed tests first
|
||||
pytest-cache
|
||||
|
||||
# to update the readme examples
|
||||
rich
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
# These dependencies are installed using:
|
||||
#
|
||||
# pip install --upgrade
|
||||
#
|
||||
# This file should only declare dependencies related to code
|
||||
# formatting, linting, testing and packaging.
|
||||
#
|
||||
# No dependencies required for production should be listed here.
|
||||
|
||||
# http://www.pydocstyle.org/en/4.0.0/release_notes.html
|
||||
# Support for Python 2.x and PyPy has been dropped (#340).
|
||||
pydocstyle<4
|
||||
|
||||
|
||||
flake8
|
||||
flake8-bugbear
|
||||
flake8-docstrings
|
||||
flake8-builtins
|
||||
flake8-comprehensions
|
||||
flake8-junit-report
|
||||
flake8-2020
|
||||
pylint==2.12.1
|
||||
pylint-ignore>=2020.1013
|
||||
isort
|
||||
|
||||
mypy==0.942
|
||||
types-setuptools
|
||||
types-toml
|
||||
|
||||
# http://doc.pytest.org/en/latest/py27-py34-deprecation.html
|
||||
# The pytest 4.6 series will be the last to support Python 2.7
|
||||
# and 3.4, and is scheduled to be released by mid-2019.
|
||||
# pytest 5.0 and onwards will support only Python 3.5+.
|
||||
pytest; python_version >= "3.5"
|
||||
pytest<5.0; python_version < "3.5"
|
||||
pytest-cov
|
||||
# https://github.com/pytest-dev/pytest-html/blob/master/CHANGES.rst
|
||||
# pytest-html 2.0+ doesn't support python2.7
|
||||
pytest-html<2.0
|
||||
py
|
||||
|
||||
readme_renderer[md]
|
||||
twine
|
||||
|
||||
md-toc
|
||||
straitjacket>=v202008.1016
|
||||
pycalver
|
||||
lib3to6
|
||||
|
||||
# needed for mypy coverage report
|
||||
lxml
|
||||
|
|
@ -1,23 +0,0 @@
|
|||
# These dependencies are installed using:
|
||||
#
|
||||
# pip install --upgrade
|
||||
# pip install --upgrade --no-deps --target vendor/
|
||||
#
|
||||
# Vendored dependencies are installed both in the virtual
|
||||
# environment as well as in the vendor/ directory. This way:
|
||||
#
|
||||
# 1. All transitive dependencies of a package are installed in
|
||||
# the virtualenv (in the first installation step)
|
||||
# 2. If there is a binary version of the package available, it
|
||||
# will be installed into the virtualenv
|
||||
# 3. In the third step only (--no-deps) the source version of
|
||||
# the (--no-binary) package is installed to vendor/
|
||||
#
|
||||
# This allows us to:
|
||||
#
|
||||
# 1. Enable mypy to check modules of multiple packages.
|
||||
# 2. Easily navigate to the source of a vendored dependency
|
||||
# 3. Use binary versions packages instead of source versions of
|
||||
# packages, simply by not including the vendor/ directory in
|
||||
# the PYTHONPATH. The version from the virtualenv will then
|
||||
# be loaded instead.
|
||||
|
|
@ -1,472 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -Ee -o pipefail;
|
||||
shopt -s extglob nocasematch;
|
||||
|
||||
BOOTSTRAPIT_GIT_URL="https://gitlab.com/mbarkhau/bootstrapit.git/"
|
||||
|
||||
BOOTSTRAPIT_GIT_PATH=/tmp/bootstrapit;
|
||||
|
||||
echo "Updating from $BOOTSTRAPIT_GIT_URL";
|
||||
|
||||
OLD_PWD="$PWD";
|
||||
|
||||
if [[ ! -e "$BOOTSTRAPIT_GIT_PATH" ]]; then
|
||||
git clone "$BOOTSTRAPIT_GIT_URL" "$BOOTSTRAPIT_GIT_PATH";
|
||||
else
|
||||
cd "$BOOTSTRAPIT_GIT_PATH";
|
||||
git pull --quiet;
|
||||
cd "$OLD_PWD";
|
||||
fi
|
||||
|
||||
cd "$BOOTSTRAPIT_GIT_PATH";
|
||||
git checkout "${BOOTSTRAPIT_DEV_BRANCH-master}";
|
||||
git pull --quiet;
|
||||
cd "$OLD_PWD";
|
||||
|
||||
if [[ ${BOOTSTRAPIT_DEBUG-0} == 0 ]]; then
|
||||
if [[ -f "$PROJECT_DIR/.git/config" ]]; then
|
||||
cd "$PROJECT_DIR";
|
||||
if [[ $( git diff -s --exit-code || echo "$?" ) -gt 0 ]]; then
|
||||
echo "ABORTING!: Your repo has local changes which are not comitted."
|
||||
echo "To avoid overwriting these changes, please commit your changes."
|
||||
exit 1;
|
||||
fi
|
||||
cd "$OLD_PWD";
|
||||
fi
|
||||
|
||||
md5cmd=$(command -v md5sum || command -v md5)
|
||||
|
||||
old_md5=$( $md5cmd < "$PROJECT_DIR/scripts/bootstrapit_update.sh" );
|
||||
new_md5=$( $md5cmd < "$BOOTSTRAPIT_GIT_PATH/scripts/bootstrapit_update.sh" );
|
||||
|
||||
if [[ "$old_md5" != "$new_md5" ]]; then
|
||||
# Copy the updated file, run it and exit the current execution.
|
||||
cp "${BOOTSTRAPIT_GIT_PATH}/scripts/bootstrapit_update.sh" \
|
||||
"${PROJECT_DIR}/scripts/";
|
||||
git add "${PROJECT_DIR}/scripts/bootstrapit_update.sh";
|
||||
git commit --no-verify -m "auto update of scripts/bootstrapit_update.sh"
|
||||
# shellcheck source=scripts/bootstrapit_update.sh
|
||||
source "${PROJECT_DIR}/scripts/bootstrapit_update.sh";
|
||||
exit 0;
|
||||
fi
|
||||
fi
|
||||
|
||||
# One time update of makefile setup
|
||||
if [[ -f "makefile.extra.make" && -f "makefile.config.make" ]]; then
|
||||
printf "Converting simplified makefile setup\n\n"
|
||||
printf " mv makefile makefile.bootstrapit.make\n"
|
||||
printf " cat makefile.config.make > makefile\n"
|
||||
printf " cat makefile.extra.make >> makefile\n"
|
||||
|
||||
grep -v "include" makefile | grep -v "Project Specific Tasks" > makefile.bootstrapit.make;
|
||||
|
||||
cat makefile.config.make > makefile;
|
||||
printf "\n\ninclude makefile.bootstrapit.make\n\n" >> makefile;
|
||||
printf "## -- Extra/Custom/Project Specific Tasks --\n" >> makefile;
|
||||
cat makefile.extra.make >> makefile;
|
||||
|
||||
git rm makefile.config.make;
|
||||
git rm makefile.extra.make;
|
||||
git add makefile;
|
||||
git add makefile.bootstrapit.make;
|
||||
|
||||
printf "\nNow the 'makefile' is yours and the bootstrapit targets are in makefile.bootstrapit.make"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# One time update of makefile capitalization
|
||||
if [[ -f "makefile" && -f "makefile.bootstrapit.make" ]]; then
|
||||
printf "Change capitalization of makefile -> Makefile # because too many rustled jimmies\n\n"
|
||||
printf " mv makefile Makefile\n"
|
||||
printf " mv makefile.bootstrapit.make Makefile.bootstrapit.make\n"
|
||||
sed -i 's/include makefile.bootstrapit.make/include Makefile.bootstrapit.make/g' makefile
|
||||
git add makefile
|
||||
git mv makefile Makefile;
|
||||
git mv makefile.bootstrapit.make Makefile.bootstrapit.make;
|
||||
|
||||
printf "Please commit the renamed files and run bootstrapit_update.sh again."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Argument parsing from
|
||||
# https://stackoverflow.com/a/14203146/62997
|
||||
UPDATE_ALL=0
|
||||
|
||||
POSITIONAL=()
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
key="$1"
|
||||
|
||||
case $key in
|
||||
-a|--all)
|
||||
UPDATE_ALL=1
|
||||
shift # past argument
|
||||
;;
|
||||
*) # unknown option
|
||||
POSITIONAL+=("$1") # save it in an array for later
|
||||
shift # past argument
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
set -- "${POSITIONAL[@]}" # restore positional parameters
|
||||
|
||||
if [[ -z $AUTHOR_EMAIL && -n $AUTHOR_CONTACT ]]; then
|
||||
AUTHOR_EMAIL="${AUTHOR_CONTACT}"
|
||||
fi
|
||||
|
||||
YEAR=$(date +%Y)
|
||||
MONTH=$(date +%m)
|
||||
|
||||
declare -a required_config_param_names=(
|
||||
"AUTHOR_NAME"
|
||||
"AUTHOR_EMAIL"
|
||||
"PACKAGE_NAME"
|
||||
"IS_PUBLIC"
|
||||
"GIT_REPO_NAMESPACE"
|
||||
"GIT_REPO_DOMAIN"
|
||||
"DESCRIPTION"
|
||||
"KEYWORDS"
|
||||
"LICENSE_ID"
|
||||
)
|
||||
|
||||
for name in "${required_config_param_names[@]}"; do
|
||||
if [[ -z ${!name} ]]; then
|
||||
echo "Missing parameter $name in $1";
|
||||
exit 1;
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -z $MODULE_NAME ]]; then
|
||||
MODULE_NAME=${PACKAGE_NAME};
|
||||
# replace "-"" with "_"
|
||||
MODULE_NAME=${MODULE_NAME//-/_};
|
||||
# lower case
|
||||
MODULE_NAME=${MODULE_NAME,,};
|
||||
fi
|
||||
|
||||
if [[ -z $PACKAGE_VERSION ]]; then
|
||||
PACKAGE_VERSION="$(date +'v%Y%m.0001-alpha')"
|
||||
fi
|
||||
|
||||
if [[ -z $DEFAULT_PYTHON_VERSION ]]; then
|
||||
DEFAULT_PYTHON_VERSION="python=3.6";
|
||||
fi
|
||||
|
||||
if [[ -z $SUPPORTED_PYTHON_VERSIONS ]]; then
|
||||
SUPPORTED_PYTHON_VERSIONS=${DEFAULT_PYTHON_VERSION};
|
||||
fi
|
||||
|
||||
if [[ -z $SPDX_LICENSE_ID ]]; then
|
||||
if [[ $LICENSE_ID =~ none ]]; then
|
||||
SPDX_LICENSE_ID="Proprietary";
|
||||
else
|
||||
SPDX_LICENSE_ID=$LICENSE_ID;
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
SPDX_REPO_URL="https://raw.githubusercontent.com/spdx";
|
||||
LICENSE_TXT_URL="$SPDX_REPO_URL/license-list-data/master/text/${SPDX_LICENSE_ID}.txt";
|
||||
LICENSE_XML_URL="$SPDX_REPO_URL/license-list-XML/master/src/${SPDX_LICENSE_ID}.xml";
|
||||
|
||||
LICENSE_TXT_FILE="/tmp/bootstrapit_$LICENSE_ID.txt"
|
||||
LICENSE_XML_FILE="/tmp/bootstrapit_$LICENSE_ID.xml"
|
||||
|
||||
|
||||
if ! [[ $LICENSE_ID =~ none ]]; then
|
||||
if ! [[ -f "$LICENSE_TXT_FILE" ]]; then
|
||||
echo "Downloading license text from $LICENSE_TXT_URL"
|
||||
curl -L --silent "$LICENSE_TXT_URL" > "$LICENSE_TXT_FILE.tmp";
|
||||
mv "$LICENSE_TXT_FILE.tmp" "$LICENSE_TXT_FILE";
|
||||
fi
|
||||
if ! [[ -f "$LICENSE_XML_FILE" ]]; then
|
||||
echo "Downloading license info from $LICENSE_XML_URL"
|
||||
curl -L --silent "$LICENSE_XML_URL" > "$LICENSE_XML_FILE.tmp";
|
||||
mv "$LICENSE_XML_FILE.tmp" "$LICENSE_XML_FILE";
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [[ -z $LICENSE_NAME ]]; then
|
||||
if [[ $LICENSE_ID =~ none ]]; then
|
||||
LICENSE_NAME="All Rights Reserved";
|
||||
else
|
||||
LICENSE_NAME=$(
|
||||
awk '{ if ($0 ~ /[^>]\s*$/ ) { printf "%s", $0 } else {printf "%s\n", $0 } }' \
|
||||
"$LICENSE_XML_FILE" \
|
||||
| grep "<license" \
|
||||
| sed -E 's/.*name="([A-Za-z0-9[:punct:][:space:]]+)".*/\1/g' \
|
||||
| sed 's/"/"/g' \
|
||||
| head -n 1
|
||||
)
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [[ -z $LICENSE_CLASSIFIER ]]; then
|
||||
if [[ $LICENSE_ID =~ none ]]; then
|
||||
LICENSE_CLASSIFIER="License :: Other/Proprietary License";
|
||||
elif [[ $LICENSE_ID =~ mit ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: MIT License";
|
||||
elif [[ $LICENSE_ID =~ bsd ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: BSD License";
|
||||
elif [[ $LICENSE_ID =~ gpl-2.0-only ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: GNU General Public License v2 (GPLv2)";
|
||||
elif [[ $LICENSE_ID =~ lgpl-2.0-only ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)";
|
||||
elif [[ $LICENSE_ID =~ gpl-3.0-only ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: GNU General Public License v3 (GPLv3)";
|
||||
elif [[ $LICENSE_ID =~ agpl-3.0-only ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: GNU Affero General Public License v3";
|
||||
elif [[ $LICENSE_ID =~ lgpl-3.0-only ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)";
|
||||
elif [[ $LICENSE_ID =~ mpl-2.0 ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)";
|
||||
elif [[ $LICENSE_ID =~ apache-2.0 ]]; then
|
||||
LICENSE_CLASSIFIER="License :: OSI Approved :: Apache Software License";
|
||||
else
|
||||
echo "Invalid LICENSE_ID=\"$LICENSE_ID\". Could not determine LICENSE_CLASSIFIER.";
|
||||
exit 1;
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$COPYRIGHT_STRING" ]]; then
|
||||
COPYRIGHT_STRING="Copyright (c) ${YEAR} ${AUTHOR_NAME} (${AUTHOR_EMAIL}) - ${LICENSE_NAME}";
|
||||
fi
|
||||
|
||||
if [[ -z "$SETUP_PY_LICENSE" ]]; then
|
||||
if [[ $LICENSE_ID =~ none ]]; then
|
||||
SETUP_PY_LICENSE="$COPYRIGHT_STRING";
|
||||
else
|
||||
SETUP_PY_LICENSE=$SPDX_LICENSE_ID;
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [[ -z "$IS_PUBLIC" ]]; then
|
||||
IS_PUBLIC=$( echo "$GIT_REPO_DOMAIN" | grep -c -E '(gitlab\.com|github\.com|bitbucket\.org)' || true );
|
||||
fi
|
||||
|
||||
if [[ -z "$PAGES_DOMAIN" ]]; then
|
||||
if [[ "$GIT_REPO_DOMAIN" == "gitlab.com" ]]; then
|
||||
PAGES_DOMAIN=gitlab.io;
|
||||
elif [[ "$GIT_REPO_DOMAIN" == "github.com" ]]; then
|
||||
PAGES_DOMAIN=github.io;
|
||||
elif [[ "$GIT_REPO_DOMAIN" == "bitbucket.org" ]]; then
|
||||
PAGES_DOMAIN=bitbucket.io;
|
||||
else
|
||||
PAGES_DOMAIN="gitlab-pages.$GIT_REPO_DOMAIN";
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$PAGES_URL" ]]; then
|
||||
PAGES_URL="https://${GIT_REPO_NAMESPACE}.${PAGES_DOMAIN}/${PACKAGE_NAME}/"
|
||||
fi
|
||||
|
||||
if [[ -z "$DOCKER_REGISTRY_DOMAIN" ]]; then
|
||||
if [[ "$GIT_REPO_DOMAIN" == "gitlab.com" ]]; then
|
||||
DOCKER_REGISTRY_DOMAIN=registry.gitlab.com;
|
||||
else
|
||||
DOCKER_REGISTRY_DOMAIN=hub.docker.com;
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$DOCKER_ROOT_IMAGE" ]]; then
|
||||
DOCKER_ROOT_IMAGE=registry.gitlab.com/mbarkhau/bootstrapit/root
|
||||
fi
|
||||
|
||||
if [[ -z "$DOCKER_ENV_BUILDER_IMAGE" ]]; then
|
||||
DOCKER_ENV_BUILDER_IMAGE=registry.gitlab.com/mbarkhau/bootstrapit/env_builder
|
||||
fi
|
||||
|
||||
if [[ -z "$DOCKER_REGISTRY_URL" ]]; then
|
||||
DOCKER_REGISTRY_URL=${DOCKER_REGISTRY_DOMAIN}/${GIT_REPO_NAMESPACE}/${PACKAGE_NAME}
|
||||
fi
|
||||
|
||||
if [[ -z "$DOCKER_BASE_IMAGE" ]]; then
|
||||
DOCKER_BASE_IMAGE=${DOCKER_REGISTRY_URL}/base
|
||||
fi
|
||||
|
||||
# strip off ":latest"
|
||||
# https://medium.com/@mccode/the-misunderstood-docker-tag-latest-af3babfd6375
|
||||
# https://vsupalov.com/docker-latest-tag/
|
||||
DOCKER_BASE_IMAGE="$(dirname ${DOCKER_BASE_IMAGE})"/"$(basename ${DOCKER_BASE_IMAGE} ':latest')";
|
||||
|
||||
if [[ -z "$MODULE_NAME" ]]; then
|
||||
MODULE_NAME=$( echo "${PACKAGE_NAME}" | tr '[:upper:]' '[:lower:]' | sed -E -e 's;-;_;g'; );
|
||||
fi
|
||||
|
||||
if [[ -z "$GIT_REPO_URL" ]]; then
|
||||
GIT_REPO_URL=https://${GIT_REPO_DOMAIN}/${GIT_REPO_NAMESPACE}/${PACKAGE_NAME}
|
||||
elif [[ ! "$GIT_REPO_URL" =~ ^https?://[^/]+/[^/]+/[^/]+(/|.git)?$ ]]; then
|
||||
echo "ERROR: Invalid argument for '${GIT_REPO_URL}'";
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
GIT_REPO_PATH=$( echo "$GIT_REPO_URL" | sed -E -e 's;https?://[^/]+/;;g' | sed -E 's;(/|.git)$;;g' )
|
||||
GIT_REPO_NAME=$( echo "$GIT_REPO_PATH" | sed -E -e 's;^[A-Za-z_-]+/;;g' )
|
||||
|
||||
if [[ "$LICENSE_ID" =~ "none" ]]; then
|
||||
echo "$COPYRIGHT_STRING" > "$PROJECT_DIR/LICENSE";
|
||||
else
|
||||
cat "$LICENSE_TXT_FILE" \
|
||||
| sed "s/Copyright (c) <year> <owner>[[:space:]]*/Copyright (c) $YEAR $AUTHOR_NAME ($AUTHOR_EMAIL)/g" \
|
||||
| sed "s/Copyright (c) <year> <copyright holders>[[:space:]]*/Copyright (c) $YEAR $AUTHOR_NAME ($AUTHOR_EMAIL)/g" \
|
||||
> "$PROJECT_DIR/LICENSE";
|
||||
fi
|
||||
|
||||
function format_template()
|
||||
{
|
||||
cat "$1" \
|
||||
| sed "s;\${GIT_REPO_URL};${GIT_REPO_URL};g" \
|
||||
| sed "s;\${GIT_REPO_PATH};${GIT_REPO_PATH};g" \
|
||||
| sed "s;\${GIT_REPO_NAMESPACE};${GIT_REPO_NAMESPACE};g" \
|
||||
| sed "s;\${GIT_REPO_NAME};${GIT_REPO_NAME};g" \
|
||||
| sed "s;\${GIT_REPO_DOMAIN};${GIT_REPO_DOMAIN};g" \
|
||||
| sed "s;\${DEFAULT_PYTHON_VERSION};${DEFAULT_PYTHON_VERSION};g" \
|
||||
| sed "s;\${SUPPORTED_PYTHON_VERSIONS};${SUPPORTED_PYTHON_VERSIONS};g" \
|
||||
| sed "s;\${DOCKER_REGISTRY_DOMAIN};${DOCKER_REGISTRY_DOMAIN};g" \
|
||||
| sed "s;\${DOCKER_REGISTRY_URL};${DOCKER_REGISTRY_URL};g" \
|
||||
| sed "s;\${DOCKER_ROOT_IMAGE};${DOCKER_ROOT_IMAGE};g" \
|
||||
| sed "s;\${DOCKER_ENV_BUILDER_IMAGE};${DOCKER_ENV_BUILDER_IMAGE};g" \
|
||||
| sed "s;\${DOCKER_BASE_IMAGE};${DOCKER_BASE_IMAGE};g" \
|
||||
| sed "s;\${PAGES_DOMAIN};${PAGES_DOMAIN};g" \
|
||||
| sed "s;\${PAGES_URL};${PAGES_URL};g" \
|
||||
| sed "s;\${AUTHOR_CONTACT};${AUTHOR_CONTACT};g" \
|
||||
| sed "s;\${AUTHOR_EMAIL};${AUTHOR_EMAIL};g" \
|
||||
| sed "s;\${AUTHOR_NAME};${AUTHOR_NAME};g" \
|
||||
| sed "s;\${PACKAGE_NAME};${PACKAGE_NAME};g" \
|
||||
| sed "s;\${PACKAGE_VERSION};${PACKAGE_VERSION};g" \
|
||||
| sed "s;\${MODULE_NAME};${MODULE_NAME};g" \
|
||||
| sed "s;\${DESCRIPTION};${DESCRIPTION};g" \
|
||||
| sed "s;\${KEYWORDS};${KEYWORDS};g" \
|
||||
| sed "s;\${SPDX_LICENSE_ID};${SPDX_LICENSE_ID};g" \
|
||||
| sed "s;\${SETUP_PY_LICENSE};${SETUP_PY_LICENSE};g" \
|
||||
| sed "s;\${LICENSE_CLASSIFIER};${LICENSE_CLASSIFIER};g" \
|
||||
| sed "s;\${COPYRIGHT_STRING};${COPYRIGHT_STRING};g" \
|
||||
| sed "s;\${YEAR};${YEAR};g" \
|
||||
| sed "s;\${MONTH};${MONTH};g" \
|
||||
| sed "s;\${IS_PUBLIC};${IS_PUBLIC};g" \
|
||||
> "$1.tmp";
|
||||
mv "$1.tmp" "$1";
|
||||
}
|
||||
|
||||
if [[ "${UPDATE_ALL}" -eq "1" ]]; then
|
||||
declare -a IGNORE_IF_EXISTS=()
|
||||
elif [[ -z "${IGNORE_IF_EXISTS[*]}" ]]; then
|
||||
declare -a IGNORE_IF_EXISTS=(
|
||||
"CHANGELOG.md"
|
||||
"README.md"
|
||||
"setup.py"
|
||||
"requirements/pypi.txt"
|
||||
"requirements/conda.txt"
|
||||
"requirements/vendor.txt"
|
||||
"src/${MODULE_NAME}/__init__.py"
|
||||
"src/${MODULE_NAME}/__main__.py"
|
||||
)
|
||||
fi
|
||||
|
||||
function copy_template()
|
||||
{
|
||||
if [[ -z ${2} ]]; then
|
||||
dest_subpath=$1;
|
||||
else
|
||||
dest_subpath=$2;
|
||||
fi
|
||||
|
||||
dest_path=${PROJECT_DIR}/$dest_subpath;
|
||||
if [[ -f "$dest_subpath" ]]; then
|
||||
for ignore_item in "${IGNORE_IF_EXISTS[@]}"; do
|
||||
if [[ "$dest_subpath" == "$ignore_item" ]]; then
|
||||
return 0;
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
cat "${BOOTSTRAPIT_GIT_PATH}/$1.template" > "$dest_path";
|
||||
|
||||
format_template "$dest_path";
|
||||
}
|
||||
|
||||
mkdir -p "${PROJECT_DIR}/test/";
|
||||
mkdir -p "${PROJECT_DIR}/vendor/";
|
||||
mkdir -p "${PROJECT_DIR}/scripts/";
|
||||
mkdir -p "${PROJECT_DIR}/stubs/";
|
||||
mkdir -p "${PROJECT_DIR}/src/";
|
||||
mkdir -p "${PROJECT_DIR}/requirements/";
|
||||
mkdir -p "${PROJECT_DIR}/src/${MODULE_NAME}";
|
||||
mkdir -p "${PROJECT_DIR}/.github/workflows/";
|
||||
|
||||
copy_template .gitignore;
|
||||
copy_template README.md;
|
||||
copy_template CONTRIBUTING.md;
|
||||
copy_template CHANGELOG.md;
|
||||
copy_template license.header;
|
||||
copy_template stubs/README.md;
|
||||
copy_template MANIFEST.in;
|
||||
|
||||
copy_template setup.py;
|
||||
copy_template setup.cfg;
|
||||
|
||||
copy_template Makefile;
|
||||
copy_template Makefile.bootstrapit.make;
|
||||
copy_template activate;
|
||||
copy_template docker_base.Dockerfile;
|
||||
copy_template Dockerfile;
|
||||
|
||||
copy_template requirements/conda.txt;
|
||||
copy_template requirements/pypi.txt;
|
||||
copy_template requirements/development.txt;
|
||||
copy_template requirements/integration.txt;
|
||||
copy_template requirements/vendor.txt;
|
||||
|
||||
copy_template .gitlab-ci.yml;
|
||||
copy_template .github/workflows/ci.yml;
|
||||
|
||||
copy_template scripts/update_conda_env_deps.sh;
|
||||
copy_template scripts/setup_conda_envs.sh;
|
||||
copy_template scripts/pre-push-hook.sh;
|
||||
copy_template scripts/exit_0_if_empty.py;
|
||||
|
||||
copy_template __main__.py "src/${MODULE_NAME}/__main__.py";
|
||||
copy_template __init__.py "src/${MODULE_NAME}/__init__.py";
|
||||
touch "${PROJECT_DIR}/test/__init__.py";
|
||||
|
||||
chmod +x "${PROJECT_DIR}/src/${MODULE_NAME}/__main__.py";
|
||||
chmod +x "${PROJECT_DIR}/scripts/update_conda_env_deps.sh";
|
||||
chmod +x "${PROJECT_DIR}/scripts/setup_conda_envs.sh";
|
||||
chmod +x "${PROJECT_DIR}/scripts/pre-push-hook.sh";
|
||||
|
||||
head -n 7 "${PROJECT_DIR}/license.header" \
|
||||
| tail -n +3 \
|
||||
| sed -E 's/(^ |^$)/#/g' \
|
||||
> /tmp/.py_license.header;
|
||||
|
||||
src_files="${PROJECT_DIR}/src/*/*.py"
|
||||
|
||||
for src_file in $src_files; do
|
||||
if grep -q -E '^# SPDX-License-Identifier' "$src_file"; then
|
||||
continue;
|
||||
fi
|
||||
offset=0
|
||||
if grep -z -q -E '^#![/a-z ]+?python' "$src_file"; then
|
||||
(( offset+=1 ));
|
||||
fi
|
||||
if grep -q -E '^# .+?coding: [a-zA-Z0-9_\-]+' "$src_file"; then
|
||||
(( offset+=1 ));
|
||||
fi
|
||||
rm -f "${src_file}.with_header";
|
||||
if [[ $offset -gt 0 ]]; then
|
||||
head -n $offset "${src_file}" > "${src_file}.with_header";
|
||||
fi
|
||||
(( offset+=1 ));
|
||||
cat /tmp/.py_license.header >> "${src_file}.with_header";
|
||||
tail -n +$offset "${src_file}" >> "${src_file}.with_header";
|
||||
mv "${src_file}.with_header" "$src_file";
|
||||
done
|
||||
|
||||
rm /tmp/.py_license.header;
|
||||
|
||||
git status;
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# if you know a bash one liner for this, be my guest
|
||||
import sys
|
||||
|
||||
data = open(sys.argv[1]).read(10)
|
||||
has_data = len(data) > 0
|
||||
|
||||
sys.exit(has_data)
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -euo pipefail;
|
||||
|
||||
make fmt;
|
||||
|
||||
git diff --exit-code --stat src/;
|
||||
git diff --exit-code --stat test/;
|
||||
git diff --exit-code --stat scripts/;
|
||||
git diff --exit-code --stat requirements/;
|
||||
|
||||
make lint;
|
||||
make test;
|
||||
|
|
@ -1,28 +0,0 @@
#!/bin/bash

read -r -a env_names <<< "${CONDA_ENV_NAMES//, /$IFS}";
read -r -a env_py_paths <<< "${CONDA_ENV_BIN_PYTHON_PATHS//, /$IFS}";
read -r -a py_versions <<< "${SUPPORTED_PYTHON_VERSIONS//, /$IFS}";

for i in ${!env_py_paths[@]}; do
    env_path_python=${env_py_paths[i]};
    env_name=${env_names[i]};
    py_version=${py_versions[i]};

    if [[ ! -f ${env_path_python} ]]; then
        echo "conda create --name ${env_name} ${py_version} ...";
        ${CONDA_BIN} create --name ${env_name} --yes;
        ${CONDA_BIN} install --name ${env_name} --yes --channel conda-forge ${py_version};
    fi;

    echo "updating ${env_name} conda deps ...";
    ${CONDA_BIN} install --name ${env_name} --channel conda-forge --yes --quiet \
        $(grep -o '^[^#][^ ]*' requirements/conda.txt)

    ${env_path_python} -m ensurepip;

    ${env_path_python} --version >> build/envs.txt.tmp \
        2>>build/envs.txt.tmp \
        1>>build/envs.txt.tmp;

done;
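Note: the `read -r -a arr <<< "${VAR//, /$IFS}"` lines above turn comma-separated settings such as CONDA_ENV_NAMES into bash arrays by replacing every ", " with the IFS separator before word-splitting. A standalone demonstration, assuming IFS is a plain space at that point (the values below are made up):

    IFS=' '                                        # assumption: single-space IFS
    CONDA_ENV_NAMES="examplepkg_py36, examplepkg_py37, examplepkg_py38"

    read -r -a env_names <<< "${CONDA_ENV_NAMES//, /$IFS}"

    echo "${#env_names[@]}"    # -> 3
    echo "${env_names[1]}"     # -> examplepkg_py37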
@ -1,29 +0,0 @@
#!/bin/bash

read -r -a env_py_paths <<< "${CONDA_ENV_BIN_PYTHON_PATHS//, /$IFS}";
read -r -a env_names <<< "${CONDA_ENV_NAMES//, /$IFS}";

for i in ${!env_py_paths[@]}; do
    env_path_python=${env_py_paths[i]};
    env_name=${env_names[i]};

    ${env_path_python} -m pip install --upgrade --quiet pip;

    echo "updating ${env_name} pypi deps ...";

    # pytest is required in every environment to run the test suite
    # against the installed modules.
    ${env_path_python} -m pip install \
        --disable-pip-version-check --upgrade --quiet \
        pytest;

    ${env_path_python} -m pip install \
        --disable-pip-version-check --upgrade --quiet \
        --requirement=requirements/pypi.txt;

    echo "updating ${env_name} vendor deps ...";

    ${env_path_python} -m pip install \
        --disable-pip-version-check --upgrade --quiet \
        --requirement=requirements/vendor.txt;
done;
@ -1,199 +0,0 @@
import io
import sys
import shlex
import random
import difflib
import datetime as dt
import subprocess as sp
import pkg_resources

import rich
import rich.box
import rich.table

from bumpver import v2version


def update(content, marker, value):
    begin_marker = f"<!-- BEGIN {marker} -->"
    end_marker = f"<!-- END {marker} -->"

    prefix, rest = content.split(begin_marker)
    _, suffix = rest.split(end_marker)
    return prefix + begin_marker + value + end_marker + suffix


def _color_line(line):
    if line.startswith("+++") or line.startswith("---"):
        return line
    elif line.startswith("+"):
        return "\u001b[32m" + line + "\u001b[0m"
    elif line.startswith("-"):
        return "\u001b[31m" + line + "\u001b[0m"
    elif line.startswith("@"):
        return "\u001b[36m" + line + "\u001b[0m"
    else:
        return line


def print_diff(old_content, new_content):
    diff_lines = difflib.unified_diff(
        a=old_content.splitlines(),
        b=new_content.splitlines(),
        lineterm="",
    )

    for line in diff_lines:
        print(_color_line(line))


def update_md_code_output(content, command):
    output_data = sp.check_output(shlex.split(command))
    output = output_data.decode("utf-8")

    replacement = "\n\n```\n" + "$ " + command + "\n" + output + "```\n\n"
    return update(content, command, replacement)


def weeknum_example():
    base_date = dt.date(2020, 12, 26)

    rows = []
    for i in range(10):
        d = base_date + dt.timedelta(days=i)
        row = d.strftime("%Y-%m-%d (%a): %Y %W %U %G %V")
        rows.append(row)

    content = "\n".join([" YYYY WW UU GGGG VV"] + rows)
    return "\n\n```\n" + content + "\n```\n\n"


def pattern_examples():
    patterns = [
        ("MAJOR.MINOR.PATCH[PYTAGNUM]"  , ""),
        ("MAJOR.MINOR[.PATCH[PYTAGNUM]]", ""),
        ("YYYY.BUILD[PYTAGNUM]"         , ""),
        ("YYYY.BUILD[-TAG]"             , ""),
        ("YYYY.INC0[PYTAGNUM]"          , ""),
        ("YYYY0M.PATCH[-TAG]"           , "¹"),
        ("YYYY0M.BUILD[-TAG]"           , ""),
        ("YYYY.0M"                      , ""),
        ("YYYY.MM"                      , ""),
        ("YYYY.WW"                      , ""),
        ("YYYY.MM.PATCH[PYTAGNUM]"      , ""),
        ("YYYY.0M.PATCH[PYTAGNUM]"      , "¹"),
        ("YYYY.MM.INC0"                 , ""),
        ("YYYY.MM.DD"                   , ""),
        ("YYYY.0M.0D"                   , ""),
        ("YY.0M.PATCH"                  , "²"),
    ]

    rand = random.Random(0)
    field_values = [
        {
            'year_y': rand.randrange(2020, 2023),
            'month' : rand.randrange(1, 12),
            'dom'   : rand.randrange(1, 28),
            'major' : rand.randrange(0, 1),
            'minor' : rand.randrange(0, 20),
            'patch' : rand.randrange(0, 20),
            'inc0'  : rand.randrange(0, 20),
            'bid'   : rand.randrange(1000, 1500),
            'tag'   : rand.choice(["final", "beta"]),
        }
        for _ in range(100)
    ]

    rows = []
    for raw_pattern, lexico_caveat in patterns:
        sort_keys = ['year_y']
        if "0M" in raw_pattern or "MM" in raw_pattern:
            sort_keys.append('month')
        if "0D" in raw_pattern or "DD" in raw_pattern:
            sort_keys.append('dom')
        if "PATCH" in raw_pattern:
            sort_keys.append('patch')
        if "INC0" in raw_pattern:
            sort_keys.append('inc0')
        if "BUILD" in raw_pattern:
            sort_keys.append('bid')
        if "PYTAG" in raw_pattern:
            sort_keys.append('tag')

        field_values.sort(key=lambda fv: tuple(fv[k] for k in sort_keys))
        field_values[-1]['year_y'] = 2101

        example_versions = []
        notag_versions = []
        pep440_versions = []

        for fvals in field_values:
            vinfo = v2version.parse_field_values_to_vinfo(fvals)
            example_version = v2version.format_version(vinfo, raw_pattern)
            example_versions.append(example_version)

            pep440_version = str(pkg_resources.parse_version(example_version))
            pep440_versions.append(pep440_version)

            notag_fvals = fvals.copy()
            notag_fvals['tag'] = 'final'

            notag_vinfo = v2version.parse_field_values_to_vinfo(notag_fvals)
            notag_version = v2version.format_version(notag_vinfo, raw_pattern)
            notag_versions.append(notag_version)

        sample = rand.sample(sorted(example_versions, key=len, reverse=True)[:-1], 2)
        sample.sort(key=pkg_resources.parse_version)

        is_pep440 = pep440_versions == example_versions
        is_lexico = sorted(notag_versions) == notag_versions

        pattern_col = f"`{raw_pattern}`"
        pep440_col = "yes" if is_pep440 else "no"
        lexico_col = ("yes" if is_lexico else "no") + lexico_caveat
        sample_str = " ".join([v.ljust(16) for v in sample]).strip()
        examples_col = "`" + sample_str + "`"

        # row      = (pattern_col, examples_col, pep440_col)
        # sort_key = (is_pep440  , -len(raw_pattern))

        row = (pattern_col, examples_col, pep440_col, lexico_col)
        sort_key = (is_pep440, is_lexico, -len(raw_pattern))

        rows.append((sort_key, row))

    # rows.sort(reverse=True)

    patterns_table = rich.table.Table(show_header=True, box=rich.box.ASCII)
    patterns_table.add_column("pattern")
    patterns_table.add_column("examples")
    patterns_table.add_column("PEP440")
    patterns_table.add_column("lexico.")

    for _, row in rows:
        patterns_table.add_row(*row)

    buf = io.StringIO()
    rich.print(patterns_table, file=buf)
    table_str = buf.getvalue()
    table_str = "\n".join(table_str.splitlines()[1:-1])
    table_str = table_str.replace("-+-", "-|-")
    return "\n\n" + table_str + "\n\n"


old_content = io.open("README.md").read()

new_content = old_content
new_content = update_md_code_output(new_content, "bumpver --help")
new_content = update_md_code_output(new_content, "bumpver update --help")
new_content = update(new_content, "pattern_examples", pattern_examples())
new_content = update(new_content, "weeknum_example", weeknum_example())


if old_content == new_content:
    print("Nothing changed")
elif "--dry" in sys.argv:
    print_diff(old_content, new_content)
else:
    with io.open("README.md", mode="w") as fobj:
        fobj.write(new_content)
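Note: the script above regenerates parts of README.md in place. update() splits the document on `<!-- BEGIN marker -->` / `<!-- END marker -->` comment pairs and replaces only the text between them, so the captured CLI help, the pattern table and the week-number example can be refreshed without touching the hand-written prose around them. A hedged usage sketch (the script path is an assumption; the --dry flag is the one checked in the code above):

    # preview the README changes as a colored unified diff instead of writing the file
    python scripts/update_readme.py --dry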
@ -89,7 +89,7 @@ addopts = --doctest-modules


[bumpver]
current_version = "2023.1124"
current_version = "2023.1125"
version_pattern = "YYYY.BUILD[-TAG]"
commit_message = "bump {old_version} -> {new_version}"
commit = True

@ -97,6 +97,8 @@ tag = True
push = True

[bumpver:file_patterns]
flake.nix =
    version = "{version}";
bootstrapit.sh =
    PACKAGE_VERSION="{version}"
setup.cfg =
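Note: each entry under [bumpver:file_patterns] names a file and the line pattern that carries the version string; the new flake.nix entry is what keeps the Nix package version in sync on every bump. A hedged usage sketch with the standard bumpver CLI (these flags are bumpver's documented ones, not something shown in this diff):

    # preview which files and lines would be rewritten, without committing, tagging or pushing
    bumpver update --dry

    # perform the bump: rewrites flake.nix and the other configured files,
    # then commits, tags and pushes per the [bumpver] settings above
    bumpver update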
9 setup.py

@ -31,7 +31,7 @@ install_requires = [
]


long_description = "\n\n".join((read("README.md"), read("CHANGELOG.md")))
long_description = read("README.md")


# See https://pypi.python.org/pypi?%3Aaction=list_classifiers

@ -57,18 +57,13 @@ classifiers = [
package_dir = {"": "src"}


if any(arg.startswith("bdist") for arg in sys.argv):
    import lib3to6
    package_dir = lib3to6.fix(package_dir)


setuptools.setup(
    name="bumpver",
    license="MIT",
    author="Manuel Barkhau",
    author_email="mbarkhau@gmail.com",
    url="https://github.com/mbarkhau/bumpver",
    version="2023.1124",
    version="2023.1125",
    keywords="version bumpver calver semver versioning bumpversion pep440",
    description="Bump version numbers in project files.",
    long_description=long_description,

@ -5,4 +5,4 @@
# SPDX-License-Identifier: MIT
"""BumpVer: A CLI program for versioning."""

__version__ = "2023.1124"
__version__ = "2023.1125"

@ -253,7 +253,7 @@ def version_options(function: typ.Callable) -> typ.Callable:


@click.group(context_settings={'help_option_names': ["-h", "--help"]})
@click.version_option(version="2023.1124")
@click.version_option(version="2023.1125")
@verbose_option
def cli(verbose: int = 0) -> None:
    """Automatically update version strings in plaintext files."""

@ -1,6 +0,0 @@
# Stub files for mypy

Before using stubs, check if the library you want to use
itself uses mypy. If it does, the better approach is to
add it to `requirements/vendor.txt`. This way mypy will
find the actual source instead of just stub files.
@ -1,120 +0,0 @@
# Stubs for pathlib2 (adapted for Python 2 from Python 3.4 typeshed)

from typing import Any, Generator, IO, Optional, Sequence, Tuple, Type, TypeVar, Union, List
import os
import sys

_P = TypeVar('_P', bound='PurePath')

if sys.version_info >= (3, 6):
    _PurePathBase = os.PathLike[str]
else:
    _PurePathBase = object

class PurePath(_PurePathBase):
    parts = ...  # type: Tuple[str, ...]
    drive = ...  # type: str
    root = ...  # type: str
    anchor = ...  # type: str
    name = ...  # type: str
    suffix = ...  # type: str
    suffixes = ...  # type: List[str]
    stem = ...  # type: str
    if sys.version_info < (3, 5):
        def __init__(self, *pathsegments: str) -> None: ...
    elif sys.version_info < (3, 6):
        def __new__(cls: Type[_P], *args: Union[str, PurePath]) -> _P: ...
    else:
        def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]]) -> _P: ...
    def __hash__(self) -> int: ...
    def __lt__(self, other: PurePath) -> bool: ...
    def __le__(self, other: PurePath) -> bool: ...
    def __gt__(self, other: PurePath) -> bool: ...
    def __div__(self: _P, key: Union[str, PurePath]) -> _P: ...
    def __rdiv__(self: _P, key: Union[str, PurePath]) -> _P: ...
    def __truediv__(self: _P, key: Union[str, PurePath]) -> _P: ...
    def __rtruediv__(self: _P, key: Union[str, PurePath]) -> _P: ...
    def __ge__(self, other: PurePath) -> bool: ...
    def __bytes__(self) -> bytes: ...
    def as_posix(self) -> str: ...
    def as_uri(self) -> str: ...
    def is_absolute(self) -> bool: ...
    def is_reserved(self) -> bool: ...
    def match(self, path_pattern: str) -> bool: ...
    def relative_to(self: _P, *other: Union[str, PurePath]) -> _P: ...
    def with_name(self: _P, name: str) -> _P: ...
    def with_suffix(self: _P, suffix: str) -> _P: ...
    def joinpath(self: _P, *other: Union[str, PurePath]) -> _P: ...

    @property
    def parents(self: _P) -> Sequence[_P]: ...
    @property
    def parent(self: _P) -> _P: ...

class PurePosixPath(PurePath): ...
class PureWindowsPath(PurePath): ...

class Path(PurePath):
    @classmethod
    def cwd(cls: Type[_P]) -> _P: ...
    def stat(self) -> os.stat_result: ...
    def chmod(self, mode: int) -> None: ...
    def exists(self) -> bool: ...
    def glob(self, pattern: str) -> Generator[Path, None, None]: ...
    def group(self) -> str: ...
    def is_dir(self) -> bool: ...
    def is_file(self) -> bool: ...
    def is_symlink(self) -> bool: ...
    def is_socket(self) -> bool: ...
    def is_fifo(self) -> bool: ...
    def is_block_device(self) -> bool: ...
    def is_char_device(self) -> bool: ...
    def iterdir(self) -> Generator[Path, None, None]: ...
    def lchmod(self, mode: int) -> None: ...
    def lstat(self) -> os.stat_result: ...
    if sys.version_info < (3, 5):
        def mkdir(self, mode: int = ...,
                  parents: bool = ...) -> None: ...
    else:
        def mkdir(self, mode: int = ..., parents: bool = ...,
                  exist_ok: bool = ...) -> None: ...
    def open(self, mode: str = ..., buffering: int = ...,
             encoding: Optional[str] = ..., errors: Optional[str] = ...,
             newline: Optional[str] = ...) -> IO[Any]: ...
    def owner(self) -> str: ...
    def rename(self, target: Union[str, PurePath]) -> None: ...
    def replace(self, target: Union[str, PurePath]) -> None: ...
    if sys.version_info < (3, 6):
        def resolve(self: _P) -> _P: ...
    else:
        def resolve(self: _P, strict: bool = ...) -> _P: ...
    def rglob(self, pattern: str) -> Generator[Path, None, None]: ...
    def rmdir(self) -> None: ...
    def symlink_to(self, target: Union[str, Path],
                   target_is_directory: bool = ...) -> None: ...
    def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ...
    def unlink(self) -> None: ...

    if sys.version_info >= (3, 5):
        @classmethod
        def home(cls: Type[_P]) -> _P: ...
        if sys.version_info < (3, 6):
            def __new__(cls: Type[_P], *args: Union[str, PurePath],
                        **kwargs: Any) -> _P: ...
        else:
            def __new__(cls: Type[_P], *args: Union[str, os.PathLike[str]],
                        **kwargs: Any) -> _P: ...

    def absolute(self: _P) -> _P: ...
    def expanduser(self: _P) -> _P: ...
    def read_bytes(self) -> bytes: ...
    def read_text(self, encoding: Optional[str] = ...,
                  errors: Optional[str] = ...) -> str: ...
    def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ...
    def write_bytes(self, data: bytes) -> int: ...
    def write_text(self, data: str, encoding: Optional[str] = ...,
                   errors: Optional[str] = ...) -> int: ...


class PosixPath(Path, PurePosixPath): ...
class WindowsPath(Path, PureWindowsPath): ...