Merge branch 'valentin87/pycalver-patch-1' into 'master'

Valentin87/pycalver patch 1

See merge request mbarkhau/pycalver!3
This commit is contained in:
Manuel Barkhau 2020-07-19 15:18:28 +00:00
commit cad2c482a5
18 changed files with 491 additions and 236 deletions

144
.gitignore vendored
View file

@ -73,3 +73,147 @@ envs.txt
test_build_logs/*.log
build/miniconda3.sh
compat_test/
# https://github.com/github/gitignore/blob/master/Global/Linux.gitignore
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
# https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# https://raw.githubusercontent.com/github/gitignore/master/Global/macOS.gitignore
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# https://raw.githubusercontent.com/github/gitignore/master/Global/JetBrains.gitignore
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

View file

@ -13,9 +13,13 @@ unit:
- make test_compat
coverage: '/^(TOTAL|src).*?(\d+\%)$/'
artifacts:
reports:
junit:
- reports/flake8.xml
- reports/pytest.xml
paths:
- htmlcov/
- mypycov/
- reports/htmlcov/
- reports/mypycov/
allow_failure: false
@ -24,8 +28,8 @@ pages:
script:
- mkdir -p public/cov
- mkdir -p public/mypycov
- cp -r htmlcov/* public/cov/
- cp -r mypycov/* public/mypycov/
- cp -r reports/htmlcov/* public/cov/
- cp -r reports/mypycov/* public/mypycov/
artifacts:
paths:
- public

View file

@ -300,9 +300,9 @@ git_hooks:
## -- Integration --
## Run flake8 linter and check for fmt
.PHONY: lint
lint:
## Run isort with --check-only
.PHONY: lint_isort
lint_isort:
@printf "isort ..\n"
@$(DEV_ENV)/bin/isort \
--check-only \
@ -314,6 +314,10 @@ lint:
src/ test/
@printf "\e[1F\e[9C ok\n"
## Run sjfmt with --check
.PHONY: lint_sjfmt
lint_sjfmt:
@printf "sjfmt ..\n"
@$(DEV_ENV)/bin/sjfmt \
--target-version=py36 \
@ -323,46 +327,60 @@ lint:
src/ test/ 2>&1 | sed "/All done/d" | sed "/left unchanged/d"
@printf "\e[1F\e[9C ok\n"
## Run flake8
.PHONY: lint_flake8
lint_flake8:
@rm -f reports/flake8*;
@mkdir -p "reports/";
@printf "flake8 ..\n"
@$(DEV_ENV)/bin/flake8 src/
@$(DEV_ENV)/bin/flake8 src/ --tee --output-file reports/flake8.txt || exit 0;
@$(DEV_ENV)/bin/flake8_junit reports/flake8.txt reports/flake8.xml >> /dev/null;
@$(DEV_ENV_PY) scripts/exit_0_if_empty.py reports/flake8.txt;
@printf "\e[1F\e[9C ok\n"
## Run pylint. Should not break the build yet
.PHONY: lint_pylint
lint_pylint:
@mkdir -p "reports/";
@printf "pylint ..\n";
@$(DEV_ENV)/bin/pylint --rcfile=setup.cfg src/ test/
@printf "\e[1F\e[9C ok\n"
## Run flake8 linter and check for fmt
.PHONY: lint
lint: lint_isort lint_sjfmt lint_flake8
## Run mypy type checker
.PHONY: mypy
mypy:
@rm -rf ".mypy_cache";
@rm -rf "reports/mypycov";
@mkdir -p "reports/";
@printf "mypy ....\n"
@MYPYPATH=stubs/:vendor/ $(DEV_ENV_PY) -m mypy \
--html-report mypycov \
--html-report reports/mypycov \
--no-error-summary \
src/ | sed "/Generated HTML report/d"
@printf "\e[1F\e[9C ok\n"
## Run pylint. Should not break the build yet
.PHONY: pylint
pylint:
@printf "pylint ..\n";
@$(DEV_ENV)/bin/pylint --jobs=4 --output-format=colorized --score=no \
--disable=C0103,C0301,C0330,C0326,C0330,C0411,R0903,W1619,W1618,W1203 \
--extension-pkg-whitelist=ujson,lxml,PIL,numpy,pandas,sklearn,pyblake2 \
src/
@$(DEV_ENV)/bin/pylint --jobs=4 --output-format=colorized --score=no \
--disable=C0103,C0111,C0301,C0330,C0326,C0330,C0411,R0903,W1619,W1618,W1203 \
--extension-pkg-whitelist=ujson,lxml,PIL,numpy,pandas,sklearn,pyblake2 \
test/
@printf "\e[1F\e[9C ok\n"
## Run pytest unit and integration tests
.PHONY: test
test:
@rm -rf ".pytest_cache";
@rm -rf "src/__pycache__";
@rm -rf "test/__pycache__";
@rm -rf "reports/testcov/";
@rm -f "reports/pytest*";
@mkdir -p "reports/";
# First we test the local source tree using the dev environment
ENV=$${ENV-dev} \
@ -371,8 +389,10 @@ test:
$(DEV_ENV_PY) -m pytest -v \
--doctest-modules \
--verbose \
--cov-report html \
--cov-report "html:reports/testcov/" \
--cov-report term \
--html=reports/pytest/index.html \
--junitxml reports/pytest.xml \
-k "$${PYTEST_FILTER}" \
$(shell cd src/ && ls -1 */__init__.py | awk '{ sub(/\/__init__.py/, "", $$1); print "--cov "$$1 }') \
test/ src/;
@ -394,9 +414,9 @@ test:
## -- Helpers --
## Run code formatter on src/ and test/
.PHONY: fmt
fmt:
## Run import sorting on src/ and test/
.PHONY: fmt_isort
fmt_isort:
@$(DEV_ENV)/bin/isort \
--force-single-line-imports \
--length-sort \
@ -405,6 +425,10 @@ fmt:
--project $(PKG_NAME) \
src/ test/;
## Run code formatter on src/ and test/
.PHONY: fmt_sjfmt
fmt_sjfmt:
@$(DEV_ENV)/bin/sjfmt \
--target-version=py36 \
--skip-string-normalization \
@ -412,10 +436,14 @@ fmt:
src/ test/;
## Run code formatters
.PHONY: fmt
fmt: fmt_isort fmt_sjfmt
## Shortcut for make fmt lint mypy test
## Shortcut for make fmt lint mypy devtest test
.PHONY: check
check: fmt lint mypy test
check: fmt lint mypy devtest test
## Start subshell with environ variables set.

View file

@ -20,7 +20,9 @@ flake8-comprehensions
flake8-junit-report
pylint
mypy
isort
# pylint doesn't support isort>=5 for now
# https://github.com/PyCQA/pylint/issues/3722
isort<5
# http://doc.pytest.org/en/latest/py27-py34-deprecation.html
# The pytest 4.6 series will be the last to support Python 2.7
@ -28,7 +30,9 @@ isort
# pytest 5.0 and onwards will support only Python 3.5+.
pytest<5.0
pytest-cov
pytest-html
# https://github.com/pytest-dev/pytest-html/blob/master/CHANGES.rst
# pytest-html 2.0+ doesn't support python2.7
pytest-html<2.0
readme_renderer[md]
twine

8
scripts/exit_0_if_empty.py Executable file
View file

@ -0,0 +1,8 @@
#!/usr/bin/env python
# Exit with status 0 if the file given as argv[1] is empty, 1 otherwise.
# Used by the Makefile's lint_flake8 target to fail the build when flake8
# wrote any report output.
# if you know a bash one liner for this, be my guest
import sys


def file_has_data(path):
    """Return True if the file at ``path`` contains at least one byte.

    Reading a single byte is enough to decide emptiness without
    loading the whole file into memory.
    """
    # 'with' ensures the file handle is closed (the original leaked it).
    with open(path) as fobj:
        return len(fobj.read(1)) > 0


if __name__ == "__main__":
    # bool is a subclass of int: True -> exit status 1 (file has data),
    # False -> exit status 0 (file is empty). Same contract as before.
    sys.exit(file_has_data(sys.argv[1]))

View file

@ -12,6 +12,13 @@ strict_optional = True
ignore_missing_imports = True
show_error_codes = True
[tool:isort]
known_third_party = pathlib2
force_single_line = True
length_sort = True
line_length = 100
[flake8]
max-line-length = 100
max-complexity = 10
@ -48,6 +55,8 @@ ignore =
D102
# Missing docstring in public function
D103
# Missing docstring in public package
# D104
# Missing docstring in magic method
# D105
# Missing docstring on __init__
@ -97,3 +106,43 @@ README.md =
img.shields.io/static/v1.svg?label=PyCalVer&message={version}&color=blue
Successfully installed pycalver-{pep440_version}
pycalver, version {version}
[tool:pylint]
score = yes
# pylint-ignore only works with jobs = 1
jobs = 1
# Set the output format. Available formats are text, parseable, colorized,
# msvs (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format = colorized
# Maximum number of locals for function / method body
max-locals = 20
# Maximum number of arguments for function / method
max-args = 8
good-names = logger,i,ex
# These are packages that are implemented as c extensions and
# which pylint cannot do introspection on.
extension-pkg-whitelist = numpy,pandas,lxml,PIL,sklearn,pyblake2
notes=TODO,FIXME,XXX,SLOW,BUG
# https://pylint.pycqa.org/en/stable/technical_reference/features.html
disable =
bad-continuation,
bad-whitespace,
line-too-long,
logging-not-lazy,
logging-fstring-interpolation,
no-else-return,
no-else-raise,
missing-docstring,
missing-module-docstring,
missing-class-docstring,
missing-function-docstring,

View file

@ -42,7 +42,7 @@ click.disable_unicode_literals_warning = True
VALID_RELEASE_VALUES = ("alpha", "beta", "dev", "rc", "post", "final")
log = logging.getLogger("pycalver.cli")
logger = logging.getLogger("pycalver.cli")
def _configure_logging(verbose: int = 0) -> None:
@ -57,15 +57,15 @@ def _configure_logging(verbose: int = 0) -> None:
log_level = logging.INFO
logging.basicConfig(level=log_level, format=log_format, datefmt="%Y-%m-%dT%H:%M:%S")
log.debug("Logging configured.")
logger.debug("Logging configured.")
def _validate_release_tag(release: str) -> None:
if release in VALID_RELEASE_VALUES:
return
log.error(f"Invalid argument --release={release}")
log.error(f"Valid arguments are: {', '.join(VALID_RELEASE_VALUES)}")
logger.error(f"Invalid argument --release={release}")
logger.error(f"Valid arguments are: {', '.join(VALID_RELEASE_VALUES)}")
sys.exit(1)
@ -108,7 +108,7 @@ def test(
old_version, pattern=pattern, release=release, major=major, minor=minor, patch=patch
)
if new_version is None:
log.error(f"Invalid version '{old_version}' and/or pattern '{pattern}'.")
logger.error(f"Invalid version '{old_version}' and/or pattern '{pattern}'.")
sys.exit(1)
pep440_version = version.to_pep440(new_version)
@ -119,29 +119,31 @@ def test(
def _update_cfg_from_vcs(cfg: config.Config, fetch: bool) -> config.Config:
try:
_vcs = vcs.get_vcs()
log.debug(f"vcs found: {_vcs.name}")
vcs_api = vcs.get_vcs_api()
logger.debug(f"vcs found: {vcs_api.name}")
if fetch:
log.info("fetching tags from remote (to turn off use: -n / --no-fetch)")
_vcs.fetch()
logger.info("fetching tags from remote (to turn off use: -n / --no-fetch)")
vcs_api.fetch()
version_tags = [tag for tag in _vcs.ls_tags() if version.is_valid(tag, cfg.version_pattern)]
version_tags = [
tag for tag in vcs_api.ls_tags() if version.is_valid(tag, cfg.version_pattern)
]
if version_tags:
version_tags.sort(reverse=True)
log.debug(f"found {len(version_tags)} tags: {version_tags[:2]}")
logger.debug(f"found {len(version_tags)} tags: {version_tags[:2]}")
latest_version_tag = version_tags[0]
latest_version_pep440 = version.to_pep440(latest_version_tag)
if latest_version_tag > cfg.current_version:
log.info(f"Working dir version : {cfg.current_version}")
log.info(f"Latest version from {_vcs.name:>3} tag: {latest_version_tag}")
logger.info(f"Working dir version : {cfg.current_version}")
logger.info(f"Latest version from {vcs_api.name:>3} tag: {latest_version_tag}")
cfg = cfg._replace(
current_version=latest_version_tag, pep440_version=latest_version_pep440
)
else:
log.debug("no vcs tags found")
logger.debug("no vcs tags found")
except OSError:
log.debug("No vcs found")
logger.debug("No vcs found")
return cfg
@ -159,7 +161,7 @@ def show(verbose: int = 0, fetch: bool = True) -> None:
cfg: config.MaybeConfig = config.parse(ctx)
if cfg is None:
log.error("Could not parse configuration. Perhaps try 'pycalver init'.")
logger.error("Could not parse configuration. Perhaps try 'pycalver init'.")
sys.exit(1)
cfg = _update_cfg_from_vcs(cfg, fetch=fetch)
@ -181,7 +183,7 @@ def init(verbose: int = 0, dry: bool = False) -> None:
cfg: config.MaybeConfig = config.parse(ctx)
if cfg:
log.error(f"Configuration already initialized in {ctx.config_filepath}")
logger.error(f"Configuration already initialized in {ctx.config_filepath}")
sys.exit(1)
if dry:
@ -193,66 +195,70 @@ def init(verbose: int = 0, dry: bool = False) -> None:
config.write_content(ctx)
def _assert_not_dirty(_vcs: vcs.VCS, filepaths: typ.Set[str], allow_dirty: bool) -> None:
dirty_files = _vcs.status(required_files=filepaths)
def _assert_not_dirty(vcs_api: vcs.VCSAPI, filepaths: typ.Set[str], allow_dirty: bool) -> None:
dirty_files = vcs_api.status(required_files=filepaths)
if dirty_files:
log.warning(f"{_vcs.name} working directory is not clean. Uncomitted file(s):")
logger.warning(f"{vcs_api.name} working directory is not clean. Uncomitted file(s):")
for dirty_file in dirty_files:
log.warning(" " + dirty_file)
logger.warning(" " + dirty_file)
if not allow_dirty and dirty_files:
sys.exit(1)
dirty_pattern_files = set(dirty_files) & filepaths
if dirty_pattern_files:
log.error("Not commiting when pattern files are dirty:")
logger.error("Not commiting when pattern files are dirty:")
for dirty_file in dirty_pattern_files:
log.warning(" " + dirty_file)
logger.warning(" " + dirty_file)
sys.exit(1)
def _bump(cfg: config.Config, new_version: str, allow_dirty: bool = False) -> None:
_vcs: typ.Optional[vcs.VCS]
def _commit(
cfg: config.Config, new_version: str, vcs_api: vcs.VCSAPI, filepaths: typ.Set[str]
) -> None:
for filepath in filepaths:
vcs_api.add(filepath)
try:
_vcs = vcs.get_vcs()
except OSError:
log.warning("Version Control System not found, aborting commit.")
_vcs = None
vcs_api.commit(f"bump version to {new_version}")
if cfg.commit and cfg.tag:
vcs_api.tag(new_version)
if cfg.commit and cfg.tag and cfg.push:
vcs_api.push(new_version)
def _bump(cfg: config.Config, new_version: str, allow_dirty: bool = False) -> None:
vcs_api: typ.Optional[vcs.VCSAPI] = None
if cfg.commit:
try:
vcs_api = vcs.get_vcs_api()
except OSError:
logger.warning("Version Control System not found, aborting commit.")
filepaths = set(cfg.file_patterns.keys())
if _vcs:
_assert_not_dirty(_vcs, filepaths, allow_dirty)
if vcs_api:
_assert_not_dirty(vcs_api, filepaths, allow_dirty)
try:
new_vinfo = version.parse_version_info(new_version, cfg.version_pattern)
rewrite.rewrite(cfg.file_patterns, new_vinfo)
except Exception as ex:
log.error(str(ex))
logger.error(str(ex))
sys.exit(1)
if _vcs is None or not cfg.commit:
return
for filepath in filepaths:
_vcs.add(filepath)
_vcs.commit(f"bump version to {new_version}")
if cfg.commit and cfg.tag:
_vcs.tag(new_version)
if cfg.commit and cfg.tag and cfg.push:
_vcs.push(new_version)
if vcs_api:
_commit(cfg, new_version, vcs_api, filepaths)
def _try_bump(cfg: config.Config, new_version: str, allow_dirty: bool = False) -> None:
try:
_bump(cfg, new_version, allow_dirty)
except sp.CalledProcessError as ex:
log.error(f"Error running subcommand: {ex.cmd}")
logger.error(f"Error running subcommand: {ex.cmd}")
if ex.stdout:
sys.stdout.write(ex.stdout.decode('utf-8'))
if ex.stderr:
@ -284,7 +290,7 @@ def _try_print_diff(cfg: config.Config, new_version: str) -> None:
try:
_print_diff(cfg, new_version)
except Exception as ex:
log.error(str(ex))
logger.error(str(ex))
sys.exit(1)
@ -339,7 +345,7 @@ def bump(
cfg: config.MaybeConfig = config.parse(ctx)
if cfg is None:
log.error("Could not parse configuration. Perhaps try 'pycalver init'.")
logger.error("Could not parse configuration. Perhaps try 'pycalver init'.")
sys.exit(1)
cfg = _update_cfg_from_vcs(cfg, fetch=fetch)
@ -357,13 +363,13 @@ def bump(
is_semver = "{semver}" in cfg.version_pattern
has_semver_inc = major or minor or patch
if is_semver and not has_semver_inc:
log.warning("bump --major/--minor/--patch required when using semver.")
logger.warning("bump --major/--minor/--patch required when using semver.")
else:
log.error(f"Invalid version '{old_version}' and/or pattern '{cfg.version_pattern}'.")
logger.error(f"Invalid version '{old_version}' and/or pattern '{cfg.version_pattern}'.")
sys.exit(1)
log.info(f"Old Version: {old_version}")
log.info(f"New Version: {new_version}")
logger.info(f"Old Version: {old_version}")
logger.info(f"New Version: {new_version}")
if dry or verbose >= 2:
_try_print_diff(cfg, new_version)

View file

@ -17,7 +17,7 @@ import pathlib2 as pl
from . import version
log = logging.getLogger("pycalver.config")
logger = logging.getLogger("pycalver.config")
Patterns = typ.List[str]
PatternsByGlob = typ.Dict[str, Patterns]
@ -135,6 +135,7 @@ def _parse_cfg_file_patterns(cfg_parser: configparser.RawConfigParser) -> FilePa
class _ConfigParser(configparser.RawConfigParser):
# pylint:disable=too-many-ancestors ; from our perspective, it's just one
"""Custom parser, simply to override optionxform behaviour."""
def optionxform(self, optionstr: str) -> str:
@ -203,7 +204,7 @@ def _normalize_file_patterns(raw_cfg: RawConfig) -> FilePatterns:
for filepath, patterns in list(file_patterns.items()):
if not os.path.exists(filepath):
log.warning(f"Invalid config, no such file: {filepath}")
logger.warning(f"Invalid config, no such file: {filepath}")
normalized_patterns: typ.List[str] = []
for pattern in patterns:
@ -215,8 +216,8 @@ def _normalize_file_patterns(raw_cfg: RawConfig) -> FilePatterns:
elif version_pattern == "{semver}":
normalized_pattern = normalized_pattern.replace("{pep440_version}", "{semver}")
elif "{pep440_version}" in pattern:
log.warning(f"Invalid config, cannot match '{pattern}' for '{filepath}'.")
log.warning(f"No mapping of '{version_pattern}' to '{pep440_version}'")
logger.warning(f"Invalid config, cannot match '{pattern}' for '{filepath}'.")
logger.warning(f"No mapping of '{version_pattern}' to '{pep440_version}'")
normalized_patterns.append(normalized_pattern)
file_patterns[filepath] = normalized_patterns
@ -268,7 +269,7 @@ def _parse_config(raw_cfg: RawConfig) -> Config:
push=push,
file_patterns=file_patterns,
)
log.debug(_debug_str(cfg))
logger.debug(_debug_str(cfg))
return cfg
@ -277,7 +278,8 @@ def _parse_current_version_default_pattern(cfg: Config, raw_cfg_text: str) -> st
for line in raw_cfg_text.splitlines():
if is_pycalver_section and line.startswith("current_version"):
return line.replace(cfg.current_version, cfg.version_pattern)
elif line.strip() == "[pycalver]":
if line.strip() == "[pycalver]":
is_pycalver_section = True
elif line and line[0] == "[" and line[-1] == "]":
is_pycalver_section = False
@ -288,10 +290,10 @@ def _parse_current_version_default_pattern(cfg: Config, raw_cfg_text: str) -> st
def parse(ctx: ProjectContext) -> MaybeConfig:
"""Parse config file if available."""
if not ctx.config_filepath.exists():
log.warning(f"File not found: {ctx.config_filepath}")
logger.warning(f"File not found: {ctx.config_filepath}")
return None
fh: typ.IO[str]
fobj: typ.IO[str]
cfg_path: str
if ctx.config_filepath.is_absolute():
@ -302,11 +304,11 @@ def parse(ctx: ProjectContext) -> MaybeConfig:
raw_cfg: RawConfig
try:
with ctx.config_filepath.open(mode="rt", encoding="utf-8") as fh:
with ctx.config_filepath.open(mode="rt", encoding="utf-8") as fobj:
if ctx.config_format == 'toml':
raw_cfg = _parse_toml(fh)
raw_cfg = _parse_toml(fobj)
elif ctx.config_format == 'cfg':
raw_cfg = _parse_cfg(fh)
raw_cfg = _parse_cfg(fobj)
else:
err_msg = "Invalid config_format='{ctx.config_format}'"
raise RuntimeError(err_msg)
@ -314,15 +316,15 @@ def parse(ctx: ProjectContext) -> MaybeConfig:
cfg: Config = _parse_config(raw_cfg)
if cfg_path not in cfg.file_patterns:
fh.seek(0)
raw_cfg_text = fh.read()
fobj.seek(0)
raw_cfg_text = fobj.read()
cfg.file_patterns[cfg_path] = [
_parse_current_version_default_pattern(cfg, raw_cfg_text)
]
return cfg
except ValueError as ex:
log.warning(f"Couldn't parse {cfg_path}: {str(ex)}")
logger.warning(f"Couldn't parse {cfg_path}: {str(ex)}")
return None
@ -469,12 +471,12 @@ def default_config(ctx: ProjectContext) -> str:
def write_content(ctx: ProjectContext) -> None:
"""Update project config file with initial default config."""
fh: typ.IO[str]
fobj: typ.IO[str]
cfg_content = default_config(ctx)
if ctx.config_filepath.exists():
cfg_content = "\n" + cfg_content
with ctx.config_filepath.open(mode="at", encoding="utf-8") as fh:
fh.write(cfg_content)
with ctx.config_filepath.open(mode="at", encoding="utf-8") as fobj:
fobj.write(cfg_content)
print(f"Updated {ctx.config_filepath}")

View file

@ -6,11 +6,8 @@
"""Parse PyCalVer strings from files."""
import typing as typ
import logging
from . import patterns
log = logging.getLogger("pycalver.parse")
from .patterns import compile_pattern
class PatternMatch(typ.NamedTuple):
@ -29,7 +26,7 @@ PatternMatches = typ.Iterable[PatternMatch]
def _iter_for_pattern(lines: typ.List[str], pattern: str) -> PatternMatches:
# The pattern is escaped, so that everything besides the format
# string variables is treated literally.
pattern_re = patterns.compile_pattern(pattern)
pattern_re = compile_pattern(pattern)
for lineno, line in enumerate(lines):
match = pattern_re.search(line)

View file

@ -18,7 +18,7 @@ from . import config
from . import version
from . import patterns
log = logging.getLogger("pycalver.rewrite")
logger = logging.getLogger("pycalver.rewrite")
def detect_line_sep(content: str) -> str:
@ -44,7 +44,7 @@ def detect_line_sep(content: str) -> str:
class NoPatternMatch(Exception):
"""Pattern not found in content.
log.error is used to show error info about the patterns so
logger.error is used to show error info about the patterns so
that users can debug what is wrong with them. The class
itself doesn't capture that info. This approach is used so
that all patter issues can be shown, rather than bubbling
@ -70,19 +70,19 @@ def rewrite_lines(
new_lines = old_lines[:]
found_patterns = set()
for m in parse.iter_matches(old_lines, pattern_strs):
found_patterns.add(m.pattern)
replacement = version.format_version(new_vinfo, m.pattern)
span_l, span_r = m.span
new_line = m.line[:span_l] + replacement + m.line[span_r:]
new_lines[m.lineno] = new_line
for match in parse.iter_matches(old_lines, pattern_strs):
found_patterns.add(match.pattern)
replacement = version.format_version(new_vinfo, match.pattern)
span_l, span_r = match.span
new_line = match.line[:span_l] + replacement + match.line[span_r:]
new_lines[match.lineno] = new_line
non_matched_patterns = set(pattern_strs) - found_patterns
if non_matched_patterns:
for non_matched_pattern in non_matched_patterns:
log.error(f"No match for pattern '{non_matched_pattern}'")
logger.error(f"No match for pattern '{non_matched_pattern}'")
compiled_pattern = patterns._compile_pattern(non_matched_pattern)
log.error(f"Pattern compiles to regex '{compiled_pattern}'")
logger.error(f"Pattern compiles to regex '{compiled_pattern}'")
raise NoPatternMatch("Invalid pattern(s)")
else:
return new_lines
@ -158,11 +158,11 @@ def iter_rewritten(
>>>
'''
fh: typ.IO[str]
fobj: typ.IO[str]
for file_path, pattern_strs in _iter_file_paths(file_patterns):
with file_path.open(mode="rt", encoding="utf-8") as fh:
content = fh.read()
with file_path.open(mode="rt", encoding="utf-8") as fobj:
content = fobj.read()
rfd = rfd_from_content(pattern_strs, new_vinfo, content)
yield rfd._replace(path=str(file_path))
@ -202,11 +202,11 @@ def diff(new_vinfo: version.VersionInfo, file_patterns: config.PatternsByGlob) -
"""
full_diff = ""
fh: typ.IO[str]
fobj: typ.IO[str]
for file_path, pattern_strs in sorted(_iter_file_paths(file_patterns)):
with file_path.open(mode="rt", encoding="utf-8") as fh:
content = fh.read()
with file_path.open(mode="rt", encoding="utf-8") as fobj:
content = fobj.read()
try:
rfd = rfd_from_content(pattern_strs, new_vinfo, content)
@ -228,9 +228,9 @@ def diff(new_vinfo: version.VersionInfo, file_patterns: config.PatternsByGlob) -
def rewrite(file_patterns: config.PatternsByGlob, new_vinfo: version.VersionInfo) -> None:
"""Rewrite project files, updating each with the new version."""
fh: typ.IO[str]
fobj: typ.IO[str]
for file_data in iter_rewritten(file_patterns, new_vinfo):
new_content = file_data.line_sep.join(file_data.new_lines)
with io.open(file_data.path, mode="wt", encoding="utf-8") as fh:
fh.write(new_content)
with io.open(file_data.path, mode="wt", encoding="utf-8") as fobj:
fobj.write(new_content)

View file

@ -20,7 +20,7 @@ import logging
import tempfile
import subprocess as sp
log = logging.getLogger("pycalver.vcs")
logger = logging.getLogger("pycalver.vcs")
VCS_SUBCOMMANDS_BY_NAME = {
@ -52,8 +52,8 @@ VCS_SUBCOMMANDS_BY_NAME = {
Env = typ.Dict[str, str]
class VCS:
"""VCS absraction for git and mercurial."""
class VCSAPI:
"""Absraction for git and mercurial."""
def __init__(self, name: str, subcommands: typ.Dict[str, str] = None):
self.name = name
@ -67,9 +67,9 @@ class VCS:
cmd_tmpl = self.subcommands[cmd_name]
cmd_str = cmd_tmpl.format(**kwargs)
if cmd_name in ("commit", "tag", "push_tag"):
log.info(cmd_str)
logger.info(cmd_str)
else:
log.debug(cmd_str)
logger.debug(cmd_str)
output_data: bytes = sp.check_output(cmd_str.split(), env=env, stderr=sp.STDOUT)
# TODO (mb 2018-11-15): Detect encoding of output?
@ -87,11 +87,12 @@ class VCS:
try:
retcode = sp.call(cmd, stderr=sp.PIPE, stdout=sp.PIPE)
return retcode == 0
except OSError as e:
if e.errno == 2:
except OSError as err:
if err.errno == 2:
# git/mercurial is not installed.
return False
raise
else:
raise
@property
def has_remote(self) -> bool:
@ -122,7 +123,7 @@ class VCS:
def ls_tags(self) -> typ.List[str]:
"""List vcs tags on all branches."""
ls_tag_lines = self('ls_tags').splitlines()
log.debug(f"ls_tags output {ls_tag_lines}")
logger.debug(f"ls_tags output {ls_tag_lines}")
return [line.strip().split(" ", 1)[0] for line in ls_tag_lines]
def add(self, path: str) -> None:
@ -143,10 +144,10 @@ class VCS:
tmp_file = tempfile.NamedTemporaryFile("wb", delete=False)
assert " " not in tmp_file.name
fh: typ.IO[bytes]
fobj: typ.IO[bytes]
with tmp_file as fh:
fh.write(message_data)
with tmp_file as fobj:
fobj.write(message_data)
env: Env = os.environ.copy()
env['HGENCODING'] = "utf-8"
@ -164,17 +165,17 @@ class VCS:
def __repr__(self) -> str:
"""Generate string representation."""
return f"VCS(name='{self.name}')"
return f"VCSAPI(name='{self.name}')"
def get_vcs() -> VCS:
def get_vcs_api() -> VCSAPI:
"""Detect the appropriate VCS for a repository.
raises OSError if the directory doesn't use a supported VCS.
"""
for vcs_name in VCS_SUBCOMMANDS_BY_NAME.keys():
vcs = VCS(name=vcs_name)
if vcs.is_usable:
return vcs
for vcs_name in VCS_SUBCOMMANDS_BY_NAME:
vcs_api = VCSAPI(name=vcs_name)
if vcs_api.is_usable:
return vcs_api
raise OSError("No such directory .git/ or .hg/ ")

View file

@ -14,7 +14,7 @@ import pkg_resources
from . import lex_id
from . import patterns
log = logging.getLogger("pycalver.version")
logger = logging.getLogger("pycalver.version")
# The test suite may replace this.
@ -120,7 +120,7 @@ def cal_info(date: dt.date = None) -> CalendarInfo:
if date is None:
date = TODAY
kw = {
kwargs = {
'year' : date.year,
'quarter' : _quarter_from_month(date.month),
'month' : date.month,
@ -130,7 +130,7 @@ def cal_info(date: dt.date = None) -> CalendarInfo:
'us_week' : int(date.strftime("%U"), base=10),
}
return CalendarInfo(**kw)
return CalendarInfo(**kwargs)
class VersionInfo(typ.NamedTuple):
@ -159,17 +159,17 @@ FieldValues = typ.Dict[FieldKey , MatchGroupStr]
def _parse_field_values(field_values: FieldValues) -> VersionInfo:
fv = field_values
tag = fv.get('tag')
fvals = field_values
tag = fvals.get('tag')
if tag is None:
tag = "final"
tag = TAG_ALIASES.get(tag, tag)
assert tag is not None
bid = fv['bid'] if 'bid' in fv else "0001"
bid = fvals['bid'] if 'bid' in fvals else "0001"
year = int(fv['year']) if 'year' in fv else None
doy = int(fv['doy' ]) if 'doy' in fv else None
year = int(fvals['year']) if 'year' in fvals else None
doy = int(fvals['doy' ]) if 'doy' in fvals else None
month: typ.Optional[int]
dom : typ.Optional[int]
@ -179,8 +179,8 @@ def _parse_field_values(field_values: FieldValues) -> VersionInfo:
month = date.month
dom = date.day
else:
month = int(fv['month']) if 'month' in fv else None
dom = int(fv['dom' ]) if 'dom' in fv else None
month = int(fvals['month']) if 'month' in fvals else None
dom = int(fvals['dom' ]) if 'dom' in fvals else None
iso_week: typ.Optional[int]
us_week : typ.Optional[int]
@ -194,13 +194,13 @@ def _parse_field_values(field_values: FieldValues) -> VersionInfo:
iso_week = None
us_week = None
quarter = int(fv['quarter']) if 'quarter' in fv else None
quarter = int(fvals['quarter']) if 'quarter' in fvals else None
if quarter is None and month:
quarter = _quarter_from_month(month)
major = int(fv['major']) if 'major' in fv else 0
minor = int(fv['minor']) if 'minor' in fv else 0
patch = int(fv['patch']) if 'patch' in fv else 0
major = int(fvals['major']) if 'major' in fvals else 0
minor = int(fvals['minor']) if 'minor' in fvals else 0
patch = int(fvals['patch']) if 'patch' in fvals else 0
return VersionInfo(
year=year,
@ -417,38 +417,38 @@ def format_version(vinfo: VersionInfo, pattern: str) -> str:
for part_name, full_part_format in patterns.FULL_PART_FORMATS.items():
full_pattern = full_pattern.replace("{" + part_name + "}", full_part_format)
kw: typ.Dict[str, typ.Union[str, int, None]] = vinfo._asdict()
kwargs: typ.Dict[str, typ.Union[str, int, None]] = vinfo._asdict()
tag = vinfo.tag
if tag == 'final':
kw['release' ] = ""
kw['pep440_tag'] = ""
kwargs['release' ] = ""
kwargs['pep440_tag'] = ""
else:
kw['release' ] = "-" + tag
kw['pep440_tag'] = PEP440_TAGS[tag] + "0"
kwargs['release' ] = "-" + tag
kwargs['pep440_tag'] = PEP440_TAGS[tag] + "0"
kw['release_tag'] = tag
kwargs['release_tag'] = tag
year = vinfo.year
if year:
kw['yy' ] = str(year)[-2:]
kw['yyyy'] = year
kwargs['yy' ] = str(year)[-2:]
kwargs['yyyy'] = year
kw['BID'] = int(vinfo.bid, 10)
kwargs['BID'] = int(vinfo.bid, 10)
for part_name, field in ID_FIELDS_BY_PART.items():
val = kw[field]
val = kwargs[field]
if part_name.lower() == field.lower():
if isinstance(val, str):
kw[part_name] = int(val, base=10)
kwargs[part_name] = int(val, base=10)
else:
kw[part_name] = val
kwargs[part_name] = val
else:
assert len(set(part_name)) == 1
padded_len = len(part_name)
kw[part_name] = str(val).zfill(padded_len)
kwargs[part_name] = str(val).zfill(padded_len)
return full_pattern.format(**kw)
return full_pattern.format(**kwargs)
def incr(
@ -467,7 +467,7 @@ def incr(
try:
old_vinfo = parse_version_info(old_version, pattern)
except PatternError as ex:
log.error(str(ex))
logger.error(str(ex))
return None
cur_vinfo = old_vinfo
@ -480,7 +480,7 @@ def incr(
if old_date <= cur_date:
cur_vinfo = cur_vinfo._replace(**cur_cal_nfo._asdict())
else:
log.warning(f"Version appears to be from the future '{old_version}'")
logger.warning(f"Version appears to be from the future '{old_version}'")
cur_vinfo = cur_vinfo._replace(bid=lex_id.next_id(cur_vinfo.bid))
@ -496,7 +496,7 @@ def incr(
new_version = format_version(cur_vinfo, pattern)
if new_version == old_version:
log.error("Invalid arguments or pattern, version did not change.")
logger.error("Invalid arguments or pattern, version did not change.")
return None
else:
return new_version

View file

@ -1,3 +1,6 @@
# pylint:disable=redefined-outer-name ; pytest fixtures
# pylint:disable=protected-access ; allowed for test code
import os
import time
import shutil
@ -36,7 +39,7 @@ ENV = {
}
def sh(*cmd):
def shell(*cmd):
return sp.check_output(cmd, env=ENV)
@ -157,8 +160,8 @@ def test_incr_invalid(runner):
def _add_project_files(*files):
if "README.md" in files:
with pl.Path("README.md").open(mode="wt", encoding="utf-8") as fh:
fh.write(
with pl.Path("README.md").open(mode="wt", encoding="utf-8") as fobj:
fobj.write(
"""
Hello World v201701.0002-alpha !
aka. 201701.2a0 !
@ -166,16 +169,16 @@ def _add_project_files(*files):
)
if "setup.cfg" in files:
with pl.Path("setup.cfg").open(mode="wt", encoding="utf-8") as fh:
fh.write(SETUP_CFG_FIXTURE)
with pl.Path("setup.cfg").open(mode="wt", encoding="utf-8") as fobj:
fobj.write(SETUP_CFG_FIXTURE)
if "pycalver.toml" in files:
with pl.Path("pycalver.toml").open(mode="wt", encoding="utf-8") as fh:
fh.write(PYCALVER_TOML_FIXTURE)
with pl.Path("pycalver.toml").open(mode="wt", encoding="utf-8") as fobj:
fobj.write(PYCALVER_TOML_FIXTURE)
if "pyproject.toml" in files:
with pl.Path("pyproject.toml").open(mode="wt", encoding="utf-8") as fh:
fh.write(PYPROJECT_TOML_FIXTURE)
with pl.Path("pyproject.toml").open(mode="wt", encoding="utf-8") as fobj:
fobj.write(PYPROJECT_TOML_FIXTURE)
def test_nocfg(runner, caplog):
@ -212,8 +215,8 @@ def test_novcs_nocfg_init(runner, caplog):
assert "File not found" in log.message
assert os.path.exists("pycalver.toml")
with pl.Path("pycalver.toml").open(mode="r", encoding="utf-8") as fh:
cfg_content = fh.read()
with pl.Path("pycalver.toml").open(mode="r", encoding="utf-8") as fobj:
cfg_content = fobj.read()
base_str = config.DEFAULT_TOML_BASE_TMPL.format(initial_version=config._initial_version())
assert base_str in cfg_content
@ -234,13 +237,13 @@ def test_novcs_nocfg_init(runner, caplog):
assert "Configuration already initialized" in log.message
def test_novcs_setupcfg_init(runner, caplog):
def test_novcs_setupcfg_init(runner):
_add_project_files("README.md", "setup.cfg")
result = runner.invoke(cli.cli, ['init', "-vv"])
assert result.exit_code == 0
with pl.Path("setup.cfg").open(mode="r", encoding="utf-8") as fh:
cfg_content = fh.read()
with pl.Path("setup.cfg").open(mode="r", encoding="utf-8") as fobj:
cfg_content = fobj.read()
base_str = config.DEFAULT_CONFIGPARSER_BASE_TMPL.format(
initial_version=config._initial_version()
@ -259,8 +262,8 @@ def test_novcs_pyproject_init(runner):
result = runner.invoke(cli.cli, ['init', "-vv"])
assert result.exit_code == 0
with pl.Path("pyproject.toml").open(mode="r", encoding="utf-8") as fh:
cfg_content = fh.read()
with pl.Path("pyproject.toml").open(mode="r", encoding="utf-8") as fobj:
cfg_content = fobj.read()
base_str = config.DEFAULT_TOML_BASE_TMPL.format(initial_version=config._initial_version())
assert base_str in cfg_content
@ -272,16 +275,16 @@ def test_novcs_pyproject_init(runner):
assert f"PEP440 : {config._initial_version_pep440()}\n" in result.output
def _vcs_init(vcs, files=["README.md"]):
def _vcs_init(vcs, files=("README.md",)):
assert vcs in ("git", "hg")
assert not pl.Path(f".{vcs}").exists()
sh(f"{vcs}", "init")
shell(f"{vcs}", "init")
assert pl.Path(f".{vcs}").is_dir()
for filename in files:
sh(f"{vcs}", "add", filename)
shell(f"{vcs}", "add", filename)
sh(f"{vcs}", "commit", "-m", "initial commit")
shell(f"{vcs}", "commit", "-m", "initial commit")
def test_git_init(runner):
@ -322,7 +325,7 @@ def test_git_tag_eval(runner):
tag_version = initial_version.replace(".0001-alpha", ".0123-beta")
tag_version_pep440 = tag_version[1:7] + ".123b0"
sh("git", "tag", "--annotate", tag_version, "--message", f"bump version to {tag_version}")
shell("git", "tag", "--annotate", tag_version, "--message", f"bump version to {tag_version}")
result = runner.invoke(cli.cli, ['show', "-vv"])
assert result.exit_code == 0
@ -342,7 +345,7 @@ def test_hg_tag_eval(runner):
tag_version = initial_version.replace(".0001-alpha", ".0123-beta")
tag_version_pep440 = tag_version[1:7] + ".123b0"
sh("hg", "tag", tag_version, "--message", f"bump version to {tag_version}")
shell("hg", "tag", tag_version, "--message", f"bump version to {tag_version}")
result = runner.invoke(cli.cli, ['show', "-vv"])
assert result.exit_code == 0
@ -361,16 +364,16 @@ def test_novcs_bump(runner):
calver = config._initial_version()[:7]
with pl.Path("README.md").open() as fh:
content = fh.read()
with pl.Path("README.md").open() as fobj:
content = fobj.read()
assert calver + ".0002-alpha !\n" in content
assert calver[1:] + ".2a0 !\n" in content
result = runner.invoke(cli.cli, ['bump', "-vv", "--release", "beta"])
assert result.exit_code == 0
with pl.Path("README.md").open() as fh:
content = fh.read()
with pl.Path("README.md").open() as fobj:
content = fobj.read()
assert calver + ".0003-beta !\n" in content
assert calver[1:] + ".3b0 !\n" in content
@ -382,16 +385,16 @@ def test_git_bump(runner):
result = runner.invoke(cli.cli, ['init', "-vv"])
assert result.exit_code == 0
sh("git", "add", "pycalver.toml")
sh("git", "commit", "-m", "initial commit")
shell("git", "add", "pycalver.toml")
shell("git", "commit", "-m", "initial commit")
result = runner.invoke(cli.cli, ['bump', "-vv"])
assert result.exit_code == 0
calver = config._initial_version()[:7]
with pl.Path("README.md").open() as fh:
content = fh.read()
with pl.Path("README.md").open() as fobj:
content = fobj.read()
assert calver + ".0002-alpha !\n" in content
@ -402,28 +405,28 @@ def test_hg_bump(runner):
result = runner.invoke(cli.cli, ['init', "-vv"])
assert result.exit_code == 0
sh("hg", "add", "pycalver.toml")
sh("hg", "commit", "-m", "initial commit")
shell("hg", "add", "pycalver.toml")
shell("hg", "commit", "-m", "initial commit")
result = runner.invoke(cli.cli, ['bump', "-vv"])
assert result.exit_code == 0
calver = config._initial_version()[:7]
with pl.Path("README.md").open() as fh:
content = fh.read()
with pl.Path("README.md").open() as fobj:
content = fobj.read()
assert calver + ".0002-alpha !\n" in content
def test_empty_git_bump(runner, caplog):
sh("git", "init")
with pl.Path("setup.cfg").open(mode="w") as fh:
fh.write("")
shell("git", "init")
with pl.Path("setup.cfg").open(mode="w") as fobj:
fobj.write("")
result = runner.invoke(cli.cli, ['init', "-vv"])
assert result.exit_code == 0
with pl.Path("setup.cfg").open(mode="r") as fh:
default_cfg_data = fh.read()
with pl.Path("setup.cfg").open(mode="r") as fobj:
default_cfg_data = fobj.read()
assert "[pycalver]\n" in default_cfg_data
assert "\ncurrent_version = " in default_cfg_data
@ -437,14 +440,14 @@ def test_empty_git_bump(runner, caplog):
def test_empty_hg_bump(runner, caplog):
sh("hg", "init")
with pl.Path("setup.cfg").open(mode="w") as fh:
fh.write("")
shell("hg", "init")
with pl.Path("setup.cfg").open(mode="w") as fobj:
fobj.write("")
result = runner.invoke(cli.cli, ['init', "-vv"])
assert result.exit_code == 0
with pl.Path("setup.cfg").open(mode="r") as fh:
default_cfg_text = fh.read()
with pl.Path("setup.cfg").open(mode="r") as fobj:
default_cfg_text = fobj.read()
assert "[pycalver]\n" in default_cfg_text
assert "\ncurrent_version = " in default_cfg_text

View file

@ -1,3 +1,6 @@
# pylint:disable=redefined-outer-name ; pytest fixtures
# pylint:disable=protected-access ; allowed for test code
import io
from pycalver import config
@ -141,8 +144,8 @@ def test_parse_project_toml():
project_path = util.FIXTURES_DIR / "project_a"
config_path = util.FIXTURES_DIR / "project_a" / "pycalver.toml"
with config_path.open() as fh:
config_data = fh.read()
with config_path.open() as fobj:
config_data = fobj.read()
assert "v201710.0123-alpha" in config_data
@ -165,8 +168,8 @@ def test_parse_project_cfg():
project_path = util.FIXTURES_DIR / "project_b"
config_path = util.FIXTURES_DIR / "project_b" / "setup.cfg"
with config_path.open() as fh:
config_data = fh.read()
with config_path.open() as fobj:
config_data = fobj.read()
assert "v201307.0456-beta" in config_data
@ -211,7 +214,7 @@ def test_parse_toml_file(tmpdir):
}
def test_parse_default_pattern(tmpdir):
def test_parse_default_pattern():
project_path = util.FIXTURES_DIR / "project_c"
config_path = util.FIXTURES_DIR / "project_c" / "pyproject.toml"

View file

@ -1,3 +1,5 @@
# pylint:disable=protected-access ; allowed for test code
import random
from pycalver import lex_id
@ -18,7 +20,7 @@ def test_next_id_overflow():
def test_next_id_random():
for i in range(1000):
for _ in range(1000):
prev_id = str(random.randint(1, 100 * 1000))
try:
next_id = lex_id.next_id(prev_id)

View file

@ -1,3 +1,5 @@
# pylint:disable=protected-access ; allowed for test code
import copy
from pycalver import config

View file

@ -1,3 +1,5 @@
# pylint:disable=protected-access ; allowed for test code
import random
import datetime as dt
@ -42,7 +44,7 @@ def test_bump_random(monkeypatch):
monkeypatch.setattr(version, 'TODAY', cur_date)
for i in range(1000):
for _ in range(1000):
cur_date += dt.timedelta(days=int((1 + random.random()) ** 10))
new_version = version.incr(
cur_version, release=random.choice([None, "alpha", "beta", "rc", "final", "post"])

View file

@ -36,8 +36,9 @@ FIXTURE_PATH_PARTS = [
class Project:
def __init__(self, project=None):
tmpdir = pl.Path(tempfile.mkdtemp(prefix="pytest_"))
self.tmpdir = tmpdir
tmpdir = pl.Path(tempfile.mkdtemp(prefix="pytest_"))
self.tmpdir = tmpdir
self.prev_cwd = os.getcwd()
self.dir = tmpdir / "pycalver_project"
self.dir.mkdir()
@ -58,7 +59,6 @@ class Project:
shutil.copy(str(fixture_fpath), str(project_fpath))
def __enter__(self):
self.prev_cwd = os.getcwd()
os.chdir(str(self.dir))
return self
@ -67,7 +67,7 @@ class Project:
os.chdir(self.prev_cwd)
return False
def sh(self, cmd):
def shell(self, cmd):
shell = Shell(str(self.dir))
return shell(cmd)
@ -76,17 +76,17 @@ class Project:
for path_parts in FIXTURE_PATH_PARTS:
maybe_file_path = self.dir.joinpath(*path_parts)
if maybe_file_path.exists():
self.sh(f"{cmd} add {str(maybe_file_path)}")
self.shell(f"{cmd} add {str(maybe_file_path)}")
added_file_paths.append(maybe_file_path)
assert len(added_file_paths) >= 2
def git_init(self):
self.sh("git init")
self.shell("git init")
self._vcs_addall(cmd="git")
self.sh("git commit -m 'initial commit'")
self.shell("git commit -m 'initial commit'")
def hg_init(self):
self.sh("hg init")
self.shell("hg init")
self._vcs_addall(cmd="hg")
self.sh("hg commit -m 'initial commit'")
self.shell("hg commit -m 'initial commit'")