mirror of https://github.com/TECHNOFAB11/bumpver.git (synced 2025-12-12 06:20:08 +01:00)

misc linter updates

commit 0439ddf7d5 (parent f1e17562b8)

14 changed files with 158 additions and 131 deletions
@@ -342,6 +342,16 @@ lint_flake8:
 	@printf "\e[1F\e[9C ok\n"
 
 
+## Run pylint. Should not break the build yet
+.PHONY: lint_pylint
+lint_pylint:
+	@mkdir -p "reports/";
+
+	@printf "pylint ..\n";
+	@$(DEV_ENV)/bin/pylint --rcfile=setup.cfg src/ test/
+	@printf "\e[1F\e[9C ok\n"
+
+
 ## Run flake8 linter and check for fmt
 .PHONY: lint
 lint: lint_isort lint_sjfmt lint_flake8

@@ -362,16 +372,6 @@ mypy:
 	@printf "\e[1F\e[9C ok\n"
 
 
-## Run pylint. Should not break the build yet
-.PHONY: pylint
-pylint:
-	@mkdir -p "reports/";
-
-	@printf "pylint ..\n";
-	@$(DEV_ENV)/bin/pylint --rcfile=setup.cfg src/ test/
-	@printf "\e[1F\e[9C ok\n"
-
-
 ## Run pytest unit and integration tests
 .PHONY: test
 test:
8  scripts/exit_0_if_empty.py  (Executable file)

@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# if you know a bash one liner for this, be my guest
+import sys
+
+data = open(sys.argv[1]).read(10)
+has_data = len(data) > 0
+
+sys.exit(has_data)
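The new helper exits with status 0 only when the given file is empty: reading the first 10 bytes is enough to decide, and sys.exit(has_data) maps any content to exit status 1. A minimal usage sketch, assuming a hypothetical report path (the diff does not show how the script is wired into the Makefile):

# Hypothetical usage sketch for scripts/exit_0_if_empty.py: an exit status of 0
# means the file is empty, anything else means it has content.
import subprocess as sp

result = sp.run(["python", "scripts/exit_0_if_empty.py", "reports/pylint.txt"])
if result.returncode == 0:
    print("report is empty")
else:
    print("report has content")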
@@ -137,7 +137,11 @@ notes=TODO,FIXME,XXX,SLOW,BUG
disable =
    bad-continuation,
    bad-whitespace,
    line-too-long,
    logging-not-lazy,
    logging-fstring-interpolation,
    no-else-return,
    no-else-raise,
    missing-docstring,
    missing-module-docstring,
    missing-class-docstring,
@@ -135,6 +135,7 @@ def _parse_cfg_file_patterns(cfg_parser: configparser.RawConfigParser) -> FilePa
 
 
 class _ConfigParser(configparser.RawConfigParser):
+    # pylint:disable=too-many-ancestors ; from our perspective, it's just one
     """Custom parser, simply to override optionxform behaviour."""
 
     def optionxform(self, optionstr: str) -> str:

@@ -277,7 +278,8 @@ def _parse_current_version_default_pattern(cfg: Config, raw_cfg_text: str) -> st
     for line in raw_cfg_text.splitlines():
         if is_pycalver_section and line.startswith("current_version"):
             return line.replace(cfg.current_version, cfg.version_pattern)
-        elif line.strip() == "[pycalver]":
+
+        if line.strip() == "[pycalver]":
             is_pycalver_section = True
         elif line and line[0] == "[" and line[-1] == "]":
             is_pycalver_section = False
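The restructuring above is behaviour-preserving: the first branch returns, so the section-header check no longer needs to be an elif. A standalone sketch of the scan, using a hypothetical config string and hypothetical values for cfg.current_version and cfg.version_pattern, shows what the function effectively produces:

# Standalone sketch of the scan in _parse_current_version_default_pattern.
raw_cfg_text = "\n".join([
    "[pycalver]",
    "current_version = v202012.0001-alpha",
    "[other_section]",
    "current_version = untouched",
])
current_version = "v202012.0001-alpha"
version_pattern = "{pycalver}"

is_pycalver_section = False
for line in raw_cfg_text.splitlines():
    if is_pycalver_section and line.startswith("current_version"):
        print(line.replace(current_version, version_pattern))
        break

    if line.strip() == "[pycalver]":
        is_pycalver_section = True
    elif line and line[0] == "[" and line[-1] == "]":
        is_pycalver_section = False
# prints: current_version = {pycalver}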
@@ -291,7 +293,7 @@ def parse(ctx: ProjectContext) -> MaybeConfig:
         logger.warning(f"File not found: {ctx.config_filepath}")
         return None
 
-    fh: typ.IO[str]
+    fobj: typ.IO[str]
 
     cfg_path: str
     if ctx.config_filepath.is_absolute():

@@ -302,11 +304,11 @@ def parse(ctx: ProjectContext) -> MaybeConfig:
     raw_cfg: RawConfig
 
     try:
-        with ctx.config_filepath.open(mode="rt", encoding="utf-8") as fh:
+        with ctx.config_filepath.open(mode="rt", encoding="utf-8") as fobj:
             if ctx.config_format == 'toml':
-                raw_cfg = _parse_toml(fh)
+                raw_cfg = _parse_toml(fobj)
             elif ctx.config_format == 'cfg':
-                raw_cfg = _parse_cfg(fh)
+                raw_cfg = _parse_cfg(fobj)
             else:
                 err_msg = "Invalid config_format='{ctx.config_format}'"
                 raise RuntimeError(err_msg)

@@ -314,8 +316,8 @@ def parse(ctx: ProjectContext) -> MaybeConfig:
             cfg: Config = _parse_config(raw_cfg)
 
             if cfg_path not in cfg.file_patterns:
-                fh.seek(0)
-                raw_cfg_text = fh.read()
+                fobj.seek(0)
+                raw_cfg_text = fobj.read()
                 cfg.file_patterns[cfg_path] = [
                     _parse_current_version_default_pattern(cfg, raw_cfg_text)
                 ]

@@ -469,12 +471,12 @@ def default_config(ctx: ProjectContext) -> str:
 
 def write_content(ctx: ProjectContext) -> None:
     """Update project config file with initial default config."""
-    fh: typ.IO[str]
+    fobj: typ.IO[str]
 
     cfg_content = default_config(ctx)
     if ctx.config_filepath.exists():
         cfg_content = "\n" + cfg_content
 
-    with ctx.config_filepath.open(mode="at", encoding="utf-8") as fh:
-        fh.write(cfg_content)
+    with ctx.config_filepath.open(mode="at", encoding="utf-8") as fobj:
+        fobj.write(cfg_content)
     print(f"Updated {ctx.config_filepath}")
@@ -7,7 +7,7 @@
 
 import typing as typ
 
-from . import patterns
+from .patterns import compile_pattern
 
 
 class PatternMatch(typ.NamedTuple):

@@ -26,7 +26,7 @@ PatternMatches = typ.Iterable[PatternMatch]
 def _iter_for_pattern(lines: typ.List[str], pattern: str) -> PatternMatches:
     # The pattern is escaped, so that everything besides the format
     # string variables is treated literally.
-    pattern_re = patterns.compile_pattern(pattern)
+    pattern_re = compile_pattern(pattern)
 
     for lineno, line in enumerate(lines):
         match = pattern_re.search(line)
@@ -70,12 +70,12 @@ def rewrite_lines(
     new_lines = old_lines[:]
     found_patterns = set()
 
-    for m in parse.iter_matches(old_lines, pattern_strs):
-        found_patterns.add(m.pattern)
-        replacement = version.format_version(new_vinfo, m.pattern)
-        span_l, span_r = m.span
-        new_line = m.line[:span_l] + replacement + m.line[span_r:]
-        new_lines[m.lineno] = new_line
+    for match in parse.iter_matches(old_lines, pattern_strs):
+        found_patterns.add(match.pattern)
+        replacement = version.format_version(new_vinfo, match.pattern)
+        span_l, span_r = match.span
+        new_line = match.line[:span_l] + replacement + match.line[span_r:]
+        new_lines[match.lineno] = new_line
 
     non_matched_patterns = set(pattern_strs) - found_patterns
     if non_matched_patterns:

@@ -158,11 +158,11 @@ def iter_rewritten(
     >>>
     '''
 
-    fh: typ.IO[str]
+    fobj: typ.IO[str]
 
     for file_path, pattern_strs in _iter_file_paths(file_patterns):
-        with file_path.open(mode="rt", encoding="utf-8") as fh:
-            content = fh.read()
+        with file_path.open(mode="rt", encoding="utf-8") as fobj:
+            content = fobj.read()
 
         rfd = rfd_from_content(pattern_strs, new_vinfo, content)
         yield rfd._replace(path=str(file_path))

@@ -202,11 +202,11 @@ def diff(new_vinfo: version.VersionInfo, file_patterns: config.PatternsByGlob) -
     """
 
     full_diff = ""
-    fh: typ.IO[str]
+    fobj: typ.IO[str]
 
     for file_path, pattern_strs in sorted(_iter_file_paths(file_patterns)):
-        with file_path.open(mode="rt", encoding="utf-8") as fh:
-            content = fh.read()
+        with file_path.open(mode="rt", encoding="utf-8") as fobj:
+            content = fobj.read()
 
         try:
             rfd = rfd_from_content(pattern_strs, new_vinfo, content)

@@ -228,9 +228,9 @@ def diff(new_vinfo: version.VersionInfo, file_patterns: config.PatternsByGlob) -
 
 def rewrite(file_patterns: config.PatternsByGlob, new_vinfo: version.VersionInfo) -> None:
     """Rewrite project files, updating each with the new version."""
-    fh: typ.IO[str]
+    fobj: typ.IO[str]
 
     for file_data in iter_rewritten(file_patterns, new_vinfo):
         new_content = file_data.line_sep.join(file_data.new_lines)
-        with io.open(file_data.path, mode="wt", encoding="utf-8") as fh:
-            fh.write(new_content)
+        with io.open(file_data.path, mode="wt", encoding="utf-8") as fobj:
+            fobj.write(new_content)
@@ -87,11 +87,12 @@ class VCSAPI:
         try:
             retcode = sp.call(cmd, stderr=sp.PIPE, stdout=sp.PIPE)
             return retcode == 0
-        except OSError as e:
-            if e.errno == 2:
+        except OSError as err:
+            if err.errno == 2:
                 # git/mercurial is not installed.
                 return False
-            raise
+            else:
+                raise
 
     @property
     def has_remote(self) -> bool:
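Errno 2 is ENOENT ("No such file or directory"), which is what the OSError carries when the git or hg executable itself is missing. An equivalent spelling against the errno module constant, shown here only as a hedged alternative rather than what the commit does:

# Sketch: same check as the VCSAPI usability test, but with errno.ENOENT
# instead of the bare literal 2. cmd is assumed to be e.g. ["git", "status"].
import errno
import subprocess as sp

def is_vcs_usable(cmd):
    try:
        retcode = sp.call(cmd, stderr=sp.PIPE, stdout=sp.PIPE)
        return retcode == 0
    except OSError as err:
        if err.errno == errno.ENOENT:
            # git/mercurial is not installed.
            return False
        else:
            raise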
@@ -143,10 +144,10 @@ class VCSAPI:
         tmp_file = tempfile.NamedTemporaryFile("wb", delete=False)
         assert " " not in tmp_file.name
 
-        fh: typ.IO[bytes]
+        fobj: typ.IO[bytes]
 
-        with tmp_file as fh:
-            fh.write(message_data)
+        with tmp_file as fobj:
+            fobj.write(message_data)
 
         env: Env = os.environ.copy()
         env['HGENCODING'] = "utf-8"
@@ -172,7 +173,7 @@ def get_vcs_api() -> VCSAPI:
 
     raises OSError if the directory doesn't use a supported VCS.
     """
-    for vcs_name in VCS_SUBCOMMANDS_BY_NAME.keys():
+    for vcs_name in VCS_SUBCOMMANDS_BY_NAME:
        vcs_api = VCSAPI(name=vcs_name)
        if vcs_api.is_usable:
            return vcs_api
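Iterating a dict yields its keys, so dropping .keys() changes nothing. A two-line illustration with a made-up mapping:

# Hypothetical mapping, only to show that direct iteration and .keys() agree.
subcommands_by_name = {"git": ["status"], "hg": ["status"]}
assert list(subcommands_by_name) == list(subcommands_by_name.keys())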
@@ -120,7 +120,7 @@ def cal_info(date: dt.date = None) -> CalendarInfo:
     if date is None:
         date = TODAY
 
-    kw = {
+    kwargs = {
         'year' : date.year,
         'quarter' : _quarter_from_month(date.month),
         'month' : date.month,

@@ -130,7 +130,7 @@ def cal_info(date: dt.date = None) -> CalendarInfo:
         'us_week' : int(date.strftime("%U"), base=10),
     }
 
-    return CalendarInfo(**kw)
+    return CalendarInfo(**kwargs)
 
 
 class VersionInfo(typ.NamedTuple):

@@ -159,17 +159,17 @@ FieldValues = typ.Dict[FieldKey , MatchGroupStr]
 
 
 def _parse_field_values(field_values: FieldValues) -> VersionInfo:
-    fv = field_values
-    tag = fv.get('tag')
+    fvals = field_values
+    tag = fvals.get('tag')
     if tag is None:
         tag = "final"
     tag = TAG_ALIASES.get(tag, tag)
     assert tag is not None
 
-    bid = fv['bid'] if 'bid' in fv else "0001"
+    bid = fvals['bid'] if 'bid' in fvals else "0001"
 
-    year = int(fv['year']) if 'year' in fv else None
-    doy = int(fv['doy' ]) if 'doy' in fv else None
+    year = int(fvals['year']) if 'year' in fvals else None
+    doy = int(fvals['doy' ]) if 'doy' in fvals else None
 
     month: typ.Optional[int]
     dom : typ.Optional[int]

@@ -179,8 +179,8 @@ def _parse_field_values(field_values: FieldValues) -> VersionInfo:
         month = date.month
         dom = date.day
     else:
-        month = int(fv['month']) if 'month' in fv else None
-        dom = int(fv['dom' ]) if 'dom' in fv else None
+        month = int(fvals['month']) if 'month' in fvals else None
+        dom = int(fvals['dom' ]) if 'dom' in fvals else None
 
     iso_week: typ.Optional[int]
     us_week : typ.Optional[int]

@@ -194,13 +194,13 @@ def _parse_field_values(field_values: FieldValues) -> VersionInfo:
         iso_week = None
         us_week = None
 
-    quarter = int(fv['quarter']) if 'quarter' in fv else None
+    quarter = int(fvals['quarter']) if 'quarter' in fvals else None
     if quarter is None and month:
         quarter = _quarter_from_month(month)
 
-    major = int(fv['major']) if 'major' in fv else 0
-    minor = int(fv['minor']) if 'minor' in fv else 0
-    patch = int(fv['patch']) if 'patch' in fv else 0
+    major = int(fvals['major']) if 'major' in fvals else 0
+    minor = int(fvals['minor']) if 'minor' in fvals else 0
+    patch = int(fvals['patch']) if 'patch' in fvals else 0
 
     return VersionInfo(
         year=year,
@@ -417,38 +417,38 @@ def format_version(vinfo: VersionInfo, pattern: str) -> str:
     for part_name, full_part_format in patterns.FULL_PART_FORMATS.items():
         full_pattern = full_pattern.replace("{" + part_name + "}", full_part_format)
 
-    kw: typ.Dict[str, typ.Union[str, int, None]] = vinfo._asdict()
+    kwargs: typ.Dict[str, typ.Union[str, int, None]] = vinfo._asdict()
 
     tag = vinfo.tag
     if tag == 'final':
-        kw['release' ] = ""
-        kw['pep440_tag'] = ""
+        kwargs['release' ] = ""
+        kwargs['pep440_tag'] = ""
     else:
-        kw['release' ] = "-" + tag
-        kw['pep440_tag'] = PEP440_TAGS[tag] + "0"
+        kwargs['release' ] = "-" + tag
+        kwargs['pep440_tag'] = PEP440_TAGS[tag] + "0"
 
-    kw['release_tag'] = tag
+    kwargs['release_tag'] = tag
 
     year = vinfo.year
     if year:
-        kw['yy' ] = str(year)[-2:]
-        kw['yyyy'] = year
+        kwargs['yy' ] = str(year)[-2:]
+        kwargs['yyyy'] = year
 
-    kw['BID'] = int(vinfo.bid, 10)
+    kwargs['BID'] = int(vinfo.bid, 10)
 
     for part_name, field in ID_FIELDS_BY_PART.items():
-        val = kw[field]
+        val = kwargs[field]
         if part_name.lower() == field.lower():
             if isinstance(val, str):
-                kw[part_name] = int(val, base=10)
+                kwargs[part_name] = int(val, base=10)
             else:
-                kw[part_name] = val
+                kwargs[part_name] = val
         else:
             assert len(set(part_name)) == 1
             padded_len = len(part_name)
-            kw[part_name] = str(val).zfill(padded_len)
+            kwargs[part_name] = str(val).zfill(padded_len)
 
-    return full_pattern.format(**kw)
+    return full_pattern.format(**kwargs)
 
 
 def incr(
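For orientation, the renamed kwargs dict ends up in full_pattern.format(**kwargs), and the part-name branch pads values to the width of the repeated part name. A small sketch of that padding rule, detached from the real pattern machinery (the part names below are illustrative):

# Illustration of the zero-fill rule kwargs[part_name] = str(val).zfill(len(part_name)).
def pad_part(part_name: str, val: int) -> str:
    assert len(set(part_name)) == 1  # part names repeat a single character, e.g. "MM"
    return str(val).zfill(len(part_name))

print(pad_part("MM", 7))  # "07"
print(pad_part("DD", 3))  # "03"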
@@ -1,3 +1,6 @@
+# pylint:disable=redefined-outer-name ; pytest fixtures
+# pylint:disable=protected-access ; allowed for test code
+
 import os
 import time
 import shutil

@@ -36,7 +39,7 @@ ENV = {
 }
 
 
-def sh(*cmd):
+def shell(*cmd):
     return sp.check_output(cmd, env=ENV)
 
 

@@ -157,8 +160,8 @@ def test_incr_invalid(runner):
 
 def _add_project_files(*files):
     if "README.md" in files:
-        with pl.Path("README.md").open(mode="wt", encoding="utf-8") as fh:
-            fh.write(
+        with pl.Path("README.md").open(mode="wt", encoding="utf-8") as fobj:
+            fobj.write(
                 """
                 Hello World v201701.0002-alpha !
                 aka. 201701.2a0 !

@@ -166,16 +169,16 @@ def _add_project_files(*files):
             )
 
     if "setup.cfg" in files:
-        with pl.Path("setup.cfg").open(mode="wt", encoding="utf-8") as fh:
-            fh.write(SETUP_CFG_FIXTURE)
+        with pl.Path("setup.cfg").open(mode="wt", encoding="utf-8") as fobj:
+            fobj.write(SETUP_CFG_FIXTURE)
 
     if "pycalver.toml" in files:
-        with pl.Path("pycalver.toml").open(mode="wt", encoding="utf-8") as fh:
-            fh.write(PYCALVER_TOML_FIXTURE)
+        with pl.Path("pycalver.toml").open(mode="wt", encoding="utf-8") as fobj:
+            fobj.write(PYCALVER_TOML_FIXTURE)
 
     if "pyproject.toml" in files:
-        with pl.Path("pyproject.toml").open(mode="wt", encoding="utf-8") as fh:
-            fh.write(PYPROJECT_TOML_FIXTURE)
+        with pl.Path("pyproject.toml").open(mode="wt", encoding="utf-8") as fobj:
+            fobj.write(PYPROJECT_TOML_FIXTURE)
 
 
 def test_nocfg(runner, caplog):

@@ -212,8 +215,8 @@ def test_novcs_nocfg_init(runner, caplog):
         assert "File not found" in log.message
 
     assert os.path.exists("pycalver.toml")
-    with pl.Path("pycalver.toml").open(mode="r", encoding="utf-8") as fh:
-        cfg_content = fh.read()
+    with pl.Path("pycalver.toml").open(mode="r", encoding="utf-8") as fobj:
+        cfg_content = fobj.read()
 
     base_str = config.DEFAULT_TOML_BASE_TMPL.format(initial_version=config._initial_version())
     assert base_str in cfg_content

@@ -234,13 +237,13 @@ def test_novcs_nocfg_init(runner, caplog):
         assert "Configuration already initialized" in log.message
 
 
-def test_novcs_setupcfg_init(runner, caplog):
+def test_novcs_setupcfg_init(runner):
     _add_project_files("README.md", "setup.cfg")
     result = runner.invoke(cli.cli, ['init', "-vv"])
     assert result.exit_code == 0
 
-    with pl.Path("setup.cfg").open(mode="r", encoding="utf-8") as fh:
-        cfg_content = fh.read()
+    with pl.Path("setup.cfg").open(mode="r", encoding="utf-8") as fobj:
+        cfg_content = fobj.read()
 
     base_str = config.DEFAULT_CONFIGPARSER_BASE_TMPL.format(
         initial_version=config._initial_version()

@@ -259,8 +262,8 @@ def test_novcs_pyproject_init(runner):
     result = runner.invoke(cli.cli, ['init', "-vv"])
     assert result.exit_code == 0
 
-    with pl.Path("pyproject.toml").open(mode="r", encoding="utf-8") as fh:
-        cfg_content = fh.read()
+    with pl.Path("pyproject.toml").open(mode="r", encoding="utf-8") as fobj:
+        cfg_content = fobj.read()
 
     base_str = config.DEFAULT_TOML_BASE_TMPL.format(initial_version=config._initial_version())
     assert base_str in cfg_content
@@ -272,16 +275,16 @@ def test_novcs_pyproject_init(runner):
     assert f"PEP440 : {config._initial_version_pep440()}\n" in result.output
 
 
-def _vcs_init(vcs, files=["README.md"]):
+def _vcs_init(vcs, files=("README.md",)):
     assert vcs in ("git", "hg")
     assert not pl.Path(f".{vcs}").exists()
-    sh(f"{vcs}", "init")
+    shell(f"{vcs}", "init")
     assert pl.Path(f".{vcs}").is_dir()
 
     for filename in files:
-        sh(f"{vcs}", "add", filename)
+        shell(f"{vcs}", "add", filename)
 
-    sh(f"{vcs}", "commit", "-m", "initial commit")
+    shell(f"{vcs}", "commit", "-m", "initial commit")
 
 
 def test_git_init(runner):
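Switching the default from ["README.md"] to ("README.md",) avoids a mutable default argument, where the same list object is shared across calls. A minimal demonstration of the pitfall with a hypothetical function (not from the test suite):

# Hypothetical example: the list default is created once and reused on every
# call, so mutations leak between calls. A tuple default cannot accumulate state.
def collect(item, bucket=[]):
    bucket.append(item)
    return bucket

print(collect("a"))  # ['a']
print(collect("b"))  # ['a', 'b']  <- state leaked from the first call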
@@ -322,7 +325,7 @@ def test_git_tag_eval(runner):
     tag_version = initial_version.replace(".0001-alpha", ".0123-beta")
     tag_version_pep440 = tag_version[1:7] + ".123b0"
 
-    sh("git", "tag", "--annotate", tag_version, "--message", f"bump version to {tag_version}")
+    shell("git", "tag", "--annotate", tag_version, "--message", f"bump version to {tag_version}")
 
     result = runner.invoke(cli.cli, ['show', "-vv"])
     assert result.exit_code == 0

@@ -342,7 +345,7 @@ def test_hg_tag_eval(runner):
     tag_version = initial_version.replace(".0001-alpha", ".0123-beta")
     tag_version_pep440 = tag_version[1:7] + ".123b0"
 
-    sh("hg", "tag", tag_version, "--message", f"bump version to {tag_version}")
+    shell("hg", "tag", tag_version, "--message", f"bump version to {tag_version}")
 
     result = runner.invoke(cli.cli, ['show', "-vv"])
     assert result.exit_code == 0

@@ -361,16 +364,16 @@ def test_novcs_bump(runner):
 
     calver = config._initial_version()[:7]
 
-    with pl.Path("README.md").open() as fh:
-        content = fh.read()
+    with pl.Path("README.md").open() as fobj:
+        content = fobj.read()
     assert calver + ".0002-alpha !\n" in content
     assert calver[1:] + ".2a0 !\n" in content
 
     result = runner.invoke(cli.cli, ['bump', "-vv", "--release", "beta"])
     assert result.exit_code == 0
 
-    with pl.Path("README.md").open() as fh:
-        content = fh.read()
+    with pl.Path("README.md").open() as fobj:
+        content = fobj.read()
     assert calver + ".0003-beta !\n" in content
     assert calver[1:] + ".3b0 !\n" in content
 

@@ -382,16 +385,16 @@ def test_git_bump(runner):
     result = runner.invoke(cli.cli, ['init', "-vv"])
     assert result.exit_code == 0
 
-    sh("git", "add", "pycalver.toml")
-    sh("git", "commit", "-m", "initial commit")
+    shell("git", "add", "pycalver.toml")
+    shell("git", "commit", "-m", "initial commit")
 
     result = runner.invoke(cli.cli, ['bump', "-vv"])
     assert result.exit_code == 0
 
     calver = config._initial_version()[:7]
 
-    with pl.Path("README.md").open() as fh:
-        content = fh.read()
+    with pl.Path("README.md").open() as fobj:
+        content = fobj.read()
     assert calver + ".0002-alpha !\n" in content
 
 

@@ -402,28 +405,28 @@ def test_hg_bump(runner):
     result = runner.invoke(cli.cli, ['init', "-vv"])
     assert result.exit_code == 0
 
-    sh("hg", "add", "pycalver.toml")
-    sh("hg", "commit", "-m", "initial commit")
+    shell("hg", "add", "pycalver.toml")
+    shell("hg", "commit", "-m", "initial commit")
 
     result = runner.invoke(cli.cli, ['bump', "-vv"])
     assert result.exit_code == 0
 
     calver = config._initial_version()[:7]
 
-    with pl.Path("README.md").open() as fh:
-        content = fh.read()
+    with pl.Path("README.md").open() as fobj:
+        content = fobj.read()
     assert calver + ".0002-alpha !\n" in content
 
 
 def test_empty_git_bump(runner, caplog):
-    sh("git", "init")
-    with pl.Path("setup.cfg").open(mode="w") as fh:
-        fh.write("")
+    shell("git", "init")
+    with pl.Path("setup.cfg").open(mode="w") as fobj:
+        fobj.write("")
     result = runner.invoke(cli.cli, ['init', "-vv"])
     assert result.exit_code == 0
 
-    with pl.Path("setup.cfg").open(mode="r") as fh:
-        default_cfg_data = fh.read()
+    with pl.Path("setup.cfg").open(mode="r") as fobj:
+        default_cfg_data = fobj.read()
 
     assert "[pycalver]\n" in default_cfg_data
     assert "\ncurrent_version = " in default_cfg_data

@@ -437,14 +440,14 @@ def test_empty_git_bump(runner, caplog):
 
 
 def test_empty_hg_bump(runner, caplog):
-    sh("hg", "init")
-    with pl.Path("setup.cfg").open(mode="w") as fh:
-        fh.write("")
+    shell("hg", "init")
+    with pl.Path("setup.cfg").open(mode="w") as fobj:
+        fobj.write("")
     result = runner.invoke(cli.cli, ['init', "-vv"])
     assert result.exit_code == 0
 
-    with pl.Path("setup.cfg").open(mode="r") as fh:
-        default_cfg_text = fh.read()
+    with pl.Path("setup.cfg").open(mode="r") as fobj:
+        default_cfg_text = fobj.read()
 
     assert "[pycalver]\n" in default_cfg_text
     assert "\ncurrent_version = " in default_cfg_text
@@ -1,3 +1,6 @@
+# pylint:disable=redefined-outer-name ; pytest fixtures
+# pylint:disable=protected-access ; allowed for test code
+
 import io
 
 from pycalver import config

@@ -141,8 +144,8 @@ def test_parse_project_toml():
     project_path = util.FIXTURES_DIR / "project_a"
     config_path = util.FIXTURES_DIR / "project_a" / "pycalver.toml"
 
-    with config_path.open() as fh:
-        config_data = fh.read()
+    with config_path.open() as fobj:
+        config_data = fobj.read()
 
     assert "v201710.0123-alpha" in config_data
 

@@ -165,8 +168,8 @@ def test_parse_project_cfg():
     project_path = util.FIXTURES_DIR / "project_b"
     config_path = util.FIXTURES_DIR / "project_b" / "setup.cfg"
 
-    with config_path.open() as fh:
-        config_data = fh.read()
+    with config_path.open() as fobj:
+        config_data = fobj.read()
 
     assert "v201307.0456-beta" in config_data
 

@@ -211,7 +214,7 @@ def test_parse_toml_file(tmpdir):
     }
 
 
-def test_parse_default_pattern(tmpdir):
+def test_parse_default_pattern():
     project_path = util.FIXTURES_DIR / "project_c"
     config_path = util.FIXTURES_DIR / "project_c" / "pyproject.toml"
 
@@ -1,3 +1,5 @@
+# pylint:disable=protected-access ; allowed for test code
+
 import random
 
 from pycalver import lex_id

@@ -18,7 +20,7 @@ def test_next_id_overflow():
 
 
 def test_next_id_random():
-    for i in range(1000):
+    for _ in range(1000):
         prev_id = str(random.randint(1, 100 * 1000))
         try:
             next_id = lex_id.next_id(prev_id)
@@ -1,3 +1,5 @@
+# pylint:disable=protected-access ; allowed for test code
+
 import copy
 
 from pycalver import config
@@ -1,3 +1,5 @@
+# pylint:disable=protected-access ; allowed for test code
+
 import random
 import datetime as dt
 

@@ -42,7 +44,7 @@ def test_bump_random(monkeypatch):
 
     monkeypatch.setattr(version, 'TODAY', cur_date)
 
-    for i in range(1000):
+    for _ in range(1000):
         cur_date += dt.timedelta(days=int((1 + random.random()) ** 10))
         new_version = version.incr(
             cur_version, release=random.choice([None, "alpha", "beta", "rc", "final", "post"])
18  test/util.py
@@ -36,8 +36,9 @@ FIXTURE_PATH_PARTS = [
 
 class Project:
     def __init__(self, project=None):
         tmpdir = pl.Path(tempfile.mkdtemp(prefix="pytest_"))
         self.tmpdir = tmpdir
+        self.prev_cwd = os.getcwd()
 
         self.dir = tmpdir / "pycalver_project"
         self.dir.mkdir()

@@ -58,7 +59,6 @@ class Project:
         shutil.copy(str(fixture_fpath), str(project_fpath))
 
     def __enter__(self):
-        self.prev_cwd = os.getcwd()
         os.chdir(str(self.dir))
         return self
 

@@ -67,7 +67,7 @@ class Project:
         os.chdir(self.prev_cwd)
         return False
 
-    def sh(self, cmd):
+    def shell(self, cmd):
         shell = Shell(str(self.dir))
         return shell(cmd)
 

@@ -76,17 +76,17 @@ class Project:
         for path_parts in FIXTURE_PATH_PARTS:
             maybe_file_path = self.dir.joinpath(*path_parts)
             if maybe_file_path.exists():
-                self.sh(f"{cmd} add {str(maybe_file_path)}")
+                self.shell(f"{cmd} add {str(maybe_file_path)}")
                 added_file_paths.append(maybe_file_path)
 
         assert len(added_file_paths) >= 2
 
     def git_init(self):
-        self.sh("git init")
+        self.shell("git init")
         self._vcs_addall(cmd="git")
-        self.sh("git commit -m 'initial commit'")
+        self.shell("git commit -m 'initial commit'")
 
     def hg_init(self):
-        self.sh("hg init")
+        self.shell("hg init")
         self._vcs_addall(cmd="hg")
-        self.sh("hg commit -m 'initial commit'")
+        self.shell("hg commit -m 'initial commit'")
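Moving self.prev_cwd = os.getcwd() from __enter__ into __init__ means every attribute is defined in the constructor, presumably to satisfy pylint's attribute-defined-outside-init check; the previous working directory is now captured at construction time rather than on entry. A compact sketch of the same pattern with a hypothetical helper class:

# Hypothetical helper showing the pattern: define all attributes in __init__,
# then use them in __enter__/__exit__ to restore the previous working directory.
import os

class ChdirGuard:
    def __init__(self, target):
        self.target = target
        self.prev_cwd = os.getcwd()  # captured up front, as in Project.__init__

    def __enter__(self):
        os.chdir(self.target)
        return self

    def __exit__(self, exc_type, exc, tb):
        os.chdir(self.prev_cwd)
        return False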