mirror of https://github.com/TECHNOFAB11/bumpver.git
synced 2025-12-12 22:40:09 +01:00

misc linter updates

This commit is contained in: parent f1e17562b8, commit 0439ddf7d5
14 changed files with 158 additions and 131 deletions
@@ -135,6 +135,7 @@ def _parse_cfg_file_patterns(cfg_parser: configparser.RawConfigParser) -> FilePa

 class _ConfigParser(configparser.RawConfigParser):
+    # pylint:disable=too-many-ancestors ; from our perspective, it's just one
     """Custom parser, simply to override optionxform behaviour."""

     def optionxform(self, optionstr: str) -> str:

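The too-many-ancestors suppression is needed only because RawConfigParser already sits on a deep inheritance chain; the subclass exists solely to override optionxform, which lowercases option names by default. A minimal, self-contained sketch of that override, assuming a case-sensitivity use case (the class name and sample data are illustrative, not taken from this repository):

import configparser

class CaseSensitiveParser(configparser.RawConfigParser):
    # pylint:disable=too-many-ancestors
    """Keep option names exactly as written instead of lowercasing them."""

    def optionxform(self, optionstr: str) -> str:
        # The base implementation returns optionstr.lower(); returning the
        # string unchanged preserves the original casing of keys.
        return optionstr

parser = CaseSensitiveParser()
parser.read_string("[pycalver]\nCurrent_Version = v2020.1001-alpha\n")
assert parser.get("pycalver", "Current_Version") == "v2020.1001-alpha"
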
@@ -277,7 +278,8 @@ def _parse_current_version_default_pattern(cfg: Config, raw_cfg_text: str) -> st

     for line in raw_cfg_text.splitlines():
         if is_pycalver_section and line.startswith("current_version"):
             return line.replace(cfg.current_version, cfg.version_pattern)
-        elif line.strip() == "[pycalver]":
+
+        if line.strip() == "[pycalver]":
             is_pycalver_section = True
         elif line and line[0] == "[" and line[-1] == "]":
             is_pycalver_section = False

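Splitting the elif off into its own if statement is the usual fix for pylint's no-else-return style of warning: once the first branch unconditionally returns, chaining the remaining conditions to it adds nothing. A small sketch of the resulting control flow, using illustrative names rather than the module's own helpers:

def first_current_version_line(raw_cfg_text: str) -> str:
    is_pycalver_section = False
    for line in raw_cfg_text.splitlines():
        if is_pycalver_section and line.startswith("current_version"):
            return line
        # A separate `if` behaves exactly like the old `elif`, because the
        # branch above always returns when it is taken.
        if line.strip() == "[pycalver]":
            is_pycalver_section = True
        elif line and line[0] == "[" and line[-1] == "]":
            is_pycalver_section = False
    return ""
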
@@ -291,7 +293,7 @@ def parse(ctx: ProjectContext) -> MaybeConfig:

         logger.warning(f"File not found: {ctx.config_filepath}")
         return None

-    fh: typ.IO[str]
+    fobj: typ.IO[str]

     cfg_path: str
     if ctx.config_filepath.is_absolute():

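The fh -> fobj renames throughout this commit are mechanical, but the pre-declared annotation they touch (fobj: typ.IO[str]) is the pattern worth keeping in mind: annotating the name once, before the with block, gives a type checker a type for a variable that is only ever bound inside the context manager. A self-contained sketch of the same pattern, with an illustrative function name:

import pathlib
import typing as typ

def read_text_config(path: pathlib.Path) -> str:
    # Declare the type once; the assignment inside `with` reuses it.
    fobj: typ.IO[str]
    with path.open(mode="rt", encoding="utf-8") as fobj:
        return fobj.read()
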
@@ -302,11 +304,11 @@ def parse(ctx: ProjectContext) -> MaybeConfig:

     raw_cfg: RawConfig

     try:
-        with ctx.config_filepath.open(mode="rt", encoding="utf-8") as fh:
+        with ctx.config_filepath.open(mode="rt", encoding="utf-8") as fobj:
             if ctx.config_format == 'toml':
-                raw_cfg = _parse_toml(fh)
+                raw_cfg = _parse_toml(fobj)
             elif ctx.config_format == 'cfg':
-                raw_cfg = _parse_cfg(fh)
+                raw_cfg = _parse_cfg(fobj)
             else:
                 err_msg = "Invalid config_format='{ctx.config_format}'"
                 raise RuntimeError(err_msg)

@@ -314,8 +316,8 @@ def parse(ctx: ProjectContext) -> MaybeConfig:

             cfg: Config = _parse_config(raw_cfg)

             if cfg_path not in cfg.file_patterns:
-                fh.seek(0)
-                raw_cfg_text = fh.read()
+                fobj.seek(0)
+                raw_cfg_text = fobj.read()
                 cfg.file_patterns[cfg_path] = [
                     _parse_current_version_default_pattern(cfg, raw_cfg_text)
                 ]

@@ -469,12 +471,12 @@ def default_config(ctx: ProjectContext) -> str:

 def write_content(ctx: ProjectContext) -> None:
     """Update project config file with initial default config."""
-    fh: typ.IO[str]
+    fobj: typ.IO[str]

     cfg_content = default_config(ctx)
     if ctx.config_filepath.exists():
         cfg_content = "\n" + cfg_content

-    with ctx.config_filepath.open(mode="at", encoding="utf-8") as fh:
-        fh.write(cfg_content)
+    with ctx.config_filepath.open(mode="at", encoding="utf-8") as fobj:
+        fobj.write(cfg_content)
     print(f"Updated {ctx.config_filepath}")

@@ -7,7 +7,7 @@

 import typing as typ

-from . import patterns
+from .patterns import compile_pattern


 class PatternMatch(typ.NamedTuple):

@@ -26,7 +26,7 @@ PatternMatches = typ.Iterable[PatternMatch]

 def _iter_for_pattern(lines: typ.List[str], pattern: str) -> PatternMatches:
     # The pattern is escaped, so that everything besides the format
     # string variables is treated literally.
-    pattern_re = patterns.compile_pattern(pattern)
+    pattern_re = compile_pattern(pattern)

     for lineno, line in enumerate(lines):
         match = pattern_re.search(line)

@@ -70,12 +70,12 @@ def rewrite_lines(

     new_lines = old_lines[:]
     found_patterns = set()

-    for m in parse.iter_matches(old_lines, pattern_strs):
-        found_patterns.add(m.pattern)
-        replacement = version.format_version(new_vinfo, m.pattern)
-        span_l, span_r = m.span
-        new_line = m.line[:span_l] + replacement + m.line[span_r:]
-        new_lines[m.lineno] = new_line
+    for match in parse.iter_matches(old_lines, pattern_strs):
+        found_patterns.add(match.pattern)
+        replacement = version.format_version(new_vinfo, match.pattern)
+        span_l, span_r = match.span
+        new_line = match.line[:span_l] + replacement + match.line[span_r:]
+        new_lines[match.lineno] = new_line

     non_matched_patterns = set(pattern_strs) - found_patterns
     if non_matched_patterns:

@@ -158,11 +158,11 @@ def iter_rewritten(

     >>>
     '''

-    fh: typ.IO[str]
+    fobj: typ.IO[str]

     for file_path, pattern_strs in _iter_file_paths(file_patterns):
-        with file_path.open(mode="rt", encoding="utf-8") as fh:
-            content = fh.read()
+        with file_path.open(mode="rt", encoding="utf-8") as fobj:
+            content = fobj.read()

         rfd = rfd_from_content(pattern_strs, new_vinfo, content)
         yield rfd._replace(path=str(file_path))

@@ -202,11 +202,11 @@ def diff(new_vinfo: version.VersionInfo, file_patterns: config.PatternsByGlob) -

     """

     full_diff = ""
-    fh: typ.IO[str]
+    fobj: typ.IO[str]

     for file_path, pattern_strs in sorted(_iter_file_paths(file_patterns)):
-        with file_path.open(mode="rt", encoding="utf-8") as fh:
-            content = fh.read()
+        with file_path.open(mode="rt", encoding="utf-8") as fobj:
+            content = fobj.read()

         try:
             rfd = rfd_from_content(pattern_strs, new_vinfo, content)

@@ -228,9 +228,9 @@ def diff(new_vinfo: version.VersionInfo, file_patterns: config.PatternsByGlob) -

 def rewrite(file_patterns: config.PatternsByGlob, new_vinfo: version.VersionInfo) -> None:
     """Rewrite project files, updating each with the new version."""
-    fh: typ.IO[str]
+    fobj: typ.IO[str]

     for file_data in iter_rewritten(file_patterns, new_vinfo):
         new_content = file_data.line_sep.join(file_data.new_lines)
-        with io.open(file_data.path, mode="wt", encoding="utf-8") as fh:
-            fh.write(new_content)
+        with io.open(file_data.path, mode="wt", encoding="utf-8") as fobj:
+            fobj.write(new_content)

@@ -87,11 +87,12 @@ class VCSAPI:

         try:
             retcode = sp.call(cmd, stderr=sp.PIPE, stdout=sp.PIPE)
             return retcode == 0
-        except OSError as e:
-            if e.errno == 2:
+        except OSError as err:
+            if err.errno == 2:
                 # git/mercurial is not installed.
                 return False
-            raise
+            else:
+                raise

     @property
     def has_remote(self) -> bool:

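The renamed exception variable still compares err.errno against the literal 2, which is errno.ENOENT ("No such file or directory"), the error subprocess raises when the git or hg executable is not on PATH. A sketch of the same check written against the errno module, with an illustrative function name and command:

import errno
import subprocess as sp

def is_tool_usable(cmd: str) -> bool:
    try:
        retcode = sp.call([cmd, "--version"], stderr=sp.PIPE, stdout=sp.PIPE)
        return retcode == 0
    except OSError as err:
        if err.errno == errno.ENOENT:
            # The executable is not installed, so this VCS is unusable.
            return False
        else:
            raise
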
@@ -143,10 +144,10 @@ class VCSAPI:

         tmp_file = tempfile.NamedTemporaryFile("wb", delete=False)
         assert " " not in tmp_file.name

-        fh: typ.IO[bytes]
+        fobj: typ.IO[bytes]

-        with tmp_file as fh:
-            fh.write(message_data)
+        with tmp_file as fobj:
+            fobj.write(message_data)

         env: Env = os.environ.copy()
         env['HGENCODING'] = "utf-8"

@@ -172,7 +173,7 @@ def get_vcs_api() -> VCSAPI:

     raises OSError if the directory doesn't use a supported VCS.
     """
-    for vcs_name in VCS_SUBCOMMANDS_BY_NAME.keys():
+    for vcs_name in VCS_SUBCOMMANDS_BY_NAME:
         vcs_api = VCSAPI(name=vcs_name)
         if vcs_api.is_usable:
             return vcs_api

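Dropping .keys() addresses pylint's consider-iterating-dictionary hint: iterating a dict yields its keys directly, so the extra view object is redundant. A quick illustration with made-up contents:

# Placeholder mapping; the real VCS_SUBCOMMANDS_BY_NAME maps names to commands.
subcommands_by_name = {"git": ["git", "status"], "hg": ["hg", "status"]}

names_with_keys = list(subcommands_by_name.keys())
names_direct    = list(subcommands_by_name)
assert names_with_keys == names_direct == ["git", "hg"]
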
@@ -120,7 +120,7 @@ def cal_info(date: dt.date = None) -> CalendarInfo:

     if date is None:
         date = TODAY

-    kw = {
+    kwargs = {
         'year'    : date.year,
         'quarter' : _quarter_from_month(date.month),
         'month'   : date.month,

@@ -130,7 +130,7 @@ def cal_info(date: dt.date = None) -> CalendarInfo:

         'us_week' : int(date.strftime("%U"), base=10),
     }

-    return CalendarInfo(**kw)
+    return CalendarInfo(**kwargs)


 class VersionInfo(typ.NamedTuple):

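The kw -> kwargs rename only changes a name, but the pattern it feeds is worth spelling out: cal_info builds a plain dict of field values and splats it into the CalendarInfo NamedTuple. A reduced, self-contained sketch of the same construction (the field set and class name are trimmed for illustration):

import datetime as dt
import typing as typ

class MiniCalInfo(typ.NamedTuple):
    year   : int
    month  : int
    quarter: int

date = dt.date(2020, 10, 1)
kwargs = {
    'year'   : date.year,
    'month'  : date.month,
    'quarter': (date.month - 1) // 3 + 1,  # stand-in for _quarter_from_month
}
# Keyword unpacking maps each dict key onto the matching NamedTuple field.
info = MiniCalInfo(**kwargs)
assert info == MiniCalInfo(year=2020, month=10, quarter=4)
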
@@ -159,17 +159,17 @@ FieldValues = typ.Dict[FieldKey , MatchGroupStr]


 def _parse_field_values(field_values: FieldValues) -> VersionInfo:
-    fv = field_values
-    tag = fv.get('tag')
+    fvals = field_values
+    tag = fvals.get('tag')
     if tag is None:
         tag = "final"
     tag = TAG_ALIASES.get(tag, tag)
     assert tag is not None

-    bid = fv['bid'] if 'bid' in fv else "0001"
+    bid = fvals['bid'] if 'bid' in fvals else "0001"

-    year = int(fv['year']) if 'year' in fv else None
-    doy  = int(fv['doy' ]) if 'doy' in fv else None
+    year = int(fvals['year']) if 'year' in fvals else None
+    doy  = int(fvals['doy' ]) if 'doy' in fvals else None

     month: typ.Optional[int]
     dom  : typ.Optional[int]

@@ -179,8 +179,8 @@ def _parse_field_values(field_values: FieldValues) -> VersionInfo:

         month = date.month
         dom   = date.day
     else:
-        month = int(fv['month']) if 'month' in fv else None
-        dom   = int(fv['dom'  ]) if 'dom' in fv else None
+        month = int(fvals['month']) if 'month' in fvals else None
+        dom   = int(fvals['dom'  ]) if 'dom' in fvals else None

     iso_week: typ.Optional[int]
     us_week : typ.Optional[int]

@@ -194,13 +194,13 @@ def _parse_field_values(field_values: FieldValues) -> VersionInfo:

         iso_week = None
         us_week  = None

-    quarter = int(fv['quarter']) if 'quarter' in fv else None
+    quarter = int(fvals['quarter']) if 'quarter' in fvals else None
     if quarter is None and month:
         quarter = _quarter_from_month(month)

-    major = int(fv['major']) if 'major' in fv else 0
-    minor = int(fv['minor']) if 'minor' in fv else 0
-    patch = int(fv['patch']) if 'patch' in fv else 0
+    major = int(fvals['major']) if 'major' in fvals else 0
+    minor = int(fvals['minor']) if 'minor' in fvals else 0
+    patch = int(fvals['patch']) if 'patch' in fvals else 0

     return VersionInfo(
         year=year,

@@ -417,38 +417,38 @@ def format_version(vinfo: VersionInfo, pattern: str) -> str:

     for part_name, full_part_format in patterns.FULL_PART_FORMATS.items():
         full_pattern = full_pattern.replace("{" + part_name + "}", full_part_format)

-    kw: typ.Dict[str, typ.Union[str, int, None]] = vinfo._asdict()
+    kwargs: typ.Dict[str, typ.Union[str, int, None]] = vinfo._asdict()

     tag = vinfo.tag
     if tag == 'final':
-        kw['release'   ] = ""
-        kw['pep440_tag'] = ""
+        kwargs['release'   ] = ""
+        kwargs['pep440_tag'] = ""
     else:
-        kw['release'   ] = "-" + tag
-        kw['pep440_tag'] = PEP440_TAGS[tag] + "0"
+        kwargs['release'   ] = "-" + tag
+        kwargs['pep440_tag'] = PEP440_TAGS[tag] + "0"

-    kw['release_tag'] = tag
+    kwargs['release_tag'] = tag

     year = vinfo.year
     if year:
-        kw['yy'  ] = str(year)[-2:]
-        kw['yyyy'] = year
+        kwargs['yy'  ] = str(year)[-2:]
+        kwargs['yyyy'] = year

-    kw['BID'] = int(vinfo.bid, 10)
+    kwargs['BID'] = int(vinfo.bid, 10)

     for part_name, field in ID_FIELDS_BY_PART.items():
-        val = kw[field]
+        val = kwargs[field]
         if part_name.lower() == field.lower():
             if isinstance(val, str):
-                kw[part_name] = int(val, base=10)
+                kwargs[part_name] = int(val, base=10)
             else:
-                kw[part_name] = val
+                kwargs[part_name] = val
         else:
             assert len(set(part_name)) == 1
             padded_len = len(part_name)
-            kw[part_name] = str(val).zfill(padded_len)
+            kwargs[part_name] = str(val).zfill(padded_len)

-    return full_pattern.format(**kw)
+    return full_pattern.format(**kwargs)


 def incr(

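In the padding branch of the loop above, a part name such as "MM" or "DD" is a run of one repeated character whose length encodes the zero-padding width, which is what the len(set(part_name)) == 1 assertion checks; str.zfill then does the actual padding. A tiny worked example of that convention (the values are made up):

part_name = "MM"          # two identical characters -> pad to width 2
val = 7
assert len(set(part_name)) == 1
padded = str(val).zfill(len(part_name))
assert padded == "07"
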