noxfile.py (9 changes: 6 additions & 3 deletions)
@@ -203,18 +203,21 @@ def _get_version_from_arguments(arguments: list[str]) -> str:
     Otherwise, raise a ValueError describing what's wrong.
     """
     if len(arguments) != 1:
-        raise ValueError("Expected exactly 1 argument")
+        msg = "Expected exactly 1 argument"
+        raise ValueError(msg)

     version = arguments[0]
     parts = version.split(".")

     if len(parts) != 2:
         # Not of the form: YY.N
-        raise ValueError("not of the form: YY.N")
+        msg = "not of the form: YY.N"
+        raise ValueError(msg)

     if not all(part.isdigit() for part in parts):
         # Not all segments are integers.
-        raise ValueError("non-integer segments")
+        msg = "non-integer segments"
+        raise ValueError(msg)

     # All is good.
     return version
pyproject.toml (2 changes: 0 additions & 2 deletions)
@@ -93,13 +93,11 @@ extend-exclude = [
show-fixes = true

[tool.ruff.lint]
select = ["ALL"]
ignore = [
"A002", # function args shadowing builtins is fine
"C9", # complexity
"COM812", # trailing commas teach the formatter
"D", # doc formatting
"EM", # flake8-errmsg
"ERA", # commented out code
"FBT", # boolean positional args (existing API)
"FIX", # has todos
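Once "EM" (flake8-errmsg) is no longer ignored, ruff's EM101 rule flags a plain string literal passed directly to an exception constructor and EM102 flags an f-string; that is the pattern every code change in this diff follows. A minimal sketch of what the rules flag and the accepted fix; the parse_port function and its message are invented for illustration and are not part of this repository:

def parse_port(value: str) -> int:
    if not value.isdigit():
        # EM102 would flag: raise ValueError(f"invalid port: {value!r}")
        # because the traceback's `raise` line repeats the whole message.
        msg = f"invalid port: {value!r}"  # bind the message to a variable first
        raise ValueError(msg)  # the raise line now stays short in tracebacks
    return int(value)

The same mechanical rewrite appears throughout the diff: the message text moves into a local msg (or errmsg where a msg name is already taken) and the raise references only the variable.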
src/packaging/_elffile.py (14 changes: 9 additions & 5 deletions)
@@ -48,10 +48,12 @@ def __init__(self, f: IO[bytes]) -> None:
         try:
             ident = self._read("16B")
         except struct.error as e:
-            raise ELFInvalid("unable to parse identification") from e
+            msg = "unable to parse identification"
+            raise ELFInvalid(msg) from e
         magic = bytes(ident[:4])
         if magic != b"\x7fELF":
-            raise ELFInvalid(f"invalid magic: {magic!r}")
+            msg = f"invalid magic: {magic!r}"
+            raise ELFInvalid(msg)

         self.capacity = ident[4]  # Format for program header (bitness).
         self.encoding = ident[5]  # Data structure encoding (endianness).
@@ -67,9 +69,10 @@ def __init__(self, f: IO[bytes]) -> None:
                 (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
             }[(self.capacity, self.encoding)]
         except KeyError as e:
-            raise ELFInvalid(
+            msg = (
                 f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"
-            ) from e
+            )
+            raise ELFInvalid(msg) from e

         try:
             (
@@ -85,7 +88,8 @@ def __init__(self, f: IO[bytes]) -> None:
                 self._e_phnum,  # Number of sections.
             ) = self._read(e_fmt)
         except struct.error as e:
-            raise ELFInvalid("unable to parse machine and section information") from e
+            msg = "unable to parse machine and section information"
+            raise ELFInvalid(msg) from e

     def _read(self, fmt: str) -> tuple[int, ...]:
         return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
src/packaging/_tokenizer.py (3 changes: 2 additions & 1 deletion)
@@ -140,7 +140,8 @@ def expect(self, name: str, *, expected: str) -> Token:
         The token is *not* read.
         """
         if not self.check(name):
-            raise self.raise_syntax_error(f"Expected {expected}")
+            msg = f"Expected {expected}"
+            raise self.raise_syntax_error(msg)
         return self.read()

     def read(self) -> Token:
src/packaging/markers.py (3 changes: 2 additions & 1 deletion)
@@ -188,7 +188,8 @@ def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str]) -> bool:

     oper: Operator | None = _operators.get(op.serialize())
     if oper is None:
-        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+        msg = f"Undefined {op!r} on {lhs!r} and {rhs!r}."
+        raise UndefinedComparison(msg)

     return oper(lhs, rhs)

src/packaging/metadata.py (113 changes: 57 additions & 56 deletions)
@@ -222,7 +222,8 @@ def _parse_project_urls(data: list[str]) -> dict[str, str]:
             # The label already exists in our set of urls, so this field
             # is unparsable, and we can just add the whole thing to our
             # unparsable data and stop processing it.
-            raise KeyError("duplicate labels in project urls")
+            msg = "duplicate labels in project urls"
+            raise KeyError(msg)
             urls[label] = url

     return urls
@@ -244,7 +245,8 @@ def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
         try:
             return bpayload.decode("utf8", "strict")
         except UnicodeDecodeError as exc:
-            raise ValueError("payload in an invalid encoding") from exc
+            errmsg = "payload in an invalid encoding"
+            raise ValueError(errmsg) from exc


 # The various parse_FORMAT functions here are intended to be as lenient as
@@ -590,36 +592,38 @@ def _invalid_metadata(
     def _process_metadata_version(self, value: str) -> _MetadataVersion:
         # Implicitly makes Metadata-Version required.
         if value not in _VALID_METADATA_VERSIONS:
-            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+            msg = f"{value!r} is not a valid metadata version"
+            raise self._invalid_metadata(msg)
         return cast("_MetadataVersion", value)

     def _process_name(self, value: str) -> str:
         if not value:
-            raise self._invalid_metadata("{field} is a required field")
+            msg = "{field} is a required field"
+            raise self._invalid_metadata(msg)
         # Validate the name as a side-effect.
         try:
             utils.canonicalize_name(value, validate=True)
         except utils.InvalidName as exc:
-            raise self._invalid_metadata(
-                f"{value!r} is invalid for {{field}}", cause=exc
-            ) from exc
+            msg = f"{value!r} is invalid for {{field}}"
+            raise self._invalid_metadata(msg, cause=exc) from exc
         else:
             return value

     def _process_version(self, value: str) -> version_module.Version:
         if not value:
-            raise self._invalid_metadata("{field} is a required field")
+            msg = "{field} is a required field"
+            raise self._invalid_metadata(msg)
         try:
             return version_module.parse(value)
         except version_module.InvalidVersion as exc:
-            raise self._invalid_metadata(
-                f"{value!r} is invalid for {{field}}", cause=exc
-            ) from exc
+            msg = f"{value!r} is invalid for {{field}}"
+            raise self._invalid_metadata(msg, cause=exc) from exc

     def _process_summary(self, value: str) -> str:
         """Check the field contains no newlines."""
         if "\n" in value:
-            raise self._invalid_metadata("{field} must be a single line")
+            msg = "{field} must be a single line"
+            raise self._invalid_metadata(msg)
         return value

     def _process_description_content_type(self, value: str) -> str:
@@ -635,35 +639,34 @@ def _process_description_content_type(self, value: str) -> str:
         # Check if content-type is valid or defaulted to `text/plain` and thus was
         # not parseable.
         if content_type not in content_types or content_type not in value.lower():
-            raise self._invalid_metadata(
-                f"{{field}} must be one of {list(content_types)}, not {value!r}"
-            )
+            msg = f"{{field}} must be one of {list(content_types)}, not {value!r}"
+            raise self._invalid_metadata(msg)

         charset = parameters.get("charset", "UTF-8")
         if charset != "UTF-8":
-            raise self._invalid_metadata(
-                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
-            )
+            msg = f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+            raise self._invalid_metadata(msg)

         markdown_variants = {"GFM", "CommonMark"}
         variant = parameters.get("variant", "GFM")  # Use an acceptable default.
         if content_type == "text/markdown" and variant not in markdown_variants:
-            raise self._invalid_metadata(
+            msg = (
                 f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
-                f"not {variant!r}",
+                f"not {variant!r}"
             )
+            raise self._invalid_metadata(
+                msg,
+            )
         return value

     def _process_dynamic(self, value: list[str]) -> list[str]:
         for dynamic_field in map(str.lower, value):
             if dynamic_field in {"name", "version", "metadata-version"}:
-                raise self._invalid_metadata(
-                    f"{dynamic_field!r} is not allowed as a dynamic field"
-                )
+                msg = f"{dynamic_field!r} is not allowed as a dynamic field"
+                raise self._invalid_metadata(msg)
             elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
-                raise self._invalid_metadata(
-                    f"{dynamic_field!r} is not a valid dynamic field"
-                )
+                msg = f"{dynamic_field!r} is not a valid dynamic field"
+                raise self._invalid_metadata(msg)
         return list(map(str.lower, value))

     def _process_provides_extra(
@@ -675,19 +678,17 @@ def _process_provides_extra(
             for name in value:
                 normalized_names.append(utils.canonicalize_name(name, validate=True))
         except utils.InvalidName as exc:
-            raise self._invalid_metadata(
-                f"{name!r} is invalid for {{field}}", cause=exc
-            ) from exc
+            msg = f"{name!r} is invalid for {{field}}"
+            raise self._invalid_metadata(msg, cause=exc) from exc
         else:
             return normalized_names

     def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
         try:
             return specifiers.SpecifierSet(value)
         except specifiers.InvalidSpecifier as exc:
-            raise self._invalid_metadata(
-                f"{value!r} is invalid for {{field}}", cause=exc
-            ) from exc
+            msg = f"{value!r} is invalid for {{field}}"
+            raise self._invalid_metadata(msg, cause=exc) from exc

     def _process_requires_dist(
         self,
@@ -698,43 +699,39 @@ def _process_requires_dist(
             for req in value:
                 reqs.append(requirements.Requirement(req))
         except requirements.InvalidRequirement as exc:
-            raise self._invalid_metadata(
-                f"{req!r} is invalid for {{field}}", cause=exc
-            ) from exc
+            msg = f"{req!r} is invalid for {{field}}"
+            raise self._invalid_metadata(msg, cause=exc) from exc
         else:
             return reqs

     def _process_license_expression(self, value: str) -> NormalizedLicenseExpression:
         try:
             return licenses.canonicalize_license_expression(value)
         except ValueError as exc:
-            raise self._invalid_metadata(
-                f"{value!r} is invalid for {{field}}", cause=exc
-            ) from exc
+            msg = f"{value!r} is invalid for {{field}}"
+            raise self._invalid_metadata(msg, cause=exc) from exc

     def _process_license_files(self, value: list[str]) -> list[str]:
         paths = []
         for path in value:
             if ".." in path:
-                raise self._invalid_metadata(
+                msg = (
                     f"{path!r} is invalid for {{field}}, "
                     "parent directory indicators are not allowed"
                 )
+                raise self._invalid_metadata(msg)
             if "*" in path:
-                raise self._invalid_metadata(
-                    f"{path!r} is invalid for {{field}}, paths must be resolved"
-                )
+                msg = f"{path!r} is invalid for {{field}}, paths must be resolved"
+                raise self._invalid_metadata(msg)
             if (
                 pathlib.PurePosixPath(path).is_absolute()
                 or pathlib.PureWindowsPath(path).is_absolute()
             ):
-                raise self._invalid_metadata(
-                    f"{path!r} is invalid for {{field}}, paths must be relative"
-                )
+                msg = f"{path!r} is invalid for {{field}}, paths must be relative"
+                raise self._invalid_metadata(msg)
             if pathlib.PureWindowsPath(path).as_posix() != path:
-                raise self._invalid_metadata(
-                    f"{path!r} is invalid for {{field}}, paths must use '/' delimiter"
-                )
+                msg = f"{path!r} is invalid for {{field}}, paths must use '/' delimiter"
+                raise self._invalid_metadata(msg)
             paths.append(path)
         return paths

@@ -744,20 +741,23 @@ def _process_import_names(self, value: list[str]) -> list[str]:
             name = name.rstrip()
             for identifier in name.split("."):
                 if not identifier.isidentifier():
-                    raise self._invalid_metadata(
+                    msg = (
                         f"{name!r} is invalid for {{field}}; "
                         f"{identifier!r} is not a valid identifier"
                     )
+                    raise self._invalid_metadata(msg)
                 elif keyword.iskeyword(identifier):
-                    raise self._invalid_metadata(
+                    msg = (
                         f"{name!r} is invalid for {{field}}; "
                         f"{identifier!r} is a keyword"
                     )
+                    raise self._invalid_metadata(msg)
             if semicolon and private.lstrip() != "private":
-                raise self._invalid_metadata(
+                msg = (
                     f"{import_name!r} is invalid for {{field}}; "
                     "the only valid option is 'private'"
                 )
+                raise self._invalid_metadata(msg)
         return value

     _process_import_namespaces = _process_import_names
@@ -827,7 +827,8 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
                     exceptions.append(exc)

             if exceptions:
-                raise ExceptionGroup("invalid metadata", exceptions)
+                msg = "invalid metadata"
+                raise ExceptionGroup(msg, exceptions)

         return ins

@@ -850,14 +851,14 @@ def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
                 exceptions.append(InvalidMetadata(unparsed_key, message))

             if exceptions:
-                raise ExceptionGroup("unparsed", exceptions)
+                msg = "unparsed"
+                raise ExceptionGroup(msg, exceptions)

         try:
             return cls.from_raw(raw, validate=validate)
         except ExceptionGroup as exc_group:
-            raise ExceptionGroup(
-                "invalid or unparsed metadata", exc_group.exceptions
-            ) from None
+            msg = "invalid or unparsed metadata"
+            raise ExceptionGroup(msg, exc_group.exceptions) from None

     metadata_version: _Validator[_MetadataVersion] = _Validator()
     """:external:ref:`core-metadata-metadata-version`
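For context, a small usage sketch (not part of the diff) of how these grouped validation errors reach callers. It assumes the packaging distribution is installed; packaging.metadata defines an ExceptionGroup fallback for Python versions before 3.11 and uses the builtin otherwise, so the name can be imported from the module for the except clause. The metadata text is invented and deliberately invalid so that validation collects several errors:

from packaging.metadata import ExceptionGroup, Metadata

RAW = (
    "Metadata-Version: 2.3\n"
    "Name: not a valid??? name\n"
    "Version: not-a-version\n"
)

try:
    Metadata.from_email(RAW, validate=True)
except ExceptionGroup as group:
    # Each entry's message is one of the msg strings built in the code above.
    for err in group.exceptions:
        print(f"{type(err).__name__}: {err}")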