1 change: 1 addition & 0 deletions newsfragments/145.internal.md
@@ -0,0 +1 @@
+Dev dependency updates, include Jinja security.
1,849 changes: 940 additions & 909 deletions poetry.lock

Large diffs are not rendered by default.

22 changes: 11 additions & 11 deletions pyproject.toml
@@ -13,18 +13,18 @@ python = ">=3.12,<4.0"

 [tool.poetry.group.dev.dependencies]
 pyhelm3 = "^0.4.0"
-lightkube = "^0.15.5"
-pytz = "^2024.2"
-cryptography = "^43.0.3"
+lightkube = "^0.17.1"
+pytz = "^2025.1"
+cryptography = "^44.0.0"
 platformdirs = "^4.3.6"
-python-on-whales = "^0.73.0"
-aiohttp = "^3.11.6"
-yamale = "^5.2.1"
-typer = "^0.12.5"
-pytest = "^8.3.3"
-ruff = "^0.7.2"
-jinja2 = "^3.1.4"
-checkov = "^3.2.296"
+python-on-whales = "^0.75.1"
+aiohttp = "^3.11.11"
+yamale = "^6.0.0"
+typer = "^0.15.1"
+pytest = "^8.3.4"
+ruff = "^0.9.4"
+jinja2 = "^3.1.5"
+checkov = "^3.2.363"
 reuse = "^5.0.2"
 signedjson = "^1.1.4"
 aiohttp-retry = "^2.9.1"
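Note on the dependency floor: Poetry caret constraints such as jinja2 = "^3.1.5" resolve to >=3.1.5,<4.0.0, so the security-patched Jinja2 release becomes the minimum the lock file can select. A quick, illustrative way to confirm what an environment actually resolved to after relocking:

from importlib.metadata import version

# prints the installed Jinja2 version; expect 3.1.5 or newer after this update
print(version("jinja2"))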
2 changes: 1 addition & 1 deletion scripts/spdx_checks.py
@@ -6,11 +6,11 @@

 import re
 import sys
+from typing import Annotated

 import typer
 from spdx_tools.spdx.model import Document
 from spdx_tools.spdx.parser.tagvalue.parser import Parser
-from typing_extensions import Annotated


 def run_spdx_checks(input_file: Annotated[typer.FileText, typer.Argument()] = sys.stdin):
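For context on the import swap above: typing.Annotated has been part of the standard library since Python 3.9 (PEP 593), so with this project pinned to python = ">=3.12,<4.0" the typing_extensions backport is no longer needed for it. A minimal sketch of the pattern, using a hypothetical check function rather than the real script:

import sys
from typing import Annotated  # stdlib since Python 3.9

import typer


def run_checks(input_file: Annotated[typer.FileText, typer.Argument()] = sys.stdin):
    # Annotated attaches typer's Argument metadata to the file parameter
    for line in input_file:
        print(line.rstrip())


if __name__ == "__main__":
    typer.run(run_checks)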
3 changes: 2 additions & 1 deletion tests/integration/fixtures/cluster.py
@@ -4,8 +4,9 @@

 import asyncio
 import os
+from collections.abc import AsyncGenerator
 from pathlib import Path
-from typing import Any, AsyncGenerator
+from typing import Any

 import pyhelm3
 import pytest
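The reshuffle above follows the same modernization: the typing module's AsyncGenerator alias has been deprecated since Python 3.9 in favour of collections.abc.AsyncGenerator, which is subscriptable at runtime. A sketch of how an async fixture might be annotated in this style (illustrative fixture, assuming an async-capable pytest plugin such as pytest-asyncio is installed):

from collections.abc import AsyncGenerator
from typing import Any

import pytest


@pytest.fixture
async def cluster_stub() -> AsyncGenerator[dict[str, Any], None]:
    # set up a stand-in resource, hand it to the test, tear it down afterwards
    resource: dict[str, Any] = {"name": "pytest-cluster"}
    yield resource
    resource.clear()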
2 changes: 1 addition & 1 deletion tests/integration/fixtures/data.py
@@ -15,7 +15,7 @@

 def generate_signing_key():
     signing_key = signedjson.key.generate_signing_key("0")
-    value = f"{signing_key.alg} {signing_key.version} " f"{signedjson.key.encode_signing_key_base64(signing_key)}"
+    value = f"{signing_key.alg} {signing_key.version} {signedjson.key.encode_signing_key_base64(signing_key)}"
     return value


36 changes: 20 additions & 16 deletions tests/integration/lib/synapse.py
@@ -80,15 +80,17 @@ async def upload_media(synapse_fqdn: str, user_access_token: str, file_path: Pat
     params = {"filename": file_path.name}

     with open(file_path, "rb") as f:
-        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session, RetryClient(
-            session, retry_options=retry_options, raise_for_status=True
-        ) as retry, retry.post(
-            "https://127.0.0.1/_matrix/media/v3/upload",
-            server_hostname=synapse_fqdn,
-            headers=headers,
-            params=params,
-            data=f.read(),
-        ) as response:
+        async with (
+            aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session,
+            RetryClient(session, retry_options=retry_options, raise_for_status=True) as retry,
+            retry.post(
+                "https://127.0.0.1/_matrix/media/v3/upload",
+                server_hostname=synapse_fqdn,
+                headers=headers,
+                params=params,
+                data=f.read(),
+            ) as response,
+        ):
             response_json = await response.json()

     assert response_json["content_uri"].startswith("mxc://")
@@ -106,13 +108,15 @@ async def download_media(

     # Initialize SHA-256 hasher
     sha256_hash = hashlib.sha256()
-    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session, RetryClient(
-        session, retry_options=retry_options, raise_for_status=True
-    ) as retry, retry.get(
-        f"https://127.0.0.1/_matrix/client/v1/media/download/{server_name}/{content_id}",
-        headers=headers,
-        server_hostname=synapse_fqdn,
-    ) as response:
+    async with (
+        aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session,
+        RetryClient(session, retry_options=retry_options, raise_for_status=True) as retry,
+        retry.get(
+            f"https://127.0.0.1/_matrix/client/v1/media/download/{server_name}/{content_id}",
+            headers=headers,
+            server_hostname=synapse_fqdn,
+        ) as response,
+    ):
         # Process the stream in chunks
         while True:
             chunk = await response.content.read(8192)  # 8KB chunks
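Both rewrites in this file rely on parenthesized context managers, supported since Python 3.10: the session, retry client, and request are listed one per line inside a single async with instead of being chained across continuation lines, with identical runtime behaviour. A stripped-down sketch of the pattern (the URL and function name are placeholders, not taken from this file):

import aiohttp
from aiohttp_retry import ExponentialRetry, RetryClient


async def fetch_json(url: str) -> dict:
    retry_options = ExponentialRetry(attempts=3)
    async with (
        aiohttp.ClientSession() as session,
        RetryClient(session, retry_options=retry_options, raise_for_status=True) as retry,
        retry.get(url) as response,
    ):
        # all three context managers are released in reverse order on exit
        return await response.json()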
35 changes: 20 additions & 15 deletions tests/integration/lib/utils.py
@@ -69,13 +69,15 @@ async def aiottp_get_json(url: str, ssl_context: SSLContext) -> Any:
     """
     host = urlparse(url).hostname

-    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session, RetryClient(
-        session, retry_options=retry_options, raise_for_status=True
-    ) as retry, retry.get(
-        url.replace(host, "127.0.0.1"),
-        headers={"Host": host},
-        server_hostname=host,
-    ) as response:
+    async with (
+        aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session,
+        RetryClient(session, retry_options=retry_options, raise_for_status=True) as retry,
+        retry.get(
+            url.replace(host, "127.0.0.1"),
+            headers={"Host": host},
+            server_hostname=host,
+        ) as response,
+    ):
         return await response.json()


@@ -96,11 +98,13 @@ async def aiohttp_post_json(url: str, data: dict, headers: dict, ssl_context: SS

     """
     host = urlparse(url).hostname
-    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session, RetryClient(
-        session, retry_options=retry_options, raise_for_status=True
-    ) as retry, retry.post(
-        url.replace(host, "127.0.0.1"), headers=headers | {"Host": host}, server_hostname=host, json=data
-    ) as response:
+    async with (
+        aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=ssl_context)) as session,
+        RetryClient(session, retry_options=retry_options, raise_for_status=True) as retry,
+        retry.post(
+            url.replace(host, "127.0.0.1"), headers=headers | {"Host": host}, server_hostname=host, json=data
+        ) as response,
+    ):
         return await response.json()


@@ -124,9 +128,10 @@ def merge(a: dict, b: dict, path=None):
                 a[key] = b[key]
         return a

-    with open(Path().resolve() / "charts" / "matrix-stack" / "values.yaml") as base_value_file, open(
-        os.environ["TEST_VALUES_FILE"]
-    ) as test_value_file:
+    with (
+        open(Path().resolve() / "charts" / "matrix-stack" / "values.yaml") as base_value_file,
+        open(os.environ["TEST_VALUES_FILE"]) as test_value_file,
+    ):
         data = merge(yaml.safe_load(base_value_file), yaml.safe_load(test_value_file))

     keys = property_path.split(".")
8 changes: 3 additions & 5 deletions tests/integration/test_networking.py
@@ -138,16 +138,14 @@ async def test_pods_monitored(

             # Something monitored by multiple ServiceMonitors smells like a bug
             assert covered_pod.metadata.name not in monitored_pods, (
-                f"Pod {covered_pod.metadata.name} " "is monitored multiple times"
+                f"Pod {covered_pod.metadata.name} is monitored multiple times"
             )

             monitored_pods.add(covered_pod.metadata.name)
             service_monitor_is_useful = True

-        assert service_monitor_is_useful, (
-            f"ServiceMonitor {service_monitor['metadata']['name']} " "does not cover any pod"
-        )
+        assert service_monitor_is_useful, f"ServiceMonitor {service_monitor['metadata']['name']} does not cover any pod"

     assert all_monitorable_pods == monitored_pods, (
-        f"Some pods are not monitored : " f"{', '.join(list(set(all_monitorable_pods) ^ set(monitored_pods)))}"
+        f"Some pods are not monitored : {', '.join(list(set(all_monitorable_pods) ^ set(monitored_pods)))}"
     )
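The assert rewrites in this and the following test files all follow one pattern, which appears to match the layout the newer ruff formatter produces after the 0.7 to 0.9 bump in pyproject.toml (an inference from the version change, not stated in the PR): the condition stays on the assert line and only the failure message is parenthesized, and adjacent implicitly concatenated f-string fragments are merged into one. Schematically, with illustrative names:

pod_name = "synapse-main-0"
monitored_pods = {"synapse-main-0"}

# old layout: the condition is wrapped in parentheses, message trails the close
assert (
    pod_name in monitored_pods
), f"Pod {pod_name} is not monitored"

# new layout: the condition stays with assert, only the message is parenthesized,
# so a failing condition is never obscured by wrapping
assert pod_name in monitored_pods, (
    f"Pod {pod_name} is not monitored"
)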
6 changes: 3 additions & 3 deletions tests/manifests/__init__.py
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial

 import copy
-from typing import Any, Dict
+from typing import Any

 _raw_shared_components_details = {
     "initSecrets": {"hyphened_name": "init-secrets", "has_service_monitor": False, "has_ingress": False},
@@ -35,13 +35,13 @@
 }


-def _enrich_components_to_test(details) -> Dict[str, Any]:
+def _enrich_components_to_test(details) -> dict[str, Any]:
     _component_details = copy.deepcopy(details)
     for component in details:
         _component_details[component].setdefault("hyphened_name", component)

         values_files = _component_details[component].setdefault("additional_values_files", [])
-        values_files.append(f"{_component_details[component]["hyphened_name"]}-minimal-values.yaml")
+        values_files.append(f"{_component_details[component]['hyphened_name']}-minimal-values.yaml")
         _component_details[component]["values_files"] = values_files
         del _component_details[component]["additional_values_files"]

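Two modernizations meet in this hunk. First, PEP 585 (Python 3.9+) makes the builtin dict subscriptable, so typing.Dict is redundant. Second, the f-string subscript switches to single inner quotes: f"{d["key"]}" only parses on Python 3.12+ (PEP 701), while f"{d['key']}" parses on every version and is the quoting the formatter normalizes to. A small, self-contained illustration with hypothetical component data:

from typing import Any


def enrich(component: dict[str, Any]) -> dict[str, Any]:
    # dict[str, Any] is a PEP 585 builtin generic; no typing.Dict import needed
    out = dict(component)
    out.setdefault("hyphened_name", "init-secrets")
    # single quotes inside the f-string keep it parseable before Python 3.12
    out["values_file"] = f"{out['hyphened_name']}-minimal-values.yaml"
    return out


print(enrich({"has_ingress": False}))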
13 changes: 7 additions & 6 deletions tests/manifests/test_ingresses.py
@@ -234,11 +234,12 @@ async def test_ingress_services(templates):
             found_service = services_by_name[backend_service["name"]]
             if backend_service["port"].get("name"):
                 port_names = [port["name"] for port in found_service["spec"]["ports"]]
-                assert (
-                    backend_service["port"]["name"] in port_names
-                ), f"Port name {backend_service['port']['name']} not found in service {backend_service['name']}"
+                assert backend_service["port"]["name"] in port_names, (
+                    f"Port name {backend_service['port']['name']} not found in service {backend_service['name']}"
+                )
             else:
                 port_numbers = [port["port"] for port in found_service["spec"]["ports"]]
-                assert (
-                    backend_service["port"]["number"] in port_numbers
-                ), f"Port number {backend_service['port']['number']} not found in service {backend_service['name']}"
+                assert backend_service["port"]["number"] in port_numbers, (
+                    f"Port number {backend_service['port']['number']} "
+                    f"not found in service {backend_service['name']}"
+                )
12 changes: 6 additions & 6 deletions tests/manifests/test_labels.py
@@ -21,14 +21,14 @@ async def test_templates_have_expected_labels(templates):
     ]

     for template in templates:
-        id = f"{template["kind"]}/{template["metadata"]["name"]}"
+        id = f"{template['kind']}/{template['metadata']['name']}"
         labels = template["metadata"]["labels"]

         for expected_label in expected_labels:
             assert expected_label in labels, f"{expected_label} label not present in {id}"
-            assert (
-                labels[expected_label] is not None
-            ), f"{expected_label} label is null in {id} and so won't be present in cluster"
+            assert labels[expected_label] is not None, (
+                f"{expected_label} label is null in {id} and so won't be present in cluster"
+            )

         assert labels["helm.sh/chart"].startswith("matrix-stack-")
         assert labels["app.kubernetes.io/managed-by"] == "Helm"
@@ -39,10 +39,10 @@ async def test_templates_have_expected_labels(templates):
             f"The app.kubernetes.io/instance label for {id}"
             "does not start with the expected chart release name of 'pytest'. "
         )
-        f"The label value is {labels["app.kubernetes.io/instance"]}"
+        f"The label value is {labels['app.kubernetes.io/instance']}"

         assert labels["app.kubernetes.io/instance"].replace("pytest-", "") == labels["app.kubernetes.io/name"], (
             f"The app.kubernetes.io/name label for {id}"
             "is not a concatenation of the expected chart release name of 'pytest' and the instance label. "
-            f"The label value is {labels["app.kubernetes.io/instance"]} vs {labels["app.kubernetes.io/name"]}"
+            f"The label value is {labels['app.kubernetes.io/instance']} vs {labels['app.kubernetes.io/name']}"
         )
48 changes: 24 additions & 24 deletions tests/manifests/test_pod_securityContext.py
@@ -15,9 +15,9 @@ async def test_sets_nonRoot_uids_gids_in_pod_securityContext_by_default(template
         if template["kind"] in ["Deployment", "StatefulSet", "Job"]:
             id = f"{template['kind']}/{template['metadata']['name']}"

-            assert (
-                "securityContext" in template["spec"]["template"]["spec"]
-            ), f"Pod securityContext unexpectedly absent for {id}"
+            assert "securityContext" in template["spec"]["template"]["spec"], (
+                f"Pod securityContext unexpectedly absent for {id}"
+            )

             pod_securityContext = template["spec"]["template"]["spec"]["securityContext"]

@@ -28,12 +28,12 @@
             assert "runAsNonRoot" in pod_securityContext, f"No runAsNonRoot in {id}'s Pod securityContext"
             assert pod_securityContext["runAsNonRoot"], f"{id} is running as root"

-            assert (
-                pod_securityContext["runAsUser"] == pod_securityContext["runAsGroup"]
-            ), f"{id} has distinct uid and gid in the Pod securityContext"
-            assert (
-                pod_securityContext["runAsGroup"] == pod_securityContext["fsGroup"]
-            ), f"{id} has distinct run and FS gids in the Pod securityContext"
+            assert pod_securityContext["runAsUser"] == pod_securityContext["runAsGroup"], (
+                f"{id} has distinct uid and gid in the Pod securityContext"
+            )
+            assert pod_securityContext["runAsGroup"] == pod_securityContext["fsGroup"], (
+                f"{id} has distinct run and FS gids in the Pod securityContext"
+            )


 @pytest.mark.parametrize("values_file", values_files_to_test)
@@ -51,9 +51,9 @@ async def test_can_nuke_pod_securityContext_ids(component, values, make_template
         if template["kind"] in ["Deployment", "StatefulSet", "Job"]:
             id = f"{template['kind']}/{template['metadata']['name']}"

-            assert (
-                "securityContext" in template["spec"]["template"]["spec"]
-            ), f"Pod securityContext unexpectedly absent for {id}"
+            assert "securityContext" in template["spec"]["template"]["spec"], (
+                f"Pod securityContext unexpectedly absent for {id}"
+            )

             pod_securityContext = template["spec"]["template"]["spec"]["securityContext"]

@@ -68,19 +68,19 @@ async def test_sets_seccompProfile_in_pod_securityContext_by_default(templates):
         if template["kind"] in ["Deployment", "StatefulSet", "Job"]:
             id = f"{template['kind']}/{template['metadata']['name']}"

-            assert (
-                "securityContext" in template["spec"]["template"]["spec"]
-            ), f"Pod securityContext unexpectedly absent for {id}"
+            assert "securityContext" in template["spec"]["template"]["spec"], (
+                f"Pod securityContext unexpectedly absent for {id}"
+            )

             pod_securityContext = template["spec"]["template"]["spec"]["securityContext"]

             assert "seccompProfile" in pod_securityContext, f"No seccompProfile in {id}'s Pod securityContext"
-            assert (
-                "type" in pod_securityContext["seccompProfile"]
-            ), f"No type in {id}'s Pod securityContext.seccompProfile"
-            assert (
-                pod_securityContext["seccompProfile"]["type"] == "RuntimeDefault"
-            ), f"{id} has unexpected seccompProfile type"
+            assert "type" in pod_securityContext["seccompProfile"], (
+                f"No type in {id}'s Pod securityContext.seccompProfile"
+            )
+            assert pod_securityContext["seccompProfile"]["type"] == "RuntimeDefault", (
+                f"{id} has unexpected seccompProfile type"
+            )


 @pytest.mark.parametrize("values_file", values_files_to_test)
@@ -94,9 +94,9 @@ async def test_can_nuke_pod_securityContext_seccompProfile(component, values, ma
         if template["kind"] in ["Deployment", "StatefulSet", "Job"]:
            id = f"{template['kind']}/{template['metadata']['name']}"

-            assert (
-                "securityContext" in template["spec"]["template"]["spec"]
-            ), f"Pod securityContext unexpectedly absent for {id}"
+            assert "securityContext" in template["spec"]["template"]["spec"], (
+                f"Pod securityContext unexpectedly absent for {id}"
+            )

             pod_securityContext = template["spec"]["template"]["spec"]["securityContext"]

6 changes: 3 additions & 3 deletions tests/manifests/test_secrets_consistency.py
@@ -54,9 +54,9 @@ def get_volume_from_mount(template, volume_mount):
 def match_in_content(container_name, mounted_secret_keys, mount_path, match_in):
     found_mount = False
     for match in re.findall(rf"{mount_path}/([^\s\n\");]+)", match_in):
-        assert (
-            f"{mount_path}/{match}" in mounted_secret_keys
-        ), f"{mount_path}/{match} used in {container_name} but it is not found from any mounted secret"
+        assert f"{mount_path}/{match}" in mounted_secret_keys, (
+            f"{mount_path}/{match} used in {container_name} but it is not found from any mounted secret"
+        )
         found_mount = True
     return found_mount
