Skip to content
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
132 changes: 114 additions & 18 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ pygments = "^2.19.1"
[tool.poetry.group.dev.dependencies]
bandit = "^1.8.3"
black = "^25.1.0"
moto = "^5.1.9"

[tool.poetry.group.docs.dependencies]
sphinx-rtd-theme = "^3.0.2"
Expand Down
Empty file added tests/s3/__init__.py
Empty file.
46 changes: 46 additions & 0 deletions tests/s3/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import pytest
from pathlib import Path
from moto import mock_aws
from hashlib import md5, sha256
import boto3


# Dummy CName replacement
class DummyCName:
    """Minimal stand-in for the gardenlinux CName class used by the s3 tests."""

    def __init__(self, cname):  # pylint: disable=unused-argument
        # Fixed canned values — the tests only need the attributes to exist.
        fixed = {
            "platform": "aws",
            "arch": "amd64",
            "version": "1234.1",
            "commit_id": "abc123",
        }
        for attr, value in fixed.items():
            setattr(self, attr, value)


# Helpers to compute digests for fake files
def dummy_digest(data, algo: str):
    """
    Dummy replacement for hashlib.file_digest() that works on in-memory
    byte streams.

    Note: the original annotations (``data: bytes``, ``-> str``) were wrong —
    ``data`` must be a readable binary file-like object (it is ``.read()``
    and ``.seek()``-ed), and the return value is a hash object, matching the
    ``hashlib.file_digest`` contract that this function stands in for.

    :param data: binary file-like object (e.g. ``io.BytesIO``) to digest
    :param algo: digest name; only ``"md5"`` and ``"sha256"`` are supported
    :return: a hashlib hash object over the stream's full contents
    :raises ValueError: if *algo* is not a supported digest name
    """
    content = data.read()
    data.seek(0)  # Reset byte cursor to start for multiple uses

    if algo == "md5":
        return md5(content)  # nosec B324
    elif algo == "sha256":
        return sha256(content)
    else:
        raise ValueError(f"Unsupported algo: {algo}")


@pytest.fixture(autouse=True)
def s3_setup(tmp_path, monkeypatch):
    """
    Provide a clean, fully mocked S3 environment for every test.

    Yields a ``(s3 resource, bucket name, tmp_path)`` tuple inside an active
    moto ``mock_aws`` context, with the CName and file_digest names in
    ``gardenlinux.s3.s3_artifacts`` patched to the local test doubles.
    """
    with mock_aws():
        # Swap out the pieces of s3_artifacts the tests must not hit for real.
        monkeypatch.setattr("gardenlinux.s3.s3_artifacts.CName", DummyCName)
        monkeypatch.setattr("gardenlinux.s3.s3_artifacts.file_digest", dummy_digest)

        resource = boto3.resource("s3", region_name="us-east-1")
        name = "test-bucket"
        resource.create_bucket(Bucket=name)

        yield resource, name, tmp_path
Loading