Skip to content

Commit 6f553d3

Browse files
committed
Pass the artifacts checksums in the headers of the deploy request
1 parent 320a00c commit 6f553d3

5 files changed

Lines changed: 164 additions & 20 deletions

File tree

README.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ This library enables you to manage Artifactory resources such as users, groups,
3434
+ [Deploy an artifact](#deploy-an-artifact)
3535
+ [Deploy an artifact with properties](#deploy-an-artifact-with-properties)
3636
+ [Deploy an artifact with checksums](#deploy-an-artifact-with-checksums)
37+
+ [Deploy an artifact by checksums](#deploy-an-artifact-by-checksums)
3738
+ [Download an artifact](#download-an-artifact)
3839
+ [Retrieve artifact list](#retrieve-artifact-list)
3940
+ [Retrieve artifact properties](#retrieve-artifact-properties)
@@ -426,6 +427,16 @@ artifact = art.artifacts.deploy("<LOCAL_FILE_LOCATION>", "<ARTIFACT_PATH_IN_ARTI
426427

427428
#### Deploy an artifact with checksums
428429

430+
```python
431+
artifact = art.artifacts.deploy("<LOCAL_FILE_LOCATION>", "<ARTIFACT_PATH_IN_ARTIFACTORY>", checksum_algorithms=<CHECKSUM_ALGORITHMS>)
432+
# artifact = art.artifacts.deploy("Desktop/myNewFile.txt", "my-repository/my/new/artifact/directory/file.txt", checksum_algorithms=["md5", "sha1", "sha256"] )
433+
```
434+
435+
Provides locally calculated checksums for files during deployment so that Artifactory can verify the authenticity of artifacts.
436+
437+
438+
#### Deploy an artifact by checksums
439+
429440
```python
430441
artifact = art.artifacts.deploy("<LOCAL_FILE_LOCATION>", "<ARTIFACT_PATH_IN_ARTIFACTORY>", checksum_enabled=True)
431442
# artifact = art.artifacts.deploy("Desktop/myNewFile.txt", "my-repository/my/new/artifact/directory/file.txt", checksum_enabled=True)

pyartifactory/exception.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,3 +58,7 @@ class InvalidTokenDataError(ArtifactoryError):
5858

5959
class BuildNotFoundError(ArtifactoryError):
6060
"""Requested build was not found."""
61+
62+
63+
class InvalidAlgorithmError(ArtifactoryError):
64+
"""The algorithm is not supported by Artifactory."""

pyartifactory/models/artifact.py

Lines changed: 18 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -10,28 +10,33 @@
1010

1111
from pydantic import BaseModel
1212

13+
from pyartifactory.exception import InvalidAlgorithmError
14+
1315

1416
class Checksums(BaseModel):
1517
"""Models a checksum."""
1618

17-
sha1: str
18-
md5: str
19-
sha256: str
19+
sha1: Optional[str] = None
20+
md5: Optional[str] = None
21+
sha256: Optional[str] = None
2022

2123
@classmethod
22-
def generate(cls, file_: Path) -> Checksums:
24+
def generate(cls, file_: Path, algorithms: List[str] = ["sha1", "sha256", "md5"]) -> Checksums:
2325
block_size: int = 65536
2426
mapping: dict[str, Callable[[], Any]] = {"md5": hashlib.md5, "sha1": hashlib.sha1, "sha256": hashlib.sha256}
2527
results = {}
2628

27-
for algorithm, hashing_function in mapping.items():
28-
hasher = hashing_function()
29-
with file_.absolute().open("rb") as fd:
30-
buf = fd.read(block_size)
31-
while len(buf) > 0:
32-
hasher.update(buf)
29+
for algorithm in algorithms:
30+
if algorithm in mapping:
31+
hasher = mapping[algorithm]()
32+
with file_.absolute().open("rb") as fd:
3333
buf = fd.read(block_size)
34-
results[algorithm] = hasher.hexdigest()
34+
while len(buf) > 0:
35+
hasher.update(buf)
36+
buf = fd.read(block_size)
37+
results[algorithm] = hasher.hexdigest()
38+
else:
39+
raise InvalidAlgorithmError(f"'{algorithm}' is not a supported checksum algorithm")
3540

3641
return cls(**results)
3742

@@ -40,6 +45,8 @@ class OriginalChecksums(BaseModel):
4045
"""Models original checksums."""
4146

4247
sha256: str
48+
sha1: Optional[str] = None
49+
md5: Optional[str] = None
4350

4451

4552
class Child(BaseModel):

pyartifactory/objects/artifact.py

Lines changed: 23 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@ def deploy(
8282
artifact_path: Union[Path, str],
8383
properties: Optional[Dict[str, List[str]]] = None,
8484
checksum_enabled: bool = False,
85+
checksum_algorithms: Optional[List[str]] = None,
8586
) -> ArtifactInfoResponse:
8687
"""
8788
Deploy a file or directory.
@@ -98,17 +99,30 @@ def deploy(
9899
for file in files:
99100
self.deploy(Path(f"{root}/{file}"), Path(f"{new_root}/{file}"), properties, checksum_enabled)
100101
else:
102+
checksum_headers: Dict[str, str] = {}
103+
104+
if checksum_enabled:
105+
checksum_headers["X-Checksum-Deploy"] = "true"
106+
checksum_algorithms = ["sha1", "sha256", "md5"]
107+
108+
if checksum_algorithms is not None:
109+
artifact_check_sums = Checksums.generate(local_file, checksum_algorithms)
110+
if artifact_check_sums.md5:
111+
checksum_headers["X-Checksum"] = artifact_check_sums.md5
112+
if artifact_check_sums.sha1:
113+
checksum_headers["X-Checksum-Sha1"] = artifact_check_sums.sha1
114+
if artifact_check_sums.sha256:
115+
checksum_headers["X-Checksum-Sha256"] = artifact_check_sums.sha256
116+
101117
if checksum_enabled:
102-
artifact_check_sums = Checksums.generate(local_file)
103118
try:
119+
properties_param_str = ""
120+
if properties is not None:
121+
properties_param_str = self._format_properties(properties)
122+
route = ";".join(s for s in [artifact_folder.as_posix(), properties_param_str] if s)
104123
self._put(
105-
route=artifact_folder.as_posix(),
106-
headers={
107-
"X-Checksum-Deploy": "true",
108-
"X-Checksum-Sha1": artifact_check_sums.sha1,
109-
"X-Checksum-Sha256": artifact_check_sums.sha256,
110-
"X-Checksum": artifact_check_sums.md5,
111-
},
124+
route=route,
125+
headers=checksum_headers,
112126
)
113127
except requests.exceptions.HTTPError as error:
114128
if error.response.status_code == 404:
@@ -125,7 +139,7 @@ def deploy(
125139
if properties is not None:
126140
properties_param_str = self._format_properties(properties)
127141
route = ";".join(s for s in [artifact_folder.as_posix(), properties_param_str] if s)
128-
self._put(route, data=stream)
142+
self._put(route=route, headers=checksum_headers, data=stream)
129143

130144
logger.debug("Artifact %s successfully deployed", local_file)
131145
return self.info(artifact_folder)

tests/test_artifacts.py

Lines changed: 108 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
ArtifactListFolderResponse,
1717
ArtifactListResponse,
1818
Checksums,
19+
OriginalChecksums,
1920
)
2021

2122
URL = "http://localhost:8080/artifactory"
@@ -609,6 +610,113 @@ def test_checksum_defined_file(file_path: Path, expected_sha1: str, expected_md5
609610
assert result == expected
610611

611612

613+
@pytest.mark.parametrize(
614+
"file_info,expected_checksums",
615+
[
616+
pytest.param(
617+
{
618+
**FILE_INFO_RESPONSE.copy(),
619+
"originalChecksums": {
620+
"md5": "4cf609e0fe1267df8815bc650f5851e9",
621+
"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858",
622+
},
623+
},
624+
{
625+
"md5": "4cf609e0fe1267df8815bc650f5851e9",
626+
"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858",
627+
},
628+
id="md5",
629+
),
630+
pytest.param(
631+
{
632+
**FILE_INFO_RESPONSE.copy(),
633+
"originalChecksums": {
634+
"sha1": "962c287c760e03b03c17eb920f5358d05f44dd3b",
635+
"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858",
636+
},
637+
},
638+
{
639+
"sha1": "962c287c760e03b03c17eb920f5358d05f44dd3b",
640+
"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858",
641+
},
642+
id="sha1",
643+
),
644+
pytest.param(
645+
{
646+
**FILE_INFO_RESPONSE.copy(),
647+
"originalChecksums": {"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858"},
648+
},
649+
{"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858"},
650+
id="sha256",
651+
),
652+
pytest.param(
653+
{
654+
**FILE_INFO_RESPONSE.copy(),
655+
"originalChecksums": {
656+
"md5": "4cf609e0fe1267df8815bc650f5851e9",
657+
"sha1": "962c287c760e03b03c17eb920f5358d05f44dd3b",
658+
"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858",
659+
},
660+
},
661+
{
662+
"sha256": "396cf16e8ce000342c95ffc7feb2a15701d0994b70c1b13fea7112f85ac8e858",
663+
"md5": "4cf609e0fe1267df8815bc650f5851e9",
664+
"sha1": "962c287c760e03b03c17eb920f5358d05f44dd3b",
665+
},
666+
id="md5&sha1",
667+
),
668+
],
669+
)
670+
@responses.activate
671+
def test_deploy_artifact_with_checksum_algorithms_success(file_info: dict, expected_checksums: dict):
672+
responses.add(responses.PUT, f"{URL}/{ARTIFACT_PATH}", status=200)
673+
responses.add(
674+
responses.GET,
675+
f"{URL}/api/storage/{ARTIFACT_PATH}",
676+
json=file_info,
677+
status=200,
678+
)
679+
expected = OriginalChecksums(**expected_checksums)
680+
artifactory = ArtifactoryArtifact(AuthModel(url=URL, auth=AUTH))
681+
artifact = artifactory.deploy(
682+
Path(LOCAL_FILE_LOCATION),
683+
Path(ARTIFACT_PATH),
684+
checksum_algorithms=file_info["originalChecksums"].keys(),
685+
)
686+
assert expected == artifact.originalChecksums
687+
688+
689+
@pytest.mark.parametrize(
690+
"file_info",
691+
[
692+
pytest.param(
693+
{
694+
**FILE_INFO_RESPONSE.copy(),
695+
"originalChecksums": {"sha224": "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"},
696+
},
697+
id="sha224",
698+
),
699+
],
700+
)
701+
@responses.activate
702+
def test_deploy_artifact_with_checksum_algorithms_error(file_info: dict):
703+
responses.add(responses.PUT, f"{URL}/{ARTIFACT_PATH}", status=409)
704+
705+
responses.add(
706+
responses.GET,
707+
f"{URL}/api/storage/{ARTIFACT_PATH}",
708+
json=file_info,
709+
status=200,
710+
)
711+
with pytest.raises(Exception):
712+
artifactory = ArtifactoryArtifact(AuthModel(url=URL, auth=AUTH))
713+
artifactory.deploy(
714+
Path(LOCAL_FILE_LOCATION),
715+
Path(ARTIFACT_PATH),
716+
checksum_algorithms=file_info["originalChecksums"].keys(),
717+
)
718+
719+
612720
@responses.activate
613721
def test_deploy_artifact_with_checksum_success(mocker):
614722
responses.add(responses.PUT, f"{URL}/{ARTIFACT_PATH}", status=200)

0 commit comments

Comments
 (0)