sources/curl: use util.checksum.verify_file

Now that there is a common utility function to verify the checksum
of a file, use that.
Also fix the JSON schema entry for the property to have the correct
minimum and maximum digest lengths, given the supported algorithms,
which are 32 (md5) and 128 (sha512) characters.
This commit is contained in:
Christian Kellner 2021-04-30 09:39:50 +00:00 committed by Achilleas Koutsou
parent f000b8e6cb
commit 3ebfc6f657

View file

@ -26,6 +26,8 @@ import time
from typing import Dict
from osbuild.util.checksum import verify_file
SCHEMA = """
"additionalProperties": false,
@ -35,7 +37,7 @@ SCHEMA = """
"type": "object",
"additionalProperties": false,
"patternProperties": {
"(md5|sha1|sha256|sha384|sha512):[0-9a-f]{5,64}": {
"(md5|sha1|sha256|sha384|sha512):[0-9a-f]{32,128}": {
"oneOf": [
{
"type": "string",
@ -84,22 +86,6 @@ SCHEMA = """
"""
def verify_checksum(filename, checksum):
    """Check that the file at `filename` matches `checksum`.

    `checksum` has the form "<algorithm>:<hexdigest>", e.g.
    "sha256:abc...". Returns True if the file's digest matches,
    False otherwise.

    Raises RuntimeError if the algorithm prefix is not one of the
    supported hash algorithms.
    """
    import hashlib

    algorithm, want = checksum.split(":", 1)
    if algorithm not in ("md5", "sha1", "sha256", "sha384", "sha512"):
        raise RuntimeError(f"unsupported checksum algorithm: {algorithm}")

    # Hash the file in-process instead of shelling out to `{algorithm}sum -c`.
    # The previous implementation never passed `filename` to the checker
    # (it fed the literal name "(unknown)" on stdin), so the wrong file —
    # or no file at all — was being verified.
    digest = hashlib.new(algorithm)
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest() == want.lower()
def fetch(url, checksum, directory):
# Invariant: all files in @directory must be named after their (verified) checksum.
if os.path.isfile(f"{directory}/{checksum}"):
@ -144,7 +130,7 @@ def fetch(url, checksum, directory):
else:
raise RuntimeError(f"curl: error downloading {url}: error code {return_code}")
if not verify_checksum(f"{tmpdir}/{checksum}", checksum):
if not verify_file(f"{tmpdir}/{checksum}", checksum):
raise RuntimeError(f"checksum mismatch: {checksum} {url}")
# The checksum has been verified, move the file into place. in case we race