replace md5 with sha256

This commit is contained in:
Tomas Kopecek 2020-06-23 10:25:01 +02:00
parent a893e8bf2b
commit 50bcb6f3bc
10 changed files with 60 additions and 60 deletions

View file

@ -6637,22 +6637,18 @@ class CG_Importer(object):
(filesize, fileinfo['filename'], fileinfo['filesize']))
# checksum
if fileinfo['checksum_type'] != 'md5':
# XXX
# until we change the way we handle checksums, we have to limit this to md5
raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo)
with open(path, 'rb') as fp:
m = md5_constructor()
chksum = get_verify_class(fileinfo['checksum_type'])()
while True:
contents = fp.read(8192)
if not contents:
break
m.update(contents)
if fileinfo['checksum'] != m.hexdigest():
chksum.update(contents)
if fileinfo['checksum'] != chksum.hexdigest():
raise koji.GenericError("File checksum mismatch for %s: %s != %s" %
(fileinfo['filename'], fileinfo['checksum'],
m.hexdigest()))
fileinfo['hub.checked_md5'] = True
chksum.hexdigest()))
fileinfo['hub.checked_hash'] = True
if fileinfo['buildroot_id'] not in self.br_prep:
raise koji.GenericError("Missing buildroot metadata for id %(buildroot_id)r" %
@ -7214,9 +7210,7 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
archiveinfo['filename'] = filename
archiveinfo['size'] = fileinfo['filesize']
archiveinfo['checksum'] = fileinfo['checksum']
if fileinfo['checksum_type'] != 'md5':
# XXX
# until we change the way we handle checksums, we have to limit this to md5
if fileinfo['checksum_type'] not in ('md5', 'sha256'):
raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo)
archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES[fileinfo['checksum_type']]
archiveinfo['metadata_only'] = True
@ -7225,28 +7219,26 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
archiveinfo['filename'] = filename
archiveinfo['size'] = os.path.getsize(filepath)
# trust values computed on hub (CG_Importer.prep_outputs)
if not fileinfo or not fileinfo.get('hub.checked_md5'):
if not fileinfo or not fileinfo.get('hub.checked_hash'):
with open(filepath, 'rb') as archivefp:
m = md5_constructor()
chksum = get_verify_class('sha256')()
while True:
contents = archivefp.read(8192)
if not contents:
break
m.update(contents)
archiveinfo['checksum'] = m.hexdigest()
chksum.update(contents)
archiveinfo['checksum'] = chksum.hexdigest()
archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES['sha256']
else:
archiveinfo['checksum'] = fileinfo['checksum']
archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES['md5']
archiveinfo['checksum_type'] = fileinfo['checksum_type']
if fileinfo:
# check against metadata
if archiveinfo['size'] != fileinfo['filesize']:
raise koji.GenericError("File size mismatch for %s: %s != %s" %
(filename, archiveinfo['size'], fileinfo['filesize']))
if fileinfo['checksum_type'] != 'md5':
# XXX
# until we change the way we handle checksums, we have to limit this to md5
raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo)
if archiveinfo['checksum'] != fileinfo['checksum']:
if (archiveinfo['checksum'] != fileinfo['checksum'] or
archiveinfo['checksum_type'] != fileinfo['checksum_type']):
raise koji.GenericError("File checksum mismatch for %s: %s != %s" %
(filename, archiveinfo['checksum'], fileinfo['checksum']))
archivetype = get_archive_type(filename, strict=True)
@ -10328,13 +10320,14 @@ class RootExports(object):
def uploadFile(self, path, name, size, md5sum, offset, data, volume=None):
"""upload file to the hub
Files can be uploaded in chunks, if so the md5 and size describe the
Files can be uploaded in chunks, if so the hash and size describe the
chunk rather than the whole file.
:param str path: the relative path to upload to
:param str name: the name of the file
:param int size: size of contents (bytes)
:param str md5: md5sum (hex digest) of contents
:param str md5sum: md5sum (hex digest) of contents or tuple (hash_type, digest)
md5sum name is misleading, but it is here for backwards compatibility
:param str data: base64 encoded file contents
:param int offset: The offset indicates where the chunk belongs.
The special offset -1 is used to indicate the final
@ -10351,11 +10344,11 @@ class RootExports(object):
if isinstance(md5sum, str):
# this case is for backwards compatibility
verify = "md5"
digest = md5sum
elif md5sum is None:
digest = hash
elif hash is None:
verify = None
else:
verify, digest = md5sum
verify, digest = hash
sum_cls = get_verify_class(verify)
if offset != -1:
if size is not None:
@ -10453,14 +10446,13 @@ class RootExports(object):
data['size'] = st.st_size
data['mtime'] = st.st_mtime
if verify:
sum_cls = get_verify_class(verify)
chksum = get_verify_class(verify)()
if tail is not None:
if tail < 0:
raise koji.GenericError("invalid tail value: %r" % tail)
offset = max(st.st_size - tail, 0)
os.lseek(fd, offset, 0)
length = 0
chksum = sum_cls()
chunk = os.read(fd, 8192)
while chunk:
length += len(chunk)
@ -14640,6 +14632,8 @@ def get_verify_class(verify):
return md5_constructor
elif verify == 'adler32':
return koji.util.adler32_constructor
elif verify == 'sha256':
return hashlib.sha256
elif verify:
raise koji.GenericError("Unsupported verify type: %s" % verify)
else:
@ -14667,9 +14661,8 @@ def handle_upload(environ):
raise koji.GenericError("destination not a file: %s" % fn)
if offset == 0 and not overwrite:
raise koji.GenericError("upload path exists: %s" % fn)
sum_cls = get_verify_class(verify)
chksum = get_verify_class(verify)()
size = 0
chksum = sum_cls()
inf = environ['wsgi.input']
fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666)
try:

View file

@ -27,6 +27,7 @@ from __future__ import absolute_import, division
import base64
import datetime
import errno
import hashlib
import imp
import logging
import logging.handlers
@ -3109,24 +3110,24 @@ class ClientSession(object):
fo = open(localfile, "rb") # specify bufsize?
totalsize = os.path.getsize(localfile)
ofs = 0
md5sum = util.md5_constructor()
sha256sum = hashlib.sha256sum()
debug = self.opts.get('debug', False)
if callback:
callback(0, totalsize, 0, 0, 0)
while True:
lap = time.time()
contents = fo.read(blocksize)
md5sum.update(contents)
sha256sum.update(contents)
size = len(contents)
data = util.base64encode(contents)
if size == 0:
# end of file, use offset = -1 to finalize upload
offset = -1
digest = md5sum.hexdigest()
digest = sha256sum.hexdigest()
sz = ofs
else:
offset = ofs
digest = util.md5_constructor(contents).hexdigest()
digest = hashlib.sha256(contents).hexdigest()
sz = size
del contents
tries = 0
@ -3134,7 +3135,8 @@ class ClientSession(object):
if debug:
self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" %
(path, name, sz, digest, offset))
if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts):
if self.callMethod('uploadFile', path, name, sz, ("sha256", digest),
offset, data, **volopts):
break
if tries <= retries:
tries += 1

View file

@ -23,6 +23,7 @@
from __future__ import absolute_import, division
import errno
import hashlib
import logging
import os
import signal
@ -43,7 +44,6 @@ from koji.util import (
adler32_constructor,
base64encode,
dslice,
md5_constructor,
parseStatus,
to_list,
joinpath,
@ -69,12 +69,12 @@ def incremental_upload(session, fname, fd, path, retries=5, logger=None):
break
data = base64encode(contents)
digest = md5_constructor(contents).hexdigest()
digest = hashlib.sha256(contents).hexdigest()
del contents
tries = 0
while True:
if session.uploadFile(path, fname, size, digest, offset, data):
if session.uploadFile(path, fname, size, ("sha256", digest), offset, data):
break
if tries <= retries:

View file

@ -49,7 +49,8 @@ from koji.xmlrpcplus import DateTime
def md5_constructor(*args, **kwargs):
if hasattr(hashlib._hashlib, 'get_fips_mode') and hashlib._hashlib.get_fips_mode():
# do not care about FIPS
# do not care about FIPS; we need md5 for signatures and older hashes
# It is still used for *some* security
kwargs['usedforsecurity'] = False
return hashlib.md5(*args, **kwargs)

View file

@ -6,9 +6,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "19a674d997af7098a444b60d7b51cee6",
"checksum": "ca9dd08a0b9f81b209c3ac768a7d1ca27973cfd920095e2dc3df5159f752039e",
"filename": "tdl-x86_64.xml",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1001,
@ -30,9 +30,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "a5114a20d790cf17eca1b1115a4546f8",
"checksum": "4083a6838c1b6895df27a69373f4c527a9722c045bccc08efe064f105d566c77",
"filename": "image.ks",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1002,
@ -54,9 +54,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "9828cf75d9d17ac8e79e53ed71c6a71c",
"checksum": "963a4396be7072012370db407b9ea3633b09dbe45926bb2ef912a86baac1d7b7",
"filename": "image-base.ks",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1003,
@ -78,9 +78,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "f601c0f647d7cdd4c92aa511876f8533",
"checksum": "9f4dea3a4b64def36be0119fef4d3f6e62eb6e316bf5749acddb134596faf5e9",
"filename": "foo-x86_64.xml",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1004,
@ -102,9 +102,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "84547200ef5002292ecdd50c62de518e",
"checksum": "e3ff2b57824a7ee9201786a624c54057de1b279fbcf6782fe25898d657ebd354",
"filename": "my-image-7.4.2-2.x86_64.ova",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1005,

View file

@ -20,3 +20,7 @@ class TestGetVerifyClass(unittest.TestCase):
def test_get_verify_class_is_adler32(self):
kojihub.get_verify_class('adler32') is adler32_constructor
def test_get_verify_class_is_sha256(self):
kojihub.get_verify_class('sha256') is hashlib.sha256

View file

@ -608,7 +608,7 @@ def upload_file(server, prefix, path):
destpath = os.path.join(prefix, path)
fobj = open(destpath, 'r')
offset = 0
sum = hashlib.md5()
sum = hashlib.sha256()
while True:
data = fobj.read(131072)
if not data:
@ -619,8 +619,8 @@ def upload_file(server, prefix, path):
sum.update(data)
fobj.close()
digest = sum.hexdigest()
server.verifyChecksum(path, digest, 'md5')
logger.info('Uploaded %s (%s bytes, md5: %s)', destpath, offset, digest)
server.verifyChecksum(path, digest, 'sha256')
logger.info('Uploaded %s (%s bytes, sha256: %s)', destpath, offset, digest)
def get_mgmt_server():
@ -709,10 +709,10 @@ def stream_logs(server, handler, builds):
if contents:
size = len(contents)
data = base64.b64encode(contents)
digest = hashlib.md5(contents).hexdigest()
digest = hashlib.sha256(contents).hexdigest()
del contents
try:
server.uploadDirect(relpath, offset, size, digest, data)
server.uploadDirect(relpath, offset, size, ('sha256', digest), data)
except Exception:
log_local('error uploading %s' % relpath)
time.sleep(1)

View file

@ -775,14 +775,14 @@ class VMExecTask(BaseTaskHandler):
fobj.close()
return len(data)
def uploadDirect(self, filepath, offset, size, md5sum, data):
def uploadDirect(self, filepath, offset, size, hash, data):
"""
Upload contents directly to the server.
"""
remotepath = os.path.dirname(os.path.join(self.getUploadDir(), filepath))
filename = os.path.basename(filepath)
self.session.uploadFile(remotepath, filename, size,
md5sum, offset, data)
hash, offset, data)
def verifyChecksum(self, path, checksum, algo='sha1'):
local_path = os.path.abspath(os.path.join(self.output_dir, path))

View file

@ -56,7 +56,7 @@ def _setUserCookie(environ, user):
raise koji.AuthError('Unable to authenticate, server secret not configured')
digest_string = value + options['Secret'].value
digest_string = digest_string.encode('utf-8')
shasum = hashlib.sha1(digest_string)
shasum = hashlib.sha256(digest_string)
value = "%s:%s" % (shasum.hexdigest(), value)
cookies = http.cookies.SimpleCookie()
cookies['user'] = value
@ -96,7 +96,7 @@ def _getUserCookie(environ):
raise koji.AuthError('Unable to authenticate, server secret not configured')
digest_string = value + options['Secret'].value
digest_string = digest_string.encode('utf-8')
shasum = hashlib.sha1(digest_string)
shasum = hashlib.sha256(digest_string)
if shasum.hexdigest() != sig:
authlogger.warning('invalid user cookie: %s:%s', sig, value)
return None

View file

@ -173,7 +173,7 @@ def _genToken(environ, tstamp=None):
tstamp = _truncTime()
value = user + str(tstamp) + environ['koji.options']['Secret'].value
value = value.encode('utf-8')
return hashlib.sha1(value).hexdigest()[-8:]
return hashlib.sha256(value).hexdigest()
def _getValidTokens(environ):