PR#2317: md5: try the best to use sha256 instead of md5 and ignore FIPS in other parts

Merges #2317
https://pagure.io/koji/pull-request/2317

Fixes: #2291
https://pagure.io/koji/issue/2291
error raised by `hashlib.md5()` on RHEL8.2 (MD5 is disabled by FIPS)
This commit is contained in:
Tomas Kopecek 2020-06-24 14:42:11 +02:00
commit 2778b7ac25
13 changed files with 88 additions and 74 deletions

View file

@ -2,7 +2,6 @@ from __future__ import absolute_import, division
import ast
import fnmatch
import hashlib
import itertools
import json
import logging
@ -24,7 +23,7 @@ import six.moves.xmlrpc_client
from six.moves import filter, map, range, zip
import koji
from koji.util import base64encode, to_list
from koji.util import base64encode, md5_constructor, to_list
from koji_cli.lib import (
_,
_list_tasks,
@ -1500,7 +1499,7 @@ def handle_import_sig(goptions, session, args):
previous = session.queryRPMSigs(rpm_id=rinfo['id'], sigkey=sigkey)
assert len(previous) <= 1
if previous:
sighash = hashlib.md5(sighdr).hexdigest()
sighash = md5_constructor(sighdr).hexdigest()
if previous[0]['sighash'] == sighash:
print(_("Signature already imported: %s") % path)
continue

View file

@ -18,7 +18,7 @@ from six.moves import range
import koji
# import parse_arches to current namespace for backward compatibility
from koji import parse_arches
from koji.util import to_list
from koji.util import md5_constructor, to_list
try:
import krbV
@ -612,7 +612,7 @@ def download_archive(build, archive, topurl, quiet=False, noprogress=False):
# check checksum/checksum_type
if archive['checksum_type'] == koji.CHECKSUM_TYPES['md5']:
hash = hashlib.md5()
hash = md5_constructor()
elif archive['checksum_type'] == koji.CHECKSUM_TYPES['sha1']:
hash = hashlib.sha1()
elif archive['checksum_type'] == koji.CHECKSUM_TYPES['sha256']:

View file

@ -43,11 +43,11 @@ import tarfile
import tempfile
import time
import traceback
from urllib.parse import parse_qs
import xmlrpc.client
import zipfile
import rpm
import xmlrpc.client
from urllib.parse import parse_qs
import koji
import koji.auth
@ -64,6 +64,7 @@ from koji.util import (
decode_bytes,
dslice,
joinpath,
md5_constructor,
move_and_symlink,
multi_fnmatch,
safer_move,
@ -6636,22 +6637,18 @@ class CG_Importer(object):
(filesize, fileinfo['filename'], fileinfo['filesize']))
# checksum
if fileinfo['checksum_type'] != 'md5':
# XXX
# until we change the way we handle checksums, we have to limit this to md5
raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo)
with open(path, 'rb') as fp:
m = hashlib.md5()
chksum = get_verify_class(fileinfo['checksum_type'])()
while True:
contents = fp.read(8192)
if not contents:
break
m.update(contents)
if fileinfo['checksum'] != m.hexdigest():
chksum.update(contents)
if fileinfo['checksum'] != chksum.hexdigest():
raise koji.GenericError("File checksum mismatch for %s: %s != %s" %
(fileinfo['filename'], fileinfo['checksum'],
m.hexdigest()))
fileinfo['hub.checked_md5'] = True
chksum.hexdigest()))
fileinfo['hub.checked_hash'] = True
if fileinfo['buildroot_id'] not in self.br_prep:
raise koji.GenericError("Missing buildroot metadata for id %(buildroot_id)r" %
@ -7213,9 +7210,7 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
archiveinfo['filename'] = filename
archiveinfo['size'] = fileinfo['filesize']
archiveinfo['checksum'] = fileinfo['checksum']
if fileinfo['checksum_type'] != 'md5':
# XXX
# until we change the way we handle checksums, we have to limit this to md5
if fileinfo['checksum_type'] not in ('md5', 'sha256'):
raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo)
archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES[fileinfo['checksum_type']]
archiveinfo['metadata_only'] = True
@ -7224,28 +7219,26 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
archiveinfo['filename'] = filename
archiveinfo['size'] = os.path.getsize(filepath)
# trust values computed on hub (CG_Importer.prep_outputs)
if not fileinfo or not fileinfo.get('hub.checked_md5'):
if not fileinfo or not fileinfo.get('hub.checked_hash'):
with open(filepath, 'rb') as archivefp:
m = hashlib.md5()
chksum = get_verify_class('sha256')()
while True:
contents = archivefp.read(8192)
if not contents:
break
m.update(contents)
archiveinfo['checksum'] = m.hexdigest()
chksum.update(contents)
archiveinfo['checksum'] = chksum.hexdigest()
archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES['sha256']
else:
archiveinfo['checksum'] = fileinfo['checksum']
archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES['md5']
archiveinfo['checksum_type'] = fileinfo['checksum_type']
if fileinfo:
# check against metadata
if archiveinfo['size'] != fileinfo['filesize']:
raise koji.GenericError("File size mismatch for %s: %s != %s" %
(filename, archiveinfo['size'], fileinfo['filesize']))
if fileinfo['checksum_type'] != 'md5':
# XXX
# until we change the way we handle checksums, we have to limit this to md5
raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo)
if archiveinfo['checksum'] != fileinfo['checksum']:
if (archiveinfo['checksum'] != fileinfo['checksum'] or
archiveinfo['checksum_type'] != fileinfo['checksum_type']):
raise koji.GenericError("File checksum mismatch for %s: %s != %s" %
(filename, archiveinfo['checksum'], fileinfo['checksum']))
archivetype = get_archive_type(filename, strict=True)
@ -7367,7 +7360,7 @@ def _generate_maven_metadata(mavendir):
continue
if not os.path.isfile('%s/%s' % (mavendir, mavenfile)):
continue
for ext, sum_constr in (('.md5', hashlib.md5), ('.sha1', hashlib.sha1)):
for ext, sum_constr in (('.md5', md5_constructor), ('.sha1', hashlib.sha1)):
sumfile = mavenfile + ext
if sumfile not in mavenfiles:
sum = sum_constr()
@ -7417,7 +7410,7 @@ def add_rpm_sig(an_rpm, sighdr):
# we use the sigkey='' to represent unsigned in the db (so that uniqueness works)
else:
sigkey = koji.get_sigpacket_key_id(sigkey)
sighash = hashlib.md5(sighdr).hexdigest()
sighash = md5_constructor(sighdr).hexdigest()
rpm_id = rinfo['id']
# - db entry
q = """SELECT sighash FROM rpmsigs WHERE rpm_id=%(rpm_id)i AND sigkey=%(sigkey)s"""
@ -10324,20 +10317,23 @@ class RootExports(object):
context.session.assertPerm('admin')
return make_task(*args, **opts)
def uploadFile(self, path, name, size, md5sum, offset, data, volume=None):
def uploadFile(self, path, name, size, md5sum, offset, data, volume=None, checksum=None):
"""upload file to the hub
Files can be uploaded in chunks, if so the md5 and size describe the
Files can be uploaded in chunks, if so the hash and size describe the
chunk rather than the whole file.
:param str path: the relative path to upload to
:param str name: the name of the file
:param int size: size of contents (bytes)
:param str md5: md5sum (hex digest) of contents
:param checksum: MD5 hex digest (see md5sum) or a tuple (hash_type, digest) of contents
:type checksum: str or tuple
:param str data: base64 encoded file contents
:param int offset: The offset indicates where the chunk belongs.
The special offset -1 is used to indicate the final
chunk.
:param str md5sum: legacy param name of checksum. md5sum name is misleading,
but it is here for backwards compatibility
:returns: True
"""
@ -10347,14 +10343,16 @@ class RootExports(object):
# we will accept offset and size as strings to work around xmlrpc limits
offset = koji.decode_int(offset)
size = koji.decode_int(size)
if isinstance(md5sum, str):
if checksum is None and md5sum is not None:
checksum = md5sum
if isinstance(checksum, str):
# this case is for backwards compatibility
verify = "md5"
digest = md5sum
elif md5sum is None:
digest = checksum
elif checksum is None:
verify = None
else:
verify, digest = md5sum
verify, digest = checksum
sum_cls = get_verify_class(verify)
if offset != -1:
if size is not None:
@ -10452,14 +10450,13 @@ class RootExports(object):
data['size'] = st.st_size
data['mtime'] = st.st_mtime
if verify:
sum_cls = get_verify_class(verify)
chksum = get_verify_class(verify)()
if tail is not None:
if tail < 0:
raise koji.GenericError("invalid tail value: %r" % tail)
offset = max(st.st_size - tail, 0)
os.lseek(fd, offset, 0)
length = 0
chksum = sum_cls()
chunk = os.read(fd, 8192)
while chunk:
length += len(chunk)
@ -14636,9 +14633,11 @@ def get_upload_path(reldir, name, create=False, volume=None):
def get_verify_class(verify):
if verify == 'md5':
return hashlib.md5
return md5_constructor
elif verify == 'adler32':
return koji.util.adler32_constructor
elif verify == 'sha256':
return hashlib.sha256
elif verify:
raise koji.GenericError("Unsupported verify type: %s" % verify)
else:
@ -14666,9 +14665,8 @@ def handle_upload(environ):
raise koji.GenericError("destination not a file: %s" % fn)
if offset == 0 and not overwrite:
raise koji.GenericError("upload path exists: %s" % fn)
sum_cls = get_verify_class(verify)
chksum = get_verify_class(verify)()
size = 0
chksum = sum_cls()
inf = environ['wsgi.input']
fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666)
try:

View file

@ -3110,24 +3110,24 @@ class ClientSession(object):
fo = open(localfile, "rb") # specify bufsize?
totalsize = os.path.getsize(localfile)
ofs = 0
md5sum = hashlib.md5()
sha256sum = hashlib.sha256()
debug = self.opts.get('debug', False)
if callback:
callback(0, totalsize, 0, 0, 0)
while True:
lap = time.time()
contents = fo.read(blocksize)
md5sum.update(contents)
sha256sum.update(contents)
size = len(contents)
data = util.base64encode(contents)
if size == 0:
# end of file, use offset = -1 to finalize upload
offset = -1
digest = md5sum.hexdigest()
digest = sha256sum.hexdigest()
sz = ofs
else:
offset = ofs
digest = hashlib.md5(contents).hexdigest()
digest = hashlib.sha256(contents).hexdigest()
sz = size
del contents
tries = 0
@ -3135,7 +3135,8 @@ class ClientSession(object):
if debug:
self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" %
(path, name, sz, digest, offset))
if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts):
if self.callMethod('uploadFile', path, name, sz, ("sha256", digest),
offset, data, **volopts):
break
if tries <= retries:
tries += 1

View file

@ -69,12 +69,12 @@ def incremental_upload(session, fname, fd, path, retries=5, logger=None):
break
data = base64encode(contents)
digest = hashlib.md5(contents).hexdigest()
digest = hashlib.sha256(contents).hexdigest()
del contents
tries = 0
while True:
if session.uploadFile(path, fname, size, digest, offset, data):
if session.uploadFile(path, fname, size, ("sha256", digest), offset, data):
break
if tries <= retries:

View file

@ -45,6 +45,18 @@ import koji
from koji.xmlrpcplus import DateTime
# BEGIN kojikamid dup #
def md5_constructor(*args, **kwargs):
    """Return an md5 hash object that works even when FIPS mode is active.

    Under FIPS, plain hashlib.md5() raises an error; koji still needs md5
    for signature header hashes and legacy checksums, so mark the digest
    as not used for security purposes in that case.
    """
    fips_mode = getattr(hashlib._hashlib, 'get_fips_mode', None)
    if fips_mode is not None and fips_mode():
        # Tell OpenSSL this digest is not security-relevant so FIPS mode
        # does not reject it.  It is still used for *some* security.
        kwargs['usedforsecurity'] = False
    return hashlib.md5(*args, **kwargs)
# END kojikamid dup #
# imported from kojiweb and kojihub
def deprecated(message):
"""Print deprecation warning"""
@ -583,7 +595,7 @@ def check_sigmd5(filename):
f.seek(o)
# compute md5 of rest of file
md5 = hashlib.md5()
md5 = md5_constructor()
while True:
d = f.read(1024**2)
if not d:

View file

@ -6,9 +6,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "19a674d997af7098a444b60d7b51cee6",
"checksum": "ca9dd08a0b9f81b209c3ac768a7d1ca27973cfd920095e2dc3df5159f752039e",
"filename": "tdl-x86_64.xml",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1001,
@ -30,9 +30,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "a5114a20d790cf17eca1b1115a4546f8",
"checksum": "4083a6838c1b6895df27a69373f4c527a9722c045bccc08efe064f105d566c77",
"filename": "image.ks",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1002,
@ -54,9 +54,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "9828cf75d9d17ac8e79e53ed71c6a71c",
"checksum": "963a4396be7072012370db407b9ea3633b09dbe45926bb2ef912a86baac1d7b7",
"filename": "image-base.ks",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1003,
@ -78,9 +78,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "f601c0f647d7cdd4c92aa511876f8533",
"checksum": "9f4dea3a4b64def36be0119fef4d3f6e62eb6e316bf5749acddb134596faf5e9",
"filename": "foo-x86_64.xml",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1004,
@ -102,9 +102,9 @@
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "84547200ef5002292ecdd50c62de518e",
"checksum": "e3ff2b57824a7ee9201786a624c54057de1b279fbcf6782fe25898d657ebd354",
"filename": "my-image-7.4.2-2.x86_64.ova",
"checksum_type": 0,
"checksum_type": 2,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1005,

View file

@ -20,3 +20,7 @@ class TestGetVerifyClass(unittest.TestCase):
def test_get_verify_class_is_adler32(self):
    """get_verify_class('adler32') must return the adler32 constructor."""
    # The original bare `... is adler32_constructor` expression evaluated
    # to a bool that was silently discarded, so this test could never
    # fail.  Assert the identity explicitly.
    self.assertIs(kojihub.get_verify_class('adler32'), adler32_constructor)
def test_get_verify_class_is_sha256(self):
    """get_verify_class('sha256') must return hashlib.sha256."""
    # The original bare `... is hashlib.sha256` expression was a no-op
    # whose result was discarded; assert the identity explicitly so the
    # test can actually fail.
    self.assertIs(kojihub.get_verify_class('sha256'), hashlib.sha256)

View file

@ -2,7 +2,7 @@
awk '/^# INSERT kojikamid dup #/ {exit} {print $0}' kojikamid.py
for fn in ../koji/__init__.py ../koji/daemon.py
for fn in ../koji/__init__.py ../koji/daemon.py ../koji/util.py
do
awk '/^# END kojikamid dup #/ {p=0} p {print $0} /^# BEGIN kojikamid dup #/ {p=1}' $fn
done

View file

@ -333,7 +333,7 @@ class WindowsBuild(object):
elif checksum_type == 'sha256':
checksum = hashlib.sha256()
elif checksum_type == 'md5':
checksum = hashlib.md5()
checksum = md5_constructor()  # noqa: F821
else:
raise BuildError('Unknown checksum type %s for %s' % ( # noqa: F821
checksum_type,
@ -608,7 +608,7 @@ def upload_file(server, prefix, path):
destpath = os.path.join(prefix, path)
fobj = open(destpath, 'r')
offset = 0
sum = hashlib.md5()
sum = hashlib.sha256()
while True:
data = fobj.read(131072)
if not data:
@ -619,8 +619,8 @@ def upload_file(server, prefix, path):
sum.update(data)
fobj.close()
digest = sum.hexdigest()
server.verifyChecksum(path, digest, 'md5')
logger.info('Uploaded %s (%s bytes, md5: %s)', destpath, offset, digest)
server.verifyChecksum(path, digest, 'sha256')
logger.info('Uploaded %s (%s bytes, sha256: %s)', destpath, offset, digest)
def get_mgmt_server():
@ -709,10 +709,10 @@ def stream_logs(server, handler, builds):
if contents:
size = len(contents)
data = base64.b64encode(contents)
digest = hashlib.md5(contents).hexdigest()
digest = hashlib.sha256(contents).hexdigest()
del contents
try:
server.uploadDirect(relpath, offset, size, digest, data)
server.uploadDirect(relpath, offset, size, ('sha256', digest), data)
except Exception:
log_local('error uploading %s' % relpath)
time.sleep(1)

View file

@ -775,14 +775,14 @@ class VMExecTask(BaseTaskHandler):
fobj.close()
return len(data)
def uploadDirect(self, filepath, offset, size, md5sum, data):
def uploadDirect(self, filepath, offset, size, checksum, data):
"""
Upload contents directly to the server.
"""
remotepath = os.path.dirname(os.path.join(self.getUploadDir(), filepath))
filename = os.path.basename(filepath)
self.session.uploadFile(remotepath, filename, size,
md5sum, offset, data)
checksum, offset, data)
def verifyChecksum(self, path, checksum, algo='sha1'):
local_path = os.path.abspath(os.path.join(self.output_dir, path))
@ -795,7 +795,7 @@ class VMExecTask(BaseTaskHandler):
if algo == 'sha1':
sum = hashlib.sha1()
elif algo == 'md5':
sum = hashlib.md5()
sum = koji.util.md5_constructor()
elif algo == 'sha256':
sum = hashlib.sha256()
else:

View file

@ -56,7 +56,7 @@ def _setUserCookie(environ, user):
raise koji.AuthError('Unable to authenticate, server secret not configured')
digest_string = value + options['Secret'].value
digest_string = digest_string.encode('utf-8')
shasum = hashlib.sha1(digest_string)
shasum = hashlib.sha256(digest_string)
value = "%s:%s" % (shasum.hexdigest(), value)
cookies = http.cookies.SimpleCookie()
cookies['user'] = value
@ -96,7 +96,7 @@ def _getUserCookie(environ):
raise koji.AuthError('Unable to authenticate, server secret not configured')
digest_string = value + options['Secret'].value
digest_string = digest_string.encode('utf-8')
shasum = hashlib.sha1(digest_string)
shasum = hashlib.sha256(digest_string)
if shasum.hexdigest() != sig:
authlogger.warning('invalid user cookie: %s:%s', sig, value)
return None

View file

@ -173,7 +173,7 @@ def _genToken(environ, tstamp=None):
tstamp = _truncTime()
value = user + str(tstamp) + environ['koji.options']['Secret'].value
value = value.encode('utf-8')
return hashlib.md5(value).hexdigest()[-8:]
return hashlib.sha256(value).hexdigest()
def _getValidTokens(environ):