From: Anthony Towns
Date: Fri, 11 Apr 2008 11:07:57 +0000 (+0000)
Subject: * dak/utils.py: build_file_list() extra parameters so it can
X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=195caf3ae4b2150a7234de64d0907d74f9a4c0be;p=dak

* dak/utils.py: build_file_list() extra parameters so it can
build a file list for checksums-foo fields. Don't use float() to
compare formats, because Format: 1.10 should compare greater than
Format: 1.9 (use "1.9".split(".",1) and tuple comparison instead)

* dak/process_unchecked.py: check_md5sum becomes check_hashes
and check_hash. If changes format is 1.8 or later, also check
checksums-sha1 and checksums-sha256 for both .changes and .dsc.
---

diff --git a/ChangeLog b/ChangeLog
index 65b0c7c5..90475c4f 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,14 @@
+2008-04-11  Anthony Towns
+
+	* dak/utils.py: build_file_list() extra parameters so it can
+	build a file list for checksums-foo fields. Don't use float() to
+	compare formats, because Format: 1.10 should compare greater than
+	Format: 1.9 (use "1.9".split(".",1) and tuple comparison instead)
+
+	* dak/process_unchecked.py: check_md5sum becomes check_hashes
+	and check_hash. If changes format is 1.8 or later, also check
+	checksums-sha1 and checksums-sha256 for both .changes and .dsc.
+
 2008-04-07  Joerg Jaspert
 
 	* daklib/utils.py (build_file_list): Check for dpkg .changes
diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 4a4cfd6b..c4fcf4b7 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -899,40 +899,59 @@ def check_urgency ():
 
 ################################################################################
 
-def check_md5sums ():
-    for file in files.keys():
-        try:
-            file_handle = daklib.utils.open_file(file)
-        except daklib.utils.cant_open_exc:
-            continue
+def check_hashes ():
+    # Make sure we recognise the format of the Files: field
+    format = changes.get("format", "0.0").split(".",1)
+    if len(format) == 2:
+        format = int(format[0]), int(format[1])
+    else:
+        format = int(float(format[0])), 0
 
-        # Check md5sum
-        if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]:
-            reject("%s: md5sum check failed." % (file))
-        file_handle.close()
-        # Check size
-        actual_size = os.stat(file)[stat.ST_SIZE]
-        size = int(files[file]["size"])
-        if size != actual_size:
-            reject("%s: actual file size (%s) does not match size (%s) in .changes"
-                   % (file, actual_size, size))
+    check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
+    check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
+
+    if format >= (1,8):
+        hashes = [("sha1", apt_pkg.sha1sum),
+                  ("sha256", apt_pkg.sha256sum)]
+    else:
+        hashes = []
+
+    for h,f in hashes:
+        fs = daklib.utils.build_file_list(changes, 0, "checksums-%s" % h, h)
+        check_hash( ".changes %s" % (h), fs, h, f, files)
+
+        if "source" not in changes["architecture"]: continue
+
+        fs = daklib.utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
+        check_hash( ".dsc %s" % (h), fs, h, f, dsc_files)
+
+################################################################################
+
+def check_hash (where, files, key, testfn, basedict = None):
+    if basedict:
+        for file in basedict.keys():
+            if file not in files:
+                reject("%s: no %s checksum" % (file, key))
+
+    for file in files.keys():
+        if basedict and file not in basedict:
+            reject("%s: extraneous entry in %s checksums" % (file, key))
 
-    for file in dsc_files.keys():
         try:
             file_handle = daklib.utils.open_file(file)
         except daklib.utils.cant_open_exc:
             continue
 
-        # Check md5sum
-        if apt_pkg.md5sum(file_handle) != dsc_files[file]["md5sum"]:
-            reject("%s: md5sum check failed." % (file))
+        # Check hash
+        if testfn(file_handle) != files[file][key]:
+            reject("%s: %s check failed." % (file, key))
         file_handle.close()
         # Check size
         actual_size = os.stat(file)[stat.ST_SIZE]
-        size = int(dsc_files[file]["size"])
+        size = int(files[file]["size"])
         if size != actual_size:
-            reject("%s: actual file size (%s) does not match size (%s) in .dsc"
-                   % (file, actual_size, size))
+            reject("%s: actual file size (%s) does not match size (%s) in %s"
+                   % (file, actual_size, size, where))
 
 ################################################################################
 
@@ -1518,7 +1537,7 @@ def process_it (changes_file):
         valid_dsc_p = check_dsc()
         if valid_dsc_p:
             check_source()
-        check_md5sums()
+        check_hashes()
         check_urgency()
         check_timestamps()
         check_signed_by_key()
diff --git a/daklib/utils.py b/daklib/utils.py
index e3767056..8b80f246 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -229,31 +229,42 @@ The rules for (signing_rules == 1)-mode are:
 
 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
 
-def build_file_list(changes, is_a_dsc=0):
+def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
     files = {}
 
     # Make sure we have a Files: field to parse...
-    if not changes.has_key("files"):
-        raise no_files_exc
+    if not changes.has_key(field):
+        raise no_files_exc
 
     # Make sure we recognise the format of the Files: field
-    format = changes.get("format", "")
-    if format != "":
-        format = float(format)
-        if not is_a_dsc and (format < 1.5 or format > 1.7):
-            raise nk_format_exc, format
+    format = changes.get("format", "0.0").split(".",1)
+    if len(format) == 2:
+        format = int(format[0]), int(format[1])
+    else:
+        format = int(float(format[0])), 0
+
+    if is_a_dsc:
+        if format != (1,0):
+            raise nk_format_exc, "%s" % (changes.get("format","0.0"))
+    else:
+        if (format < (1,5) or format > (1,8)):
+            raise nk_format_exc, "%s" % (changes.get("format","0.0"))
+        if field != "files" and format < (1,8):
+            raise nk_format_exc, "%s" % (changes.get("format","0.0"))
+
+    includes_section = (not is_a_dsc) and field == "files"
 
     # Parse each entry/line:
-    for i in changes["files"].split('\n'):
+    for i in changes[field].split('\n'):
         if not i:
             break
         s = i.split()
         section = priority = ""
         try:
-            if is_a_dsc:
-                (md5, size, name) = s
-            else:
+            if includes_section:
                 (md5, size, section, priority, name) = s
+            else:
+                (md5, size, name) = s
         except ValueError:
             raise changes_parse_error_exc, i
 
@@ -264,8 +275,9 @@ def build_file_list(changes, is_a_dsc=0):
 
         (section, component) = extract_component_from_section(section)
 
-        files[name] = Dict(md5sum=md5, size=size, section=section,
+        files[name] = Dict(size=size, section=section,
                            priority=priority, component=component)
+        files[name][hashname] = md5
 
     return files
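
For reference, the Format-comparison point in the commit message can be exercised on its own. The sketch below is not part of the patch; parse_format is an invented helper name that mirrors the split(".",1) handling added to build_file_list and check_hashes, and the asserts show why float() ordered 1.10 below 1.9 while tuple comparison orders it correctly.

# Standalone sketch, not part of the patch: why tuple comparison replaces float().
# parse_format is a hypothetical helper mirroring the patch's split(".",1) logic.
def parse_format(s):
    parts = s.split(".", 1)
    if len(parts) == 2:
        return int(parts[0]), int(parts[1])
    return int(float(parts[0])), 0

assert float("1.10") < float("1.9")                   # float() gets the ordering wrong
assert parse_format("1.10") > parse_format("1.9")     # tuple comparison gets it right
assert parse_format("1.8") >= (1, 8)                  # the checksums-* threshold used by check_hashes

With a .changes at Format: 1.8 or later, check_hashes then pulls the Checksums-Sha1 and Checksums-Sha256 fields through build_file_list and cross-checks them against the Files: entries (and, for source uploads, against the .dsc) via check_hash.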