From: Mark Hymers
Date: Thu, 23 Jul 2009 20:45:40 +0000 (+0100)
Subject: Break out .dak file i/o handling into a separate class
X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=126a1c72aa67a25d615e4417c15e8fa74419ca78;p=dak

Break out .dak file i/o handling into a separate class

This will help when we need to hack on .dak files, for instance.

Signed-off-by: Mark Hymers
---

diff --git a/daklib/changes.py b/daklib/changes.py
new file mode 100755
index 00000000..150e7c53
--- /dev/null
+++ b/daklib/changes.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python
+# vim:set et sw=4:
+
+"""
+Changes class for dak
+
+@contact: Debian FTP Master
+@copyright: 2001 - 2006 James Troup
+@copyright: 2009 Joerg Jaspert
+@copyright: 2009 Mark Hymers
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+###############################################################################
+
+import os
+import stat
+from cPickle import Unpickler, Pickler
+from errno import EPERM
+
+from apt_inst import debExtractControl
+from apt_pkg import ParseSection
+
+from utils import open_file, fubar, poolify
+
+###############################################################################
+
+__all__ = []
+
+###############################################################################
+
+CHANGESFIELDS_MANDATORY = [ "distribution", "source", "architecture",
+    "version", "maintainer", "urgency", "fingerprint", "changedby822",
+    "changedby2047", "changedbyname", "maintainer822", "maintainer2047",
+    "maintainername", "maintaineremail", "closes", "changes" ]
+
+__all__.append('CHANGESFIELDS_MANDATORY')
+
+CHANGESFIELDS_OPTIONAL = [ "changed-by", "filecontents", "format",
+    "process-new note", "adv id", "distribution-version", "sponsoremail" ]
+
+__all__.append('CHANGESFIELDS_OPTIONAL')
+
+CHANGESFIELDS_FILES = [ "package", "version", "architecture", "type", "size",
+    "md5sum", "sha1sum", "sha256sum", "component", "location id",
+    "source package", "source version", "maintainer", "dbtype", "files id",
+    "new", "section", "priority", "othercomponents", "pool name",
+    "original component" ]
+
+__all__.append('CHANGESFIELDS_FILES')
+
+CHANGESFIELDS_DSC = [ "source", "version", "maintainer", "fingerprint",
+    "uploaders", "bts changelog", "dm-upload-allowed" ]
+
+__all__.append('CHANGESFIELDS_DSC')
+
+CHANGESFIELDS_DSCFILES_MANDATORY = [ "size", "md5sum" ]
+
+__all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')
+
+CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]
+
+__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
+
+###############################################################################
+
+class Changes(object):
+    """ Convenience wrapper to carry around all the package information """
+
+    def __init__(self, **kwds):
+        self.reset()
+
+    def reset(self):
+        self.changes_file = ""
+
+        self.changes = {}
+        self.dsc = {}
+        self.files = {}
+        self.dsc_files = {}
+
+        self.orig_tar_id = None
+        self.orig_tar_location = ""
+        self.orig_tar_gz = None
+
+    def file_summary(self):
+        # changes["distribution"] may not exist in corner cases
+        # (e.g. unreadable changes files)
+        if not self.changes.has_key("distribution") or not \
+           isinstance(self.changes["distribution"], dict):
+            self.changes["distribution"] = {}
+
+        byhand = False
+        new = False
+        summary = ""
+        override_summary = ""
+
+        for name, entry in sorted(self.files.items()):
+            if entry.has_key("byhand"):
+                byhand = True
+                summary += name + " byhand\n"
+
+            elif entry.has_key("new"):
+                new = True
+                summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])
+
+                if entry.has_key("othercomponents"):
+                    summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])
+
+                if entry["type"] == "deb":
+                    deb_fh = open_file(name)
+                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
+                    deb_fh.close()
+
+            else:
+                entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
+                destination = entry["pool name"] + name
+                summary += name + "\n to " + destination + "\n"
+
+            if not entry.has_key("type"):
+                entry["type"] = "unknown"
+
+            if entry["type"] in ["deb", "udeb", "dsc"]:
+                # (queue/unchecked): we already have override entries there, so use them.
+                # (process-new): we don't have override entries there, so use the newly generated ones.
+                override_prio = entry.get("override priority", entry["priority"])
+                override_sect = entry.get("override section", entry["section"])
+                override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)
+
+        return (byhand, new, summary, override_summary)
+
+    def check_override(self):
+        """
+        Checks override entries for validity.
+
+        Returns an empty string if there are no problems
+        or the text of a warning if there are
+        """
+
+        summary = ""
+
+        # Abandon the check if it's a non-sourceful upload
+        if not self.changes["architecture"].has_key("source"):
+            return summary
+
+        for name, entry in sorted(self.files.items()):
+            if not entry.has_key("new") and entry["type"] == "deb":
+                if entry["section"] != "-":
+                    if entry["section"].lower() != entry["override section"].lower():
+                        summary += "%s: package says section is %s, override says %s.\n" % (name,
+                                                                                            entry["section"],
+                                                                                            entry["override section"])
+
+                if entry["priority"] != "-":
+                    if entry["priority"] != entry["override priority"]:
+                        summary += "%s: package says priority is %s, override says %s.\n" % (name,
+                                                                                             entry["priority"],
+                                                                                             entry["override priority"])
+
+        return summary
+
+
+    def load_dot_dak(self, changesfile):
+        """
+        Update ourself by reading a previously created cPickle .dak dumpfile.
+        """
+
+        self.changes_file = changesfile
+        dump_filename = self.changes_file[:-8]+".dak"
+        dump_file = open_file(dump_filename)
+
+        p = Unpickler(dump_file)
+
+        self.changes.update(p.load())
+        self.dsc.update(p.load())
+        self.files.update(p.load())
+        self.dsc_files.update(p.load())
+
+        self.orig_tar_id = p.load()
+        self.orig_tar_location = p.load()
+
+        dump_file.close()
+
+    def sanitised_files(self):
+        ret = {}
+        for name, entry in self.files.items():
+            ret[name] = {}
+            for i in CHANGESFIELDS_FILES:
+                if entry.has_key(i):
+                    ret[name][i] = entry[i]
+
+        return ret
+
+    def sanitised_changes(self):
+        ret = {}
+        # Mandatory changes fields
+        for i in CHANGESFIELDS_MANDATORY:
+            ret[i] = self.changes[i]
+
+        # Optional changes fields
+        for i in CHANGESFIELDS_OPTIONAL:
+            if self.changes.has_key(i):
+                ret[i] = self.changes[i]
+
+        return ret
+
+    def sanitised_dsc(self):
+        ret = {}
+        for i in CHANGESFIELDS_DSC:
+            if self.dsc.has_key(i):
+                ret[i] = self.dsc[i]
+
+        return ret
+
+    def sanitised_dsc_files(self):
+        ret = {}
+        for name, entry in self.dsc_files.items():
+            ret[name] = {}
+            # Mandatory dsc_files fields
+            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
+                ret[name][i] = entry[i]
+
+            # Optional dsc_files fields
+            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
+                if entry.has_key(i):
+                    ret[name][i] = entry[i]
+
+        return ret
+
+    def write_dot_dak(self, dest_dir):
+        """
+        Dump ourself into a cPickle file.
+
+        @type dest_dir: string
+        @param dest_dir: Path where the dumpfile should be stored
+
+        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
+               there's some idea of what process-accepted & process-new use from
+               process-unchecked. (JT)
+
+        """
+
+        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
+        dump_file = open_file(dump_filename, 'w')
+
+        try:
+            os.chmod(dump_filename, 0664)
+        except OSError, e:
+            # chmod may fail when the dumpfile is not owned by the user
+            # invoking dak (like e.g. when NEW is processed by a member
+            # of ftpteam)
+            if e.errno == EPERM:
+                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
+                # security precaution, should never happen unless a weird
+                # umask is set anywhere
+                if perms & stat.S_IWOTH:
+                    fubar("%s is world writable and chmod failed." % \
+                        (dump_filename,))
+                # ignore the failed chmod otherwise as the file should
+                # already have the right privileges and is just, at worst,
+                # unreadable for world
+            else:
+                raise
+
+        p = Pickler(dump_file, 1)
+
+        p.dump(self.sanitised_changes())
+        p.dump(self.sanitised_dsc())
+        p.dump(self.sanitised_files())
+        p.dump(self.sanitised_dsc_files())
+        p.dump(self.orig_tar_id)
+        p.dump(self.orig_tar_location)
+
+        dump_file.close()
+
+__all__.append('Changes')
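
As a rough illustration of how the new class is meant to be driven, here is a minimal sketch. It assumes daklib is importable and that a previously processed upload left a foo_1.0_i386.changes / foo_1.0_i386.dak pair in the current directory, since load_dot_dak() derives the dump name from the .changes name; the filenames and the destination directory are hypothetical.

#!/usr/bin/env python
# Minimal sketch (Python 2, as dak was at the time): read an existing
# .dak dump via the new Changes class and write a sanitised copy.
# Assumptions: daklib is on the import path, and a hypothetical
# foo_1.0_i386.changes with its matching foo_1.0_i386.dak dump already
# exist in the current directory.

from daklib.changes import Changes

upload = Changes()

# Populate upload.changes, upload.dsc, upload.files and upload.dsc_files
# from the pickled dump sitting next to the .changes file.
upload.load_dot_dak("foo_1.0_i386.changes")

print upload.changes["source"], upload.changes["version"]

# Re-dump the same information into another (hypothetical) directory.
upload.write_dot_dak("/srv/dak/tmp")

Note that write_dot_dak() goes through the sanitised_*() helpers, so any keys not listed in the CHANGESFIELDS_* constants are dropped from the dump it writes.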