From: Mike O'Connor
Date: Sat, 19 Dec 2009 13:41:19 +0000 (-0500)
Subject: merge from ftp-master
X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=4c7eee9642e82b6286f807ad92a93e7ef30288e6;p=dak

merge from ftp-master

Signed-off-by: Mike O'Connor
---

diff --cc dak/dakdb/update25.py
index a61deb61,b2813d91..b2813d91
mode 100644,100755..100644
--- a/dak/dakdb/update25.py
+++ b/dak/dakdb/update25.py
diff --cc dak/dakdb/update28.py
index 00000000,00000000..9e5c066d
new file mode 100644
--- /dev/null
+++ b/dak/dakdb/update28.py
@@@ -1,0 -1,0 +1,270 @@@
++#!/usr/bin/env python
++# coding=utf8
++
++"""
++Add the contents tables and the triggers that keep them up to date
++
++@contact: Debian FTP Master
++@copyright: 2009 Mike O'Connor
++@license: GNU General Public License version 2 or later
++"""
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 2 of the License, or
++# (at your option) any later version.
++
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
++
++################################################################################
++
++import psycopg2
++from daklib import utils
++from daklib.config import Config
++from daklib.dbconn import DBConn
++from daklib.dak_exceptions import DBUpdateError
++
++# Assumed to mirror the prefix used by dak/contents.py, from which
++# suites() below was lifted; it lets dak.conf override the suite list.
++options_prefix = "Contents::Options"
++
++################################################################################
++
++def suites():
++    """
++    return a list of suites to operate on
++    """
++    if Config().has_key("%s::%s" % (options_prefix, "Suite")):
++        suite_list = utils.split_args(Config()["%s::%s" % (options_prefix, "Suite")])
++    else:
++        suite_list = ['unstable', 'testing']
++
++    return suite_list
++
++def arches(cursor, suite):
++    """
++    return a list of archs to operate on
++    """
++    arch_list = []
++    # psycopg2 uses pyformat placeholders (%(name)s), not :name
++    cursor.execute("""SELECT s.architecture, a.arch_string
++    FROM suite_architectures s
++    JOIN architecture a ON (s.architecture=a.id)
++    WHERE suite = %(suite)s""", {'suite': suite})
++
++    while True:
++        r = cursor.fetchone()
++        if not r:
++            break
++
++        if r[1] != "source" and r[1] != "all":
++            arch_list.append((r[0], r[1]))
++
++    return arch_list
++
++def do_update(self):
++    """
++    Adding contents tables as a first step to, maybe, finally getting rid
++    of apt-ftparchive
++    """
++
++    print __doc__
++
++    try:
++        c = self.db.cursor()
++
++        c.execute("""CREATE TABLE pending_bin_contents (
++        id serial NOT NULL,
++        package text NOT NULL,
++        version debversion NOT NULL,
++        arch int NOT NULL,
++        filename text NOT NULL,
++        type int NOT NULL,
++        PRIMARY KEY(id))""")
++
++        c.execute("""CREATE TABLE deb_contents (
++        filename text,
++        section text,
++        package text,
++        binary_id integer,
++        arch integer,
++        suite integer)""")
++
++        c.execute("""CREATE TABLE udeb_contents (
++        filename text,
++        section text,
++        package text,
++        binary_id integer,
++        suite integer,
++        arch integer)""")
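++
++        # Illustration only (not executed by this update): once populated,
++        # a Contents-style listing for one suite/architecture pair could be
++        # pulled with a query along these lines.  The ids are made up for
++        # the example; 2 mirrors the hard-coded id this script assumes for
++        # arch "all" in the partial indexes below.
++        #
++        #   SELECT filename, section || '/' || package
++        #     FROM deb_contents
++        #    WHERE suite = 3 AND arch IN (2, 5)
++        #    ORDER BY filename;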
++
++        c.execute("""ALTER TABLE ONLY deb_contents
++        ADD CONSTRAINT deb_contents_arch_fkey
++        FOREIGN KEY (arch) REFERENCES architecture(id)
++        ON DELETE CASCADE;""")
++
++        c.execute("""ALTER TABLE ONLY udeb_contents
++        ADD CONSTRAINT udeb_contents_arch_fkey
++        FOREIGN KEY (arch) REFERENCES architecture(id)
++        ON DELETE CASCADE;""")
++
++        c.execute("""ALTER TABLE ONLY deb_contents
++        ADD CONSTRAINT deb_contents_pkey
++        PRIMARY KEY (filename,package,arch,suite);""")
++
++        c.execute("""ALTER TABLE ONLY udeb_contents
++        ADD CONSTRAINT udeb_contents_pkey
++        PRIMARY KEY (filename,package,arch,suite);""")
++
++        c.execute("""ALTER TABLE ONLY deb_contents
++        ADD CONSTRAINT deb_contents_suite_fkey
++        FOREIGN KEY (suite) REFERENCES suite(id)
++        ON DELETE CASCADE;""")
++
++        c.execute("""ALTER TABLE ONLY udeb_contents
++        ADD CONSTRAINT udeb_contents_suite_fkey
++        FOREIGN KEY (suite) REFERENCES suite(id)
++        ON DELETE CASCADE;""")
++
++        c.execute("""ALTER TABLE ONLY deb_contents
++        ADD CONSTRAINT deb_contents_binary_fkey
++        FOREIGN KEY (binary_id) REFERENCES binaries(id)
++        ON DELETE CASCADE;""")
++
++        c.execute("""ALTER TABLE ONLY udeb_contents
++        ADD CONSTRAINT udeb_contents_binary_fkey
++        FOREIGN KEY (binary_id) REFERENCES binaries(id)
++        ON DELETE CASCADE;""")
++
++        c.execute("CREATE INDEX ind_deb_contents_binary ON deb_contents(binary_id);")
++
++        suite_list = suites()
++
++        for suite in [i.lower() for i in suite_list]:
++            suite_id = DBConn().get_suite_id(suite)
++            arch_list = arches(c, suite_id)
++
++            for (arch_id, arch_str) in arch_list:
++                # arch=2 is assumed to be the id of arch "all"; hyphens in
++                # arch names (e.g. kfreebsd-i386) are not valid in unquoted
++                # identifiers, so keep them out of the index name
++                c.execute("CREATE INDEX ind_deb_contents_%s_%s ON deb_contents (arch,suite) WHERE (arch=2 OR arch=%d) AND suite=%d"
++                          % (arch_str.replace('-', '_'), suite, arch_id, suite_id))
++
++            for section, sname in [("debian-installer", "main"),
++                                   ("non-free/debian-installer", "nonfree")]:
++                c.execute("CREATE INDEX ind_udeb_contents_%s_%s ON udeb_contents (section,suite) WHERE section='%s' AND suite=%d"
++                          % (sname, suite, section, suite_id))
++
++        # NB: the plpython bodies below must not themselves contain triple
++        # double quotes, or they would terminate the enclosing Python
++        # string; hence the ''' quoting inside.
++        c.execute("""CREATE OR REPLACE FUNCTION update_contents_for_bin_a() RETURNS trigger AS $$
++    event = TD["event"]
++    if event == "DELETE" or event == "UPDATE":
++
++        plpy.execute(plpy.prepare("DELETE FROM deb_contents WHERE binary_id=$1 and suite=$2",
++                                  ["int", "int"]),
++                     [TD["old"]["bin"], TD["old"]["suite"]])
++
++    if event == "INSERT" or event == "UPDATE":
++
++        content_data = plpy.execute(plpy.prepare(
++            '''SELECT s.section, b.package, b.architecture, ot.type
++            FROM override o
++            JOIN override_type ot on o.type=ot.id
++            JOIN binaries b on b.package=o.package
++            JOIN files f on b.file=f.id
++            JOIN location l on l.id=f.location
++            JOIN section s on s.id=o.section
++            WHERE b.id=$1
++            AND o.suite=$2
++            ''',
++            ["int", "int"]),
++            [TD["new"]["bin"], TD["new"]["suite"]])[0]
++
++        tablename = "%s_contents" % content_data['type']
++
++        plpy.execute(plpy.prepare('''DELETE FROM %s
++                                     WHERE package=$1 and arch=$2 and suite=$3''' % tablename,
++                                  ['text', 'int', 'int']),
++                     [content_data['package'],
++                      content_data['architecture'],
++                      TD["new"]["suite"]])
++
++        filenames = plpy.execute(plpy.prepare(
++            "SELECT bc.file FROM bin_contents bc where bc.binary_id=$1",
++            ["int"]),
++            [TD["new"]["bin"]])
++
++        for filename in filenames:
++            plpy.execute(plpy.prepare(
++                '''INSERT INTO %s
++                   (filename,section,package,binary_id,arch,suite)
++                   VALUES($1,$2,$3,$4,$5,$6)''' % tablename,
++                ["text", "text", "text", "int", "int", "int"]),
++                [filename["file"],
++                 content_data["section"],
++                 content_data["package"],
++                 TD["new"]["bin"],
++                 content_data["architecture"],
++                 TD["new"]["suite"]])
++$$ LANGUAGE plpythonu VOLATILE SECURITY DEFINER;
++""")
++
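++        # Rough illustration of the trigger function above (made-up ids):
++        # inserting (bin=42, suite=3) into bin_associations re-reads
++        # bin_contents for binary 42 and materialises one row per shipped
++        # file into deb_contents or udeb_contents, roughly equivalent to
++        #
++        #   INSERT INTO deb_contents
++        #          (filename, section, package, binary_id, arch, suite)
++        #   VALUES ('usr/bin/foo', 'utils', 'foo', 42, 5, 3);
++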
++        c.execute("""CREATE OR REPLACE FUNCTION update_contents_for_override() RETURNS trigger AS $$
++    event = TD["event"]
++    if event == "UPDATE":
++
++        otype = plpy.execute(plpy.prepare("SELECT type from override_type where id=$1", ["int"]), [TD["new"]["type"]])[0]
++        if otype["type"].endswith("deb"):
++            section = plpy.execute(plpy.prepare("SELECT section from section where id=$1", ["int"]), [TD["new"]["section"]])[0]
++
++            table_name = "%s_contents" % otype["type"]
++            plpy.execute(plpy.prepare("UPDATE %s set section=$1 where package=$2 and suite=$3" % table_name,
++                                      ["text", "text", "int"]),
++                         [section["section"],
++                          TD["new"]["package"],
++                          TD["new"]["suite"]])
++
++$$ LANGUAGE plpythonu VOLATILE SECURITY DEFINER;
++""")
++
++        # Validation function: reject bin_associations rows whose
++        # architecture is not carried by the target suite.  It needs a name
++        # of its own; reusing update_contents_for_override() here would
++        # silently replace the function defined just above.
++        c.execute("""CREATE OR REPLACE FUNCTION check_illegal_suite_arch() RETURNS trigger AS $$
++    event = TD["event"]
++    if event == "UPDATE" or event == "INSERT":
++        row = TD["new"]
++        r = plpy.execute(plpy.prepare('''SELECT 1 from suite_architectures sa
++                                         JOIN binaries b ON b.architecture = sa.architecture
++                                         WHERE b.id = $1 and sa.suite = $2''',
++                                      ["int", "int"]),
++                         [row["bin"], row["suite"]])
++        if not len(r):
++            plpy.error("Illegal architecture for this suite")
++
++$$ LANGUAGE plpythonu VOLATILE;""")
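++
++        # Illustration only (made-up ids): if suite 3 does not carry
++        # architecture 7 in suite_architectures, the trigger installed just
++        # below would reject associating an arch-7 binary with it:
++        #
++        #   INSERT INTO bin_associations (bin, suite) VALUES (4711, 3);
++        #   -- ERROR:  Illegal architecture for this suite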
++
++        c.execute("""CREATE TRIGGER illegal_suite_arch_bin_associations_trigger
++                      BEFORE INSERT OR UPDATE ON bin_associations
++                      FOR EACH ROW EXECUTE PROCEDURE check_illegal_suite_arch();""")
++
++        c.execute("""CREATE TRIGGER bin_associations_contents_trigger
++                      AFTER INSERT OR UPDATE OR DELETE ON bin_associations
++                      FOR EACH ROW EXECUTE PROCEDURE update_contents_for_bin_a();""")
++        c.execute("""CREATE TRIGGER override_contents_trigger
++                      AFTER UPDATE ON override
++                      FOR EACH ROW EXECUTE PROCEDURE update_contents_for_override();""")
++
++        c.execute("CREATE INDEX ind_deb_contents_name ON deb_contents(package);")
++        c.execute("CREATE INDEX ind_udeb_contents_name ON udeb_contents(package);")
++
++        c.execute("UPDATE config SET value = '28' WHERE name = 'db_revision'")
++
++        self.db.commit()
++
++    except psycopg2.ProgrammingError, msg:
++        self.db.rollback()
++        raise DBUpdateError, "Unable to apply process-new update 28, rollback issued. Error message : %s" % (str(msg))
++
diff --cc dak/import_new_files.py
index 00000000,f33c30fb..5b132c82
mode 000000,100755..100755
--- a/dak/import_new_files.py
+++ b/dak/import_new_files.py
@@@ -1,0 -1,187 +1,187 @@@
+ #!/usr/bin/env python
+ # coding=utf8
+ 
+ """
+ Import files from the NEW queue into the database
+ 
+ @contact: Debian FTP Master
+ @copyright: 2009 Mike O'Connor
+ @license: GNU General Public License version 2 or later
+ """
+ 
+ # This program is free software; you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation; either version 2 of the License, or
+ # (at your option) any later version.
+ 
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ # GNU General Public License for more details.
+ 
+ # You should have received a copy of the GNU General Public License
+ # along with this program; if not, write to the Free Software
+ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ 
+ ################################################################################
+ 
+ import sys
+ import os
+ import logging
+ import glob
+ import apt_pkg
+ from daklib.dbconn import DBConn, get_dbchange, get_policy_queue, session_wrapper, ChangePendingFile, get_location, check_poolfile
+ from daklib.config import Config
+ from daklib.queue import Upload
+ from daklib.utils import poolify
+ from daklib import utils
+ 
+ # where in dak.conf all of our configuration will be stowed
+ options_prefix = "NewFiles"
+ options_prefix = "%s::Options" % options_prefix
+ 
+ log = logging.getLogger()
+ 
+ ################################################################################
+ 
+ 
+ def usage (exit_code=0):
+     print """Usage: dak import-new-files [options]
+ 
+ OPTIONS
+      -v, --verbose
+         show verbose information messages
+ 
+      -q, --quiet
+         suppress all output but errors
+ 
+ """
+     sys.exit(exit_code)
+ 
+ class ImportNewFiles(object):
+     @session_wrapper
+     def __init__(self, session=None):
+         cnf = Config()
+         try:
+             newq = get_policy_queue('new', session)
+             for changes_fn in glob.glob(newq.path + "/*.changes"):
+                 changes_bn = os.path.basename(changes_fn)
+                 chg = get_dbchange(changes_bn, session)
+ 
+                 u = Upload()
+                 success = u.load_changes(changes_fn)
+                 # bail out before touching u.pkg if the changes file did
+                 # not parse
+                 if not success:
+                     log.critical("failed to load %s" % changes_fn)
+                     sys.exit(1)
+                 log.info("ACCLAIM: %s" % changes_fn)
+ 
+                 u.pkg.changes_file = changes_bn
+                 u.check_hashes()
+ 
+                 if not chg:
+                     chg = u.pkg.add_known_changes(newq.path, newq.policy_queue_id, session)
+                     session.add(chg)
+ 
+                 files = []
+                 for chg_fn in u.pkg.files.keys():
+                     try:
+                         f = open(os.path.join(newq.path, chg_fn))
+                         cpf = ChangePendingFile()
+                         cpf.filename = chg_fn
+                         cpf.size = u.pkg.files[chg_fn]['size']
+                         cpf.md5sum = u.pkg.files[chg_fn]['md5sum']
+ 
+                         if u.pkg.files[chg_fn].has_key('sha1sum'):
+                             cpf.sha1sum = u.pkg.files[chg_fn]['sha1sum']
+                         else:
+                             log.warning("Having to generate sha1sum for %s" % chg_fn)
+                             f.seek(0)
+                             cpf.sha1sum = apt_pkg.sha1sum(f)
+ 
+                         if u.pkg.files[chg_fn].has_key('sha256sum'):
+                             cpf.sha256sum = u.pkg.files[chg_fn]['sha256sum']
+                         else:
+                             log.warning("Having to generate sha256sum for %s" % chg_fn)
+                             f.seek(0)
+                             cpf.sha256sum = apt_pkg.sha256sum(f)
+ 
+                         session.add(cpf)
+                         files.append(cpf)
+                         f.close()
+                     except IOError:
+                         # Can't find the file, try to look it up in the pool
+                         poolname = poolify(u.pkg.changes["source"], u.pkg.files[chg_fn]["component"])
+                         l = get_location(cnf["Dir::Pool"], u.pkg.files[chg_fn]["component"], session=session)
+                         if not l:
+                             log.critical("ERROR: Can't find location for %s (component %s)" % (chg_fn, u.pkg.files[chg_fn]["component"]))
+                             sys.exit(1)
+ 
+                         found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
+                                                          u.pkg.files[chg_fn]['size'],
+                                                          u.pkg.files[chg_fn]["md5sum"],
+                                                          l.location_id,
+                                                          session=session)
+ 
+                         if found is None:
+                             log.critical("ERROR: Found multiple files for %s in pool" % chg_fn)
+                             sys.exit(1)
+                         elif found is False and poolfile is not None:
+                             log.critical("ERROR: md5sum / size mismatch for %s in pool" % chg_fn)
+                             sys.exit(1)
+                         else:
+                             if poolfile is None:
+                                 log.critical("ERROR: Could not find %s in pool" % chg_fn)
+                                 sys.exit(1)
+                             else:
+                                 chg.poolfiles.append(poolfile)
+ 
+                 chg.files = files
+ 
+                 session.commit()
- 
++
+         except KeyboardInterrupt:
+             # no worker threads here (unlike the threaded importers), so a
+             # plain warn-and-exit is enough
+             utils.warn("Caught C-c; terminating.")
+             sys.exit(1)
+ 
+ 
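+ # Illustrative usage (an assumption, not part of this change): all of the
+ # work happens in __init__, so a one-off import can be driven from Python
+ # with just
+ #
+ #   ImportNewFiles()   # scan the NEW queue, record pending files
+ #
+ # main() below wires the same call to the "dak import-new-files" command
+ # line.
+ 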
+ def main():
+     cnf = Config()
+ 
+     arguments = [('h', "help", "%s::%s" % (options_prefix, "Help")),
+                  ('q', "quiet", "%s::%s" % (options_prefix, "Quiet")),
+                  ('v', "verbose", "%s::%s" % (options_prefix, "Verbose")),
+                  ]
+ 
+     args = apt_pkg.ParseCommandLine(cnf.Cnf, arguments, sys.argv)
+ 
+     if len(args) > 0:
+         usage(1)
+ 
+     if cnf.has_key("%s::%s" % (options_prefix, "Help")):
+         usage(0)
+ 
+     level = logging.INFO
+     if cnf.has_key("%s::%s" % (options_prefix, "Quiet")):
+         level = logging.ERROR
+     elif cnf.has_key("%s::%s" % (options_prefix, "Verbose")):
+         level = logging.DEBUG
+ 
+     logging.basicConfig(level=level,
+                         format='%(asctime)s %(levelname)s %(message)s',
+                         stream=sys.stderr)
+ 
+     ImportNewFiles()
+ 
+ 
+ if __name__ == '__main__':
+     main()
diff --cc dak/update_db.py
index ecdd99a7,5d1cb86c..2d962dcb
--- a/dak/update_db.py
+++ b/dak/update_db.py
@@@ -45,7 -45,7 +45,7 @@@ from daklib.dak_exceptions import DBUpd
  ################################################################################
  
  Cnf = None
- required_database_schema = 24
 -required_database_schema = 27
++required_database_schema = 28
  
  ################################################################################
diff --cc daklib/changes.py
index 6b776268,ca9609ef..e016638c
--- a/daklib/changes.py
+++ b/daklib/changes.py
@@@ -204,30 -204,75 +204,75 @@@ class Changes(object)
              else:
                  multivalues[key] = self.changes[key]
  
-         # TODO: Use ORM
-         session.execute(
-             """INSERT INTO changes
-               (changesname, in_queue, seen, source, binaries, architecture, version,
-               distribution, urgency, maintainer, fingerprint, changedby, date)
-               VALUES (:changesfile,:in_queue,:filetime,:source,:binary, :architecture,
-               :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
-             { 'changesfile':  self.changes_file,
-               'filetime':     filetime,
-               'in_queue':     in_queue,
-               'source':       self.changes["source"],
-               'binary':       multivalues["binary"],
-               'architecture': multivalues["architecture"],
-               'version':      self.changes["version"],
-               'distribution': multivalues["distribution"],
-               'urgency':      self.changes["urgency"],
-               'maintainer':   self.changes["maintainer"],
-               'fingerprint':  self.changes["fingerprint"],
-               'changedby':    self.changes["changed-by"],
-               'date':         self.changes["date"]} )
+         chg = DBChange()
+         chg.changesname = self.changes_file
+         chg.seen = filetime
+         chg.in_queue_id = in_queue
+         chg.source = self.changes["source"]
+         chg.binaries = multivalues["binary"]
+         chg.architecture = multivalues["architecture"]
+         chg.version = self.changes["version"]
+         chg.distribution = multivalues["distribution"]
+         chg.urgency = self.changes["urgency"]
+         chg.maintainer = self.changes["maintainer"]
+         chg.fingerprint = self.changes["fingerprint"]
+         chg.changedby = self.changes["changed-by"]
+         chg.date = self.changes["date"]
- 
++
+         session.add(chg)
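+ 
+         # Note (illustration, not new behaviour): session.add() only
+         # registers the object; the row is written by the session.commit()
+         # further down, after which the same change can be fetched back with
+         #
+         #   session.query(DBChange).filter_by(
+         #       changesname=self.changes_file).one()
+         #
+         # which is exactly how this method produces its return value.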
+ 
+         files = []
+         for chg_fn, entry in self.files.items():
+             try:
+                 f = open(os.path.join(dirpath, chg_fn))
+                 cpf = ChangePendingFile()
+                 cpf.filename = chg_fn
+                 cpf.size = entry['size']
+                 cpf.md5sum = entry['md5sum']
+ 
+                 if entry.has_key('sha1sum'):
+                     cpf.sha1sum = entry['sha1sum']
+                 else:
+                     f.seek(0)
+                     cpf.sha1sum = apt_pkg.sha1sum(f)
+ 
+                 if entry.has_key('sha256sum'):
+                     cpf.sha256sum = entry['sha256sum']
+                 else:
+                     f.seek(0)
+                     cpf.sha256sum = apt_pkg.sha256sum(f)
+ 
+                 session.add(cpf)
+                 files.append(cpf)
+                 f.close()
+ 
+             except IOError:
+                 # Can't find the file, try to look it up in the pool
+                 poolname = poolify(entry["source"], entry["component"])
+                 l = get_location(cnf["Dir::Pool"], entry["component"], session=session)
+ 
+                 found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
+                                                  entry['size'],
+                                                  entry["md5sum"],
+                                                  l.location_id,
+                                                  session=session)
+ 
+                 if found is None:
+                     Logger.log(["E: Found multiple files for pool (%s) for %s" % (chg_fn, entry["component"])])
+                 elif found is False and poolfile is not None:
+                     Logger.log(["E: md5sum/size mismatch for %s in pool" % (chg_fn)])
+                 else:
+                     if poolfile is None:
+                         Logger.log(["E: Could not find %s in pool" % (chg_fn)])
+                     else:
+                         chg.poolfiles.append(poolfile)
+ 
+         chg.files = files
  
          session.commit()
+         chg = session.query(DBChange).filter_by(changesname = self.changes_file).one()
- 
+ 
 -        return session.query(DBChange).filter_by(changesname = self.changes_file).one()
 +        return chg
  
      def unknown_files_fields(self, name):
          return sorted(list( set(self.files[name].keys()) -