diff --git a/dak/dakdb/update101.py b/dak/dakdb/update101.py
new file mode 100644
index 0000000..d62e216
--- /dev/null
+++ b/dak/dakdb/update101.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Add component to association tables, allowing a package to exist in more than one component
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2013 Varnish Software AS
+@author: Tollef Fog Heen <tfheen@varnish-software.com>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from daklib.config import Config
+
+################################################################################
+def do_update(self):
+    print __doc__
+    try:
+        cnf = Config()
+
+        c = self.db.cursor()
+
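+        # Recreate the audit trigger function so that audit.package_changes rows
+        # also record the component, looked up from the new component_id column
+        # on the association tables.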
+        c.execute("""CREATE OR REPLACE FUNCTION trigger_binsrc_assoc_update() RETURNS TRIGGER AS $$
+DECLARE
+  v_data RECORD;
+
+  v_package audit.package_changes.package%TYPE;
+  v_version audit.package_changes.version%TYPE;
+  v_architecture audit.package_changes.architecture%TYPE;
+  v_suite audit.package_changes.suite%TYPE;
+  v_event audit.package_changes.event%TYPE;
+  v_priority audit.package_changes.priority%TYPE;
+  v_component audit.package_changes.component%TYPE;
+  v_section audit.package_changes.section%TYPE;
+BEGIN
+  CASE TG_OP
+    WHEN 'INSERT' THEN v_event := 'I'; v_data := NEW;
+    WHEN 'DELETE' THEN v_event := 'D'; v_data := OLD;
+    ELSE RAISE EXCEPTION 'trigger called for invalid operation (%)', TG_OP;
+  END CASE;
+
+  SELECT suite_name INTO STRICT v_suite FROM suite WHERE id = v_data.suite;
+  SELECT name INTO STRICT v_component FROM component WHERE id = v_data.component_id;
+
+  CASE TG_TABLE_NAME
+    WHEN 'bin_associations' THEN
+      SELECT package, version, arch_string
+        INTO STRICT v_package, v_version, v_architecture
+        FROM binaries LEFT JOIN architecture ON (architecture.id = binaries.architecture)
+        WHERE binaries.id = v_data.bin;
+
+      SELECT priority.priority, section.section
+        INTO v_priority, v_section
+        FROM override
+             JOIN override_type ON (override.type = override_type.id)
+             JOIN priority ON (priority.id = override.priority)
+             JOIN section ON (section.id = override.section)
+             JOIN suite ON (suite.id = override.suite)
+        WHERE override_type.type != 'dsc'
+              AND override.package = v_package AND suite.id = v_data.suite
+              AND override.component = v_data.component_id;
+
+    WHEN 'src_associations' THEN
+      SELECT source, version
+        INTO STRICT v_package, v_version
+        FROM source WHERE source.id = v_data.source;
+      v_architecture := 'source';
+
+      SELECT priority.priority, section.section
+        INTO v_priority, v_section
+        FROM override
+             JOIN override_type ON (override.type = override_type.id)
+             JOIN priority ON (priority.id = override.priority)
+             JOIN section ON (section.id = override.section)
+             JOIN suite ON (suite.id = override.suite)
+        WHERE override_type.type = 'dsc'
+              AND override.package = v_package AND suite.id = v_data.suite
+              AND override.component = v_data.component_id;
+
+    ELSE RAISE EXCEPTION 'trigger called for invalid table (%)', TG_TABLE_NAME;
+  END CASE;
+
+  INSERT INTO audit.package_changes
+    (package, version, architecture, suite, event, priority, component, section)
+    VALUES (v_package, v_version, v_architecture, v_suite, v_event, v_priority, v_component, v_section);
+
+  RETURN NEW;
+END;
+$$ LANGUAGE plpgsql VOLATILE SECURITY DEFINER
+SET search_path = public, pg_temp""")
+
+
+        # Source association table
+        c.execute("""ALTER TABLE src_associations DISABLE TRIGGER trigger_src_associations_audit""")
+        c.execute("""ALTER TABLE src_associations DISABLE TRIGGER modified_src_associations""")
+        c.execute("""DROP INDEX src_associations_suite_key""")
+        c.execute("""ALTER TABLE src_associations ADD COLUMN component_id int REFERENCES component(id)""")
+        c.execute("""CREATE UNIQUE INDEX src_associations_suite_key ON src_associations(suite, source, component_id)""")
+
+        # Put existing sources into the right components
+        c.execute("""
+          INSERT INTO src_associations(suite, source, component_id, created, modified)
+            SELECT DISTINCT sa.suite, sa.source, fam.component_id, sa.created, sa.modified FROM src_associations sa
+              JOIN source ON (sa.source = source.id)
+              JOIN files_archive_map fam ON (source.file = fam.file_id
+                AND fam.archive_id = (SELECT archive_id FROM suite WHERE id = sa.suite))""")
+
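+        # The INSERT above created component-qualified copies of each row; the
+        # old rows without a component can now be dropped before the column is
+        # made NOT NULL.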
+        c.execute("""DELETE FROM src_associations WHERE component_id IS NULL""")
+        c.execute("""ALTER TABLE src_associations ALTER COLUMN component_id SET NOT NULL""")
+        c.execute("""ALTER TABLE src_associations ENABLE TRIGGER modified_src_associations""")
+        c.execute("""ALTER TABLE src_associations ENABLE TRIGGER trigger_src_associations_audit""")
+
+        # Binary association table
+        c.execute("""ALTER TABLE bin_associations DISABLE TRIGGER trigger_bin_associations_audit""")
+        c.execute("""ALTER TABLE bin_associations DISABLE TRIGGER modified_bin_associations""")
+        c.execute("""DROP INDEX bin_associations_suite_key""")
+        c.execute("""ALTER TABLE bin_associations ADD COLUMN component_id int REFERENCES component(id)""")
+        c.execute("""CREATE UNIQUE INDEX bin_associations_suite_key ON bin_associations(suite, bin, component_id)""")
+
+        # Put existing binaries into the right components
+        c.execute("""
+          INSERT INTO bin_associations(suite, bin, component_id, created, modified)
+            SELECT DISTINCT ba.suite, ba.bin, fam.component_id, ba.created, ba.modified FROM bin_associations ba
+              JOIN binaries ON (ba.bin = binaries.id)
+              JOIN files_archive_map fam ON (binaries.file = fam.file_id
+                AND fam.archive_id = (SELECT archive_id FROM suite WHERE id = ba.suite))""")
+
+        c.execute("""DELETE FROM bin_associations WHERE component_id IS NULL""")
+        c.execute("""ALTER TABLE bin_associations ALTER COLUMN component_id SET NOT NULL""")
+        c.execute("""ALTER TABLE bin_associations ENABLE TRIGGER modified_bin_associations""")
+        c.execute("""ALTER TABLE bin_associations ENABLE TRIGGER trigger_bin_associations_audit""")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError as msg:
+        self.db.rollback()
+        raise DBUpdateError('Unable to apply sick update 101, rollback issued. Error message: {0}'.format(msg))
diff --git a/dak/process_policy.py b/dak/process_policy.py
index e88a502..58484a7 100755
--- a/dak/process_policy.py
+++ b/dak/process_policy.py
@@ -128,15 +128,16 @@ def comment_accept(upload, srcqueue, comments, transaction):
         overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
 
     def binary_component_func(db_binary):
-        override = session.query(Override).filter_by(suite=overridesuite, package=db_binary.package) \
+        override = session.query(Override).filter_by(suite=overridesuite, package=db_binary.package,
+                                                     component=db_binary.associations[0].component) \
             .join(OverrideType).filter(OverrideType.overridetype == db_binary.binarytype) \
             .join(Component).one()
         return override.component
 
     def source_component_func(db_source):
-        override = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
-            .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
-            .join(Component).one()
+        override = session.query(Override).filter_by(suite=overridesuite, package=db_source.source,
+                                                     component=db_source.associations[0].component) \
+            .join(OverrideType).filter(OverrideType.overridetype == 'dsc').one()
         return override.component
 
     all_target_suites = [upload.target_suite]
diff --git a/daklib/archive.py b/daklib/archive.py
index 736c6bf..d6adaf8 100644
--- a/daklib/archive.py
+++ b/daklib/archive.py
@@ -152,11 +152,12 @@ class ArchiveTransaction(object):
         architecture = get_architecture(control['Architecture'], session)
 
         (source_name, source_version) = binary.source
-        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
-        source = source_query.filter(DBSource.suites.contains(suite)).first()
+        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version) \
+                                              .join(SrcAssociation)
+        source = source_query.filter(SrcAssociation.suite == suite).first()
         if source is None:
             if source_suites != True:
-                source_query = source_query.join(DBSource.suites) \
+                source_query = source_query.join(SrcAssociation.suite) \
                     .filter(Suite.suite_id == source_suites.c.id)
             source = source_query.first()
             if source is None:
@@ -193,8 +194,8 @@ class ArchiveTransaction(object):
 
             self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)
 
-        if suite not in db_binary.suites:
-            db_binary.suites.append(suite)
+        if suite not in [x.suite for x in db_binary.associations]:
+            db_binary.associations.append(BinAssociation(suite=suite, component=component))
 
         session.flush()
 
@@ -309,7 +310,10 @@ class ArchiveTransaction(object):
 
         created = False
         try:
-            db_source = session.query(DBSource).filter_by(**unique).one()
+            db_source = session.query(DBSource).filter_by(**unique) \
+                        .join(SrcAssociation) \
+                        .filter_by(component=component) \
+                        .one()
             for key, value in rest.iteritems():
                 if getattr(db_source, key) != value:
                     raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
@@ -330,10 +334,10 @@ class ArchiveTransaction(object):
             session.add(db_dsc_file)
             session.flush()
 
-        if suite in db_source.suites:
+        if suite in [x.suite for x in db_source.associations]:
             return db_source
 
-        db_source.suites.append(suite)
+        db_source.associations.append(SrcAssociation(suite=suite, component=component))
 
         if not created:
             for f in db_source.srcfiles:
@@ -438,8 +442,8 @@ class ArchiveTransaction(object):
         # copy binary
         db_file = db_binary.poolfile
         self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
-        if suite not in db_binary.suites:
-            db_binary.suites.append(suite)
+        if suite not in [x.suite for x in db_binary.associations]:
+            db_binary.associations.append(BinAssociation(suite=suite, component=component))
         self.session.flush()
 
     def copy_source(self, db_source, suite, component, allow_tainted=False):
@@ -462,8 +466,8 @@ class ArchiveTransaction(object):
             allow_tainted = True
         for db_dsc_file in db_source.srcfiles:
             self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
-        if suite not in db_source.suites:
-            db_source.suites.append(suite)
+        if suite not in [x.suite for x in db_source.associations]:
+            db_source.associations.append(SrcAssociation(suite=suite, component=component))
         self.session.flush()
 
     def remove_file(self, db_file, archive, component):
@@ -491,7 +495,7 @@ class ArchiveTransaction(object):
         @type  suite: L{daklib.dbconn.Suite}
         @param suite: suite to remove the package from
         """
-        binary.suites.remove(suite)
+        self.session.query(BinAssociation).filter_by(binary=binary, suite=suite).delete()
         self.session.flush()
 
     def remove_source(self, source, suite):
@@ -509,11 +513,12 @@ class ArchiveTransaction(object):
         session = self.session
 
         query = session.query(DBBinary).filter_by(source=source) \
-            .filter(DBBinary.suites.contains(suite))
+            .join(BinAssociation) \
+            .filter(BinAssociation.suite == suite)
         if query.first() is not None:
             raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))
 
-        source.suites.remove(suite)
+        session.query(SrcAssociation).filter_by(source=source, suite=suite).delete()
         session.flush()
 
     def commit(self):
@@ -840,9 +845,9 @@ class ArchiveUpload(object):
         """
         if suite.overridesuite is not None:
             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
+        component = self.session.query(Component).filter_by(component_name=source.component).one()
 
-        # XXX: component for source?
-        query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
+        query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source'], component=component) \
                 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
 
         try:
diff --git a/daklib/checks.py b/daklib/checks.py
index f0d19ad..05c8be7 100644
--- a/daklib/checks.py
+++ b/daklib/checks.py
@@ -705,9 +705,12 @@ class SuiteArchitectureCheck(Check):
 
 class VersionCheck(Check):
     """Check version constraints"""
-    def _highest_source_version(self, session, source_name, suite):
+    def _highest_source_version(self, session, source_name, suite, component):
         db_source = session.query(DBSource).filter_by(source=source_name) \
-            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
+            .join(DBSource.associations) \
+            .filter(SrcAssociation.suite == suite) \
+            .filter(SrcAssociation.component == component) \
+            .order_by(DBSource.version.desc()).first()
         if db_source is None:
             return None
         else:
@@ -715,7 +718,8 @@ class VersionCheck(Check):
 
     def _highest_binary_version(self, session, binary_name, suite, architecture):
         db_binary = session.query(DBBinary).filter_by(package=binary_name) \
-            .filter(DBBinary.suites.contains(suite)) \
+            .join(DBBinary.associations) \
+            .filter(BinAssociation.suite == suite) \
             .join(DBBinary.architecture) \
             .filter(Architecture.arch_string.in_(['all', architecture])) \
             .order_by(DBBinary.version.desc()).first()
@@ -730,7 +734,8 @@ class VersionCheck(Check):
         if upload.changes.source is not None:
             source_name = upload.changes.source.dsc['Source']
             source_version = upload.changes.source.dsc['Version']
-            v = self._highest_source_version(session, source_name, other_suite)
+            component = session.query(Component).filter(Component.component_name == upload.changes.source.component).one()
+            v = self._highest_source_version(session, source_name, other_suite, component)
             if v is not None and not op(version_compare(source_version, v)):
                 raise Reject("Version check failed:\n"
                              "Your upload included the source package {0}, version {1},\n"
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index a900a0e..eaad4e3 100644
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -1875,6 +1875,32 @@ __all__.append('SrcContents')
 
 ################################################################################
 
+class SrcAssociation(ORMObject):
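+    """association of a source package with a suite and a component"""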
+    def __init__(self, source = None, suite = None, component = None):
+        self.source = source
+        self.suite = suite
+        self.component = component
+
+    def properties(self):
+        return ['source', 'suite', 'component']
+
+__all__.append('SrcAssociation')
+
+################################################################################
+
+class BinAssociation(ORMObject):
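+    """association of a binary package with a suite and a component"""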
+    def __init__(self, binary = None, suite = None, component = None):
+        self.binary = binary
+        self.suite = suite
+        self.component = component
+
+    def properties(self):
+        return ['binary', 'suite', 'component']
+
+__all__.append('BinAssociation')
+
+################################################################################
+
 from debian.debfile import Deb822
 
 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
@@ -2657,14 +2683,23 @@ class DBConn(object):
                                  fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                  fingerprint = relation(Fingerprint),
                                  install_date = self.tbl_binaries.c.install_date,
-                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
-                                     backref=backref('binaries', lazy='dynamic')),
+                                 associations = relation(BinAssociation),
                                  extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                                      backref=backref('extra_binary_references', lazy='dynamic')),
                                  key = relation(BinaryMetadata, cascade='all',
                                      collection_class=attribute_mapped_collection('key'))),
                 extension = validator)
 
+        mapper(BinAssociation, self.tbl_bin_associations,
+               properties = dict(bin_associations_id = self.tbl_bin_associations.c.id,
+                                 binary_id = self.tbl_bin_associations.c.bin,
+                                 binary = relation(DBBinary),
+                                 suite_id = self.tbl_bin_associations.c.suite,
+                                 suite = relation(Suite,
+                                    backref=backref('binary_associations', lazy='dynamic')),
+                                 component = relation(Component)))
+
         mapper(Component, self.tbl_component,
                properties = dict(component_id = self.tbl_component.c.id,
                                  component_name = self.tbl_component.c.name),
@@ -2792,7 +2827,7 @@ class DBConn(object):
                                  changedby_id = self.tbl_source.c.changedby,
                                  srcfiles = relation(DSCFile,
                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
-                                 suites = relation(Suite, secondary=self.tbl_src_associations,
-                                     backref=backref('sources', lazy='dynamic')),
+                                 associations = relation(SrcAssociation),
                                  uploaders = relation(Maintainer,
                                      secondary=self.tbl_src_uploaders),
@@ -2800,6 +2835,14 @@ class DBConn(object):
                                      collection_class=attribute_mapped_collection('key'))),
                extension = validator)
 
+        mapper(SrcAssociation, self.tbl_src_associations,
+               properties = dict(src_associations_id = self.tbl_src_associations.c.id,
+                                 source_id = self.tbl_src_associations.c.source,
+                                 source = relation(DBSource),
+                                 suite_id = self.tbl_src_associations.c.suite,
+                                 suite = relation(Suite),
+                                 component = relation(Component)))
+
         mapper(SrcFormat, self.tbl_src_format,
                properties = dict(src_format_id = self.tbl_src_format.c.id,
                                  format_name = self.tbl_src_format.c.format_name))
diff --git a/daklib/upload.py b/daklib/upload.py
index af6bc85..75e70da 100644
--- a/daklib/upload.py
+++ b/daklib/upload.py
@@ -63,7 +63,7 @@ class InvalidFilenameException(Exception):
 class HashedFile(object):
     """file with checksums
     """
-    def __init__(self, filename, size, md5sum, sha1sum, sha256sum, section=None, priority=None):
+    def __init__(self, filename, size, md5sum, sha1sum, sha256sum, section=None, priority=None, component=None):
         self.filename = filename
         """name of the file
         @type: str
@@ -94,13 +94,18 @@ class HashedFile(object):
         @type: str or C{None}
         """
 
+        self.component = component
+        """component or C{None}
+        @type: str or C{None}
+        """
+
         self.priority = priority
         """priority or C{None}
         @type: str of C{None}
         """
 
     @classmethod
-    def from_file(cls, directory, filename, section=None, priority=None):
+    def from_file(cls, directory, filename, section=None, priority=None, component=None):
         """create with values for an existing file
 
         Create a C{HashedFile} object that refers to an already existing file.
@@ -114,6 +119,9 @@ class HashedFile(object):
         @type  section: str or C{None}
         @param section: optional section as given in .changes files
 
+        @type  component: str or C{None}
+        @param component: optional component as given in .changes files
+
         @type  priority: str or C{None}
         @param priority: optional priority as given in .changes files
 
@@ -124,7 +132,7 @@ class HashedFile(object):
         size = os.stat(path).st_size
         with open(path, 'r') as fh:
             hashes = apt_pkg.Hashes(fh)
-        return cls(filename, size, hashes.md5, hashes.sha1, hashes.sha256, section, priority)
+        return cls(filename, size, hashes.md5, hashes.sha1, hashes.sha256, section, priority, component)
 
     def check(self, directory):
         """Validate hashes
@@ -157,6 +165,12 @@ class HashedFile(object):
         if sha256sum != self.sha256sum:
             raise InvalidHashException(self.filename, 'sha256sum', self.sha256sum, sha256sum)
 
+def parse_component_from_section(section):
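+    """Return the component prefix of a Section value (e.g. "contrib" for
+    "contrib/net"); sections without a prefix belong to "main".
+    """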
+    fields = section.split('/')
+    if len(fields) > 1:
+        return fields[0]
+    return "main"
+
 def parse_file_list(control, has_priority_and_section):
     """Parse Files and Checksums-* fields
 
@@ -180,7 +194,8 @@ def parse_file_list(control, has_priority_and_section):
 
         if has_priority_and_section:
             (md5sum, size, section, priority, filename) = line.split()
-            entry = dict(md5sum=md5sum, size=long(size), section=section, priority=priority, filename=filename)
+            component = parse_component_from_section(section)
+            entry = dict(md5sum=md5sum, size=long(size), section=section, component=component, priority=priority, filename=filename)
         else:
             (md5sum, size, filename) = line.split()
             entry = dict(md5sum=md5sum, size=long(size), filename=filename)
@@ -301,8 +316,10 @@ class Changes(object):
             for f in self.files.itervalues():
                 if re_file_dsc.match(f.filename) or re_file_source.match(f.filename):
                     source_files.append(f)
+                    # XXX: ensure component doesn't change
+                    component = f.component
             if len(source_files) > 0:
-                self._source = Source(self.directory, source_files, self._keyrings, self._require_signature)
+                self._source = Source(self.directory, source_files, self._keyrings, self._require_signature, component=component)
         return self._source
 
     @property
@@ -474,7 +491,7 @@ class Binary(object):
 class Source(object):
     """Representation of a source package
     """
-    def __init__(self, directory, hashed_files, keyrings, require_signature=True):
+    def __init__(self, directory, hashed_files, keyrings, require_signature=True, component=None):
         self.hashed_files = hashed_files
         """list of source files (including the .dsc itself)
         @type: list of L{HashedFile}
@@ -495,6 +512,7 @@ class Source(object):
         data = open(dsc_file_path, 'r').read()
         self._signed_file = SignedFile(data, keyrings, require_signature)
         self.dsc = apt_pkg.TagSection(self._signed_file.contents)
         """dict to access fields in the .dsc file
         @type: dict-like
         """
+        self._component = component
+        """component name as passed to the constructor, or C{None}
+        @type: str or C{None}
+        """
@@ -502,9 +520,9 @@ class Source(object):
         self._files = None
 
     @classmethod
-    def from_file(cls, directory, filename, keyrings, require_signature=True):
+    def from_file(cls, directory, filename, keyrings, require_signature=True, component=None):
         hashed_file = HashedFile.from_file(directory, filename)
-        return cls(directory, [hashed_file], keyrings, require_signature)
+        return cls(directory, [hashed_file], keyrings, require_signature, component)
 
     @property
     def files(self):
@@ -534,18 +552,15 @@ class Source(object):
 
     @property
     def component(self):
-        """guessed component name
-
-        Might be wrong. Don't rely on this.
+        """Component name
 
         @type: str
         """
+        if self._component:
+            return self._component
         if 'Section' not in self.dsc:
             return 'main'
-        fields = self.dsc['Section'].split('/')
-        if len(fields) > 1:
-            return fields[0]
-        return "main"
+        return parse_component_from_section(self.dsc['Section'])
 
     @property
     def filename(self):
