#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superproject is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. In
order to maintain ordering, each filename in that directory is the
timestamp of its creation.

Each queue file contains a single line of the form

    pocket package hash principal

where pocket is one of the pockets globally configured in
build.pockets (for instance, the pockets in XVM are "prod" and "dev"),
package is the name of the package to build, hash names the git commit
to build it from, and principal is the Kerberos principal that
requested the build.
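
For example (hypothetical values), a request to build commit abc123 of
invirt-base for the prod pocket would look like:

    prod invirt-base abc123 someuser@EXAMPLE.COM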
"""


from __future__ import with_statement

import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify

from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config


DISTRIBUTION = 'hardy'
logfile = None

def logAndRun(cmd, *args, **kwargs):
    """Run a command, logging its stdout and stderr to the build log.

    Returns the command's captured stdout."""
    # Always capture stdout, even if the caller doesn't need it, so
    # that it can be written to the log.
    # TODO: don't slurp it all into memory in that case.
    if 'stdout' in kwargs and kwargs['stdout'] is None:
        del kwargs['stdout']
    kwargs['stderr'] = logfile
    logfile.write('---> Ran %s\n' % (cmd, ))
    logfile.write('STDERR:\n')
    output = c.captureOutput(cmd, *args, **kwargs)
    logfile.write('STDOUT:\n')
    logfile.write(output)
    return output

def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects, one
    for each section of the debian/control file. Each Deb822 object
    acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with underscores."""
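    # For example (hypothetical), version "1:2.0~beta1-3" becomes the
    # tag-safe string "2.0_beta1-3".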
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '_')


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    logAndRun(['reprepro-env', 'copy',
               b.pocketToApt(dst_pocket),
               b.pocketToApt(src_pocket),
               package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    logAndRun(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, this creates a new tag
    named after the version, pointing at the given commit. Otherwise,
    no tag is created.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        logAndRun(
            ['git', 'tag', '-m', tag_msg, '--', sanitizeVersion(version),
             commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    logAndRun(
        ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
        try:
            logAndRun(upload)
        except subprocess.CalledProcessError:
            if not force:
                raise
            # The .changes Binary field is a space-separated list of
            # the binary packages this build produced; remove them all
            # before retrying the upload.
            binaries = deb822.Changes(open(changes).read())['Binary'].split()
            logAndRun(['reprepro-env', 'remove', apt] + binaries)
            logAndRun(upload)


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = logAndRun(['git', 'ls-tree', branch],
                     cwd=superproject).strip()

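    # Each submodule appears in the ls-tree output as a gitlink line of
    # the form (hypothetical hash):
    #
    #   160000 commit 0123456789abcdef0123456789abcdef01234567<TAB>invirt-base
    #
    # Rewriting the hash on our package's line points its gitlink at
    # the new commit.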
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = logAndRun(['git', 'mktree', '--missing'],
                            cwd=superproject,
                            stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = logAndRun(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    logAndRun(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    """Make the workdir readable by other users.

    tempfile.mkdtemp creates directories with mode 0700, so loosen the
    permissions before the build products are uploaded from it."""
    os.chmod(workdir, 0755)

@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Checkout the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        # If either end of the pipeline failed, the checkout is
        # incomplete; fail rather than building from a bad tree.
        archive_status = p_archive.wait()
        tar_status = p_tar.wait()
        if archive_status != 0 or tar_status != 0:
            raise RuntimeError('failed to check out %s at %s' %
                               (package, commit))

        yield workdir
    finally:
        shutil.rmtree(workdir)

def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    global logfile

    while True:
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
        if not os.path.exists(logdir):
            os.makedirs(logdir)

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)

            logfile = open(os.path.join(logdir, '%s.log' % db.package), 'w')

            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)
            version = b.getVersion(package, commit)
            db.version = str(version)
            b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
                                    db.commit, db.principal, db.version, str(db.inserted_at)])

            env = dict(os.environ)
            env['GIT_COMMITTER_NAME'] = config.build.tagger.name
            env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception)
            if src is not True:
                # TODO: cut out this code duplication
                db.failed_stage = 'tagging submodule before copying package'
                tagSubmodule(pocket, package, commit, principal, version, env)
                db.failed_stage = 'updating submodule branches before copying package'
                updateSubmoduleBranch(pocket, package, commit)
                db.failed_stage = 'updating superproject before copying package'
                updateSuperproject(pocket, package, commit, principal, version, env)
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
                
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz
                    logAndRun(['dpkg-buildpackage', '-us', '-uc', '-S'],
                              cwd=packagedir)

                    db.failed_stage = 'building binary packages'
                    sbuildAll(package, commit, workdir)
                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            if logfile is not None:
                logfile.close()

            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])

class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()