#!/usr/bin/python """Process the Invirt build queue. The Invirtibuilder handles package builds and uploads. On demand, it attempts to build a particular package. If the build succeeds, the new version of the package is uploaded to the apt repository, tagged in its git repository, and the Invirt superproject is updated to point at the new version. If the build fails, the Invirtibuilder sends mail with the build log. The build queue is tracked via files in /var/lib/invirt-dev/queue. In order to maintain ordering, all filenames in that directory are the timestamp of their creation time. Each queue file contains a file of the form pocket package hash principal where pocket is one of the pockets globally configured in build.pockets. For instance, the pockets in XVM are "prod" and "dev". principal is the Kerberos principal that requested the build. """ from __future__ import with_statement import contextlib import os import re import shutil import subprocess import pyinotify import invirt.builder as b from invirt import database DISTRIBUTION = 'hardy' def getControl(package, ref): """Get the parsed debian/control file for a given package. This returns a list of debian_bundle.deb822.Deb822 objects, one for each section of the debian/control file. Each Deb822 object acts roughly like a dict. """ return deb822.Deb822.iter_paragraphs( getGitFile(package, ref, 'debian/control').split('\n')) def getBinaries(package, ref): """Get a list of binary packages in a package at a given ref.""" return [p['Package'] for p in getControl(package, ref) if 'Package' in p] def getArches(package, ref): """Get the set of all architectures in any binary package.""" arches = set() for section in getControl(package, ref): if 'Architecture' in section: arches.update(section['Architecture'].split()) return arches def getDscName(package, ref): """Return the .dsc file that will be generated for this package.""" v = getVersion(package, ref) if v.debian_version: v_str = '%s-%s' % (v.upstream_version, v.debian_version) else: v_str = v.upstream_version return '%s_%s.dsc' % ( package, v_str) def sanitizeVersion(version): """Sanitize a Debian package version for use as a git tag. This function strips the epoch from the version number and replaces any tildes with periods.""" if v.debian_version: v = '%s-%s' % (version.upstream_version, version.debian_version) else: v = version.upstream_version return v.replace('~', '.') def aptCopy(packages, dst_pocket, src_pocket): """Copy a package from one pocket to another.""" binaries = getBinaries(package, commit) cpatureOutput(['reprepro-env', 'copy', b.pocketToApt(dst_pocket), b.pocketToApt(src_pocket), package] + binaries) def sbuild(package, ref, arch, workdir, arch_all=False): """Build a package for a particular architecture.""" args = ['sbuild', '-d', DISTRIBUTION, '--arch', arch] if arch_all: args.append('-A') args.append(getDscName(package, ref)) c.captureOutput(args, cwd=workdir, stdout=None) def sbuildAll(package, ref, workdir): """Build a package for all architectures it supports.""" arches = getArches(package, ref) if 'all' in arches or 'any' in arches or 'amd64' in arches: sbuild(package, ref, 'amd64', workdir, arch_all=True) if 'any' in arches or 'i386' in arches: sbuild(package, ref, 'i386', workdir) def tagSubmodule(pocket, package, ref, principal): """Tag a new version of a submodule. If this pocket does not allow_backtracking, then this will create a new tag of the version at ref. This function doesn't need to care about lock contention. git-receive-pack updates one ref at a time, and only takes out a lock for that ref after it's passed the update hook. Because we reject pushes to tags in the update hook, no push can ever take out a lock on any tags. I'm sure that long description gives you great confidence in the legitimacy of my reasoning. """ if not config.build.pockets[pocket].get('allow_backtracking', False): env = dict(os.environ) branch = b.pocketToGit(pocket) version = b.getVersion(package, ref) env['GIT_COMMITTER_NAME'] = config.build.tagger.name env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email tag_msg = ('Tag %s of %s\n\n' 'Requested by %s' % (version.full_version, package, principal)) c.captureOutput( ['git', 'tag', '-m', tag_msg, commit], stdout=None, env=env) def updateSubmoduleBranch(pocket, package, ref): """Update the appropriately named branch in the submodule.""" branch = b.pocketToGit(pocket) c.captureOutput( ['git', 'update-ref', 'refs/heads/%s' % branch, ref]) def uploadBuild(pocket, workdir): """Upload all build products in the work directory.""" apt = b.pocketToApt(pocket) for changes in glob.glob(os.path.join(workdir, '*.changes')): c.captureOutput(['reprepro-env', 'include', '--ignore=wrongdistribution', apt, changes]) def updateSuperproject(pocket, package, commit, principal): """Update the superproject. This will create a new commit on the branch for the given pocket that sets the commit for the package submodule to commit. Note that there's no locking issue here, because we disallow all pushes to the superproject. """ superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git') branch = b.pocketToGit(pocket) tree = c.captureOutput(['git', 'ls-tree', branch], cwd=superproject) new_tree = re.compile( r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub( r'\1%s\2' % commit, tree) new_tree_id = c.captureOutput(['git', 'mktree'], cwd=superproject, stdin_str=new_tree) commit_msg = ('Update %s to version %s\n\n' 'Requested by %s' % (package, version.full_version, principal)) new_commit = c.captureOutput( ['git', 'commit-tree', new_tree_hash, '-p', branch], cwd=superproject, env=env, stdin_str=commit_msg) c.captureOutput( ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit], cwd=superproject) @contextlib.contextmanager def packageWorkdir(package, commit): """Checkout the package in a temporary working directory. This context manager returns that working directory. The requested package is checked out into a subdirectory of the working directory with the same name as the package. When the context wrapped with this context manager is exited, the working directory is automatically deleted. """ workdir = tempfile.mkdtemp() try: p_archive = subprocess.Popen( ['git', 'archive', '--remote=file://%s' % b.getRepo(package), '--prefix=%s' % package, commit, ], stdout=subprocess.PIPE, ) p_tar = subprocess.Popen( ['tar', '-x'], stdin=p_archive.stdout, cwd=workdir, ) p_archive.wait() p_tar.wait() yield workdir finally: shutil.rmtree(workdir) def reportBuild(build): """Run hooks to report the results of a build attempt.""" c.captureOutput(['run-parts', '--arg=%s' % build.build_id, '--', b._HOOKS_DIR]) def build(): """Deal with items in the build queue. When triggered, iterate over build queue items one at a time, until there are no more pending build jobs. """ while True: stage = 'processing incoming job' queue = os.listdir(b._QUEUE_DIR) if not queue: break build = min(queue) job = open(os.path.join(b._QUEUE_DIR, build)).read().strip() pocket, package, commit, principal = job.split() database.session.begin() db = database.Build() db.package = package db.pocket = pocket db.commit = commit db.principal = principal database.session.save_or_update(db) database.commit() database.begin() try: db.failed_stage = 'validating job' src = validateBuild(pocket, package, commit) db.version = str(b.getVersion(package, commit)) # If validateBuild returns something other than True, then # it means we should copy from that pocket to our pocket. # # (If the validation failed, validateBuild would have # raised an exception) if src != True: db.failed_stage = 'copying package from another pocket' aptCopy(packages, pocket, src) # If we can't copy the package from somewhere, but # validateBuild didn't raise an exception, then we need to # do the build ourselves else: db.failed_stage = 'checking out package source' with packageWorkdir(package, commit) as workdir: db.failed_stage = 'preparing source package' packagedir = os.path.join(workdir, package) # We should be more clever about dealing with # things like non-Debian-native packages than we # are. # # If we were, we could use debuild and get nice # environment scrubbing. Since we're not, debuild # complains about not having an orig.tar.gz c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'], cwd=packagedir, stdout=None) try: db.failed_stage = 'building binary packages' sbuildAll(package, commit, workdir) finally: logdir = os.path.join(b._LOG_DIR, db.build_id) if not os.path.exists(logdir): os.makedirs(logdir) for log in glob.glob(os.path.join(workdir, '*.build')): os.copy2(log, logdir) db.failed_stage = 'tagging submodule' tagSubmodule(pocket, package, commit, principal) db.failed_stage = 'updating submodule branches' updateSubmoduleBranch(pocket, package, commit) db.failed_stage = 'updating superproject' updateSuperproject(pocket, package, commit, principal) db.failed_stage = 'uploading packages to apt repo' uploadBuild(pocket, workdir) db.failed_stage = 'cleaning up' except: db.traceback = traceback.format_exc() else: db.succeeded = True db.failed_stage = None finally: database.session.save_or_update(db) database.session.commit() # Finally, now that everything is done, remove the # build queue item os.unlink(os.path.join(b._QUEUE_DIR, build)) reportBuild(db) class Invirtibuilder(pyinotify.ProcessEvent): """Process inotify triggers to build new packages.""" def process_default(self, event): """Handle an inotify event. When an inotify event comes in, trigger the builder. """ build() def main(): """Initialize the inotifications and start the main loop.""" database.connect() watch_manager = pyinotify.WatchManager() invirtibuilder = Invirtibuilder() notifier = pyinotify.Notifier(watch_manager, invirtibuilder) watch_manager.add_watch(b._QUEUE_DIR, pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] | pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO']) # Before inotifying, run any pending builds; otherwise we won't # get notified for them. build() while True: notifier.process_events() if notifier.check_events(): notifier.read_events() if __name__ == '__main__': main()