If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
-superrepo is updated to point at the new version.
+superproject is updated to point at the new version.
If the build fails, the Invirtibuilder sends mail with the build log.
pocket package hash principal
where pocket is one of the pockets globally configured in
-git.pockets. For instance, the pockets in XVM are "prod" and "dev".
+build.pockets. For instance, the pockets in XVM are "prod" and "dev".
principal is the Kerberos principal that requested the build.
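+For example (hypothetical values): "prod invirt-web 1234abcd user@EXAMPLE.COM".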
"""
from __future__ import with_statement
import contextlib
+import glob
import os
import re
import shutil
import subprocess
+import tempfile
+import traceback
import pyinotify
+from debian_bundle import deb822
+
import invirt.builder as b
+import invirt.common as c
from invirt import database
+from invirt.config import structs as config
DISTRIBUTION = 'hardy'
-
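+# Open build log for the job currently being processed; set in build() and
+# written to by logAndRun().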
+logfile = None
+
+def logAndRun(cmd, *args, **kwargs):
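+ """Run cmd, sending its stderr to the build log as it runs, appending
+ its captured stdout afterwards, and returning that stdout."""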
+ # Always grab stdout, even if the caller doesn't need it.
+ # TODO: don't slurp it all into memory in that case.
+ if 'stdout' in kwargs and kwargs['stdout'] is None:
+ del kwargs['stdout']
+ kwargs['stderr'] = logfile
+ logfile.write('---> Ran %s\n' % (cmd, ))
+ logfile.write('STDERR:\n')
+ output = c.captureOutput(cmd, *args, **kwargs)
+ logfile.write('STDOUT:\n')
+ logfile.write(output)
+ return output
def getControl(package, ref):
"""Get the parsed debian/control file for a given package.
acts roughly like a dict.
"""
return deb822.Deb822.iter_paragraphs(
- getGitFile(package, ref, 'debian/control').split('\n'))
+ b.getGitFile(package, ref, 'debian/control').split('\n'))
def getBinaries(package, ref):
def getDscName(package, ref):
"""Return the .dsc file that will be generated for this package."""
- v = getVersion(package, ref)
+ v = b.getVersion(package, ref)
if v.debian_version:
v_str = '%s-%s' % (v.upstream_version,
v.debian_version)
"""Sanitize a Debian package version for use as a git tag.
This function strips the epoch from the version number and
- replaces any tildes with periods."""
- if v.debian_version:
+ replaces any tildes with underscores."""
+ if version.debian_version:
v = '%s-%s' % (version.upstream_version,
version.debian_version)
else:
v = version.upstream_version
- return v.replace('~', '.')
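+ # '~' is not allowed in git ref names (see git-check-ref-format(1)).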
+ return v.replace('~', '_')
-def aptCopy(packages, dst_pocket, src_pocket):
+def aptCopy(package, commit, dst_pocket, src_pocket):
"""Copy a package from one pocket to another."""
binaries = getBinaries(package, commit)
- cpatureOutput(['reprepro-env', 'copy',
- b.pocketToApt(dst_pocket),
- b.pocketToApt(src_pocket),
- package] + binaries)
+ logAndRun(['reprepro-env', 'copy',
+ b.pocketToApt(dst_pocket),
+ b.pocketToApt(src_pocket),
+ package] + binaries)
def sbuild(package, ref, arch, workdir, arch_all=False):
"""Build a package for a particular architecture."""
- args = ['sbuild', '-d', DISTRIBUTION, '--arch', arch]
+ args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
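+ # sbuild's -A flag additionally builds the Architecture: all packages.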
if arch_all:
args.append('-A')
args.append(getDscName(package, ref))
- c.captureOutput(args, cwd=workdir, stdout=None)
+ logAndRun(args, cwd=workdir)
def sbuildAll(package, ref, workdir):
sbuild(package, ref, 'i386', workdir)
-def tagSubmodule(pocket, package, ref, principal):
+def tagSubmodule(pocket, package, commit, principal, version, env):
"""Tag a new version of a submodule.
If this pocket does not allow_backtracking, then this will create
hook. Because we reject pushes to tags in the update hook, no push
can ever take out a lock on any tags.
- I'm sure that long description gives you great confidence in teh
+ I'm sure that long description gives you great confidence in the
legitimacy of my reasoning.
"""
- if config.git.pockets[pocket].get('allow_backtracking', False):
- env = dict(os.environ)
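+ # Only tag pockets that don't allow backtracking: tags are permanent,
+ # and a backtracking pocket may later replace this version.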
+ if not config.build.pockets[pocket].get('allow_backtracking', False):
branch = b.pocketToGit(pocket)
- version = b.getVersion(package, ref)
-
- env['GIT_COMMITTER_NAME'] = config.git.tagger.name
- env['GIT_COMMITTER_EMAIL'] = config.git.tagger.email
tag_msg = ('Tag %s of %s\n\n'
'Requested by %s' % (version.full_version,
package,
principal))
- c.captureOutput(
- ['git', 'tag', '-m', tag_msg, commit],
- stdout=None,
- env=env)
+ logAndRun(
+ ['git', 'tag', '-m', tag_msg, '--', sanitizeVersion(version),
+ commit],
+ env=env,
+ cwd=b.getRepo(package))
-def updateSubmoduleBranch(pocket, package, ref):
+def updateSubmoduleBranch(pocket, package, commit):
"""Update the appropriately named branch in the submodule."""
branch = b.pocketToGit(pocket)
- c.captureOutput(
- ['git', 'update-ref', 'refs/heads/%s' % branch, ref])
+ logAndRun(
+ ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
+ cwd=b.getRepo(package))
def uploadBuild(pocket, workdir):
"""Upload all build products in the work directory."""
+ force = config.build.pockets[pocket].get('allow_backtracking', False)
apt = b.pocketToApt(pocket)
for changes in glob.glob(os.path.join(workdir, '*.changes')):
- c.captureOutput(['reprepro-env',
- 'include',
- '--ignore=wrongdistribution',
- apt,
- changes])
+ upload = ['reprepro-env', '--ignore=wrongdistribution',
+ 'include', apt, changes]
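+ # reprepro will refuse the include if it conflicts with what's already
+ # in the pocket; when the pocket allows backtracking, drop the existing
+ # binaries and retry.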
+ try:
+ logAndRun(upload)
+ except subprocess.CalledProcessError, e:
+ if not force:
+ raise
+ # The Binary field is a space-separated list of binary package names.
+ binaries = deb822.Changes(open(changes).read())['Binary'].split()
+ logAndRun(['reprepro-env', 'remove', apt] + binaries)
+ logAndRun(upload)
-def updateSuperrepo(pocket, package, commit, principal):
- """Update the superrepo.
+def updateSuperproject(pocket, package, commit, principal, version, env):
+ """Update the superproject.
This will create a new commit on the branch for the given pocket
that sets the commit for the package submodule to commit.
Note that there's no locking issue here, because we disallow all
- pushes to the superrepo.
+ pushes to the superproject.
"""
- superrepo = os.path.join(b._REPO_DIR, 'packages.git')
+ superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
branch = b.pocketToGit(pocket)
- tree = c.captureOutput(['git', 'ls-tree', branch],
- cwd=superrepo)
+ tree = logAndRun(['git', 'ls-tree', branch],
+ cwd=superproject).strip()
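+ # Rewrite the gitlink entry for this package in the superproject's tree
+ # so that it points at the new commit.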
new_tree = re.compile(
r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
- r'\1%s\2' % commit,
+ r'\g<1>%s\g<2>' % commit,
tree)
- new_tree_id = c.captureOutput(['git', 'mktree'],
- cwd=superrepo,
- stdin_str=new_tree)
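+ # --missing is needed because the submodule commit is not an object in
+ # the superproject repository.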
+ new_tree_id = logAndRun(['git', 'mktree', '--missing'],
+ cwd=superproject,
+ stdin_str=new_tree).strip()
commit_msg = ('Update %s to version %s\n\n'
'Requested by %s' % (package,
version.full_version,
principal))
- new_commit = c.captureOutput(
- ['git', 'commit-tree', new_tree_hash, '-p', branch],
- cwd=superrepo,
+ new_commit = logAndRun(
+ ['git', 'commit-tree', new_tree_id, '-p', branch],
+ cwd=superproject,
env=env,
- stdin_str=commit_msg)
+ stdin_str=commit_msg).strip()
- c.captureOutput(
+ logAndRun(
['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
- cwd=superrepo)
+ cwd=superproject)
+def makeReadable(workdir):
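+ """Make the workdir world-readable so that later stages (the apt upload
+ happens next) can read the build products."""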
+ os.chmod(workdir, 0755)
+
@contextlib.contextmanager
-def packageWorkdir(package):
+def packageWorkdir(package, commit):
"""Checkout the package in a temporary working directory.
This context manager returns that working directory. The requested
p_archive = subprocess.Popen(
['git', 'archive',
'--remote=file://%s' % b.getRepo(package),
- '--prefix=%s' % package,
+ '--prefix=%s/' % package,
commit,
],
stdout=subprocess.PIPE,
finally:
shutil.rmtree(workdir)
-
-def reportBuild(build):
- """Run hooks to report the results of a build attempt."""
-
- c.captureOutput(['run-parts',
- '--arg=%s' % build.build_id,
- '--',
- b._HOOKS_DIR])
-
-
def build():
"""Deal with items in the build queue.
When triggered, iterate over build queue items one at a time,
until there are no more pending build jobs.
"""
+ global logfile
+
while True:
stage = 'processing incoming job'
queue = os.listdir(b._QUEUE_DIR)
db.commit = commit
db.principal = principal
database.session.save_or_update(db)
- database.commit()
+ database.session.commit()
+
+ database.session.begin()
- database.begin()
+ logdir = os.path.join(b._LOG_DIR, str(db.build_id))
+ if not os.path.exists(logdir):
+ os.makedirs(logdir)
try:
db.failed_stage = 'validating job'
- src = validateBuild(pocket, package, commit)
+ # Don't expand the commit in the DB until we're sure the user
+ # isn't trying to be tricky.
+ b.ensureValidPackage(package)
+
+ logfile = open(os.path.join(logdir, '%s.log' % db.package), 'w')
+
+ db.commit = commit = b.canonicalize_commit(package, commit)
+ src = b.validateBuild(pocket, package, commit)
+ version = b.getVersion(package, commit)
+ db.version = str(version)
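+ # Let any pre-build hooks see the job's metadata before we start.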
+ b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
+ db.commit, db.principal, db.version, str(db.inserted_at)])
- db.version = str(b.getVersion(package, commit))
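+ # Tags and superproject commits are attributed to the configured tagger.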
+ env = dict(os.environ)
+ env['GIT_COMMITTER_NAME'] = config.build.tagger.name
+ env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
# If validateBuild returns something other than True, then
# it means we should copy from that pocket to our pocket.
# (If the validation failed, validateBuild would have
# raised an exception)
if src != True:
+ # TODO: cut out this code duplication
+ db.failed_stage = 'tagging submodule before copying package'
+ tagSubmodule(pocket, package, commit, principal, version, env)
+ db.failed_stage = 'updating submodule branches before copying package'
+ updateSubmoduleBranch(pocket, package, commit)
+ db.failed_stage = 'updating superproject before copying package'
+ updateSuperproject(pocket, package, commit, principal, version, env)
db.failed_stage = 'copying package from another pocket'
- aptCopy(packages, pocket, src)
+ aptCopy(package, commit, pocket, src)
+
# If we can't copy the package from somewhere, but
# validateBuild didn't raise an exception, then we need to
# do the build ourselves
else:
db.failed_stage = 'checking out package source'
- with packageWorkdir(package) as workdir:
+ with packageWorkdir(package, commit) as workdir:
db.failed_stage = 'preparing source package'
packagedir = os.path.join(workdir, package)
# If we were, we could use debuild and get nice
# environment scrubbing. Since we're not, debuild
# complains about not having an orig.tar.gz
- c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
- cwd=packagedir,
- stdout=None)
-
- try:
- db.failed_stage = 'building binary packages'
- sbuildAll(package, commit, workdir)
- finally:
- logdir = os.path.join(b._LOG_DIR, db.build_id)
- if not os.path.exists(logdir):
- os.makedirs(logdir)
-
- for log in glob.glob(os.path.join(workdir, '*.build')):
- os.copy2(log, logdir)
+ logAndRun(['dpkg-buildpackage', '-us', '-uc', '-S'],
+ cwd=packagedir)
+
+ db.failed_stage = 'building binary packages'
+ sbuildAll(package, commit, workdir)
db.failed_stage = 'tagging submodule'
- tagSubmodule(pocket, package, commit, principal)
+ tagSubmodule(pocket, package, commit, principal, version, env)
db.failed_stage = 'updating submodule branches'
updateSubmoduleBranch(pocket, package, commit)
- db.failed_stage = 'updating superrepo'
- updateSuperrepo(pocket, package, commit, principal)
+ db.failed_stage = 'updating superproject'
+ updateSuperproject(pocket, package, commit, principal, version, env)
+ db.failed_stage = 'relaxing permissions on workdir'
+ makeReadable(workdir)
db.failed_stage = 'uploading packages to apt repo'
uploadBuild(pocket, workdir)
db.failed_stage = 'cleaning up'
-
- # Finally, now that everything is done, remove the
- # build queue item
- os.unlink(os.path.join(b._QUEUE_DIR, build))
except:
db.traceback = traceback.format_exc()
else:
db.succeeded = True
db.failed_stage = None
finally:
+ if logfile is not None:
+ logfile.close()
+
database.session.save_or_update(db)
database.session.commit()
- reportBuild(db)
+ # Finally, now that everything is done, remove the
+ # build queue item
+ os.unlink(os.path.join(b._QUEUE_DIR, build))
+ if db.succeeded:
+ b.runHook('post-build', [str(db.build_id)])
+ else:
+ b.runHook('failed-build', [str(db.build_id)])
class Invirtibuilder(pyinotify.ProcessEvent):
"""Process inotify triggers to build new packages."""
- def process_IN_CREATE(self, event):
- """Handle a created file or directory.
+ def process_default(self, event):
+ """Handle an inotify event.
- When an IN_CREATE event comes in, trigger the builder.
+ When an inotify event comes in, trigger the builder.
"""
build()
invirtibuilder = Invirtibuilder()
notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
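+# Trigger on queue files that are created in place as well as ones moved
+# into the queue directory.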
watch_manager.add_watch(b._QUEUE_DIR,
- pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'])
+ pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
+ pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])
# Before inotifying, run any pending builds; otherwise we won't
# get notified for them.