3 """Process the Invirt build queue.
5 The Invirtibuilder handles package builds and uploads. On demand, it
6 attempts to build a particular package.
8 If the build succeeds, the new version of the package is uploaded to
9 the apt repository, tagged in its git repository, and the Invirt
10 superproject is updated to point at the new version.
12 If the build fails, the Invirtibuilder sends mail with the build log.
14 The build queue is tracked via files in /var/lib/invirt-dev/queue. In
15 order to maintain ordering, all filenames in that directory are the
16 timestamp of their creation time.
18 Each queue file contains a file of the form
20 pocket package hash principal
22 where pocket is one of the pockets globally configured in
23 build.pockets. For instance, the pockets in XVM are "prod" and "dev".
25 principal is the Kerberos principal that requested the build.
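
# An illustrative queue entry (hypothetical values): a file named for its
# creation timestamp, e.g. /var/lib/invirt-dev/queue/1270000000.12, whose
# single line might read
#
#     prod invirt-base 0123abcd builder/xvm.mit.edu@ATHENA.MIT.EDU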

from __future__ import with_statement

import contextlib
import glob
import os
import shutil
import subprocess
import tempfile
import traceback

import pyinotify
from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config
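
# The per-build log file handle: build() opens one per job and logAndRun()
# writes to it. A module-level default is assumed here so the name exists
# before the first build starts.
logfile = None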


def logAndRun(cmd, *args, **kwargs):
    # Always grab stdout, even if the caller doesn't need it.
    # TODO: don't slurp it all into memory in that case.
    if 'stdout' in kwargs and kwargs['stdout'] is None:
        del kwargs['stdout']
    kwargs['stderr'] = logfile
    logfile.write('---> Ran %s\n' % (cmd, ))
    if 'stdin_str' in kwargs:
        logfile.write('STDIN:\n')
        logfile.write(kwargs['stdin_str'])
    logfile.write('STDERR:\n')
    output = c.captureOutput(cmd, *args, **kwargs)
    logfile.write('STDOUT:\n')
    logfile.write(output)
    return output


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects,
    one for each section of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the name of the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with underscores."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '_')
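
# sanitizeVersion example (hypothetical version): "1:2.0~beta1-3" loses its
# epoch and becomes "2.0_beta1-3", which is a valid git tag name.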


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    logAndRun(['reprepro-env', 'copy',
               b.pocketToApt(dst_pocket),
               b.pocketToApt(src_pocket),
               package] + binaries)


def sbuild(package, ref, distro, arch, workdir, arch_all=False):
    """Build a package for a particular architecture and distro."""
    # We append a suffix like ~ubuntu8.04 to differentiate the same
    # version built for multiple distros.
    nmutag = b.distroToSuffix(distro)
    env = os.environ.copy()
    env['NMUTAG'] = nmutag

    # Run sbuild with a hack in place to append arbitrary versions.
    args = ['perl', '-I/usr/share/invirt-dev', '-MSbuildHack',
            '/usr/bin/sbuild',
            '--binNMU=171717', '--make-binNMU=Build with sbuild',
            '-v', '-d', distro, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    logAndRun(args, cwd=workdir, env=env)


def sbuildAll(package, ref, distro, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
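    # Architecture:all packages are built alongside the amd64 build
    # (arch_all=True below), so a package that is only "all" still gets
    # built exactly once.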
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, distro, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, distro, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at the given commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
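    # Pockets that allow backtracking (ones whose contents may be replaced,
    # presumably a development pocket) are never tagged, since the same
    # version could later be rebuilt with different contents.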
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        branch = b.pocketToGit(pocket)
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))
        logAndRun(
            ['git', 'tag', '-m', tag_msg, '--', sanitizeVersion(version),
             commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    logAndRun(['git', 'update-ref', 'refs/heads/%s' % branch, commit],
              cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
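    # In a pocket that allows backtracking, an upload that conflicts with
    # binaries already in the apt repository removes the old binaries and
    # retries; otherwise the conflict is fatal.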
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
        try:
            logAndRun(upload)
        except subprocess.CalledProcessError, e:
            if not force:
                raise
            packages = deb822.Changes(open(changes).read())['Binary']
            for package in packages.split():
                logAndRun(['reprepro-env', 'remove', apt, package])
            logAndRun(upload)


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to the given
    commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = logAndRun(['git', 'ls-tree', branch],
                     cwd=superproject).strip()
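
    # Each line of ls-tree output is "<mode> <type> <sha>\t<name>"; submodules
    # appear as mode-160000 "commit" entries (gitlinks) that point at a commit
    # in the package's own repository.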
    tree_items = dict((k, v) for (v, k) in (x.split("\t") for x in tree.split("\n")))
    created = package not in tree_items
    tree_items[package] = "160000 commit " + commit

    # If "created" is true, we need to check if the package is
    # mentioned in .gitmodules, and add it if not.
    if created:
        gitmodules = logAndRun(['git', 'cat-file', 'blob', '%s:.gitmodules' % (branch)],
                               cwd=superproject)
        if ('[submodule "%s"]' % (package)) not in gitmodules.split("\n"):
            gitmodules += """[submodule "%s"]
\tpath = %s
\turl = ../packages/%s.git
""" % (package, package, package)
            gitmodules_hash = logAndRun(['git', 'hash-object', '-w', '--stdin'],
                                        cwd=superproject,
                                        stdin_str=gitmodules).strip()
            tree_items['.gitmodules'] = "100644 blob " + gitmodules_hash

    new_tree = "\n".join("%s\t%s" % (v, k) for (k, v) in tree_items.iteritems())
    new_tree_id = logAndRun(['git', 'mktree', '--missing'],
                            cwd=superproject,
                            stdin_str=new_tree).strip()

    if created:
        commit_msg = 'Add %s at version %s'
    else:
        commit_msg = 'Update %s to version %s'
    commit_msg = ((commit_msg + '\n\n'
                   'Requested by %s') % (package,
                                         version.full_version,
                                         principal))
    new_commit = logAndRun(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    logAndRun(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
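    # Open the work directory up (world readable/traversable), presumably so
    # that its build products can be read when they are uploaded to the apt
    # repository by uploadBuild().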
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit, build_id):
    """Checkout the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp(prefix=("b%d-" % build_id))
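    # Stream a "git archive" of the requested commit through tar to populate
    # the working directory, rather than cloning the whole repository.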
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit],
            stdout=subprocess.PIPE)
        p_tar = subprocess.Popen(['tar', '-x'],
                                 stdin=p_archive.stdout,
                                 cwd=workdir)
        p_archive.wait()
        p_tar.wait()
        yield workdir
    finally:
        shutil.rmtree(workdir)
308 """Deal with items in the build queue.
310 When triggered, iterate over build queue items one at a time,
311 until there are no more pending build jobs.
316 stage = 'processing incoming job'
317 queue = os.listdir(b._QUEUE_DIR)
322 job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
323 pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
        if not os.path.exists(logdir):
            os.makedirs(logdir)

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)

            logfile = open(os.path.join(logdir, '%s.log' % db.package), 'w')

            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)
            version = b.getVersion(package, commit)
            db.version = str(version)
            b.runHook('pre-build', [str(db.build_id)])

            env = dict(os.environ)
            env['GIT_COMMITTER_NAME'] = config.build.tagger.name
            env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception.)
            if src != True:
                # TODO: cut out this code duplication
                db.failed_stage = 'tagging submodule before copying package'
                tagSubmodule(pocket, package, commit, principal, version, env)
                db.failed_stage = 'updating submodule branches before copying package'
                updateSubmoduleBranch(pocket, package, commit)
                db.failed_stage = 'updating superproject before copying package'
                updateSuperproject(pocket, package, commit, principal, version, env)
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)

            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves.
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit, db.build_id) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz.
                    logAndRun(['dpkg-buildpackage', '-us', '-uc', '-S'],
                              cwd=packagedir)

                    db.failed_stage = 'building binary packages'
                    sbuildAll(package, commit, b.pocketToDistro(pocket), workdir)
                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            if logfile is not None:
                logfile.close()

            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item.
            os.unlink(os.path.join(b._QUEUE_DIR, build))
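
            # Only the success path above sets db.succeeded; a failed build is
            # assumed to leave it at the model's falsy default, which routes it
            # to the failed-build hook below.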
            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()