Make the default build notifications more useful
[invirt/packages/invirt-dev.git] / invirtibuilder
#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superproject is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. In
order to maintain ordering, all filenames in that directory are the
timestamp of their creation time.

Each queue file contains a single line of the form

    pocket package hash principal

where pocket is one of the pockets globally configured in
build.pockets (for instance, the pockets in XVM are "prod" and "dev"),
and principal is the Kerberos principal that requested the build.
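
For example, a hypothetical queue entry requesting a build of the
invirt-web package into the prod pocket might look like:

    prod invirt-web 1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b builder@EXAMPLE.COM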
26 """
27
28
29 from __future__ import with_statement
30
31 import contextlib
32 import glob
33 import os
34 import re
35 import shutil
36 import subprocess
37 import tempfile
38 import traceback
39
40 import pyinotify
41
42 from debian_bundle import deb822
43
44 import invirt.builder as b
45 import invirt.common as c
46 from invirt import database
47 from invirt.config import structs as config
48
49
50 DISTRIBUTION = 'hardy'
51
52
53 def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects,
    one for each paragraph (section) of the debian/control file. Each
    Deb822 object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
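    # Per Debian policy, the Architecture field may name specific
    # architectures (e.g. "amd64 i386") or use the special values
    # "all" and "any".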
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
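    # For example, a hypothetical package "invirt-base" at Debian
    # version 0.1.2-3 would yield "invirt-base_0.1.2-3.dsc".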
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
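    # For example, the version "1:2.0~beta1-1" becomes "2.0.beta1-1":
    # the epoch is dropped and the tilde replaced.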
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
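    # Rather than rebuilding, this copies the already-built packages
    # between apt suites, roughly:
    #   reprepro-env copy <dst-suite> <src-suite> <source-package> <binary>...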
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
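    # A typical resulting invocation (hypothetical .dsc name) is:
    #   sbuild -v -d hardy --arch amd64 -A mypackage_1.0-1.dsc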
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
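    # Architecture-independent ("all") packages are built once, on
    # amd64, by passing -A; architecture-dependent packages are built
    # separately for each architecture we support.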
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, this creates a new tag
    of the version at commit.

    This function doesn't need to care about lock contention.
    git-receive-pack updates one ref at a time, and only takes out a
    lock for that ref after it's passed the update hook. Because we
    reject pushes to tags in the update hook, no push can ever take
    out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        # Name the tag after the sanitized version; characters like
        # '~' and ':' are not allowed in git ref names.
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
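        # reprepro refuses to replace an existing version of a package.
        # For pockets that allow_backtracking, remove the existing
        # binaries and retry the upload instead of failing.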
        try:
            c.captureOutput(upload)
        except subprocess.CalledProcessError, e:
            if not force:
                raise
            package = deb822.Changes(open(changes).read())['Binary']
            c.captureOutput(['reprepro-env', 'remove', apt, package])
            c.captureOutput(upload)


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

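    # In ls-tree output the submodule shows up as a line of the form
    # "160000 commit <sha>\t<package>"; rewrite the sha for this
    # package and turn the result back into a tree object.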
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    """Make the work directory world-readable and traversable (0755)."""
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager yields that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context exits, the working directory is automatically
    deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
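        # Export the tree at `commit` from the package's repository
        # into workdir/<package>/ by piping git archive into tar.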
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)


def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        stage = 'processing incoming job'
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

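        # Queue filenames are their creation timestamps, so the oldest
        # pending job sorts first.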
        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

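        # Record the job in the database before doing any real work;
        # the stages below update this row with progress and results.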
        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)
            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)
            version = b.getVersion(package, commit)
            db.version = str(version)
            b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
                                    db.commit, db.principal, db.version, str(db.inserted_at)])

            env = dict(os.environ)
            env['GIT_COMMITTER_NAME'] = config.build.tagger.name
            env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception)
            if src != True:
                # TODO: cut out this code duplication
                db.failed_stage = 'tagging submodule before copying package'
                tagSubmodule(pocket, package, commit, principal, version, env)
                db.failed_stage = 'updating submodule branches before copying package'
                updateSubmoduleBranch(pocket, package, commit)
                db.failed_stage = 'updating superproject before copying package'
                updateSuperproject(pocket, package, commit, principal, version, env)
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)

            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir,
                                    stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(os.path.join(workdir, 'build-*.log')):
                            shutil.copy(log, logdir)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()