#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superproject is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. To
maintain ordering, each filename in that directory is the timestamp of
its creation time.

Each queue file contains a single line of the form

    pocket package hash principal

where pocket is one of the pockets globally configured in
build.pockets (for instance, the pockets in XVM are "prod" and "dev"),
hash is the git commit to build, and principal is the Kerberos
principal that requested the build.
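
For example, a queue file might contain a line like this (the values
here are made up):

    prod invirt-web 8cf96aba9ad99b8a0b433cf28864e36825d9af31 builder@EXAMPLE.COM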
26 """
27
28
29 from __future__ import with_statement
30
31 import contextlib
32 import glob
33 import os
34 import re
35 import shutil
36 import subprocess
37 import tempfile
38 import traceback
39
40 import pyinotify
41
42 from debian_bundle import deb822
43
44 import invirt.builder as b
45 import invirt.common as c
46 from invirt import database
47 from invirt.config import structs as config
48
49
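# Distribution passed to sbuild's -d option; "hardy" is Ubuntu 8.04.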
DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects,
    one for each paragraph of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
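    # For example (made-up values), package "invirt-web" at upstream
    # version 0.1.0 with Debian revision 2 yields "invirt-web_0.1.0-2.dsc".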
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (
        package,
        v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
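    # For example (made-up version), "1:2.0~rc1-3" becomes "2.0.rc1-3";
    # the epoch is dropped because upstream_version never includes it.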
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
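    # reprepro's copy command takes the destination distribution, then
    # the source distribution, then the package names to copy;
    # reprepro-env is presumably a wrapper that points reprepro at this
    # repository's configuration.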
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
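    # "any" packages must be built on every architecture we support,
    # while "all" (architecture-independent) packages only need to be
    # built once, so they ride along with the amd64 build via -A.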
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at the given commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        branch = b.pocketToGit(pocket)
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

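        # Name the tag after the sanitized version: epochs and tildes
        # aren't valid in git ref names, which is what sanitizeVersion
        # exists to work around.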
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
        try:
            c.captureOutput(upload)
        except subprocess.CalledProcessError, e:
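            # reprepro refuses to include a package whose version is
            # not newer than what the pocket already has. If this
            # pocket allows backtracking, remove the existing binaries
            # and retry the upload instead of giving up.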
            if not force:
                raise
            package = deb822.Changes(open(changes).read())['Binary']
            c.captureOutput(['reprepro-env', 'remove', apt, package])
            c.captureOutput(upload)


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

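    # A submodule entry in git ls-tree output looks like
    #     160000 commit <sha1>\t<path>
    # so rewrite just the sha1 on the line whose path is this package.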
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    """Make the workdir world-readable and traversable.

    tempfile.mkdtemp creates directories that are only accessible to
    their owner, so open this one up.
    """
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
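    # git archive writes a tar stream of the tree at `commit` (with
    # every path prefixed by the package name) to stdout; untarring it
    # into the workdir gives us a clean export of the source.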
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)


def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        stage = 'processing incoming job'
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

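        # Queue filenames are creation timestamps, so the minimum is
        # the oldest pending job.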
        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)
            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)

            db.version = str(b.getVersion(package, commit))
            b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
                                    db.commit, db.principal, db.version,
                                    str(db.inserted_at)])

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception.)
            if src != True:
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves.
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz.
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir,
                                    stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(os.path.join(workdir, 'build-*.log')):
                            shutil.copy(log, logdir)

                    db.failed_stage = 'processing metadata'
                    env = dict(os.environ)
                    env['GIT_COMMITTER_NAME'] = config.build.tagger.name
                    env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
                    version = b.getVersion(package, commit)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal,
                                 version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal,
                                       version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item.
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
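    # Watch for queue files that are created in place (IN_CREATE) or
    # moved into the queue directory (IN_MOVED_TO).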
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

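    # Standard pyinotify event loop: handle anything already queued,
    # then block in check_events() until more events arrive and read
    # them in.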
    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()