#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superproject is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. In
order to maintain ordering, each file in that directory is named with
the timestamp of its creation.

Each queue file contains a single line of the form

    pocket package hash principal

where pocket is one of the pockets globally configured in
build.pockets. For instance, the pockets in XVM are "prod" and "dev".

principal is the Kerberos principal that requested the build.
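
For example (illustrative values only), a queue file named
1294627900.815603 might contain the line

    prod invirt-web 0123456789abcdef0123456789abcdef01234567 builder@EXAMPLE.COM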
"""


from __future__ import with_statement

import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify

from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config


DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects,
    one for each section of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
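    # Illustrative example (made-up names): package "invirt-web" at
    # Debian version "0.1.2-1" yields "invirt-web_0.1.2-1.dsc"; any
    # epoch is not part of the filename.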
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
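    # For example (illustrative): version "1:0.7.12~rc1-1" becomes
    # "0.7.12.rc1-1" -- the epoch is dropped and "~" becomes ".".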
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir, stdout=None)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
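    # Architecture-independent ("all") binaries are built alongside
    # the amd64 build (sbuild's -A flag); the i386 build then only
    # produces architecture-specific binaries.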
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at the given commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
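    # Only pockets that do not allow_backtracking get immutable
    # version tags; a backtracking pocket may later move back to an
    # older version, so tagging it would be misleading.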
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        # The tag name is the sanitized version (git tags can't
        # contain "~" or ":"), pointing at the commit we just built.
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), commit],
            cwd=b.getRepo(package),
            stdout=None,
            env=env)


def updateSubmoduleBranch(pocket, package, ref):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, ref],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        c.captureOutput(['reprepro-env',
                         '--ignore=wrongdistribution',
                         'include',
                         apt,
                         changes])


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
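    # Build the new superproject commit with git plumbing: read the
    # current tree, splice in the new submodule commit, write the
    # modified tree, commit it, and advance the pocket's branch ref.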
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    """Make the temporary work directory world-readable."""
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
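        # Export the package source at the requested commit by piping
        # "git archive" into tar, rather than cloning the repository.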
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)


def reportBuild(build):
    """Run hooks to report the results of a build attempt."""

    c.captureOutput(['run-parts',
                     '--arg=%s' % build.build_id,
                     '--',
                     b._HOOKS_DIR])


def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        stage = 'processing incoming job'
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

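        # Queue filenames are creation timestamps, so the smallest
        # filename is the oldest pending job.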
        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            src = b.validateBuild(pocket, package, commit)
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            db.commit = commit = c.captureOutput(['git', 'rev-parse', commit],
                                                 cwd=b.getRepo(package)).strip()

            db.version = str(b.getVersion(package, commit))

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception.)
            if src != True:
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves.
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz
                    c.captureOutput(
                        ['dpkg-buildpackage', '-us', '-uc', '-S'],
                        cwd=packagedir,
                        stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        # Save the sbuild logs whether or not the
                        # build succeeded.
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(os.path.join(workdir, '*.build')):
                            shutil.copy2(log, logdir)

                    db.failed_stage = 'processing metadata'
                    env = dict(os.environ)
                    env['GIT_COMMITTER_NAME'] = config.build.tagger.name
                    env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
                    version = b.getVersion(package, commit)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            reportBuild(db)


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

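    # Manual pyinotify event loop: dispatch any queued events, block
    # until new events arrive, then read them in for the next pass.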
    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()