#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superproject is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. To
maintain ordering, files in that directory are named with their
creation timestamps.

Each queue file contains a single line of the form

    pocket package hash principal

where pocket is one of the pockets globally configured in
build.pockets. For instance, the pockets in XVM are "prod" and "dev".

principal is the Kerberos principal that requested the build.
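
For example, a queue entry might look like the following (all values
are purely illustrative):

    prod invirt-base 0123456789abcdef0123456789abcdef01234567 user@EXAMPLE.COM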
"""


from __future__ import with_statement

import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify

from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config


DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects, one
    for each section of the debian/control file. Each Deb822 object
    acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
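    # dpkg-source names the .dsc file <package>_<version>.dsc, with any
    # epoch omitted from the version; mirror that convention here.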
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (
        package,
        v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with underscores."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '_')


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
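    # reprepro's copy command operates on package names, so pass the
    # source package's name along with all of its binary packages.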
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
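    # Architecture-independent ('all') packages are built once, as part
    # of the amd64 build (sbuild's -A flag); 'any' packages are built
    # for every architecture we support.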
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at the given commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        branch = b.pocketToGit(pocket)
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, '--', sanitizeVersion(version),
             commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
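        # In a pocket that allows backtracking, the archive may already
        # contain a conflicting version; if the upload fails, remove the
        # existing binaries and retry it.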
        try:
            c.captureOutput(upload)
        except subprocess.CalledProcessError, e:
            if not force:
                raise
            binaries = deb822.Changes(open(changes).read())['Binary'].split()
            c.captureOutput(['reprepro-env', 'remove', apt] + binaries)
            c.captureOutput(upload)


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

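    # A submodule appears in ls-tree output as a line of the form
    # '160000 commit <sha>\t<package>'; rewrite this package's line to
    # point at the new commit.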
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    """Make the work directory world-readable and traversable (0755)."""
    os.chmod(workdir, 0755)

@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
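        # Export the package source at the requested commit by piping
        # 'git archive' into tar; --prefix unpacks it into a
        # subdirectory named after the package.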
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)

def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        stage = 'processing incoming job'
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

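        # Queue filenames are creation timestamps, so the smallest name
        # is the oldest pending job.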
        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)
            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)
            version = b.getVersion(package, commit)
            db.version = str(version)
            b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
                                    db.commit, db.principal, db.version, str(db.inserted_at)])

            env = dict(os.environ)
            env['GIT_COMMITTER_NAME'] = config.build.tagger.name
            env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception)
            if src != True:
                # TODO: cut out this code duplication
                db.failed_stage = 'tagging submodule before copying package'
                tagSubmodule(pocket, package, commit, principal, version, env)
                db.failed_stage = 'updating submodule branches before copying package'
                updateSubmoduleBranch(pocket, package, commit)
                db.failed_stage = 'updating superproject before copying package'
                updateSuperproject(pocket, package, commit, principal, version, env)
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)

            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild would
                    # complain about not having an orig.tar.gz, so we call
                    # dpkg-buildpackage directly.
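                    #
                    # -S builds only the source package; -us and -uc
                    # skip signing the .dsc and .changes files.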
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir,
                                    stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(os.path.join(workdir, 'build-*.log')):
                            shutil.copy(log, logdir)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])

class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
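    # Watch for queue files that are created in place or moved into the
    # queue directory.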
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

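    # Standard pyinotify loop: dispatch any queued events, then block
    # until more arrive.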
    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()