3 """Process the Invirt build queue.
5 The Invirtibuilder handles package builds and uploads. On demand, it
6 attempts to build a particular package.
8 If the build succeeds, the new version of the package is uploaded to
9 the apt repository, tagged in its git repository, and the Invirt
10 superproject is updated to point at the new version.
12 If the build fails, the Invirtibuilder sends mail with the build log.
14 The build queue is tracked via files in /var/lib/invirt-dev/queue. In
15 order to maintain ordering, all filenames in that directory are the
16 timestamp of their creation time.
18 Each queue file contains a file of the form
20 pocket package hash principal
22 where pocket is one of the pockets globally configured in
23 build.pockets. For instance, the pockets in XVM are "prod" and "dev".
25 principal is the Kerberos principal that requested the build.
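
# For illustration only (hypothetical values), a queue file named for its
# creation timestamp might contain a single line such as:
#
#   prod invirt-web 0123456789abcdef0123456789abcdef01234567 builder@EXAMPLE.COM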

from __future__ import with_statement

import contextlib, glob, os, re, shutil, subprocess, tempfile, traceback

import pyinotify
from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config
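
# Module-level state assumed by the functions below (a sketch; adjust to the
# deployed configuration): the current build's log file, shared with
# logAndRun(), and the default distribution passed to sbuild.
logfile = None
DISTRIBUTION = 'hardy'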


def logAndRun(cmd, *args, **kwargs):
    # Always grab stdout, even if the caller doesn't need it.
    # TODO: don't slurp it all into memory in that case.
    if 'stdout' in kwargs and kwargs['stdout'] is None:
        del kwargs['stdout']
    kwargs['stderr'] = logfile
    logfile.write('---> Ran %s\n' % (cmd, ))
    logfile.write('STDERR:\n')
    output = c.captureOutput(cmd, *args, **kwargs)
    logfile.write('STDOUT:\n')
    logfile.write(output)
    return output


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterable of debian_bundle.deb822.Deb822 objects,
    one for each paragraph of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)
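
# The epoch, if any, does not appear in the filename. For example
# (hypothetical), version "1:0.1.2-3" of package "invirt-web" yields
# "invirt-web_0.1.2-3.dsc".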


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with underscores."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '_')
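
# For example (hypothetical), Debian version "1:2.0~rc1-3" becomes the tag
# "2.0_rc1-3": the epoch is dropped and "~", which git forbids in ref names,
# becomes "_".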


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    logAndRun(['reprepro-env', 'copy',
               b.pocketToApt(dst_pocket),
               b.pocketToApt(src_pocket),
               package] + binaries)
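
# b.pocketToApt maps a pocket name (e.g. "prod") to the apt suite that
# reprepro manages, so this copies the named package and its binaries from
# the source pocket's suite into the destination's without rebuilding.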


def sbuild(package, ref, distro, arch, workdir, arch_all=False):
    """Build a package for a particular architecture and distro."""
    # We append a suffix like ~ubuntu8.04 to differentiate the same
    # version built for multiple distros.
    nmutag = b.distroToSuffix(distro)
    env = os.environ.copy()
    env['NMUTAG'] = nmutag

    # Run sbuild with a hack in place to append arbitrary versions
    args = ['perl', '-I/usr/share/invirt-dev',
            '/usr/bin/sbuild',
            '--binNMU=171717', '--make-binNMU=Build with sbuild',
            '-v', '-d', distro, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    logAndRun(args, cwd=workdir, env=env)
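
# The fixed "--binNMU=171717" version is presumably rewritten by the patched
# sbuild loaded via -I/usr/share/invirt-dev so that the per-distro $NMUTAG
# suffix (e.g. ~ubuntu8.04) ends up in the built version instead.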


def sbuildAll(package, ref, distro, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, distro, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, distro, 'i386', workdir)
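
# Only the amd64 pass builds architecture-independent ("all") packages
# (arch_all=True); packages declaring "any" also get a separate i386 build.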


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        branch = b.pocketToGit(pocket)
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))
        logAndRun(
            ['git', 'tag', '-m', tag_msg, '--', sanitizeVersion(version),
             commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    logAndRun(
        ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
        try:
            logAndRun(upload)
        except subprocess.CalledProcessError, e:
            if not force:
                raise
            # Remove the existing binaries and retry the upload.
            package = deb822.Changes(open(changes).read())['Binary']
            logAndRun(['reprepro-env', 'remove', apt, package])
            logAndRun(upload)
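
# In pockets that allow_backtracking, a failed include (typically because the
# uploaded version is not newer than what the pocket already carries) is
# retried after removing the existing binaries from the suite.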


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = logAndRun(['git', 'ls-tree', branch],
                     cwd=superproject).strip()

    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % re.escape(package), re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)
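
    # For reference, a submodule (gitlink) entry in "git ls-tree" output looks
    # like "160000 commit <sha1>\t<path>", so this substitution swaps in the
    # new commit for this one package and leaves every other entry untouched.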

    new_tree_id = logAndRun(['git', 'mktree', '--missing'],
                            cwd=superproject,
                            stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = logAndRun(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    logAndRun(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    # tempfile.mkdtemp() creates the directory with mode 0700; make it
    # world-readable so other users can read the build products.
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        # Pipe "git archive" into tar to export the tree at `commit`
        # into workdir/<package>/.
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit],
            stdout=subprocess.PIPE)
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir)
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)
282 """Deal with items in the build queue.
284 When triggered, iterate over build queue items one at a time,
285 until there are no more pending build jobs.
290 stage = 'processing incoming job'
291 queue = os.listdir(b._QUEUE_DIR)
296 job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
297 pocket, package, commit, principal = job.split()
299 database.session.begin()
300 db = database.Build()
304 db.principal = principal
305 database.session.save_or_update(db)
306 database.session.commit()
308 database.session.begin()
310 logdir = os.path.join(b._LOG_DIR, str(db.build_id))
311 if not os.path.exists(logdir):
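            # db.failed_stage is updated before each step so that, if anything
            # below raises, the stored record names the stage that failed.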
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)

            logfile = open(os.path.join(logdir, '%s.log' % db.package), 'w')

            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)
            version = b.getVersion(package, commit)
            db.version = str(version)
            b.runHook('pre-build', [str(db.build_id)])

            env = dict(os.environ)
            env['GIT_COMMITTER_NAME'] = config.build.tagger.name
            env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception.)
            if src is not True:
                # TODO: cut out this code duplication
                db.failed_stage = 'tagging submodule before copying package'
                tagSubmodule(pocket, package, commit, principal, version, env)
                db.failed_stage = 'updating submodule branches before copying package'
                updateSubmoduleBranch(pocket, package, commit)
                db.failed_stage = 'updating superproject before copying package'
                updateSuperproject(pocket, package, commit, principal, version, env)
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)

            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves.
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz.
                    logAndRun(['dpkg-buildpackage', '-us', '-uc', '-S'],
                              cwd=packagedir)

                    db.failed_stage = 'building binary packages'
                    # DISTRIBUTION is the assumed module-level default
                    # distro (see the top of this file).
                    sbuildAll(package, commit, DISTRIBUTION, workdir)
                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
            db.succeeded = False
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            if logfile is not None:
                logfile.close()

            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item.
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()
413 """Initialize the inotifications and start the main loop."""
416 watch_manager = pyinotify.WatchManager()
417 invirtibuilder = Invirtibuilder()
418 notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
419 watch_manager.add_watch(b._QUEUE_DIR,
420 pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
421 pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()