3 """Process the Invirt build queue.
5 The Invirtibuilder handles package builds and uploads. On demand, it
6 attempts to build a particular package.
8 If the build succeeds, the new version of the package is uploaded to
9 the apt repository, tagged in its git repository, and the Invirt
10 superproject is updated to point at the new version.
12 If the build fails, the Invirtibuilder sends mail with the build log.
14 The build queue is tracked via files in /var/lib/invirt-dev/queue. In
15 order to maintain ordering, all filenames in that directory are the
16 timestamp of their creation time.
18 Each queue file contains a file of the form
20 pocket package hash principal
22 where pocket is one of the pockets globally configured in
23 build.pockets. For instance, the pockets in XVM are "prod" and "dev".
25 principal is the Kerberos principal that requested the build.
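
# For example, a queue file named "1254454617.042" (its creation
# timestamp) might contain the single line
#
#     prod invirt-web 1a2b3c4d prodbuilder@ATHENA.MIT.EDU
#
# The package name, hash, and principal above are purely illustrative.
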
from __future__ import with_statement

import contextlib, glob, os, re, shutil, subprocess, tempfile, traceback

import pyinotify
from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config


DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterable of debian_bundle.deb822.Deb822 objects,
    one for each section of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))
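
# debian/control consists of a source paragraph followed by one or more
# binary package paragraphs, e.g. (illustrative only):
#
#     Source: invirt-example
#
#     Package: invirt-example
#     Architecture: all
#
# The 'Package' and 'Architecture' lookups in getBinaries and getArches
# below index into those per-paragraph fields.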


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)
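
# For example, a package "invirt-example" (an illustrative name) at
# version "1:0.1.2-3" yields "invirt-example_0.1.2-3.dsc"; the epoch
# never appears in the filename, and a native version "0.1.2" yields
# "invirt-example_0.1.2.dsc".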


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')
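
# For example, "1:0.2.1~rc1-1" sanitizes to "0.2.1.rc1-1": the epoch is
# dropped and the tilde replaced, since ':' and '~' are not allowed in
# git ref names.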


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        # Also build Architecture: all packages on this pass.
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)
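
# Note that only the amd64 pass builds Architecture: all packages
# (arch_all=True), so arch-independent packages are built exactly once;
# "Architecture: any" packages are built for both amd64 and i386.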


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at ref.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        branch = b.pocketToGit(pocket)
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))
        # Tag the built commit, using the sanitized version as the tag
        # name (see sanitizeVersion above).
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, ref):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, ref], cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        c.captureOutput(['reprepro-env',
                         '--ignore=wrongdistribution',
                         'include',
                         apt,
                         changes])


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

    # Rewrite the gitlink entry for this package
    # ("160000 commit <sha>\t<package>") to point at the new commit.
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    # --missing lets mktree accept a gitlink whose object isn't
    # present in this repository (submodule commits never are).
    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    os.chmod(workdir, 0755)  # mkdtemp creates the dir 0700; open it up so build products can be read


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Checkout the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        # Stream a checkout of the package (git archive | tar -x) into the workdir.
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit],
            stdout=subprocess.PIPE)
        p_tar = subprocess.Popen(['tar', '-x'],
                                 stdin=p_archive.stdout,
                                 cwd=workdir)
        p_tar.wait()
        p_archive.wait()
        yield workdir
    finally:
        shutil.rmtree(workdir)


def reportBuild(build):
    """Run hooks to report the results of a build attempt."""
    c.captureOutput(['run-parts',
                     '--arg=%s' % build.build_id,
                     '--',
                     b._HOOKS_DIR])  # hooks directory constant assumed to exist in invirt.builder
263 """Deal with items in the build queue.
265 When triggered, iterate over build queue items one at a time,
266 until there are no more pending build jobs.
269 stage = 'processing incoming job'
270 queue = os.listdir(b._QUEUE_DIR)
275 job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
276 pocket, package, commit, principal = job.split()
278 database.session.begin()
279 db = database.Build()
283 db.principal = principal
284 database.session.save_or_update(db)
285 database.session.commit()
287 database.session.begin()
290 db.failed_stage = 'validating job'
291 # Don't expand the commit in the DB until we're sure the user
292 # isn't trying to be tricky.
293 b.ensureValidPackage(package)
294 db.commit = commit = b.canonicalize_commit(package, commit)
295 src = b.validateBuild(pocket, package, commit)
297 db.version = str(b.getVersion(package, commit))
299 # If validateBuild returns something other than True, then
300 # it means we should copy from that pocket to our pocket.
302 # (If the validation failed, validateBuild would have
303 # raised an exception)
305 db.failed_stage = 'copying package from another pocket'
306 aptCopy(package, commit, pocket, src)
307 # If we can't copy the package from somewhere, but
308 # validateBuild didn't raise an exception, then we need to
309 # do the build ourselves
311 db.failed_stage = 'checking out package source'
312 with packageWorkdir(package, commit) as workdir:
313 db.failed_stage = 'preparing source package'
314 packagedir = os.path.join(workdir, package)
316 # We should be more clever about dealing with
317 # things like non-Debian-native packages than we
320 # If we were, we could use debuild and get nice
321 # environment scrubbing. Since we're not, debuild
322 # complains about not having an orig.tar.gz
323 c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],

                    db.failed_stage = 'building binary packages'
                    sbuildAll(package, commit, workdir)

                    logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                    if not os.path.exists(logdir):
                        os.makedirs(logdir)

                    for log in glob.glob(os.path.join(workdir, 'build-*.log')):
                        shutil.copy(log, logdir)

                    db.failed_stage = 'processing metadata'
                    env = dict(os.environ)
                    env['GIT_COMMITTER_NAME'] = config.build.tagger.name
                    env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
                    version = b.getVersion(package, commit)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal,
                                       version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item.
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            reportBuild(db)


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()
383 """Initialize the inotifications and start the main loop."""
386 watch_manager = pyinotify.WatchManager()
387 invirtibuilder = Invirtibuilder()
388 notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
389 watch_manager.add_watch(b._QUEUE_DIR,
390 pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
391 pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])
393 # Before inotifying, run any pending builds; otherwise we won't
394 # get notified for them.
398 notifier.process_events()
399 if notifier.check_events():
400 notifier.read_events()
403 if __name__ == '__main__':