3 """Process the Invirt build queue.
5 The Invirtibuilder handles package builds and uploads. On demand, it
6 attempts to build a particular package.
8 If the build succeeds, the new version of the package is uploaded to
9 the apt repository, tagged in its git repository, and the Invirt
10 superproject is updated to point at the new version.
12 If the build fails, the Invirtibuilder sends mail with the build log.
14 The build queue is tracked via files in /var/lib/invirt-dev/queue. In
15 order to maintain ordering, all filenames in that directory are the
16 timestamp of their creation time.
18 Each queue file contains a file of the form
20 pocket package hash principal
22 where pocket is one of the pockets globally configured in
23 build.pockets. For instance, the pockets in XVM are "prod" and "dev".
25 principal is the Kerberos principal that requested the build.
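
# An illustrative queue entry (all of the values here are made up): a
# file named after its creation timestamp, e.g. "1253576453.49",
# containing the single line
#
#   prod invirt-base 8f6c1cb daemon/build.mit.edu@ATHENA.MIT.EDU
#
# asks for that commit of invirt-base to be built into the "prod"
# pocket on behalf of that principal.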

from __future__ import with_statement

import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify

from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config


# Distribution passed to sbuild's -d option when building binary packages.
DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects,
    one for each section of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))
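
# For illustration (the package name is made up), iterating over
# getControl('invirt-base', 'HEAD') yields one Deb822 mapping for the
# source stanza (fields like 'Source' and 'Build-Depends'), followed
# by one mapping per binary stanza (fields like 'Package' and
# 'Architecture').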


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    # The source stanza of debian/control has no "Package" field, so
    # the filter below skips it and keeps only binary stanzas.
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (
        package,
        v_str)
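
# For example (illustrative versions), a package "invirt-base" at
# Debian version 0.0.5-1 yields 'invirt-base_0.0.5-1.dsc', while a
# native package at version 0.0.5 yields 'invirt-base_0.0.5.dsc'.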


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')
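
# For example, sanitizeVersion on a version parsed from "1:0.0.5~rc1-1"
# returns "0.0.5.rc1-1": the epoch is dropped because only
# upstream_version and debian_version are used, and the tilde (which
# git does not allow in ref names) becomes a period.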


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)
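
# Note that sbuildAll passes arch_all=True only for the amd64 pass, so
# architecture-independent ("Architecture: all") binaries are built
# exactly once rather than once per architecture.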


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at the given commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        branch = b.pocketToGit(pocket)
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))
        # Name the tag after the sanitized Debian version and point it
        # at the commit that was just built.
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), commit],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, commit):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, commit],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    force = config.build.pockets[pocket].get('allow_backtracking', False)
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        upload = ['reprepro-env', '--ignore=wrongdistribution',
                  'include', apt, changes]
        try:
            c.captureOutput(upload)
        except subprocess.CalledProcessError, e:
            if not force:
                raise
            package = deb822.Changes(open(changes).read())['Binary']
            c.captureOutput(['reprepro-env', 'remove', apt, package])
            c.captureOutput(upload)
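
# reprepro normally refuses to include a package whose version is not
# newer than what the pocket already contains, so in pockets that
# allow_backtracking, uploadBuild falls back to removing the existing
# binaries and re-including the new .changes file.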


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()
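
    # In `git ls-tree` output a submodule appears as a "gitlink" entry
    # of the form
    #
    #   160000 commit <sha1>\t<package>
    #
    # so rewriting that sha1 is enough to point the superproject at the
    # newly built commit of the submodule.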
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)


def makeReadable(workdir):
    """Make the work directory world-readable and traversable."""
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Checkout the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_tar.wait()
        p_archive.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)
259 """Deal with items in the build queue.
261 When triggered, iterate over build queue items one at a time,
262 until there are no more pending build jobs.
265 stage = 'processing incoming job'
266 queue = os.listdir(b._QUEUE_DIR)
271 job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
272 pocket, package, commit, principal = job.split()
274 database.session.begin()
275 db = database.Build()
279 db.principal = principal
280 database.session.save_or_update(db)
281 database.session.commit()
283 database.session.begin()

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)
            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)

            db.version = str(b.getVersion(package, commit))
            b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
                                    db.commit, db.principal, db.version, str(db.inserted_at)])

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception.)
            if src != True:
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves.
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz.
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir)

                    db.failed_stage = 'building binary packages'
                    sbuildAll(package, commit, workdir)

                    logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                    if not os.path.exists(logdir):
                        os.makedirs(logdir)
                    for log in glob.glob(os.path.join(workdir, 'build-*.log')):
                        shutil.copy(log, logdir)

                    db.failed_stage = 'processing metadata'
                    env = dict(os.environ)
                    env['GIT_COMMITTER_NAME'] = config.build.tagger.name
                    env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
                    version = b.getVersion(package, commit)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal, version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
            db.succeeded = False
        else:
            db.succeeded = True
            db.failed_stage = None

        database.session.save_or_update(db)
        database.session.commit()

        # Finally, now that everything is done, remove the
        # request from the queue.
        os.unlink(os.path.join(b._QUEUE_DIR, build))

        if db.succeeded:
            b.runHook('post-build', [str(db.build_id)])
        else:
            b.runHook('failed-build', [str(db.build_id)])


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()
383 """Initialize the inotifications and start the main loop."""

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])
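    # Watching both IN_CREATE and IN_MOVED_TO catches queue files that
    # are written in place as well as files renamed into the directory.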

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()