3 """Process the Invirt build queue.
5 The Invirtibuilder handles package builds and uploads. On demand, it
6 attempts to build a particular package.
8 If the build succeeds, the new version of the package is uploaded to
9 the apt repository, tagged in its git repository, and the Invirt
10 superrepo is updated to point at the new version.
12 If the build fails, the Invirtibuilder sends mail with the build log.
14 The build queue is tracked via files in /var/lib/invirt-dev/queue. In
15 order to maintain ordering, all filenames in that directory are the
16 timestamp of their creation time.
18 Each queue file contains a file of the form
20 pocket package hash principal
22 where pocket is one of the pockets globally configured in
23 git.pockets. For instance, the pockets in XVM are "prod" and "dev".
25 principal is the Kerberos principal that requested the build.
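
# For illustration (all values hypothetical), a queue entry is a file named
# for its creation timestamp, e.g. /var/lib/invirt-dev/queue/1257890123.45,
# containing a single whitespace-separated line such as:
#
#   prod invirt-web 8f3c2a1d9e0b4c6f8a7d5e3b1c9f0a2d4e6b8c0d user@EXAMPLE.COM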


import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify

from debian_bundle import deb822

# Invirt-internal helpers. captureOutput, the pocket/git helpers, and the
# queue/log/repository directory constants are assumed to be provided by
# invirt.common and invirt.builder; config is the parsed Invirt configuration.
import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.builder import getGitFile, getVersion, validateBuild
from invirt.config import structs as config


DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns a list of debian_bundle.deb822.Deb822 objects, one
    for each section of the debian/control file. Each Deb822 object
    acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
    v = getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (package, v_str)
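
# For example (hypothetical package and version), "invirt-base" at Debian
# version "0.2.1-3" yields "invirt-base_0.2.1-3.dsc", while a native package
# at version "0.2.1" yields "invirt-base_0.2.1.dsc".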


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')
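
# For example (hypothetical version), "1:2.6.24~rc5-1" becomes "2.6.24.rc5-1":
# the epoch "1:" is dropped because only upstream_version and debian_version
# are used, and the tilde is replaced because git forbids "~" in ref names.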


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)
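
# Note: reprepro's "copy" command takes the destination distribution first,
# then the source distribution, then the packages to copy; reprepro-env is
# assumed to be a wrapper that points reprepro at the Invirt apt repository.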


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir, stdout=None)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)
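
# For example, a package whose debian/control declares "Architecture: any" is
# built twice, once for amd64 and once for i386; arch_all=True is passed only
# on the amd64 pass, so any "Architecture: all" binaries are built (and later
# uploaded) exactly once.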


def tagSubmodule(pocket, package, ref, principal):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at ref.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.git.pockets[pocket].get('allow_backtracking', False):
        env = dict(os.environ)
        branch = b.pocketToGit(pocket)
        version = b.getVersion(package, ref)
        env['GIT_COMMITTER_NAME'] = config.git.tagger.name
        env['GIT_COMMITTER_EMAIL'] = config.git.tagger.email
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))
        # Tag name: pocket branch plus the sanitized Debian version.
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg,
             '%s/%s' % (branch, sanitizeVersion(version)),
             ref],
            env=env,
            cwd=b.getRepo(package))


def updateSubmoduleBranch(pocket, package, ref):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(['git', 'update-ref', 'refs/heads/%s' % branch, ref],
                    cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        c.captureOutput(['reprepro-env',
                         '--ignore=wrongdistribution',
                         'include',
                         apt,
                         changes])


def updateSuperrepo(pocket, package, commit, principal):
    """Update the superrepo.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superrepo.
    """
    superrepo = os.path.join(b._REPO_DIR, 'packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superrepo)

    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree'],
                                  cwd=superrepo,
                                  stdin_str=new_tree).strip()

    version = b.getVersion(package, commit)
    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superrepo,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superrepo)
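
# For reference, updateSuperrepo edits the superrepo tree textually: a
# submodule (gitlink) entry in "git ls-tree" output has the form
#
#   160000 commit <40-hex-sha1>\t<package>
#
# and the regular expression above replaces only the sha1, leaving the mode,
# object type, and package name untouched.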


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Checkout the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit],
            stdout=subprocess.PIPE)
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir)
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)
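
# Typical use (package name hypothetical):
#
#   with packageWorkdir('invirt-web', commit) as workdir:
#       ...  # the source tree is at os.path.join(workdir, 'invirt-web')
#
# The temporary directory is removed when the block exits, even on error.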


def reportBuild(build):
    """Run hooks to report the results of a build attempt."""
    # The hooks directory is assumed to be exported by invirt.builder.
    c.captureOutput(['run-parts',
                     '--arg=%s' % build.build_id,
                     b._HOOKS_DIR])
256 """Deal with items in the build queue.
258 When triggered, iterate over build queue items one at a time,
259 until there are no more pending build jobs.
262 stage = 'processing incoming job'
263 queue = os.listdir(b._QUEUE_DIR)
268 job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
269 pocket, package, commit, principal = job.split()
271 database.session.begin()
272 db = database.Build()
276 db.principal = principal
277 database.session.save_or_update(db)
283 db.failed_stage = 'validating job'
284 src = validateBuild(pocket, package, commit)
286 db.version = str(b.getVersion(package, commit))
288 # If validateBuild returns something other than True, then
289 # it means we should copy from that pocket to our pocket.
291 # (If the validation failed, validateBuild would have
292 # raised an exception)
294 db.failed_stage = 'copying package from another pocket'
295 aptCopy(packages, pocket, src)
296 # If we can't copy the package from somewhere, but
297 # validateBuild didn't raise an exception, then we need to
298 # do the build ourselves
300 db.failed_stage = 'checking out package source'
301 with packageWorkdir(package) as workdir:
302 db.failed_stage = 'preparing source package'
303 packagedir = os.path.join(workdir, package)
305 # We should be more clever about dealing with
306 # things like non-Debian-native packages than we
309 # If we were, we could use debuild and get nice
310 # environment scrubbing. Since we're not, debuild
311 # complains about not having an orig.tar.gz
312 c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
317 db.failed_stage = 'building binary packages'
318 sbuildAll(package, commit, workdir)
320 logdir = os.path.join(b._LOG_DIR, db.build_id)
321 if not os.path.exists(logdir):
324 for log in glob.glob(os.path.join(workdir, '*.build')):
325 os.copy2(log, logdir)
326 db.failed_stage = 'tagging submodule'
327 tagSubmodule(pocket, package, commit, principal)
328 db.failed_stage = 'updating submodule branches'
329 updateSubmoduleBranch(pocket, package, commit)
330 db.failed_stage = 'updating superrepo'
331 updateSuperrepo(pocket, package, commit, principal)
332 db.failed_stage = 'uploading packages to apt repo'
333 uploadBuild(pocket, workdir)
335 db.failed_stage = 'cleaning up'
337 # Finally, now that everything is done, remove the
339 os.unlink(os.path.join(b._QUEUE_DIR, build))
341 db.traceback = traceback.format_exc()
344 db.failed_stage = None
346 database.session.save_or_update(db)
347 database.session.commit()


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_IN_CREATE(self, event):
        """Handle a created file or directory.

        When an IN_CREATE event comes in, trigger the builder.
        """
        build()
363 """Initialize the inotifications and start the main loop."""
366 watch_manager = pyinotify.WatchManager()
367 invirtibuilder = Invirtibuilder()
368 notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
369 watch_manager.add_watch(b._QUEUE_DIR,
370 pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'])
372 # Before inotifying, run any pending builds; otherwise we won't
373 # get notified for them.
377 notifier.process_events()
378 if notifier.check_events():
379 notifier.read_events()
382 if __name__ == '__main__':