#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superproject is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. To
maintain ordering, each file in that directory is named with the
timestamp of its creation.

Each queue file contains a single line of the form

    pocket package hash principal

where pocket is one of the pockets globally configured in
build.pockets. For instance, the pockets in XVM are "prod" and "dev".

principal is the Kerberos principal that requested the build.
"""
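
# Illustrative sketch (hypothetical values): a queue file named
# "1259791365.45" might contain the single line
#
#   prod invirt-example 0f3a9d7c2b1e... builder/example@EXAMPLE.COM
#
# asking for package "invirt-example", at the commit named by the hash,
# to be built and uploaded into the "prod" pocket on behalf of that
# Kerberos principal.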


from __future__ import with_statement

import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify

from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c
from invirt import database
from invirt.config import structs as config


DISTRIBUTION = 'hardy'


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterator of debian_bundle.deb822.Deb822 objects,
    one for each paragraph (section) of the debian/control file. Each
    Deb822 object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches

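# Illustrative sketch (hypothetical package): given a debian/control
# containing
#
#   Source: invirt-example
#
#   Package: invirt-example-client
#   Architecture: all
#
#   Package: invirt-example-server
#   Architecture: any
#
# getBinaries() returns ['invirt-example-client', 'invirt-example-server']
# and getArches() returns set(['all', 'any']).
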

def getDscName(package, ref):
    """Return the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (
        package,
        v_str)


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')

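# Illustrative sketch (hypothetical version): for a package "invirt-example"
# at changelog version 1:1.2~rc1-3 (epoch 1, upstream version 1.2~rc1,
# Debian revision 3), getDscName() yields 'invirt-example_1.2~rc1-3.dsc'
# and sanitizeVersion() yields '1.2.rc1-3' (epoch dropped, tilde replaced
# with a period).
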

def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package from one pocket to another."""
    binaries = getBinaries(package, commit)
    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)

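# Illustrative sketch (hypothetical pockets and names): copying
# "invirt-example" from the "dev" pocket into the "prod" pocket runs
# roughly
#
#   reprepro-env copy <prod apt suite> <dev apt suite> \
#       invirt-example invirt-example-client invirt-example-server
#
# where the suite names come from b.pocketToApt and the trailing
# arguments are the source package plus its binary packages.
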

def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)


def tagSubmodule(pocket, package, commit, principal, version, env):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at the given commit.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), commit],
            cwd=b.getRepo(package),
            stdout=None,
            env=env)


def updateSubmoduleBranch(pocket, package, ref):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, ref],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        c.captureOutput(['reprepro-env',
                         '--ignore=wrongdistribution',
                         'include',
                         apt,
                         changes])


def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)

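# Illustrative sketch (hypothetical hashes): `git ls-tree <branch>` on the
# superproject emits one line per submodule, e.g.
#
#   160000 commit 1111111111111111111111111111111111111111\tinvirt-example
#
# updateSuperproject() rewrites only the hash field of the line for the
# package that was just built, feeds the edited listing back to
# `git mktree --missing`, and commits the resulting tree onto the
# pocket's branch.
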

def makeReadable(workdir):
    """Relax permissions so build products in the work directory can be read."""
    os.chmod(workdir, 0755)


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)
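
# Illustrative usage (hypothetical arguments):
#
#   with packageWorkdir('invirt-example', commit) as workdir:
#       ...  # workdir/invirt-example/ holds a checkout of that commit
#
# The temporary directory is deleted as soon as the with block exits.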


def reportBuild(build):
    """Run hooks to report the results of a build attempt."""

    c.captureOutput(['run-parts',
                     '--arg=%s' % build.build_id,
                     '--',
                     b._HOOKS_DIR])


def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        stage = 'processing incoming job'
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)
            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)

            db.version = str(b.getVersion(package, commit))

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception)
            if src != True:
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir,
                                    stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(os.path.join(workdir,
                                                          'build-*.log')):
                            shutil.copy(log, logdir)

                    db.failed_stage = 'processing metadata'
                    env = dict(os.environ)
                    env['GIT_COMMITTER_NAME'] = config.build.tagger.name
                    env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
                    version = b.getVersion(package, commit)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal,
                                 version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal,
                                       version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            reportBuild(db)



class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_default(self, event):
        """Handle an inotify event.

        When an inotify event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
                            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()