[2538] | 1 | #!/usr/bin/python |
---|
| 2 | |
---|
| 3 | """Process the Invirt build queue. |
---|
| 4 | |
---|
| 5 | The Invirtibuilder handles package builds and uploads. On demand, it |
---|
| 6 | attempts to build a particular package. |
---|
| 7 | |
---|
| 8 | If the build succeeds, the new version of the package is uploaded to |
---|
| 9 | the apt repository, tagged in its git repository, and the Invirt |
---|
[2801] | 10 | superproject is updated to point at the new version. |
---|
[2538] | 11 | |
---|
| 12 | If the build fails, the Invirtibuilder sends mail with the build log. |
---|
| 13 | |
---|
| 14 | The build queue is tracked via files in /var/lib/invirt-dev/queue. In |
---|
| 15 | order to maintain ordering, all filenames in that directory are the |
---|
| 16 | timestamp of their creation time. |
---|
| 17 | |
---|
Each queue file contains a single line of the form
---|
| 19 | |
---|
| 20 | pocket package hash principal |
---|
| 21 | |
---|
| 22 | where pocket is one of the pockets globally configured in |
---|
[2593] | 23 | build.pockets. For instance, the pockets in XVM are "prod" and "dev". |
---|
[2538] | 24 | |
---|
| 25 | principal is the Kerberos principal that requested the build. |
---|
| 26 | """ |
---|
| 27 | |
---|
| 28 | |
---|
[2546] | 29 | from __future__ import with_statement |
---|
| 30 | |
---|
[2538] | 31 | import contextlib |
---|
[3028] | 32 | import glob |
---|
[2538] | 33 | import os |
---|
| 34 | import re |
---|
| 35 | import shutil |
---|
| 36 | import subprocess |
---|
[3028] | 37 | import tempfile |
---|
| 38 | import traceback |
---|
[2538] | 39 | |
---|
| 40 | import pyinotify |
---|
| 41 | |
---|
[3028] | 42 | from debian_bundle import deb822 |
---|
| 43 | |
---|
[2543] | 44 | import invirt.builder as b |
---|
[3028] | 45 | import invirt.common as c |
---|
[2538] | 46 | from invirt import database |
---|
[3028] | 47 | from invirt.config import structs as config |
---|
[2538] | 48 | |
---|
| 49 | |
---|
# Distribution passed to sbuild's -d flag for every package build.
DISTRIBUTION = 'hardy'
---|
| 51 | |
---|
| 52 | |
---|
def getControl(package, ref):
    """Fetch and parse debian/control for a package at a given ref.

    Returns an iterator of debian_bundle.deb822.Deb822 objects, one
    per paragraph (section) of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    control_text = b.getGitFile(package, ref, 'debian/control')
    return deb822.Deb822.iter_paragraphs(control_text.split('\n'))
---|
[2538] | 62 | |
---|
| 63 | |
---|
def getBinaries(package, ref):
    """Return the names of all binary packages a package builds at ref."""
    names = []
    for section in getControl(package, ref):
        # Binary paragraphs carry a Package field; the source
        # paragraph does not.
        if 'Package' in section:
            names.append(section['Package'])
    return names
---|
| 68 | |
---|
| 69 | |
---|
def getArches(package, ref):
    """Return the set of architectures named by any binary package."""
    found = set()
    for paragraph in getControl(package, ref):
        if 'Architecture' in paragraph:
            for arch in paragraph['Architecture'].split():
                found.add(arch)
    return found
---|
| 77 | |
---|
| 78 | |
---|
def getDscName(package, ref):
    """Return the .dsc filename that will be generated for this package.

    The filename is <package>_<version>.dsc, where the version omits
    any epoch (dpkg-buildpackage leaves epochs out of filenames).
    """
    version = b.getVersion(package, ref)
    if version.debian_version:
        version_str = '-'.join([version.upstream_version,
                                version.debian_version])
    else:
        # Debian-native package: no Debian revision component.
        version_str = version.upstream_version
    return '%s_%s.dsc' % (package, version_str)
---|
[2538] | 90 | |
---|
| 91 | |
---|
def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag name.

    This strips the epoch from the version number (by rebuilding the
    string from only the upstream and Debian components) and replaces
    any tildes -- legal in Debian versions but not in git refnames --
    with periods.
    """
    # Bug fix: this previously tested v.debian_version before the
    # local v was assigned, so every call raised UnboundLocalError.
    # The attribute belongs on the 'version' parameter.
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')
---|
| 103 | |
---|
| 104 | |
---|
def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package's binaries between pockets in the apt repository."""
    command = ['reprepro-env', 'copy',
               b.pocketToApt(dst_pocket),
               b.pocketToApt(src_pocket),
               package]
    command.extend(getBinaries(package, commit))
    c.captureOutput(command)
---|
[2538] | 112 | |
---|
| 113 | |
---|
def sbuild(package, ref, arch, workdir, arch_all=False):
    """Invoke sbuild in workdir to build a package for one architecture.

    With arch_all=True, architecture-independent (Architecture: all)
    binary packages are built as well.
    """
    command = ['sbuild', '-v', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        command += ['-A']
    command += [getDscName(package, ref)]
    c.captureOutput(command, cwd=workdir)
---|
[2538] | 121 | |
---|
| 122 | |
---|
def sbuildAll(package, ref, workdir):
    """Build a package for every architecture it supports."""
    arches = getArches(package, ref)
    # amd64 is the primary build; it also picks up Architecture: all
    # packages via arch_all.
    if arches & set(['all', 'any', 'amd64']):
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if arches & set(['any', 'i386']):
        sbuild(package, ref, 'i386', workdir)
---|
| 130 | |
---|
| 131 | |
---|
def tagSubmodule(pocket, package, principal, version, env, commit=None):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at commit (or, when no commit is passed
    by a legacy caller, at the tip of the pocket's branch).

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.build.pockets[pocket].get('allow_backtracking', False):
        # Bug fix: the original referenced an undefined name 'commit'
        # (raising NameError on every call), passed it where git
        # expects the tag *name*, and ran git in the daemon's cwd
        # instead of the package repository.
        if commit is None:
            # NOTE(review): fallback for legacy 5-argument callers --
            # tags the branch tip, which is only the right commit if
            # the branch has already been advanced.  Callers should
            # pass commit explicitly.
            commit = b.pocketToGit(pocket)

        # Build a refname-safe tag name (inlined sanitization: drop
        # the epoch, replace tildes with periods).
        if version.debian_version:
            tag_name = '%s-%s' % (version.upstream_version,
                                  version.debian_version)
        else:
            tag_name = version.upstream_version
        tag_name = tag_name.replace('~', '.')

        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, tag_name, commit],
            stdout=None,
            env=env,
            cwd=b.getRepo(package))
---|
| 158 | |
---|
| 159 | |
---|
def updateSubmoduleBranch(pocket, package, ref):
    """Point the submodule's pocket-named branch at the given ref."""
    branch_name = b.pocketToGit(pocket)
    full_ref = 'refs/heads/%s' % branch_name
    c.captureOutput(
        ['git', 'update-ref', full_ref, ref],
        cwd=b.getRepo(package))
---|
[2538] | 165 | |
---|
| 166 | |
---|
def uploadBuild(pocket, workdir):
    """Upload every .changes file in workdir to the pocket's apt repo."""
    apt_dist = b.pocketToApt(pocket)
    changes_glob = os.path.join(workdir, '*.changes')
    for changes_path in glob.glob(changes_glob):
        # reprepro would otherwise reject changes files whose
        # Distribution doesn't match the target; ignore that check.
        c.captureOutput(['reprepro-env',
                         '--ignore=wrongdistribution',
                         'include',
                         apt_dist,
                         changes_path])
---|
| 176 | |
---|
| 177 | |
---|
def updateSuperproject(pocket, package, commit, principal, version, env):
    """Update the superproject.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superproject.
    """
    superproject = os.path.join(b._REPO_DIR, 'invirt/packages.git')
    branch = b.pocketToGit(pocket)
    # Grab the branch's current tree listing; each submodule shows up
    # as a '160000 commit <sha>\t<name>' gitlink line.
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superproject).strip()

    # Rewrite only this package's gitlink line to point at the new
    # commit, leaving every other entry untouched.
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    # --missing: the submodule's commit object doesn't live in the
    # superproject repository, so tell mktree not to validate it.
    new_tree_id = c.captureOutput(['git', 'mktree', '--missing'],
                                  cwd=superproject,
                                  stdin_str=new_tree).strip()

    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))
    # Commit the new tree with the branch's current tip as parent
    # (env supplies GIT_COMMITTER_* identity)...
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superproject,
        env=env,
        stdin_str=commit_msg).strip()

    # ...and advance the branch ref to the new commit.
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superproject)
---|
[2538] | 214 | |
---|
| 215 | |
---|
def makeReadable(workdir):
    # The workdir comes from tempfile.mkdtemp, which creates it mode
    # 0700; open it up (0755) so other processes can read the build
    # products before they are uploaded.
    os.chmod(workdir, 0755)
---|
| 218 | |
---|
@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Checkout the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.

    Raises RuntimeError if either the git archive or the tar
    extraction fails.
    """
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        # Close the parent's copy of the pipe so tar sees EOF (and a
        # SIGPIPE reaches git archive) if either side dies early.
        p_archive.stdout.close()
        archive_status = p_archive.wait()
        tar_status = p_tar.wait()

        # Bug fix: failures of either process were silently ignored,
        # handing the caller an empty or partial checkout.
        if archive_status != 0:
            raise RuntimeError('git archive of %s failed with status %d'
                               % (package, archive_status))
        if tar_status != 0:
            raise RuntimeError('tar extraction of %s failed with status %d'
                               % (package, tar_status))

        yield workdir
    finally:
        shutil.rmtree(workdir)
---|
| 251 | |
---|
def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

        # Queue filenames are their creation timestamps, so the
        # minimum is the oldest pending job.
        build = min(queue)
        # Bug fix: the queue file was opened without ever being
        # closed; use a with block to release the descriptor.
        with open(os.path.join(b._QUEUE_DIR, build)) as job_file:
            job = job_file.read().strip()
        pocket, package, commit, principal = job.split()

        # Record the job before doing any work, so a crash still
        # leaves a trace of what was attempted.
        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            # Don't expand the commit in the DB until we're sure the user
            # isn't trying to be tricky.
            b.ensureValidPackage(package)
            db.commit = commit = b.canonicalize_commit(package, commit)
            src = b.validateBuild(pocket, package, commit)

            db.version = str(b.getVersion(package, commit))
            b.runHook('pre-build', [str(db.build_id), db.pocket, db.package,
                                    db.commit, db.principal, db.version,
                                    str(db.inserted_at)])

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception)
            if src != True:
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir,
                                    stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        # Save the sbuild logs whether or not the
                        # build succeeded.
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(
                                os.path.join(workdir, 'build-*.log')):
                            # Bug fix: this previously called os.copy,
                            # which does not exist and raised
                            # AttributeError; shutil.copy is the
                            # correct call (shutil is imported at the
                            # top of the file).
                            shutil.copy(log, logdir)

                    db.failed_stage = 'processing metadata'
                    env = dict(os.environ)
                    env['GIT_COMMITTER_NAME'] = config.build.tagger.name
                    env['GIT_COMMITTER_EMAIL'] = config.build.tagger.email
                    version = b.getVersion(package, commit)

                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, principal, version, env)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superproject'
                    updateSuperproject(pocket, package, commit, principal,
                                       version, env)
                    db.failed_stage = 'relaxing permissions on workdir'
                    makeReadable(workdir)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

                    db.failed_stage = 'cleaning up'
        except:
            # Deliberately catch everything: any failure is recorded
            # in the database so the failed-build hook can report it.
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            # Finally, now that everything is done, remove the
            # build queue item
            os.unlink(os.path.join(b._QUEUE_DIR, build))

            if db.succeeded:
                b.runHook('post-build', [str(db.build_id)])
            else:
                b.runHook('failed-build', [str(db.build_id)])
---|
[2538] | 365 | |
---|
class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""

    def process_default(self, _event):
        """Run the builder in response to any inotify event.

        Any activity on the queue directory means there may be new
        work, so a single pass over the whole queue is triggered
        regardless of what the event was.
        """
        build()
---|
| 374 | |
---|
| 375 | |
---|
def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    manager = pyinotify.WatchManager()
    handler = Invirtibuilder()
    notifier = pyinotify.Notifier(manager, handler)
    # Watch for queue files being created in, or moved into, the
    # queue directory.
    mask = (pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'] |
            pyinotify.EventsCodes.ALL_FLAGS['IN_MOVED_TO'])
    manager.add_watch(b._QUEUE_DIR, mask)

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()
---|
| 395 | |
---|
| 396 | |
---|
# Run the daemon only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
---|