#!/usr/bin/python

"""Process the Invirt build queue.

The Invirtibuilder handles package builds and uploads. On demand, it
attempts to build a particular package.

If the build succeeds, the new version of the package is uploaded to
the apt repository, tagged in its git repository, and the Invirt
superrepo is updated to point at the new version.

If the build fails, the Invirtibuilder sends mail with the build log.

The build queue is tracked via files in /var/lib/invirt-dev/queue. In
order to maintain ordering, each file in that directory is named with
the timestamp of its creation time.

Each queue file contains a single line of the form

  pocket package hash principal

where pocket is one of the pockets globally configured in
git.pockets. For instance, the pockets in XVM are "prod" and "dev".

hash is the git commit to build, and principal is the Kerberos
principal that requested the build.
"""
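
# A queue entry might look like this (hypothetical values):
#
#   prod invirt-base 0123456789abcdef0123456789abcdef01234567 builder@EXAMPLE.COM
#
# i.e. <pocket> <package> <commit hash> <requesting principal>, whitespace-separated.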


import contextlib
import glob
import os
import re
import shutil
import subprocess
import tempfile
import traceback

import pyinotify
from debian_bundle import deb822

import invirt.builder as b
import invirt.common as c  # assumed home of the captureOutput helper used below
from invirt import database
from invirt.config import structs as config  # assumed path for the attribute-style config


DISTRIBUTION = 'hardy'  # release passed to sbuild -d


def getControl(package, ref):
    """Get the parsed debian/control file for a given package.

    This returns an iterable of debian_bundle.deb822.Deb822 objects,
    one for each section of the debian/control file. Each Deb822
    object acts roughly like a dict.
    """
    return deb822.Deb822.iter_paragraphs(
        b.getGitFile(package, ref, 'debian/control').split('\n'))
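
# For instance (hypothetical stanza), a paragraph parsed from
#
#   Package: invirt-base
#   Architecture: all
#
# supports dict-style access such as p['Package'] == 'invirt-base'
# and membership tests such as 'Architecture' in p.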


def getBinaries(package, ref):
    """Get a list of binary packages in a package at a given ref."""
    return [p['Package'] for p in getControl(package, ref)
            if 'Package' in p]


def getArches(package, ref):
    """Get the set of all architectures in any binary package."""
    arches = set()
    for section in getControl(package, ref):
        if 'Architecture' in section:
            arches.update(section['Architecture'].split())
    return arches


def getDscName(package, ref):
    """Return the name of the .dsc file that will be generated for this package."""
    v = b.getVersion(package, ref)
    if v.debian_version:
        v_str = '%s-%s' % (v.upstream_version,
                           v.debian_version)
    else:
        v_str = v.upstream_version
    return '%s_%s.dsc' % (
        package,
        v_str)
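
# Illustration (hypothetical values): for package "invirt-base" at changelog
# version "0.1.2-3", this returns "invirt-base_0.1.2-3.dsc". As with all
# Debian source artifacts, any epoch in the version does not appear in the
# filename.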


def sanitizeVersion(version):
    """Sanitize a Debian package version for use as a git tag.

    This function strips the epoch from the version number and
    replaces any tildes with periods."""
    if version.debian_version:
        v = '%s-%s' % (version.upstream_version,
                       version.debian_version)
    else:
        v = version.upstream_version
    return v.replace('~', '.')
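
# Example (hypothetical version string): "1:0.5.0~rc1-2" sanitizes to
# "0.5.0.rc1-2"; the epoch "1:" is dropped and the tilde becomes a period,
# since neither character is allowed in a git ref name.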


def aptCopy(package, commit, dst_pocket, src_pocket):
    """Copy a package and its binaries from one apt pocket to another."""
    binaries = []
    for line in b.getGitFile(package, commit, 'debian/control').split('\n'):
        m = re.match('Package: (.*)$', line)
        if m:
            binaries.append(m.group(1))

    c.captureOutput(['reprepro-env', 'copy',
                     b.pocketToApt(dst_pocket),
                     b.pocketToApt(src_pocket),
                     package] + binaries)


def sbuild(package, ref, arch, workdir, arch_all=False):
    """Build a package for a particular architecture."""
    args = ['sbuild', '-d', DISTRIBUTION, '--arch', arch]
    if arch_all:
        args.append('-A')
    args.append(getDscName(package, ref))
    c.captureOutput(args, cwd=workdir, stdout=None)


def sbuildAll(package, ref, workdir):
    """Build a package for all architectures it supports."""
    arches = getArches(package, ref)
    if 'all' in arches or 'any' in arches or 'amd64' in arches:
        sbuild(package, ref, 'amd64', workdir, arch_all=True)
    if 'any' in arches or 'i386' in arches:
        sbuild(package, ref, 'i386', workdir)
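
# Passing arch_all=True only for the amd64 build maps to sbuild's -A flag, so
# Architecture: all binary packages are built exactly once (alongside the
# amd64 build) rather than once per architecture.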


def tagSubmodule(pocket, package, ref, principal):
    """Tag a new version of a submodule.

    If this pocket does not allow_backtracking, then this will create
    a new tag of the version at ref.

    This function doesn't need to care about lock
    contention. git-receive-pack updates one ref at a time, and only
    takes out a lock for that ref after it's passed the update
    hook. Because we reject pushes to tags in the update hook, no push
    can ever take out a lock on any tags.

    I'm sure that long description gives you great confidence in the
    legitimacy of my reasoning.
    """
    if not config.git.pockets[pocket].get('allow_backtracking', False):
        env = dict(os.environ)
        branch = b.pocketToGit(pocket)
        version = b.getVersion(package, ref)

        env['GIT_COMMITTER_NAME'] = config.git.tagger.name
        env['GIT_COMMITTER_EMAIL'] = config.git.tagger.email
        tag_msg = ('Tag %s of %s\n\n'
                   'Requested by %s' % (version.full_version,
                                        package,
                                        principal))

        # The tag name comes from sanitizeVersion so that epochs and tildes
        # never end up in the ref name; run in the package's repository.
        c.captureOutput(
            ['git', 'tag', '-m', tag_msg, sanitizeVersion(version), ref],
            cwd=b.getRepo(package),
            stdout=None,
            env=env)


def updateSubmoduleBranch(pocket, package, ref):
    """Update the appropriately named branch in the submodule."""
    branch = b.pocketToGit(pocket)
    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, ref],
        cwd=b.getRepo(package))


def uploadBuild(pocket, workdir):
    """Upload all build products in the work directory."""
    apt = b.pocketToApt(pocket)
    for changes in glob.glob(os.path.join(workdir, '*.changes')):
        c.captureOutput(['reprepro-env',
                         'include',
                         '--ignore=wrongdistribution',
                         apt,
                         changes])


def updateSuperrepo(pocket, package, commit, principal):
    """Update the superrepo.

    This will create a new commit on the branch for the given pocket
    that sets the commit for the package submodule to commit.

    Note that there's no locking issue here, because we disallow all
    pushes to the superrepo.
    """
    superrepo = os.path.join(b._REPO_DIR, 'packages.git')
    branch = b.pocketToGit(pocket)
    tree = c.captureOutput(['git', 'ls-tree', branch],
                           cwd=superrepo)

    # Rewrite the gitlink entry for this package to point at the new commit.
    new_tree = re.compile(
        r'^(160000 commit )[0-9a-f]*(\t%s)$' % package, re.M).sub(
        r'\g<1>%s\g<2>' % commit,
        tree)

    new_tree_id = c.captureOutput(['git', 'mktree'],
                                  cwd=superrepo,
                                  stdin_str=new_tree)

    version = b.getVersion(package, commit)
    commit_msg = ('Update %s to version %s\n\n'
                  'Requested by %s' % (package,
                                       version.full_version,
                                       principal))

    env = dict(os.environ)
    env['GIT_COMMITTER_NAME'] = config.git.tagger.name
    env['GIT_COMMITTER_EMAIL'] = config.git.tagger.email
    new_commit = c.captureOutput(
        ['git', 'commit-tree', new_tree_id, '-p', branch],
        cwd=superrepo,
        env=env,
        stdin_str=commit_msg)

    c.captureOutput(
        ['git', 'update-ref', 'refs/heads/%s' % branch, new_commit],
        cwd=superrepo)
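
# git ls-tree lists each submodule as a gitlink line of the form
# "160000 commit <sha>\t<package>"; the regexp above replaces only the <sha>
# field for this package before the tree is re-created with git mktree.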


@contextlib.contextmanager
def packageWorkdir(package, commit):
    """Check out the package in a temporary working directory.

    This context manager returns that working directory. The requested
    package is checked out into a subdirectory of the working
    directory with the same name as the package.

    When the context wrapped with this context manager is exited, the
    working directory is automatically deleted.
    """
    workdir = tempfile.mkdtemp()
    try:
        p_archive = subprocess.Popen(
            ['git', 'archive',
             '--remote=file://%s' % b.getRepo(package),
             '--prefix=%s/' % package,
             commit,
             ],
            stdout=subprocess.PIPE,
            )
        p_tar = subprocess.Popen(
            ['tar', '-x'],
            stdin=p_archive.stdout,
            cwd=workdir,
            )
        p_archive.wait()
        p_tar.wait()

        yield workdir
    finally:
        shutil.rmtree(workdir)
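
# Typical use (hypothetical package name):
#
#   with packageWorkdir('invirt-base', commit) as workdir:
#       packagedir = os.path.join(workdir, 'invirt-base')
#       ...  # run the build inside packagedir
#
# The temporary directory is removed when the block exits, even on error.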


def reportBuild(build):
    """Run hooks to report the results of a build attempt."""
    c.captureOutput(['run-parts',
                     '--arg=%s' % build.build_id,
                     '--',
                     b._HOOKS_DIR])


def build():
    """Deal with items in the build queue.

    When triggered, iterate over build queue items one at a time,
    until there are no more pending build jobs.
    """
    while True:
        queue = os.listdir(b._QUEUE_DIR)
        if not queue:
            break

        # Queue filenames are creation timestamps, so the oldest job sorts first.
        build = min(queue)
        job = open(os.path.join(b._QUEUE_DIR, build)).read().strip()
        pocket, package, commit, principal = job.split()

        database.session.begin()
        db = database.Build()
        db.package = package
        db.pocket = pocket
        db.commit = commit
        db.principal = principal
        database.session.save_or_update(db)
        database.session.commit()

        database.session.begin()

        try:
            db.failed_stage = 'validating job'
            src = b.validateBuild(pocket, package, commit)

            db.version = str(b.getVersion(package, commit))

            # If validateBuild returns something other than True, then
            # it means we should copy from that pocket to our pocket.
            #
            # (If the validation failed, validateBuild would have
            # raised an exception.)
            if src is not True:
                db.failed_stage = 'copying package from another pocket'
                aptCopy(package, commit, pocket, src)
            # If we can't copy the package from somewhere, but
            # validateBuild didn't raise an exception, then we need to
            # do the build ourselves.
            else:
                db.failed_stage = 'checking out package source'
                with packageWorkdir(package, commit) as workdir:
                    db.failed_stage = 'preparing source package'
                    packagedir = os.path.join(workdir, package)

                    # We should be more clever about dealing with
                    # things like non-Debian-native packages than we
                    # are.
                    #
                    # If we were, we could use debuild and get nice
                    # environment scrubbing. Since we're not, debuild
                    # complains about not having an orig.tar.gz.
                    c.captureOutput(['dpkg-buildpackage', '-us', '-uc', '-S'],
                                    cwd=packagedir,
                                    stdout=None)

                    try:
                        db.failed_stage = 'building binary packages'
                        sbuildAll(package, commit, workdir)
                    finally:
                        # Save the sbuild logs whether or not the build worked.
                        logdir = os.path.join(b._LOG_DIR, str(db.build_id))
                        if not os.path.exists(logdir):
                            os.makedirs(logdir)

                        for log in glob.glob(os.path.join(workdir, '*.build')):
                            shutil.copy2(log, logdir)
                    db.failed_stage = 'tagging submodule'
                    tagSubmodule(pocket, package, commit, principal)
                    db.failed_stage = 'updating submodule branches'
                    updateSubmoduleBranch(pocket, package, commit)
                    db.failed_stage = 'updating superrepo'
                    updateSuperrepo(pocket, package, commit, principal)
                    db.failed_stage = 'uploading packages to apt repo'
                    uploadBuild(pocket, workdir)

            db.failed_stage = 'cleaning up'

            # Finally, now that everything is done, remove the
            # build queue item.
            os.unlink(os.path.join(b._QUEUE_DIR, build))
        except:
            db.traceback = traceback.format_exc()
        else:
            db.succeeded = True
            db.failed_stage = None
        finally:
            database.session.save_or_update(db)
            database.session.commit()

            reportBuild(db)


class Invirtibuilder(pyinotify.ProcessEvent):
    """Process inotify triggers to build new packages."""
    def process_IN_CREATE(self, event):
        """Handle a created file or directory.

        When an IN_CREATE event comes in, trigger the builder.
        """
        build()


def main():
    """Initialize the inotifications and start the main loop."""
    database.connect()

    watch_manager = pyinotify.WatchManager()
    invirtibuilder = Invirtibuilder()
    notifier = pyinotify.Notifier(watch_manager, invirtibuilder)
    watch_manager.add_watch(b._QUEUE_DIR,
                            pyinotify.EventsCodes.ALL_FLAGS['IN_CREATE'])

    # Before inotifying, run any pending builds; otherwise we won't
    # get notified for them.
    build()

    while True:
        notifier.process_events()
        if notifier.check_events():
            notifier.read_events()


if __name__ == '__main__':
    main()