1 | #!/usr/bin/env python2
|
2 | from __future__ import print_function
|
3 | """
|
4 | wild_report.py
|
5 | """
|
6 |
|
7 | import json
|
8 | import optparse
|
9 | import os
|
10 | import sys
|
11 |
|
12 | from vendor import jsontemplate
|
13 |
|
14 | # JSON Template Evaluation:
|
15 | #
|
16 | # - {.if}{.or} is confusing
|
17 | # I think there is even a bug with {.if}{.else}{.end} -- it accepts it but
|
18 | # doesn't do the right thing!
|
# - {.if test} does work though, but it took me awhile to remember that or
#   even find it in the source code. I don't like this separate predicate
#   language. Could just be PHP-ish I guess.
# - I forgot about {.link?} too
|
23 | # - Predicates are a little annoying.
|
24 | # - Lack of location information on undefined variables is annoying. It spews
|
25 | # a big stack trace.
|
26 | # - The styles thing seems awkward. Copied from srcbook.
|
# - I don't have {total_secs|%.3f}, but the
#   LookupChain/DictRegistry/CallableRegistry thing is quite onerous.
|
29 | #
|
30 | # Good parts:
|
31 | # Just making one big dict is pretty nice.
|
32 |
|
# Short alias so the template-group definitions below stay readable.
T = jsontemplate.Template
|
34 |
|
def F(format_str):
  """Custom formatter lookup, passed to templates as 'more_formatters'.

  Returns a callable for a recognized format name, or None so jsontemplate
  falls back to its default formatters.
  """
  # {x|commas} -> add thousands separators, e.g. 1234 -> '1,234'
  if format_str == 'commas':
    return '{:,}'.format

  # {x|printf %.1f} -> printf-style formatting with the given spec
  prefix = 'printf '
  if format_str.startswith(prefix):
    spec = format_str[len(prefix):]
    return lambda value: spec % value

  #'urlesc': urllib.quote_plus,
  return None
|
47 |
|
48 |
|
def MakeHtmlGroup(title_str, body_str):
  """Make a group of templates that we can expand with a common style.

  TITLE and BODY share the same formatter configuration; NAV is the shared
  breadcrumb template.
  """
  common = dict(default_formatter='html', more_formatters=F)
  return {
      'TITLE': T(title_str, **common),
      'BODY': T(body_str, **common),
      'NAV': NAV_TEMPLATE,
  }
|
56 |
|
# Outer HTML scaffold shared by every generated page.  It loads the
# table-sort JS/CSS relative to {base_url}, then expands the TITLE, NAV,
# and BODY templates supplied via expand(data, group=...).
BODY_STYLE = jsontemplate.Template("""\
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{.template TITLE}</title>

<script type="text/javascript" src="{base_url}../../web/ajax.js"></script>
<script type="text/javascript" src="{base_url}../../web/table/table-sort.js"></script>
<link rel="stylesheet" type="text/css" href="{base_url}../../web/base.css" />
<link rel="stylesheet" type="text/css" href="{base_url}../../web/table/table-sort.css" />
<link rel="stylesheet" type="text/css" href="{base_url}../../web/wild.css" />
</head>

<body onload="initPage(gUrlHash, gTables, gTableStates, kStatusElem);"
onhashchange="onHashChange(gUrlHash, gTableStates, kStatusElem);"
class="width60">
<p id="status"></p>

<p style="text-align: right"><a href="/">oilshell.org</a></p>
<p>
{.template NAV}
</p>

{.template BODY}
</body>

</html>
""", default_formatter='html')
|
86 |
|
# Breadcrumb navigation, expanded with {'nav': [{'anchor': ..., 'link': ...}]}
# as produced by MakeNav().  Entries whose 'link' is falsy (the current page)
# render as plain text; entries are separated by '/'.
# NOTE: {.link} {.or id?} {.or} {.end} doesn't work? That is annoying.
NAV_TEMPLATE = jsontemplate.Template("""\
{.section nav}
<span id="nav">
{.repeated section @}
{.link?}
<a href="{link|htmltag}">{anchor}</a>
{.or}
{anchor}
{.end}
{.alternates with}
/
{.end}
</span>
{.end}
""", default_formatter='html')
|
103 |
|
104 |
|
# Page-type name -> template group (TITLE/BODY/NAV), expanded through
# BODY_STYLE.expand(data, group=...).
PAGE_TEMPLATES = {}

# Used for not-shell.html, not-osh.html, parse-failed.html, and
# osh2oil-failed.html -- see Failures.Write().
# <a href="{base_url}osh-to-oil.html#{rel_path|htmltag}/{name|htmltag}">view</a>
PAGE_TEMPLATES['FAILED'] = MakeHtmlGroup(
    '{task}_failed',
    """\
<h1>{failures|size} {task} failures</h1>

{.repeated section failures}
<a href="{base_url}osh-to-oil.html#{rel_path|htmltag}">{rel_path|html}</a>
<pre>
{stderr}
</pre>
{.end}
""")
|
120 |
|
121 | # One is used for sort order. One is used for alignment.
|
122 | # type="string"
|
123 | # should we use the column css class as the sort order? Why not?
|
124 |
|
125 | # NOTES on columns:
|
126 | # - The col is used to COLOR the column when it's being sorted by
|
127 | # - But it can't be use to align text right. See
|
128 | # https://stackoverflow.com/questions/1238115/using-text-align-center-in-colgroup
|
129 | # - type="number" is used in table-sort.js for the sort order.
|
130 | # - We use CSS classes on individual cells like <td class="name"> to align
|
131 | # columns. That seems to be the only way to do it?
|
132 |
|
# Directory listing page: summary stats, a sortable 'dirs' table, a sortable
# 'files' table, collected stderr, and the JS globals table-sort.js needs.
#
# Markup fixes from review (emitted HTML only, no template logic changed):
# - parse-status OK was '<span class="ok">OK</a>' (mismatched close tag)
# - a stderr row was closed with '<tr/>' instead of '</tr>'
# - the version-info link ended with '<a>' instead of '</a>'
# TODO(review): in the files table, the parse-status cell nests
# '<td>{parse_proc_secs}</td>' inside another open <td>; browsers recover,
# but the structure should be untangled.
PAGE_TEMPLATES['LISTING'] = MakeHtmlGroup(
    'WILD/{rel_path} - Parsing and Translating Shell Scripts with Oil',
    """\

{.section subtree_stats}
<div id="summary">
<ul>
{.parse_failed?}
<li>
Attempted to parse <b>{num_files|commas}</b> shell scripts totalling
<b>{num_lines|commas}</b> lines.
</li>
{.not_shell?}
<li>
<b>{not_shell|commas}</b> files are known not to be shell.
{.if test top_level_links}
(<a href="not-shell.html">full list</a>)
{.end}
</li>
{.end}
{.not_osh?}
<li>
<b>{not_osh|commas}</b> files are known not to be OSH.
{.if test top_level_links}
(<a href="not-osh.html">full list</a>)
{.end}
</li>
{.end}
<li>
Failed to parse <b>{parse_failed|commas}</b> scripts, leaving
<b>{lines_parsed|commas}</b> lines parsed in <b>{parse_proc_secs|printf %.1f}</b>
seconds (<b>{lines_per_sec|printf %.1f}</b> lines/sec).
{.if test top_level_links}
(<a href="parse-failed.html">all failures</a>,
<a href="parse-failed.txt">text</a>)
{.end}
</li>
{.or}
<li>
Successfully parsed <b>{num_files|commas}</b> shell scripts totalling
<b>{num_lines|commas}</b> lines
in <b>{parse_proc_secs|printf %.1f}</b> seconds
(<b>{lines_per_sec|printf %.1f}</b> lines/sec).
</li>
{.end}

<li>
<b>{osh2oil_failed|commas}</b> OSH-to-Oil translations failed.
{.if test top_level_links}
(<a href="osh2oil-failed.html">all failures</a>,
<a href="osh2oil-failed.txt">text</a>)
{.end}
</li>
</ul>
</div>

<p></p>
{.end}


{.section dirs}
<table id="dirs">
<colgroup> <!-- for table-sort.js -->
<col type="number">
<col type="number">
<col type="number">
<col type="number">
<col type="number">
<col type="number">
<col type="number">
<col type="case-insensitive">
</colgroup>
<thead>
<tr>
<td>Files</td>
<td>Max Lines</td>
<td>Total Lines</td>
<!-- <td>Lines Parsed</td> -->
<td>Parse Failures</td>
<td>Max Parse Time (secs)</td>
<td>Total Parse Time (secs)</td>
<td>Translation Failures</td>
<td class="name">Directory</td>
</tr>
</thead>
<tbody>
{.repeated section @}
<tr>
<td>{num_files|commas}</td>
<td>{max_lines|commas}</td>
<td>{num_lines|commas}</td>
<!-- <td>{lines_parsed|commas}</td> -->
{.parse_failed?}
<td class="fail">{parse_failed|commas}</td>
{.or}
<td class="ok">{parse_failed|commas}</td>
{.end}
<td>{max_parse_secs|printf %.2f}</td>
<td>{parse_proc_secs|printf %.2f}</td>

{.osh2oil_failed?}
<!-- <td class="fail">{osh2oil_failed|commas}</td> -->
<td>{osh2oil_failed|commas}</td>
{.or}
<!-- <td class="ok">{osh2oil_failed|commas}</td> -->
<td>{osh2oil_failed|commas}</td>
{.end}

<td class="name">
<a href="{name|htmltag}/index.html">{name|html}/</a>
</td>
</tr>
{.end}
</tbody>
</table>
{.end}

<p>
</p>

{.section files}
<table id="files">
<colgroup> <!-- for table-sort.js -->
<col type="case-insensitive">
<col type="number">
<col type="case-insensitive">
<col type="number">
<col type="case-insensitive">
<col type="case-insensitive">
</colgroup>
<thead>
<tr>
<td>Side By Side</td>
<td>Lines</td>
<td>Parsed?</td>
<td>Parse Process Time (secs)</td>
<td>Translated?</td>
<td class="name">Filename</td>
</tr>
</thead>
<tbody>
{.repeated section @}
<tr>
<td>
<a href="{base_url}osh-to-oil.html#{rel_path|htmltag}/{name|htmltag}">view</a>
</td>
<td>{num_lines|commas}</td>
<td>
{.parse_failed?}
<a class="fail" href="#stderr_parse_{name}">FAIL</a>
<td>{parse_proc_secs}</td>
{.or}
<span class="ok">OK</span>
<td>{parse_proc_secs}</td>
{.end}
</td>

<td>
{.osh2oil_failed?}
<a class="fail" href="#stderr_osh2oil_{name}">FAIL</a>
{.or}
<a class="ok" href="{name}__ysh.txt">OK</a>
{.end}
</td>
<td class="name">
<a href="{name|htmltag}.txt">{name|html}</a>
</td>
</tr>
{.end}
</tbody>
</table>
{.end}

{.if test empty}
<i>(empty dir)</i>
{.end}

{.section stderr}
<h2>stderr</h2>

<table id="stderr">

{.repeated section @}
<tr>
<td>
<a name="stderr_{action}_{name|htmltag}"></a>
{.if test parsing}
Parsing {name|html}
{.or}
Translating {name|html}
{.end}
</td>
<td>
<pre>
{contents|html}
</pre>
</td>
</tr>
{.end}

</table>
{.end}

{.if test top_level_links}
<a href="version-info.txt">Date and OSH version</a>
{.end}

<!-- page globals -->
<script type="text/javascript">
var gUrlHash = new UrlHash(location.hash);
var gTableStates = {};
var kStatusElem = document.getElementById('status');

var gTables = [];
var e1 = document.getElementById('dirs');
var e2 = document.getElementById('files');

// If no hash, "redirect" to a state where we sort ascending by dir name and
// filename. TODO: These column numbers are a bit fragile.
var params = [];
if (e1) {
gTables.push(e1);
params.push('t:dirs=8a');
}
if (e2) {
gTables.push(e2);
params.push('t:files=7a');
}

function initPage(urlHash, gTables, tableStates, statusElem) {
makeTablesSortable(urlHash, gTables, tableStates);
/* Disable for now, this seems odd? Think about mutability of gUrlHash.
if (location.hash === '') {
document.location = '#' + params.join('&');
gUrlHash = new UrlHash(location.hash);
}
*/
updateTables(urlHash, tableStates, statusElem);
}

function onHashChange(urlHash, tableStates, statusElem) {
updateTables(urlHash, tableStates, statusElem);
}
</script>
""")
|
378 |
|
379 |
|
def log(msg, *args):
  """Print a %-formatted message to stderr."""
  # A falsy/empty msg is printed as-is, with no % formatting applied.
  text = msg % args if msg else msg
  print(text, file=sys.stderr)
|
384 |
|
385 |
|
class DirNode(object):  # new-style class, consistent with Failures(object)
  """Entry in the file system tree.

  Attributes:
    files: filename -> stats dict for success/failure, time, etc.
    dirs: subdir name -> child DirNode object
    subtree_stats: stat name -> value aggregated over the whole subtree
    stderr: list of {parsing, action, name, contents} dicts (see UpdateNodes)
  """

  def __init__(self):
    # type: () -> None
    self.files = {}  # filename -> stats for success/failure, time, etc.
    self.dirs = {}  # subdir name -> DirNode object

    self.subtree_stats = {}  # name -> value

    # show all the non-empty stderr here?
    # __osh2oil.stderr.txt
    # __parse.stderr.txt
    self.stderr = []
|
399 |
|
400 |
|
def UpdateNodes(node, path_parts, file_stats):
  """
  Create a file node and update the stats of every directory on its path.

  Args:
    node: DirNode at the current level.
    path_parts: remaining path components; the last one names the file.
    file_stats: stats dict for one file.  NOTE: mutated -- 'parse_stderr'
      and 'osh2oil_stderr' are popped off and moved into node.stderr.
  """
  first = path_parts[0]
  rest = path_parts[1:]

  # items() rather than py2-only iteritems(), so this also runs under
  # Python 3 (items() exists on both).
  for name, value in file_stats.items():
    # Sum numerical properties, but not strings
    if isinstance(value, (int, float)):
      if name in node.subtree_stats:
        node.subtree_stats[name] += value
      else:
        # NOTE: Could be int or float!!!
        node.subtree_stats[name] = value

  # Calculate maximums
  m = node.subtree_stats.get('max_parse_secs', 0.0)
  node.subtree_stats['max_parse_secs'] = max(m, file_stats['parse_proc_secs'])

  m = node.subtree_stats.get('max_lines', 0)  # integer
  node.subtree_stats['max_lines'] = max(m, file_stats['num_lines'])

  if rest:  # update an intermediate node
    if first in node.dirs:
      child = node.dirs[first]
    else:
      child = DirNode()
      node.dirs[first] = child

    UpdateNodes(child, rest, file_stats)
  else:
    # TODO: Put these in different sections? Or least one below the other?

    # Include stderr if non-empty, or if FAILED
    parse_stderr = file_stats.pop('parse_stderr')
    if parse_stderr or file_stats['parse_failed']:
      node.stderr.append({
          'parsing': True,
          'action': 'parse',
          'name': first,
          'contents': parse_stderr,
      })
    osh2oil_stderr = file_stats.pop('osh2oil_stderr')

    # TODO: Could disable this with a flag to concentrate on parse errors.
    # Or just show parse errors all in one file.
    if 1:
      if osh2oil_stderr or file_stats['osh2oil_failed']:
        node.stderr.append({
            'parsing': False,
            'action': 'osh2oil',
            'name': first,
            'contents': osh2oil_stderr,
        })

    # Attach to this dir
    node.files[first] = file_stats
|
460 |
|
461 |
|
def DebugPrint(node, indent=0):
  """Recursively dump a DirNode tree to stdout (debugging aid).

  Args:
    node: DirNode (anything with .files and .dirs dicts works).
    indent: current nesting depth, controls leading whitespace.
  """
  ind = indent * '  '
  #print('FILES', node.files.keys())
  for name in node.files:
    print('%s%s - %s' % (ind, name, node.files[name]))
  # items() rather than py2-only iteritems(), for Python 3 compatibility.
  for name, child in node.dirs.items():
    print('%s%s/ - %s' % (ind, name, child.subtree_stats))
    DebugPrint(child, indent=indent + 1)
|
471 |
|
472 |
|
def WriteJsonFiles(node, out_dir):
  """Write a index.json file for every directory.

  NOTE(review): currently disabled -- the body raises AssertionError
  unconditionally because node.dir_totals no longer exists (presumably
  replaced by subtree_stats -- TODO confirm).  The only call site is
  commented out in main().
  """
  path = os.path.join(out_dir, 'index.json')
  with open(path, 'w') as f:
    raise AssertionError  # fix dir_totals
    # Unreachable until the above is fixed.
    d = {'files': node.files, 'dirs': node.dir_totals}
    json.dump(d, f)

  log('Wrote %s', path)

  # Recurse into subdirectories (py2-only iteritems, like the rest of the
  # original file).
  for name, child in node.dirs.iteritems():
    WriteJsonFiles(child, os.path.join(out_dir, name))
|
485 |
|
486 |
|
def MakeNav(rel_path, root_name='WILD', offset=0):
  """Build breadcrumb data for NAV_TEMPLATE.

  Args:
    rel_path: page path relative to the root; '' means the root itself.
    root_name: label for the root crumb.
    offset: for doctools/src_tree.py to render files

  Returns:
    List of {'anchor': label, 'link': url or None}; the final entry is the
    current page and carries no link.
  """
  assert not rel_path.startswith('/'), rel_path
  assert not rel_path.endswith('/'), rel_path
  # Filtering drops empty components (e.g. splitting '' yields ['']).
  crumbs = [root_name] + [seg for seg in rel_path.split('/') if seg]
  last = len(crumbs) - 1
  return [
      {'anchor': label,
       # Everything but the current page links back up to an index.html.
       'link': (None if depth == last
                else '../' * (last - depth + offset) + 'index.html')}
      for depth, label in enumerate(crumbs)
  ]
|
506 |
|
507 |
|
def _Lower(s):
  """Sort key for case-insensitive ordering of file/dir names."""
  return s.lower()
|
510 |
|
511 |
|
def WriteHtmlFiles(node, out_dir, rel_path='', base_url=''):
  """Write an index.html file for every directory, recursively.

  Args:
    node: DirNode subtree to render.
    out_dir: directory that receives this node's index.html.
    rel_path: path of this node relative to the root ('' at the root).
    base_url: relative URL prefix back up to the root ('' at the root,
      '../' one level down, and so on).

  NOTE:
  - osh-to-oil.html lives at $base_url
  - table-sort.js lives at $base_url/../table-sort.js

  wild/
    table-sort.js
    table-sort.css
    www/
      index.html
      osh-to-oil.html

  wild/
    table-sort.js
    table-sort.css
    wild.wwz/  # Zip file
      index.html
      osh-to-oil.html

  wwz latency is subject to caching headers.
  """
  # Rows for the 'files' table, sorted case-insensitively.
  files = []
  for name in sorted(node.files, key=_Lower):
    stats = node.files[name]
    entry = dict(stats)
    entry['name'] = name
    # TODO: This should be internal time
    entry['lines_per_sec'] = entry['lines_parsed'] / entry['parse_proc_secs']
    files.append(entry)

  # Rows for the 'dirs' table, one per subdirectory.
  dirs = []
  for name in sorted(node.dirs, key=_Lower):
    entry = dict(node.dirs[name].subtree_stats)
    entry['name'] = name
    # TODO: This should be internal time
    entry['lines_per_sec'] = entry['lines_parsed'] / entry['parse_proc_secs']
    dirs.append(entry)

  # TODO: Is there a way to make this less redundant?
  st = node.subtree_stats
  try:
    st['lines_per_sec'] = st['lines_parsed'] / st['parse_proc_secs']
  except KeyError:
    # This usually means there were ZERO files.
    print(node, st, repr(rel_path), file=sys.stderr)
    raise

  data = {
      'rel_path': rel_path,
      'subtree_stats': node.subtree_stats,  # redundant totals
      'files': files,
      'dirs': dirs,
      'base_url': base_url,
      'stderr': node.stderr,
      'nav': MakeNav(rel_path),
  }
  # Hack to add links for top level page:
  if rel_path == '':
    data['top_level_links'] = True

  group = PAGE_TEMPLATES['LISTING']
  body = BODY_STYLE.expand(data, group=group)

  path = os.path.join(out_dir, 'index.html')
  with open(path, 'w') as f:
    f.write(body)

  log('Wrote %s', path)

  # Recursive.  items() rather than py2-only iteritems(), so this also runs
  # under Python 3.
  for name, child in node.dirs.items():
    child_out = os.path.join(out_dir, name)
    child_rel = os.path.join(rel_path, name)
    child_base = base_url + '../'
    WriteHtmlFiles(child, child_out, rel_path=child_rel, base_url=child_base)
|
589 |
|
590 |
|
591 | def _ReadTaskFile(path):
|
592 | """
|
593 | Parses the a file that looks like '0 0.11', for the status code and timing.
|
594 | This is output by test/common.sh run-task-with-status.
|
595 | """
|
596 | try:
|
597 | with open(path) as f:
|
598 | parts = f.read().split()
|
599 | status, secs = parts
|
600 | except ValueError as e:
|
601 | log('ERROR reading %s: %s', path, e)
|
602 | raise
|
603 | # Turn it into pass/fail
|
604 | num_failed = 1 if int(status) >= 1 else 0
|
605 | return num_failed, float(secs)
|
606 |
|
607 |
|
608 | def _ReadLinesToSet(path):
|
609 | """Read blacklist files like not-shell.txt and not-osh.txt.
|
610 |
|
611 | TODO: Consider adding globs here? There are a lot of FreeBSD and illumos
|
612 | files we want to get rid of.
|
613 |
|
614 | Or we could probably do that in the original 'find' expression.
|
615 | """
|
616 | result = set()
|
617 | if not path:
|
618 | return result
|
619 |
|
620 | with open(path) as f:
|
621 | for line in f:
|
622 | # Allow comments. We assume filenames don't have #
|
623 | i = line.find('#')
|
624 | if i != -1:
|
625 | line = line[:i]
|
626 |
|
627 | line = line.strip()
|
628 | if not line: # Lines that are blank or only comments.
|
629 | continue
|
630 |
|
631 | result.add(line)
|
632 |
|
633 | return result
|
634 |
|
635 |
|
def SumStats(stdin, in_dir, not_shell, not_osh, root_node, failures):
  """Reads pairs of paths from stdin, and updates root_node.

  Args:
    stdin: iterable of lines, each 'rel_path abs_path'.
    in_dir: directory holding the raw task output files for each rel_path.
    not_shell: set of rel_paths known not to be shell at all.
    not_osh: set of rel_paths known to be shell but not OSH.
    root_node: DirNode that per-file stats are aggregated into.
    failures: Failures instance collecting per-category failure records.

  Raises:
    RuntimeError: if a path appears in both not_shell and not_osh.
  """
  # Collect work into dirs
  for line in stdin:
    rel_path, abs_path = line.split()
    #print proj, '-', abs_path, '-', rel_path

    raw_base = os.path.join(in_dir, rel_path)
    st = {}

    # Files on either blacklist are "expected failures" below.
    st['not_shell'] = 1 if rel_path in not_shell else 0
    st['not_osh'] = 1 if rel_path in not_osh else 0
    if st['not_shell'] and st['not_osh']:
      raise RuntimeError(
          "%r can't be in both not-shell.txt and not-osh.txt" % rel_path)

    expected_failure = bool(st['not_shell'] or st['not_osh'])

    parse_task_path = raw_base + '__parse.task.txt'
    parse_failed, st['parse_proc_secs'] = _ReadTaskFile(
        parse_task_path)
    # Expected failures are not counted as parse failures.
    st['parse_failed'] = 0 if expected_failure else parse_failed

    with open(raw_base + '__parse.stderr.txt') as f:
      st['parse_stderr'] = f.read()

    if st['not_shell']:
      failures.not_shell.append(
          {'rel_path': rel_path, 'stderr': st['parse_stderr']}
      )
    if st['not_osh']:
      failures.not_osh.append(
          {'rel_path': rel_path, 'stderr': st['parse_stderr']}
      )
    if st['parse_failed']:
      failures.parse_failed.append(
          {'rel_path': rel_path, 'stderr': st['parse_stderr']}
      )

    osh2oil_task_path = raw_base + '__ysh-ify.task.txt'
    osh2oil_failed, st['osh2oil_proc_secs'] = _ReadTaskFile(
        osh2oil_task_path)

    # Only count translation failures if the parse succeeded!
    st['osh2oil_failed'] = osh2oil_failed if not parse_failed else 0

    with open(raw_base + '__ysh-ify.stderr.txt') as f:
      st['osh2oil_stderr'] = f.read()

    if st['osh2oil_failed']:
      failures.osh2oil_failed.append(
          {'rel_path': rel_path, 'stderr': st['osh2oil_stderr']}
      )

    wc_path = raw_base + '__wc.txt'
    with open(wc_path) as f:
      st['num_lines'] = int(f.read().split()[0])
    # For lines per second calculation
    st['lines_parsed'] = 0 if st['parse_failed'] else st['num_lines']

    st['num_files'] = 1

    path_parts = rel_path.split('/')
    #print path_parts
    UpdateNodes(root_node, path_parts, st)
|
701 |
|
702 |
|
class Failures(object):
  """Simple object that gets transformed to HTML and text.

  Each entry is a dict {'rel_path': ..., 'stderr': ...}, appended by
  SumStats() as it scans task output.
  """

  def __init__(self):
    self.parse_failed = []  # scripts OSH could not parse
    self.osh2oil_failed = []  # scripts that parsed but didn't translate
    self.not_shell = []  # expected failures: not shell at all
    self.not_osh = []  # expected failures: shell, but not OSH

  def Write(self, out_dir):
    """Write per-category .txt lists and .html reports into out_dir."""
    # Plain-text lists: one rel_path per line.
    with open(os.path.join(out_dir, 'parse-failed.txt'), 'w') as f:
      for failure in self.parse_failed:
        print(failure['rel_path'], file=f)

    with open(os.path.join(out_dir, 'osh2oil-failed.txt'), 'w') as f:
      for failure in self.osh2oil_failed:
        print(failure['rel_path'], file=f)

    base_url = ''

    # The four HTML reports share the FAILED template, so generate them
    # from a table instead of four copies of the same with-block.
    pages = [
        ('not-shell.html', 'not-shell', self.not_shell),
        ('not-osh.html', 'not-osh', self.not_osh),
        ('parse-failed.html', 'parse', self.parse_failed),
        ('osh2oil-failed.html', 'osh2oil', self.osh2oil_failed),
    ]
    for filename, task, failure_list in pages:
      with open(os.path.join(out_dir, filename), 'w') as f:
        data = {
            'task': task, 'failures': failure_list, 'base_url': base_url
        }
        body = BODY_STYLE.expand(data, group=PAGE_TEMPLATES['FAILED'])
        f.write(body)
|
750 |
|
751 |
|
def Options():
  """Build and return the optparse.OptionParser for this script."""
  parser = optparse.OptionParser('wild_report.py [options] ACTION...')
  parser.add_option(
      '-v', '--verbose', dest='verbose', action='store_true', default=False,
      help='Show details about test execution')
  parser.add_option(
      '--not-shell', default=None,
      help="A file that contains a list of files that are known to be invalid "
      "shell")
  parser.add_option(
      '--not-osh', default=None,
      help="A file that contains a list of files that are known to be invalid "
      "under the OSH language.")
  return parser
|
767 |
|
768 |
|
def main(argv):
  """Entry point.

  Usage: wild_report.py [options] summarize-dirs IN_DIR OUT_DIR
  Reads 'rel_path abs_path' pairs from stdin (see SumStats).
  """
  o = Options()
  (opts, argv) = o.parse_args(argv)

  # NOTE: argv still contains the program name at index 0 (main is called
  # with sys.argv), so the action is argv[1].
  action = argv[1]

  if action == 'summarize-dirs':
    in_dir = argv[2]
    out_dir = argv[3]

    not_shell = _ReadLinesToSet(opts.not_shell)
    not_osh = _ReadLinesToSet(opts.not_osh)

    # lines and size, oops

    # TODO: Need read the manifest instead, and then go by dirname() I guess
    # I guess it is a BFS so you can just assume?
    # os.path.dirname() on the full path?
    # Or maybe you need the output files?

    root_node = DirNode()
    failures = Failures()
    SumStats(sys.stdin, in_dir, not_shell, not_osh, root_node, failures)

    failures.Write(out_dir)

    # Debug print
    #DebugPrint(root_node)
    #WriteJsonFiles(root_node, out_dir)

    WriteHtmlFiles(root_node, out_dir)

  else:
    raise RuntimeError('Invalid action %r' % action)
|
803 |
|
804 |
|
if __name__ == '__main__':
  try:
    main(sys.argv)
  except RuntimeError as e:
    # Expected errors (bad usage, conflicting blacklists) get a clean
    # one-line message instead of a traceback.
    print('FATAL: %s' % e, file=sys.stderr)
    sys.exit(1)
|
811 |
|
812 |
|
813 | # vim: sw=2
|