OILS / jsontestsuite / run_tests.py

#!/usr/bin/env python3

import io
import os
import os.path
import subprocess
import sys
import json

from os import listdir
from time import strftime

BASE_DIR = os.path.dirname(os.path.realpath(__file__))
PARSERS_DIR = os.path.join(BASE_DIR, "parsers")
TEST_CASES_DIR_PATH = os.path.join(BASE_DIR, "test_parsing")
LOGS_DIR_PATH = os.path.join(BASE_DIR, "results")
LOG_FILENAME = "logs.txt"
LOG_FILE_PATH = os.path.join(LOGS_DIR_PATH, LOG_FILENAME)

INVALID_BINARY_FORMAT = 8
BAD_CPU_TYPE = 86

programs = {
    # "Awk JSON.awk busybox":
    #     {
    #         "url":"https://github.com/step-/JSON.awk",
    #         "commands":["/bin/busybox", "awk", "-f", os.path.join(PARSERS_DIR, "test_JSON.awk", "JSON-busybox.awk")]
    #     },
    # "Awk JSON.awk gawk POSIX":
    #     {
    #         "url":"https://github.com/step-/JSON.awk",
    #         "commands":["/usr/bin/gawk", "--posix", "-f", os.path.join(PARSERS_DIR, "test_JSON.awk", "JSON.awk")]
    #     },
    "Awk JSON.awk gawk":
        {
            "url":"https://github.com/step-/JSON.awk",
            "commands":["/usr/bin/gawk", "-f", os.path.join(PARSERS_DIR, "test_JSON.awk", "JSON.awk")]
        },
    # "Awk JSON.awk mawk":
    #     {
    #         "url":"https://github.com/step-/JSON.awk",
    #         "commands":["/usr/bin/mawk", "-f", os.path.join(PARSERS_DIR, "test_JSON.awk", "callbacks.awk"), "-f", os.path.join(PARSERS_DIR, "test_JSON.awk", "JSON.awk")]
    #     },
    "Bash JSON.sh 2016-08-12":
        {
            "url":"https://github.com/dominictarr/JSON.sh",
            "commands":[os.path.join(PARSERS_DIR, "test_Bash_JSON/JSON.sh")],
            "use_stdin":True
        },
    # "R rjson":
    #     {
    #         "url":"",
    #         "commands":["/usr/local/bin/RScript", os.path.join(PARSERS_DIR, "test_rjson.r")]
    #     },
    # "R jsonlite":
    #     {
    #         "url":"",
    #         "commands":["/usr/local/bin/RScript", os.path.join(PARSERS_DIR, "test_jsonlite.r")]
    #     },
    # "Obj-C JSONKit":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_JSONKit/bin/test-JSONKit")]
    #     },
    # "Obj-C Apple NSJSONSerialization":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_ObjCNSJSONSerializer/bin/test_ObjCNSJSONSerializer")]
    #     },
    # "Obj-C TouchJSON":
    #     {
    #         "url":"https://github.com/TouchCode/TouchJSON",
    #         "commands":[os.path.join(PARSERS_DIR, "test_TouchJSON/bin/test_TouchJSON")]
    #     },
    # "Obj-C SBJSON 4.0.3":
    #     {
    #         "url":"https://github.com/stig/json-framework",
    #         "commands":[os.path.join(PARSERS_DIR, "test_SBJSON_4_0_3/bin/test_sbjson")]
    #     },
    # "Obj-C SBJSON 4.0.4":
    #     {
    #         "url":"https://github.com/stig/json-framework",
    #         "commands":[os.path.join(PARSERS_DIR, "test_SBJSON_4_0_4/bin/test_sbjson")]
    #     },
    # "Obj-C SBJson 5.0.0":
    #     {
    #         "url":"https://github.com/stig/json-framework",
    #         "commands":[os.path.join(PARSERS_DIR, "test_SBJson_5_0_0/bin/test_sbjson")]
    #     },
    # "Go 1.7.1":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_go/test_json")]
    #     },
    # "Zig 0.8.0-dev.1354+081698156":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_zig/test_json")]
    #     },
    # "Free Pascal fcl-json":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_fpc/test_json")]
    #     },
    # "Xidel Internet Tools":
    #     {
    #         "url":"http://www.videlibri.de/xidel.html",
    #         "commands":["/usr/bin/env", "xidel", "--input-format=json-strict", "-e=."]
    #     },
    # "Lua JSON 20160916.19":
    #     {
    #         "url":"http://regex.info/blog/lua/json",
    #         "commands":["/usr/local/bin/lua", os.path.join(PARSERS_DIR, "test_Lua_JSON/test_JSON.lua")]
    #     },
    # "Lua dkjson":
    #     {
    #         "url":"http://dkolf.de/src/dkjson-lua.fsl/home",
    #         "commands":["/usr/local/bin/lua", os.path.join(PARSERS_DIR, "test_dkjson.lua")]
    #     },
    # "Ruby":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/env", "ruby", os.path.join(PARSERS_DIR, "test_json.rb")]
    #     },
    # "Ruby regex":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/env", "ruby", os.path.join(PARSERS_DIR, "test_json_re.rb")]
    #     },
    # "Ruby Yajl":
    #     {
    #         "url":"https://github.com/brianmario/yajl-ruby",
    #         "commands":["/usr/bin/env", "ruby", os.path.join(PARSERS_DIR, "test_yajl.rb")]
    #     },
    # "Ruby Oj (strict mode)":
    #     {
    #         "url":"https://github.com/ohler55/oj",
    #         "commands":["/usr/bin/env", "ruby", os.path.join(PARSERS_DIR, "test_oj_strict.rb")]
    #     },
    # "Ruby Oj (compat mode)":
    #     {
    #         "url":"https://github.com/ohler55/oj",
    #         "commands":["/usr/bin/env", "ruby", os.path.join(PARSERS_DIR, "test_oj_compat.rb")]
    #     },
    # "Crystal":
    #     {
    #         "url":"https://github.com/crystal-lang/crystal",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json_cr")]
    #     },
    # "JavaScript":
    #     {
    #         "url":"",
    #         "commands":["/usr/local/bin/node", os.path.join(PARSERS_DIR, "test_json.js")]
    #     },
    # "Python 2.7.10":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/python", os.path.join(PARSERS_DIR, "test_json.py")]
    #     },
    # "Python 3.5.2":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/env", "python3.5", os.path.join(PARSERS_DIR, "test_json.py")]
    #     },
    # "Python cjson 1.10": # pip install cjson
    #     {
    #         "url":"https://pypi.python.org/pypi/python-cjson",
    #         "commands":["/usr/bin/python", os.path.join(PARSERS_DIR, "test_cjson.py")]
    #     },
    # "Python ujson 1.35": # pip install ujson
    #     {
    #         "url":"https://pypi.python.org/pypi/ujson",
    #         "commands":["/usr/bin/python", os.path.join(PARSERS_DIR, "test_ujson.py")]
    #     },
    # "Python simplejson 3.10": # pip install simplejson
    #     {
    #         "url":"https://pypi.python.org/pypi/simplejson",
    #         "commands":["/usr/bin/python", os.path.join(PARSERS_DIR, "test_simplejson.py")]
    #     },
    # "Python demjson 2.2.4": # pip install demjson
    #     {
    #         "url":"https://pypi.python.org/pypi/demjson",
    #         "commands":["/usr/bin/python", os.path.join(PARSERS_DIR, "test_demjson.py")]
    #     },
    # "Python demjson 2.2.4 (py3)": # pip install demjson
    #     {
    #         "url":"https://pypi.python.org/pypi/demjson",
    #         "commands":["/usr/bin/env", "python3.5", os.path.join(PARSERS_DIR, "test_demjson.py")]
    #     },
    # "Python demjson 2.2.4 (jsonlint)": # pip install demjson
    #     {
    #         "url":"https://pypi.python.org/pypi/demjson",
    #         "commands":["/usr/bin/env", "jsonlint", "--quiet", "--strict", "--allow=non-portable,duplicate-keys,zero-byte"]
    #     },
    # "Perl Cpanel::JSON::XS":
    #     {
    #         "url":"https://metacpan.org/pod/Cpanel::JSON::XS",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_cpanel_json_xs.pl")]
    #     },
    # "Perl JSON::Parse":
    #     {
    #         "url":"https://metacpan.org/pod/JSON::Parse",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_json_parse.pl")]
    #     },
    # "Perl JSON::PP": # part of default install in perl >= v5.14
    #     {
    #         "url":"https://metacpan.org/pod/JSON::PP",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_json_pp.pl")]
    #     },
    # "Perl JSON::SL":
    #     {
    #         "url":"https://metacpan.org/pod/JSON::SL",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_json_sl.pl")]
    #     },
    # "Perl JSON::Tiny":
    #     {
    #         "url":"https://metacpan.org/pod/JSON::Tiny",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_json_tiny.pl")]
    #     },
    # "Perl JSON::XS":
    #     {
    #         "url":"https://metacpan.org/pod/JSON::XS",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_json_xs.pl")]
    #     },
    # "Perl MarpaX::ESLIF::ECMA404":
    #     {
    #         "url":"http://metacpan.org/pod/MarpaX::ESLIF::ECMA404",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_marpax_eslif_ecma404.pl")]
    #     },
    # "Perl Mojo::JSON":
    #     {
    #         "url":"http://metacpan.org/pod/Mojo::JSON",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_mojo_json.pl")]
    #     },
    # "Perl Pegex::JSON":
    #     {
    #         "url":"http://metacpan.org/pod/Pegex::JSON",
    #         "commands":["/usr/bin/perl", os.path.join(PARSERS_DIR, "test_pegex_json.pl")]
    #     },
    # "PHP 5.5.36":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/php", os.path.join(PARSERS_DIR, "test_json.php")]
    #     },
    # "Swift Freddy 2.1.0":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_Freddy_2_1_0/bin/test_Freddy_2_1_0")]
    #     },
    # "Swift Freddy 20160830":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_Freddy_20160830/bin/test_Freddy")]
    #     },
    # "Swift Freddy 20161018":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_Freddy_20161018/bin/test_Freddy")]
    #     },
    # "Swift Freddy 20170118":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_Freddy_20170118/bin/test_Freddy")]
    #     },
    # "Swift PMJSON 1.1.0":
    #     {
    #         "url":"https://github.com/postmates/PMJSON",
    #         "commands":[os.path.join(PARSERS_DIR, "test_PMJSON_1_1_0/bin/test_PMJSON")]
    #     },
    # "Swift PMJSON 1.2.0":
    #     {
    #         "url":"https://github.com/postmates/PMJSON",
    #         "commands":[os.path.join(PARSERS_DIR, "test_PMJSON_1_2_0/bin/test_PMJSON")]
    #     },
    # "Swift PMJSON 1.2.1":
    #     {
    #         "url":"https://github.com/postmates/PMJSON",
    #         "commands":[os.path.join(PARSERS_DIR, "test_PMJSON_1_2_1/bin/test_PMJSON")]
    #     },
    # "Swift STJSON":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_STJSON/bin/STJSON")]
    #     },
    # "Swift Apple JSONSerialization":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test-AppleJSONSerialization/bin/test-AppleJSONSerialization")]
    #     },
    # "C pdjson 20170325":
    #     {
    #         "url":"https://github.com/skeeto/pdjson",
    #         "commands":[os.path.join(PARSERS_DIR, "test_pdjson/bin/test_pdjson")]
    #     },
    # "C jsmn":
    #     {
    #         "url":"https://github.com/zserge/jsmn",
    #         "commands":[os.path.join(PARSERS_DIR, "test_jsmn/bin/test_jsmn")]
    #     },
    # "C jansson":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_jansson/bin/test_jansson")]
    #     },
    # "C JSON Checker":
    #     {
    #         "url":"http://www.json.org/JSON_checker/",
    #         "commands":[os.path.join(PARSERS_DIR, "test_jsonChecker/bin/jsonChecker")]
    #     },
    # "C JSON Checker 2":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_jsonChecker2/bin/jsonChecker2")]
    #     },
    # "C JSON Checker 20161111":
    #     {
    #         "url":"https://github.com/douglascrockford/JSON-c",
    #         "commands":[os.path.join(PARSERS_DIR, "test_jsonChecker20161111/bin/jsonChecker20161111")]
    #     },
    # "C++ sajson 20170724":
    #     {
    #         "url":"https://github.com/chadaustin/sajson",
    #         "commands":[os.path.join(PARSERS_DIR, "test_sajson_20170724/bin/test_sajson")]
    #     },
    # "C ccan":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_ccan_json/bin/test_ccan")]
    #     },
    # "C cJSON 20160806":
    #     {
    #         "url":"https://github.com/DaveGamble/cJSON",
    #         "commands":[os.path.join(PARSERS_DIR, "test_cJSON_20160806/bin/test_cJSON")]
    #     },
    # "C cJSON 1.7.3":
    #     {
    #         "url":"https://github.com/DaveGamble/cJSON",
    #         "commands":[os.path.join(PARSERS_DIR, "test_cJSON_1_7_3/bin/test_cJSON")]
    #     },
    # "C JSON-C":
    #     {
    #         "url":"https://github.com/json-c/json-c",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json-c/bin/test_json-c")]
    #     },
    # "C JSON Parser by udp":
    #     {
    #         "url":"https://github.com/udp/json-parser",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json-parser/bin/test_json-parser")]
    #     },
    # "C++ nlohmann JSON 20190718":
    #     {
    #         "url":"https://github.com/nlohmann/json/",
    #         "commands":[os.path.join(PARSERS_DIR, "test_nlohmann_json_20190718/bin/test_nlohmann_json")]
    #     },
    # "C++ RapidJSON 20170724":
    #     {
    #         "url":"https://github.com/miloyip/rapidjson",
    #         "commands":[os.path.join(PARSERS_DIR, "test_rapidjson_20170724/bin/test_rapidjson")]
    #     },
    # "Rust json-rust":
    #     {
    #         "url":"https://github.com/maciejhirsz/json-rust",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json-rust/target/debug/tj")]
    #     },
    # "Rust rustc_serialize::json":
    #     {
    #         "url":"https://doc.rust-lang.org/rustc-serialize/rustc_serialize/json/index.html",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json-rustc_serialize/rj/target/debug/rj")]
    #     },
    # "Rust serde_json":
    #     {
    #         "url":"https://github.com/serde-rs/json",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json-rust-serde_json/rj/target/debug/rj")]
    #     },
    # "Java json-simple 1.1.1":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_simple_json_1_1_1/TestJSONParsing.jar")]
    #     },
    # "Java org.json 2016-08-15":
    #     {
    #         "url":"https://github.com/stleary/JSON-java",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_org_json_2016_08/TestJSONParsing.jar")]
    #     },
    # "Java gson 2.7":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_gson_2_7/TestJSONParsing.jar")]
    #     },
    # "Java BFO v1":
    #     {
    #         "url":"https://github.com/faceless2/json",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_bfo/TestJSONParsing.jar")]
    #     },
    # "Java com.leastfixedpoint.json 1.0":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_com_leastfixedpoint_json_1_0/TestJSONParsing.jar")]
    #     },
    # "Java Jackson 2.8.4":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_jackson_2_8_4/TestJSONParsing.jar")]
    #     },
    # "Java JsonTree 0.5":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_json_tree/TestJSONParsing.jar")]
    #     },
    # "Scala Dijon 0.3.0":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_scala_dijon_0.3.0/target/scala-2.13/TestJSONParsing.jar")]
    #     },
    # "Java Mergebase Java2Json 2019.09.09":
    #     {
    #         "url":"https://github.com/mergebase/Java2Json",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_mergebase_json_2019_09_09/TestJSONParsing.jar")]
    #     },
    # "Java nanojson 1.0":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_nanojson_1_0/TestJSONParsing.jar")]
    #     },
    # "Java nanojson 1.1":
    #     {
    #         "url":"",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_nanojson_1_1/TestJSONParsing.jar")]
    #     },
    # "Java Actson 1.2.0":
    #     {
    #         "url":"https://github.com/michel-kraemer/actson",
    #         "commands":["/usr/bin/java", "-jar", os.path.join(PARSERS_DIR, "test_java_actson_1_2_0/TestJSONParsing.jar")]
    #     },
    # "Haskell Aeson 0.11.2.1":
    #     {
    #         "url":"https://github.com/bos/aeson",
    #         "commands":[os.path.join(PARSERS_DIR, "test_haskell-aeson/testaeson")]
    #     },
    # "OCaml Yojson":
    #     {
    #         "url":"https://github.com/mjambon/yojson",
    #         "commands":[os.path.join(PARSERS_DIR, "test_ocaml-yojson/testyojson")]
    #     },
    # "OCaml Orsetto":
    #     {
    #         "url":"https://bitbucket.org/jhw/orsetto",
    #         "commands":[os.path.join(PARSERS_DIR, "test_ocaml_orsetto/test_orsetto_json")]
    #     },
    # "Qt JSON":
    #     {
    #         "url":"",
    #         "commands":[os.path.join(PARSERS_DIR, "test_qt/test_qt")]
    #     },
    # "C ConcreteServer":
    #     {
    #         "url":" https://github.com/RaphaelPrevost/ConcreteServer",
    #         "commands":[os.path.join(PARSERS_DIR, "test_ConcreteServer/json_checker")]
    #     },
    # "Squeak JSON-tonyg":
    #     {
    #         "url":"http://www.squeaksource.com/JSON.html",
    #         "commands":[
    #             os.path.join(PARSERS_DIR, "test_Squeak_JSON_tonyg/Squeak.app/Contents/MacOS/Squeak"),
    #             "-headless", #<--optional
    #             os.path.join(PARSERS_DIR, "test_Squeak_JSON_tonyg/Squeak5.1-16549-32bit.image"),
    #             "test_JSON.st"
    #         ]
    #     },
    # ".NET Newtonsoft.Json 12.0.3":
    #     {
    #         "url":"http://www.newtonsoft.com/json",
    #         "setup":["dotnet", "build", "--configuration", "Release", os.path.join(PARSERS_DIR, "test_dotnet_newtonsoft/app.csproj")],
    #         "commands":["dotnet", os.path.join(PARSERS_DIR, "test_dotnet_newtonsoft/bin/Release/net5.0/app.dll")]
    #     },
    # ".NET System.Text.Json 5.0.0":
    #     {
    #         "url":"https://docs.microsoft.com/en-us/dotnet/api/system.text.json",
    #         "setup":["dotnet", "build", "--configuration", "Release", os.path.join(PARSERS_DIR, "test_dotnet_system_text_json/app.csproj")],
    #         "commands":["dotnet", os.path.join(PARSERS_DIR, "test_dotnet_system_text_json/bin/Release/net5.0/app.dll")]
    #     },
    # "Elixir Json":
    #     {
    #         "url":"https://github.com/cblage/elixir-json",
    #         "commands":[ os.path.join( PARSERS_DIR, "test_elixir_json/test_elixir_json") ]
    #     },
    # "Elixir ExJSON":
    #     {
    #         "url":"https://github.com/guedes/exjson",
    #         "commands":[ os.path.join( PARSERS_DIR, "test_elixir_exjson/test_elixir_exjson") ]
    #     },
    # "Elixir Poison":
    #     {
    #         "url":"https://github.com/devinus/poison",
    #         "commands":[ os.path.join( PARSERS_DIR, "test_elixir_poison/test_elixir_poison") ]
    #     },
    # "Elixir Jason":
    #     {
    #         "url":"https://github.com/michalmuskala/jason",
    #         "commands":[ os.path.join( PARSERS_DIR, "test_elixir_jason/test_elixir_jason") ]
    #     },
    # "Erlang Euneus":
    #     {
    #         "url":"https://github.com/williamthome/euneus",
    #         "commands":[ os.path.join( PARSERS_DIR, "test_erlang_euneus/test_erlang_euneus") ]
    #     },
    # "Nim":
    #     {
    #         "url":"http://nim-lang.org",
    #         "commands":[ os.path.join( PARSERS_DIR, "test_nim/test_json") ]
    #     },
    # "Swift JSON 20170522":
    #     {
    #         "url":"https://github.com/owensd/json-swift",
    #         "commands":[os.path.join(PARSERS_DIR, "test_json_swift_20170522/bin/json_swift")]
    #     },
    # "C++ nlohmann JSON 20190718":
    #     {
    #         "url":"https://github.com/nlohmann/json",
    #         "commands":[os.path.join(PARSERS_DIR, "test_nlohmann_json_20190718/bin/test_nlohmann_json")]
    #     },
    "Oils":
        {
            "url":"",
            "commands":[os.path.join(PARSERS_DIR, "test_oils.sh")]
        }
}
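# To test an additional parser, uncomment one of the entries above or add a new
# one to the dict: "commands" is run once per test file (with the file path
# appended unless "use_stdin" is set), and an optional "setup" command is run
# once beforehand.  A hypothetical entry would look like:
#
#     "My JSON parser":
#         {
#             "url":"https://example.com/my-json-parser",
#             "commands":[os.path.join(PARSERS_DIR, "test_my_parser/bin/test_my_parser")]
#         },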

def run_tests(restrict_to_path=None, restrict_to_program=None):
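    """Run every configured parser against each .json file in test_parsing/.

    A parser's exit status 0 is recorded as PASS, 1 as FAIL, and anything else
    as CRASH; runs longer than 5 seconds are recorded as TIMEOUT.  One line per
    noteworthy result is written to results/logs.txt.
    """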

    FNULL = open(os.devnull, 'w')
    log_file = open(LOG_FILE_PATH, 'w')

    prog_names = list(programs.keys())
    prog_names.sort()

    if isinstance(restrict_to_program, io.TextIOBase):
        restrict_to_program = json.load(restrict_to_program)

    if restrict_to_program:
        prog_names = filter(lambda x: x in restrict_to_program, prog_names)

    for prog_name in prog_names:
        d = programs[prog_name]

        url = d["url"]
        commands = d["commands"]
        setup = d.get("setup")
        if setup is not None:
            print("--", " ".join(setup))
            try:
                subprocess.call(setup)
            except Exception as e:
                print("-- skip", e)
                continue

        for root, dirs, files in os.walk(TEST_CASES_DIR_PATH):
            json_files = (f for f in files if f.endswith(".json"))
            for filename in json_files:

                if restrict_to_path:
                    restrict_to_filename = os.path.basename(restrict_to_path)
                    if filename != restrict_to_filename:
                        continue

                file_path = os.path.join(root, filename)

                my_stdin = FNULL

                use_stdin = "use_stdin" in d and d["use_stdin"]
                if use_stdin:
                    my_stdin = open(file_path, "rb")
                    a = commands
                else:
                    a = commands + [file_path]

                #print("->", a)
                print("--", " ".join(a))

                try:
                    status = subprocess.call(
                        a,
                        stdin=my_stdin,
                        stdout=FNULL,
                        stderr=subprocess.STDOUT,
                        timeout=5
                    )
                    #print("-->", status)
                except subprocess.TimeoutExpired:
                    print("timeout expired")
                    s = "%s\tTIMEOUT\t%s" % (prog_name, filename)
                    log_file.write("%s\n" % s)
                    print(s)
                    continue
                except FileNotFoundError as e:
                    print("-- skip non-existing", e.filename)
                    break
                except OSError as e:
                    if e.errno == INVALID_BINARY_FORMAT or e.errno == BAD_CPU_TYPE:
                        print("-- skip invalid-binary", commands[0])
                        break
                    raise e

                if use_stdin:
                    my_stdin.close()

                result = None
                if status == 0:
                    result = "PASS"
                elif status == 1:
                    result = "FAIL"
                else:
                    result = "CRASH"

                s = None
                if result == "CRASH":
                    s = "%s\tCRASH\t%s" % (prog_name, filename)
                elif filename.startswith("y_") and result != "PASS":
                    s = "%s\tSHOULD_HAVE_PASSED\t%s" % (prog_name, filename)
                elif filename.startswith("n_") and result == "PASS":
                    s = "%s\tSHOULD_HAVE_FAILED\t%s" % (prog_name, filename)
                elif filename.startswith("i_") and result == "PASS":
                    s = "%s\tIMPLEMENTATION_PASS\t%s" % (prog_name, filename)
                elif filename.startswith("i_") and result != "PASS":
                    s = "%s\tIMPLEMENTATION_FAIL\t%s" % (prog_name, filename)

                if s is not None:
                    print(s)
                    log_file.write("%s\n" % s)

    FNULL.close()
    log_file.close()

def f_underline_non_printable_bytes(bytes):
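    """Return an HTML rendering of the given byte string, with non-printable
    bytes shown as underlined hex values; inputs longer than 36 bytes are
    truncated."""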

    html = ""

    has_non_printable_characters = False

    for b in bytes:

        is_not_printable = b < 0x20 or b > 0x7E

        has_non_printable_characters |= is_not_printable

        if is_not_printable:
            html += "<U>%02X</U>" % b
        else:
            html += "%c" % b

    if has_non_printable_characters:
        try:
            html += " <=> %s" % bytes.decode("utf-8", errors='ignore')
        except:
            pass

    if len(bytes) > 36:
        return "%s(...)" % html[:36]

    return html

def f_status_for_lib_for_file(json_dir, results_dir):
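    """Read the .txt result logs in results_dir and return (d, libs), where
    d[json_path][parser] is the logged status and libs lists the parsers seen."""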

    txt_filenames = [f for f in listdir(results_dir) if f.endswith(".txt")]

    # comment to ignore some tests
    statuses = [
        "SHOULD_HAVE_FAILED",

        "SHOULD_HAVE_PASSED",
        "CRASH",

        "IMPLEMENTATION_FAIL",
        "IMPLEMENTATION_PASS",

        "TIMEOUT"
    ]

    d = {}
    libs = []

    for filename in txt_filenames:
        path = os.path.join(results_dir, filename)

        with open(path) as f:
            for l in f:
                comps = l.split("\t")
                if len(comps) != 3:
                    print("***", comps)
                    continue

                if comps[1] not in statuses:
                    print("-- unhandled status:", comps[1])

                (lib, status, json_filename) = (comps[0], comps[1], comps[2].rstrip())

                if lib not in libs:
                    libs.append(lib)

                json_path = os.path.join(TEST_CASES_DIR_PATH, json_filename)

                if json_path not in d:
                    d[json_path] = {}

                d[json_path][lib] = status

    return d, libs

def f_status_for_path_for_lib(json_dir, results_dir):
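    """Read the .txt result logs in results_dir and return the inverse mapping,
    d[parser][json_path] = status."""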

    txt_filenames = [f for f in listdir(results_dir) if f.endswith(".txt")]

    # comment to ignore some tests
    statuses = [
        "SHOULD_HAVE_FAILED",

        "SHOULD_HAVE_PASSED",
        "CRASH",

        "IMPLEMENTATION_FAIL",
        "IMPLEMENTATION_PASS",

        "TIMEOUT"

    ]

    d = {} # d['lib']['file'] = status

    for filename in txt_filenames:
        path = os.path.join(results_dir, filename)

        with open(path) as f:
            for l in f:
                comps = l.split("\t")
                if len(comps) != 3:
                    continue

                if comps[1] not in statuses:
                    #print("-- unhandled status:", comps[1])
                    continue

                (lib, status, json_filename) = (comps[0], comps[1], comps[2].rstrip())

                if lib not in d:
                    d[lib] = {}

                json_path = os.path.join(TEST_CASES_DIR_PATH, json_filename)

                d[lib][json_path] = status

    return d

def f_tests_with_same_results(libs, status_for_lib_for_file):
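    """Group test files that produced identical results across all parsers;
    returns a sorted list of (result signature, set of file paths) pairs."""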

    tests_with_same_results = {} #{ {lib1:status, lib2:status, lib3:status} : { filenames } }

    files = list(status_for_lib_for_file.keys())
    files.sort()

    for f in files:
        prefix = os.path.basename(f)[:1]
        lib_status_for_file = []
        for l in libs:
            if l in status_for_lib_for_file[f]:
                status = status_for_lib_for_file[f][l]
                lib_status = "%s_%s" % (status, l)
                lib_status_for_file.append(lib_status)
        results = " || ".join(lib_status_for_file)
        if results not in tests_with_same_results:
            tests_with_same_results[results] = set()
        tests_with_same_results[results].add(f)

    r = []
    for k,v in tests_with_same_results.items():
        r.append((k,v))
    r.sort()

    return r

def generate_report(report_path, keep_only_first_result_in_set = False):
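    """Write an HTML report of the logged results to report_path.  With
    keep_only_first_result_in_set, only one representative file is shown for
    each group of identical results."""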

    (status_for_lib_for_file, libs) = f_status_for_lib_for_file(TEST_CASES_DIR_PATH, LOGS_DIR_PATH)

    status_for_path_for_lib = f_status_for_path_for_lib(TEST_CASES_DIR_PATH, LOGS_DIR_PATH)

    tests_with_same_results = f_tests_with_same_results(libs, status_for_lib_for_file)

    with open(report_path, 'w', encoding='utf-8') as f:

        f.write("""<!DOCTYPE html>

<HTML>

<HEAD>
<TITLE>JSON Parsing Tests</TITLE>
<LINK rel="stylesheet" type="text/css" href="style.css">
<META charset="UTF-8">
</HEAD>

<BODY>
""")

        prog_names = list(programs.keys())
        prog_names.sort()

        libs = list(status_for_path_for_lib.keys())
        libs.sort()

        title = "JSON Parsing Tests"
        if keep_only_first_result_in_set:
            title += ", Pruned"
        else:
            title += ", Full"
        f.write("<H1>%s</H1>\n" % title)
        f.write('<P>Appendix to: seriot.ch <A HREF="http://www.seriot.ch/parsing_json.php">Parsing JSON is a Minefield</A> http://www.seriot.ch/parsing_json.php</P>\n')
        f.write("<PRE>%s</PRE>\n" % strftime("%Y-%m-%d %H:%M:%S"))

        f.write("""<H4>Contents</H4>
<OL>
<LI><A HREF="#color_scheme">Color Scheme</A>
<LI><A HREF="#all_results">Full Results</A>
<LI><A HREF="#results_by_parser">Results by Parser</A>""")
        f.write("<UL>\n")
        for i, prog in enumerate(prog_names):
            f.write(' <LI><A HREF="#%d">%s</A>\n' % (i, prog))
        f.write("</UL>\n</OL>\n")

        f.write("""
<A NAME="color_scheme"></A>
<H4>1. Color scheme:</H4>
<TABLE>
<TR><TD class="EXPECTED_RESULT">expected result</TD><TR>
<TR><TD class="SHOULD_HAVE_PASSED">parsing should have succeeded but failed</TD><TR>
<TR><TD class="SHOULD_HAVE_FAILED">parsing should have failed but succeeded</TD><TR>
<TR><TD class="IMPLEMENTATION_PASS">result undefined, parsing succeeded</TD><TR>
<TR><TD class="IMPLEMENTATION_FAIL">result undefined, parsing failed</TD><TR>
<TR><TD class="CRASH">parser crashed</TD><TR>
<TR><TD class="TIMEOUT">timeout</TD><TR>
</TABLE>
""")

        ###

        f.write('<A NAME="all_results"></A>\n')
        f.write("<H4>2. Full Results</H4>\n")
        f.write("<TABLE>\n")

        f.write(" <TR>\n")
        f.write(" <TH></TH>\n")
        for lib in libs:
            f.write(' <TH class="vertical"><DIV>%s</DIV></TH>\n' % lib)
        f.write(" <TH></TH>\n")
        f.write(" </TR>\n")

        for (k, file_set) in tests_with_same_results:

            ordered_file_set = list(file_set)
            ordered_file_set.sort()

            if keep_only_first_result_in_set:
                ordered_file_set = [ordered_file_set[0]]

            for path in [path for path in ordered_file_set if os.path.exists(path)]:

                f.write(" <TR>\n")
                f.write(' <TD>%s</TD>' % os.path.basename(path))

                status_for_lib = status_for_lib_for_file[path]
                bytes = open(path, "rb").read()

                for lib in libs:
                    if lib in status_for_lib:
                        status = status_for_lib[lib]
                        f.write(' <TD class="%s">%s</TD>' % (status, ""))
                    else:
                        f.write(' <TD class="EXPECTED_RESULT"></TD>')
                f.write(' <TD>%s</TD>' % f_underline_non_printable_bytes(bytes))
                f.write(" </TR>")

        f.write("</TABLE>\n")

        ###

        f.write('<A NAME="results_by_parser"></A>\n')
        f.write("<H4>3. Results by Parser</H4>")
        for i, prog in enumerate(prog_names):
            url = programs[prog]["url"]
            f.write("<P>\n")
            f.write('<A NAME="%d"></A>' % i)
            if len(url) > 0:
                f.write('<H4><A HREF="%s">%s</A></H4>\n' % (url, prog))
            else:
                f.write('<H4>%s</H4>\n' % prog)

            ###

            if prog not in status_for_path_for_lib:
                continue
            status_for_path = status_for_path_for_lib[prog]

            paths = list(status_for_path.keys())
            paths.sort()

            f.write('<TABLE>\n')

            f.write(" <TR>\n")
            f.write(" <TH></TH>\n")
            f.write(' <TH class="space"><DIV></DIV></TH>\n')
            f.write(" <TH></TH>\n")
            f.write(" </TR>\n")

            for path in paths:

                f.write(" <TR>\n")
                f.write(" <TD>%s</TD>" % os.path.basename(path))

                status_for_lib = status_for_lib_for_file[path]
                if os.path.exists(path):
                    bytes = open(path, "rb").read()
                else:
                    bytes = [ord(x) for x in "(MISSING FILE)"]

                if prog in status_for_lib:
                    status = status_for_lib[prog]
                    f.write(' <TD class="%s">%s</TD>' % (status, ""))
                else:
                    f.write(" <TD></TD>")
                f.write(" <TD>%s</TD>" % f_underline_non_printable_bytes(bytes))
                f.write(" </TR>")

            f.write('</TABLE>\n')
            f.write("</P>\n")

        ###

        f.write("""

</BODY>

</HTML>
""")
    if os.path.exists('/usr/bin/open'):
        os.system('/usr/bin/open "%s"' % report_path)

###

if __name__ == '__main__':

    restrict_to_path = None
    """
    if len(sys.argv) == 2:
        restrict_to_path = os.path.join(BASE_DIR, sys.argv[1])
        if not os.path.exists(restrict_to_path):
            print("-- file does not exist:", restrict_to_path)
            sys.exit(-1)
    """

    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('restrict_to_path', nargs='?', type=str, default=None)
    parser.add_argument('--filter', dest='restrict_to_program', type=argparse.FileType('r'), default=None)

    args = parser.parse_args()

    #args.restrict_to_program = ["C ConcreteServer"]

    run_tests(args.restrict_to_path, args.restrict_to_program)

    generate_report(os.path.join(BASE_DIR, "results/parsing.html"), keep_only_first_result_in_set = False)
    generate_report(os.path.join(BASE_DIR, "results/parsing_pruned.html"), keep_only_first_result_in_set = True)
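
# Example invocations (the file names below are illustrative):
#
#   ./run_tests.py                               # run every configured parser against every test file
#   ./run_tests.py test_parsing/y_object.json    # restrict the run to a single test file
#   ./run_tests.py --filter progs.json           # progs.json holds a JSON array of parser names to run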