Commit b174656 (parent: ad88be9)

migrated all tests to mocha+chai (in-browser test mode via tests/index.html is not working yet, but that wasn't available before either, so nothing is lost; tests should pass in Node via "make" or "make test", but currently do NOT)
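
The substance of the migration is mechanical: each exports-style test is rewrapped in mocha's describe/it blocks, and Node's assert is swapped for chai's compatible flavor. A minimal runnable sketch of the pattern (the parser below is a stand-in for illustration, not the real Jison fixtures):

    // before: picked up by the custom runner in tests/all-tests.js
    // exports["test return null"] = function () { ... };

    // after: discovered and run by mocha
    var assert = require("chai").assert;

    // stand-in for the Jison.Parser instances the real tests build
    var parser = { parse: function (input) { return null; } };

    describe("Parser Actions", function () {
        it("test return null", function () {
            assert.equal(parser.parse('x'), null, "semantic action");
        });
    });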

21 files changed: +557, -532 lines

Makefile (1 addition, 1 deletion)

@@ -38,7 +38,7 @@ deploy: site
 	git checkout master
 
 test:
-	node tests/all-tests.js
+	node_modules/.bin/mocha tests/
 
 web-examples: web/content/assets/js/calculator.js
 

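One caveat worth flagging (an observation about mocha 3.x defaults, not something stated in the commit): "mocha tests/" only picks up the .js files directly inside tests/, and subdirectories such as tests/parser/ are skipped unless the run is made recursive:

    node_modules/.bin/mocha --recursive tests/
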
package.json (5 additions, 4 deletions)

@@ -22,7 +22,7 @@
   "preferGlobal": true,
   "repository": {
     "type": "git",
-    "url": "git://github.com/GerHobbelt/jison.git"
+    "url": "https://github.com/GerHobbelt/jison.git"
   },
   "bugs": {
     "email": "[email protected]",
@@ -48,11 +48,12 @@
   "devDependencies": {
     "browserify": "14.0.0",
     "glob": "7.1.1",
-    "test": "0.6.0",
-    "uglify-js": "2.7.5"
+    "uglify-js": "2.7.5",
+    "chai": "3.5.0",
+    "mocha": "3.2.0"
   },
   "scripts": {
-    "test": "node tests/all-tests.js"
+    "test": "make test"
   },
   "homepage": "http://jison.org"
 }
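
With both changes in place, the npm and make entry points converge on the same runner; a usage sketch:

    $ npm install   # installs mocha and chai from devDependencies
    $ npm test      # runs "make test", which invokes node_modules/.bin/mocha tests/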

tests/all-tests.js (deleted; 0 additions, 5 deletions)

tests/parser/actions.js (56 additions, 53 deletions)

@@ -1,8 +1,10 @@
-var Jison = require("../setup").Jison,
-    RegExpLexer = require("../setup").RegExpLexer,
-    assert = require("assert");
+var assert = require("chai").assert;
+var Jison = require("../setup").Jison;
+var RegExpLexer = require("../setup").RegExpLexer;
 
-exports["test Semantic action basic return"] = function() {
+
+describe("Parser Actions", function () {
+it("test Semantic action basic return", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -22,9 +24,9 @@ exports["test Semantic action basic return"] = function() {
 
     assert.equal(parser.parse('x'), 0, "semantic action");
     assert.equal(parser.parse('y'), 1, "semantic action");
-};
+});
 
-exports["test return null"] = function() {
+it("test return null", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -41,9 +43,9 @@ exports["test return null"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('x'), null, "semantic action");
-};
+});
 
-exports["test terminal semantic values are not null"] = function() {
+it("test terminal semantic values are not null", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -63,9 +65,9 @@ exports["test terminal semantic values are not null"] = function() {
 
     assert.deepEqual(parser.parse('x'), [true], "semantic action");
     assert.deepEqual(parser.parse('y'), ['y'], "semantic action");
-};
+});
 
-exports["test Semantic action stack lookup"] = function() {
+it("test Semantic action stack lookup", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -86,9 +88,9 @@ exports["test Semantic action stack lookup"] = function() {
 
     assert.equal(parser.parse('x'), "EX", "return first token");
     assert.equal(parser.parse('yx'), "BYEX", "return first after reduction");
-};
+});
 
-exports["test Semantic actions on nullable grammar"] = function() {
+it("test Semantic actions on nullable grammar", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -106,9 +108,9 @@ exports["test Semantic actions on nullable grammar"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('xx'), "->xx", "return first after reduction");
-};
+});
 
-exports["test named semantic value"] = function() {
+it("test named semantic value", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -126,9 +128,9 @@ exports["test named semantic value"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('xx'), "->xx", "return first after reduction");
-};
+});
 
-exports["test ambiguous named semantic value"] = function() {
+it("test ambiguous named semantic value", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -148,9 +150,9 @@ exports["test ambiguous named semantic value"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('xyx'), "xyx", "return first after reduction");
-};
+});
 
-exports["test vars that look like named semantic values shouldn't be replaced"] = function() {
+it("test vars that look like named semantic values shouldn't be replaced", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -168,9 +170,9 @@ exports["test vars that look like named semantic values shouldn't be replaced"]
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('xx'), "->xx", "return first after reduction");
-};
+});
 
-exports["test previous semantic value lookup ($0)"] = function() {
+it("test previous semantic value lookup ($0)", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -189,10 +191,10 @@ exports["test previous semantic value lookup ($0)"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('xxy'), "xxxx", "return first after reduction");
-};
+});
 
 
-exports["test negative semantic value lookup ($-1)"] = function() {
+it("test negative semantic value lookup ($-1)", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -213,9 +215,9 @@ exports["test negative semantic value lookup ($-1)"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('zxy'), "zxz", "return first after reduction");
-};
+});
 
-exports["test Build AST"] = function() {
+it("test Build AST", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -239,9 +241,9 @@ exports["test Build AST"] = function() {
 
     var r = parser.parse("xxx");
     assert.deepEqual(r, expectedAST);
-};
+});
 
-exports["test 0+0 grammar"] = function() {
+it("test 0+0 grammar", function() {
     var lexData2 = {
         rules: [
             ["0", "return 'ZERO';"],
@@ -264,9 +266,9 @@ exports["test 0+0 grammar"] = function() {
     var expectedAST = ["+", ["+", [0], [0]], [0]];
 
     assert.deepEqual(parser.parse("0+0+0"), expectedAST);
-};
+});
 
-exports["test implicit $$ = $1 action"] = function() {
+it("test implicit $$ = $1 action", function() {
     var lexData2 = {
         rules: [
             ["0", "return 'ZERO';"],
@@ -289,9 +291,9 @@ exports["test implicit $$ = $1 action"] = function() {
     var expectedAST = ["+", ["+", [0], [0]], [0]];
 
     assert.deepEqual(parser.parse("0+0+0"), expectedAST);
-};
+});
 
-exports["test yytext"] = function() {
+it("test yytext", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -308,9 +310,9 @@ exports["test yytext"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('x'), "x", "return first token");
-};
+});
 
-exports["test yyleng"] = function() {
+it("test yyleng", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"]
@@ -327,9 +329,9 @@ exports["test yyleng"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('x'), 1, "return first token");
-};
+});
 
-exports["test yytext more"] = function() {
+it("test yytext more", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -348,9 +350,9 @@ exports["test yytext more"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('xy'), "xy", "return first token");
-};
+});
 
-exports["test action include"] = function() {
+it("test action include", function() {
     var lexData = {
         rules: [
             ["y", "return 'y';"]
@@ -372,9 +374,9 @@ exports["test action include"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('y'), 1, "semantic action");
-};
+});
 
-exports["test next token not shifted if only one action"] = function () {
+it("test next token not shifted if only one action", function () {
     var lexData = {
         rules: [
             ["\\(", "return '(';"],
@@ -393,9 +395,9 @@ exports["test next token not shifted if only one action"] = function () {
     var parser = new Jison.Parser(grammar);
     parser.lexer = new RegExpLexer(lexData);
     assert.ok(parser.parse('(y)y'), "should parse correctly");
-};
+});
 
-exports["test token array LIFO"] = function() {
+it("test token array LIFO", function() {
     var lexData = {
         rules: [
             ["a", "return ['b','a'];"],
@@ -415,9 +417,9 @@ exports["test token array LIFO"] = function() {
     var parser = new Jison.Parser(grammar);
     parser.lexer = new RegExpLexer(lexData);
     assert.equal(parser.parse('ac'), "abc", "should return second token");
-};
+});
 
-exports["test YYACCEPT"] = function() {
+it("test YYACCEPT", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -438,9 +440,9 @@ exports["test YYACCEPT"] = function() {
 
     assert.equal(parser.parse('x'), "EX", "return first token");
     assert.equal(parser.parse('yx'), true, "return first after reduction");
-};
+});
 
-exports["test YYABORT"] = function() {
+it("test YYABORT", function() {
     var lexData = {
         rules: [
             ["x", "return 'x';"],
@@ -461,9 +463,9 @@ exports["test YYABORT"] = function() {
 
     assert.equal(parser.parse('x'), "EX", "return first token");
     assert.equal(parser.parse('yx'), false, "return first after reduction");
-};
+});
 
-exports["test parse params"] = function() {
+it("test parse params", function() {
     var lexData = {
         rules: [
             ["y", "return 'y';"]
@@ -481,9 +483,9 @@ exports["test parse params"] = function() {
     parser.lexer = new RegExpLexer(lexData);
 
     assert.equal(parser.parse('y', "foo", "bar"), "foobar", "semantic action");
-};
+});
 
-exports["test symbol aliases"] = function() {
+it("test symbol aliases", function() {
     var lexData = {
         rules: [
             ["a", "return 'a';"],
@@ -503,9 +505,9 @@ exports["test symbol aliases"] = function() {
     var parser = new Jison.Parser(grammar);
     parser.lexer = new RegExpLexer(lexData);
     assert.equal(parser.parse('abc'), "abc", "should return original string");
-};
+});
 
-exports["test symbol aliases in ebnf"] = function() {
+it("test symbol aliases in ebnf", function() {
     var lexData = {
         rules: [
             ["a", "return 'a';"],
@@ -525,9 +527,9 @@ exports["test symbol aliases in ebnf"] = function() {
     var parser = new Jison.Parser(grammar);
     parser.lexer = new RegExpLexer(lexData);
     assert.equal(parser.parse('abc'), "a[b,c]", "should tolerate aliases in subexpression");
-};
+});
 
-exports["test symbol aliases for terminals"] = function() {
+it("test symbol aliases for terminals", function() {
     var lexData = {
         rules: [
             ["a", "return 'a';"],
@@ -544,4 +546,5 @@ exports["test symbol aliases for terminals"] = function() {
     var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
     assert.equal(parser.parse('abc'), "abc", "should return original string");
-};
+});
+});
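
The test bodies survive the rewrap untouched because chai's assert interface mirrors the Node assert calls these tests rely on. A small illustration of the three call shapes used throughout this file:

    var assert = require("chai").assert;

    // same signatures as Node's built-in assert for these calls
    assert.equal(1, '1', "equal compares loosely (==), like Node's assert.equal");
    assert.deepEqual(["+", [0], [0]], ["+", [0], [0]], "structural equality");
    assert.ok("truthy", "passes for any truthy value");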
