fluentasserts.core.results 565/609(92%) line coverage

      
module fluentasserts.core.results;

import std.stdio;
import std.file;
import std.algorithm;
import std.conv;
import std.range;
import std.string;
import std.exception;
import std.typecons;

import dparse.lexer;
import dparse.parser;

@safe:

/// Glyphs used to display special chars in the results
struct ResultGlyphs {
  static {
    /// Glyph for the tab char
    string tab;

    /// Glyph for the \r char
    string carriageReturn;

    /// Glyph for the \n char
    string newline;

    /// Glyph for the space char
    string space;

    /// Glyph for the \0 char
    string nullChar;

    /// Glyph that indicates the error line
    string sourceIndicator;

    /// Glyph that separates the line number
    string sourceLineSeparator;

    /// Glyph for the diff begin indicator
    string diffBegin;

    /// Glyph for the diff end indicator
    string diffEnd;

    /// Glyph that marks an inserted text in diff
    string diffInsert;

    /// Glyph that marks deleted text in diff
    string diffDelete;
  }

  /// Set the default values. The values are static.
  static resetDefaults() {
    version(windows) {
      ResultGlyphs.tab = `\t`;
      ResultGlyphs.carriageReturn = `\r`;
      ResultGlyphs.newline = `\n`;
      ResultGlyphs.space = ` `;
      ResultGlyphs.nullChar = `␀`;
    } else {
      ResultGlyphs.tab = `¤`;
      ResultGlyphs.carriageReturn = `←`;
      ResultGlyphs.newline = `↲`;
      ResultGlyphs.space = `᛫`;
      ResultGlyphs.nullChar = `\0`;
    }

    ResultGlyphs.sourceIndicator = ">";
    ResultGlyphs.sourceLineSeparator = ":";

    ResultGlyphs.diffBegin = "[";
    ResultGlyphs.diffEnd = "]";
    ResultGlyphs.diffInsert = "+";
    ResultGlyphs.diffDelete = "-";
  }
}

static this() {
  ResultGlyphs.resetDefaults;
}

///
interface ResultPrinter {
  void primary(string);
  void info(string);
  void danger(string);
  void success(string);

  void dangerReverse(string);
  void successReverse(string);
}

version(unittest) {
  class MockPrinter : ResultPrinter {
    string buffer;

    void primary(string val) { buffer ~= "[primary:" ~ val ~ "]"; }
    void info(string val) { buffer ~= "[info:" ~ val ~ "]"; }
    void danger(string val) { buffer ~= "[danger:" ~ val ~ "]"; }
    void success(string val) { buffer ~= "[success:" ~ val ~ "]"; }
    void dangerReverse(string val) { buffer ~= "[dangerReverse:" ~ val ~ "]"; }
    void successReverse(string val) { buffer ~= "[successReverse:" ~ val ~ "]"; }
  }
}

struct WhiteIntervals {
  size_t left;
  size_t right;
}

WhiteIntervals getWhiteIntervals(string text) {
  auto stripText = text.strip;

  if(stripText == "") {
    return WhiteIntervals(0, 0);
  }

  return WhiteIntervals(text.indexOf(stripText[0]), text.lastIndexOf(stripText[stripText.length - 1]));
}

/// This is the most simple implementation of a ResultPrinter.
/// All the plain data is printed to stdout
class DefaultResultPrinter : ResultPrinter {
  void primary(string text) { write(text); }
  void info(string text) { write(text); }
  void danger(string text) { write(text); }
  void success(string text) { write(text); }

  void dangerReverse(string text) { write(text); }
  void successReverse(string text) { write(text); }
}

interface IResult {
  string toString();
  void print(ResultPrinter);
}

/// A result that prints a simple message to the user
class MessageResult : IResult {
  private {
    struct Message {
      bool isValue;
      string text;
    }

    Message[] messages;
  }

  this(string message) nothrow {
    add(false, message);
  }

  override string toString() {
    return messages.map!"a.text".join.to!string;
  }

  void add(bool isValue, string message) nothrow {
    this.messages ~= Message(isValue, message
      .replace("\r", ResultGlyphs.carriageReturn)
      .replace("\n", ResultGlyphs.newline)
      .replace("\0", ResultGlyphs.nullChar)
      .replace("\t", ResultGlyphs.tab));
  }

  void addValue(string text) {
    add(true, text);
  }

  void addText(string text) {
    this.messages ~= Message(false, text);
  }

  void prependText(string text) {
    this.messages = Message(false, text) ~ this.messages;
  }

  void prependValue(string text) {
    this.messages = Message(true, text) ~ this.messages;
  }

  void print(ResultPrinter printer) {
    foreach(message; messages) {
      if(message.isValue) {
        printer.info(message.text);
      } else {
        printer.primary(message.text);
      }
    }
  }
}

version (unittest) {
  import fluentasserts.core.base;
}

@("Message result should return the message")
unittest {
  auto result = new MessageResult("Message");
  result.toString.should.equal("Message");
}

@("Message result should replace the special chars")
unittest {
  auto result = new MessageResult("\t \r\n");
  result.toString.should.equal(`¤ ←↲`);
}

@("Message result should replace the special chars with the custom glyphs")
unittest {
  scope(exit) {
    ResultGlyphs.resetDefaults;
  }

  ResultGlyphs.tab = `\t`;
  ResultGlyphs.carriageReturn = `\r`;
  ResultGlyphs.newline = `\n`;

  auto result = new MessageResult("\t \r\n");
  result.toString.should.equal(`\t \r\n`);
}

@("Message result should return values as string")
unittest {
  auto result = new MessageResult("text");
  result.addValue("value");
  result.addText("text");

  result.toString.should.equal(`textvaluetext`);
}

@("Message result should print a string as primary")
unittest {
  auto result = new MessageResult("\t \r\n");
  auto printer = new MockPrinter;

  result.print(printer);

  printer.buffer.should.equal(`[primary:¤ ←↲]`);
}

@("Message result should print values as info")
unittest {
  auto result = new MessageResult("text");
  result.addValue("value");
  result.addText("text");

  auto printer = new MockPrinter;
  result.print(printer);

  printer.buffer.should.equal(`[primary:text][info:value][primary:text]`);
}

class DiffResult : IResult {
  import ddmp.diff;

  protected {
    string expected;
    string actual;
  }

  this(string expected, string actual) {
    this.expected = expected.replace("\0", ResultGlyphs.nullChar);
    this.actual = actual.replace("\0", ResultGlyphs.nullChar);
  }

  private string getResult(const Diff d) {
    final switch(d.operation) {
      case Operation.DELETE:
        return ResultGlyphs.diffBegin ~ ResultGlyphs.diffDelete ~ d.text ~ ResultGlyphs.diffEnd;

      case Operation.INSERT:
        return ResultGlyphs.diffBegin ~ ResultGlyphs.diffInsert ~ d.text ~ ResultGlyphs.diffEnd;

      case Operation.EQUAL:
        return d.text;
    }
  }

  override string toString() @trusted {
    return "Diff:\n" ~ diff_main(expected, actual).map!(a => getResult(a)).join;
  }
  void print(ResultPrinter printer) @trusted {
    auto result = diff_main(expected, actual);

    printer.info("Diff:");

    foreach(diff; result) {
      if(diff.operation == Operation.EQUAL) {
        printer.primary(diff.text);
      }

      if(diff.operation == Operation.INSERT) {
        printer.successReverse(diff.text);
      }

      if(diff.operation == Operation.DELETE) {
        printer.dangerReverse(diff.text);
      }
    }

    printer.primary("\n");
  }
}

/// DiffResult should find the differences
unittest {
  auto diff = new DiffResult("abc", "asc");

  diff.toString.should.equal("Diff:\na[-b][+s]c");
}

/// DiffResult should use the custom glyphs
unittest {
  scope(exit) {
    ResultGlyphs.resetDefaults;
  }

  ResultGlyphs.diffBegin = "{";
  ResultGlyphs.diffEnd = "}";
  ResultGlyphs.diffInsert = "!";
  ResultGlyphs.diffDelete = "?";

  auto diff = new DiffResult("abc", "asc");

  diff.toString.should.equal("Diff:\na{?b}{!s}c");
}

class KeyResult(string key) : IResult {
  private immutable {
    string value;
    size_t indent;
  }

  this(string value, size_t indent = 10) {
    this.value = value.replace("\0", ResultGlyphs.nullChar);
    this.indent = indent;
  }

  bool hasValue() {
    return value != "";
  }

  override string toString() {
    if(value == "") {
      return "";
    }

    return rightJustify(key ~ ":", indent, ' ') ~ printableValue;
  }

  void print(ResultPrinter printer) {
    if(value == "") {
      return;
    }

    printer.info(rightJustify(key ~ ":", indent, ' '));

    auto lines = value.split("\n");
    auto spaces = rightJustify(":", indent, ' ');

    int index;
    foreach(line; lines) {
      if(index > 0) {
        printer.info(ResultGlyphs.newline);
        printer.primary("\n");
        printer.info(spaces);
      }

      printLine(line, printer);

      index++;
    }
  }

  private {
    struct Message {
      bool isSpecial;
      string text;
    }

    void printLine(string line, ResultPrinter printer) {
      Message[] messages;

      auto whiteIntervals = line.getWhiteIntervals;

      foreach(size_t index, ch; line) {
        bool showSpaces = index < whiteIntervals.left || index >= whiteIntervals.right;

        auto special = isSpecial(ch, showSpaces);

        if(messages.length == 0 || messages[messages.length - 1].isSpecial != special) {
          messages ~= Message(special, "");
        }

        messages[messages.length - 1].text ~= toVisible(ch, showSpaces);
      }

      foreach(message; messages) {
        if(message.isSpecial) {
          printer.info(message.text);
        } else {
          printer.primary(message.text);
        }
      }
    }

    bool isSpecial(T)(T ch, bool showSpaces) {
      if(ch == ' ' && showSpaces) {
        return true;
      }

      if(ch == '\r' || ch == '\t') {
        return true;
      }

      return false;
    }

    string toVisible(T)(T ch, bool showSpaces) {
      if(ch == ' ' && showSpaces) {
        return ResultGlyphs.space;
      }

      if(ch == '\r') {
        return ResultGlyphs.carriageReturn;
      }

      if(ch == '\t') {
        return ResultGlyphs.tab;
      }

      return ch.to!string;
    }

    pure string printableValue() {
      return value.split("\n").join("\\n\n" ~ rightJustify(":", indent, ' '));
    }
  }
}

/// KeyResult should not display spaces between words with special chars
unittest {
  auto result = new KeyResult!"key"(" row1 row2 ");
  auto printer = new MockPrinter();

  result.print(printer);

  printer.buffer.should.equal(`[info: key:][info:᛫][primary:row1 row2][info:᛫]`);
}

/// KeyResult should display spaces with special chars on space lines
unittest {
  auto result = new KeyResult!"key"(" ");
  auto printer = new MockPrinter();

  result.print(printer);

  printer.buffer.should.equal(`[info: key:][info:᛫᛫᛫]`);
}

/// KeyResult should display no char for empty lines
unittest {
  auto result = new KeyResult!"key"("");
  auto printer = new MockPrinter();

  result.print(printer);

  printer.buffer.should.equal(``);
}

/// KeyResult should display special characters with different contexts
unittest {
  auto result = new KeyResult!"key"("row1\n \trow2");
  auto printer = new MockPrinter();
  result.print(printer);

  printer.buffer.should.equal(`[info: key:][primary:row1][info:↲][primary:` ~ "\n" ~ `][info: :][info:᛫¤][primary:row2]`);
}

/// KeyResult should display custom glyphs with different contexts
unittest {
  scope(exit) {
    ResultGlyphs.resetDefaults;
  }

  ResultGlyphs.newline = `\n`;
  ResultGlyphs.tab = `\t`;
  ResultGlyphs.space = ` `;

  auto result = new KeyResult!"key"("row1\n \trow2");
  auto printer = new MockPrinter();

  result.print(printer);

  printer.buffer.should.equal(`[info: key:][primary:row1][info:\n][primary:` ~ "\n" ~ `][info: :][info: \t][primary:row2]`);
}

class ExpectedActualResult : IResult {
  protected {
    string title;
    KeyResult!"Expected" expected;
    KeyResult!"Actual" actual;
  }

  this(string title, string expected, string actual) {
    this.title = title;
    this(expected, actual);
  }

  this(string expected, string actual) {
    this.expected = new KeyResult!"Expected"(expected);
    this.actual = new KeyResult!"Actual"(actual);
  }

  override string toString() {
    auto line1 = expected.toString;
    auto line2 = actual.toString;

    string glue;
    string prefix;

    if(line1 != "" && line2 != "") {
      glue = "\n";
    }

    if(line1 != "" || line2 != "") {
      prefix = title == "" ? "\n" : ("\n" ~ title ~ "\n");
    }

    return prefix ~ line1 ~ glue ~ line2;
  }

  void print(ResultPrinter printer) {
    auto line1 = expected.toString;
    auto line2 = actual.toString;

    if(actual.hasValue || expected.hasValue) {
      printer.info(title == "" ? "\n" : ("\n" ~ title ~ "\n"));
    }

    expected.print(printer);

    if(actual.hasValue && expected.hasValue) {
      printer.primary("\n");
    }

    actual.print(printer);
  }
}

@("ExpectedActual result should be empty when no data is provided")
unittest {
  auto result = new ExpectedActualResult("", "");
  result.toString.should.equal("");
}

@("ExpectedActual result should be empty when null data is provided")
unittest {
  auto result = new ExpectedActualResult(null, null);
  result.toString.should.equal("");
}

@("ExpectedActual result should show one line of the expected and actual data")
unittest {
  auto result = new ExpectedActualResult("data", "data");
  result.toString.should.equal(` Expected:data Actual:data`);
}

@("ExpectedActual result should show multiple lines of the expected and actual data")
unittest {
  auto result = new ExpectedActualResult("data\ndata", "data\ndata");
  result.toString.should.equal(` Expected:data\n :data Actual:data\n :data`);
}

/// A result that displays differences between ranges
class ExtraMissingResult : IResult {
  protected {
    KeyResult!"Extra" extra;
    KeyResult!"Missing" missing;
  }

  this(string extra, string missing) {
    this.extra = new KeyResult!"Extra"(extra);
    this.missing = new KeyResult!"Missing"(missing);
  }

  override string toString() {
    auto line1 = extra.toString;
    auto line2 = missing.toString;

    string glue;
    string prefix;

    if(line1 != "" || line2 != "") {
      prefix = "\n";
    }

    if(line1 != "" && line2 != "") {
      glue = "\n";
    }

    return prefix ~ line1 ~ glue ~ line2;
  }

  void print(ResultPrinter printer) {
    if(extra.hasValue || missing.hasValue) {
      printer.primary("\n");
    }

    extra.print(printer);

    if(extra.hasValue && missing.hasValue) {
      printer.primary("\n");
    }

    missing.print(printer);
  }
}

string toString(const(Token)[] tokens) {
  string result;

  foreach(token; tokens.filter!(a => str(a.type) != "comment")) {
    if(str(token.type) == "whitespace" && token.text == "") {
      result ~= "\n";
    } else {
      result ~= token.text == "" ? str(token.type) : token.text;
    }
  }

  return result;
}

auto getScope(const(Token)[] tokens, size_t line) nothrow {
  bool foundScope;
  bool foundAssert;

  size_t beginToken;
  size_t endToken = tokens.length;

  int paranthesisCount = 0;
  int scopeLevel;
  size_t[size_t] paranthesisLevels;

  foreach(i, token; tokens) {
    string type = str(token.type);

    if(type == "{") {
      paranthesisLevels[paranthesisCount] = i;
      paranthesisCount++;
    }

    if(type == "}") {
      paranthesisCount--;
    }

    if(line == token.line) {
      foundScope = true;
    }

    if(foundScope) {
      if(token.text == "should" || token.text == "Assert" || type == "assert" || type == ";") {
        foundAssert = true;
        scopeLevel = paranthesisCount;
      }

      if(type == "}" && paranthesisCount <= scopeLevel) {
        beginToken = paranthesisLevels[paranthesisCount];
        endToken = i + 1;

        break;
      }
    }
  }

  return const Tuple!(size_t, "begin", size_t, "end")(beginToken, endToken);
}

/// Get the spec function and scope that contains a lambda
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto result = getScope(tokens, 101);
  auto identifierStart = getPreviousIdentifier(tokens, result.begin);

  tokens[identifierStart .. result.end].toString.strip.should.equal("it(\"should throw an exception if we request 2 android devices\", { ({ auto result = [ device1.idup, device2.idup ].filterBy(RunOptions(\"\", \"android\", 2)).array; }).should.throwException!DeviceException.withMessage.equal(\"You requested 2 `androdid` devices, but there is only 1 healthy.\"); }");
}

/// Get a method scope and signature
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/class.d"), tokens);

  auto result = getScope(tokens, 10);
  auto identifierStart = getPreviousIdentifier(tokens, result.begin);

  tokens[identifierStart .. result.end].toString.strip.should.equal("void bar() { assert(false); }");
}

/// Get a method scope without assert
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/class.d"), tokens);

  auto result = getScope(tokens, 14);
  auto identifierStart = getPreviousIdentifier(tokens, result.begin);

  tokens[identifierStart .. result.end].toString.strip.should.equal("void bar2() { enforce(false); }");
}

size_t getFunctionEnd(const(Token)[] tokens, size_t start) {
  int paranthesisCount;
  size_t result = start;

  // iterate the parameters
  foreach(i, token; tokens[start .. $]) {
    string type = str(token.type);

    if(type == "(") {
      paranthesisCount++;
    }

    if(type == ")") {
      paranthesisCount--;
    }

    if(type == "{" && paranthesisCount == 0) {
      result = start + i;
      break;
    }

    if(type == ";" && paranthesisCount == 0) {
      return start + i;
    }
  }

  paranthesisCount = 0;

  // iterate the scope
  foreach(i, token; tokens[result .. $]) {
    string type = str(token.type);

    if(type == "{") {
      paranthesisCount++;
    }

    if(type == "}") {
      paranthesisCount--;

      if(paranthesisCount == 0) {
        result = result + i;
        break;
      }
    }
  }

  return result;
}

/// Get the end of a spec function with a lambda
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto result = getScope(tokens, 101);
  auto identifierStart = getPreviousIdentifier(tokens, result.begin);
  auto functionEnd = getFunctionEnd(tokens, identifierStart);
  tokens[identifierStart .. functionEnd].toString.strip.should.equal("it(\"should throw an exception if we request 2 android devices\", { ({ auto result = [ device1.idup, device2.idup ].filterBy(RunOptions(\"\", \"android\", 2)).array; }).should.throwException!DeviceException.withMessage.equal(\"You requested 2 `androdid` devices, but there is only 1 healthy.\"); })");
}

/// Get the end of an unittest function with a lambda
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto result = getScope(tokens, 81);
  auto identifierStart = getPreviousIdentifier(tokens, result.begin);
  auto functionEnd = getFunctionEnd(tokens, identifierStart) + 1;

  tokens[identifierStart .. functionEnd].toString.strip.should.equal("unittest { ({ ({ }).should.beNull; }).should.throwException!TestException.msg; }");
}

/// Get tokens from a scope that contains a lambda
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto result = getScope(tokens, 81);

  tokens[result.begin .. result.end].toString.strip.should.equal(`{ ({ ({ }).should.beNull; }).should.throwException!TestException.msg; }`);
}

size_t getPreviousIdentifier(const(Token)[] tokens, size_t startIndex) {
  enforce(startIndex > 0);
  enforce(startIndex < tokens.length);

  int paranthesisCount;
  bool foundIdentifier;

  foreach(i; 0..startIndex) {
    auto index = startIndex - i - 1;
    auto type = str(tokens[index].type);

    if(type == "(") {
      paranthesisCount--;
    }

    if(type == ")") {
      paranthesisCount++;
    }

    if(paranthesisCount < 0) {
      return getPreviousIdentifier(tokens, index - 1);
    }

    if(paranthesisCount != 0) {
      continue;
    }

    if(type == "unittest") {
      return index;
    }

    if(type == "{" || type == "}") {
      return index + 1;
    }

    if(type == ";") {
      return index + 1;
    }

    if(type == "=") {
      return index + 1;
    }

    if(type == ".") {
      foundIdentifier = false;
    }

    if(type == "identifier" && foundIdentifier) {
      foundIdentifier = true;
      continue;
    }

    if(foundIdentifier) {
      return index;
    }
  }

  return 0;
}

/// Get the previous unittest identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto scopeResult = getScope(tokens, 81);
  auto result = getPreviousIdentifier(tokens, scopeResult.begin);

  tokens[result .. scopeResult.begin].toString.strip.should.equal(`unittest`);
}

/// Get the previous parenthesis identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto scopeResult = getScope(tokens, 63);
  auto end = scopeResult.end - 11;

  auto result = getPreviousIdentifier(tokens, end);

  tokens[result .. end].toString.strip.should.equal(`(5, (11))`);
}

/// Get the previous function call identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto scopeResult = getScope(tokens, 75);
  auto end = scopeResult.end - 11;

  auto result = getPreviousIdentifier(tokens, end);

  tokens[result .. end].toString.strip.should.equal(`found(4)`);
}

/// Get the previous map!"" identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto scopeResult = getScope(tokens, 85);
  auto end = scopeResult.end - 12;

  auto result = getPreviousIdentifier(tokens, end);
  tokens[result .. end].toString.strip.should.equal(`[1, 2, 3].map!"a"`);
}

size_t getAssertIndex(const(Token)[] tokens, size_t startLine) {
  auto assertTokens = tokens
    .enumerate
    .filter!(a => a[1].text == "Assert")
    .filter!(a => a[1].line <= startLine)
    .array;

  if(assertTokens.length == 0) {
    return 0;
  }

  return assertTokens[assertTokens.length - 1].index;
}

/// Get the index of the Assert structure identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto result = getAssertIndex(tokens, 55);

  tokens[result .. result + 4].toString.strip.should.equal(`Assert.equal(`);
}

auto getParameter(const(Token)[] tokens, size_t startToken) {
  size_t paranthesisCount;

  foreach(i; startToken..tokens.length) {
    string type = str(tokens[i].type);

    if(type == "(" || type == "[") {
      paranthesisCount++;
    }

    if(type == ")" || type == "]") {
      if(paranthesisCount == 0) {
        return i;
      }

      paranthesisCount--;
    }

    if(paranthesisCount > 0) {
      continue;
    }

    if(type == ",") {
      return i;
    }
  }

  return 0;
}

/// Get the first parameter from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto begin = getAssertIndex(tokens, 57) + 4;
  auto end = getParameter(tokens, begin);

  tokens[begin .. end].toString.strip.should.equal(`(5, (11))`);
}

/// Get the first list parameter from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto begin = getAssertIndex(tokens, 89) + 4;
  auto end = getParameter(tokens, begin);

  tokens[begin .. end].toString.strip.should.equal(`[ new Value(1), new Value(2) ]`);
}

/// Get the previous array identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto scopeResult = getScope(tokens, 4);
  auto end = scopeResult.end - 13;

  auto result = getPreviousIdentifier(tokens, end);

  tokens[result .. end].toString.strip.should.equal(`[1, 2, 3]`);
}

/// Get the previous array of instances identifier from a list of tokens
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto scopeResult = getScope(tokens, 90);
  auto end = scopeResult.end - 16;

  auto result = getPreviousIdentifier(tokens, end);
  tokens[result .. end].toString.strip.should.equal(`[ new Value(1), new Value(2) ]`);
}

size_t getShouldIndex(const(Token)[] tokens, size_t startLine) {
  auto shouldTokens = tokens
    .enumerate
    .filter!(a => a[1].text == "should")
    .filter!(a => a[1].line <= startLine)
    .array;

  if(shouldTokens.length == 0) {
    return 0;
  }

  return shouldTokens[shouldTokens.length - 1].index;
}

/// Get the index of the should call
unittest {
  const(Token)[] tokens = [];
  splitMultilinetokens(fileToDTokens("test/values.d"), tokens);

  auto result = getShouldIndex(tokens, 4);

  auto token = tokens[result];
  token.line.should.equal(3);
  token.text.should.equal(`should`);
  str(token.type).text.should.equal(`identifier`);
}

/// An alternative to SourceResult that uses
// DParse to get the source code
class SourceResult : IResult {
  static private {
    const(Token)[][string] fileTokens;
  }

  private const {
    string file;
    size_t line;

    Token[] tokens;
  }

  this(string fileName = __FILE__, size_t line = __LINE__, size_t range = 6) nothrow @trusted {
    this.file = fileName;
    this.line = line;

    if (!fileName.exists) {
      return;
    }

    try {
      updateFileTokens(fileName);

      auto result = getScope(fileTokens[fileName], line);
      auto begin = getPreviousIdentifier(fileTokens[fileName], result.begin);
      auto end = getFunctionEnd(fileTokens[fileName], begin) + 1;

      this.tokens = fileTokens[fileName][begin .. end];
    } catch (Throwable t) {
    }
  }

  static void updateFileTokens(string fileName) {
    if(fileName !in fileTokens) {
      fileTokens[fileName] = [];
      splitMultilinetokens(fileToDTokens(fileName), fileTokens[fileName]);
    }
  }

  string getValue() {
    size_t startIndex = 0;
    size_t possibleStartIndex = 0;
    size_t endIndex = 0;

    size_t lastStartIndex = 0;
    size_t lastEndIndex = 0;

    int paranthesisCount = 0;
    size_t begin;
    size_t end = getShouldIndex(tokens, line);

    if(end != 0) {
      begin = tokens.getPreviousIdentifier(end - 1);

      return tokens[begin .. end - 1].toString.strip;
    }

    auto beginAssert = getAssertIndex(tokens, line);

    if(beginAssert > 0) {
      begin = beginAssert + 4;
      end = getParameter(tokens, begin);

      return tokens[begin .. end].toString.strip;
    }

    return "";
  }

  override string toString() nothrow {
    auto separator = leftJustify("", 20, '-');
    string result = "\n" ~ separator ~ "\n" ~ file ~ ":" ~ line.to!string ~ "\n" ~ separator;

    if(tokens.length == 0) {
      return result ~ "\n";
    }

    size_t line = tokens[0].line - 1;
    size_t column = 1;
    bool afterErrorLine = false;

    foreach(token; this.tokens.filter!(token => token != tok!"whitespace")) {
      string prefix = "";

      foreach(lineNumber; line..token.line) {
        if(lineNumber < this.line -1 || afterErrorLine) {
          prefix ~= "\n" ~ rightJustify((lineNumber+1).to!string, 6, ' ') ~ ": ";
        } else {
          prefix ~= "\n>" ~ rightJustify((lineNumber+1).to!string, 5, ' ') ~ ": ";
        }
      }

      if(token.line != line) {
        column = 1;
      }

      if(token.column > column) {
        prefix ~= ' '.repeat.take(token.column - column).array;
      }
      auto stringRepresentation = token.text == "" ? str(token.type) : token.text;

      auto lines = stringRepresentation.split("\n");

      result ~= prefix ~ lines[0];

      line = token.line;
      column = token.column + stringRepresentation.length;

      if(token.line >= this.line && str(token.type) == ";") {
        afterErrorLine = true;
      }
    }

    return result;
  }

  void print(ResultPrinter printer) {
    if(tokens.length == 0) {
      return;
    }

    printer.primary("\n");
    printer.info(file ~ ":" ~ line.to!string);

    size_t line = tokens[0].line - 1;
    size_t column = 1;
    bool afterErrorLine = false;

    foreach(token; this.tokens.filter!(token => token != tok!"whitespace")) {
      foreach(lineNumber; line..token.line) {
        printer.primary("\n");

        if(lineNumber < this.line -1 || afterErrorLine) {
          printer.primary(rightJustify((lineNumber+1).to!string, 6, ' ') ~ ":");
        } else {
          printer.dangerReverse(">" ~ rightJustify((lineNumber+1).to!string, 5, ' ') ~ ":");
        }
      }

      if(token.line != line) {
        column = 1;
      }

      if(token.column > column) {
        printer.primary(' '.repeat.take(token.column - column).array);
      }

      auto stringRepresentation = token.text == "" ? str(token.type) : token.text;

      if(token.text == "" && str(token.type) != "whitespace") {
        printer.info(str(token.type));
      } else if(str(token.type).indexOf("Literal") != -1) {
        printer.success(token.text);
      } else {
        printer.primary(token.text);
      }

      line = token.line;
      column = token.column + stringRepresentation.length;

      if(token.line >= this.line && str(token.type) == ";") {
        afterErrorLine = true;
      }
    }

    printer.primary("\n");
  }
}

@("TestException should read the code from the file")
unittest {
  auto result = new SourceResult("test/values.d", 26);
  auto msg = result.toString;

  msg.should.equal("\n--------------------\ntest/values.d:26\n--------------------\n" ~
    " 23: unittest {\n" ~
    " 24: /++/\n" ~
    " 25: \n" ~
    "> 26: [1, 2, 3]\n" ~
    "> 27: .should\n" ~
    "> 28: .contain(4);\n" ~
    " 29: }");
}

@("TestException should print the lines before multiline tokens")
unittest {
  auto result = new SourceResult("test/values.d", 45);
  auto msg = result.toString;

  msg.should.equal("\n--------------------\ntest/values.d:45\n--------------------\n" ~
    " 40: unittest {\n" ~
    " 41: /*\n" ~
    " 42: Multi line comment\n" ~
    " 43: */\n" ~
    " 44: \n" ~
    "> 45: `multi\n" ~
    "> 46: line\n" ~
    "> 47: string`\n" ~
    "> 48: .should\n" ~
    "> 49: .contain(`multi\n" ~
    "> 50: line\n" ~
    "> 51: string`);\n" ~
    " 52: }");
}

/// Converts a file to D tokens provided by libDParse.
/// All the whitespaces are ignored
const(Token)[] fileToDTokens(string fileName) nothrow @trusted {
  try {
    auto f = File(fileName);
    immutable auto fileSize = f.size();
    ubyte[] fileBytes = new ubyte[](fileSize.to!size_t);

    if(f.rawRead(fileBytes).length != fileSize) {
      return [];
    }

    StringCache cache = StringCache(StringCache.defaultBucketCount);

    LexerConfig config;
    config.stringBehavior = StringBehavior.source;
    config.fileName = fileName;
    config.commentBehavior = CommentBehavior.intern;

    auto lexer = DLexer(fileBytes, config, &cache);
    const(Token)[] tokens = lexer.array;

    return tokens.map!(token => const Token(token.type, token.text.idup, token.line, token.column, token.index)).array;
  } catch(Throwable) {
    return [];
  }
}

@("TestException should ignore missing files")
unittest {
  auto result = new SourceResult("test/missing.txt", 10);
  auto msg = result.toString;

  msg.should.equal("\n" ~ `-------------------- test/missing.txt:10 --------------------` ~ "\n");
}

@("Source reporter should find the tested value on scope start")
unittest {
  auto result = new SourceResult("test/values.d", 4);
  result.getValue.should.equal("[1, 2, 3]");
}

@("Source reporter should find the tested value after a statement")
unittest {
  auto result = new SourceResult("test/values.d", 12);
  result.getValue.should.equal("[1, 2, 3]");
}

@("Source reporter should find the tested value after a */ comment")
unittest {
  auto result = new SourceResult("test/values.d", 20);
  result.getValue.should.equal("[1, 2, 3]");
}

@("Source reporter should find the tested value after a +/ comment")
unittest {
  auto result = new SourceResult("test/values.d", 28);
  result.getValue.should.equal("[1, 2, 3]");
}

@("Source reporter should find the tested value after a // comment")
unittest {
  auto result = new SourceResult("test/values.d", 36);
  result.getValue.should.equal("[1, 2, 3]");
}

@("Source reporter should find the tested value from an assert utility")
unittest {
  auto result = new SourceResult("test/values.d", 55);
  result.getValue.should.equal("5");

  result = new SourceResult("test/values.d", 56);
  result.getValue.should.equal("(5+1)");

  result = new SourceResult("test/values.d", 57);
  result.getValue.should.equal("(5, (11))");
}

@("Source reporter should get the value from multiple should asserts")
unittest {
  auto result = new SourceResult("test/values.d", 61);
  result.getValue.should.equal("5");

  result = new SourceResult("test/values.d", 62);
  result.getValue.should.equal("(5+1)");

  result = new SourceResult("test/values.d", 63);
  result.getValue.should.equal("(5, (11))");
}

@("Source reporter should get the value after a scope")
unittest {
  auto result = new SourceResult("test/values.d", 71);
  result.getValue.should.equal("found");
}

@("Source reporter should get a function call value")
unittest {
  auto result = new SourceResult("test/values.d", 75);
  result.getValue.should.equal("found(4)");
}

@("Source reporter should parse nested lambdas")
unittest {
  auto result = new SourceResult("test/values.d", 81);
  result.getValue.should.equal("({ ({ }).should.beNull; })");
}

/// Source reporter should print the source code
unittest {
  auto result = new SourceResult("test/values.d", 36);
  auto printer = new MockPrinter();

  result.print(printer);

  auto lines = printer.buffer.split("[primary:\n]");

  lines[1].should.equal(`[info:test/values.d:36]`);
  lines[2].should.equal(`[primary: 31:][info:unittest][primary: ][info:{]`);
  lines[7].should.equal(`[dangerReverse:> 36:][primary: ][info:.][primary:contain][info:(][success:4][info:)][info:;]`);
}

/// Split multiline tokens into multiple
/// single line tokens with the same type
void splitMultilinetokens(const(Token)[] tokens, ref const(Token)[] result) nothrow @trusted {
  try {
    foreach(token; tokens) {
      auto pieces = token.text.idup.split("\n");

      if(pieces.length <= 1) {
        result ~= const Token(token.type, token.text.dup, token.line, token.column, token.index);
      } else {
        size_t line = token.line;
        size_t column = token.column;

        foreach(textPiece; pieces) {
          result ~= const Token(token.type, textPiece, line, column, token.index);
          line++;
          column = 1;
        }
      }
    }
  } catch(Throwable) {}
}

/// A new line separator
class SeparatorResult : IResult {
  override string toString() {
    return "\n";
  }

  void print(ResultPrinter printer) {
    printer.primary("\n");
  }
}

class ListInfoResult : IResult {
  private {
    struct Item {
      string singular;
      string plural;
      string[] valueList;

      string key() {
        return valueList.length > 1 ? plural : singular;
      }

      MessageResult toMessage(size_t indentation = 0) {
        auto printableKey = rightJustify(key ~ ":", indentation, ' ');
        auto result = new MessageResult(printableKey);

        string glue;
        foreach(value; valueList) {
          result.addText(glue);
          result.addValue(value);

          glue = ",";
        }

        return result;
      }
    }

    Item[] items;
  }

  void add(string key, string value) {
    items ~= Item(key, "", [value]);
  }

  void add(string singular, string plural, string[] valueList) {
    items ~= Item(singular, plural, valueList);
  }

  private size_t indentation() {
    auto elements = items.filter!"a.valueList.length > 0";

    if(elements.empty) {
      return 0;
    }

    return elements.map!"a.key".map!"a.length".maxElement + 2;
  }

  override string toString() {
    auto indent = indentation;
    auto elements = items.filter!"a.valueList.length > 0";

    if(elements.empty) {
      return "";
    }

    return "\n" ~ elements.map!(a => a.toMessage(indent)).map!"a.toString".join("\n");
  }

  void print(ResultPrinter printer) {
    auto indent = indentation;
    auto elements = items.filter!"a.valueList.length > 0";

    if(elements.empty) {
      return;
    }

    foreach(item; elements) {
      printer.primary("\n");
      item.toMessage(indent).print(printer);
    }
  }
}

/// convert to string the added data to ListInfoResult
unittest {
  auto result = new ListInfoResult();

  result.add("a", "1");
  result.add("ab", "2");
  result.add("abc", "3");

  result.toString.should.equal(` a:1 ab:2 abc:3`);
}

/// print the added data to ListInfoResult
unittest {
  auto printer = new MockPrinter();
  auto result = new ListInfoResult();

  result.add("a", "1");
  result.add("ab", "2");
  result.add("abc", "3");

  result.print(printer);

  printer.buffer.should.equal(`[primary: ][primary: a:][primary:][info:1][primary: ][primary: ab:][primary:][info:2][primary: ][primary: abc:][primary:][info:3]`);
}

/// convert to string the added data lists to ListInfoResult
unittest {
  auto result = new ListInfoResult();

  result.add("a", "as", ["1", "2","3"]);
  result.add("ab", "abs", ["2", "3"]);
  result.add("abc", "abcs", ["3"]);
  result.add("abcd", "abcds", []);

  result.toString.should.equal(` as:1,2,3 abs:2,3 abc:3`);
}
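For reference, a reporter plugs into this module by implementing the six ResultPrinter methods and handing the printer to each IResult it wants rendered. The sketch below is a minimal, hypothetical example of that wiring, assuming only the declarations shown above; PrefixedPrinter and printReport are illustrative names, not part of the library, and the block is guarded with version(none) so it is never compiled.

version(none) {
  /// Illustrative sketch: a printer that tags every chunk with its severity
  /// before writing it to stdout.
  class PrefixedPrinter : ResultPrinter {
    void primary(string text) { write(text); }
    void info(string text) { write("[info] ", text); }
    void danger(string text) { write("[danger] ", text); }
    void success(string text) { write("[success] ", text); }

    void dangerReverse(string text) { write("[DANGER] ", text); }
    void successReverse(string text) { write("[SUCCESS] ", text); }
  }

  /// Renders every result in a report through the same printer.
  void printReport(IResult[] results, ResultPrinter printer) {
    foreach(result; results) {
      result.print(printer);
    }
  }

  unittest {
    IResult[] results;
    results ~= new MessageResult("1 should equal 2.");
    results ~= new ExpectedActualResult("2", "1");
    results ~= new SeparatorResult();

    // Writes the message, then the Expected/Actual block, then a newline.
    printReport(results, new PrefixedPrinter());
  }
}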