module source.dlexer;

import source.context;
import source.location;

enum TokenType {
	Invalid = 0,
	
	Begin,
	End,
	
	// Comments
	Comment,
	
	// Literals
	StringLiteral,
	CharacterLiteral,
	IntegerLiteral,
	FloatLiteral,
	
	// Identifier
	Identifier,
	
	// Keywords
	Abstract, Alias, Align, Asm, Assert, Auto,
	Body, Bool, Break, Byte,
	Case, Cast, Catch, Cdouble, Cent, Cfloat, Char,
	Class, Const, Continue, Creal,
	Dchar, Debug, Default, Delegate, Delete,
	Deprecated, Do, Double,
	Else, Enum, Export, Extern,
	False, Final, Finally, Float, For, Foreach,
	ForeachReverse, Function,
	Goto,
	Idouble, If, Ifloat, Immutable, Import, In,
	Inout, Int, Interface, Invariant, Ireal, Is,
	Lazy, Long,
	Macro, Mixin, Module,
	New, Nothrow, Null,
	Out, Override,
	Package, Pragma, Private, Protected, Public, Pure,
	Real, Ref, Return,
	Scope, Shared, Short, Static, Struct, Super,
	Switch, Synchronized,
	Template, This, Throw, True, Try, Typedef,
	Typeid, Typeof,
	Ubyte, Ucent, Uint, Ulong, Union, Unittest, Ushort,
	Version, Void, Volatile,
	Wchar, While, With,
	__File__, __Line__, __Gshared, __Traits, __Vector, __Parameters,
	
	// Operators.
	Slash,              // /
	SlashEqual,         // /=
	Dot,                // .
	DotDot,             // ..
	DotDotDot,          // ...
	Ampersand,          // &
	AmpersandEqual,     // &=
	AmpersandAmpersand, // &&
	Pipe,               // |
	PipeEqual,          // |=
	PipePipe,           // ||
	Minus,              // -
	MinusEqual,         // -=
	MinusMinus,         // --
	Plus,               // +
	PlusEqual,          // +=
	PlusPlus,           // ++
	Less,               // <
	LessEqual,          // <=
	LessLess,           // <<
	LessLessEqual,      // <<=
	LessMore,           // <>
	LessMoreEqual,      // <>=
	More,               // >
	MoreEqual,          // >=
	MoreMoreEqual,      // >>=
	MoreMoreMoreEqual,  // >>>=
	MoreMore,           // >>
	MoreMoreMore,       // >>>
	Bang,               // !
	BangEqual,          // !=
	BangLessMore,       // !<>
	BangLessMoreEqual,  // !<>=
	BangLess,           // !<
	BangLessEqual,      // !<=
	BangMore,           // !>
	BangMoreEqual,      // !>=
	OpenParen,          // (
	CloseParen,         // )
	OpenBracket,        // [
	CloseBracket,       // ]
	OpenBrace,          // {
	CloseBrace,         // }
	QuestionMark,       // ?
	Comma,              // ,
	Semicolon,          // ;
	Colon,              // :
	Dollar,             // $
	Equal,              // =
	EqualEqual,         // ==
	Star,               // *
	StarEqual,          // *=
	Percent,            // %
	PercentEqual,       // %=
	Caret,              // ^
	CaretEqual,         // ^=
	CaretCaret,         // ^^
	CaretCaretEqual,    // ^^=
	Tilde,              // ~
	TildeEqual,         // ~=
	At,                 // @
	EqualMore,          // =>
	Hash,               // #
}

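// A lexed token. toString returns the interned name for token types at
// or above Identifier, and the raw source slice for everything else.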
struct Token {
	import source.location;
	Location location;
	
	TokenType type;
	
	import source.name;
	Name name;
	
	import source.context;
	string toString(Context context) {
		return (type >= TokenType.Identifier)
			? name.toString(context)
			: location.getFullLocation(context).getSlice();
	}
}

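// Build a lexer over the source designated by base. The first token is
// always Begin; its name carries the #! line when the file has one.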
auto lex(Position base, Context context) {
	auto lexer = TokenRange();
	
	lexer.content = base.getFullPosition(context).getSource().getContent();
	lexer.t.type = TokenType.Begin;
	
	lexer.context = context;
	lexer.base = base;
	lexer.previous = base;
	
	// Pop #!
	lexer.t.name = lexer.popSheBang();
	
	lexer.t.location = Location(base, base.getWithOffset(lexer.index));
	return lexer;
}

alias TokenRange = DLexer;

struct DLexer {
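	// Maps the first characters of a token to the routine that lexes it.
	// Entries of the form "?flag:a|b" are conditional: which of the two
	// routines runs depends on the corresponding lexer policy flag (the
	// table's interpretation lives in source.lexbase).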
	enum BaseMap = () {
		auto ret = [
			// Comments
			"//" : "?tokenizeComments:lexComment|popComment",
			"/*" : "?tokenizeComments:lexComment|popComment",
			"/+" : "?tokenizeComments:lexComment|popComment",
			
			// Integer literals.
			"0b" : "lexNumeric",
			"0B" : "lexNumeric",
			"0x" : "lexNumeric",
			"0X" : "lexNumeric",
			
			// String literals.
			"`"   : "lexString",
			`"`   : "lexString",
			"q{"  : "lexDString",
			`q"`  : "lexDString",
			`q"(` : "lexDString",
			`q"[` : "lexDString",
			`q"{` : "lexDString",
			`q"<` : "lexDString",
			`r"`  : "lexDString",
			
			// Character literals.
			"'" : "lexCharacter",
		];
		
		foreach (i; 0 .. 10) {
			import std.conv;
			ret[to!string(i)] = "lexNumeric";
		}
		
		return ret;
	}();
	
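	// The core range machinery (front, popFront, match, getLookahead, ...)
	// comes from LexBaseImpl, parameterized with the maps above.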
	import source.lexbase;
	mixin LexBaseImpl!(Token, BaseMap, getKeywordsMap(), getOperatorsMap());
	
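	// Consume a leading #! line, if any, and return it as an interned name.
	// Returns the empty name when there is no shebang.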
	import source.name;
	Name popSheBang() {
		auto c = frontChar;
		if (c != '#') {
			return BuiltinName!"";
		}
		
		// Also stop on '\0' so a shebang line that isn't newline
		// terminated cannot run past the end of the buffer.
		while (c != '\n' && c != '\0') {
			popChar();
			c = frontChar;
		}
		
		return context.getName(content[0 .. index]);
	}
	
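	// Numeric literals. The first map lists the suffixes accepted on
	// integer literals, the second those accepted on floats; a lone 'l'
	// suffix is rejected with a dedicated error.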
	import source.lexnumeric;
	mixin LexNumericImpl!(Token, [
		"" : TokenType.IntegerLiteral,
		"u": TokenType.IntegerLiteral,
		"U": TokenType.IntegerLiteral,
		"ul": TokenType.IntegerLiteral,
		"uL": TokenType.IntegerLiteral,
		"Ul": TokenType.IntegerLiteral,
		"UL": TokenType.IntegerLiteral,
		"l": TokenType.IntegerLiteral,
		"L": TokenType.IntegerLiteral,
		"lu": TokenType.IntegerLiteral,
		"lU": TokenType.IntegerLiteral,
		"Lu": TokenType.IntegerLiteral,
		"LU": TokenType.IntegerLiteral,
		"f": TokenType.FloatLiteral,
		"F": TokenType.FloatLiteral,
	], [
		"" : TokenType.FloatLiteral,
		"f": TokenType.FloatLiteral,
		"F": TokenType.FloatLiteral,
		"L": TokenType.FloatLiteral,
	], null, [
		"l": "lexFloatSuffixError",
	]);
	
	auto lexFloatSuffixError(string s : "l")(uint begin, uint prefixStart) {
		return getError(begin, "Use 'L' suffix instead of 'l'.");
	}
	
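	// String literals. The c, w and d suffixes are all accepted and
	// map to a plain StringLiteral token.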
	import source.lexstring;
	mixin LexStringImpl!(Token, [
		"" : TokenType.StringLiteral,
		"c" : TokenType.StringLiteral,
		"w" : TokenType.StringLiteral,
		"d" : TokenType.StringLiteral,
	]);
	
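	// r"..." wysiwyg strings: no escape processing, so this reuses the
	// raw string lexer directly.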
	Token lexDString(string s : `r"`)() {
		uint l = s.length;
		return lexRawString!'"'(index - l);
	}
	
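	// q{...} token strings: lex ahead token by token, tracking brace
	// nesting, and keep the whole braced content as a raw string.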
	Token lexDString(string s : "q{")() {
		uint begin = index - 2;
		uint start = index;
		
		auto lookahead = getLookahead();
		
		uint level = 1;
		while (level > 0) {
			lookahead.popFront();
			auto lt = lookahead.front;
			
			switch (lt.type) with (TokenType) {
				case Invalid:
					// Bubble up errors.
					index = lookahead.index;
					return lt;
				
				case End:
					index = lookahead.index - 1;
					return getError(begin, "Unexpected end of file.");
				
				case OpenBrace:
					level++;
					break;
				
				case CloseBrace:
					level--;
					break;
				
				default:
					break;
			}
		}
		
		index = lookahead.index;
		return buildRawString(begin, start, index - 1);
	}

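	// q"(...)", q"[...]", q"{...}" and q"<...>" delimited strings: scan
	// until the closing delimiter immediately followed by '"'. Note that
	// nesting of the delimiter is not tracked here.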
	Token lexQDelimitedString(char delimiter) in {
		assert(delimiter != '"');
	} do {
		uint begin = index - 3;
		uint start = index;

		// This is not technically correct, but the actual value of
		// previous doesn't matter when the delimiter isn't '"'.
		char previous = frontChar;
		char c = previous;

		while (c != '\0' && (c != '"' || previous != delimiter)) {
			popChar();
			previous = c;
			c = frontChar;
		}

		if (c == '\0') {
			return getError(begin, "Unexpected end of file.");
		}

		popChar();
		return buildRawString(begin, start, index - 2);
	}

	Token lexDString(string s : `q"(`)() {
		return lexQDelimitedString(')');
	}

	Token lexDString(string s : `q"[`)() {
		return lexQDelimitedString(']');
	}

	Token lexDString(string s : `q"{`)() {
		return lexQDelimitedString('}');
	}

	Token lexDString(string s : `q"<`)() {
		return lexQDelimitedString('>');
	}

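	// q"ID ... ID" heredoc strings: the content runs from the line after
	// the identifier up to the first line that starts with the identifier
	// immediately followed by '"'.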
	Token lexDString(string s : `q"`)() {
		uint idstart = index;
		uint begin = index - 2;

		Token t = lexIdentifier();
		if (t.type == TokenType.Invalid) {
			// If this is an error, pass it on!
			return t;
		}

		auto id = content[idstart .. index];

		if (frontChar == '\r') {
			// Be nice to the Windows minions out there.
			popChar();
		}

		if (frontChar != '\n') {
			return getError(begin, "Identifier must be followed by a new line.");
		}

		popChar();

		uint start = index;
		char c = frontChar;

		// Skip the initial chars where a match is not possible.
		for (size_t i = 0; c != '\0' && i < id.length; i++) {
			popChar();
			c = frontChar;
		}

		FindDelimiter: while (true) {
			while (c != '\0' && c != '"') {
				popChar();
				c = frontChar;
			}

			if (c == '\0') {
				return getError(begin, "Unexpected end of file.");
			}

			scope(success) {
				popChar();
			}

			if (content[index - id.length - 1] != '\n') {
				continue;
			}

			for (size_t i = 0; i < id.length; i++) {
				if (content[index - id.length + i] != id[i]) {
					// Not our delimiter; resume scanning past this quote.
					continue FindDelimiter;
				}
			}

			// We found our guy.
			break;
		}

		return buildRawString(begin, start, index - id.length - 1);
	}
}

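// Operator spellings mapped to their token types. The "\0" entry turns
// the end-of-input sentinel into an End token.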
auto getOperatorsMap() {
	// The `with(TokenType):` form doesn't currently work,
	// see https://issues.dlang.org/show_bug.cgi?id=14332
	with(TokenType)
	return [
		"/"    : Slash,
		"/="   : SlashEqual,
		"."    : Dot,
		".."   : DotDot,
		"..."  : DotDotDot,
		"&"    : Ampersand,
		"&="   : AmpersandEqual,
		"&&"   : AmpersandAmpersand,
		"|"    : Pipe,
		"|="   : PipeEqual,
		"||"   : PipePipe,
		"-"    : Minus,
		"-="   : MinusEqual,
		"--"   : MinusMinus,
		"+"    : Plus,
		"+="   : PlusEqual,
		"++"   : PlusPlus,
		"<"    : Less,
		"<="   : LessEqual,
		"<<"   : LessLess,
		"<<="  : LessLessEqual,
		"<>"   : LessMore,
		"<>="  : LessMoreEqual,
		">"    : More,
		">="   : MoreEqual,
		">>="  : MoreMoreEqual,
		">>>=" : MoreMoreMoreEqual,
		">>"   : MoreMore,
		">>>"  : MoreMoreMore,
		"!"    : Bang,
		"!="   : BangEqual,
		"!<>"  : BangLessMore,
		"!<>=" : BangLessMoreEqual,
		"!<"   : BangLess,
		"!<="  : BangLessEqual,
		"!>"   : BangMore,
		"!>="  : BangMoreEqual,
		"("    : OpenParen,
		")"    : CloseParen,
		"["    : OpenBracket,
		"]"    : CloseBracket,
		"{"    : OpenBrace,
		"}"    : CloseBrace,
		"?"    : QuestionMark,
		","    : Comma,
		";"    : Semicolon,
		":"    : Colon,
		"$"    : Dollar,
		"="    : Equal,
		"=="   : EqualEqual,
		"*"    : Star,
		"*="   : StarEqual,
		"%"    : Percent,
		"%="   : PercentEqual,
		"^"    : Caret,
		"^="   : CaretEqual,
		"^^"   : CaretCaret,
		"^^="  : CaretCaretEqual,
		"~"    : Tilde,
		"~="   : TildeEqual,
		"@"    : At,
		"=>"   : EqualMore,
		"#"    : Hash,
		"\0"   : End,
	];
}

auto getKeywordsMap() {
	// The `with(TokenType):` form doesn't currently work,
	// see https://issues.dlang.org/show_bug.cgi?id=14332
	with(TokenType)
	return [
		"abstract"        : Abstract,
		"alias"           : Alias,
		"align"           : Align,
		"asm"             : Asm,
		"assert"          : Assert,
		"auto"            : Auto,
		"body"            : Body,
		"bool"            : Bool,
		"break"           : Break,
		"byte"            : Byte,
		"case"            : Case,
		"cast"            : Cast,
		"catch"           : Catch,
		"cent"            : Cent,
		"char"            : Char,
		"class"           : Class,
		"const"           : Const,
		"continue"        : Continue,
		"dchar"           : Dchar,
		"debug"           : Debug,
		"default"         : Default,
		"delegate"        : Delegate,
		"deprecated"      : Deprecated,
		"do"              : Do,
		"double"          : Double,
		"else"            : Else,
		"enum"            : Enum,
		"export"          : Export,
		"extern"          : Extern,
		"false"           : False,
		"final"           : Final,
		"finally"         : Finally,
		"float"           : Float,
		"for"             : For,
		"foreach"         : Foreach,
		"foreach_reverse" : ForeachReverse,
		"function"        : Function,
		"goto"            : Goto,
		"if"              : If,
		"immutable"       : Immutable,
		"import"          : Import,
		"in"              : In,
		"inout"           : Inout,
		"int"             : Int,
		"interface"       : Interface,
		"invariant"       : Invariant,
		"is"              : Is,
		"lazy"            : Lazy,
		"long"            : Long,
		"macro"           : Macro,
		"mixin"           : Mixin,
		"module"          : Module,
		"new"             : New,
		"nothrow"         : Nothrow,
		"null"            : Null,
		"out"             : Out,
		"override"        : Override,
		"package"         : Package,
		"pragma"          : Pragma,
		"private"         : Private,
		"protected"       : Protected,
		"public"          : Public,
		"pure"            : Pure,
		"real"            : Real,
		"ref"             : Ref,
		"return"          : Return,
		"scope"           : Scope,
		"shared"          : Shared,
		"short"           : Short,
		"static"          : Static,
		"struct"          : Struct,
		"super"           : Super,
		"switch"          : Switch,
		"synchronized"    : Synchronized,
		"template"        : Template,
		"this"            : This,
		"throw"           : Throw,
		"true"            : True,
		"try"             : Try,
		"typeid"          : Typeid,
		"typeof"          : Typeof,
		"ubyte"           : Ubyte,
		"ucent"           : Ucent,
		"uint"            : Uint,
		"ulong"           : Ulong,
		"union"           : Union,
		"unittest"        : Unittest,
		"ushort"          : Ushort,
		"version"         : Version,
		"void"            : Void,
		"volatile"        : Volatile,
		"wchar"           : Wchar,
		"while"           : While,
		"with"            : With,
		"__FILE__"        : __File__,
		"__LINE__"        : __Line__,
		"__gshared"       : __Gshared,
		"__traits"        : __Traits,
		"__vector"        : __Vector,
		"__parameters"    : __Parameters,
	];
}

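// Smoke tests covering identifiers, numeric literals and the various
// D string literal syntaxes.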
unittest {
	auto context = new Context();
	
	auto testlexer(string s) {
		import source.name;
		auto base = context.registerMixin(Location.init, s ~ '\0');
		return lex(base, context);
	}
	
	import source.parserutil;
	
	{
		auto lex = testlexer("");
		lex.match(TokenType.Begin);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("a");
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.Identifier);
		assert(t.name.toString(context) == "a");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("_");
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.Identifier);
		assert(t.name.toString(context) == "_");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("_0");
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.Identifier);
		assert(t.name.toString(context) == "_0");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0b0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0b_0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0b_0_");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0b_");
		lex.match(TokenType.Begin);
		lex.match(TokenType.Invalid);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0x0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0x_0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0x_0_");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0x_");
		lex.match(TokenType.Begin);
		lex.match(TokenType.Invalid);
		assert(lex.front.type == TokenType.End);
	}

	{
		auto lex = testlexer("é");
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.Identifier);
		assert(t.name.toString(context) == "é");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("Γαῖα");
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.Identifier);
		assert(t.name.toString(context) == "Γαῖα");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("🙈🙉🙊");
		lex.match(TokenType.Begin);
		lex.match(TokenType.Invalid);
		lex.match(TokenType.Invalid);
		lex.match(TokenType.Invalid);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1. 0");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1..");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		lex.match(TokenType.DotDot);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1 .");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		lex.match(TokenType.Dot);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1u");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1U");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1l");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1L");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1ul");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1uL");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1Ul");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1UL");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1lu");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1lU");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1Lu");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1LU");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1f");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1F");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.f");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		lex.match(TokenType.Dot);
		lex.match(TokenType.Identifier);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.1f");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.1F");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.1L");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		// /!\ l is *NOT* a valid suffix, this one is case sensitive.
		auto lex = testlexer("1.1l");
		lex.match(TokenType.Begin);
		lex.match(TokenType.Invalid);
		assert(lex.front.type == TokenType.End);
	}

	{
		auto lex = testlexer("1. f");
		lex.match(TokenType.Begin);
		lex.match(TokenType.IntegerLiteral);
		lex.match(TokenType.Dot);
		lex.match(TokenType.Identifier);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("1.1 f");
		lex.match(TokenType.Begin);
		lex.match(TokenType.FloatLiteral);
		lex.match(TokenType.Identifier);
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`q"(("))"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == `(")`);
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}

	{
		auto lex = testlexer(`q"[]"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}

	{
		auto lex = testlexer(`q"{<}"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "<");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`q"<">"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == `"`);
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer("q{{foo}}");
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "{foo}");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`q"EOF
EOF"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`q"EOF

EOF"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "\n");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`q"MONKEYS
🙈🙉🙊
MONKEYS"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "🙈🙉🙊\n");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`q"I_LOVE_PYTHON
"""python comment!"""
I_LOVE_PYTHON"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == `"""python comment!"""` ~ '\n');
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
	
	{
		auto lex = testlexer(`r"\r"`);
		lex.match(TokenType.Begin);
		
		auto t = lex.front;
		
		assert(t.type == TokenType.StringLiteral);
		assert(t.name.toString(context) == "\\r");
		lex.popFront();
		
		assert(lex.front.type == TokenType.End);
	}
}