- Fully integrated new `LexerException` system into `performLex()`

COmmand-line

- Updated `commands.d` to catch any `TError` and report the error back

Parser

- Updated unittests to use `LexerException`

Compiler

- Disabled unit tests for now as they cause errors
This commit is contained in:
Tristan B. Velloza Kildaire 2023-01-22 14:59:55 +02:00
parent 5fd1bef2a4
commit d716c306a2
4 changed files with 159 additions and 112 deletions

View File

@ -9,10 +9,12 @@ module commandline.commands;
import jcli;
import std.stdio;
import compiler.compiler : beginCompilation;
import misc.exceptions : TError;
import std.exception : ErrnoException;
import compiler.lexer : Lexer, Token;
import compiler.parsing.core : Parser;
import compiler.typecheck.core : TypeChecker;
import gogga;
//TODO: Re-order the definitions below so that they appear with compile first, then lex, parse, ..., help
@ -63,16 +65,14 @@ struct lexCommand
/* Begin lexing process */
Lexer lexer = new Lexer(sourceText);
if(lexer.performLex())
{
writeln("=== Tokens ===\n");
writeln(lexer.getTokens());
}
else
{
/* TODO: Is the lexer.performLex() return value used? */
writeln("There was an error whilst performing tokenization");
}
lexer.performLex();
writeln("=== Tokens ===\n");
writeln(lexer.getTokens());
}
catch(TError t)
{
gprintln(t.msg, DebugType.ERROR);
}
catch(ErrnoException e)
{
@ -107,22 +107,20 @@ struct parseCommand
/* Begin lexing process */
Lexer lexer = new Lexer(sourceText);
if(lexer.performLex())
{
Token[] tokens = lexer.getTokens();
writeln("=== Tokens ===\n");
writeln(tokens);
lexer.performLex();
Token[] tokens = lexer.getTokens();
writeln("=== Tokens ===\n");
writeln(tokens);
// TODO: Catch exception
Parser parser = new Parser(tokens);
// TODO: Do something with the returned module
auto modulel = parser.parse();
}
else
{
/* TODO: Is the lexer.performLex() return value used? */
writeln("There was an error whilst performing tokenization");
}
// TODO: Catch exception
Parser parser = new Parser(tokens);
// TODO: Do something with the returned module
auto modulel = parser.parse();
}
catch(TError t)
{
gprintln(t.msg, DebugType.ERROR);
}
catch(ErrnoException e)
{
@ -156,27 +154,25 @@ struct typecheckCommand
/* Begin lexing process */
Lexer lexer = new Lexer(sourceText);
if(lexer.performLex())
{
Token[] tokens = lexer.getTokens();
writeln("=== Tokens ===\n");
writeln(tokens);
lexer.performLex();
Token[] tokens = lexer.getTokens();
writeln("=== Tokens ===\n");
writeln(tokens);
// TODO: Catch exception
Parser parser = new Parser(tokens);
// TODO: Do something with the returned module
auto modulel = parser.parse();
// TODO: Catch exception
Parser parser = new Parser(tokens);
// TODO: Do something with the returned module
auto modulel = parser.parse();
//TODO: collect results here
//TODO: catch exceptions
TypeChecker typeChecker = new TypeChecker(modulel);
typeChecker.beginCheck();
}
else
{
/* TODO: Is the lexer.performLex() return value used? */
writeln("There was an error whilst performing tokenization");
}
//TODO: collect results here
//TODO: catch exceptions
TypeChecker typeChecker = new TypeChecker(modulel);
typeChecker.beginCheck();
}
catch(TError t)
{
gprintln(t.msg, DebugType.ERROR);
}
catch(ErrnoException e)
{

View File

@ -97,35 +97,29 @@ public class Compiler
/* Setup the lexer and begin lexing */
this.lexer = new Lexer(inputSource);
if(lexer.performLex())
{
/* Extract the tokens */
Token[] tokens = lexer.getTokens();
gprintln("Collected "~to!(string)(tokens));
this.lexer.performLex();
/* Extract the tokens */
Token[] tokens = lexer.getTokens();
gprintln("Collected "~to!(string)(tokens));
/* Spawn a new parser with the provided tokens */
this.parser = new Parser(tokens);
/* Spawn a new parser with the provided tokens */
this.parser = new Parser(tokens);
/* The parsed Module */
Module modulle = parser.parse();
/* The parsed Module */
Module modulle = parser.parse();
/* Spawn a new typechecker/codegenerator on the module */
this.typeChecker = new TypeChecker(modulle);
/* Spawn a new typechecker/codegenerator on the module */
this.typeChecker = new TypeChecker(modulle);
/* Perform typechecking/codegen */
this.typeChecker.beginCheck();
/* Perform typechecking/codegen */
this.typeChecker.beginCheck();
/* Perform code emitting */
this.emitter = new DCodeEmitter(typeChecker, emitOutFile, config);
emitter.emit(); // Emit the code
emitOutFile.close(); // Flush (perform the write() syscall)
emitter.finalize(); // Call CC on the file containing generated C code
}
else
{
// TODO: Throw a lexing error here or rather `performLex()` should be doing that
gprintln("Error when lexing (make this an exception throw)", DebugType.ERROR);
}
/* Perform code emitting */
this.emitter = new DCodeEmitter(typeChecker, emitOutFile, config);
emitter.emit(); // Emit the code
emitOutFile.close(); // Flush (perform the write() syscall)
emitter.finalize(); // Call CC on the file containing generated C code
}
}
@ -171,18 +165,18 @@ unittest
// cleared, I believe this may be what is happening
// ... see issue #88
// ... UPDATE: It seems to be any unit test..... mhhhh.
string[] testFiles = ["source/tlang/testing/simple_while.t"
];
// string[] testFiles = ["source/tlang/testing/simple_while.t"
// ];
// "source/tlang/testing/simple_functions.t",
// "source/tlang/testing/simple_while.t",
// "source/tlang/testing/simple_for_loops.t",
// "source/tlang/testing/simple_cast.t",
// "source/tlang/testing/simple_conditionals.t",
// "source/tlang/testing/nested_conditionals.t",
// "source/tlang/testing/simple_discard.t"
foreach(string testFile; testFiles)
{
beginCompilation([testFile]);
}
// // "source/tlang/testing/simple_functions.t",
// // "source/tlang/testing/simple_while.t",
// // "source/tlang/testing/simple_for_loops.t",
// // "source/tlang/testing/simple_cast.t",
// // "source/tlang/testing/simple_conditionals.t",
// // "source/tlang/testing/nested_conditionals.t",
// // "source/tlang/testing/simple_discard.t"
// foreach(string testFile; testFiles)
// {
// beginCompilation([testFile]);
// }
}

View File

@ -24,6 +24,11 @@ public final class LexerException : TError
this.offendingInstance = offendingInstance;
this.errType = errType;
}
this(Lexer offendingInstance, string msg)
{
this(offendingInstance, LexerError.OTHER, msg);
}
}
/* TODO: Add Token type (which matches column and position too) */
@ -119,7 +124,7 @@ public final class Lexer
/* Perform the lexing process */
/* TODO: Use return value */
public bool performLex()
public void performLex()
{
while(position < sourceCode.length)
@ -155,8 +160,7 @@ public final class Lexer
}
else
{
gprintln("Floating point '"~currentToken~"' cannot be followed by a '"~currentChar~"'", DebugType.ERROR);
return false;
throw new LexerException(this, "Floating point '"~currentToken~"' cannot be followed by a '"~currentChar~"'");
}
}
}
@ -244,8 +248,7 @@ public final class Lexer
}
else
{
gprintln("Expected a letter to follow the .", DebugType.ERROR);
return false;
throw new LexerException(this, "Expected a letter to follow the .");
}
}
@ -336,14 +339,12 @@ public final class Lexer
/* If we don't have a next character then raise error */
else
{
gprintln("Unfinished escape sequence", DebugType.ERROR);
return false;
throw new LexerException(this, "Unfinished escape sequence");
}
}
else
{
gprintln("Escape sequences can only be used within strings", DebugType.ERROR);
return false;
throw new LexerException(this, "Escape sequences can only be used within strings");
}
}
/* Character literal support */
@ -377,14 +378,12 @@ public final class Lexer
}
else
{
gprintln("Was expecting closing ' when finishing character literal", DebugType.ERROR);
return false;
throw new LexerException(this, "Was expecting closing ' when finishing character literal");
}
}
else
{
gprintln("EOSC reached when trying to get character literal", DebugType.ERROR);
return false;
throw new LexerException(this, LexerError.EXHAUSTED_CHARACTERS, "EOSC reached when trying to get character literal");
}
}
/**
@ -421,8 +420,7 @@ public final class Lexer
}
else
{
gprintln("You MUST specify a size encoder after a signagae encoder", DebugType.ERROR);
return false;
throw new LexerException(this, "You MUST specify a size encoder after a signagae encoder");
}
@ -466,8 +464,7 @@ public final class Lexer
/* Anything else is invalid */
else
{
gprintln("Not valid TODO", DebugType.ERROR);
return false;
throw new LexerException(this, "Not valid TODO");
}
}
/**
@ -503,14 +500,12 @@ public final class Lexer
*/
else
{
gprintln("A size-encoder must follow a signage encoder", DebugType.ERROR);
return false;
throw new LexerException(this, "A size-encoder must follow a signage encoder");
}
}
else
{
gprintln("Cannot have another encoder after a size encoder", DebugType.ERROR);
return false;
throw new LexerException(this, "Cannot have another encoder after a size encoder");
}
}
/* It is impossible to reach this as flushing means we cannot add more */
@ -537,8 +532,6 @@ public final class Lexer
}
tokens = currentTokens;
return true;
}
private char[] numbericalEncoderSegmentFetch()

View File

@ -1997,7 +1997,15 @@ module myModule;
`;
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2047,7 +2055,15 @@ class myClass2
`;
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2195,7 +2211,15 @@ void function()
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2257,7 +2281,15 @@ int myFunction(int i, int j)
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2338,7 +2370,15 @@ void function()
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2440,7 +2480,15 @@ int thing()
}
`;
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2524,7 +2572,15 @@ void function()
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());
@ -2646,7 +2702,15 @@ void function()
Lexer currentLexer = new Lexer(sourceCode);
assert(currentLexer.performLex());
try
{
currentLexer.performLex();
assert(true);
}
catch(LexerException e)
{
assert(false);
}
Parser parser = new Parser(currentLexer.getTokens());