Error Fixes
- Lexer was discarding all newlines (it now replaces them with spaces) - Cleaned up the type check and the remove function in the parser - Cleanup in Compiler.cs
This commit is contained in:
parent
4e4b31be3f
commit
d0c844972d
8 changed files with 198 additions and 146 deletions
|
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Compiler.Lexer;
using Compiler.Parser;
using Compiler.Parser.Nodes;

namespace Compiler
{
    /// <summary>
    /// Test driver for the lexer and parser: lexes and parses every file in the
    /// week_1 "valid" and "invalid" test directories, printing a status line per file.
    /// </summary>
    public static class Compiler
    {
        public static void Main()
        {
            // NOTE(review): paths are relative to the build output directory — TODO confirm
            // they resolve correctly for the intended working directory.
            string[] validFiles = Directory.GetFiles("../../../../tests/week_1/valid");
            string[] invalidFiles = Directory.GetFiles("../../../../tests/week_1/invalid");

            foreach (string filepath in validFiles)
            {
                List<Token> tokens = TestLexer(filepath, 0);
                TestParser(tokens, filepath);
            }

            foreach (string filepath in invalidFiles)
            {
                List<Token> tokens = TestLexer(filepath, 0);
                TestParser(tokens, filepath);
            }
        }

        /// <summary>
        /// Lexes the file at <paramref name="path"/> and returns its token list.
        /// </summary>
        /// <param name="path">Path of the source file to lex.</param>
        /// <param name="debug">When greater than zero, every token is echoed to the console.</param>
        /// <returns>The tokens produced by the lexer for the file's contents.</returns>
        static List<Token> TestLexer(string path, int debug)
        {
            Lexer.Lexer lexer = new Lexer.Lexer();

            // using declaration ensures the file handle is released even if Lex throws
            // (the original leaked the StreamReader).
            string contents;
            using (StreamReader file = new StreamReader(path))
            {
                contents = file.ReadToEnd();
            }

            List<Token> tokens = lexer.Lex(contents);
            Console.WriteLine("Lexed \"" + path.Split("/").Last() + "\"");

            if (debug > 0)
            {
                Console.WriteLine("-----------" + path + "-----------");
                foreach (Token token in tokens)
                {
                    Console.WriteLine(token.ToString());
                }
                Console.WriteLine("--------------------------------------");
            }

            return tokens;
        }

        /// <summary>
        /// Parses <paramref name="tokenList"/> as a program, reporting success or
        /// the parse error to the console. Never rethrows — invalid test files are
        /// expected to fail, so errors are printed rather than propagated.
        /// </summary>
        /// <param name="tokenList">Tokens produced by <see cref="TestLexer"/>.</param>
        /// <param name="path">Source file path, used only for console output.</param>
        static void TestParser(List<Token> tokenList, string path)
        {
            Parser.Parser p = new Parser.Parser(tokenList);

            try
            {
                Node programNode = p.Parse(NodeType.ProgramNode);
                Console.WriteLine("Parsed \"" + path.Split("/").Last() + "\"");
            }
            catch (Exception e)
            {
                Console.WriteLine("Error in file \"" + path.Split("/").Last() + "\"");
                Console.WriteLine(e.Message);
            }
        }
    }
}
Loading…
Add table
Add a link
Reference in a new issue