Enable displaying errors with source information (file name alongside code position)

This commit is contained in:
2020-03-17 13:34:07 +01:00
parent 0c0982596a
commit 15b77e2c8b
18 changed files with 112 additions and 78 deletions

View File

@@ -1,21 +1,21 @@
package io.smnp.ext
import io.smnp.interpreter.Interpreter
import io.smnp.interpreter.LanguageModuleInterpreter
import io.smnp.type.module.Module
abstract class LanguageModuleProvider(path: String) : ModuleProvider(path) {
open fun files() = listOf("main.mus")
override fun provideModule(interpreter: Interpreter): Module {
override fun provideModule(interpreter: LanguageModuleInterpreter): Module {
val segments = path.split(".")
val parentNodesChainPath = segments.dropLast(1).joinToString(".")
val moduleName = segments.last()
val module = files()
.asSequence()
.map { javaClass.classLoader.getResource(it) }
.map { it.readText() }
.map { interpreter.run(it) }
.map { it to javaClass.classLoader.getResource(it) }
.map { it.first to (it.second?.readText() ?: throw RuntimeException("Module '$path' does not contain '${it.first}' file")) }
.map { interpreter.run(it.second, "module $path::${it.first}") }
.map { it.getRootModule() }
.reduce { acc, module -> acc.merge(module) }

View File

@@ -1,7 +1,7 @@
package io.smnp.ext
import io.smnp.environment.Environment
import io.smnp.interpreter.Interpreter
import io.smnp.interpreter.LanguageModuleInterpreter
import io.smnp.type.module.Module
import org.pf4j.ExtensionPoint
@@ -9,5 +9,5 @@ abstract class ModuleProvider(val path: String) : ExtensionPoint {
open fun onModuleLoad(environment: Environment) {}
open fun beforeModuleDisposal(environment: Environment) {}
open fun dependencies(): List<String> = emptyList()
abstract fun provideModule(interpreter: Interpreter): Module
abstract fun provideModule(interpreter: LanguageModuleInterpreter): Module
}

View File

@@ -2,12 +2,12 @@ package io.smnp.ext
import io.smnp.callable.function.Function
import io.smnp.callable.method.Method
import io.smnp.interpreter.Interpreter
import io.smnp.interpreter.LanguageModuleInterpreter
import io.smnp.type.module.Module
abstract class NativeModuleProvider(path: String) : ModuleProvider(path) {
open fun functions(): List<Function> = emptyList()
open fun methods(): List<Method> = emptyList()
final override fun provideModule(interpreter: Interpreter) = Module.create(path, functions(), methods())
final override fun provideModule(interpreter: LanguageModuleInterpreter) = Module.create(path, functions(), methods())
}

View File

@@ -1,7 +0,0 @@
package io.smnp.interpreter
import io.smnp.environment.Environment
/**
 * Contract for running SMNP code.
 *
 * NOTE(review): this interface is REMOVED by this commit (hunk `@@ -1,7 +0,0 @@`);
 * it is superseded by [LanguageModuleInterpreter], whose `run` additionally takes a
 * `source` label so errors can report where the code came from.
 */
interface Interpreter {
/**
 * Runs the given SMNP source code.
 *
 * @param code the raw SMNP source text to interpret
 * @return the [Environment] resulting from evaluating the code
 */
fun run(code: String): Environment
}

View File

@@ -0,0 +1,7 @@
package io.smnp.interpreter
import io.smnp.environment.Environment
/**
 * Contract for interpreting SMNP language-module code.
 *
 * NOTE(review): this interface is ADDED by this commit (hunk `@@ -0,0 +1,7 @@`) and
 * replaces the old `Interpreter` interface; the extra `source` parameter lets token
 * positions (and therefore error messages) carry the origin of the code being run.
 */
interface LanguageModuleInterpreter {
/**
 * Runs the given SMNP source code.
 *
 * @param code the raw SMNP source text to interpret
 * @param source a human-readable label for where the code came from (e.g. a file
 *   path or a module identifier such as "module path::file"), used in error reporting
 * @return the [Environment] resulting from evaluating the code
 */
fun run(code: String, source: String): Environment
}

View File

@@ -43,13 +43,13 @@ fun main(args: Array<String>): Unit = mainBody {
DefaultModuleRegistry.registeredModules().forEach { println(it) }
}
} catch (e: SmnpException) {
val position = e.exceptionChain.mapNotNull { it as? PositionException }.lastOrNull()?.position ?: ""
val stacktrace = e.exceptionChain.mapNotNull { it as? EnvironmentException }.lastOrNull()?.let {
"\nStack trace:\n${it.environment.stackTrace()}"
} ?: ""
System.err.println(e.friendlyName + " " + position)
System.err.println(e.friendlyName)
e.exceptionChain.mapNotNull { it as? PositionException }.lastOrNull()?.let { System.err.println(it.position.fullString) }
System.err.println()
System.err.println(e.message)
System.err.println(stacktrace)
e.exceptionChain.mapNotNull { it as? EnvironmentException }.lastOrNull()?.let {
System.err.println("\nStack trace:\n${it.environment.stackTrace()}")
}
exitProcess(1)
}
}

View File

@@ -14,6 +14,6 @@ data class Token(val type: TokenType, val value: Any, val rawValue: String, val
}
override fun toString(): String {
return "($type, »$rawValue«, ${position.short})"
return "($type, »$rawValue«, $position)"
}
}

View File

@@ -1,14 +1,11 @@
package io.smnp.dsl.token.model.entity
data class TokenPosition(val line: Int, val beginCol: Int, val endCol: Int) {
data class TokenPosition(val source: String, val line: Int, val beginCol: Int, val endCol: Int) {
companion object {
val NONE = TokenPosition(-1, -1, -1)
val NONE = TokenPosition("<NONE>", -1, -1, -1)
}
override fun toString(): String {
return "[line ${line + 1}, col ${beginCol + 1}]"
}
override fun toString() = "${line + 1}:${beginCol + 1}"
val short: String
get() = "${line + 1}:${beginCol + 1}"
val fullString = "Source: $source\nPosition: line ${line + 1}, column ${beginCol + 1}"
}

View File

@@ -18,12 +18,12 @@ data class TokenizerOutput(val consumedChars: Int, val token: Token) {
return if (consumedChars > 0) TokenizerOutput(consumedChars, token) else NONE
}
fun produce(consumedChars: Int, value: String, tokenType: TokenType, line: Int, beginCol: Int): TokenizerOutput {
return produce(consumedChars, Token(tokenType, value, TokenPosition(line, beginCol, beginCol + consumedChars)))
fun produce(consumedChars: Int, value: String, tokenType: TokenType, source: String, line: Int, beginCol: Int): TokenizerOutput {
return produce(consumedChars, Token(tokenType, value, TokenPosition(source, line, beginCol, beginCol + consumedChars)))
}
fun produce(consumedChars: Int, value: Any, rawValue: String, tokenType: TokenType, line: Int, beginCol: Int): TokenizerOutput {
return produce(consumedChars, Token(tokenType, value, rawValue, TokenPosition(line, beginCol, beginCol + consumedChars)))
fun produce(consumedChars: Int, value: Any, rawValue: String, tokenType: TokenType, source: String, line: Int, beginCol: Int): TokenizerOutput {
return produce(consumedChars, Token(tokenType, value, rawValue, TokenPosition(source, line, beginCol, beginCol + consumedChars)))
}
}
}

View File

@@ -4,7 +4,7 @@ import io.smnp.dsl.token.model.entity.TokenizerOutput
import io.smnp.dsl.token.model.enumeration.TokenType
class CommentTokenizer : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(input: String, current: Int, line: Int, source: String): TokenizerOutput {
if (input[current] == '#') {
var consumedChars = 0
var value = ""
@@ -13,7 +13,7 @@ class CommentTokenizer : Tokenizer {
consumedChars += 1
}
return TokenizerOutput.produce(consumedChars, value.substring(1).trim(), value, TokenType.COMMENT, line, current)
return TokenizerOutput.produce(consumedChars, value.substring(1).trim(), value, TokenType.COMMENT, source, line, current)
}
return TokenizerOutput.NONE

View File

@@ -83,16 +83,16 @@ class DefaultTokenizer : Tokenizer {
private val tokenizer = Tokenizer.firstOf(tokenizers)
fun tokenize(lines: List<String>): TokenList {
fun tokenize(lines: List<String>, source: String): TokenList {
val tokens: MutableList<Token> = mutableListOf()
for ((index, line) in lines.withIndex()) {
var current = 0
while (current < line.length) {
val output = tokenize(line, current, index)
val output = tokenize(line, current, index, source)
if (!output.consumed()) {
throw PositionException(InvalidSyntaxException("Unknown symbol ${line[current]}"), TokenPosition(index, current, -1))
throw PositionException(InvalidSyntaxException("Unknown symbol ${line[current]}"), TokenPosition(source, index, current, -1))
}
current += output.consumedChars
@@ -108,7 +108,7 @@ class DefaultTokenizer : Tokenizer {
return tokens.filter { token -> filters.all { filter -> filter(token) } }
}
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
return tokenizer.tokenize(input, current, line)
override fun tokenize(input: String, current: Int, line: Int, source: String): TokenizerOutput {
return tokenizer.tokenize(input, current, line, source)
}
}

View File

@@ -5,12 +5,12 @@ import io.smnp.dsl.token.model.entity.TokenizerOutput
import io.smnp.dsl.token.model.enumeration.TokenType
class FloatTokenizer : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(input: String, current: Int, line: Int, source: String): TokenizerOutput {
return Tokenizer.combined(
{ values, tokenPosition -> Token(TokenType.FLOAT, values.joinToString(""), tokenPosition) },
Tokenizer.regex(TokenType.NONE, "\\d"),
Tokenizer.keyword(TokenType.NONE, "."),
Tokenizer.regex(TokenType.NONE, "\\d")
).tokenize(input, current, line)
).tokenize(input, current, line, source)
}
}

View File

@@ -7,7 +7,7 @@ import io.smnp.dsl.token.model.enumeration.TokenType
import io.smnp.math.Fraction
class NoteTokenizer : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(input: String, current: Int, line: Int, source: String): TokenizerOutput {
var consumedChars = 0
var notePitch: String
var octave: Int? = null
@@ -64,7 +64,7 @@ class NoteTokenizer : Tokenizer {
val note = Note(Pitch.parse(notePitch), octave ?: 4, Fraction(1, duration?.toInt() ?: 4), dot)
return TokenizerOutput.produce(consumedChars, note, rawValue, TokenType.NOTE, line, current)
return TokenizerOutput.produce(consumedChars, note, rawValue, TokenType.NOTE, source, line, current)
}
}

View File

@@ -4,7 +4,7 @@ import io.smnp.dsl.token.model.entity.TokenizerOutput
import io.smnp.dsl.token.model.enumeration.TokenType
class StringTokenizer : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(input: String, current: Int, line: Int, source: String): TokenizerOutput {
if(input[current] == '"') {
var value = input[current].toString()
var consumedChars = 1
@@ -16,7 +16,7 @@ class StringTokenizer : Tokenizer {
value += input[current + consumedChars]
consumedChars += 1
return TokenizerOutput.produce(consumedChars, value.substring(1, value.length-1), value, TokenType.STRING, line, current)
return TokenizerOutput.produce(consumedChars, value.substring(1, value.length-1), value, TokenType.STRING, source, line, current)
}
return TokenizerOutput.NONE

View File

@@ -6,13 +6,18 @@ import io.smnp.dsl.token.model.entity.TokenizerOutput
import io.smnp.dsl.token.model.enumeration.TokenType
interface Tokenizer {
fun tokenize(input: String, current: Int, line: Int): TokenizerOutput
fun tokenize(input: String, current: Int, line: Int, source: String): TokenizerOutput
companion object {
// Char regex
fun regex(type: TokenType, pattern: String): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
var consumedChars = 0
var value = ""
@@ -21,7 +26,7 @@ interface Tokenizer {
consumedChars += 1
}
return TokenizerOutput.produce(consumedChars, value, type, line, current)
return TokenizerOutput.produce(consumedChars, value, type, source, line, current)
}
}
}
@@ -29,13 +34,18 @@ interface Tokenizer {
// Literal keyword ("function", "or", ".")
fun keyword(type: TokenType, keyword: String): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
if ((input.length >= current + keyword.length) && (input.substring(
current,
current + keyword.length
) == keyword)
) {
return TokenizerOutput.produce(keyword.length, keyword, type, line, current)
return TokenizerOutput.produce(keyword.length, keyword, type, source, line, current)
}
return TokenizerOutput.NONE
@@ -46,9 +56,14 @@ interface Tokenizer {
// One of keywords
fun keywords(type: TokenType, vararg keywords: String): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
for (keyword in keywords) {
val output = keyword(type, keyword).tokenize(input, current, line)
val output = keyword(type, keyword).tokenize(input, current, line, source)
if (output.consumed()) {
return output
}
@@ -63,8 +78,13 @@ interface Tokenizer {
// Token for regular TokenType
fun default(type: TokenType): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
return keyword(type, type.token).tokenize(input, current, line)
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
return keyword(type, type.token).tokenize(input, current, line, source)
}
}
@@ -73,8 +93,13 @@ interface Tokenizer {
// Isolate io.io.smnp.dsl.token (for example "function" | "functions" | "function s")
fun separated(tokenizer: Tokenizer, end: String = "\\W"): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
val output = tokenizer.tokenize(input, current, line)
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
val output = tokenizer.tokenize(input, current, line, source)
if (output.consumed()) {
if ((input.length > current + output.consumedChars) && end.toRegex().matches(input[current + output.consumedChars].toString())) {
return output
@@ -94,8 +119,13 @@ interface Tokenizer {
// Change io.io.smnp.dsl.token value (rawValue will be kept)
fun mapValue(tokenizer: Tokenizer, mapper: (Any) -> Any): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
val output = tokenizer.tokenize(input, current, line)
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
val output = tokenizer.tokenize(input, current, line, source)
if(output.consumed()) {
return output.mapToken { output.token.mapValue(mapper) }
}
@@ -108,12 +138,17 @@ interface Tokenizer {
// Complex tokenizer consisting of smaller ones (like "3.14" = regex(\d) + keyword(.) + regex(\d))
fun combined(createToken: (List<String>, TokenPosition) -> Token, vararg tokenizers: Tokenizer): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
var consumedChars = 0
val tokens: MutableList<Token> = mutableListOf()
for(tokenizer in tokenizers) {
val output = tokenizer.tokenize(input, current + consumedChars, line)
val output = tokenizer.tokenize(input, current + consumedChars, line, source)
if(output.consumed()) {
consumedChars += output.consumedChars
tokens.add(output.token)
@@ -126,7 +161,7 @@ interface Tokenizer {
return TokenizerOutput.NONE
}
return TokenizerOutput.produce(consumedChars, createToken(tokens.map { it.rawValue }, TokenPosition(line, tokens.first().position.beginCol, tokens.last().position.endCol)))
return TokenizerOutput.produce(consumedChars, createToken(tokens.map { it.rawValue }, TokenPosition(source, line, tokens.first().position.beginCol, tokens.last().position.endCol)))
}
}
@@ -135,9 +170,14 @@ interface Tokenizer {
// First matched tokenizer
fun firstOf(tokenizers: List<Tokenizer>): Tokenizer {
return object : Tokenizer {
override fun tokenize(input: String, current: Int, line: Int): TokenizerOutput {
override fun tokenize(
input: String,
current: Int,
line: Int,
source: String
): TokenizerOutput {
for (tokenizer in tokenizers) {
val output = tokenizer.tokenize(input, current, line)
val output = tokenizer.tokenize(input, current, line, source)
if(output.consumed()) {
return output
}

View File

@@ -8,7 +8,7 @@ import io.smnp.error.MethodInvocationException
import io.smnp.ext.DefaultModuleRegistry
import io.smnp.ext.DefaultModuleRegistry.requestModuleProviderForPath
import io.smnp.ext.ModuleProvider
import io.smnp.interpreter.LanguageModuleInterpreter
import io.smnp.interpreter.DefaultLanguageModuleInterpreter
import io.smnp.runtime.model.CallStack
import io.smnp.type.model.Value
import io.smnp.type.module.Module
@@ -43,7 +43,7 @@ class DefaultEnvironment : Environment {
private fun loadModule(moduleProvider: ModuleProvider, consumer: (ModuleProvider) -> Unit = {}) {
if (!loadedModules.contains(moduleProvider.path)) {
rootModule.addSubmodule(moduleProvider.provideModule(LanguageModuleInterpreter()))
rootModule.addSubmodule(moduleProvider.provideModule(DefaultLanguageModuleInterpreter()))
moduleProvider.onModuleLoad(this)
loadedModules.add(moduleProvider.path)
consumer(moduleProvider)

View File

@@ -8,7 +8,7 @@ import io.smnp.evaluation.evaluator.RootEvaluator
import io.smnp.evaluation.model.enumeration.EvaluationResult
import java.io.File
class DefaultInterpreter : Interpreter {
class DefaultInterpreter {
private val tokenizer = DefaultTokenizer()
private val parser = RootParser()
private val evaluator = RootEvaluator()
@@ -21,11 +21,12 @@ class DefaultInterpreter : Interpreter {
dryRun: Boolean = false
): Environment {
val lines = code.split("\n")
return run(lines, environment, printTokens, printAst, dryRun)
return run(lines, "<inline>", environment, printTokens, printAst, dryRun)
}
private fun run(
lines: List<String>,
source: String,
environment: Environment,
printTokens: Boolean,
printAst: Boolean,
@@ -33,7 +34,7 @@ class DefaultInterpreter : Interpreter {
): Environment {
environment.loadModule("smnp.lang")
val tokens = tokenizer.tokenize(lines)
val tokens = tokenizer.tokenize(lines, source)
val ast = parser.parse(tokens)
if (printTokens) println(tokens)
@@ -58,8 +59,6 @@ class DefaultInterpreter : Interpreter {
dryRun: Boolean = false
): Environment {
val lines = file.readLines()
return run(lines, environment, printTokens, printAst, dryRun)
return run(lines, file.canonicalPath, environment, printTokens, printAst, dryRun)
}
override fun run(code: String) = run(code, printTokens = false, printAst = false, dryRun = false)
}

View File

@@ -8,10 +8,8 @@ import io.smnp.evaluation.evaluator.Evaluator
import io.smnp.evaluation.evaluator.ExtendEvaluator
import io.smnp.evaluation.evaluator.FunctionDefinitionEvaluator
import io.smnp.evaluation.model.enumeration.EvaluationResult
import io.smnp.type.module.Module
class LanguageModuleInterpreter : Interpreter {
private var rootModule = Module.create("<root>")
class DefaultLanguageModuleInterpreter : LanguageModuleInterpreter {
private val tokenizer = DefaultTokenizer()
private val parser = RootParser()
private val evaluator = Evaluator.repeat(
@@ -21,9 +19,9 @@ class LanguageModuleInterpreter : Interpreter {
), "function definition or extend statement")
)
override fun run(code: String): Environment {
override fun run(code: String, source: String): Environment {
val lines = code.split("\n")
val tokens = tokenizer.tokenize(lines)
val tokens = tokenizer.tokenize(lines, source)
val ast = parser.parse(tokens)
val environment = DefaultEnvironment()