Skip to content

Commit

Permalink
WIP: Scaffolding dictionary implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
melvic-ybanez committed Oct 29, 2023
1 parent d14e742 commit 0c5955d
Show file tree
Hide file tree
Showing 14 changed files with 149 additions and 53 deletions.
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,9 @@ The syntax of Dry should be familiar to Python and Scala developers. Here's the
<factor> ::= <unary> ("/" | "*" | "%" <unary>)*
<unary> ::= ("!" | "-" | "+" | "not") <expression> | <call>
<primary> ::= "false" | "true" | "none" | <number> | <string>
| "self" | <identifier> | "(" <expression> ")"
              | "self" | <identifier> | <dictionary> | "(" <expression> ")"
<dictionary> ::= "{" (<key-value> ("," <key-value>)*)? "}"
<key-value> ::= (<string> | <identifier>) ":" <expression>
<number> ::= <sign>?<nat>("."<nat>)?
<sign> ::= "-" | "+"
<string> ::= '"'(.?"\n"?)*'"'
Expand Down
1 change: 1 addition & 0 deletions src/main/scala/com/melvic/dry/Token.scala
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ object Token {
case object Comma extends TokenType
case object Dot extends TokenType
case object Semicolon extends TokenType
case object Colon extends TokenType
case object Eof extends TokenType
}

Expand Down
25 changes: 22 additions & 3 deletions src/main/scala/com/melvic/dry/ast/Expr.scala
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package com.melvic.dry.ast

import com.melvic.dry.ast.Expr.Literal.Str
import com.melvic.dry.ast.Stmt.BlockStmt
import com.melvic.dry.aux.Show.ShowInterpolator
import com.melvic.dry.aux.implicits.ListOps
Expand Down Expand Up @@ -42,6 +43,23 @@ object Expr {

final case class Self(keyword: Token) extends Expr

// AST node for a dictionary literal, e.g. { "key": expr, name: expr }.
// A key is either a string literal (Left) or a bare identifier parsed as a
// Variable (Right) — see ExprParser.dictionary for how keys are produced.
final case class Dictionary(table: Map[Either[Str, Variable], Expr]) extends Expr

object Dictionary {
// Renders a dictionary back to source form: { <key>: <value>, ... }.
def show: Show[Dictionary] = { case Dictionary(table) =>
// String keys are re-wrapped in double quotes; identifier keys are rendered
// through the generic Expr pretty-printer.
def fieldKeyToString(key: Either[Str, Variable]): String =
key match {
case Left(fieldName) => show""""$fieldName""""
case Right(variable) => Expr.show(variable)
}

// Renders a single entry as `key: value`.
def fieldToString(field: (Either[Str, Variable], Expr)): String =
show"${fieldKeyToString(field._1)}: ${Expr.show(field._2)}"

// NOTE(review): Map iteration order is unspecified, so the printed entry
// order may differ from source order — confirm this is acceptable.
show"{ ${table.map(fieldToString).mkString(", ")} }"
}
}

def show: Show[Expr] = {
case literal: Literal => Literal.show(literal)
case Grouping(expr) => show"($expr)"
Expand All @@ -53,8 +71,9 @@ object Expr {
case Call(callee, arguments, _) => show"$callee(${arguments.map(Expr.show).toCsv})"
case Lambda(params, body) =>
show"lambda(${params.map(Token.show).toCsv}) ${BlockStmt.fromDecls(body: _*)}"
case Get(obj, name) => show"$obj.$name"
case Self(_) => "self"
case Set(obj, name, value) => show"$obj.$name = $value"
case Get(obj, name) => show"$obj.$name"
case Self(_) => "self"
case Set(obj, name, value) => show"$obj.$name = $value"
case dictionary: Dictionary => Dictionary.show(dictionary)
}
}
3 changes: 3 additions & 0 deletions src/main/scala/com/melvic/dry/interpreter/eval/EvalExpr.scala
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ private[eval] trait EvalExpr {
case get: Get => Evaluate.get(get)
case set: Set => Evaluate.set(set)
case self: Self => Evaluate.self(self)
case dictionary: Dictionary => Evaluate.dictionary(dictionary)
}

def lambda(implicit context: Context[Lambda]): Out =
Expand Down Expand Up @@ -203,6 +204,8 @@ private[eval] trait EvalExpr {

def self(implicit context: Context[Self]): Out = varLookup(node.keyword, node)

// WIP stub: dictionary evaluation is not implemented yet; `???` throws
// scala.NotImplementedError if a dictionary literal is ever evaluated.
def dictionary(implicit context: Context[Dictionary]): Out = ???

private def varLookup(name: Token, expr: Expr)(implicit context: Context[Expr]): Out =
locals
.get(LocalExprKey(expr))
Expand Down
10 changes: 10 additions & 0 deletions src/main/scala/com/melvic/dry/interpreter/values/DDictionary.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
package com.melvic.dry.interpreter.values
import com.melvic.dry.interpreter.Env

import scala.collection.mutable

// Runtime representation of a dictionary value. WIP scaffolding: `table`
// holds the evaluated entries but is not yet exposed through `fields`.
final case class DDictionary(table: Map[String, Value], env: Env) extends DObject {
  // Every dictionary instance belongs to the built-in "Dictionary" class
  // (same pattern as DList's "List" class).
  override def klass: Metaclass = DClass("Dictionary", Map.empty, env)

  // Cached as a `val` (matching DList's `override val fields`): the previous
  // `def` allocated a fresh empty map on every access, so any mutation made
  // through `fields` was silently discarded.
  // TODO: WIP — expose the entries of `table` as fields once the dictionary
  // API is fleshed out.
  override val fields: mutable.Map[String, Value] = mutable.Map.empty
}
6 changes: 3 additions & 3 deletions src/main/scala/com/melvic/dry/interpreter/values/DList.scala
Original file line number Diff line number Diff line change
Expand Up @@ -10,18 +10,18 @@ import scala.collection.mutable.ListBuffer
import scala.util.chaining.scalaUtilChainingOps

final case class DList(elems: ListBuffer[Value], env: Env) extends DObject {
type AddProperties = Map[String, Value] => Map[String, Value]
private type AddProperties = Map[String, Value] => Map[String, Value]

override def klass: Metaclass = DClass("List", Map.empty, env)

override val fields: mutable.Map[String, Value] =
addIndexFields
addIndexFields()
.pipe(addAtMethod)
.pipe(addSizeMethod)
.pipe(addAddMethod)
.to(mutable.Map)

private def addIndexFields =
private def addIndexFields() =
elems.zipWithIndex
.map { case (elem, i) => ("_" + i) -> elem }
.to(Map)
Expand Down
1 change: 1 addition & 0 deletions src/main/scala/com/melvic/dry/lexer/Lexer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ final case class Lexer(
case '%' => lexer.addToken(TokenType.Modulo).ok
case '-' => lexer.addToken(TokenType.Minus).ok
case ';' => lexer.addToken(TokenType.Semicolon).ok
case ':' => lexer.addToken(TokenType.Colon).ok
case '!' => lexer.addTokenOrElse('=', TokenType.NotEqual, TokenType.Not)
case '=' => lexer.addTokenOrElse('=', TokenType.EqualEqual, TokenType.Equal)
case '&' => lexer.addToken(TokenType.BAnd).ok
Expand Down
16 changes: 8 additions & 8 deletions src/main/scala/com/melvic/dry/parsers/DeclParser.scala
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@ private[parsers] trait DeclParser extends StmtParser { _: Parser =>
*/
def letDecl: ParseResult[Let] = {
def consumeSemicolon(parser: Parser): ParseResult[Token] =
parser.consume(TokenType.Semicolon, ";", "let")
parser.consumeAfter(TokenType.Semicolon, ";", "let")

consume(TokenType.Identifier, "identifier", "let").flatMap { case Step(name, parser) =>
consumeAfter(TokenType.Identifier, "identifier", "let").flatMap { case Step(name, parser) =>
parser
.matchAny(TokenType.Equal)
.fold[ParseResult[Let]](consumeSemicolon(parser).mapValue(_ => LetDecl(name))) { parser =>
Expand All @@ -51,14 +51,14 @@ private[parsers] trait DeclParser extends StmtParser { _: Parser =>
*/
def defDecl(kind: String): ParseResult[Def] =
for {
name <- consume(TokenType.Identifier, "identifier", s"'${Lexemes.Def}' keyword")
name <- consumeAfter(TokenType.Identifier, "identifier", s"'${Lexemes.Def}' keyword")
params <- name.params
body <- params.functionBody(kind)
} yield Step(Def(name.value, params.value, body.value.declarations), body.next)

private[parsers] def functionBody(kind: String): ParseResult[BlockStmt] =
for {
leftBrace <- consume(TokenType.LeftBrace, "{", kind + " signature")
leftBrace <- consumeAfter(TokenType.LeftBrace, "{", kind + " signature")
body <- leftBrace.block
} yield body

Expand All @@ -67,18 +67,18 @@ private[parsers] trait DeclParser extends StmtParser { _: Parser =>
*/
def classDecl: ParseResult[ClassDecl] =
for {
name <- consume(TokenType.Identifier, "identifier", s"'${Lexemes.Class}' keyword")
leftBrace <- name.consume(TokenType.LeftBrace, "{", "class name")
name <- consumeAfter(TokenType.Identifier, "identifier", s"'${Lexemes.Class}' keyword")
leftBrace <- name.consumeAfter(TokenType.LeftBrace, "{", "class name")
methods <- leftBrace.methods
rightBrace <- methods.consume(TokenType.RightBrace, "}", "class body")
rightBrace <- methods.consumeAfter(TokenType.RightBrace, "}", "class body")
} yield Step(ClassDecl(name.value, methods.value), rightBrace.next)

protected def methods: ParseResult[List[Def]] = {
def recurse(parser: Parser, acc: List[Def]): ParseResult[List[Def]] =
if (parser.check(TokenType.RightBrace) || parser.isAtEnd) ParseResult.succeed(acc.reverse, parser)
else
for {
method <- parser.consume(TokenType.Def, Lexemes.Def, "{ in class")
method <- parser.consumeAfter(TokenType.Def, Lexemes.Def, "{ in class")
function <- method.defDecl("method")
result <- recurse(function.next, function.value :: acc)
} yield result
Expand Down
48 changes: 41 additions & 7 deletions src/main/scala/com/melvic/dry/parsers/ExprParser.scala
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ private[parsers] trait ExprParser { _: Parser =>
* {{{<call> ::= <primary> ("(" <expression>* ")" | "." <identifier>)}}}
*/
def call: ParseResult[Expr] = {
// TODO: see if we can refactor this using the `sequence` utility from `Parser`
def parenCall(callee: Expr, parser: Parser): ParseResult[Expr] = {
def recurse(args: List[Expr], parser: Parser): ParseResult[List[Expr]] =
parser
Expand All @@ -135,7 +136,7 @@ private[parsers] trait ExprParser { _: Parser =>

resultForArgs.flatMap { case Step(_, next) =>
next
.consume(TokenType.RightParen, ")", "function call arguments")
.consumeAfter(TokenType.RightParen, ")", "function call arguments")
.flatMap(step =>
resultForArgs.mapValue(callOrLambda(callee, _, step.value)).mapParser(_ => step.next)
)
Expand All @@ -146,7 +147,7 @@ private[parsers] trait ExprParser { _: Parser =>
// Recursively checks if the expression is being called, or a property access is being invoked
def checkForCalls(step: Step[Expr]): ParseResult[Expr] = {
def propAccess(expr: Expr, next: Parser) =
next.consume(TokenType.Identifier, "property name", ".").mapValue(Get(expr, _))
next.consumeAfter(TokenType.Identifier, "property name", ".").mapValue(Get(expr, _))

def checkForPropAccess = step.next
.matchAny(TokenType.Dot)
Expand Down Expand Up @@ -226,14 +227,47 @@ private[parsers] trait ExprParser { _: Parser =>
.orElse(matchAny(TokenType.Identifier).map(p => Step(Variable(p.previousToken), p)))
.map(_.toParseResult)
.getOrElse(
matchAny(TokenType.LeftParen)
.fold[ParseResult[Expr]](ParseResult.fail(ParseError.expected(peek, "expression", "("), this)) {
_.expression.flatMap { case Step(expr, newParser) =>
newParser.consume(TokenType.RightParen, ")", "expression").mapValue(_ => Grouping(expr))
dictionary.orElse(
matchAny(TokenType.LeftParen)
.fold[ParseResult[Expr]](ParseResult.fail(ParseError.expected(peek, "expression", "("), this)) {
_.expression.flatMap { case Step(expr, newParser) =>
newParser.consumeAfter(TokenType.RightParen, ")", "expression").mapValue(_ => Grouping(expr))
}
}
}
)
)

/**
 * Parses a dictionary literal.
 *
 * {{{
 * <dictionary> ::= "{" (<key-value> ("," <key-value>)*)? "}"
 * <key-value> ::= (<string> | <identifier>) ":" <expression>
 * }}}
 *
 * Delegates brace and comma handling to the `sequence` utility; each element
 * is a key-value pair whose key is either a string literal or an identifier.
 */
def dictionary: ParseResult[Expr] =
sequence[(Either[Literal.Str, Variable], Expr)](
TokenType.LeftBrace,
"{",
TokenType.RightBrace,
"}",
"at the start of dictionary",
"dictionary elements"
)(_.matchAnyWith {
case TokenType.Str(_) => true
case TokenType.Identifier => true
}.flatMap { next =>
// Safe by construction: matchAnyWith above only accepts Str or Identifier,
// hence the @nowarn on this otherwise non-exhaustive match.
@nowarn
val key = next.previousToken match {
case Token(TokenType.Str(string), _, _) => Left(Literal.Str(string))
case token @ Token(TokenType.Identifier, _, _) => Right(Variable(token))
}
next
.consumeAfter(TokenType.Colon, ":", "dictionary key")
.flatMap { case Step(_, next) =>
next.expression.map(_.map((key, _)))
}
// `sequence` expects Option[Step[A]]: a failed element parse is collapsed
// to None (end of elements).
// NOTE(review): this discards the element's specific error (e.g. a missing
// ':'), leaving only the closing-brace error — confirm that is intended.
.fold[Option[Step[(Either[Literal.Str, Variable], Expr)]]]((_, _) => None)(Some(_))
// NOTE(review): duplicate keys collapse via toMap (last one wins) — verify
// against the intended dictionary-literal semantics.
}).mapValue(elements => Dictionary(elements.toMap))

/**
* Like [[leftAssocBinaryWith]], but is specific to non-logical binary operators.
*/
Expand Down
3 changes: 3 additions & 0 deletions src/main/scala/com/melvic/dry/parsers/ParseResult.scala
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,9 @@ final case class ParseResult[+A](result: Result[A], parser: Parser) {
case Left(errors) => ifError(errors, parser)
case Right(value) => ifSuccess(Step(value, parser))
}

/** Falls back to `alternative` (evaluated lazily, by-name) when this result is
  * a failure; returns this result untouched on success.
  */
def orElse[B >: A](alternative: => ParseResult[B]): ParseResult[B] =
  result match {
    case Left(_)  => alternative
    case Right(_) => this
  }
}

object ParseResult {
Expand Down
44 changes: 32 additions & 12 deletions src/main/scala/com/melvic/dry/parsers/Parser.scala
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,12 @@ final case class Parser(tokens: List[Token], current: Int) extends ExprParser wi
Step(parser.previousToken, parser)
}

def consume(tokenType: TokenType, expected: String, after: String): ParseResult[Token] =
def consumeAfter(tokenType: TokenType, expected: String, after: String): ParseResult[Token] =
consume(tokenType, expected, "after " + after)

def consume(tokenType: TokenType, expected: String, at: String): ParseResult[Token] =
if (check(tokenType)) advance.toParseResult
else ParseResult.fail(ParseError.expected(peek, expected, after), this)
else ParseResult.fail(ParseError.expected(peek, expected, at), this)

def isAtEnd: Boolean =
peek.tokenType == TokenType.Eof
Expand Down Expand Up @@ -94,25 +97,42 @@ final case class Parser(tokens: List[Token], current: Int) extends ExprParser wi
}

/**
* {{{<params> ::= "(" <identifier>* ")"}}}
* {{{<params> ::= "(" (<identifier> ("," <identifier>)*)? ")"}}}
*/
// Parses a parenthesized, comma-separated parameter list. Delegates the
// opening/closing delimiter and comma plumbing to `sequence`; each element
// is a single identifier token.
def params: ParseResult[List[Token]] =
sequence[Token](
TokenType.LeftParen,
"(",
TokenType.RightParen,
")",
"after function name",
"parameters"
)(_.matchAny(TokenType.Identifier).map(next => Step(next.previousToken, next)))

private[parsers] def sequence[A](
openingTokenType: TokenType,
openingLexeme: String,
closingTokenType: TokenType,
closingLexeme: String,
openingErrorLabel: String,
// e.g. parameters, list elements, dict elements
elementsLabel: String
)(parseElement: Parser => Option[Step[A]]): ParseResult[List[A]] =
for {
afterLeftParen <- consume(TokenType.LeftParen, "(", "function name")
afterOpening <- consume(openingTokenType, openingLexeme, openingErrorLabel)
params <- {
def parseWhileNextIsComma(params: List[Token], parser: Parser): ParseResult[List[Token]] =
parser
.matchAny(TokenType.Identifier)
.fold(ParseResult.succeed(params, parser)) { next =>
val newParams = next.previousToken :: params
def parseWhileThereIsComma(elements: List[A], parser: Parser): ParseResult[List[A]] =
parseElement(parser)
.fold(ParseResult.succeed(elements, parser)) { case Step(element, next) =>
val newElements = element :: elements
next
.matchAny(TokenType.Comma)
.fold(ParseResult.succeed(newParams, next))(parseWhileNextIsComma(newParams, _))
.fold(ParseResult.succeed(newElements, next))(parseWhileThereIsComma(newElements, _))
}

parseWhileNextIsComma(Nil, afterLeftParen)
parseWhileThereIsComma(Nil, afterOpening)
.mapValue(_.reverse)
.flatMapParser(_.consume(TokenType.RightParen, ")", "parameters"))
.flatMapParser(_.consumeAfter(closingTokenType, closingLexeme, elementsLabel))
}
} yield params
}
Expand Down
Loading

0 comments on commit 0c5955d

Please sign in to comment.