Changes to improve compilation for Calculations.elm (finos#96)
* Changes to improve compilation for `Calculations.elm`

Adds support for:

- A different set of function mappings when converting a function
  identified as Scala-only
- Converting an empty DataFrame (an issue with the session object is
  still pending)
- Several simple mappings needed by `Calculations.elm`: `List.maximum`,
  `max`, `min`

* Code review fixes and a fix for `List.sum` with null values
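
As a rough illustration, the new mappings target Elm code of the following
shape; this is a hypothetical fragment, not taken from the repository's
actual `Calculations.elm`:

    module Calculations exposing (highestValue, clampRate, totalValue)

    -- `List.maximum` returns a Maybe, so a default is supplied explicitly
    highestValue : List Float -> Float
    highestValue amounts =
        List.maximum amounts
            |> Maybe.withDefault 0

    -- `max` and `min` map to simple scalar expressions
    clampRate : Float -> Float
    clampRate rate =
        max 0 (min 1 rate)

    -- `List.sum` is the case whose null handling this commit fixes
    totalValue : List Float -> Float
    totalValue amounts =
        List.sum amounts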
sfc-gh-lfallasavendano committed Dec 13, 2023
1 parent 71f8a93 commit 8293644
Showing 19 changed files with 980 additions and 367 deletions.
165 changes: 16 additions & 149 deletions src/Morphir/Snowpark/Backend.elm
@@ -1,4 +1,4 @@
module Morphir.Snowpark.Backend exposing (..)
module Morphir.Snowpark.Backend exposing (mapDistribution, Options, mapFunctionDefinition)

import Dict
import List
@@ -11,42 +11,27 @@ import Morphir.IR.Package as Package
import Morphir.IR.Module as Module
import Morphir.IR.Path as Path exposing (Path)
import Morphir.IR.Type as Type exposing (Type)
import Morphir.Scala.AST as Scala
import Morphir.Scala.PrettyPrinter as PrettyPrinter
import Morphir.IR.Value as Value exposing (Pattern(..), Value(..))
import Morphir.IR.Literal exposing (Literal(..))
import Morphir.IR.Documented exposing (Documented)
import Morphir.IR.FQName as FQName
import Morphir.Scala.AST as Scala
import Morphir.Scala.PrettyPrinter as PrettyPrinter
import Morphir.Scala.Common exposing (scalaKeywords, mapValueName, javaObjectMethods)
import Morphir.Snowpark.Constants as Constants exposing (applySnowparkFunc)
import Morphir.Snowpark.AccessElementMapping exposing (
mapFieldAccess
, mapReferenceAccess
, mapVariableAccess
, mapConstructorAccess)
import Morphir.Snowpark.RecordWrapperGenerator as RecordWrapperGenerator
import Morphir.Snowpark.ReferenceUtils exposing (isTypeReferenceToSimpleTypesRecord, mapLiteral)
import Morphir.Snowpark.ReferenceUtils exposing (isTypeReferenceToSimpleTypesRecord)
import Morphir.Snowpark.TypeRefMapping exposing (mapTypeReference, mapFunctionReturnType)
import Morphir.Snowpark.MapFunctionsMapping as MapFunctionsMapping
import Morphir.Snowpark.PatternMatchMapping exposing (mapPatternMatch)
import Morphir.Snowpark.MappingContext as MappingContext exposing (
MappingContextInfo
, GlobalDefinitionInformation
, emptyValueMappingContext
, getFunctionClassification
, typeRefIsListOf
, ValueMappingContext
, FunctionClassification
, isCandidateForDataFrame
, isFunctionClassificationReturningDataFrameExpressions
, isFunctionReturningDataFrameExpressions
, isTypeRefToRecordWithSimpleTypes
, isTypeRefToRecordWithComplexTypes )
import Morphir.Snowpark.MappingContext exposing (isRecordWithComplexTypes)
import Morphir.Snowpark.ReferenceUtils exposing (scalaPathToModule)
import Morphir.Snowpark.Utils exposing (collectMaybeList)
import Morphir.Snowpark.MappingContext exposing (isRecordWithSimpleTypes)
import Morphir.Snowpark.MappingContext exposing (isAnonymousRecordWithSimpleTypes)
, isCandidateForDataFrame )
import Morphir.Snowpark.MappingContext exposing (FunctionClassification(..))
import Morphir.Snowpark.FunctionMappingsForPlainScala as FunctionMappingsForPlainScala
import Morphir.Snowpark.MapExpressionsToDataFrameOperations exposing (mapValue)

type alias Options =
{}
@@ -242,130 +227,12 @@ processParameters inputTypes currentFunctionClassification ctx =

mapFunctionBody : Value.Definition ta (Type ()) -> ValueMappingContext -> Maybe Scala.Value
mapFunctionBody value ctx =
Maybe.Just (mapValue value.body ctx)

mapValue : Value ta (Type ()) -> ValueMappingContext -> Scala.Value
mapValue value ctx =
case value of
Literal tpe literal ->
mapLiteral tpe literal
Field tpe val name ->
mapFieldAccess tpe val name ctx mapValue
Variable _ name as varAccess ->
mapVariableAccess name varAccess ctx
Constructor tpe name ->
mapConstructorAccess tpe name ctx
List listType values ->
mapListCreation listType values ctx
Reference tpe name ->
mapReferenceAccess tpe name ctx
Apply _ _ _ ->
MapFunctionsMapping.mapFunctionsMapping value mapValue ctx
PatternMatch tpe expr cases ->
mapPatternMatch (tpe, expr, cases) mapValue ctx
IfThenElse _ condition thenExpr elseExpr ->
mapIfThenElse condition thenExpr elseExpr ctx
LetDefinition _ name definition body ->
mapLetDefinition name definition body ctx
FieldFunction _ [name] ->
Constants.applySnowparkFunc "col" [(Scala.Literal (Scala.StringLit name))]
Value.Tuple _ tupleElements ->
Constants.applySnowparkFunc "array_construct" <| List.map (\e -> mapValue e ctx) tupleElements
Value.Record tpe fields ->
mapRecordCreation tpe fields ctx
_ ->
Scala.Literal (Scala.StringLit ("Unsupported element"))


mapRecordCreation : Type () -> Dict.Dict (Name.Name) (Value ta (Type ())) -> ValueMappingContext -> Scala.Value
mapRecordCreation tpe fields ctx =
if isTypeRefToRecordWithComplexTypes tpe ctx.typesContextInfo then
mapRecordCreationToCaseClassCreation tpe fields ctx
else
if (isTypeRefToRecordWithSimpleTypes tpe ctx.typesContextInfo ||
isAnonymousRecordWithSimpleTypes tpe ctx.typesContextInfo) &&
isFunctionClassificationReturningDataFrameExpressions ctx.currentFunctionClassification then
MappingContext.getFieldInfoIfRecordType tpe ctx.typesContextInfo
|> Maybe.map (\fieldInfo -> collectMaybeList
((\(fieldName, _) ->
(Dict.get fieldName fields)
|> Maybe.map (\argExpr -> mapValue argExpr ctx)))
fieldInfo )
|> Maybe.withDefault Nothing
|> Maybe.map (applySnowparkFunc "array_construct")
|> Maybe.withDefault (Scala.Literal (Scala.StringLit ("Record creation not converted1")))
else
Scala.Literal (Scala.StringLit ("Record creation not converted2"))


mapRecordCreationToCaseClassCreation : Type () -> Dict.Dict (Name.Name) (Value ta (Type ())) -> ValueMappingContext -> Scala.Value
mapRecordCreationToCaseClassCreation tpe fields ctx =
case tpe of
Type.Reference _ fullName [] ->
let
caseClassReference =
Scala.Ref (scalaPathToModule fullName) (fullName |> FQName.getLocalName |> Name.toTitleCase)
processArgs : List (Name.Name, Type ()) -> Maybe (List Scala.ArgValue)
processArgs fieldsInfo =
fieldsInfo
|> collectMaybeList (\(fieldName, _) ->
Dict.get fieldName fields
|> Maybe.map (\argExpr -> Scala.ArgValue (Just (Name.toCamelCase fieldName)) (mapValue argExpr ctx)))
in
MappingContext.getFieldInfoIfRecordType tpe ctx.typesContextInfo
|> Maybe.map processArgs
|> Maybe.withDefault Nothing
|> Maybe.map (\ctorArgs -> Scala.Apply caseClassReference ctorArgs)
|> Maybe.withDefault (Scala.Literal (Scala.StringLit ("Record creation not converted!")))
_ ->
Scala.Literal (Scala.StringLit ("Record creation not converted"))


mapListCreation : (Type ()) -> List (Value ta (Type ())) -> ValueMappingContext -> Scala.Value
mapListCreation tpe values ctx =
if typeRefIsListOf tpe (\innerTpe -> isTypeRefToRecordWithSimpleTypes innerTpe ctx.typesContextInfo) &&
isFunctionClassificationReturningDataFrameExpressions ctx.currentFunctionClassification then
applySnowparkFunc "array_construct"
(values |> List.map (\v -> mapValue v ctx))
else
Scala.Apply
(Scala.Variable "Seq")
(values |> List.map (\v -> Scala.ArgValue Nothing (mapValue v ctx)))


mapLetDefinition : Name.Name -> Value.Definition ta (Type ()) -> Value ta (Type ()) -> ValueMappingContext -> Scala.Value
mapLetDefinition name definition body ctx =
case definition.inputTypes of
[] ->
let
(pairs, bodyToConvert) = collectNestedLetDeclarations body []
decls = ((name, definition) :: pairs)
|> List.map (\(pName, pDefinition) ->
Scala.ValueDecl { modifiers = []
, pattern = Scala.NamedMatch (pName |> Name.toCamelCase)
, valueType = Nothing
, value = mapValue pDefinition.body ctx
})
in
Scala.Block decls (mapValue bodyToConvert ctx)
_ ->
Scala.Literal (Scala.StringLit ("Unsupported function let expression"))

collectNestedLetDeclarations : Value ta (Type ()) ->
List (Name.Name, Value.Definition ta (Type ())) ->
(List (Name.Name, Value.Definition ta (Type ())), Value ta (Type ()))
collectNestedLetDeclarations currentBody collectedPairs =
case currentBody of
Value.LetDefinition _ name definition body ->
collectNestedLetDeclarations body ((name, definition)::collectedPairs)
_ ->
(List.reverse collectedPairs, currentBody)

let
functionToMap =
if ctx.currentFunctionClassification == FromComplexValuesToDataFrames || ctx.currentFunctionClassification == FromComplexToValues then
FunctionMappingsForPlainScala.mapValueForPlainScala
else
mapValue
in
Maybe.Just (functionToMap value.body ctx)

mapIfThenElse : Value ta (Type ()) -> Value ta (Type ()) -> Value ta (Type ()) -> ValueMappingContext -> Scala.Value
mapIfThenElse condition thenExpr elseExpr ctx =
let
whenCall =
Constants.applySnowparkFunc "when" [ mapValue condition ctx, mapValue thenExpr ctx ]
in
Scala.Apply (Scala.Select whenCall "otherwise") [Scala.ArgValue Nothing (mapValue elseExpr ctx)]
26 changes: 22 additions & 4 deletions src/Morphir/Snowpark/Constants.elm
@@ -10,17 +10,35 @@ snowflakeNamespace = ["com", "snowflake", "snowpark"]
functionsNamespace : List String
functionsNamespace = snowflakeNamespace ++ ["functions"]

typesNamespace : List String
typesNamespace = snowflakeNamespace ++ ["types"]

applySnowparkFunc : String -> List Scala.Value -> Scala.Value
applySnowparkFunc name args =
Scala.Apply
(Scala.Ref functionsNamespace name)
(args |> List.map (\v -> Scala.ArgValue Nothing v))

applyFunctionName functionsNamespace name args

typeRefForSnowparkType : String -> Scala.Type
typeRefForSnowparkType typeName =
(Scala.TypeRef snowflakeNamespace typeName)

typeRefForSnowparkTypesType : String -> Scala.Type
typeRefForSnowparkTypesType typeName =
(Scala.TypeRef typesNamespace typeName)

applyForSnowparkTypesType : String -> List Scala.Value -> Scala.Value
applyForSnowparkTypesType name args =
applyFunctionName typesNamespace name args

applyForSnowparkTypesTypeExpr : String -> Scala.Value
applyForSnowparkTypesTypeExpr name =
Scala.Ref typesNamespace name

applyFunctionName : List String -> String -> List Scala.Value -> Scala.Value
applyFunctionName namespace name args =
Scala.Apply
(Scala.Ref namespace name)
(args |> List.map (\v -> Scala.ArgValue Nothing v))

type alias MapValueType ta = ValueIR.Value ta (TypeIR.Type ()) -> ValueMappingContext -> Scala.Value

type alias VariableInformation = (List Scala.Value, List (String, Scala.Value))
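
For context, here is a minimal sketch (not part of the commit) of how these
helpers can be used from the backend, assuming `applySnowparkFunc` and
`typeRefForSnowparkTypesType` are exposed by `Morphir.Snowpark.Constants`:

    module Example exposing (colExpr, stringTypeRef)

    import Morphir.Scala.AST as Scala
    import Morphir.Snowpark.Constants exposing (applySnowparkFunc, typeRefForSnowparkTypesType)

    -- Builds the Scala AST for: com.snowflake.snowpark.functions.col("price")
    colExpr : Scala.Value
    colExpr =
        applySnowparkFunc "col" [ Scala.Literal (Scala.StringLit "price") ]

    -- Refers to the Snowpark type com.snowflake.snowpark.types.StringType
    stringTypeRef : Scala.Type
    stringTypeRef =
        typeRefForSnowparkTypesType "StringType"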